Merge commit 'aa9bc86125' into sync-from-ra

Laurențiu Nicola 2023-06-05 12:04:23 +03:00
parent 1570299af4
commit c48062fe2a
598 changed files with 57696 additions and 17615 deletions

View file

@ -3,12 +3,11 @@
//! reference to a type with the field `bar`. This is an approximation of the
//! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs).
use std::sync::Arc;
use chalk_ir::cast::Cast;
use hir_def::lang_item::LangItem;
use hir_expand::name::name;
use limit::Limit;
use triomphe::Arc;
use crate::{
db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt,
@ -23,6 +22,21 @@ pub(crate) enum AutoderefKind {
Overloaded,
}
pub fn autoderef(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
ty: Canonical<Ty>,
) -> impl Iterator<Item = Canonical<Ty>> + '_ {
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
let mut autoderef = Autoderef::new(&mut table, ty);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
v.push(autoderef.table.canonicalize(ty).value);
}
v.into_iter()
}
#[derive(Debug)]
pub(crate) struct Autoderef<'a, 'db> {
pub(crate) table: &'a mut InferenceTable<'db>,
@ -76,49 +90,43 @@ pub(crate) fn autoderef_step(
table: &mut InferenceTable<'_>,
ty: Ty,
) -> Option<(AutoderefKind, Ty)> {
if let Some(derefed) = builtin_deref(&ty) {
if let Some(derefed) = builtin_deref(table, &ty, false) {
Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
} else {
Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
}
}
// FIXME: replace uses of this with Autoderef above
pub fn autoderef(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
ty: Canonical<Ty>,
) -> impl Iterator<Item = Canonical<Ty>> + '_ {
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
let mut autoderef = Autoderef::new(&mut table, ty);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
v.push(autoderef.table.canonicalize(ty).value);
}
v.into_iter()
}
pub(crate) fn deref(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
let _p = profile::span("deref");
autoderef_step(table, ty).map(|(_, ty)| ty)
}
fn builtin_deref(ty: &Ty) -> Option<&Ty> {
pub(crate) fn builtin_deref<'ty>(
table: &mut InferenceTable<'_>,
ty: &'ty Ty,
explicit: bool,
) -> Option<&'ty Ty> {
match ty.kind(Interner) {
TyKind::Ref(.., ty) | TyKind::Raw(.., ty) => Some(ty),
TyKind::Ref(.., ty) => Some(ty),
// FIXME: Maybe accept this but diagnose if its not explicit?
TyKind::Raw(.., ty) if explicit => Some(ty),
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
if crate::lang_items::is_box(table.db, adt) {
substs.at(Interner, 0).ty(Interner)
} else {
None
}
}
_ => None,
}
}
fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
pub(crate) fn deref_by_trait(
table @ &mut InferenceTable { db, .. }: &mut InferenceTable<'_>,
ty: Ty,
) -> Option<Ty> {
let _p = profile::span("deref_by_trait");
if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
// don't try to deref unknown variables
return None;
}
let db = table.db;
let deref_trait =
db.lang_item(table.trait_env.krate, LangItem::Deref).and_then(|l| l.as_trait())?;
let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
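A standalone sketch of the revised deref-step rules above, using a toy `Ty` enum instead of the real chalk types (all names here are illustrative): references always peel, raw pointers now only peel on an explicit deref, and `Box` peels to its payload.

```rust
#[derive(Debug, PartialEq)]
enum Ty {
    Ref(Box<Ty>),
    Raw(Box<Ty>),
    BoxOf(Box<Ty>), // stands in for the `Adt` + `is_box` check in the diff
    Unit,
}

fn builtin_deref(ty: &Ty, explicit: bool) -> Option<&Ty> {
    match ty {
        Ty::Ref(inner) => Some(inner),
        // Mirrors the FIXME in the diff: raw pointers only on explicit `*p`.
        Ty::Raw(inner) if explicit => Some(inner),
        Ty::BoxOf(inner) => Some(inner),
        _ => None,
    }
}

fn main() {
    let raw = Ty::Raw(Box::new(Ty::Unit));
    assert_eq!(builtin_deref(&raw, false), None); // autoderef skips raw pointers
    assert_eq!(builtin_deref(&raw, true), Some(&Ty::Unit));
}
```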

View file

@ -18,7 +18,6 @@ use crate::{
consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive,
to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, BoundVar, CallableSig,
GenericArg, Interner, ProjectionTy, Substitution, TraitRef, Ty, TyDefId, TyExt, TyKind,
ValueTyDefId,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@ -195,6 +194,19 @@ impl TyBuilder<()> {
params.placeholder_subst(db)
}
pub fn unknown_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
let params = generics(db.upcast(), def.into());
Substitution::from_iter(
Interner,
params.iter_id().map(|id| match id {
either::Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
either::Either::Right(id) => {
unknown_const_as_generic(db.const_param_ty(id)).cast(Interner)
}
}),
)
}
pub fn subst_for_def(
db: &dyn HirDatabase,
def: impl Into<GenericDefId>,
@ -233,6 +245,25 @@ impl TyBuilder<()> {
TyBuilder::new((), params, parent_subst)
}
pub fn subst_for_closure(
db: &dyn HirDatabase,
parent: DefWithBodyId,
sig_ty: Ty,
) -> Substitution {
let sig_ty = sig_ty.cast(Interner);
let self_subst = iter::once(&sig_ty);
let Some(parent) = parent.as_generic_def_id() else {
return Substitution::from_iter(Interner, self_subst);
};
Substitution::from_iter(
Interner,
self_subst
.chain(generics(db.upcast(), parent).placeholder_subst(db).iter(Interner))
.cloned()
.collect::<Vec<_>>(),
)
}
pub fn build(self) -> Substitution {
let ((), subst) = self.build_internal();
subst
@ -362,21 +393,4 @@ impl TyBuilder<Binders<Ty>> {
pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {
TyBuilder::subst_for_def(db, def, None).with_data(db.impl_self_ty(def))
}
pub fn value_ty(
db: &dyn HirDatabase,
def: ValueTyDefId,
parent_subst: Option<Substitution>,
) -> TyBuilder<Binders<Ty>> {
let poly_value_ty = db.value_ty(def);
let id = match def.to_generic_def_id() {
Some(id) => id,
None => {
// static items
assert!(parent_subst.is_none());
return TyBuilder::new_empty(poly_value_ty);
}
};
TyBuilder::subst_for_def(db, id, parent_subst).with_data(poly_value_ty)
}
}
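The new `subst_for_closure` prepends the closure's signature type to the parent's substitution. A minimal model of that ordering, with strings standing in for generic arguments:

```rust
use std::iter;

fn main() {
    // Toy stand-ins: the signature type goes first, then the parent's args.
    let sig_ty = "sig_ty".to_string();
    let parent_subst = vec!["T".to_string(), "U".to_string()];
    let subst: Vec<String> = iter::once(sig_ty).chain(parent_subst).collect();
    assert_eq!(subst, ["sig_ty", "T", "U"]);
}
```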

View file

@ -1,8 +1,8 @@
//! The implementation of `RustIrDatabase` for Chalk, which provides information
//! about the code that Chalk needs.
use std::sync::Arc;
use core::ops;
use std::{iter, sync::Arc};
use cov_mark::hit;
use tracing::debug;
use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
@ -10,9 +10,9 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
use base_db::CrateId;
use hir_def::{
expr::Movability,
hir::Movability,
lang_item::{lang_attr, LangItem, LangItemTarget},
AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId,
AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId,
};
use hir_expand::name::name;
@ -25,7 +25,7 @@ use crate::{
method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
to_assoc_type_id, to_chalk_trait_id,
traits::ChalkContext,
utils::generics,
utils::{generics, ClosureSubst},
wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId,
Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef,
TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause,
@ -108,17 +108,6 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
_ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
};
fn local_impls(db: &dyn HirDatabase, module: ModuleId) -> Option<Arc<TraitImpls>> {
let block = module.containing_block()?;
hit!(block_local_impls);
db.trait_impls_in_block(block)
}
// Note: Since we're using impls_for_trait, only impls where the trait
// can be resolved should ever reach Chalk. impl_datum relies on that
// and will panic if the trait can't be resolved.
let in_deps = self.db.trait_impls_in_deps(self.krate);
let in_self = self.db.trait_impls_in_crate(self.krate);
let trait_module = trait_.module(self.db.upcast());
let type_module = match self_ty_fp {
Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
@ -128,33 +117,62 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
_ => None,
};
let impl_maps = [
Some(in_deps),
Some(in_self),
local_impls(self.db, trait_module),
type_module.and_then(|m| local_impls(self.db, m)),
];
let mut def_blocks =
[trait_module.containing_block(), type_module.and_then(|it| it.containing_block())];
// Note: Since we're using impls_for_trait, only impls where the trait
// can be resolved should ever reach Chalk. impl_datum relies on that
// and will panic if the trait can't be resolved.
let in_deps = self.db.trait_impls_in_deps(self.krate);
let in_self = self.db.trait_impls_in_crate(self.krate);
let block_impls = iter::successors(self.block, |&block_id| {
cov_mark::hit!(block_local_impls);
self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block())
})
.inspect(|&block_id| {
// make sure we don't search the same block twice
def_blocks.iter_mut().for_each(|block| {
if *block == Some(block_id) {
*block = None;
}
});
})
.map(|block_id| self.db.trait_impls_in_block(block_id));
let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
let result: Vec<_> = if fps.is_empty() {
debug!("Unrestricted search for {:?} impls...", trait_);
impl_maps
.iter()
.filter_map(|o| o.as_ref())
.flat_map(|impls| impls.for_trait(trait_).map(id_to_chalk))
.collect()
} else {
impl_maps
.iter()
.filter_map(|o| o.as_ref())
.flat_map(|impls| {
fps.iter().flat_map(move |fp| {
impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
})
})
.collect()
};
let mut result = vec![];
match fps {
[] => {
debug!("Unrestricted search for {:?} impls...", trait_);
let mut f = |impls: &TraitImpls| {
result.extend(impls.for_trait(trait_).map(id_to_chalk));
};
f(&in_self);
in_deps.iter().map(ops::Deref::deref).for_each(&mut f);
block_impls.for_each(|it| f(&it));
def_blocks
.into_iter()
.flatten()
.for_each(|it| f(&self.db.trait_impls_in_block(it)));
}
fps => {
let mut f =
|impls: &TraitImpls| {
result.extend(fps.iter().flat_map(|fp| {
impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
}));
};
f(&in_self);
in_deps.iter().map(ops::Deref::deref).for_each(&mut f);
block_impls.for_each(|it| f(&it));
def_blocks
.into_iter()
.flatten()
.for_each(|it| f(&self.db.trait_impls_in_block(it)));
}
}
debug!("impls_for_trait returned {} impls", result.len());
result
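The reworked lookup walks from the innermost block out through its ancestors via `iter::successors`, deduplicating against the trait's and self type's defining blocks along the way. A self-contained model of that parent walk (toy block table, not the real `block_def_map`):

```rust
use std::iter;

fn main() {
    // parent[b] is the containing block of block b, if any: 2 -> 1 -> 0.
    let parent: Vec<Option<usize>> = vec![None, Some(0), Some(1)];
    let chain: Vec<usize> = iter::successors(Some(2), |&b| parent[b]).collect();
    assert_eq!(chain, [2, 1, 0]); // innermost block first, outermost last
}
```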
@ -193,7 +211,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
&self,
environment: &chalk_ir::Environment<Interner>,
) -> chalk_ir::ProgramClauses<Interner> {
self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
self.db.program_clauses_for_chalk_env(self.krate, self.block, environment.clone())
}
fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId<Interner>) -> Arc<OpaqueTyDatum> {
@ -321,7 +339,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
_closure_id: chalk_ir::ClosureId<Interner>,
substs: &chalk_ir::Substitution<Interner>,
) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
let sig_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
let sig_ty = ClosureSubst(substs).sig_ty();
let sig = &sig_ty.callable_sig(self.db).expect("first closure param should be fn ptr");
let io = rust_ir::FnDefInputsAndOutputDatum {
argument_types: sig.params().to_vec(),
@ -347,13 +365,19 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
let id = from_chalk_trait_id(trait_id);
self.db.trait_data(id).name.to_string()
self.db.trait_data(id).name.display(self.db.upcast()).to_string()
}
fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
match adt_id {
hir_def::AdtId::StructId(id) => self.db.struct_data(id).name.to_string(),
hir_def::AdtId::EnumId(id) => self.db.enum_data(id).name.to_string(),
hir_def::AdtId::UnionId(id) => self.db.union_data(id).name.to_string(),
hir_def::AdtId::StructId(id) => {
self.db.struct_data(id).name.display(self.db.upcast()).to_string()
}
hir_def::AdtId::EnumId(id) => {
self.db.enum_data(id).name.display(self.db.upcast()).to_string()
}
hir_def::AdtId::UnionId(id) => {
self.db.union_data(id).name.display(self.db.upcast()).to_string()
}
}
}
fn adt_size_align(&self, _id: chalk_ir::AdtId<Interner>) -> Arc<rust_ir::AdtSizeAlign> {
@ -362,7 +386,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
}
fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
let id = self.db.associated_ty_data(assoc_ty_id).name;
self.db.type_alias_data(id).name.to_string()
self.db.type_alias_data(id).name.display(self.db.upcast()).to_string()
}
fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
format!("Opaque_{}", opaque_ty_id.0)
@ -373,7 +397,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
fn generator_datum(
&self,
id: chalk_ir::GeneratorId<Interner>,
) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorDatum<Interner>> {
) -> Arc<chalk_solve::rust_ir::GeneratorDatum<Interner>> {
let (parent, expr) = self.db.lookup_intern_generator(id.into());
// We fill substitution with unknown type, because we only need to know whether the generic
@ -398,8 +422,8 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
let input_output = crate::make_type_and_const_binders(it, input_output);
let movability = match self.db.body(parent)[expr] {
hir_def::expr::Expr::Closure {
closure_kind: hir_def::expr::ClosureKind::Generator(movability),
hir_def::hir::Expr::Closure {
closure_kind: hir_def::hir::ClosureKind::Generator(movability),
..
} => movability,
_ => unreachable!("non generator expression interned as generator"),
@ -414,7 +438,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
fn generator_witness_datum(
&self,
id: chalk_ir::GeneratorId<Interner>,
) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorWitnessDatum<Interner>> {
) -> Arc<chalk_solve::rust_ir::GeneratorWitnessDatum<Interner>> {
// FIXME: calculate inner types
let inner_types =
rust_ir::GeneratorWitnessExistential { types: wrap_empty_binders(vec![]) };
@ -435,7 +459,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
}
}
impl<'a> chalk_ir::UnificationDatabase<Interner> for &'a dyn HirDatabase {
impl chalk_ir::UnificationDatabase<Interner> for &dyn HirDatabase {
fn fn_def_variance(
&self,
fn_def_id: chalk_ir::FnDefId<Interner>,
@ -451,9 +475,10 @@ impl<'a> chalk_ir::UnificationDatabase<Interner> for &'a dyn HirDatabase {
pub(crate) fn program_clauses_for_chalk_env_query(
db: &dyn HirDatabase,
krate: CrateId,
block: Option<BlockId>,
environment: chalk_ir::Environment<Interner>,
) -> chalk_ir::ProgramClauses<Interner> {
chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
chalk_solve::program_clauses_for_env(&ChalkContext { db, krate, block }, &environment)
}
pub(crate) fn associated_ty_data_query(
@ -786,17 +811,17 @@ pub(crate) fn adt_variance_query(
)
}
/// Returns instantiated predicates.
pub(super) fn convert_where_clauses(
db: &dyn HirDatabase,
def: GenericDefId,
substs: &Substitution,
) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
let generic_predicates = db.generic_predicates(def);
let mut result = Vec::with_capacity(generic_predicates.len());
for pred in generic_predicates.iter() {
result.push(pred.clone().substitute(Interner, substs));
}
result
db.generic_predicates(def)
.iter()
.cloned()
.map(|pred| pred.substitute(Interner, substs))
.collect()
}
pub(super) fn generic_predicate_to_inline_bound(

View file

@ -1,24 +1,28 @@
//! Various extensions traits for Chalk types.
use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
use chalk_ir::{cast::Cast, FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
use hir_def::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
generics::TypeOrConstParamData,
lang_item::LangItem,
type_ref::Rawness,
FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
DefWithBodyId, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
};
use crate::{
db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
CallableDefId, CallableSig, DynTy, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
to_chalk_trait_id,
utils::{generics, ClosureSubst},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Canonical, CanonicalVarKinds,
ClosureId, DynTy, FnPointer, ImplTraitId, InEnvironment, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
};
pub trait TyExt {
fn is_unit(&self) -> bool;
fn is_integral(&self) -> bool;
fn is_scalar(&self) -> bool;
fn is_floating_point(&self) -> bool;
fn is_never(&self) -> bool;
fn is_unknown(&self) -> bool;
@ -28,8 +32,10 @@ pub trait TyExt {
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
fn as_builtin(&self) -> Option<BuiltinType>;
fn as_tuple(&self) -> Option<&Substitution>;
fn as_closure(&self) -> Option<ClosureId>;
fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>;
fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>;
fn as_raw_ptr(&self) -> Option<(&Ty, Mutability)>;
fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)>;
fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId>;
@ -44,6 +50,7 @@ pub trait TyExt {
fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>>;
fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId>;
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool;
/// FIXME: Get rid of this, it's not a good abstraction
fn equals_ctor(&self, other: &Ty) -> bool;
@ -62,6 +69,10 @@ impl TyExt for Ty {
)
}
fn is_scalar(&self) -> bool {
matches!(self.kind(Interner), TyKind::Scalar(_))
}
fn is_floating_point(&self) -> bool {
matches!(
self.kind(Interner),
@ -128,12 +139,20 @@ impl TyExt for Ty {
}
}
fn as_closure(&self) -> Option<ClosureId> {
match self.kind(Interner) {
TyKind::Closure(id, _) => Some(*id),
_ => None,
}
}
fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId> {
match self.callable_def(db) {
Some(CallableDefId::FunctionId(func)) => Some(func),
Some(CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_)) | None => None,
}
}
fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)> {
match self.kind(Interner) {
TyKind::Ref(mutability, lifetime, ty) => Some((ty, lifetime.clone(), *mutability)),
@ -141,6 +160,13 @@ impl TyExt for Ty {
}
}
fn as_raw_ptr(&self) -> Option<(&Ty, Mutability)> {
match self.kind(Interner) {
TyKind::Raw(mutability, ty) => Some((ty, *mutability)),
_ => None,
}
}
fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
match self.kind(Interner) {
TyKind::Ref(mutability, _, ty) => Some((ty, Rawness::Ref, *mutability)),
@ -176,10 +202,7 @@ impl TyExt for Ty {
let sig = db.callable_item_signature(callable_def);
Some(sig.substitute(Interner, parameters))
}
TyKind::Closure(.., substs) => {
let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner);
sig_param.callable_sig(db)
}
TyKind::Closure(.., substs) => ClosureSubst(substs).sig_ty().callable_sig(db),
_ => None,
}
}
@ -318,6 +341,20 @@ impl TyExt for Ty {
}
}
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
let crate_id = owner.module(db.upcast()).krate();
let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
let env = db.trait_environment_for_body(owner);
let goal = Canonical {
value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
binders: CanonicalVarKinds::empty(Interner),
};
db.trait_solve(crate_id, None, goal).is_some()
}
fn equals_ctor(&self, other: &Ty) -> bool {
match (self.kind(Interner), other.kind(Interner)) {
(TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt == adt2,

View file

@ -3,19 +3,20 @@
use base_db::CrateId;
use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData};
use hir_def::{
expr::Expr,
path::ModPath,
hir::Expr,
path::Path,
resolver::{Resolver, ValueNs},
type_ref::ConstRef,
ConstId, EnumVariantId,
EnumVariantId, GeneralConstId, StaticId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
use triomphe::Arc;
use crate::{
db::HirDatabase, infer::InferenceContext, layout::layout_of_ty, lower::ParamLoweringMode,
to_placeholder_idx, utils::Generics, Const, ConstData, ConstScalar, ConstValue, GenericArg,
Interner, MemoryMap, Ty, TyBuilder,
db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode,
mir::monomorphize_mir_body_bad, to_placeholder_idx, utils::Generics, Const, ConstData,
ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, Ty, TyBuilder,
};
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@ -57,7 +58,7 @@ pub enum ConstEvalError {
impl From<MirLowerError> for ConstEvalError {
fn from(value: MirLowerError) -> Self {
match value {
MirLowerError::ConstEvalError(e) => *e,
MirLowerError::ConstEvalError(_, e) => *e,
_ => ConstEvalError::MirLowerError(value),
}
}
@ -72,10 +73,11 @@ impl From<MirEvalError> for ConstEvalError {
pub(crate) fn path_to_const(
db: &dyn HirDatabase,
resolver: &Resolver,
path: &ModPath,
path: &Path,
mode: ParamLoweringMode,
args_lazy: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
expected_ty: Ty,
) -> Option<Const> {
match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) {
Some(ValueNs::GenericParam(p)) => {
@ -89,7 +91,7 @@ pub(crate) fn path_to_const(
Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
None => {
never!(
"Generic list doesn't contain this param: {:?}, {}, {:?}",
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
args,
path,
p
@ -100,6 +102,10 @@ pub(crate) fn path_to_const(
};
Some(ConstData { ty, value }.intern(Interner))
}
Some(ValueNs::ConstId(c)) => Some(intern_const_scalar(
ConstScalar::UnevaluatedConst(c.into(), Substitution::empty(Interner)),
expected_ty,
)),
_ => None,
}
}
@ -124,14 +130,15 @@ pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
/// Interns a constant scalar with the given type
pub fn intern_const_ref(db: &dyn HirDatabase, value: &ConstRef, ty: Ty, krate: CrateId) -> Const {
let layout = db.layout_of_ty(ty.clone(), krate);
let bytes = match value {
ConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
ConstRef::UInt(i) => {
let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
ConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
@ -153,13 +160,17 @@ pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: CrateId) ->
)
}
pub fn try_const_usize(c: &Const) -> Option<u128> {
pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
match &c.data(Interner).value {
chalk_ir::ConstValue::BoundVar(_) => None,
chalk_ir::ConstValue::InferenceVar(_) => None,
chalk_ir::ConstValue::Placeholder(_) => None,
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
ConstScalar::UnevaluatedConst(c, subst) => {
let ec = db.const_eval(*c, subst.clone()).ok()?;
try_const_usize(db, &ec)
}
_ => None,
},
}
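`try_const_usize` decodes the little-endian byte payload via `pad16` before `u128::from_le_bytes`, and now also recurses through unevaluated consts. A sketch of the decoding half, with a hypothetical standalone `pad16`:

```rust
/// Zero- or sign-extend little-endian bytes to 16 bytes (illustrative version).
fn pad16(bytes: &[u8], is_signed: bool) -> [u8; 16] {
    let negative = is_signed && bytes.last().map_or(false, |&b| b & 0x80 != 0);
    let mut out = [if negative { 0xFF } else { 0x00 }; 16];
    out[..bytes.len()].copy_from_slice(bytes);
    out
}

fn main() {
    let bytes = 300u32.to_le_bytes(); // a 4-byte constant scalar
    assert_eq!(u128::from_le_bytes(pad16(&bytes, false)), 300);
}
```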
@ -168,7 +179,16 @@ pub fn try_const_usize(c: &Const) -> Option<u128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &[String],
_: &ConstId,
_: &GeneralConstId,
_: &Substitution,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_static_recover(
_: &dyn HirDatabase,
_: &[String],
_: &StaticId,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
@ -183,11 +203,39 @@ pub(crate) fn const_eval_discriminant_recover(
pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
const_id: ConstId,
def: GeneralConstId,
subst: Substitution,
) -> Result<Const, ConstEvalError> {
let def = const_id.into();
let body = db.mir_body(def)?;
let c = interpret_mir(db, &body, false)?;
let body = match def {
GeneralConstId::ConstId(c) => {
db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
}
GeneralConstId::AnonymousConstId(c) => {
let (def, root) = db.lookup_intern_anonymous_const(c);
let body = db.body(def);
let infer = db.infer(def);
Arc::new(monomorphize_mir_body_bad(
db,
lower_to_mir(db, def, &body, &infer, root)?,
subst,
db.trait_environment_for_body(def),
)?)
}
};
let c = interpret_mir(db, &body, false).0?;
Ok(c)
}
pub(crate) fn const_eval_static_query(
db: &dyn HirDatabase,
def: StaticId,
) -> Result<Const, ConstEvalError> {
let body = db.monomorphized_mir_body(
def.into(),
Substitution::empty(Interner),
db.trait_environment_for_body(def.into()),
)?;
let c = interpret_mir(db, &body, false).0?;
Ok(c)
}
@ -209,9 +257,13 @@ pub(crate) fn const_eval_discriminant_variant(
};
return Ok(value);
}
let mir_body = db.mir_body(def)?;
let c = interpret_mir(db, &mir_body, false)?;
let c = try_const_usize(&c).unwrap() as i128;
let mir_body = db.monomorphized_mir_body(
def,
Substitution::empty(Interner),
db.trait_environment_for_body(def),
)?;
let c = interpret_mir(db, &mir_body, false).0?;
let c = try_const_usize(db, &c).unwrap() as i128;
Ok(c)
}
@ -226,15 +278,16 @@ pub(crate) fn eval_to_const(
debruijn: DebruijnIndex,
) -> Const {
let db = ctx.db;
let infer = ctx.clone().resolve_all();
if let Expr::Path(p) = &ctx.body.exprs[expr] {
let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) {
if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) {
return c;
}
}
let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
if let Ok(result) = interpret_mir(db, &mir_body, true) {
if let Ok(result) = interpret_mir(db, &mir_body, true).0 {
return result;
}
}

File diff suppressed because it is too large.

View file

@ -0,0 +1,377 @@
use super::*;
#[test]
fn size_of() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn size_of<T>() -> usize;
}
const GOAL: usize = size_of::<i32>();
"#,
4,
);
}
#[test]
fn transmute() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn transmute<T, U>(e: T) -> U;
}
const GOAL: i32 = transmute((1i16, 1i16));
"#,
0x00010001,
);
}
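The expected constant follows from the byte layout: on a little-endian target (and with both fields equal, so field order does not matter), `(1i16, 1i16)` is the bytes `01 00 01 00`. The same check via `std::mem::transmute`:

```rust
fn main() {
    let v: i32 = unsafe { std::mem::transmute((1i16, 1i16)) };
    assert_eq!(v, 0x00010001); // little-endian: 01 00 01 00
}
```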
#[test]
fn const_eval_select() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn const_eval_select<ARG, F, G, RET>(arg: ARG, called_in_const: F, called_at_rt: G) -> RET
where
G: FnOnce<ARG, Output = RET>,
F: FnOnce<ARG, Output = RET>;
}
const fn in_const(x: i32, y: i32) -> i32 {
x + y
}
fn in_rt(x: i32, y: i32) -> i32 {
x + y
}
const GOAL: i32 = const_eval_select((2, 3), in_const, in_rt);
"#,
5,
);
}
#[test]
fn wrapping_add() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn wrapping_add<T>(a: T, b: T) -> T;
}
const GOAL: u8 = wrapping_add(10, 250);
"#,
4,
);
}
#[test]
fn saturating_add() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn saturating_add<T>(a: T, b: T) -> T;
}
const GOAL: u8 = saturating_add(10, 250);
"#,
255,
);
check_number(
r#"
extern "rust-intrinsic" {
pub fn saturating_add<T>(a: T, b: T) -> T;
}
const GOAL: i8 = saturating_add(5, 8);
"#,
13,
);
}
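These constants match the standard wrapping/saturating semantics: 10 + 250 = 260 wraps to 4 mod 256, while saturation clamps at `u8::MAX`. Cross-checked against the std methods:

```rust
fn main() {
    assert_eq!(10u8.wrapping_add(250), 4);     // 260 mod 256
    assert_eq!(10u8.saturating_add(250), 255); // clamped at u8::MAX
    assert_eq!(5i8.saturating_add(8), 13);     // no clamping needed
}
```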
#[test]
fn allocator() {
check_number(
r#"
extern "Rust" {
#[rustc_allocator]
fn __rust_alloc(size: usize, align: usize) -> *mut u8;
#[rustc_deallocator]
fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
#[rustc_reallocator]
fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
#[rustc_allocator_zeroed]
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
}
const GOAL: u8 = unsafe {
let ptr = __rust_alloc(4, 1);
let ptr2 = ((ptr as usize) + 1) as *mut u8;
*ptr = 23;
*ptr2 = 32;
let ptr = __rust_realloc(ptr, 4, 1, 8);
let ptr2 = ((ptr as usize) + 1) as *mut u8;
*ptr + *ptr2
};
"#,
55,
);
}
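The expected 55 is 23 + 32, relying on `realloc` preserving the old contents. The same sequence through `std::alloc`, the stable counterpart of the `__rust_*` symbols:

```rust
use std::alloc::{alloc, dealloc, realloc, Layout};

fn main() {
    unsafe {
        let layout = Layout::from_size_align(4, 1).unwrap();
        let ptr = alloc(layout);
        assert!(!ptr.is_null());
        *ptr = 23;
        *ptr.add(1) = 32;
        let ptr = realloc(ptr, layout, 8); // contents preserved up to old size
        assert!(!ptr.is_null());
        assert_eq!(*ptr + *ptr.add(1), 55);
        dealloc(ptr, Layout::from_size_align(8, 1).unwrap());
    }
}
```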
#[test]
fn overflowing_add() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn add_with_overflow<T>(x: T, y: T) -> (T, bool);
}
const GOAL: u8 = add_with_overflow(1, 2).0;
"#,
3,
);
check_number(
r#"
extern "rust-intrinsic" {
pub fn add_with_overflow<T>(x: T, y: T) -> (T, bool);
}
const GOAL: u8 = add_with_overflow(1, 2).1 as u8;
"#,
0,
);
}
#[test]
fn needs_drop() {
check_number(
r#"
//- minicore: copy, sized
extern "rust-intrinsic" {
pub fn needs_drop<T: ?Sized>() -> bool;
}
struct X;
const GOAL: bool = !needs_drop::<i32>() && needs_drop::<X>();
"#,
1,
);
}
#[test]
fn likely() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn likely(b: bool) -> bool;
pub fn unlikely(b: bool) -> bool;
}
const GOAL: bool = likely(true) && unlikely(true) && !likely(false) && !unlikely(false);
"#,
1,
);
}
#[test]
fn floating_point() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn sqrtf32(x: f32) -> f32;
pub fn powf32(a: f32, x: f32) -> f32;
pub fn fmaf32(a: f32, b: f32, c: f32) -> f32;
}
const GOAL: f32 = sqrtf32(1.2) + powf32(3.4, 5.6) + fmaf32(-7.8, 1.3, 2.4);
"#,
i128::from_le_bytes(pad16(
&f32::to_le_bytes(1.2f32.sqrt() + 3.4f32.powf(5.6) + (-7.8f32).mul_add(1.3, 2.4)),
true,
)),
);
check_number(
r#"
extern "rust-intrinsic" {
pub fn powif64(a: f64, x: i32) -> f64;
pub fn sinf64(x: f64) -> f64;
pub fn minnumf64(x: f64, y: f64) -> f64;
}
const GOAL: f64 = powif64(1.2, 5) + sinf64(3.4) + minnumf64(-7.8, 1.3);
"#,
i128::from_le_bytes(pad16(
&f64::to_le_bytes(1.2f64.powi(5) + 3.4f64.sin() + (-7.8f64).min(1.3)),
true,
)),
);
}
#[test]
fn atomic() {
check_number(
r#"
//- minicore: copy
extern "rust-intrinsic" {
pub fn atomic_load_seqcst<T: Copy>(src: *const T) -> T;
pub fn atomic_xchg_acquire<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_cxchg_release_seqcst<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
pub fn atomic_cxchgweak_acquire_acquire<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
pub fn atomic_store_release<T: Copy>(dst: *mut T, val: T);
pub fn atomic_xadd_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_xsub_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_and_acquire<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_nand_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_or_release<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_xor_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
}
fn should_not_reach() {
_ // fails the test if executed
}
const GOAL: i32 = {
let mut x = 5;
atomic_store_release(&mut x, 10);
let mut y = atomic_xchg_acquire(&mut x, 100);
atomic_xadd_acqrel(&mut y, 20);
if (30, true) != atomic_cxchg_release_seqcst(&mut y, 30, 40) {
should_not_reach();
}
if (40, false) != atomic_cxchg_release_seqcst(&mut y, 30, 50) {
should_not_reach();
}
if (40, true) != atomic_cxchgweak_acquire_acquire(&mut y, 40, 30) {
should_not_reach();
}
let mut z = atomic_xsub_seqcst(&mut x, -200);
atomic_xor_seqcst(&mut x, 1024);
atomic_load_seqcst(&x) + z * 3 + atomic_load_seqcst(&y) * 2
};
"#,
660 + 1024,
);
}
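Tracing the arithmetic: x becomes 100 after the swap (y = 10), y passes through 30, 40, and back to 30, z captures 100 before x gains 200 and is xored with 1024, so the result is 1324 + 300 + 60 = 1684 = 660 + 1024. The same trace with `std::sync::atomic`, using strong `compare_exchange` where the test uses the weak intrinsic, to keep it deterministic:

```rust
use std::sync::atomic::{AtomicI32, Ordering::*};

fn main() {
    let x = AtomicI32::new(5);
    x.store(10, Release);
    let y = AtomicI32::new(x.swap(100, Acquire)); // y = 10, x = 100
    y.fetch_add(20, AcqRel);                      // y = 30
    assert_eq!(y.compare_exchange(30, 40, Release, Relaxed), Ok(30));  // y = 40
    assert_eq!(y.compare_exchange(30, 50, Release, Relaxed), Err(40)); // no-op
    assert_eq!(y.compare_exchange(40, 30, Acquire, Acquire), Ok(40));  // y = 30
    let z = x.fetch_sub(-200, SeqCst);            // z = 100, x = 300
    x.fetch_xor(1024, SeqCst);                    // x = 1324
    assert_eq!(x.load(SeqCst) + z * 3 + y.load(SeqCst) * 2, 660 + 1024);
}
```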
#[test]
fn offset() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
extern "rust-intrinsic" {
pub fn offset<T>(dst: *const T, offset: isize) -> *const T;
}
const GOAL: u8 = unsafe {
let ar: &[(u8, u8, u8)] = &[
(10, 11, 12),
(20, 21, 22),
(30, 31, 32),
(40, 41, 42),
(50, 51, 52),
];
let ar: *const [(u8, u8, u8)] = ar;
let ar = ar as *const (u8, u8, u8);
let element = *offset(ar, 2);
element.1
};
"#,
31,
);
}
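`offset` counts in whole elements, so index 2 lands on the third 3-byte tuple and `.1` reads 31. The equivalent with std raw-pointer `offset`:

```rust
fn main() {
    let ar: &[(u8, u8, u8)] =
        &[(10, 11, 12), (20, 21, 22), (30, 31, 32), (40, 41, 42), (50, 51, 52)];
    let element = unsafe { *ar.as_ptr().offset(2) }; // third element
    assert_eq!(element.1, 31);
}
```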
#[test]
fn arith_offset() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
extern "rust-intrinsic" {
pub fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
}
const GOAL: u8 = unsafe {
let ar: &[(u8, u8, u8)] = &[
(10, 11, 12),
(20, 21, 22),
(30, 31, 32),
(40, 41, 42),
(50, 51, 52),
];
let ar: *const [(u8, u8, u8)] = ar;
let ar = ar as *const (u8, u8, u8);
let element = *arith_offset(arith_offset(ar, 102), -100);
element.1
};
"#,
31,
);
}
#[test]
fn copy_nonoverlapping() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
}
const GOAL: u8 = unsafe {
let mut x = 2;
let y = 5;
copy_nonoverlapping(&y, &mut x, 1);
x
};
"#,
5,
);
}
#[test]
fn copy() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
extern "rust-intrinsic" {
pub fn copy<T>(src: *const T, dst: *mut T, count: usize);
}
const GOAL: i32 = unsafe {
let mut x = [1i32, 2, 3, 4, 5];
let y = (&mut x as *mut _) as *mut i32;
let z = (y as usize + 4) as *const i32;
copy(z, y, 4);
x[0] + x[1] + x[2] + x[3] + x[4]
};
"#,
19,
);
}
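Here the source and destination overlap by one element (`copy` is the memmove analogue), shifting `[1, 2, 3, 4, 5]` down to `[2, 3, 4, 5, 5]`, whose sum is 19. With `std::ptr::copy`:

```rust
use std::ptr;

fn main() {
    let mut x = [1i32, 2, 3, 4, 5];
    let dst = x.as_mut_ptr();
    unsafe {
        let src = dst.add(1) as *const i32; // one i32 (4 bytes) past the start
        ptr::copy(src, dst, 4); // overlapping copy, like memmove
    }
    assert_eq!(x, [2, 3, 4, 5, 5]);
    assert_eq!(x.iter().sum::<i32>(), 19);
}
```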
#[test]
fn ctpop() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn ctpop<T: Copy>(x: T) -> T;
}
const GOAL: i64 = ctpop(-29);
"#,
61,
);
}
#[test]
fn cttz() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn cttz<T: Copy>(x: T) -> T;
}
const GOAL: i64 = cttz(-24);
"#,
3,
);
}
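The expected values follow from two's complement: -29 as an i64 has 61 set bits, and -24 (binary …101000) has 3 trailing zeros. Cross-checked with the std equivalents:

```rust
fn main() {
    assert_eq!((-29i64).count_ones(), 61);    // ctpop
    assert_eq!((-24i64).trailing_zeros(), 3); // cttz
}
```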

View file

@ -1,27 +1,27 @@
//! The home of `HirDatabase`, which is the Salsa database containing all the
//! type inference-related queries.
use std::sync::Arc;
use std::sync;
use base_db::{impl_intern_key, salsa, CrateId, Upcast};
use hir_def::{
db::DefDatabase,
expr::ExprId,
layout::{Layout, LayoutError, TargetDataLayout},
AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GenericDefId,
ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId,
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId,
DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
LifetimeParamId, LocalFieldId, StaticId, TypeOrConstParamId, VariantId,
};
use la_arena::ArenaMap;
use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
chalk_db,
consteval::ConstEvalError,
layout::{Layout, LayoutError},
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError},
Binders, CallableDefId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner,
PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId,
ValueTyDefId,
Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult,
Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty,
TyDefId, ValueTyDefId,
};
use hir_expand::name::Name;
@ -38,8 +38,28 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::cycle(crate::mir::mir_body_recover)]
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure(&self, def: ClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
fn monomorphized_mir_body(
&self,
def: DefWithBodyId,
subst: Substitution,
env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
fn monomorphized_mir_body_for_closure(
&self,
def: ClosureId,
subst: Substitution,
env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::borrowck_query)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<BorrowckResult>, MirLowerError>;
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
@ -57,7 +77,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
fn const_eval(&self, def: ConstId) -> Result<Const, ConstEvalError>;
fn const_eval(&self, def: GeneralConstId, subst: Substitution)
-> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)]
fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
@ -71,7 +96,16 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::layout::layout_of_adt_query)]
#[salsa::cycle(crate::layout::layout_of_adt_recover)]
fn layout_of_adt(&self, def: AdtId, subst: Substitution) -> Result<Layout, LayoutError>;
fn layout_of_adt(
&self,
def: AdtId,
subst: Substitution,
krate: CrateId,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
fn layout_of_ty(&self, ty: Ty, krate: CrateId) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
@ -97,6 +131,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<crate::TraitEnvironment>;
#[salsa::invoke(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;
@ -108,7 +146,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
#[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
fn inherent_impls_in_block(&self, block: BlockId) -> Arc<InherentImpls>;
/// Collects all crates in the dependency graph that have impls for the
/// given fingerprint. This is only used for primitive types and types
@ -125,10 +163,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
#[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
fn trait_impls_in_block(&self, krate: BlockId) -> Option<Arc<TraitImpls>>;
fn trait_impls_in_block(&self, block: BlockId) -> Arc<TraitImpls>;
#[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<[Arc<TraitImpls>]>;
// Interned IDs for Chalk integration
#[salsa::interned]
@ -148,24 +186,34 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn intern_generator(&self, id: (DefWithBodyId, ExprId)) -> InternedGeneratorId;
#[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(&self, id: chalk_db::AssocTypeId) -> Arc<chalk_db::AssociatedTyDatum>;
fn associated_ty_data(
&self,
id: chalk_db::AssocTypeId,
) -> sync::Arc<chalk_db::AssociatedTyDatum>;
#[salsa::invoke(chalk_db::trait_datum_query)]
fn trait_datum(&self, krate: CrateId, trait_id: chalk_db::TraitId)
-> Arc<chalk_db::TraitDatum>;
fn trait_datum(
&self,
krate: CrateId,
trait_id: chalk_db::TraitId,
) -> sync::Arc<chalk_db::TraitDatum>;
#[salsa::invoke(chalk_db::struct_datum_query)]
fn struct_datum(
&self,
krate: CrateId,
struct_id: chalk_db::AdtId,
) -> Arc<chalk_db::StructDatum>;
) -> sync::Arc<chalk_db::StructDatum>;
#[salsa::invoke(chalk_db::impl_datum_query)]
fn impl_datum(&self, krate: CrateId, impl_id: chalk_db::ImplId) -> Arc<chalk_db::ImplDatum>;
fn impl_datum(
&self,
krate: CrateId,
impl_id: chalk_db::ImplId,
) -> sync::Arc<chalk_db::ImplDatum>;
#[salsa::invoke(chalk_db::fn_def_datum_query)]
fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> Arc<chalk_db::FnDefDatum>;
fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>;
#[salsa::invoke(chalk_db::fn_def_variance_query)]
fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
@ -178,7 +226,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
krate: CrateId,
id: chalk_db::AssociatedTyValueId,
) -> Arc<chalk_db::AssociatedTyValue>;
) -> sync::Arc<chalk_db::AssociatedTyValue>;
#[salsa::invoke(crate::traits::normalize_projection_query)]
#[salsa::transparent]
@ -193,6 +241,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn trait_solve(
&self,
krate: CrateId,
block: Option<BlockId>,
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
) -> Option<crate::Solution>;
@ -200,6 +249,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn trait_solve_query(
&self,
krate: CrateId,
block: Option<BlockId>,
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
) -> Option<crate::Solution>;
@ -207,19 +257,26 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn program_clauses_for_chalk_env(
&self,
krate: CrateId,
block: Option<BlockId>,
env: chalk_ir::Environment<Interner>,
) -> chalk_ir::ProgramClauses<Interner>;
}
fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile::span("infer:wait").detail(|| match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
DefWithBodyId::StaticId(it) => db.static_data(it).name.clone().to_string(),
DefWithBodyId::ConstId(it) => {
db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::StaticId(it) => {
db.static_data(it).name.clone().display(db.upcast()).to_string()
}
DefWithBodyId::ConstId(it) => db
.const_data(it)
.name
.clone()
.unwrap_or_else(Name::missing)
.display(db.upcast())
.to_string(),
DefWithBodyId::VariantId(it) => {
db.enum_data(it.parent).variants[it.local_id].name.to_string()
db.enum_data(it.parent).variants[it.local_id].name.display(db.upcast()).to_string()
}
});
db.infer_query(def)
@ -228,10 +285,11 @@ fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult>
fn trait_solve_wait(
db: &dyn HirDatabase,
krate: CrateId,
block: Option<BlockId>,
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
) -> Option<crate::Solution> {
let _p = profile::span("trait_solve::wait");
db.trait_solve_query(krate, goal)
db.trait_solve_query(krate, block, goal)
}
#[test]

View file

@ -16,8 +16,8 @@ use std::fmt;
use base_db::CrateId;
use hir_def::{
adt::VariantData,
expr::{Pat, PatId},
data::adt::VariantData,
hir::{Pat, PatId},
src::HasSource,
AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, StaticId,
StructId,
@ -223,7 +223,7 @@ impl<'a> DeclValidator<'a> {
}
// Check the function name.
let function_name = data.name.to_string();
let function_name = data.name.display(self.db.upcast()).to_string();
let fn_name_replacement = to_lower_snake_case(&function_name).map(|new_name| Replacement {
current_name: data.name.clone(),
suggested_text: new_name,
@ -244,7 +244,9 @@ impl<'a> DeclValidator<'a> {
id,
Replacement {
current_name: bind_name.clone(),
suggested_text: to_lower_snake_case(&bind_name.to_string())?,
suggested_text: to_lower_snake_case(
&bind_name.display(self.db.upcast()).to_string(),
)?,
expected_case: CaseType::LowerSnakeCase,
},
))
@ -287,7 +289,7 @@ impl<'a> DeclValidator<'a> {
ident_type: IdentType::Function,
ident: AstPtr::new(&ast_ptr),
expected_case: fn_name_replacement.expected_case,
ident_text: fn_name_replacement.current_name.to_string(),
ident_text: fn_name_replacement.current_name.display(self.db.upcast()).to_string(),
suggested_text: fn_name_replacement.suggested_text,
};
@ -343,7 +345,10 @@ impl<'a> DeclValidator<'a> {
ident_type,
ident: AstPtr::new(&name_ast),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
ident_text: replacement
.current_name
.display(self.db.upcast())
.to_string(),
suggested_text: replacement.suggested_text,
};
@ -362,7 +367,7 @@ impl<'a> DeclValidator<'a> {
let non_snake_case_allowed = self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false);
// Check the structure name.
let struct_name = data.name.to_string();
let struct_name = data.name.display(self.db.upcast()).to_string();
let struct_name_replacement = if !non_camel_case_allowed {
to_camel_case(&struct_name).map(|new_name| Replacement {
current_name: data.name.clone(),
@ -379,7 +384,7 @@ impl<'a> DeclValidator<'a> {
if !non_snake_case_allowed {
if let VariantData::Record(fields) = data.variant_data.as_ref() {
for (_, field) in fields.iter() {
let field_name = field.name.to_string();
let field_name = field.name.display(self.db.upcast()).to_string();
if let Some(new_name) = to_lower_snake_case(&field_name) {
let replacement = Replacement {
current_name: field.name.clone(),
@ -434,7 +439,7 @@ impl<'a> DeclValidator<'a> {
ident_type: IdentType::Structure,
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
suggested_text: replacement.suggested_text,
};
@ -479,7 +484,7 @@ impl<'a> DeclValidator<'a> {
ident_type: IdentType::Field,
ident: AstPtr::new(&ast_ptr),
expected_case: field_to_rename.expected_case,
ident_text: field_to_rename.current_name.to_string(),
ident_text: field_to_rename.current_name.display(self.db.upcast()).to_string(),
suggested_text: field_to_rename.suggested_text,
};
@ -496,7 +501,7 @@ impl<'a> DeclValidator<'a> {
}
// Check the enum name.
let enum_name = data.name.to_string();
let enum_name = data.name.display(self.db.upcast()).to_string();
let enum_name_replacement = to_camel_case(&enum_name).map(|new_name| Replacement {
current_name: data.name.clone(),
suggested_text: new_name,
@ -510,7 +515,9 @@ impl<'a> DeclValidator<'a> {
.filter_map(|(_, variant)| {
Some(Replacement {
current_name: variant.name.clone(),
suggested_text: to_camel_case(&variant.name.to_string())?,
suggested_text: to_camel_case(
&variant.name.display(self.db.upcast()).to_string(),
)?,
expected_case: CaseType::UpperCamelCase,
})
})
@ -558,7 +565,7 @@ impl<'a> DeclValidator<'a> {
ident_type: IdentType::Enum,
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
suggested_text: replacement.suggested_text,
};
@ -603,7 +610,7 @@ impl<'a> DeclValidator<'a> {
ident_type: IdentType::Variant,
ident: AstPtr::new(&ast_ptr),
expected_case: variant_to_rename.expected_case,
ident_text: variant_to_rename.current_name.to_string(),
ident_text: variant_to_rename.current_name.display(self.db.upcast()).to_string(),
suggested_text: variant_to_rename.suggested_text,
};
@ -623,7 +630,7 @@ impl<'a> DeclValidator<'a> {
None => return,
};
let const_name = name.to_string();
let const_name = name.display(self.db.upcast()).to_string();
let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) {
Replacement {
current_name: name.clone(),
@ -648,7 +655,7 @@ impl<'a> DeclValidator<'a> {
ident_type: IdentType::Constant,
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
suggested_text: replacement.suggested_text,
};
@ -668,7 +675,7 @@ impl<'a> DeclValidator<'a> {
let name = &data.name;
let static_name = name.to_string();
let static_name = name.display(self.db.upcast()).to_string();
let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) {
Replacement {
current_name: name.clone(),
@ -693,7 +700,7 @@ impl<'a> DeclValidator<'a> {
ident_type: IdentType::StaticVariable,
ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
suggested_text: replacement.suggested_text,
};
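All of these checks funnel through case-conversion helpers that return `Some(new_name)` only when the identifier does not already conform, which is what gates whether a diagnostic is emitted. A simplified sketch of the lower-snake-case one (not the real `to_lower_snake_case`, which handles more edge cases):

```rust
fn to_lower_snake_case(ident: &str) -> Option<String> {
    let mut out = String::new();
    for (i, ch) in ident.chars().enumerate() {
        if ch.is_uppercase() {
            if i > 0 {
                out.push('_'); // word boundary before an interior capital
            }
            out.extend(ch.to_lowercase());
        } else {
            out.push(ch);
        }
    }
    if out != ident {
        Some(out) // a rename suggestion is produced
    } else {
        None // already conforming, no diagnostic
    }
}

fn main() {
    assert_eq!(to_lower_snake_case("fooBar").as_deref(), Some("foo_bar"));
    assert_eq!(to_lower_snake_case("foo_bar"), None);
}
```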

View file

@ -3,7 +3,6 @@
//! fields, etc.
use std::fmt;
use std::sync::Arc;
use either::Either;
use hir_def::lang_item::LangItem;
@ -12,6 +11,7 @@ use hir_def::{ItemContainerId, Lookup};
use hir_expand::name;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use triomphe::Arc;
use typed_arena::Arena;
use crate::{
@ -27,7 +27,7 @@ use crate::{
pub(crate) use hir_def::{
body::Body,
expr::{Expr, ExprId, MatchArm, Pat, PatId},
hir::{Expr, ExprId, MatchArm, Pat, PatId},
LocalFieldId, VariantId,
};
@ -207,7 +207,7 @@ impl ExprValidator {
let report = compute_match_usefulness(&cx, &m_arms, scrut_ty);
// FIXME Report unreacheble arms
// FIXME Report unreachable arms
// https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
let witnesses = report.non_exhaustiveness_witnesses;

View file

@ -1,6 +1,6 @@
//! Validation of matches.
//!
//! This module provides lowering from [hir_def::expr::Pat] to [self::Pat] and match
//! This module provides lowering from [hir_def::hir::Pat] to [self::Pat] and match
//! checking algorithm.
//!
//! It is modeled on the rustc module `rustc_mir_build::thir::pattern`.
@ -12,7 +12,7 @@ pub(crate) mod usefulness;
use chalk_ir::Mutability;
use hir_def::{
adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId,
body::Body, data::adt::VariantData, hir::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId,
};
use hir_expand::name::Name;
use stdx::{always, never};
@ -125,15 +125,15 @@ impl<'a> PatCtxt<'a> {
let variant = self.infer.variant_resolution_for_pat(pat);
let kind = match self.body[pat] {
hir_def::expr::Pat::Wild => PatKind::Wild,
hir_def::hir::Pat::Wild => PatKind::Wild,
hir_def::expr::Pat::Lit(expr) => self.lower_lit(expr),
hir_def::hir::Pat::Lit(expr) => self.lower_lit(expr),
hir_def::expr::Pat::Path(ref path) => {
hir_def::hir::Pat::Path(ref path) => {
return self.lower_path(pat, path);
}
hir_def::expr::Pat::Tuple { ref args, ellipsis } => {
hir_def::hir::Pat::Tuple { ref args, ellipsis } => {
let arity = match *ty.kind(Interner) {
TyKind::Tuple(arity, _) => arity,
_ => {
@ -146,13 +146,14 @@ impl<'a> PatCtxt<'a> {
PatKind::Leaf { subpatterns }
}
hir_def::expr::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.pat_binding_modes[&pat];
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[id];
ty = &self.infer[id];
let name = &self.body.bindings[id].name;
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), _) => {
never!("`ref {}` has wrong type {:?}", name, ty);
never!("`ref {}` has wrong type {:?}", name.display(self.db.upcast()), ty);
self.errors.push(PatternError::UnexpectedType);
return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
}
@ -161,13 +162,13 @@ impl<'a> PatCtxt<'a> {
PatKind::Binding { name: name.clone(), subpattern: self.lower_opt_pattern(subpat) }
}
hir_def::expr::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
let expected_len = variant.unwrap().variant_data(self.db.upcast()).fields().len();
let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
self.lower_variant_or_leaf(pat, ty, subpatterns)
}
hir_def::expr::Pat::Record { ref args, .. } if variant.is_some() => {
hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => {
let variant_data = variant.unwrap().variant_data(self.db.upcast());
let subpatterns = args
.iter()
@ -187,12 +188,12 @@ impl<'a> PatCtxt<'a> {
}
}
}
hir_def::expr::Pat::TupleStruct { .. } | hir_def::expr::Pat::Record { .. } => {
hir_def::hir::Pat::TupleStruct { .. } | hir_def::hir::Pat::Record { .. } => {
self.errors.push(PatternError::UnresolvedVariant);
PatKind::Wild
}
hir_def::expr::Pat::Or(ref pats) => PatKind::Or { pats: self.lower_patterns(pats) },
hir_def::hir::Pat::Or(ref pats) => PatKind::Or { pats: self.lower_patterns(pats) },
_ => {
self.errors.push(PatternError::Unimplemented);
@ -279,8 +280,8 @@ impl<'a> PatCtxt<'a> {
}
}
fn lower_lit(&mut self, expr: hir_def::expr::ExprId) -> PatKind {
use hir_def::expr::{Expr, Literal::Bool};
fn lower_lit(&mut self, expr: hir_def::hir::ExprId) -> PatKind {
use hir_def::hir::{Expr, Literal::Bool};
match self.body[expr] {
Expr::Literal(Bool(value)) => PatKind::LiteralBool { value },
@ -297,7 +298,7 @@ impl HirDisplay for Pat {
match &*self.kind {
PatKind::Wild => write!(f, "_"),
PatKind::Binding { name, subpattern } => {
write!(f, "{name}")?;
write!(f, "{}", name.display(f.db.upcast()))?;
if let Some(subpattern) = subpattern {
write!(f, " @ ")?;
subpattern.hir_fmt(f)?;
@ -318,10 +319,14 @@ impl HirDisplay for Pat {
match variant {
VariantId::EnumVariantId(v) => {
let data = f.db.enum_data(v.parent);
write!(f, "{}", data.variants[v.local_id].name)?;
write!(f, "{}", data.variants[v.local_id].name.display(f.db.upcast()))?;
}
VariantId::StructId(s) => {
write!(f, "{}", f.db.struct_data(s).name.display(f.db.upcast()))?
}
VariantId::UnionId(u) => {
write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))?
}
VariantId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
VariantId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name)?,
};
let variant_data = variant.variant_data(f.db.upcast());
@ -335,7 +340,11 @@ impl HirDisplay for Pat {
.map(|p| {
printed += 1;
WriteWith(move |f| {
write!(f, "{}: ", rec_fields[p.field].name)?;
write!(
f,
"{}: ",
rec_fields[p.field].name.display(f.db.upcast())
)?;
p.pattern.hir_fmt(f)
})
});
@ -379,7 +388,7 @@ impl HirDisplay for Pat {
}
PatKind::Deref { subpattern } => {
match self.ty.kind(Interner) {
TyKind::Adt(adt, _) if is_box(adt.0, f.db) => write!(f, "box ")?,
TyKind::Adt(adt, _) if is_box(f.db, adt.0) => write!(f, "box ")?,
&TyKind::Ref(mutbl, ..) => {
write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
}

View file

@ -82,7 +82,7 @@ fn expand_or_pat(pat: &Pat) -> Vec<&Pat> {
pats
}
/// [Constructor] uses this in umimplemented variants.
/// [Constructor] uses this in unimplemented variants.
/// It allows porting match expressions from upstream algorithm without losing semantics.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(super) enum Void {}
@ -384,7 +384,7 @@ impl Constructor {
TyKind::Tuple(arity, ..) => arity,
TyKind::Ref(..) => 1,
TyKind::Adt(adt, ..) => {
if is_box(adt.0, pcx.cx.db) {
if is_box(pcx.cx.db, adt.0) {
// The only legal patterns of type `Box` (outside `std`) are `_` and box
// patterns. If we're here we can assume this is a box pattern.
1
@ -772,7 +772,7 @@ impl<'p> Fields<'p> {
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
let ty = field_ty[fid].clone().substitute(Interner, substs);
let ty = normalize(cx.db, cx.body, ty);
let ty = normalize(cx.db, cx.db.trait_environment_for_body(cx.body), ty);
let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
|| visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
let is_uninhabited = cx.is_uninhabited(&ty);
@ -800,7 +800,7 @@ impl<'p> Fields<'p> {
}
TyKind::Ref(.., rty) => Fields::wildcards_from_tys(cx, once(rty.clone())),
&TyKind::Adt(AdtId(adt), ref substs) => {
if is_box(adt, cx.db) {
if is_box(cx.db, adt) {
// The only legal patterns of type `Box` (outside `std`) are `_` and box
// patterns. If we're here we can assume this is a box pattern.
let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
@ -905,7 +905,7 @@ impl<'p> DeconstructedPat<'p> {
}
fields = Fields::from_iter(cx, wilds)
}
TyKind::Adt(adt, substs) if is_box(adt.0, cx.db) => {
TyKind::Adt(adt, substs) if is_box(cx.db, adt.0) => {
// The only legal patterns of type `Box` (outside `std`) are `_` and box
// patterns. If we're here we can assume this is a box pattern.
// FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
@ -992,7 +992,7 @@ impl<'p> DeconstructedPat<'p> {
})
.collect(),
},
TyKind::Adt(adt, _) if is_box(adt.0, cx.db) => {
TyKind::Adt(adt, _) if is_box(cx.db, adt.0) => {
// Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
// of `std`). So this branch is only reachable when the feature is enabled and
// the pattern is a box pattern.

View file

@ -1,4 +1,4 @@
//! Pattern untilities.
//! Pattern utilities.
//!
//! Originates from `rustc_hir::pat_util`

View file

@ -755,7 +755,7 @@ pub(crate) enum Reachability {
/// The arm is reachable. This additionally carries a set of or-pattern branches that have been
/// found to be unreachable despite the overall arm being reachable. Used only in the presence
/// of or-patterns, otherwise it stays empty.
// FIXME: store ureachable subpattern IDs
// FIXME: store unreachable subpattern IDs
Reachable,
/// The arm is unreachable.
Unreachable,

View file

@ -3,7 +3,7 @@
use hir_def::{
body::Body,
expr::{Expr, ExprId, UnaryOp},
hir::{Expr, ExprId, UnaryOp},
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
DefWithBodyId,
};
@ -73,7 +73,7 @@ fn walk_unsafe(
}
Expr::Path(path) => {
let resolver = resolver_for_expr(db.upcast(), def, current);
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path);
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
if db.static_data(id).mutable {
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });

View file

@ -7,32 +7,36 @@ use std::fmt::{self, Debug};
use base_db::CrateId;
use chalk_ir::{BoundVar, TyKind};
use hir_def::{
adt::VariantData,
body,
data::adt::VariantData,
db::DefDatabase,
find_path,
generics::{TypeOrConstParamData, TypeParamProvenance},
item_scope::ItemInNs,
lang_item::{LangItem, LangItemTarget},
nameres::DefMap,
path::{Path, PathKind},
type_ref::{TraitBoundModifier, TypeBound, TypeRef},
visibility::Visibility,
HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
TraitId,
};
use hir_expand::{hygiene::Hygiene, name::Name};
use intern::{Internable, Interned};
use itertools::Itertools;
use la_arena::ArenaMap;
use smallvec::SmallVec;
use stdx::never;
use crate::{
consteval::try_const_usize,
db::HirDatabase,
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
layout::layout_of_ty,
layout::Layout,
lt_from_placeholder_idx,
mapping::from_chalk,
mir::pad16,
primitive, to_assoc_type_id,
utils::{self, generics},
utils::{self, detect_variant_from_bytes, generics, ClosureSubst},
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
@ -64,6 +68,7 @@ pub struct HirFormatter<'a> {
curr_size: usize,
pub(crate) max_size: Option<usize>,
omit_verbose_types: bool,
closure_style: ClosureStyle,
display_target: DisplayTarget,
}
@ -87,6 +92,7 @@ pub trait HirDisplay {
max_size: Option<usize>,
omit_verbose_types: bool,
display_target: DisplayTarget,
closure_style: ClosureStyle,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
@ -95,7 +101,14 @@ pub trait HirDisplay {
!matches!(display_target, DisplayTarget::SourceCode { .. }),
"HirDisplayWrapper cannot fail with DisplaySourceCodeError, use HirDisplay::hir_fmt directly instead"
);
HirDisplayWrapper { db, t: self, max_size, omit_verbose_types, display_target }
HirDisplayWrapper {
db,
t: self,
max_size,
omit_verbose_types,
display_target,
closure_style,
}
}
/// Returns a `Display`able type that is human-readable.
@ -109,6 +122,7 @@ pub trait HirDisplay {
t: self,
max_size: None,
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
}
}
@ -128,6 +142,7 @@ pub trait HirDisplay {
t: self,
max_size,
omit_verbose_types: true,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
}
}
@ -138,6 +153,7 @@ pub trait HirDisplay {
&'a self,
db: &'a dyn HirDatabase,
module_id: ModuleId,
allow_opaque: bool,
) -> Result<String, DisplaySourceCodeError> {
let mut result = String::new();
match self.hir_fmt(&mut HirFormatter {
@ -147,7 +163,8 @@ pub trait HirDisplay {
curr_size: 0,
max_size: None,
omit_verbose_types: false,
display_target: DisplayTarget::SourceCode { module_id },
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::SourceCode { module_id, allow_opaque },
}) {
Ok(()) => {}
Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
@ -166,6 +183,7 @@ pub trait HirDisplay {
t: self,
max_size: None,
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Test,
}
}
@ -235,26 +253,34 @@ pub enum DisplayTarget {
Diagnostics,
/// Display types for inserting them in source files.
/// The generated code should compile, so paths need to be qualified.
SourceCode { module_id: ModuleId },
SourceCode { module_id: ModuleId, allow_opaque: bool },
/// Only for test purpose to keep real types
Test,
}
impl DisplayTarget {
fn is_source_code(&self) -> bool {
fn is_source_code(self) -> bool {
matches!(self, Self::SourceCode { .. })
}
fn is_test(&self) -> bool {
fn is_test(self) -> bool {
matches!(self, Self::Test)
}
fn allows_opaque(self) -> bool {
match self {
Self::SourceCode { allow_opaque, .. } => allow_opaque,
_ => true,
}
}
}
#[derive(Debug)]
pub enum DisplaySourceCodeError {
PathNotFound,
UnknownType,
Closure,
Generator,
OpaqueType,
}
pub enum HirDisplayError {
@ -274,9 +300,25 @@ pub struct HirDisplayWrapper<'a, T> {
t: &'a T,
max_size: Option<usize>,
omit_verbose_types: bool,
closure_style: ClosureStyle,
display_target: DisplayTarget,
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum ClosureStyle {
/// `impl FnX(i32, i32) -> i32`, where `FnX` is the most specific trait among `Fn`, `FnMut`,
/// and `FnOnce` that the closure implements. This is the default.
ImplFn,
/// `|i32, i32| -> i32`
RANotation,
/// `{closure#14825}`, useful for some diagnostics (like type mismatch) and internal usage.
ClosureWithId,
/// `{closure#14825}<i32, ()>`, useful for internal usage.
ClosureWithSubst,
/// `…`, i.e. the `TYPE_HINT_TRUNCATION` placeholder
Hide,
}
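
A minimal caller-side sketch of these styles (assuming a `db: &dyn HirDatabase` and a closure type `ty: Ty` are already in hand; `display` is the `HirDisplay` constructor above, and `with_closure_style` is defined just below):

fn show_closure_styles(db: &dyn HirDatabase, ty: Ty) {
    // Default style, e.g. "impl FnMut(i32) -> i32".
    let as_impl = ty.display(db).to_string();
    // Return-arrow notation, e.g. "|i32| -> i32".
    let as_arrows = ty.display(db).with_closure_style(ClosureStyle::RANotation).to_string();
    // Id-based form used by some diagnostics, e.g. "{closure#14825}".
    let by_id = ty.display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string();
    let _ = (as_impl, as_arrows, by_id);
}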
impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
pub fn write_to<F: HirWrite>(&self, f: &mut F) -> Result<(), HirDisplayError> {
self.t.hir_fmt(&mut HirFormatter {
@ -287,8 +329,14 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
max_size: self.max_size,
omit_verbose_types: self.omit_verbose_types,
display_target: self.display_target,
closure_style: self.closure_style,
})
}
pub fn with_closure_style(mut self, c: ClosureStyle) -> Self {
self.closure_style = c;
self
}
}
impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
@ -330,7 +378,13 @@ impl HirDisplay for ProjectionTy {
let trait_ref = self.trait_ref(f.db);
write!(f, "<")?;
fmt_trait_ref(f, &trait_ref, true)?;
write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
write!(
f,
">::{}",
f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id))
.name
.display(f.db.upcast())
)?;
let proj_params_count =
self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
@ -373,10 +427,16 @@ impl HirDisplay for Const {
let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.type_or_consts[id.local_id];
write!(f, "{}", param_data.name().unwrap())
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?;
Ok(())
}
ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty),
ConstScalar::UnevaluatedConst(c, parameters) => {
write!(f, "{}", c.name(f.db.upcast()))?;
hir_fmt_generics(f, parameters, c.generic_def(f.db.upcast()))?;
Ok(())
}
ConstScalar::Unknown => f.write_char('_'),
},
}
@ -411,8 +471,11 @@ fn render_const_scalar(
memory_map: &MemoryMap,
ty: &Ty,
) -> Result<(), HirDisplayError> {
// FIXME: We need to get krate from the final callers of the hir display
// infrastructure and have it here as a field on `f`.
let krate = *f.db.crate_graph().crates_in_topological_order().last().unwrap();
match ty.kind(Interner) {
chalk_ir::TyKind::Scalar(s) => match s {
TyKind::Scalar(s) => match s {
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
Scalar::Char => {
let x = u128::from_le_bytes(pad16(b, false)) as u32;
@ -440,22 +503,54 @@ fn render_const_scalar(
}
},
},
chalk_ir::TyKind::Ref(_, _, t) => match t.kind(Interner) {
chalk_ir::TyKind::Str => {
TyKind::Ref(_, _, t) => match t.kind(Interner) {
TyKind::Str => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let bytes = memory_map.0.get(&addr).map(|x| &**x).unwrap_or(&[]);
let s = std::str::from_utf8(bytes).unwrap_or("<utf8-error>");
let size = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
let Some(bytes) = memory_map.get(addr, size) else {
return f.write_str("<ref-data-not-available>");
};
let s = std::str::from_utf8(&bytes).unwrap_or("<utf8-error>");
write!(f, "{s:?}")
}
_ => f.write_str("<ref-not-supported>"),
TyKind::Slice(ty) => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
let Some(bytes) = memory_map.get(addr, size_one * count) else {
return f.write_str("<ref-data-not-available>");
};
f.write_str("&[")?;
let mut first = true;
for i in 0..count {
if first {
first = false;
} else {
f.write_str(", ")?;
}
let offset = size_one * i;
render_const_scalar(f, &bytes[offset..offset + size_one], memory_map, &ty)?;
}
f.write_str("]")
}
_ => {
let addr = usize::from_le_bytes(b.try_into().unwrap());
let Ok(layout) = f.db.layout_of_ty(t.clone(), krate) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
let Some(bytes) = memory_map.get(addr, size) else {
return f.write_str("<ref-data-not-available>");
};
f.write_str("&")?;
render_const_scalar(f, bytes, memory_map, t)
}
},
chalk_ir::TyKind::Tuple(_, subst) => {
// FIXME: Remove this line. If the target data layout is independent
// of the krate, the `db.target_data_layout` and its callers like `layout_of_ty` don't need
// to get krate. Otherwise, we need to get krate from the final callers of the hir display
// infrastructure and have it here as a field on `f`.
let krate = *f.db.crate_graph().crates_in_topological_order().last().unwrap();
let Ok(layout) = layout_of_ty(f.db, ty, krate) else {
TyKind::Tuple(_, subst) => {
let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
return f.write_str("<layout-error>");
};
f.write_str("(")?;
@ -468,7 +563,7 @@ fn render_const_scalar(
}
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize();
let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
f.write_str("<layout-error>")?;
continue;
};
@ -477,62 +572,144 @@ fn render_const_scalar(
}
f.write_str(")")
}
chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
hir_def::AdtId::StructId(s) => {
let data = f.db.struct_data(s);
let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone()) else {
return f.write_str("<layout-error>");
};
match data.variant_data.as_ref() {
VariantData::Record(fields) | VariantData::Tuple(fields) => {
let field_types = f.db.field_types(s.into());
let krate = adt.0.module(f.db.upcast()).krate();
let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
let offset = layout
.fields
.offset(u32::from(id.into_raw()) as usize)
.bytes_usize();
let ty = field_types[id].clone().substitute(Interner, subst);
let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)
};
let mut it = fields.iter();
if matches!(data.variant_data.as_ref(), VariantData::Record(_)) {
write!(f, "{} {{", data.name)?;
if let Some((id, data)) = it.next() {
write!(f, " {}: ", data.name)?;
render_field(f, id)?;
}
for (id, data) in it {
write!(f, ", {}: ", data.name)?;
render_field(f, id)?;
}
write!(f, " }}")?;
} else {
let mut it = it.map(|x| x.0);
write!(f, "{}(", data.name)?;
if let Some(id) = it.next() {
render_field(f, id)?;
}
for id in it {
write!(f, ", ")?;
render_field(f, id)?;
}
write!(f, ")")?;
}
return Ok(());
}
VariantData::Unit => write!(f, "{}", data.name),
TyKind::Adt(adt, subst) => {
let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone(), krate) else {
return f.write_str("<layout-error>");
};
match adt.0 {
hir_def::AdtId::StructId(s) => {
let data = f.db.struct_data(s);
write!(f, "{}", data.name.display(f.db.upcast()))?;
let field_types = f.db.field_types(s.into());
render_variant_after_name(
&data.variant_data,
f,
&field_types,
adt.0.module(f.db.upcast()).krate(),
&layout,
subst,
b,
memory_map,
)
}
hir_def::AdtId::UnionId(u) => {
write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))
}
hir_def::AdtId::EnumId(e) => {
let Some((var_id, var_layout)) =
detect_variant_from_bytes(&layout, f.db, krate, b, e) else {
return f.write_str("<failed-to-detect-variant>");
};
let data = &f.db.enum_data(e).variants[var_id];
write!(f, "{}", data.name.display(f.db.upcast()))?;
let field_types =
f.db.field_types(EnumVariantId { parent: e, local_id: var_id }.into());
render_variant_after_name(
&data.variant_data,
f,
&field_types,
adt.0.module(f.db.upcast()).krate(),
&var_layout,
subst,
b,
memory_map,
)
}
}
hir_def::AdtId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name),
hir_def::AdtId::EnumId(_) => f.write_str("<enum-not-supported>"),
},
chalk_ir::TyKind::FnDef(..) => ty.hir_fmt(f),
_ => f.write_str("<not-supported>"),
}
TyKind::FnDef(..) => ty.hir_fmt(f),
TyKind::Function(_) | TyKind::Raw(_, _) => {
let x = u128::from_le_bytes(pad16(b, false));
write!(f, "{:#X} as ", x)?;
ty.hir_fmt(f)
}
TyKind::Array(ty, len) => {
let Some(len) = try_const_usize(f.db, len) else {
return f.write_str("<unknown-array-len>");
};
let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
f.write_str("[")?;
let mut first = true;
for i in 0..len as usize {
if first {
first = false;
} else {
f.write_str(", ")?;
}
let offset = size_one * i;
render_const_scalar(f, &b[offset..offset + size_one], memory_map, &ty)?;
}
f.write_str("]")
}
TyKind::Never => f.write_str("!"),
TyKind::Closure(_, _) => f.write_str("<closure>"),
TyKind::Generator(_, _) => f.write_str("<generator>"),
TyKind::GeneratorWitness(_, _) => f.write_str("<generator-witness>"),
// The below arms are unreachable, since const eval will bail out before here.
TyKind::Foreign(_) => f.write_str("<extern-type>"),
TyKind::Error
| TyKind::Placeholder(_)
| TyKind::Alias(_)
| TyKind::AssociatedType(_, _)
| TyKind::OpaqueType(_, _)
| TyKind::BoundVar(_)
| TyKind::InferenceVar(_, _) => f.write_str("<placeholder-or-unknown-type>"),
// The below arms are unreachable, since we handled them in ref case.
TyKind::Slice(_) | TyKind::Str | TyKind::Dyn(_) => f.write_str("<unsized-value>"),
}
}
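
For orientation, the renderings the arms above aim to produce (values illustrative):

//  &str        ->  "hello"            (quoted via `{s:?}`)
//  &[i32]      ->  &[1, 2, 3]
//  (u8, bool)  ->  (1, true)
//  [u8; 3]     ->  [1, 2, 3]
//  enum        ->  Variant { field: 1 } or Variant(1), after variant detection
//  closure     ->  <closure>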
fn render_variant_after_name(
data: &VariantData,
f: &mut HirFormatter<'_>,
field_types: &ArenaMap<LocalFieldId, Binders<Ty>>,
krate: CrateId,
layout: &Layout,
subst: &Substitution,
b: &[u8],
memory_map: &MemoryMap,
) -> Result<(), HirDisplayError> {
match data {
VariantData::Record(fields) | VariantData::Tuple(fields) => {
let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize();
let ty = field_types[id].clone().substitute(Interner, subst);
let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)
};
let mut it = fields.iter();
if matches!(data, VariantData::Record(_)) {
write!(f, " {{")?;
if let Some((id, data)) = it.next() {
write!(f, " {}: ", data.name.display(f.db.upcast()))?;
render_field(f, id)?;
}
for (id, data) in it {
write!(f, ", {}: ", data.name.display(f.db.upcast()))?;
render_field(f, id)?;
}
write!(f, " }}")?;
} else {
let mut it = it.map(|x| x.0);
write!(f, "(")?;
if let Some(id) = it.next() {
render_field(f, id)?;
}
for id in it {
write!(f, ", ")?;
render_field(f, id)?;
}
write!(f, ")")?;
}
return Ok(());
}
VariantData::Unit => Ok(()),
}
}
@ -689,11 +866,17 @@ impl HirDisplay for Ty {
let sig = db.callable_item_signature(def).substitute(Interner, parameters);
f.start_location_link(def.into());
match def {
CallableDefId::FunctionId(ff) => write!(f, "fn {}", db.function_data(ff).name)?,
CallableDefId::StructId(s) => write!(f, "{}", db.struct_data(s).name)?,
CallableDefId::EnumVariantId(e) => {
write!(f, "{}", db.enum_data(e.parent).variants[e.local_id].name)?
CallableDefId::FunctionId(ff) => {
write!(f, "fn {}", db.function_data(ff).name.display(f.db.upcast()))?
}
CallableDefId::StructId(s) => {
write!(f, "{}", db.struct_data(s).name.display(f.db.upcast()))?
}
CallableDefId::EnumVariantId(e) => write!(
f,
"{}",
db.enum_data(e.parent).variants[e.local_id].name.display(f.db.upcast())
)?,
};
f.end_location_link();
if parameters.len(Interner) > 0 {
@ -733,16 +916,16 @@ impl HirDisplay for Ty {
hir_def::AdtId::UnionId(it) => db.union_data(it).name.clone(),
hir_def::AdtId::EnumId(it) => db.enum_data(it).name.clone(),
};
write!(f, "{name}")?;
write!(f, "{}", name.display(f.db.upcast()))?;
}
DisplayTarget::SourceCode { module_id } => {
DisplayTarget::SourceCode { module_id, allow_opaque: _ } => {
if let Some(path) = find_path::find_path(
db.upcast(),
ItemInNs::Types((*def_id).into()),
module_id,
false,
) {
write!(f, "{path}")?;
write!(f, "{}", path.display(f.db.upcast()))?;
} else {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::PathNotFound,
@ -752,82 +935,9 @@ impl HirDisplay for Ty {
}
f.end_location_link();
if parameters.len(Interner) > 0 {
let parameters_to_write = if f.display_target.is_source_code()
|| f.omit_verbose_types()
{
match self
.as_generic_def(db)
.map(|generic_def_id| db.generic_defaults(generic_def_id))
.filter(|defaults| !defaults.is_empty())
{
None => parameters.as_slice(Interner),
Some(default_parameters) => {
fn should_show(
parameter: &GenericArg,
default_parameters: &[Binders<GenericArg>],
i: usize,
parameters: &Substitution,
) -> bool {
if parameter.ty(Interner).map(|x| x.kind(Interner))
== Some(&TyKind::Error)
{
return true;
}
if let Some(ConstValue::Concrete(c)) = parameter
.constant(Interner)
.map(|x| &x.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
}
}
let default_parameter = match default_parameters.get(i) {
Some(x) => x,
None => return true,
};
let actual_default =
default_parameter.clone().substitute(Interner, &parameters);
parameter != &actual_default
}
let mut default_from = 0;
for (i, parameter) in parameters.iter(Interner).enumerate() {
if should_show(parameter, &default_parameters, i, parameters) {
default_from = i + 1;
}
}
&parameters.as_slice(Interner)[0..default_from]
}
}
} else {
parameters.as_slice(Interner)
};
if !parameters_to_write.is_empty() {
write!(f, "<")?;
let generic_def = self.as_generic_def(db);
if f.display_target.is_source_code() {
let mut first = true;
for generic_arg in parameters_to_write {
if !first {
write!(f, ", ")?;
}
first = false;
if generic_arg.ty(Interner).map(|ty| ty.kind(Interner))
== Some(&TyKind::Error)
{
write!(f, "_")?;
} else {
generic_arg.hir_fmt(f)?;
}
}
} else {
f.write_joined(parameters_to_write, ", ")?;
}
write!(f, ">")?;
}
}
hir_fmt_generics(f, parameters, generic_def)?;
}
TyKind::AssociatedType(assoc_type_id, parameters) => {
let type_alias = from_assoc_type_id(*assoc_type_id);
@ -841,12 +951,12 @@ impl HirDisplay for Ty {
// Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
if f.display_target.is_test() {
f.start_location_link(trait_.into());
write!(f, "{}", trait_data.name)?;
write!(f, "{}", trait_data.name.display(f.db.upcast()))?;
f.end_location_link();
write!(f, "::")?;
f.start_location_link(type_alias.into());
write!(f, "{}", type_alias_data.name)?;
write!(f, "{}", type_alias_data.name.display(f.db.upcast()))?;
f.end_location_link();
// Note that the generic args for the associated type come before those for the
// trait (including the self type).
@ -869,10 +979,15 @@ impl HirDisplay for Ty {
let alias = from_foreign_def_id(*type_alias);
let type_alias = db.type_alias_data(alias);
f.start_location_link(alias.into());
write!(f, "{}", type_alias.name)?;
write!(f, "{}", type_alias.name.display(f.db.upcast()))?;
f.end_location_link();
}
TyKind::OpaqueType(opaque_ty_id, parameters) => {
if !f.display_target.allows_opaque() {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::OpaqueType,
));
}
let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
match impl_trait_id {
ImplTraitId::ReturnTypeImplTrait(func, idx) => {
@ -919,26 +1034,52 @@ impl HirDisplay for Ty {
}
}
}
TyKind::Closure(.., substs) => {
TyKind::Closure(id, substs) => {
if f.display_target.is_source_code() {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::Closure,
));
if !f.display_target.allows_opaque() {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::OpaqueType,
));
} else if f.closure_style != ClosureStyle::ImplFn {
never!("Only `impl Fn` is valid for displaying closures in source code");
}
}
let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(db);
match f.closure_style {
ClosureStyle::Hide => return write!(f, "{TYPE_HINT_TRUNCATION}"),
ClosureStyle::ClosureWithId => {
return write!(f, "{{closure#{:?}}}", id.0.as_u32())
}
ClosureStyle::ClosureWithSubst => {
write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
return hir_fmt_generics(f, substs, None);
}
_ => (),
}
let sig = ClosureSubst(substs).sig_ty().callable_sig(db);
if let Some(sig) = sig {
let (def, _) = db.lookup_intern_closure((*id).into());
let infer = db.infer(def);
let (_, kind) = infer.closure_info(id);
match f.closure_style {
ClosureStyle::ImplFn => write!(f, "impl {kind:?}(")?,
ClosureStyle::RANotation => write!(f, "|")?,
_ => unreachable!(),
}
if sig.params().is_empty() {
write!(f, "||")?;
} else if f.should_truncate() {
write!(f, "|{TYPE_HINT_TRUNCATION}|")?;
write!(f, "{TYPE_HINT_TRUNCATION}")?;
} else {
write!(f, "|")?;
f.write_joined(sig.params(), ", ")?;
write!(f, "|")?;
};
write!(f, " -> ")?;
sig.ret().hir_fmt(f)?;
match f.closure_style {
ClosureStyle::ImplFn => write!(f, ")")?,
ClosureStyle::RANotation => write!(f, "|")?,
_ => unreachable!(),
}
if f.closure_style == ClosureStyle::RANotation || !sig.ret().is_unit() {
write!(f, " -> ")?;
sig.ret().hir_fmt(f)?;
}
} else {
write!(f, "{{closure}}")?;
}
@ -950,7 +1091,11 @@ impl HirDisplay for Ty {
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
write!(f, "{}", p.name.clone().unwrap_or_else(Name::missing))?
write!(
f,
"{}",
p.name.clone().unwrap_or_else(Name::missing).display(f.db.upcast())
)?
}
TypeParamProvenance::ArgumentImplTrait => {
let substs = generics.placeholder_subst(db);
@ -979,7 +1124,7 @@ impl HirDisplay for Ty {
}
},
TypeOrConstParamData::ConstParamData(p) => {
write!(f, "{}", p.name)?;
write!(f, "{}", p.name.display(f.db.upcast()))?;
}
}
}
@ -1004,6 +1149,11 @@ impl HirDisplay for Ty {
}
TyKind::Alias(AliasTy::Projection(p_ty)) => p_ty.hir_fmt(f)?,
TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
if !f.display_target.allows_opaque() {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::OpaqueType,
));
}
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into());
match impl_trait_id {
ImplTraitId::ReturnTypeImplTrait(func, idx) => {
@ -1067,6 +1217,88 @@ impl HirDisplay for Ty {
}
}
fn hir_fmt_generics(
f: &mut HirFormatter<'_>,
parameters: &Substitution,
generic_def: Option<hir_def::GenericDefId>,
) -> Result<(), HirDisplayError> {
let db = f.db;
let lifetime_args_count = generic_def.map_or(0, |g| db.generic_params(g).lifetimes.len());
if parameters.len(Interner) + lifetime_args_count > 0 {
let parameters_to_write = if f.display_target.is_source_code() || f.omit_verbose_types() {
match generic_def
.map(|generic_def_id| db.generic_defaults(generic_def_id))
.filter(|defaults| !defaults.is_empty())
{
None => parameters.as_slice(Interner),
Some(default_parameters) => {
fn should_show(
parameter: &GenericArg,
default_parameters: &[Binders<GenericArg>],
i: usize,
parameters: &Substitution,
) -> bool {
if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error)
{
return true;
}
if let Some(ConstValue::Concrete(c)) =
parameter.constant(Interner).map(|x| &x.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
}
}
let default_parameter = match default_parameters.get(i) {
Some(x) => x,
None => return true,
};
let actual_default =
default_parameter.clone().substitute(Interner, &parameters);
parameter != &actual_default
}
let mut default_from = 0;
for (i, parameter) in parameters.iter(Interner).enumerate() {
if should_show(parameter, &default_parameters, i, parameters) {
default_from = i + 1;
}
}
&parameters.as_slice(Interner)[0..default_from]
}
}
} else {
parameters.as_slice(Interner)
};
if !parameters_to_write.is_empty() || lifetime_args_count != 0 {
write!(f, "<")?;
let mut first = true;
for _ in 0..lifetime_args_count {
if !first {
write!(f, ", ")?;
}
first = false;
write!(f, "'_")?;
}
for generic_arg in parameters_to_write {
if !first {
write!(f, ", ")?;
}
first = false;
if f.display_target.is_source_code()
&& generic_arg.ty(Interner).map(|ty| ty.kind(Interner)) == Some(&TyKind::Error)
{
write!(f, "_")?;
} else {
generic_arg.hir_fmt(f)?;
}
}
write!(f, ">")?;
}
}
Ok(())
}
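
The default-trimming above hides trailing generic arguments that equal their declared defaults when rendering source code or truncated types; a hypothetical illustration (type names made up):

// Given:   struct Map<K, V, S = DefaultHasher> { .. }
// Map<String, u32, DefaultHasher>  renders as  Map<String, u32>
// Map<String, u32, OtherHasher>    renders as  Map<String, u32, OtherHasher>
// A `TyKind::Error` argument renders as `_` in source-code mode.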
impl HirDisplay for CallableSig {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "fn(")?;
@ -1170,7 +1402,7 @@ fn write_bounds_like_dyn_trait(
// existential) here, which is the only thing that's
// possible in actual Rust, and hence don't print it
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name)?;
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
if let [_, params @ ..] = &*trait_ref.substitution.as_slice(Interner) {
if is_fn_trait {
@ -1209,7 +1441,7 @@ fn write_bounds_like_dyn_trait(
let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
let type_alias = f.db.type_alias_data(assoc_ty_id);
f.start_location_link(assoc_ty_id.into());
write!(f, "{}", type_alias.name)?;
write!(f, "{}", type_alias.name.display(f.db.upcast()))?;
f.end_location_link();
let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
@ -1276,7 +1508,7 @@ fn fmt_trait_ref(
}
let trait_ = tr.hir_trait_id();
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name)?;
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
if tr.substitution.len(Interner) > 1 {
write!(f, "<")?;
@ -1306,7 +1538,7 @@ impl HirDisplay for WhereClause {
write!(f, ">::",)?;
let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
f.start_location_link(type_alias.into());
write!(f, "{}", f.db.type_alias_data(type_alias).name,)?;
write!(f, "{}", f.db.type_alias_data(type_alias).name.display(f.db.upcast()),)?;
f.end_location_link();
write!(f, " = ")?;
ty.hir_fmt(f)?;
@ -1344,7 +1576,8 @@ impl HirDisplay for LifetimeData {
let id = lt_from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.lifetimes[id.local_id];
write!(f, "{}", param_data.name)
write!(f, "{}", param_data.name.display(f.db.upcast()))?;
Ok(())
}
LifetimeData::Static => write!(f, "'static"),
LifetimeData::Erased => Ok(()),
@ -1376,7 +1609,7 @@ pub fn write_visibility(
Visibility::Public => write!(f, "pub "),
Visibility::Module(vis_id) => {
let def_map = module_id.def_map(f.db.upcast());
let root_module_id = def_map.module_id(def_map.root());
let root_module_id = def_map.module_id(DefMap::ROOT);
if vis_id == module_id {
// pub(self) or omitted
Ok(())
@ -1420,7 +1653,7 @@ impl HirDisplay for TypeRef {
};
write!(f, "&")?;
if let Some(lifetime) = lifetime {
write!(f, "{} ", lifetime.name)?;
write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
}
write!(f, "{mutability}")?;
inner.hir_fmt(f)?;
@ -1428,7 +1661,7 @@ impl HirDisplay for TypeRef {
TypeRef::Array(inner, len) => {
write!(f, "[")?;
inner.hir_fmt(f)?;
write!(f, "; {len}]")?;
write!(f, "; {}]", len.display(f.db.upcast()))?;
}
TypeRef::Slice(inner) => {
write!(f, "[")?;
@ -1445,7 +1678,7 @@ impl HirDisplay for TypeRef {
for index in 0..function_parameters.len() {
let (param_name, param_type) = &function_parameters[index];
if let Some(name) = param_name {
write!(f, "{name}: ")?;
write!(f, "{}: ", name.display(f.db.upcast()))?;
}
param_type.hir_fmt(f)?;
@ -1477,7 +1710,10 @@ impl HirDisplay for TypeRef {
}
TypeRef::Macro(macro_call) => {
let macro_call = macro_call.to_node(f.db.upcast());
let ctx = body::LowerCtx::with_hygiene(f.db.upcast(), &Hygiene::new_unhygienic());
let ctx = hir_def::lower::LowerCtx::with_hygiene(
f.db.upcast(),
&Hygiene::new_unhygienic(),
);
match macro_call.path() {
Some(path) => match Path::from_src(path, &ctx) {
Some(path) => path.hir_fmt(f)?,
@ -1503,9 +1739,13 @@ impl HirDisplay for TypeBound {
}
path.hir_fmt(f)
}
TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name.display(f.db.upcast())),
TypeBound::ForLifetime(lifetimes, path) => {
write!(f, "for<{}> ", lifetimes.iter().format(", "))?;
write!(
f,
"for<{}> ",
lifetimes.iter().map(|it| it.display(f.db.upcast())).format(", ")
)?;
path.hir_fmt(f)
}
TypeBound::Error => write!(f, "{{error}}"),
@ -1551,7 +1791,7 @@ impl HirDisplay for Path {
if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
write!(f, "::")?;
}
write!(f, "{}", segment.name)?;
write!(f, "{}", segment.name.display(f.db.upcast()))?;
if let Some(generic_args) = segment.args_and_bindings {
// We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
// Do we actually format expressions?
@ -1598,7 +1838,7 @@ impl HirDisplay for Path {
} else {
write!(f, ", ")?;
}
write!(f, "{}", binding.name)?;
write!(f, "{}", binding.name.display(f.db.upcast()))?;
match &binding.type_ref {
Some(ty) => {
write!(f, " = ")?;
@ -1621,8 +1861,10 @@ impl HirDisplay for hir_def::path::GenericArg {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f),
hir_def::path::GenericArg::Const(c) => write!(f, "{c}"),
hir_def::path::GenericArg::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
hir_def::path::GenericArg::Const(c) => write!(f, "{}", c.display(f.db.upcast())),
hir_def::path::GenericArg::Lifetime(lifetime) => {
write!(f, "{}", lifetime.name.display(f.db.upcast()))
}
}
}
}

View file

@ -13,34 +13,38 @@
//! to certain types. To record this, we use the union-find implementation from
//! the `ena` crate, which is extracted from rustc.
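
The union-find idea in miniature (pseudocode, not the actual `ena` API):

// let a = fresh_var();   // ?0
// let b = fresh_var();   // ?1
// unify(a, b);           // ?0 and ?1 now share one equivalence class
// unify(b, int(i32));    // the class acquires a known type
// resolve(a)             // == i32: both variables resolve to it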
use std::ops::Index;
use std::sync::Arc;
use std::{convert::identity, ops::Index};
use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
use chalk_ir::{
cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety,
Scalar, TyKind, TypeFlags,
};
use either::Either;
use hir_def::{
body::Body,
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
data::{ConstData, StaticData},
expr::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId},
hir::LabelId,
hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId},
lang_item::{LangItem, LangItemTarget},
layout::Integer,
path::Path,
path::{ModPath, Path},
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
type_ref::TypeRef,
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule,
ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId,
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, ItemContainerId, Lookup,
TraitId, TypeAliasId, VariantId,
};
use hir_expand::name::{name, Name};
use la_arena::ArenaMap;
use la_arena::{ArenaMap, Entry};
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::always;
use stdx::{always, never};
use triomphe::Arc;
use crate::{
db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany,
lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Const, DomainGoal,
db::HirDatabase, fold_tys, infer::coerce::CoerceMany, lower::ImplTraitLoweringMode,
static_lifetime, to_assoc_type_id, traits::FnTrait, AliasEq, AliasTy, ClosureId, DomainGoal,
GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, Substitution,
TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
};
// This lint has a false positive here. See the link below for details.
@ -51,12 +55,15 @@ pub use coerce::could_coerce;
#[allow(unreachable_pub)]
pub use unify::could_unify;
pub(crate) use self::closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
pub(crate) mod unify;
mod path;
mod expr;
mod pat;
mod coerce;
mod closure;
pub(crate) mod closure;
mod mutability;
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
@ -99,6 +106,10 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
ctx.infer_body();
ctx.infer_mut_body();
ctx.infer_closures();
Arc::new(ctx.resolve_all())
}
@ -106,14 +117,15 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
///
/// This is appropriate to use only after type-check: it assumes
/// that normalization will succeed, for example.
pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> Ty {
if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) {
pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc<TraitEnvironment>, ty: Ty) -> Ty {
// FIXME: TypeFlags::HAS_CT_PROJECTION is not implemented in chalk, so TypeFlags::HAS_PROJECTION only
// works for the type case, so we check array unconditionally. Remove the array part
// when the bug in chalk becomes fixed.
if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION)
&& !matches!(ty.kind(Interner), TyKind::Array(..))
{
return ty;
}
let krate = owner.module(db.upcast()).krate();
let trait_env = owner
.as_generic_def_id()
.map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
let mut table = unify::InferenceTable::new(db, trait_env);
let ty_with_vars = table.normalize_associated_types_in(ty);
@ -188,7 +200,7 @@ pub enum InferenceDiagnostic {
/// Contains the type the field resolves to
field_with_same_name: Option<Ty>,
},
// FIXME: Make this proper
// FIXME: This should be emitted in body lowering
BreakOutsideOfLoop {
expr: ExprId,
is_break: bool,
@ -203,6 +215,10 @@ pub enum InferenceDiagnostic {
call_expr: ExprId,
found: Ty,
},
TypedHole {
expr: ExprId,
expected: Ty,
},
}
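
`TypedHole` corresponds to an `_` in expression position; a sketch of the code it fires on (kept as a comment, since such code intentionally does not compile):

// fn f() -> i32 {
//     _    // TypedHole { expr: <the `_`>, expected: i32 }
// }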
/// A mismatch between an expected and an inferred type.
@ -276,6 +292,13 @@ pub struct Adjustment {
pub target: Ty,
}
impl Adjustment {
pub fn borrow(m: Mutability, ty: Ty) -> Self {
let ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty }
}
}
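
`Adjustment::borrow` models implicit autoref; in plain Rust terms (illustrative):

fn autoref_demo() {
    let v = vec![1, 2, 3];
    // `v.len()` is adjusted to `(&v).len()`, i.e.
    // Adjust::Borrow(AutoBorrow::Ref(Mutability::Not)) with target `&Vec<i32>`.
    let _n = v.len();
}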
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Adjust {
/// Go from ! to any type.
@ -304,6 +327,13 @@ pub enum AutoBorrow {
RawPtr(Mutability),
}
impl AutoBorrow {
fn mutability(self) -> Mutability {
let (AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) = self;
m
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum PointerCast {
/// Go from a fn-item type to a fn-pointer type.
@ -337,6 +367,10 @@ pub enum PointerCast {
}
/// The result of type inference: A mapping from expressions and patterns to types.
///
/// When you add a field that stores types (including `Substitution` and the like), don't forget
/// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must
/// not appear in the final inference result.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct InferenceResult {
/// For each method call expr, records the function it resolves to.
@ -363,8 +397,11 @@ pub struct InferenceResult {
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
pub pat_binding_modes: FxHashMap<PatId, BindingMode>,
pub binding_modes: ArenaMap<BindingId, BindingMode>,
pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
}
impl InferenceResult {
@ -401,6 +438,9 @@ impl InferenceResult {
_ => None,
})
}
pub fn closure_info(&self, closure: &ClosureId) -> &(Vec<CapturedItem>, FnTrait) {
self.closure_info.get(closure).unwrap()
}
}
impl Index<ExprId> for InferenceResult {
@ -435,7 +475,6 @@ pub(crate) struct InferenceContext<'a> {
pub(crate) body: &'a Body,
pub(crate) resolver: Resolver,
table: unify::InferenceTable<'a>,
trait_env: Arc<TraitEnvironment>,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult,
@ -453,6 +492,14 @@ pub(crate) struct InferenceContext<'a> {
resume_yield_tys: Option<(Ty, Ty)>,
diverges: Diverges,
breakables: Vec<BreakableContext>,
// fields related to closure capture
current_captures: Vec<CapturedItemWithoutTy>,
current_closure: Option<ClosureId>,
/// Stores the list of closure ids that need to be analyzed before this closure. See the
/// comment on `InferenceContext::sort_closures`
closure_dependencies: FxHashMap<ClosureId, Vec<ClosureId>>,
deferred_closures: FxHashMap<ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>>,
}
#[derive(Clone, Debug)]
@ -462,7 +509,7 @@ struct BreakableContext {
/// The coercion target of the context.
coerce: Option<CoerceMany>,
/// The optional label of the context.
label: Option<name::Name>,
label: Option<LabelId>,
kind: BreakableKind,
}
@ -477,21 +524,21 @@ enum BreakableKind {
fn find_breakable<'c>(
ctxs: &'c mut [BreakableContext],
label: Option<&name::Name>,
label: Option<LabelId>,
) -> Option<&'c mut BreakableContext> {
let mut ctxs = ctxs
.iter_mut()
.rev()
.take_while(|it| matches!(it.kind, BreakableKind::Block | BreakableKind::Loop));
match label {
Some(_) => ctxs.find(|ctx| ctx.label.as_ref() == label),
Some(_) => ctxs.find(|ctx| ctx.label == label),
None => ctxs.find(|ctx| matches!(ctx.kind, BreakableKind::Loop)),
}
}
fn find_continuable<'c>(
ctxs: &'c mut [BreakableContext],
label: Option<&name::Name>,
label: Option<LabelId>,
) -> Option<&'c mut BreakableContext> {
match label {
Some(_) => find_breakable(ctxs, label).filter(|it| matches!(it.kind, BreakableKind::Loop)),
@ -506,14 +553,10 @@ impl<'a> InferenceContext<'a> {
body: &'a Body,
resolver: Resolver,
) -> Self {
let krate = owner.module(db.upcast()).krate();
let trait_env = owner
.as_generic_def_id()
.map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
let trait_env = db.trait_environment_for_body(owner);
InferenceContext {
result: InferenceResult::default(),
table: unify::InferenceTable::new(db, trait_env.clone()),
trait_env,
table: unify::InferenceTable::new(db, trait_env),
return_ty: TyKind::Error.intern(Interner), // set in collect_* calls
resume_yield_tys: None,
return_coercion: None,
@ -524,6 +567,10 @@ impl<'a> InferenceContext<'a> {
resolver,
diverges: Diverges::Maybe,
breakables: Vec::new(),
current_captures: vec![],
current_closure: None,
deferred_closures: FxHashMap::default(),
closure_dependencies: FxHashMap::default(),
}
}
@ -533,6 +580,30 @@ impl<'a> InferenceContext<'a> {
// there is no problem in it being `pub(crate)`, remove this comment.
pub(crate) fn resolve_all(self) -> InferenceResult {
let InferenceContext { mut table, mut result, .. } = self;
// Destructure every single field so whenever new fields are added to `InferenceResult` we
// don't forget to handle them here.
let InferenceResult {
method_resolutions,
field_resolutions: _,
variant_resolutions: _,
assoc_resolutions,
diagnostics,
type_of_expr,
type_of_pat,
type_of_binding,
type_of_rpit,
type_of_for_iterator,
type_mismatches,
standard_types: _,
pat_adjustments,
binding_modes: _,
expr_adjustments,
// Types in `closure_info` have already been `resolve_completely()`'d during
// `InferenceContext::infer_closures()` (in `HirPlace::ty()` specifically), so no need
// to resolve them here.
closure_info: _,
mutated_bindings_in_closure: _,
} = &mut result;
table.fallback_if_possible();
@ -541,62 +612,63 @@ impl<'a> InferenceContext<'a> {
// make sure diverging type variables are marked as such
table.propagate_diverging_flag();
for ty in result.type_of_expr.values_mut() {
for ty in type_of_expr.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for ty in result.type_of_pat.values_mut() {
for ty in type_of_pat.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for ty in result.type_of_binding.values_mut() {
for ty in type_of_binding.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for ty in result.type_of_rpit.values_mut() {
for ty in type_of_rpit.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for ty in result.type_of_for_iterator.values_mut() {
for ty in type_of_for_iterator.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for mismatch in result.type_mismatches.values_mut() {
for mismatch in type_mismatches.values_mut() {
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
}
result.diagnostics.retain_mut(|diagnostic| {
if let InferenceDiagnostic::ExpectedFunction { found: ty, .. }
| InferenceDiagnostic::UnresolvedField { receiver: ty, .. }
| InferenceDiagnostic::UnresolvedMethodCall { receiver: ty, .. } = diagnostic
{
*ty = table.resolve_completely(ty.clone());
// FIXME: Remove this when we are on par with rustc in terms of inference
if ty.contains_unknown() {
return false;
}
diagnostics.retain_mut(|diagnostic| {
use InferenceDiagnostic::*;
match diagnostic {
ExpectedFunction { found: ty, .. }
| UnresolvedField { receiver: ty, .. }
| UnresolvedMethodCall { receiver: ty, .. } => {
*ty = table.resolve_completely(ty.clone());
// FIXME: Remove this when we are on par with rustc in terms of inference
if ty.contains_unknown() {
return false;
}
if let InferenceDiagnostic::UnresolvedMethodCall { field_with_same_name, .. } =
diagnostic
{
let clear = if let Some(ty) = field_with_same_name {
*ty = table.resolve_completely(ty.clone());
ty.contains_unknown()
} else {
false
};
if clear {
*field_with_same_name = None;
if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic {
if let Some(ty) = field_with_same_name {
*ty = table.resolve_completely(ty.clone());
if ty.contains_unknown() {
*field_with_same_name = None;
}
}
}
}
TypedHole { expected: ty, .. } => {
*ty = table.resolve_completely(ty.clone());
}
_ => (),
}
true
});
for (_, subst) in result.method_resolutions.values_mut() {
for (_, subst) in method_resolutions.values_mut() {
*subst = table.resolve_completely(subst.clone());
}
for (_, subst) in result.assoc_resolutions.values_mut() {
for (_, subst) in assoc_resolutions.values_mut() {
*subst = table.resolve_completely(subst.clone());
}
for adjustment in result.expr_adjustments.values_mut().flatten() {
for adjustment in expr_adjustments.values_mut().flatten() {
adjustment.target = table.resolve_completely(adjustment.target.clone());
}
for adjustment in result.pat_adjustments.values_mut().flatten() {
for adjustment in pat_adjustments.values_mut().flatten() {
*adjustment = table.resolve_completely(adjustment.clone());
}
result
@ -615,7 +687,7 @@ impl<'a> InferenceContext<'a> {
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
.with_impl_trait_mode(ImplTraitLoweringMode::Param);
let mut param_tys =
data.params.iter().map(|(_, type_ref)| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
data.params.iter().map(|type_ref| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
// Check if function contains a va_list, if it does then we append it to the parameter types
// that are collected from the function data
if data.is_varargs() {
@ -634,12 +706,7 @@ impl<'a> InferenceContext<'a> {
self.infer_top_pat(*pat, &ty);
}
let error_ty = &TypeRef::Error;
let return_ty = if data.has_async_kw() {
data.async_ret_type.as_deref().unwrap_or(error_ty)
} else {
&*data.ret_type
};
let return_ty = &*data.ret_type;
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
@ -649,36 +716,16 @@ impl<'a> InferenceContext<'a> {
let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) {
// RPIT opaque types use substitution of their parent function.
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
fold_tys(
return_ty,
|ty, _| {
let opaque_ty_id = match ty.kind(Interner) {
TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id,
_ => return ty,
};
let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
ImplTraitId::ReturnTypeImplTrait(_, idx) => idx,
_ => unreachable!(),
};
let bounds = (*rpits).map_ref(|rpits| {
rpits.impl_traits[idx].bounds.map_ref(|it| it.into_iter())
});
let var = self.table.new_type_var();
let var_subst = Substitution::from1(Interner, var.clone());
for bound in bounds {
let predicate =
bound.map(|it| it.cloned()).substitute(Interner, &fn_placeholders);
let (var_predicate, binders) = predicate
.substitute(Interner, &var_subst)
.into_value_and_skipped_binders();
always!(binders.is_empty(Interner)); // quantified where clauses not yet handled
self.push_obligation(var_predicate.cast(Interner));
}
self.result.type_of_rpit.insert(idx, var.clone());
var
},
DebruijnIndex::INNERMOST,
)
let result =
self.insert_inference_vars_for_rpit(return_ty, rpits.clone(), fn_placeholders);
let rpits = rpits.skip_binders();
for (id, _) in rpits.impl_traits.iter() {
if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) {
never!("Missed RPIT in `insert_inference_vars_for_rpit`");
e.insert(TyKind::Error.intern(Interner));
}
}
result
} else {
return_ty
};
@ -687,6 +734,50 @@ impl<'a> InferenceContext<'a> {
self.return_coercion = Some(CoerceMany::new(self.return_ty.clone()));
}
fn insert_inference_vars_for_rpit<T>(
&mut self,
t: T,
rpits: Arc<chalk_ir::Binders<crate::ReturnTypeImplTraits>>,
fn_placeholders: Substitution,
) -> T
where
T: crate::HasInterner<Interner = Interner> + crate::TypeFoldable<Interner>,
{
fold_tys(
t,
|ty, _| {
let opaque_ty_id = match ty.kind(Interner) {
TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id,
_ => return ty,
};
let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
ImplTraitId::ReturnTypeImplTrait(_, idx) => idx,
_ => unreachable!(),
};
let bounds = (*rpits)
.map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.into_iter()));
let var = self.table.new_type_var();
let var_subst = Substitution::from1(Interner, var.clone());
for bound in bounds {
let predicate =
bound.map(|it| it.cloned()).substitute(Interner, &fn_placeholders);
let (var_predicate, binders) =
predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders();
always!(binders.is_empty(Interner)); // quantified where clauses not yet handled
let var_predicate = self.insert_inference_vars_for_rpit(
var_predicate,
rpits.clone(),
fn_placeholders.clone(),
);
self.push_obligation(var_predicate.cast(Interner));
}
self.result.type_of_rpit.insert(idx, var.clone());
var
},
DebruijnIndex::INNERMOST,
)
}
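
Concretely, this is the shape of function the RPIT machinery above handles (plain Rust):

fn evens() -> impl Iterator<Item = u32> {
    // While inferring this body, the opaque `impl Iterator<Item = u32>` is
    // replaced by a fresh variable ?T with the registered obligation
    // `?T: Iterator<Item = u32>`, and the block is checked against ?T.
    (0u32..10).filter(|x| *x % 2 == 0)
}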
fn infer_body(&mut self) {
match self.return_coercion {
Some(_) => self.infer_return(self.body.body_expr),
@ -742,43 +833,16 @@ impl<'a> InferenceContext<'a> {
self.result.standard_types.unknown.clone()
}
/// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
let data = c.data(Interner);
match &data.value {
ConstValue::Concrete(cc) => match cc.interned {
crate::ConstScalar::Unknown => self.table.new_const_var(data.ty.clone()),
_ => c,
},
_ => c,
}
}
/// Replaces `Ty::Error` by a new type var, so we can maybe still infer it.
fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
match ty.kind(Interner) {
TyKind::Error => self.table.new_type_var(),
TyKind::InferenceVar(..) => {
let ty_resolved = self.resolve_ty_shallow(&ty);
if ty_resolved.is_unknown() {
self.table.new_type_var()
} else {
ty
}
}
_ => ty,
}
self.table.insert_type_vars_shallow(ty)
}
fn insert_type_vars(&mut self, ty: Ty) -> Ty {
fold_tys_and_consts(
ty,
|x, _| match x {
Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
},
DebruijnIndex::INNERMOST,
)
fn insert_type_vars<T>(&mut self, ty: T) -> T
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
{
self.table.insert_type_vars(ty)
}
fn push_obligation(&mut self, o: DomainGoal) {
@ -789,13 +853,75 @@ impl<'a> InferenceContext<'a> {
self.table.unify(ty1, ty2)
}
/// Attempts to returns the deeply last field of nested structures, but
/// does not apply any normalization in its search. Returns the same type
/// if input `ty` is not a structure at all.
fn struct_tail_without_normalization(&mut self, ty: Ty) -> Ty {
self.struct_tail_with_normalize(ty, identity)
}
/// Returns the deeply last field of nested structures, or the same type if
/// not a structure at all. Corresponds to the only possible unsized field,
/// and its type can be used to determine unsizing strategy.
///
/// This is parameterized over the normalization strategy (i.e. how to
/// handle `<T as Trait>::Assoc` and `impl Trait`); pass the identity
/// function to indicate no normalization should take place.
fn struct_tail_with_normalize(
&mut self,
mut ty: Ty,
mut normalize: impl FnMut(Ty) -> Ty,
) -> Ty {
// FIXME: fetch the limit properly
let recursion_limit = 10;
for iteration in 0.. {
if iteration > recursion_limit {
return self.err_ty();
}
match ty.kind(Interner) {
TyKind::Adt(chalk_ir::AdtId(hir_def::AdtId::StructId(struct_id)), substs) => {
match self.db.field_types((*struct_id).into()).values().next_back().cloned() {
Some(field) => {
ty = field.substitute(Interner, substs);
}
None => break,
}
}
TyKind::Adt(..) => break,
TyKind::Tuple(_, substs) => {
match substs
.as_slice(Interner)
.split_last()
.and_then(|(last_ty, _)| last_ty.ty(Interner))
{
Some(last_ty) => ty = last_ty.clone(),
None => break,
}
}
TyKind::Alias(..) => {
let normalized = normalize(ty.clone());
if ty == normalized {
return ty;
} else {
ty = normalized;
}
}
_ => break,
}
}
ty
}
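
A plain-Rust illustration of the struct tail (the only field that may be unsized):

struct Inner {
    header: u32,
    data: [u8], // the unsized tail
}
struct Outer {
    meta: u64,
    inner: Inner, // tail of `Outer` == tail of `Inner` == `[u8]`
}
// Walking last fields gives Outer -> Inner -> [u8], and `[u8]` is what
// determines the unsizing strategy for pointers to `Outer`.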
/// Recurses through the given type, normalizing associated types mentioned
/// in it by replacing them by type variables and registering obligations to
/// resolve later. This should be done once for every type we get from some
/// type annotation (e.g. from a let type annotation, field type or function
/// call). `make_ty` handles this already, but e.g. for field types we need
/// to do it as well.
fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
fn normalize_associated_types_in<T>(&mut self, ty: T) -> T
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
{
self.table.normalize_associated_types_in(ty)
}
@ -848,10 +974,8 @@ impl<'a> InferenceContext<'a> {
None => return (self.err_ty(), None),
};
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
// FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
let (resolution, unresolved) = if value_ns {
match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path) {
Some(ResolveValueResult::ValueNs(value)) => match value {
ValueNs::EnumVariantId(var) => {
let substs = ctx.substs_from_path(path, var.into(), true);
@ -872,11 +996,15 @@ impl<'a> InferenceContext<'a> {
None => return (self.err_ty(), None),
}
} else {
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some(it) => it,
None => return (self.err_ty(), None),
}
};
let Some(mod_path) = path.mod_path() else {
never!("resolver should always resolve lang item paths");
return (self.err_ty(), None);
};
return match resolution {
TypeNs::AdtId(AdtId::StructId(strukt)) => {
let substs = ctx.substs_from_path(path, strukt.into(), true);
@ -899,8 +1027,68 @@ impl<'a> InferenceContext<'a> {
TypeNs::SelfType(impl_id) => {
let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
let substs = generics.placeholder_subst(self.db);
let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
self.resolve_variant_on_alias(ty, unresolved, path)
let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
let Some(mut remaining_idx) = unresolved else {
return self.resolve_variant_on_alias(ty, None, mod_path);
};
let mut remaining_segments = path.segments().skip(remaining_idx);
// We need to try resolving unresolved segments one by one because each may resolve
// to a projection, which `TyLoweringContext` cannot handle on its own.
while !remaining_segments.is_empty() {
let resolved_segment = path.segments().get(remaining_idx - 1).unwrap();
let current_segment = remaining_segments.take(1);
// If we can resolve to an enum variant, it takes priority over associated type
// of the same name.
if let Some((AdtId::EnumId(id), _)) = ty.as_adt() {
let enum_data = self.db.enum_data(id);
let name = current_segment.first().unwrap().name;
if let Some(local_id) = enum_data.variant(name) {
let variant = EnumVariantId { parent: id, local_id };
return if remaining_segments.len() == 1 {
(ty, Some(variant.into()))
} else {
// We still have unresolved paths, but enum variants never have
// associated types!
(self.err_ty(), None)
};
}
}
// `lower_partly_resolved_path()` returns `None` as type namespace unless
// `remaining_segments` is empty, which is never the case here. We don't know
// which namespace the new `ty` is in until normalized anyway.
(ty, _) = ctx.lower_partly_resolved_path(
resolution,
resolved_segment,
current_segment,
false,
);
ty = self.table.insert_type_vars(ty);
ty = self.table.normalize_associated_types_in(ty);
ty = self.table.resolve_ty_shallow(&ty);
if ty.is_unknown() {
return (self.err_ty(), None);
}
// FIXME(inherent_associated_types): update `resolution` based on `ty` here.
remaining_idx += 1;
remaining_segments = remaining_segments.skip(1);
}
let variant = ty.as_adt().and_then(|(id, _)| match id {
AdtId::StructId(s) => Some(VariantId::StructId(s)),
AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
AdtId::EnumId(_) => {
// FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
None
}
});
(ty, variant)
}
TypeNs::TypeAliasId(it) => {
let container = it.lookup(self.db.upcast()).container;
@ -917,7 +1105,7 @@ impl<'a> InferenceContext<'a> {
let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst)
.fill_with_inference_vars(&mut self.table)
.build();
self.resolve_variant_on_alias(ty, unresolved, path)
self.resolve_variant_on_alias(ty, unresolved, mod_path)
}
TypeNs::AdtSelfType(_) => {
// FIXME this could happen in array size expressions, once we're checking them
@ -953,9 +1141,9 @@ impl<'a> InferenceContext<'a> {
&mut self,
ty: Ty,
unresolved: Option<usize>,
path: &Path,
path: &ModPath,
) -> (Ty, Option<VariantId>) {
let remaining = unresolved.map(|x| path.segments().skip(x).len()).filter(|x| x > &0);
let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0);
match remaining {
None => {
let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
@ -969,7 +1157,7 @@ impl<'a> InferenceContext<'a> {
(ty, variant)
}
Some(1) => {
let segment = path.mod_path().segments().last().unwrap();
let segment = path.segments().last().unwrap();
// this could be an enum variant or associated type
if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
let enum_data = self.db.enum_data(enum_id);
@ -993,22 +1181,6 @@ impl<'a> InferenceContext<'a> {
self.db.lang_item(krate, item)
}
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IntoIterIntoIter)?
.as_function()?
.lookup(self.db.upcast()).container
else { return None };
self.db.trait_data(trait_).associated_type_by_name(&name![IntoIter])
}
fn resolve_iterator_item(&self) -> Option<TypeAliasId> {
let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IteratorNext)?
.as_function()?
.lookup(self.db.upcast()).container
else { return None };
self.db.trait_data(trait_).associated_type_by_name(&name![Item])
}
fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
}
@ -1017,10 +1189,6 @@ impl<'a> InferenceContext<'a> {
self.resolve_lang_item(lang)?.as_trait()
}
fn resolve_ops_try_output(&self) -> Option<TypeAliasId> {
self.resolve_output_on(self.resolve_lang_trait(LangItem::Try)?)
}
fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?)
}
@ -1136,9 +1304,8 @@ impl Expectation {
/// which is still useful, because it informs integer literals and the like.
/// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
/// for examples of where this comes up.
fn rvalue_hint(table: &mut unify::InferenceTable<'_>, ty: Ty) -> Self {
// FIXME: do struct_tail_without_normalization
match table.resolve_ty_shallow(&ty).kind(Interner) {
fn rvalue_hint(ctx: &mut InferenceContext<'_>, ty: Ty) -> Self {
match ctx.struct_tail_without_normalization(ty.clone()).kind(Interner) {
TyKind::Slice(_) | TyKind::Str | TyKind::Dyn(_) => Expectation::RValueLikeUnsized(ty),
_ => Expectation::has_type(ty),
}

View file

@ -1,12 +1,33 @@
//! Inference of closure parameter types based on the closure's expected type.
use chalk_ir::{cast::Cast, AliasEq, AliasTy, FnSubst, WhereClause};
use hir_def::{expr::ExprId, HasModule};
use std::{cmp, collections::HashMap, convert::Infallible, mem};
use chalk_ir::{
cast::Cast,
fold::{FallibleTypeFolder, TypeFoldable},
AliasEq, AliasTy, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause,
};
use hir_def::{
data::adt::VariantData,
hir::{Array, BinaryOp, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, Statement, UnaryOp},
lang_item::LangItem,
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
DefWithBodyId, FieldId, HasModule, VariantId,
};
use hir_expand::name;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use stdx::never;
use crate::{
to_chalk_trait_id, utils, ChalkTraitId, DynTy, FnPointer, FnSig, Interner, Substitution, Ty,
TyExt, TyKind,
db::HirDatabase,
from_placeholder_idx, make_binders,
mir::{BorrowKind, MirSpan, ProjectionElem},
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
utils::{self, generics, Generics},
Adjust, Adjustment, Binders, BindingMode, ChalkTraitId, ClosureId, DynTy, FnPointer, FnSig,
Interner, Substitution, Ty, TyExt,
};
use super::{Expectation, InferenceContext};
@ -86,3 +107,906 @@ impl InferenceContext<'_> {
None
}
}
// The functions below handle capture analysis and closure kind inference (Fn, FnMut, FnOnce).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct HirPlace {
pub(crate) local: BindingId,
pub(crate) projections: Vec<ProjectionElem<Infallible, Ty>>,
}
impl HirPlace {
fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty {
let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone());
for p in &self.projections {
ty = p.projected_ty(
ty,
ctx.db,
|_, _, _| {
unreachable!("Closure field only happens in MIR");
},
ctx.owner.module(ctx.db.upcast()).krate(),
);
}
ty.clone()
}
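/// Computes the capture kind that remains after the place has been truncated to
/// its first `len` projections. E.g. if `*a.b` was captured by mutable borrow
/// and the place is truncated to `a.b`, the deref that was cut off forces the
/// capture to become a unique borrow instead of a plain mutable one.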
fn capture_kind_of_truncated_place(
&self,
mut current_capture: CaptureKind,
len: usize,
) -> CaptureKind {
match current_capture {
CaptureKind::ByRef(BorrowKind::Mut { .. }) => {
if self.projections[len..].iter().any(|x| *x == ProjectionElem::Deref) {
current_capture = CaptureKind::ByRef(BorrowKind::Unique);
}
}
_ => (),
}
current_capture
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum CaptureKind {
ByRef(BorrowKind),
ByValue,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CapturedItem {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
pub(crate) span: MirSpan,
pub(crate) ty: Binders<Ty>,
}
impl CapturedItem {
pub fn local(&self) -> BindingId {
self.place.local
}
pub fn ty(&self, subst: &Substitution) -> Ty {
self.ty.clone().substitute(Interner, utils::ClosureSubst(subst).parent_subst())
}
pub fn kind(&self) -> CaptureKind {
self.kind
}
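/// Renders the captured place as user-visible source-like text; e.g. a capture
/// of field `b` behind a dereferenced binding `a` is displayed as `(*a).b`.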
pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
let body = db.body(owner);
let mut result = body[self.place.local].name.display(db.upcast()).to_string();
let mut field_need_paren = false;
for proj in &self.place.projections {
match proj {
ProjectionElem::Deref => {
result = format!("*{result}");
field_need_paren = true;
}
ProjectionElem::Field(f) => {
if field_need_paren {
result = format!("({result})");
}
let variant_data = f.parent.variant_data(db.upcast());
let field = match &*variant_data {
VariantData::Record(fields) => fields[f.local_id]
.name
.as_str()
.unwrap_or("[missing field]")
.to_string(),
VariantData::Tuple(fields) => fields
.iter()
.position(|x| x.0 == f.local_id)
.unwrap_or_default()
.to_string(),
VariantData::Unit => "[missing field]".to_string(),
};
result = format!("{result}.{field}");
field_need_paren = false;
}
&ProjectionElem::TupleOrClosureField(field) => {
if field_need_paren {
result = format!("({result})");
}
result = format!("{result}.{field}");
field_need_paren = false;
}
ProjectionElem::Index(_)
| ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. }
| ProjectionElem::OpaqueCast(_) => {
never!("Not happen in closure capture");
continue;
}
}
}
result
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct CapturedItemWithoutTy {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
pub(crate) span: MirSpan,
}
impl CapturedItemWithoutTy {
fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
let ty = self.place.ty(ctx).clone();
let ty = match &self.kind {
CaptureKind::ByValue => ty,
CaptureKind::ByRef(bk) => {
let m = match bk {
BorrowKind::Mut { .. } => Mutability::Mut,
_ => Mutability::Not,
};
TyKind::Ref(m, static_lifetime(), ty).intern(Interner)
}
};
return CapturedItem {
place: self.place,
kind: self.kind,
span: self.span,
ty: replace_placeholder_with_binder(ctx.db, ctx.owner, ty),
};
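// Rebind the placeholders of the owner's generic parameters as bound
// variables, so the capture's type can be stored outside of the inference
// context. E.g. a captured `Vec<T>`, where `T` is a type parameter of the
// enclosing function, is stored roughly as `Binders<Vec<^0.n>>` (in chalk
// notation), with `n` being the index of `T` in the owner's generics.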
fn replace_placeholder_with_binder(
db: &dyn HirDatabase,
owner: DefWithBodyId,
ty: Ty,
) -> Binders<Ty> {
struct Filler<'a> {
db: &'a dyn HirDatabase,
generics: Generics,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = ();
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_free_placeholder_const(
&mut self,
ty: chalk_ir::Ty<Interner>,
idx: chalk_ir::PlaceholderIndex,
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_const(Interner, ty))
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.param_idx(x) else {
return Err(());
};
Ok(BoundVar::new(outer_binder, idx).to_ty(Interner))
}
}
let Some(generic_def) = owner.as_generic_def_id() else {
return Binders::empty(Interner, ty);
};
let filler = &mut Filler { db, generics: generics(db.upcast(), generic_def) };
let result = ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST).unwrap_or(ty);
make_binders(db, &filler.generics, result)
}
}
}
impl InferenceContext<'_> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
let r = self.place_of_expr_without_adjust(tgt_expr)?;
let default = vec![];
let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
apply_adjusts_to_place(r, adjustments)
}
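// A place currently has to be based on a local binding, optionally projected
// through fields (named, tuple, or closure fields) and through derefs of
// references or raw pointers; every other expression is a value and yields
// `None`.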
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
match &self.body[tgt_expr] {
Expr::Path(p) => {
let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
if let Some(r) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) {
if let ResolveValueResult::ValueNs(v) = r {
if let ValueNs::LocalBinding(b) = v {
return Some(HirPlace { local: b, projections: vec![] });
}
}
}
}
Expr::Field { expr, name } => {
let mut place = self.place_of_expr(*expr)?;
if let TyKind::Tuple(..) = self.expr_ty(*expr).kind(Interner) {
let index = name.as_tuple_index()?;
place.projections.push(ProjectionElem::TupleOrClosureField(index))
} else {
let field = self.result.field_resolution(tgt_expr)?;
place.projections.push(ProjectionElem::Field(field));
}
return Some(place);
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if matches!(
self.expr_ty_after_adjustments(*expr).kind(Interner),
TyKind::Ref(..) | TyKind::Raw(..)
) {
let mut place = self.place_of_expr(*expr)?;
place.projections.push(ProjectionElem::Deref);
return Some(place);
}
}
_ => (),
}
None
}
fn push_capture(&mut self, capture: CapturedItemWithoutTy) {
self.current_captures.push(capture);
}
fn ref_expr(&mut self, expr: ExprId) {
if let Some(place) = self.place_of_expr(expr) {
self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared), expr.into());
}
self.walk_expr(expr);
}
fn add_capture(&mut self, place: HirPlace, kind: CaptureKind, span: MirSpan) {
if self.is_upvar(&place) {
self.push_capture(CapturedItemWithoutTy { place, kind, span });
}
}
fn mutate_expr(&mut self, expr: ExprId) {
if let Some(place) = self.place_of_expr(expr) {
self.add_capture(
place,
CaptureKind::ByRef(BorrowKind::Mut { allow_two_phase_borrow: false }),
expr.into(),
);
}
self.walk_expr(expr);
}
fn consume_expr(&mut self, expr: ExprId) {
if let Some(place) = self.place_of_expr(expr) {
self.consume_place(place, expr.into());
}
self.walk_expr(expr);
}
fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
if self.is_upvar(&place) {
let ty = place.ty(self).clone();
let kind = if self.is_ty_copy(ty) {
CaptureKind::ByRef(BorrowKind::Shared)
} else {
CaptureKind::ByValue
};
self.push_capture(CapturedItemWithoutTy { place, kind, span });
}
}
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
if let Some((last, rest)) = adjustment.split_last() {
match last.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
self.walk_expr_with_adjust(tgt_expr, rest)
}
Adjust::Deref(Some(m)) => match m.0 {
Some(m) => {
self.ref_capture_with_adjusts(m, tgt_expr, rest);
}
None => unreachable!(),
},
Adjust::Borrow(b) => {
self.ref_capture_with_adjusts(b.mutability(), tgt_expr, rest);
}
}
} else {
self.walk_expr_without_adjust(tgt_expr);
}
}
fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) {
let capture_kind = match m {
Mutability::Mut => {
CaptureKind::ByRef(BorrowKind::Mut { allow_two_phase_borrow: false })
}
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
};
if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) {
if let Some(place) = apply_adjusts_to_place(place, rest) {
self.add_capture(place, capture_kind, tgt_expr.into());
}
}
self.walk_expr_with_adjust(tgt_expr, rest);
}
fn walk_expr(&mut self, tgt_expr: ExprId) {
if let Some(x) = self.result.expr_adjustments.get_mut(&tgt_expr) {
// FIXME: this take is completely unneeded; it is only here to make the
// borrow checker happy. Remove it if you can.
let x_taken = mem::take(x);
self.walk_expr_with_adjust(tgt_expr, &x_taken);
*self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
} else {
self.walk_expr_without_adjust(tgt_expr);
}
}
fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) {
match &self.body[tgt_expr] {
Expr::If { condition, then_branch, else_branch } => {
self.consume_expr(*condition);
self.consume_expr(*then_branch);
if let &Some(expr) = else_branch {
self.consume_expr(expr);
}
}
Expr::Async { statements, tail, .. }
| Expr::Unsafe { statements, tail, .. }
| Expr::Block { statements, tail, .. } => {
for s in statements.iter() {
match s {
Statement::Let { pat, type_ref: _, initializer, else_branch } => {
if let Some(else_branch) = else_branch {
self.consume_expr(*else_branch);
if let Some(initializer) = initializer {
self.consume_expr(*initializer);
}
return;
}
if let Some(initializer) = initializer {
self.walk_expr(*initializer);
if let Some(place) = self.place_of_expr(*initializer) {
self.consume_with_pat(place, *pat);
}
}
}
Statement::Expr { expr, has_semi: _ } => {
self.consume_expr(*expr);
}
}
}
if let Some(tail) = tail {
self.consume_expr(*tail);
}
}
Expr::While { condition, body, label: _ } => {
self.consume_expr(*condition);
self.consume_expr(*body);
}
Expr::Call { callee, args, is_assignee_expr: _ } => {
self.consume_expr(*callee);
self.consume_exprs(args.iter().copied());
}
Expr::MethodCall { receiver, args, .. } => {
self.consume_expr(*receiver);
self.consume_exprs(args.iter().copied());
}
Expr::Match { expr, arms } => {
for arm in arms.iter() {
self.consume_expr(arm.expr);
if let Some(guard) = arm.guard {
self.consume_expr(guard);
}
}
self.walk_expr(*expr);
if let Some(discr_place) = self.place_of_expr(*expr) {
if self.is_upvar(&discr_place) {
let mut capture_mode = None;
for arm in arms.iter() {
self.walk_pat(&mut capture_mode, arm.pat);
}
if let Some(c) = capture_mode {
self.push_capture(CapturedItemWithoutTy {
place: discr_place,
kind: c,
span: (*expr).into(),
})
}
}
}
}
Expr::Break { expr, label: _ }
| Expr::Return { expr }
| Expr::Yield { expr }
| Expr::Yeet { expr } => {
if let &Some(expr) = expr {
self.consume_expr(expr);
}
}
Expr::RecordLit { fields, spread, .. } => {
if let &Some(expr) = spread {
self.consume_expr(expr);
}
self.consume_exprs(fields.iter().map(|x| x.expr));
}
Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if matches!(
self.expr_ty_after_adjustments(*expr).kind(Interner),
TyKind::Ref(..) | TyKind::Raw(..)
) {
self.select_from_expr(*expr);
} else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
let mutability = 'b: {
if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut).and_then(|x| x.as_trait())
{
if let Some(deref_fn) =
self.db.trait_data(deref_trait).method_by_name(&name![deref_mut])
{
break 'b deref_fn == f;
}
}
false
};
if mutability {
self.mutate_expr(*expr);
} else {
self.ref_expr(*expr);
}
} else {
self.select_from_expr(*expr);
}
}
Expr::UnaryOp { expr, op: _ }
| Expr::Array(Array::Repeat { initializer: expr, repeat: _ })
| Expr::Await { expr }
| Expr::Loop { body: expr, label: _ }
| Expr::Let { pat: _, expr }
| Expr::Box { expr }
| Expr::Cast { expr, type_ref: _ } => {
self.consume_expr(*expr);
}
Expr::Ref { expr, rawness: _, mutability } => match mutability {
hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr),
hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr),
},
Expr::BinaryOp { lhs, rhs, op } => {
let Some(op) = op else {
return;
};
if matches!(op, BinaryOp::Assignment { .. }) {
self.mutate_expr(*lhs);
self.consume_expr(*rhs);
return;
}
self.consume_expr(*lhs);
self.consume_expr(*rhs);
}
Expr::Range { lhs, rhs, range_type: _ } => {
if let &Some(expr) = lhs {
self.consume_expr(expr);
}
if let &Some(expr) = rhs {
self.consume_expr(expr);
}
}
Expr::Index { base, index } => {
self.select_from_expr(*base);
self.consume_expr(*index);
}
Expr::Closure { .. } => {
let ty = self.expr_ty(tgt_expr);
let TyKind::Closure(id, _) = ty.kind(Interner) else {
never!("closure type is always closure");
return;
};
let (captures, _) =
self.result.closure_info.get(id).expect(
"We sort closures, so we should always have data for inner closures",
);
let mut cc = mem::take(&mut self.current_captures);
cc.extend(captures.iter().filter(|x| self.is_upvar(&x.place)).map(|x| {
CapturedItemWithoutTy { place: x.place.clone(), kind: x.kind, span: x.span }
}));
self.current_captures = cc;
}
Expr::Array(Array::ElementList { elements: exprs, is_assignee_expr: _ })
| Expr::Tuple { exprs, is_assignee_expr: _ } => {
self.consume_exprs(exprs.iter().copied())
}
Expr::Missing
| Expr::Continue { .. }
| Expr::Path(_)
| Expr::Literal(_)
| Expr::Const(_)
| Expr::Underscore => (),
}
}
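// Patterns influence how the match discriminant is captured: e.g. matching
// against an enum with more than one variant has to read the discriminant
// (at least a shared borrow), and a by-move binding of a non-`Copy` type
// forces a by-value capture. `walk_pat` computes the strongest requirement
// over the whole pattern.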
fn walk_pat(&mut self, result: &mut Option<CaptureKind>, pat: PatId) {
let mut update_result = |ck: CaptureKind| match result {
Some(r) => {
*r = cmp::max(*r, ck);
}
None => *result = Some(ck),
};
self.walk_pat_inner(
pat,
&mut update_result,
BorrowKind::Mut { allow_two_phase_borrow: false },
);
}
fn walk_pat_inner(
&mut self,
p: PatId,
update_result: &mut impl FnMut(CaptureKind),
mut for_mut: BorrowKind,
) {
match &self.body[p] {
Pat::Ref { .. }
| Pat::Box { .. }
| Pat::Missing
| Pat::Wild
| Pat::Tuple { .. }
| Pat::Or(_) => (),
Pat::TupleStruct { .. } | Pat::Record { .. } => {
if let Some(variant) = self.result.variant_resolution_for_pat(p) {
let adt = variant.adt_id();
let is_multivariant = match adt {
hir_def::AdtId::EnumId(e) => self.db.enum_data(e).variants.len() != 1,
_ => false,
};
if is_multivariant {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
}
}
}
Pat::Slice { .. }
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_)
| Pat::Range { .. } => {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
}
Pat::Bind { id, .. } => match self.result.binding_modes[*id] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.type_of_binding[*id].clone()) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
} else {
update_result(CaptureKind::ByValue);
}
}
crate::BindingMode::Ref(r) => match r {
Mutability::Mut => update_result(CaptureKind::ByRef(for_mut)),
Mutability::Not => update_result(CaptureKind::ByRef(BorrowKind::Shared)),
},
},
}
if self.result.pat_adjustments.get(&p).map_or(false, |x| !x.is_empty()) {
for_mut = BorrowKind::Unique;
}
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
}
fn expr_ty(&self, expr: ExprId) -> Ty {
self.result[expr].clone()
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
let mut ty = None;
if let Some(x) = self.result.expr_adjustments.get(&e) {
if let Some(x) = x.last() {
ty = Some(x.target.clone());
}
}
ty.unwrap_or_else(|| self.expr_ty(e))
}
fn is_upvar(&self, place: &HirPlace) -> bool {
let b = &self.body[place.local];
if let Some(c) = self.current_closure {
let (_, root) = self.db.lookup_intern_closure(c.into());
return b.is_upvar(root);
}
false
}
fn is_ty_copy(&mut self, ty: Ty) -> bool {
if let TyKind::Closure(id, _) = ty.kind(Interner) {
// FIXME: We handle closures as a special case, since chalk considers every closure as copy.
// We should probably let chalk know which closures are copy, but I don't know how to do
// that without creating query cycles.
return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true);
}
self.table.resolve_completely(ty).is_copy(self.db, self.owner)
}
fn select_from_expr(&mut self, expr: ExprId) {
self.walk_expr(expr);
}
fn adjust_for_move_closure(&mut self) {
for capture in &mut self.current_captures {
if let Some(first_deref) =
capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref)
{
capture.place.projections.truncate(first_deref);
}
capture.kind = CaptureKind::ByValue;
}
}
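// Deduplicates overlapping captures. Captures are sorted by projection
// length, so every prefix of a place is visited before the place itself;
// e.g. capturing `a.b` by shared borrow and `a.b.c` by value collapses into
// a single capture of `a.b` with the stronger of the two kinds (by value).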
fn minimize_captures(&mut self) {
self.current_captures.sort_by_key(|x| x.place.projections.len());
let mut hash_map = HashMap::<HirPlace, usize>::new();
let result = mem::take(&mut self.current_captures);
for item in result {
let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
let mut it = item.place.projections.iter();
let prev_index = loop {
if let Some(k) = hash_map.get(&lookup_place) {
break Some(*k);
}
match it.next() {
Some(x) => lookup_place.projections.push(x.clone()),
None => break None,
}
};
match prev_index {
Some(p) => {
let len = self.current_captures[p].place.projections.len();
let kind_after_truncate =
item.place.capture_kind_of_truncated_place(item.kind, len);
self.current_captures[p].kind =
cmp::max(kind_after_truncate, self.current_captures[p].kind);
}
None => {
hash_map.insert(item.place.clone(), self.current_captures.len());
self.current_captures.push(item);
}
}
}
}
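// Destructures `place` following `pat`: tuple, record, tuple-struct, and
// reference patterns extend the projection list with the matching field or
// deref projections, while the leaves (bindings, literals, ranges, paths)
// decide whether the projected place is consumed, borrowed, or ignored.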
fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
let cnt = self.result.pat_adjustments.get(&pat).map(|x| x.len()).unwrap_or_default();
place.projections = place
.projections
.iter()
.cloned()
.chain((0..cnt).map(|_| ProjectionElem::Deref))
.collect::<Vec<_>>()
.into();
match &self.body[pat] {
Pat::Missing | Pat::Wild => (),
Pat::Tuple { args, ellipsis } => {
let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len()));
let field_count = match self.result[pat].kind(Interner) {
TyKind::Tuple(_, s) => s.len(Interner),
_ => return,
};
let fields = 0..field_count;
let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
for (arg, i) in it {
let mut p = place.clone();
p.projections.push(ProjectionElem::TupleOrClosureField(i));
self.consume_with_pat(p, *arg);
}
}
Pat::Or(pats) => {
for pat in pats.iter() {
self.consume_with_pat(place.clone(), *pat);
}
}
Pat::Record { args, .. } => {
let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
return;
};
match variant {
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
self.consume_place(place, pat.into())
}
VariantId::StructId(s) => {
let vd = &*self.db.struct_data(s).variant_data;
for field_pat in args.iter() {
let arg = field_pat.pat;
let Some(local_id) = vd.field(&field_pat.name) else {
continue;
};
let mut p = place.clone();
p.projections.push(ProjectionElem::Field(FieldId {
parent: variant.into(),
local_id,
}));
self.consume_with_pat(p, arg);
}
}
}
}
Pat::Range { .. }
| Pat::Slice { .. }
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_) => self.consume_place(place, pat.into()),
Pat::Bind { id, subpat: _ } => {
let mode = self.result.binding_modes[*id];
let capture_kind = match mode {
BindingMode::Move => {
self.consume_place(place, pat.into());
return;
}
BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
BindingMode::Ref(Mutability::Mut) => {
BorrowKind::Mut { allow_two_phase_borrow: false }
}
};
self.add_capture(place, CaptureKind::ByRef(capture_kind), pat.into());
}
Pat::TupleStruct { path: _, args, ellipsis } => {
let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
return;
};
match variant {
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
self.consume_place(place, pat.into())
}
VariantId::StructId(s) => {
let vd = &*self.db.struct_data(s).variant_data;
let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len()));
let fields = vd.fields().iter();
let it =
al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
for (arg, (i, _)) in it {
let mut p = place.clone();
p.projections.push(ProjectionElem::Field(FieldId {
parent: variant.into(),
local_id: i,
}));
self.consume_with_pat(p, *arg);
}
}
}
}
Pat::Ref { pat, mutability: _ } => {
place.projections.push(ProjectionElem::Deref);
self.consume_with_pat(place, *pat)
}
Pat::Box { .. } => (), // not supported
}
}
fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
for expr in exprs {
self.consume_expr(expr);
}
}
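// The closure kind is the weakest trait that all captures allow: any
// by-value capture makes the closure `FnOnce`, any unique or mutable borrow
// makes it at most `FnMut`, and shared borrows alone keep it `Fn`.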
fn closure_kind(&self) -> FnTrait {
let mut r = FnTrait::Fn;
for x in &self.current_captures {
r = cmp::min(
r,
match &x.kind {
CaptureKind::ByRef(BorrowKind::Unique | BorrowKind::Mut { .. }) => {
FnTrait::FnMut
}
CaptureKind::ByRef(BorrowKind::Shallow | BorrowKind::Shared) => FnTrait::Fn,
CaptureKind::ByValue => FnTrait::FnOnce,
},
)
}
r
}
fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait {
let (_, root) = self.db.lookup_intern_closure(closure.into());
self.current_closure = Some(closure);
let Expr::Closure { body, capture_by, .. } = &self.body[root] else {
unreachable!("Closure expression id is always closure");
};
self.consume_expr(*body);
for item in &self.current_captures {
if matches!(item.kind, CaptureKind::ByRef(BorrowKind::Mut { .. }))
&& !item.place.projections.contains(&ProjectionElem::Deref)
{
// FIXME: remove the `mutated_bindings_in_closure` completely and add proper fake reads in
// MIR. I didn't do that yet due to duplicate diagnostics.
self.result.mutated_bindings_in_closure.insert(item.place.local);
}
}
// `closure_kind` has to be computed before `adjust_for_move_closure`, since that
// adjustment turns every capture into a by-value capture, which would make any
// move closure look like `FnOnce`.
let closure_kind = self.closure_kind();
match capture_by {
CaptureBy::Value => self.adjust_for_move_closure(),
CaptureBy::Ref => (),
}
self.minimize_captures();
let result = mem::take(&mut self.current_captures);
let captures = result.into_iter().map(|x| x.with_ty(self)).collect::<Vec<_>>();
self.result.closure_info.insert(closure, (captures, closure_kind));
closure_kind
}
pub(crate) fn infer_closures(&mut self) {
let deferred_closures = self.sort_closures();
for (closure, exprs) in deferred_closures.into_iter().rev() {
self.current_captures = vec![];
let kind = self.analyze_closure(closure);
for (derefed_callee, callee_ty, params, expr) in exprs {
if let &Expr::Call { callee, .. } = &self.body[expr] {
let mut adjustments =
self.result.expr_adjustments.remove(&callee).unwrap_or_default();
self.write_fn_trait_method_resolution(
kind,
&derefed_callee,
&mut adjustments,
&callee_ty,
&params,
expr,
);
self.result.expr_adjustments.insert(callee, adjustments);
}
}
}
}
/// We want to analyze some closures before others, to have a correct analysis:
/// * We should analyze nested closures before the parent, since the parent should capture some of
/// the things that its children capture.
/// * If a closure calls another closure, we need to analyze the callee first, to find out how we
/// should capture it (e.g. by move for `FnOnce`).
///
/// These dependencies are collected during the main inference pass. This function does a
/// topological sort; it consumes the `deferred_closures` field and returns its contents as a
/// sorted vector.
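/// For example, if closure `a` contains closure `b` and also calls closure `c`, then both
/// `b` and `c` are dependencies of `a` and are analyzed before it.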
fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
let mut deferred_closures = mem::take(&mut self.deferred_closures);
let mut dependents_count: FxHashMap<ClosureId, usize> =
deferred_closures.keys().map(|x| (*x, 0)).collect();
for (_, deps) in &self.closure_dependencies {
for dep in deps {
*dependents_count.entry(*dep).or_default() += 1;
}
}
let mut queue: Vec<_> =
deferred_closures.keys().copied().filter(|x| dependents_count[x] == 0).collect();
let mut result = vec![];
while let Some(x) = queue.pop() {
if let Some(d) = deferred_closures.remove(&x) {
result.push((x, d));
}
for dep in self.closure_dependencies.get(&x).into_iter().flat_map(|x| x.iter()) {
let cnt = dependents_count.get_mut(dep).unwrap();
*cnt -= 1;
if *cnt == 0 {
queue.push(*dep);
}
}
}
result
}
}
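/// Applies an expression's adjustments to its place, as far as the result is
/// still a place: only builtin derefs (`Adjust::Deref(None)`) are expressible
/// as projections; any other adjustment (overloaded deref, borrow, pointer
/// cast, never-to-any) turns the expression into a value, yielding `None`.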
fn apply_adjusts_to_place(mut r: HirPlace, adjustments: &[Adjustment]) -> Option<HirPlace> {
for adj in adjustments {
match &adj.kind {
Adjust::Deref(None) => {
r.projections.push(ProjectionElem::Deref);
}
_ => return None,
}
}
Some(r)
}

View file

@ -5,14 +5,15 @@
//! See <https://doc.rust-lang.org/nomicon/coercions.html> and
//! `rustc_hir_analysis/check/coercion.rs`.
use std::{iter, sync::Arc};
use std::iter;
use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyVariableKind};
use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyKind, TyVariableKind};
use hir_def::{
expr::ExprId,
hir::ExprId,
lang_item::{LangItem, LangItemTarget},
};
use stdx::always;
use triomphe::Arc;
use crate::{
autoderef::{Autoderef, AutoderefKind},
@ -21,8 +22,10 @@ use crate::{
Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
TypeError, TypeMismatch,
},
static_lifetime, Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner,
Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
static_lifetime,
utils::ClosureSubst,
Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
};
use super::unify::InferenceTable;
@ -47,15 +50,23 @@ fn success(
Ok(InferOk { goals, value: (adj, target) })
}
pub(super) enum CoercionCause {
// FIXME: Make better use of this. Right now things like return and break without a value
// use it to point to themselves, causing us to report a mismatch on those expressions even
// though technically they themselves are `!`.
Expr(ExprId),
}
#[derive(Clone, Debug)]
pub(super) struct CoerceMany {
expected_ty: Ty,
final_ty: Option<Ty>,
expressions: Vec<ExprId>,
}
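// `expressions` records every expression that has already been coerced into the
// merged type, so that when a later branch forces a new target type (e.g. two
// distinct fn items merging into a fn pointer), the adjustments of all earlier
// branches can be rewritten to match.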
impl CoerceMany {
pub(super) fn new(expected: Ty) -> Self {
CoerceMany { expected_ty: expected, final_ty: None }
CoerceMany { expected_ty: expected, final_ty: None, expressions: vec![] }
}
/// Returns the "expected type" with which this coercion was
@ -86,8 +97,12 @@ impl CoerceMany {
}
}
pub(super) fn coerce_forced_unit(&mut self, ctx: &mut InferenceContext<'_>) {
self.coerce(ctx, None, &ctx.result.standard_types.unit.clone())
pub(super) fn coerce_forced_unit(
&mut self,
ctx: &mut InferenceContext<'_>,
cause: CoercionCause,
) {
self.coerce(ctx, None, &ctx.result.standard_types.unit.clone(), cause)
}
/// Merge two types from different branches, with possible coercion.
@ -102,6 +117,7 @@ impl CoerceMany {
ctx: &mut InferenceContext<'_>,
expr: Option<ExprId>,
expr_ty: &Ty,
cause: CoercionCause,
) {
let expr_ty = ctx.resolve_ty_shallow(expr_ty);
self.expected_ty = ctx.resolve_ty_shallow(&self.expected_ty);
@ -110,6 +126,8 @@ impl CoerceMany {
// pointers to have a chance at getting a match. See
// https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
let sig = match (self.merged_ty().kind(Interner), expr_ty.kind(Interner)) {
(TyKind::FnDef(x, _), TyKind::FnDef(y, _)) if x == y => None,
(TyKind::Closure(x, _), TyKind::Closure(y, _)) if x == y => None,
(TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
// FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
// we should be coercing the closure to a fn pointer of the safety of the FnDef
@ -125,8 +143,15 @@ impl CoerceMany {
let result1 = ctx.table.coerce_inner(self.merged_ty(), &target_ty);
let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
if let (Ok(result1), Ok(result2)) = (result1, result2) {
ctx.table.register_infer_ok(result1);
ctx.table.register_infer_ok(result2);
ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals });
for &e in &self.expressions {
ctx.write_expr_adj(e, result1.value.0.clone());
}
ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals });
if let Some(expr) = expr {
ctx.write_expr_adj(expr, result2.value.0);
self.expressions.push(expr);
}
return self.final_ty = Some(target_ty);
}
}
@ -140,14 +165,19 @@ impl CoerceMany {
} else if let Ok(res) = ctx.coerce(expr, &self.merged_ty(), &expr_ty) {
self.final_ty = Some(res);
} else {
if let Some(id) = expr {
ctx.result.type_mismatches.insert(
id.into(),
TypeMismatch { expected: self.merged_ty().clone(), actual: expr_ty.clone() },
);
match cause {
CoercionCause::Expr(id) => {
ctx.result.type_mismatches.insert(
id.into(),
TypeMismatch { expected: self.merged_ty(), actual: expr_ty.clone() },
);
}
}
cov_mark::hit!(coerce_merge_fail_fallback);
}
if let Some(expr) = expr {
self.expressions.push(expr);
}
}
}
@ -625,7 +655,7 @@ impl<'a> InferenceTable<'a> {
// Need to find out in what cases this is necessary
let solution = self
.db
.trait_solve(krate, canonicalized.value.clone().cast(Interner))
.trait_solve(krate, self.trait_env.block, canonicalized.value.clone().cast(Interner))
.ok_or(TypeError)?;
match solution {
@ -657,7 +687,7 @@ impl<'a> InferenceTable<'a> {
}
fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty {
let closure_sig = closure_substs.at(Interner, 0).assert_ty_ref(Interner).clone();
let closure_sig = ClosureSubst(closure_substs).sig_ty().clone();
match closure_sig.kind(Interner) {
TyKind::Function(fn_ty) => TyKind::Function(FnPointer {
num_binders: fn_ty.num_binders,

View file

@ -6,37 +6,43 @@ use std::{
};
use chalk_ir::{
cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyKind, TyVariableKind,
cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
};
use hir_def::{
expr::{
generics::TypeOrConstParamData,
hir::{
ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp,
},
generics::TypeOrConstParamData,
lang_item::LangItem,
lang_item::{LangItem, LangItemTarget},
path::{GenericArg, GenericArgs},
ConstParamId, FieldId, ItemContainerId, Lookup,
BlockId, ConstParamId, FieldId, ItemContainerId, Lookup,
};
use hir_expand::name::{name, Name};
use stdx::always;
use syntax::ast::RangeOp;
use triomphe::Arc;
use crate::{
autoderef::{self, Autoderef},
autoderef::{builtin_deref, deref_by_trait, Autoderef},
consteval,
infer::{
coerce::CoerceMany, find_continuable, pat::contains_explicit_ref_binding, BreakableKind,
coerce::{CoerceMany, CoercionCause},
find_continuable,
pat::contains_explicit_ref_binding,
BreakableKind,
},
lang_items::lang_items_for_bin_op,
lower::{
const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
},
mapping::{from_chalk, ToChalk},
method_resolution::{self, lang_items_for_bin_op, VisibleFromModule},
method_resolution::{self, VisibleFromModule},
primitive::{self, UintTy},
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
utils::{generics, Generics},
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst,
Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt,
Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
};
use super::{
@ -83,10 +89,10 @@ impl<'a> InferenceContext<'a> {
}
}
pub(super) fn infer_expr_coerce_never(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
fn infer_expr_coerce_never(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(expr, expected);
// While we don't allow *arbitrary* coercions here, we *do* allow
// coercions from ! to `expected`.
// coercions from `!` to `expected`.
if ty.is_never() {
if let Some(adjustments) = self.result.expr_adjustments.get(&expr) {
return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &**adjustments {
@ -96,13 +102,22 @@ impl<'a> InferenceContext<'a> {
};
}
let adj_ty = self.table.new_type_var();
self.write_expr_adj(
expr,
vec![Adjustment { kind: Adjust::NeverToAny, target: adj_ty.clone() }],
);
adj_ty
if let Some(target) = expected.only_has_type(&mut self.table) {
self.coerce(Some(expr), &ty, &target)
.expect("never-to-any coercion should always succeed")
} else {
ty
}
} else {
if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
let could_unify = self.unify(&ty, &expected_ty);
if !could_unify {
self.result.type_mismatches.insert(
expr.into(),
TypeMismatch { expected: expected_ty, actual: ty.clone() },
);
}
}
ty
}
}
@ -120,24 +135,28 @@ impl<'a> InferenceContext<'a> {
);
let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let mut both_arms_diverge = Diverges::Always;
let then_ty = self.infer_expr_inner(then_branch, expected);
both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
let then_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let mut coerce = CoerceMany::new(expected.coercion_target_type(&mut self.table));
coerce.coerce(self, Some(then_branch), &then_ty);
coerce.coerce(self, Some(then_branch), &then_ty, CoercionCause::Expr(then_branch));
match else_branch {
Some(else_branch) => {
let else_ty = self.infer_expr_inner(else_branch, expected);
coerce.coerce(self, Some(else_branch), &else_ty);
let else_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
coerce.coerce(
self,
Some(else_branch),
&else_ty,
CoercionCause::Expr(else_branch),
);
self.diverges = condition_diverges | then_diverges & else_diverges;
}
None => {
coerce.coerce_forced_unit(self);
coerce.coerce_forced_unit(self, CoercionCause::Expr(tgt_expr));
self.diverges = condition_diverges;
}
}
both_arms_diverge &= self.diverges;
self.diverges = condition_diverges | both_arms_diverge;
coerce.complete(self)
}
@ -146,67 +165,21 @@ impl<'a> InferenceContext<'a> {
self.infer_top_pat(pat, &input_ty);
self.result.standard_types.bool_.clone()
}
Expr::Block { statements, tail, label, id: _ } => {
self.infer_block(tgt_expr, statements, *tail, *label, expected)
Expr::Block { statements, tail, label, id } => {
self.infer_block(tgt_expr, *id, statements, *tail, *label, expected)
}
Expr::Unsafe { id: _, statements, tail } => {
self.infer_block(tgt_expr, statements, *tail, None, expected)
Expr::Unsafe { id, statements, tail } => {
self.infer_block(tgt_expr, *id, statements, *tail, None, expected)
}
Expr::Const { id: _, statements, tail } => {
Expr::Const(id) => {
self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
this.infer_block(tgt_expr, statements, *tail, None, expected)
let (_, expr) = this.db.lookup_intern_anonymous_const(*id);
this.infer_expr(expr, expected)
})
.1
}
Expr::TryBlock { id: _, statements, tail } => {
// The type that is returned from the try block
let try_ty = self.table.new_type_var();
if let Some(ty) = expected.only_has_type(&mut self.table) {
self.unify(&try_ty, &ty);
}
// The ok-ish type that is expected from the last expression
let ok_ty =
self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_output());
self.infer_block(
tgt_expr,
statements,
*tail,
None,
&Expectation::has_type(ok_ty.clone()),
);
try_ty
}
Expr::Async { id: _, statements, tail } => {
let ret_ty = self.table.new_type_var();
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
let prev_ret_coercion =
mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
let (_, inner_ty) =
self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
this.infer_block(
tgt_expr,
statements,
*tail,
None,
&Expectation::has_type(ret_ty),
)
});
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
self.return_coercion = prev_ret_coercion;
// Use the first type parameter as the output type of future.
// existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
let impl_trait_id =
crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, tgt_expr);
let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
.intern(Interner)
Expr::Async { id, statements, tail } => {
self.infer_async_block(tgt_expr, id, statements, tail)
}
&Expr::Loop { body, label } => {
// FIXME: should be:
@ -238,25 +211,7 @@ impl<'a> InferenceContext<'a> {
self.diverges = Diverges::Maybe;
TyBuilder::unit()
}
&Expr::For { iterable, body, pat, label } => {
let iterable_ty = self.infer_expr(iterable, &Expectation::none());
let into_iter_ty =
self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
let pat_ty = self
.resolve_associated_type(into_iter_ty.clone(), self.resolve_iterator_item());
self.result.type_of_for_iterator.insert(tgt_expr, into_iter_ty);
self.infer_top_pat(pat, &pat_ty);
self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
});
// the body may not run, so it diverging doesn't mean we diverge
self.diverges = Diverges::Maybe;
TyBuilder::unit()
}
Expr::Closure { body, args, ret_type, arg_types, closure_kind } => {
Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => {
assert_eq!(args.len(), arg_types.len());
let mut sig_tys = Vec::with_capacity(arg_types.len() + 1);
@ -276,18 +231,7 @@ impl<'a> InferenceContext<'a> {
None => self.table.new_type_var(),
};
if let ClosureKind::Async = closure_kind {
// Use the first type parameter as the output type of future.
// existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
let impl_trait_id =
crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
sig_tys.push(
TyKind::OpaqueType(
opaque_ty_id,
Substitution::from1(Interner, ret_ty.clone()),
)
.intern(Interner),
);
sig_tys.push(self.lower_async_block_type_impl_trait(ret_ty.clone(), *body));
} else {
sig_tys.push(ret_ty.clone());
}
@ -302,7 +246,7 @@ impl<'a> InferenceContext<'a> {
})
.intern(Interner);
let (ty, resume_yield_tys) = match closure_kind {
let (id, ty, resume_yield_tys) = match closure_kind {
ClosureKind::Generator(_) => {
// FIXME: report error when there are more than 1 parameter.
let resume_ty = match sig_tys.first() {
@ -322,17 +266,20 @@ impl<'a> InferenceContext<'a> {
let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
(generator_ty, Some((resume_ty, yield_ty)))
(None, generator_ty, Some((resume_ty, yield_ty)))
}
ClosureKind::Closure | ClosureKind::Async => {
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
let closure_ty = TyKind::Closure(
closure_id,
Substitution::from1(Interner, sig_ty.clone()),
TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()),
)
.intern(Interner);
(closure_ty, None)
self.deferred_closures.entry(closure_id).or_default();
if let Some(c) = self.current_closure {
self.closure_dependencies.entry(c).or_default().push(closure_id);
}
(Some(closure_id), closure_ty, None)
}
};
@ -348,9 +295,10 @@ impl<'a> InferenceContext<'a> {
// FIXME: lift these out into a struct
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_closure = mem::replace(&mut self.current_closure, id);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
let prev_ret_coercion =
mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty)));
let prev_resume_yield_tys =
mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
@ -361,6 +309,7 @@ impl<'a> InferenceContext<'a> {
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
self.return_coercion = prev_ret_coercion;
self.current_closure = prev_closure;
self.resume_yield_tys = prev_resume_yield_tys;
ty
@ -385,16 +334,31 @@ impl<'a> InferenceContext<'a> {
|| res.is_none();
let (param_tys, ret_ty) = match res {
Some((func, params, ret_ty)) => {
let adjustments = auto_deref_adjust_steps(&derefs);
// FIXME: Handle call adjustments for Fn/FnMut
self.write_expr_adj(*callee, adjustments);
if let Some((trait_, func)) = func {
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(callee_ty.clone())
.push(TyBuilder::tuple_with(params.iter().cloned()))
.build();
self.write_method_resolution(tgt_expr, func, subst.clone());
let mut adjustments = auto_deref_adjust_steps(&derefs);
if let TyKind::Closure(c, _) =
self.table.resolve_completely(callee_ty.clone()).kind(Interner)
{
if let Some(par) = self.current_closure {
self.closure_dependencies.entry(par).or_default().push(*c);
}
self.deferred_closures.entry(*c).or_default().push((
derefed_callee.clone(),
callee_ty.clone(),
params.clone(),
tgt_expr,
));
}
if let Some(fn_x) = func {
self.write_fn_trait_method_resolution(
fn_x,
&derefed_callee,
&mut adjustments,
&callee_ty,
&params,
tgt_expr,
);
}
self.write_expr_adj(*callee, adjustments);
(params, ret_ty)
}
None => {
@ -470,7 +434,7 @@ impl<'a> InferenceContext<'a> {
let arm_ty = self.infer_expr_inner(arm.expr, &expected);
all_arms_diverge &= self.diverges;
coerce.coerce(self, Some(arm.expr), &arm_ty);
coerce.coerce(self, Some(arm.expr), &arm_ty, CoercionCause::Expr(arm.expr));
}
self.diverges = matchee_diverges | all_arms_diverge;
@ -484,8 +448,8 @@ impl<'a> InferenceContext<'a> {
self.resolver.reset_to_guard(g);
ty
}
Expr::Continue { label } => {
if let None = find_continuable(&mut self.breakables, label.as_ref()) {
&Expr::Continue { label } => {
if let None = find_continuable(&mut self.breakables, label) {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr,
is_break: false,
@ -494,9 +458,9 @@ impl<'a> InferenceContext<'a> {
};
self.result.standard_types.never.clone()
}
Expr::Break { expr, label } => {
let val_ty = if let Some(expr) = *expr {
let opt_coerce_to = match find_breakable(&mut self.breakables, label.as_ref()) {
&Expr::Break { expr, label } => {
let val_ty = if let Some(expr) = expr {
let opt_coerce_to = match find_breakable(&mut self.breakables, label) {
Some(ctxt) => match &ctxt.coerce {
Some(coerce) => coerce.expected_ty(),
None => {
@ -515,13 +479,17 @@ impl<'a> InferenceContext<'a> {
TyBuilder::unit()
};
match find_breakable(&mut self.breakables, label.as_ref()) {
match find_breakable(&mut self.breakables, label) {
Some(ctxt) => match ctxt.coerce.take() {
Some(mut coerce) => {
coerce.coerce(self, *expr, &val_ty);
let cause = match expr {
Some(expr) => CoercionCause::Expr(expr),
None => CoercionCause::Expr(tgt_expr),
};
coerce.coerce(self, expr, &val_ty, cause);
// Look the context up again to avoid borrowing `self.breakables` across the coercion
let ctxt = find_breakable(&mut self.breakables, label.as_ref())
let ctxt = find_breakable(&mut self.breakables, label)
.expect("breakable stack changed during coercion");
ctxt.may_break = true;
ctxt.coerce = Some(coerce);
@ -538,7 +506,7 @@ impl<'a> InferenceContext<'a> {
}
self.result.standard_types.never.clone()
}
&Expr::Return { expr } => self.infer_expr_return(expr),
&Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr),
Expr::Yield { expr } => {
if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
if let Some(expr) = expr {
@ -589,6 +557,9 @@ impl<'a> InferenceContext<'a> {
let field_ty = field_def.map_or(self.err_ty(), |it| {
field_types[it.local_id].clone().substitute(Interner, &substs)
});
// The field type might contain some unknown types.
// FIXME: we may want to emit a single type variable for all instances of these unknown types?
let field_ty = self.insert_type_vars(field_ty);
self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
}
if let Some(expr) = spread {
@ -601,26 +572,18 @@ impl<'a> InferenceContext<'a> {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
}
Expr::Try { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) {
if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) {
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(inner_ty.clone())
.build();
self.write_method_resolution(tgt_expr, func, subst.clone());
}
let try_output = self.resolve_output_on(trait_);
self.resolve_associated_type(inner_ty, try_output)
} else {
self.err_ty()
}
}
Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref);
// FIXME: propagate the "castable to" expectation
let _inner_ty = self.infer_expr_no_expect(*expr);
// FIXME check the cast...
let inner_ty = self.infer_expr_no_expect(*expr);
match (inner_ty.kind(Interner), cast_ty.kind(Interner)) {
(TyKind::Ref(_, _, inner), TyKind::Raw(_, cast)) => {
// FIXME: record invalid cast diagnostic in case of mismatch
self.unify(inner, cast);
}
// FIXME check the other kinds of cast...
_ => (),
}
cast_ty
}
Expr::Ref { expr, rawness, mutability } => {
@ -638,7 +601,7 @@ impl<'a> InferenceContext<'a> {
// FIXME: record type error - expected reference but found ptr,
// which cannot be coerced
}
Expectation::rvalue_hint(&mut self.table, Ty::clone(exp_inner))
Expectation::rvalue_hint(self, Ty::clone(exp_inner))
} else {
Expectation::none()
};
@ -656,7 +619,25 @@ impl<'a> InferenceContext<'a> {
// FIXME: Note down method resolution here
match op {
UnaryOp::Deref => {
autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref) {
if let Some(deref_fn) =
self.db.trait_data(deref_trait).method_by_name(&name![deref])
{
// FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that
// the mutability is not wrong, and will be fixed in `self.infer_mut`).
self.write_method_resolution(
tgt_expr,
deref_fn,
Substitution::empty(Interner),
);
}
}
if let Some(derefed) = builtin_deref(&mut self.table, &inner_ty, true) {
self.resolve_ty_shallow(derefed)
} else {
deref_by_trait(&mut self.table, inner_ty)
.unwrap_or_else(|| self.err_ty())
}
}
UnaryOp::Neg => {
match inner_ty.kind(Interner) {
@ -767,14 +748,16 @@ impl<'a> InferenceContext<'a> {
let canonicalized = self.canonicalize(base_ty.clone());
let receiver_adjustments = method_resolution::resolve_indexing_op(
self.db,
self.trait_env.clone(),
self.table.trait_env.clone(),
canonicalized.value,
index_trait,
);
let (self_ty, adj) = receiver_adjustments
let (self_ty, mut adj) = receiver_adjustments
.map_or((self.err_ty(), Vec::new()), |adj| {
adj.apply(&mut self.table, base_ty)
});
// The mutability will be fixed up in `InferenceContext::infer_mut`.
adj.push(Adjustment::borrow(Mutability::Not, self_ty.clone()));
self.write_expr_adj(*base, adj);
if let Some(func) =
self.db.trait_data(index_trait).method_by_name(&name!(index))
@ -783,7 +766,7 @@ impl<'a> InferenceContext<'a> {
.push(self_ty.clone())
.push(index_ty.clone())
.build();
self.write_method_resolution(tgt_expr, func, substs.clone());
self.write_method_resolution(tgt_expr, func, substs);
}
self.resolve_associated_type_with_params(
self_ty,
@ -834,6 +817,20 @@ impl<'a> InferenceContext<'a> {
let array_type = TyKind::Array(byte_type, len).intern(Interner);
TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
}
Literal::CString(..) => TyKind::Ref(
Mutability::Not,
static_lifetime(),
self.resolve_lang_item(LangItem::CStr)
.and_then(LangItemTarget::as_struct)
.map_or_else(
|| self.err_ty(),
|strukt| {
TyKind::Adt(AdtId(strukt.into()), Substitution::empty(Interner))
.intern(Interner)
},
),
)
.intern(Interner),
Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(Interner),
Literal::Int(_v, ty) => match ty {
Some(int_ty) => {
@ -859,9 +856,15 @@ impl<'a> InferenceContext<'a> {
},
Expr::Underscore => {
// Underscore expressions may only appear in assignee expressions,
// which are handled by `infer_assignee_expr()`, so any underscore
// expression reaching this branch is an error.
self.err_ty()
// which are handled by `infer_assignee_expr()`.
// Any other underscore expression is an error; we render a specialized diagnostic
// to let the user know what type is expected, though.
let expected = expected.to_option(&mut self.table).unwrap_or_else(|| self.err_ty());
self.push_diagnostic(InferenceDiagnostic::TypedHole {
expr: tgt_expr,
expected: expected.clone(),
});
expected
}
};
// use a new type variable if we got unknown here
@ -874,6 +877,88 @@ impl<'a> InferenceContext<'a> {
ty
}
fn infer_async_block(
&mut self,
tgt_expr: ExprId,
id: &Option<BlockId>,
statements: &[Statement],
tail: &Option<ExprId>,
) -> Ty {
let ret_ty = self.table.new_type_var();
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
let prev_ret_coercion =
mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
let (_, inner_ty) = self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
this.infer_block(tgt_expr, *id, statements, *tail, None, &Expectation::has_type(ret_ty))
});
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
self.return_coercion = prev_ret_coercion;
self.lower_async_block_type_impl_trait(inner_ty, tgt_expr)
}
pub(crate) fn lower_async_block_type_impl_trait(
&mut self,
inner_ty: Ty,
tgt_expr: ExprId,
) -> Ty {
// Use the first type parameter as the output type of the future.
// existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, tgt_expr);
let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty)).intern(Interner)
}
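// Records the `Fn*` trait method resolution for a call through a callable
// value and fixes up the callee adjustments: an `FnMut` call prefers a
// reborrow when the callee is already a `&mut` (instead of moving it out),
// or adds a mutable borrow otherwise, while an `Fn` call adds a shared
// borrow unless the callee is already behind one. `FnOnce` consumes the
// callee, so no adjustment is needed.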
pub(crate) fn write_fn_trait_method_resolution(
&mut self,
fn_x: FnTrait,
derefed_callee: &Ty,
adjustments: &mut Vec<Adjustment>,
callee_ty: &Ty,
params: &Vec<Ty>,
tgt_expr: ExprId,
) {
match fn_x {
FnTrait::FnOnce => (),
FnTrait::FnMut => {
if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) {
if adjustments
.last()
.map(|x| matches!(x.kind, Adjust::Borrow(_)))
.unwrap_or(true)
{
// prefer reborrow to move
adjustments
.push(Adjustment { kind: Adjust::Deref(None), target: inner.clone() });
adjustments.push(Adjustment::borrow(Mutability::Mut, inner.clone()))
}
} else {
adjustments.push(Adjustment::borrow(Mutability::Mut, derefed_callee.clone()));
}
}
FnTrait::Fn => {
if !matches!(derefed_callee.kind(Interner), TyKind::Ref(Mutability::Not, _, _)) {
adjustments.push(Adjustment::borrow(Mutability::Not, derefed_callee.clone()));
}
}
}
let Some(trait_) = fn_x.get_id(self.db, self.table.trait_env.krate) else {
return;
};
let trait_data = self.db.trait_data(trait_);
if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) {
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(callee_ty.clone())
.push(TyBuilder::tuple_with(params.iter().cloned()))
.build();
self.write_method_resolution(tgt_expr, func, subst.clone());
}
}
fn infer_expr_array(
&mut self,
array: &Array,
@ -892,10 +977,10 @@ impl<'a> InferenceContext<'a> {
(elem_ty, consteval::usize_const(self.db, Some(0), krate))
}
Array::ElementList { elements, .. } => {
let mut coerce = CoerceMany::new(elem_ty.clone());
let mut coerce = CoerceMany::new(elem_ty);
for &expr in elements.iter() {
let cur_elem_ty = self.infer_expr_inner(expr, &expected);
coerce.coerce(self, Some(expr), &cur_elem_ty);
coerce.coerce(self, Some(expr), &cur_elem_ty, CoercionCause::Expr(expr));
}
(
coerce.complete(self),
@ -904,12 +989,13 @@ impl<'a> InferenceContext<'a> {
}
&Array::Repeat { initializer, repeat } => {
self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty.clone()));
self.infer_expr(
repeat,
&Expectation::HasType(
TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
),
);
let usize = TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner);
match self.body[repeat] {
Expr::Underscore => {
self.write_expr_ty(repeat, usize);
}
_ => _ = self.infer_expr(repeat, &Expectation::HasType(usize)),
}
(
elem_ty,
@ -928,7 +1014,8 @@ impl<'a> InferenceContext<'a> {
)
}
};
// Try to evaluate the unevaluated constant, and insert a const variable if that is not possible.
let len = self.table.insert_const_vars_shallow(len);
TyKind::Array(elem_ty, len).intern(Interner)
}
@ -940,18 +1027,18 @@ impl<'a> InferenceContext<'a> {
.expected_ty();
let return_expr_ty = self.infer_expr_inner(expr, &Expectation::HasType(ret_ty));
let mut coerce_many = self.return_coercion.take().unwrap();
coerce_many.coerce(self, Some(expr), &return_expr_ty);
coerce_many.coerce(self, Some(expr), &return_expr_ty, CoercionCause::Expr(expr));
self.return_coercion = Some(coerce_many);
}
fn infer_expr_return(&mut self, expr: Option<ExprId>) -> Ty {
fn infer_expr_return(&mut self, ret: ExprId, expr: Option<ExprId>) -> Ty {
match self.return_coercion {
Some(_) => {
if let Some(expr) = expr {
self.infer_return(expr);
} else {
let mut coerce = self.return_coercion.take().unwrap();
coerce.coerce_forced_unit(self);
coerce.coerce_forced_unit(self, CoercionCause::Expr(ret));
self.return_coercion = Some(coerce);
}
}
@ -976,7 +1063,7 @@ impl<'a> InferenceContext<'a> {
.filter(|(e_adt, _)| e_adt == &box_id)
.map(|(_, subts)| {
let g = subts.at(Interner, 0);
Expectation::rvalue_hint(table, Ty::clone(g.assert_ty_ref(Interner)))
Expectation::rvalue_hint(self, Ty::clone(g.assert_ty_ref(Interner)))
})
.unwrap_or_else(Expectation::none);
@ -1185,6 +1272,7 @@ impl<'a> InferenceContext<'a> {
fn infer_block(
&mut self,
expr: ExprId,
block_id: Option<BlockId>,
statements: &[Statement],
tail: Option<ExprId>,
label: Option<LabelId>,
@ -1192,9 +1280,14 @@ impl<'a> InferenceContext<'a> {
) -> Ty {
let coerce_ty = expected.coercion_target_type(&mut self.table);
let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
let prev_env = block_id.map(|block_id| {
let prev_env = self.table.trait_env.clone();
Arc::make_mut(&mut self.table.trait_env).block = Some(block_id);
prev_env
});
let (break_ty, ty) =
self.with_breakable_ctx(BreakableKind::Block, Some(coerce_ty.clone()), label, |this| {
self.with_breakable_ctx(BreakableKind::Block, Some(coerce_ty), label, |this| {
for stmt in statements {
match stmt {
Statement::Let { pat, type_ref, initializer, else_branch } => {
@ -1280,6 +1373,9 @@ impl<'a> InferenceContext<'a> {
}
});
self.resolver.reset_to_guard(g);
if let Some(prev_env) = prev_env {
self.table.trait_env = prev_env;
}
break_ty.unwrap_or(ty)
}
@ -1378,7 +1474,7 @@ impl<'a> InferenceContext<'a> {
method_resolution::lookup_method(
self.db,
&canonicalized_receiver.value,
self.trait_env.clone(),
self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
name,
@ -1411,7 +1507,7 @@ impl<'a> InferenceContext<'a> {
let resolved = method_resolution::lookup_method(
self.db,
&canonicalized_receiver.value,
self.trait_env.clone(),
self.table.trait_env.clone(),
self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
method_name,
@ -1562,7 +1658,7 @@ impl<'a> InferenceContext<'a> {
// the parameter to coerce to the expected type (for example in
// `coerce_unsize_expected_type_4`).
let param_ty = self.normalize_associated_types_in(param_ty);
let expected = Expectation::rvalue_hint(&mut self.table, expected_ty);
let expected = Expectation::rvalue_hint(self, expected_ty);
// infer with the expected type we have...
let ty = self.infer_expr_inner(arg, &expected);
@ -1575,9 +1671,10 @@ impl<'a> InferenceContext<'a> {
} else {
param_ty
};
if !coercion_target.is_unknown()
&& self.coerce(Some(arg), &ty, &coercion_target).is_err()
{
// The function signature may contain some unknown types, so we need to insert
// type vars here to avoid type mismatch false positives.
let coercion_target = self.insert_type_vars(coercion_target);
if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
self.result.type_mismatches.insert(
arg.into(),
TypeMismatch { expected: coercion_target, actual: ty.clone() },
@ -1868,7 +1965,6 @@ impl<'a> InferenceContext<'a> {
cb: impl FnOnce(&mut Self) -> T,
) -> (Option<Ty>, T) {
self.breakables.push({
let label = label.map(|label| self.body[label].name.clone());
BreakableContext { kind, may_break: false, coerce: ty.map(CoerceMany::new), label }
});
let res = cb(self);


@ -0,0 +1,218 @@
//! Finds whether an expression occurs in an immutable or a mutable context, which is used when
//! selecting between `Deref` and `DerefMut`, `Index` and `IndexMut`, and similar pairs.
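// Editor's illustrative sketch (assumes only std; not part of this change):
fn _mutability_context_demo(b: &mut Box<i32>) {
    let _read = **b; // immutable context: resolved through `Deref::deref`
    **b = 7; // mutable context: resolved through `DerefMut::deref_mut`
}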
use chalk_ir::Mutability;
use hir_def::{
hir::{Array, BinaryOp, BindingAnnotation, Expr, ExprId, PatId, Statement, UnaryOp},
lang_item::LangItem,
};
use hir_expand::name;
use crate::{lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, OverloadedDeref};
use super::InferenceContext;
impl<'a> InferenceContext<'a> {
pub(crate) fn infer_mut_body(&mut self) {
self.infer_mut_expr(self.body.body_expr, Mutability::Not);
}
fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) {
if let Some(adjustments) = self.result.expr_adjustments.get_mut(&tgt_expr) {
for adj in adjustments.iter_mut().rev() {
match &mut adj.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (),
Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)),
Adjust::Borrow(b) => match b {
AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m) => mutability = *m,
},
}
}
}
self.infer_mut_expr_without_adjust(tgt_expr, mutability);
}
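// Hedged illustration (editor's addition): adjustments are visited in reverse
// because the last adjustment sits closest to the use site. For `v.push(1)`
// below, the receiver's adjustments end in an `&mut` autoref, and walking them
// back-to-front propagates `Mutability::Mut` onto `v` itself.
fn _autoref_demo(mut v: Vec<i32>) {
    v.push(1); // receiver is adjusted to `&mut v`, a mutable context for `v`
}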
fn infer_mut_expr_without_adjust(&mut self, tgt_expr: ExprId, mutability: Mutability) {
match &self.body[tgt_expr] {
Expr::Missing => (),
&Expr::If { condition, then_branch, else_branch } => {
self.infer_mut_expr(condition, Mutability::Not);
self.infer_mut_expr(then_branch, Mutability::Not);
if let Some(else_branch) = else_branch {
self.infer_mut_expr(else_branch, Mutability::Not);
}
}
Expr::Const(id) => {
let (_, expr) = self.db.lookup_intern_anonymous_const(*id);
self.infer_mut_expr(expr, Mutability::Not);
}
Expr::Let { pat, expr } => self.infer_mut_expr(*expr, self.pat_bound_mutability(*pat)),
Expr::Block { id: _, statements, tail, label: _ }
| Expr::Async { id: _, statements, tail }
| Expr::Unsafe { id: _, statements, tail } => {
for st in statements.iter() {
match st {
Statement::Let { pat, type_ref: _, initializer, else_branch } => {
if let Some(i) = initializer {
self.infer_mut_expr(*i, self.pat_bound_mutability(*pat));
}
if let Some(e) = else_branch {
self.infer_mut_expr(*e, Mutability::Not);
}
}
Statement::Expr { expr, has_semi: _ } => {
self.infer_mut_expr(*expr, Mutability::Not);
}
}
}
if let Some(tail) = tail {
self.infer_mut_expr(*tail, Mutability::Not);
}
}
&Expr::While { condition: c, body, label: _ } => {
self.infer_mut_expr(c, Mutability::Not);
self.infer_mut_expr(body, Mutability::Not);
}
Expr::MethodCall { receiver: x, method_name: _, args, generic_args: _ }
| Expr::Call { callee: x, args, is_assignee_expr: _ } => {
self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*x)));
}
Expr::Match { expr, arms } => {
let m = self.pat_iter_bound_mutability(arms.iter().map(|x| x.pat));
self.infer_mut_expr(*expr, m);
for arm in arms.iter() {
self.infer_mut_expr(arm.expr, Mutability::Not);
if let Some(g) = arm.guard {
self.infer_mut_expr(g, Mutability::Not);
}
}
}
Expr::Yield { expr }
| Expr::Yeet { expr }
| Expr::Return { expr }
| Expr::Break { expr, label: _ } => {
if let &Some(expr) = expr {
self.infer_mut_expr(expr, Mutability::Not);
}
}
Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread))
}
&Expr::Index { base, index } => {
if mutability == Mutability::Mut {
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) {
if let Some(index_trait) = self
.db
.lang_item(self.table.trait_env.krate, LangItem::IndexMut)
.and_then(|l| l.as_trait())
{
if let Some(index_fn) =
self.db.trait_data(index_trait).method_by_name(&name![index_mut])
{
*f = index_fn;
let base_adjustments = self
.result
.expr_adjustments
.get_mut(&base)
.and_then(|it| it.last_mut());
if let Some(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutability)),
..
}) = base_adjustments
{
*mutability = Mutability::Mut;
}
}
}
}
}
self.infer_mut_expr(base, mutability);
self.infer_mut_expr(index, Mutability::Not);
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) {
if mutability == Mutability::Mut {
if let Some(deref_trait) = self
.db
.lang_item(self.table.trait_env.krate, LangItem::DerefMut)
.and_then(|l| l.as_trait())
{
if let Some(deref_fn) =
self.db.trait_data(deref_trait).method_by_name(&name![deref_mut])
{
*f = deref_fn;
}
}
}
}
self.infer_mut_expr(*expr, mutability);
}
Expr::Field { expr, name: _ } => {
self.infer_mut_expr(*expr, mutability);
}
Expr::UnaryOp { expr, op: _ }
| Expr::Range { lhs: Some(expr), rhs: None, range_type: _ }
| Expr::Range { rhs: Some(expr), lhs: None, range_type: _ }
| Expr::Await { expr }
| Expr::Box { expr }
| Expr::Loop { body: expr, label: _ }
| Expr::Cast { expr, type_ref: _ } => {
self.infer_mut_expr(*expr, Mutability::Not);
}
Expr::Ref { expr, rawness: _, mutability } => {
let mutability = lower_to_chalk_mutability(*mutability);
self.infer_mut_expr(*expr, mutability);
}
Expr::BinaryOp { lhs, rhs, op: Some(BinaryOp::Assignment { .. }) } => {
self.infer_mut_expr(*lhs, Mutability::Mut);
self.infer_mut_expr(*rhs, Mutability::Not);
}
Expr::Array(Array::Repeat { initializer: lhs, repeat: rhs })
| Expr::BinaryOp { lhs, rhs, op: _ }
| Expr::Range { lhs: Some(lhs), rhs: Some(rhs), range_type: _ } => {
self.infer_mut_expr(*lhs, Mutability::Not);
self.infer_mut_expr(*rhs, Mutability::Not);
}
Expr::Closure { body, .. } => {
self.infer_mut_expr(*body, Mutability::Not);
}
Expr::Tuple { exprs, is_assignee_expr: _ }
| Expr::Array(Array::ElementList { elements: exprs, is_assignee_expr: _ }) => {
self.infer_mut_not_expr_iter(exprs.iter().copied());
}
// These need no action, as they have no sub-expressions
Expr::Range { lhs: None, rhs: None, range_type: _ }
| Expr::Literal(_)
| Expr::Path(_)
| Expr::Continue { .. }
| Expr::Underscore => (),
}
}
fn infer_mut_not_expr_iter(&mut self, exprs: impl Iterator<Item = ExprId>) {
for expr in exprs {
self.infer_mut_expr(expr, Mutability::Not);
}
}
fn pat_iter_bound_mutability(&self, mut pat: impl Iterator<Item = PatId>) -> Mutability {
if pat.any(|p| self.pat_bound_mutability(p) == Mutability::Mut) {
Mutability::Mut
} else {
Mutability::Not
}
}
/// Checks if the pattern contains a `ref mut` binding. Such bindings make the context of the bound
/// expression mutable. For example, in `let (ref mut x0, ref x1) = *x;` we need to use `DerefMut` for
/// `*x`, but in `let (ref x0, ref x1) = *x;` we should use `Deref`.
fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
let mut r = Mutability::Not;
self.body.walk_bindings_in_pat(pat, |b| {
if self.body.bindings[b].mode == BindingAnnotation::RefMut {
r = Mutability::Mut;
}
});
r
}
}


@ -5,7 +5,7 @@ use std::iter::repeat_with;
use chalk_ir::Mutability;
use hir_def::{
body::Body,
expr::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId},
hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId},
path::Path,
};
use hir_expand::name::Name;
@ -255,15 +255,15 @@ impl<'a> InferenceContext<'a> {
self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm)
}
Pat::Wild => expected.clone(),
Pat::Range { start, end } => {
let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
self.infer_expr(*end, &Expectation::has_type(start_ty))
Pat::Range { .. } => {
// FIXME: do some checks here.
expected.clone()
}
&Pat::Lit(expr) => {
// Don't emit type mismatches again, the expression lowering already did that.
let ty = self.infer_lit_pat(expr, &expected);
self.write_pat_ty(pat, ty.clone());
return ty;
return self.pat_ty_after_adjustment(pat);
}
Pat::Box { inner } => match self.resolve_boxed_box() {
Some(box_adt) => {
@ -298,22 +298,38 @@ impl<'a> InferenceContext<'a> {
.type_mismatches
.insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
}
self.write_pat_ty(pat, ty.clone());
ty
self.write_pat_ty(pat, ty);
self.pat_ty_after_adjustment(pat)
}
fn pat_ty_after_adjustment(&self, pat: PatId) -> Ty {
self.result
.pat_adjustments
.get(&pat)
.and_then(|x| x.first())
.unwrap_or(&self.result.type_of_pat[pat])
.clone()
}
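// Editor's note (hedged illustration): with match ergonomics, matching
// `Some(_)` against an `&Option<i32>` scrutinee peels the reference and
// records a pat adjustment, so the type reported here is roughly the
// pre-deref `&Option<i32>` rather than `Option<i32>`.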
fn infer_ref_pat(
&mut self,
pat: PatId,
inner_pat: PatId,
mutability: Mutability,
expected: &Ty,
default_bm: BindingMode,
) -> Ty {
let expectation = match expected.as_reference() {
Some((inner_ty, _lifetime, _exp_mut)) => inner_ty.clone(),
_ => self.result.standard_types.unknown.clone(),
None => {
let inner_ty = self.table.new_type_var();
let ref_ty =
TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner);
// Unification failure will be reported by the caller.
self.unify(&ref_ty, expected);
inner_ty
}
};
let subty = self.infer_pat(pat, &expectation, default_bm);
let subty = self.infer_pat(inner_pat, &expectation, default_bm);
TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
}
@ -331,7 +347,7 @@ impl<'a> InferenceContext<'a> {
} else {
BindingMode::convert(mode)
};
self.result.pat_binding_modes.insert(pat, mode);
self.result.binding_modes.insert(binding, mode);
let inner_ty = match subpat {
Some(subpat) => self.infer_pat(subpat, &expected, default_bm),
@ -345,7 +361,7 @@ impl<'a> InferenceContext<'a> {
}
BindingMode::Move => inner_ty.clone(),
};
self.write_pat_ty(pat, bound_ty.clone());
self.write_pat_ty(pat, inner_ty.clone());
self.write_binding_ty(binding, bound_ty);
return inner_ty;
}
@ -370,7 +386,7 @@ impl<'a> InferenceContext<'a> {
if let &Some(slice_pat_id) = slice {
let rest_pat_ty = match expected.kind(Interner) {
TyKind::Array(_, length) => {
let len = try_const_usize(length);
let len = try_const_usize(self.db, length);
let len =
len.and_then(|len| len.checked_sub((prefix.len() + suffix.len()) as u128));
TyKind::Array(elem_ty.clone(), usize_const(self.db, len, self.resolver.krate()))
@ -419,17 +435,10 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
// FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
Pat::Path(..) => true,
Pat::ConstBlock(..) => true,
Pat::Lit(expr) => {
!matches!(body[*expr], Expr::Literal(Literal::String(..) | Literal::ByteString(..)))
}
Pat::Bind { id, subpat: Some(subpat), .. }
if matches!(
body.bindings[*id].mode,
BindingAnnotation::Mutable | BindingAnnotation::Unannotated
) =>
{
is_non_ref_pat(body, *subpat)
}
Pat::Lit(expr) => !matches!(
body[*expr],
Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
),
Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
}
}
@ -437,7 +446,7 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
let mut res = false;
body.walk_pats(pat_id, &mut |pat| {
res |= matches!(pat, Pat::Bind { id, .. } if body.bindings[*id].mode == BindingAnnotation::Ref);
res |= matches!(body[pat], Pat::Bind { id, .. } if body.bindings[id].mode == BindingAnnotation::Ref);
});
res
}


@ -4,7 +4,7 @@ use chalk_ir::cast::Cast;
use hir_def::{
path::{Path, PathSegment},
resolver::{ResolveValueResult, TypeNs, ValueNs},
AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
AdtId, AssocItemId, EnumVariantId, GenericDefId, ItemContainerId, Lookup,
};
use hir_expand::name::Name;
use stdx::never;
@ -13,6 +13,7 @@ use crate::{
builder::ParamKind,
consteval,
method_resolution::{self, VisibleFromModule},
to_chalk_trait_id,
utils::generics,
InferenceDiagnostic, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
ValueTyDefId,
@ -20,26 +21,43 @@ use crate::{
use super::{ExprOrPatId, InferenceContext, TraitRef};
impl<'a> InferenceContext<'a> {
impl InferenceContext<'_> {
pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
let ty = self.resolve_value_path(path, id)?;
let ty = self.insert_type_vars(ty);
let (value_def, generic_def, substs) = match self.resolve_value_path(path, id)? {
ValuePathResolution::GenericDef(value_def, generic_def, substs) => {
(value_def, generic_def, substs)
}
ValuePathResolution::NonGeneric(ty) => return Some(ty),
};
let substs = self.insert_type_vars(substs);
let substs = self.normalize_associated_types_in(substs);
self.add_required_obligations_for_value_path(generic_def, &substs);
let ty = self.db.value_ty(value_def).substitute(Interner, &substs);
let ty = self.normalize_associated_types_in(ty);
Some(ty)
}
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
let Some(last) = path.segments().last() else { return None };
let ty = self.make_ty(type_ref);
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
let last = path.segments().last()?;
// Don't use `self.make_ty()` here as we need `orig_ns`.
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
let ty = self.table.insert_type_vars(ty);
let ty = self.table.normalize_associated_types_in(ty);
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
let ty = self.table.insert_type_vars(ty);
let ty = self.table.normalize_associated_types_in(ty);
self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
} else {
// FIXME: report error, unresolved first path segment
let value_or_partial =
self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None),
@ -49,9 +67,9 @@ impl<'a> InferenceContext<'a> {
}
};
let typable: ValueTyDefId = match value {
let value_def = match value {
ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
Some(ty) => return Some(ty.clone()),
Some(ty) => return Some(ValuePathResolution::NonGeneric(ty.clone())),
None => {
never!("uninferred pattern?");
return None;
@ -75,28 +93,45 @@ impl<'a> InferenceContext<'a> {
let substs = generics.placeholder_subst(self.db);
let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
return Some(ty);
return Some(ValuePathResolution::GenericDef(
struct_id.into(),
struct_id.into(),
substs.clone(),
));
} else {
// FIXME: report error, invalid Self reference
return None;
}
}
ValueNs::GenericParam(it) => return Some(self.db.const_param_ty(it)),
ValueNs::GenericParam(it) => {
return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it)))
}
};
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let substs = ctx.substs_from_path(path, typable, true);
let substs = ctx.substs_from_path(path, value_def, true);
let substs = substs.as_slice(Interner);
let parent_substs = self_subst.or_else(|| {
let generics = generics(self.db.upcast(), typable.to_generic_def_id()?);
let generics = generics(self.db.upcast(), value_def.to_generic_def_id()?);
let parent_params_len = generics.parent_generics()?.len();
let parent_args = &substs[substs.len() - parent_params_len..];
Some(Substitution::from_iter(Interner, parent_args))
});
let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner));
let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned();
let ty = TyBuilder::value_ty(self.db, typable, parent_substs)
let Some(generic_def) = value_def.to_generic_def_id() else {
// `value_def` is the kind of item that can never be generic (i.e. statics, at least
// currently). We can just skip the binders to get its type.
let (ty, binders) = self.db.value_ty(value_def).into_value_and_skipped_binders();
stdx::always!(
parent_substs.is_none() && binders.is_empty(Interner),
"non-empty binders for non-generic def",
);
return Some(ValuePathResolution::NonGeneric(ty));
};
let builder = TyBuilder::subst_for_def(self.db, generic_def, parent_substs);
let substs = builder
.fill(|x| {
it.next().unwrap_or_else(|| match x {
ParamKind::Type => self.result.standard_types.unknown.clone().cast(Interner),
@ -104,7 +139,35 @@ impl<'a> InferenceContext<'a> {
})
})
.build();
Some(ty)
Some(ValuePathResolution::GenericDef(value_def, generic_def, substs))
}
fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
let predicates = self.db.generic_predicates(def);
for predicate in predicates.iter() {
let (predicate, binders) =
predicate.clone().substitute(Interner, &subst).into_value_and_skipped_binders();
// Quantified where clauses are not yet handled.
stdx::always!(binders.is_empty(Interner));
self.push_obligation(predicate.cast(Interner));
}
// We need to add a `Self: Trait` obligation when `def` is a trait assoc item.
let container = match def {
GenericDefId::FunctionId(id) => id.lookup(self.db.upcast()).container,
GenericDefId::ConstId(id) => id.lookup(self.db.upcast()).container,
_ => return,
};
if let ItemContainerId::TraitId(trait_) = container {
let param_len = generics(self.db.upcast(), def).len_self();
let parent_subst =
Substitution::from_iter(Interner, subst.iter(Interner).skip(param_len));
let trait_ref =
TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: parent_subst };
self.push_obligation(trait_ref.cast(Interner));
}
}
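// Editor's illustration (hypothetical example): resolving the path in
// `fn f<T: Default>() -> T { T::default() }` goes through the trait-assoc-item
// case above, registering `T: Default` as the `Self: Trait` obligation.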
fn resolve_assoc_item(
@ -169,7 +232,7 @@ impl<'a> InferenceContext<'a> {
) -> Option<(ValueNs, Substitution)> {
let trait_ = trait_ref.hir_trait_id();
let item =
self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
self.db.trait_data(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| {
match item {
AssocItemId::FunctionId(func) => {
if segment.name == &self.db.function_data(func).name {
@ -288,7 +351,7 @@ impl<'a> InferenceContext<'a> {
name: &Name,
id: ExprOrPatId,
) -> Option<(ValueNs, Substitution)> {
let ty = self.resolve_ty_shallow(ty);
let ty = self.resolve_ty_shallow(&ty);
let (enum_id, subst) = match ty.as_adt() {
Some((AdtId::EnumId(e), subst)) => (e, subst),
_ => return None,
@ -300,3 +363,10 @@ impl<'a> InferenceContext<'a> {
Some((ValueNs::EnumVariantId(variant), subst.clone()))
}
}
enum ValuePathResolution {
// It's awkward to wrap a single ID in two enums, but we need both, and this saves a fallible
// conversion between them plus an `unwrap()`.
GenericDef(ValueTyDefId, GenericDefId, Substitution),
NonGeneric(Ty),
}
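// Editor's illustration: a path such as `Option::<i32>::None` resolves to
// `GenericDef` (its substitution carries `i32`), while a path to a `static`
// carries no generics and resolves to `NonGeneric`.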


@ -1,23 +1,25 @@
//! Unification and canonicalization logic.
use std::{fmt, iter, mem, sync::Arc};
use std::{fmt, iter, mem};
use chalk_ir::{
cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy,
IntTy, TyVariableKind, UniverseIndex,
};
use chalk_solve::infer::ParameterEnaVariableExt;
use either::Either;
use ena::unify::UnifyKey;
use hir_def::{FunctionId, TraitId};
use hir_expand::name;
use stdx::never;
use triomphe::Arc;
use super::{InferOk, InferResult, InferenceContext, TypeError};
use crate::{
db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar,
Canonical, Const, DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, InEnvironment,
InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
consteval::unknown_const, db::HirDatabase, fold_tys_and_consts, static_lifetime,
to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue,
DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, InEnvironment, InferenceVar,
Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution, Substitution,
TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
};
impl<'a> InferenceContext<'a> {
@ -130,7 +132,7 @@ pub(crate) fn unify(
}
bitflags::bitflags! {
#[derive(Default)]
#[derive(Default, Clone, Copy)]
pub(crate) struct TypeVariableFlags: u8 {
const DIVERGING = 1 << 0;
const INTEGER = 1 << 1;
@ -230,14 +232,40 @@ impl<'a> InferenceTable<'a> {
/// type annotation (e.g. from a let type annotation, field type or function
/// call). `make_ty` handles this already, but e.g. for field types we need
/// to do it as well.
pub(crate) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
fold_tys(
pub(crate) fn normalize_associated_types_in<T>(&mut self, ty: T) -> T
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
{
fold_tys_and_consts(
ty,
|ty, _| match ty.kind(Interner) {
TyKind::Alias(AliasTy::Projection(proj_ty)) => {
self.normalize_projection_ty(proj_ty.clone())
}
_ => ty,
|e, _| match e {
Either::Left(ty) => Either::Left(match ty.kind(Interner) {
TyKind::Alias(AliasTy::Projection(proj_ty)) => {
self.normalize_projection_ty(proj_ty.clone())
}
_ => ty,
}),
Either::Right(c) => Either::Right(match &c.data(Interner).value {
chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
crate::ConstScalar::UnevaluatedConst(c_id, subst) => {
// FIXME: Ideally we would do everything here that we do for type aliases, i.e. add a variable
// and register an obligation. But that needs chalk support, so we handle only the most basic
// case (a non-associated const without generic parameters) manually.
if subst.len(Interner) == 0 {
if let Ok(eval) = self.db.const_eval((*c_id).into(), subst.clone())
{
eval
} else {
unknown_const(c.data(Interner).ty.clone())
}
} else {
unknown_const(c.data(Interner).ty.clone())
}
}
_ => c,
},
_ => c,
}),
},
DebruijnIndex::INNERMOST,
)
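// Editor's sketch of what normalization means here (std types only):
fn _normalization_demo() {
    // The projection `<Vec<i32> as IntoIterator>::Item` normalizes to `i32`.
    let _item: <Vec<i32> as IntoIterator>::Item = 0i32;
}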
@ -463,7 +491,8 @@ impl<'a> InferenceTable<'a> {
pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option<Solution> {
let in_env = InEnvironment::new(&self.trait_env.env, goal);
let canonicalized = self.canonicalize(in_env);
let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value);
let solution =
self.db.trait_solve(self.trait_env.krate, self.trait_env.block, canonicalized.value);
solution
}
@ -598,7 +627,11 @@ impl<'a> InferenceTable<'a> {
&mut self,
canonicalized: &Canonicalized<InEnvironment<Goal>>,
) -> bool {
let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value.clone());
let solution = self.db.trait_solve(
self.trait_env.krate,
self.trait_env.block,
canonicalized.value.clone(),
);
match solution {
Some(Solution::Unique(canonical_subst)) => {
@ -631,10 +664,13 @@ impl<'a> InferenceTable<'a> {
&mut self,
ty: &Ty,
num_args: usize,
) -> Option<(Option<(TraitId, FunctionId)>, Vec<Ty>, Ty)> {
) -> Option<(Option<FnTrait>, Vec<Ty>, Ty)> {
match ty.callable_sig(self.db) {
Some(sig) => Some((None, sig.params().to_vec(), sig.ret().clone())),
None => self.callable_sig_from_fn_trait(ty, num_args),
None => {
let (f, args_ty, return_ty) = self.callable_sig_from_fn_trait(ty, num_args)?;
Some((Some(f), args_ty, return_ty))
}
}
}
@ -642,7 +678,7 @@ impl<'a> InferenceTable<'a> {
&mut self,
ty: &Ty,
num_args: usize,
) -> Option<(Option<(TraitId, FunctionId)>, Vec<Ty>, Ty)> {
) -> Option<(FnTrait, Vec<Ty>, Ty)> {
let krate = self.trait_env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
let trait_data = self.db.trait_data(fn_once_trait);
@ -676,23 +712,90 @@ impl<'a> InferenceTable<'a> {
};
let trait_env = self.trait_env.env.clone();
let mut trait_ref = projection.trait_ref(self.db);
let obligation = InEnvironment {
goal: projection.trait_ref(self.db).cast(Interner),
environment: trait_env,
goal: trait_ref.clone().cast(Interner),
environment: trait_env.clone(),
};
let canonical = self.canonicalize(obligation.clone());
if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() {
if self
.db
.trait_solve(krate, self.trait_env.block, canonical.value.cast(Interner))
.is_some()
{
self.register_obligation(obligation.goal);
let return_ty = self.normalize_projection_ty(projection);
Some((
Some(fn_once_trait).zip(trait_data.method_by_name(&name!(call_once))),
arg_tys,
return_ty,
))
for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] {
let fn_x_trait = fn_x.get_id(self.db, krate)?;
trait_ref.trait_id = to_chalk_trait_id(fn_x_trait);
let obligation: chalk_ir::InEnvironment<chalk_ir::Goal<Interner>> = InEnvironment {
goal: trait_ref.clone().cast(Interner),
environment: trait_env.clone(),
};
let canonical = self.canonicalize(obligation.clone());
if self
.db
.trait_solve(krate, self.trait_env.block, canonical.value.cast(Interner))
.is_some()
{
return Some((fn_x, arg_tys, return_ty));
}
}
unreachable!("It should at least implement FnOnce at this point");
} else {
None
}
}
pub(super) fn insert_type_vars<T>(&mut self, ty: T) -> T
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
{
fold_tys_and_consts(
ty,
|x, _| match x {
Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
},
DebruijnIndex::INNERMOST,
)
}
/// Replaces `Ty::Error` with a new type var, so we may still be able to infer it.
pub(super) fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
match ty.kind(Interner) {
TyKind::Error => self.new_type_var(),
TyKind::InferenceVar(..) => {
let ty_resolved = self.resolve_ty_shallow(&ty);
if ty_resolved.is_unknown() {
self.new_type_var()
} else {
ty
}
}
_ => ty,
}
}
/// Replaces `ConstScalar::Unknown` with a new const var, so we may still be able to infer it.
pub(super) fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
let data = c.data(Interner);
match &data.value {
ConstValue::Concrete(cc) => match &cc.interned {
crate::ConstScalar::Unknown => self.new_const_var(data.ty.clone()),
// Try to evaluate the unevaluated const; replace it with a new var if const eval fails.
crate::ConstScalar::UnevaluatedConst(id, subst) => {
if let Ok(eval) = self.db.const_eval(*id, subst.clone()) {
eval
} else {
self.new_const_var(data.ty.clone())
}
}
_ => c,
},
_ => c,
}
}
}
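// Hedged example (editor's addition): the kind of inference that replacing an
// unknown const with a fresh const var loosely mirrors.
fn _const_infer_demo() {
    fn takes<const N: usize>(_: [u8; N]) {}
    takes([0u8; 4]); // `N` starts out undetermined and is unified with `4`
}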
impl<'a> fmt::Debug for InferenceTable<'a> {


@ -6,9 +6,10 @@ use chalk_ir::{
DebruijnIndex,
};
use hir_def::{
adt::VariantData, attr::Attrs, visibility::Visibility, AdtId, EnumVariantId, HasModule, Lookup,
ModuleId, VariantId,
attr::Attrs, data::adt::VariantData, visibility::Visibility, AdtId, EnumVariantId, HasModule,
Lookup, ModuleId, VariantId,
};
use rustc_hash::FxHashSet;
use crate::{
consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind,
@ -16,7 +17,8 @@ use crate::{
/// Checks whether a type is visibly uninhabited from a particular module.
pub(crate) fn is_ty_uninhabited_from(ty: &Ty, target_mod: ModuleId, db: &dyn HirDatabase) -> bool {
let mut uninhabited_from = UninhabitedFrom { target_mod, db };
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
@ -32,7 +34,8 @@ pub(crate) fn is_enum_variant_uninhabited_from(
let vars_attrs = db.variants_attrs(variant.parent);
let is_local = variant.parent.lookup(db.upcast()).container.krate() == target_mod.krate();
let mut uninhabited_from = UninhabitedFrom { target_mod, db };
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
let inhabitedness = uninhabited_from.visit_variant(
variant.into(),
&enum_data.variants[variant.local_id].variant_data,
@ -45,6 +48,9 @@ pub(crate) fn is_enum_variant_uninhabited_from(
struct UninhabitedFrom<'a> {
target_mod: ModuleId,
recursive_ty: FxHashSet<Ty>,
// Guard against stack overflow on non-trivial, non-terminating types.
max_depth: usize,
db: &'a dyn HirDatabase,
}
@ -65,17 +71,27 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
ty: &Ty,
outer_binder: DebruijnIndex,
) -> ControlFlow<VisiblyUninhabited> {
match ty.kind(Interner) {
if self.recursive_ty.contains(ty) || self.max_depth == 0 {
// rustc considers recursive types to always be inhabited. Treating them as always
// uninhabited would arguably also be valid, but we follow rustc here.
return CONTINUE_OPAQUELY_INHABITED;
}
self.recursive_ty.insert(ty.clone());
self.max_depth -= 1;
let r = match ty.kind(Interner) {
TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst),
TyKind::Never => BREAK_VISIBLY_UNINHABITED,
TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder),
TyKind::Array(item_ty, len) => match try_const_usize(len) {
TyKind::Array(item_ty, len) => match try_const_usize(self.db, len) {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},
TyKind::Ref(..) | _ => CONTINUE_OPAQUELY_INHABITED,
}
};
self.recursive_ty.remove(ty);
self.max_depth += 1;
r
}
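// Editor's illustrative cases (comments only):
//   enum Never {}          -> visibly uninhabited
//   struct Rec(Box<Rec>);  -> recursive: treated as inhabited, matching rustc
//   [Never; 0]             -> inhabited: the empty array still has a value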
fn interner(&self) -> Interner {


@ -7,7 +7,8 @@ use chalk_ir::{Goal, GoalData};
use hir_def::TypeAliasId;
use intern::{impl_internable, Interned};
use smallvec::SmallVec;
use std::{fmt, sync::Arc};
use std::fmt;
use triomphe::Arc;
#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub struct Interner;
@ -43,7 +44,7 @@ impl_internable!(
);
impl chalk_ir::interner::Interner for Interner {
type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Interner>>>;
type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Self>>>;
type InternedLifetime = Interned<InternedWrapper<chalk_ir::LifetimeData<Self>>>;
type InternedConst = Interned<InternedWrapper<chalk_ir::ConstData<Self>>>;
type InternedConcreteConst = ConstScalar;
@ -51,8 +52,8 @@ impl chalk_ir::interner::Interner for Interner {
type InternedGoal = Arc<GoalData<Self>>;
type InternedGoals = Vec<Goal<Self>>;
type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
type InternedProgramClauses = Interned<InternedWrapper<Vec<chalk_ir::ProgramClause<Self>>>>;
type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
type InternedQuantifiedWhereClauses =
Interned<InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Self>>>>;
type InternedVariableKinds = Interned<InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>>;
@ -86,6 +87,27 @@ impl chalk_ir::interner::Interner for Interner {
tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
}
fn debug_opaque_ty_id(
opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
}
fn debug_fn_def_id(
fn_def_id: chalk_ir::FnDefId<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
}
fn debug_closure_id(
_fn_def_id: chalk_ir::ClosureId<Self>,
_fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
None
}
fn debug_alias(
alias: &chalk_ir::AliasTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
@ -113,13 +135,6 @@ impl chalk_ir::interner::Interner for Interner {
Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id))
}
fn debug_opaque_ty_id(
opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
}
fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", ty.data(Interner)))
}
@ -131,6 +146,13 @@ impl chalk_ir::interner::Interner for Interner {
Some(write!(fmt, "{:?}", lifetime.data(Interner)))
}
fn debug_const(
constant: &chalk_ir::Const<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", constant.data(Interner)))
}
fn debug_generic_arg(
parameter: &GenericArg,
fmt: &mut fmt::Formatter<'_>,
@ -138,69 +160,42 @@ impl chalk_ir::interner::Interner for Interner {
Some(write!(fmt, "{:?}", parameter.data(Interner).inner_debug()))
}
fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
let goal_data = goal.data(Interner);
Some(write!(fmt, "{goal_data:?}"))
}
fn debug_goals(
goals: &chalk_ir::Goals<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", goals.debug(Interner)))
}
fn debug_program_clause_implication(
pci: &chalk_ir::ProgramClauseImplication<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", pci.debug(Interner)))
}
fn debug_substitution(
substitution: &chalk_ir::Substitution<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", substitution.debug(Interner)))
}
fn debug_separator_trait_ref(
separator_trait_ref: &chalk_ir::SeparatorTraitRef<'_, Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner)))
}
fn debug_fn_def_id(
fn_def_id: chalk_ir::FnDefId<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
}
fn debug_const(
constant: &chalk_ir::Const<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", constant.data(Interner)))
}
fn debug_variable_kinds(
variable_kinds: &chalk_ir::VariableKinds<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner)))
}
fn debug_variable_kinds_with_angles(
variable_kinds: &chalk_ir::VariableKinds<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner)))
}
fn debug_canonical_var_kinds(
canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner)))
}
fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
let goal_data = goal.data(Interner);
Some(write!(fmt, "{goal_data:?}"))
}
fn debug_goals(
goals: &chalk_ir::Goals<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", goals.debug(Interner)))
}
fn debug_program_clause_implication(
pci: &chalk_ir::ProgramClauseImplication<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", pci.debug(Interner)))
}
fn debug_program_clause(
clause: &chalk_ir::ProgramClause<Self>,
fmt: &mut fmt::Formatter<'_>,
@ -213,6 +208,19 @@ impl chalk_ir::interner::Interner for Interner {
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
}
fn debug_substitution(
substitution: &chalk_ir::Substitution<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", substitution.debug(Interner)))
}
fn debug_separator_trait_ref(
separator_trait_ref: &chalk_ir::SeparatorTraitRef<'_, Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner)))
}
fn debug_quantified_where_clauses(
clauses: &chalk_ir::QuantifiedWhereClauses<Self>,
fmt: &mut fmt::Formatter<'_>,
@ -220,6 +228,13 @@ impl chalk_ir::interner::Interner for Interner {
Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
}
fn debug_constraints(
_clauses: &chalk_ir::Constraints<Self>,
_fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
None
}
fn intern_ty(self, kind: chalk_ir::TyKind<Self>) -> Self::InternedType {
let flags = kind.compute_flags(self);
Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags }))
@ -272,6 +287,10 @@ impl chalk_ir::interner::Interner for Interner {
Arc::new(goal)
}
fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData<Self> {
goal
}
fn intern_goals<E>(
self,
data: impl IntoIterator<Item = Result<Goal<Self>, E>>,
@ -279,10 +298,6 @@ impl chalk_ir::interner::Interner for Interner {
data.into_iter().collect()
}
fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData<Self> {
goal
}
fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal<Interner>] {
goals
}
@ -367,32 +382,18 @@ impl chalk_ir::interner::Interner for Interner {
) -> &[chalk_ir::CanonicalVarKind<Self>] {
canonical_var_kinds
}
fn intern_constraints<E>(
self,
data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>,
) -> Result<Self::InternedConstraints, E> {
data.into_iter().collect()
}
fn constraints_data(
self,
constraints: &Self::InternedConstraints,
) -> &[chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
constraints
}
fn debug_closure_id(
_fn_def_id: chalk_ir::ClosureId<Self>,
_fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
None
}
fn debug_constraints(
_clauses: &chalk_ir::Constraints<Self>,
_fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
None
}
fn intern_variances<E>(
self,


@ -1,19 +1,65 @@
//! Functions to detect special lang items
use hir_def::{lang_item::LangItem, AdtId, HasModule};
use hir_def::{data::adt::StructFlags, lang_item::LangItem, AdtId};
use hir_expand::name::Name;
use crate::db::HirDatabase;
pub fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool {
let krate = adt.module(db.upcast()).krate();
let box_adt =
db.lang_item(krate, LangItem::OwnedBox).and_then(|it| it.as_struct()).map(AdtId::from);
Some(adt) == box_adt
pub fn is_box(db: &dyn HirDatabase, adt: AdtId) -> bool {
let AdtId::StructId(id) = adt else { return false };
db.struct_data(id).flags.contains(StructFlags::IS_BOX)
}
pub fn is_unsafe_cell(adt: AdtId, db: &dyn HirDatabase) -> bool {
let krate = adt.module(db.upcast()).krate();
let box_adt =
db.lang_item(krate, LangItem::UnsafeCell).and_then(|it| it.as_struct()).map(AdtId::from);
Some(adt) == box_adt
pub fn is_unsafe_cell(db: &dyn HirDatabase, adt: AdtId) -> bool {
let AdtId::StructId(id) = adt else { return false };
db.struct_data(id).flags.contains(StructFlags::IS_UNSAFE_CELL)
}
pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> {
use hir_expand::name;
use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
Some(match op {
BinaryOp::LogicOp(_) => return None,
BinaryOp::ArithOp(aop) => match aop {
ArithOp::Add => (name![add], LangItem::Add),
ArithOp::Mul => (name![mul], LangItem::Mul),
ArithOp::Sub => (name![sub], LangItem::Sub),
ArithOp::Div => (name![div], LangItem::Div),
ArithOp::Rem => (name![rem], LangItem::Rem),
ArithOp::Shl => (name![shl], LangItem::Shl),
ArithOp::Shr => (name![shr], LangItem::Shr),
ArithOp::BitXor => (name![bitxor], LangItem::BitXor),
ArithOp::BitOr => (name![bitor], LangItem::BitOr),
ArithOp::BitAnd => (name![bitand], LangItem::BitAnd),
},
BinaryOp::Assignment { op: Some(aop) } => match aop {
ArithOp::Add => (name![add_assign], LangItem::AddAssign),
ArithOp::Mul => (name![mul_assign], LangItem::MulAssign),
ArithOp::Sub => (name![sub_assign], LangItem::SubAssign),
ArithOp::Div => (name![div_assign], LangItem::DivAssign),
ArithOp::Rem => (name![rem_assign], LangItem::RemAssign),
ArithOp::Shl => (name![shl_assign], LangItem::ShlAssign),
ArithOp::Shr => (name![shr_assign], LangItem::ShrAssign),
ArithOp::BitXor => (name![bitxor_assign], LangItem::BitXorAssign),
ArithOp::BitOr => (name![bitor_assign], LangItem::BitOrAssign),
ArithOp::BitAnd => (name![bitand_assign], LangItem::BitAndAssign),
},
BinaryOp::CmpOp(cop) => match cop {
CmpOp::Eq { negated: false } => (name![eq], LangItem::PartialEq),
CmpOp::Eq { negated: true } => (name![ne], LangItem::PartialEq),
CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
(name![le], LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
(name![lt], LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
(name![ge], LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
(name![gt], LangItem::PartialOrd)
}
},
BinaryOp::Assignment { op: None } => return None,
})
}
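// Hedged usage sketch (editor's addition, assuming this file's imports):
fn _bin_op_lang_item_demo() {
    use syntax::ast::{ArithOp, BinaryOp};
    let (name, item) = lang_items_for_bin_op(BinaryOp::ArithOp(ArithOp::Add)).unwrap();
    assert!(matches!(item, LangItem::Add)); // `a + b` resolves through the `Add` lang item
    let _ = name; // `name![add]`
}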


@ -1,19 +1,23 @@
//! Compute the binary representation of a type
use base_db::CrateId;
use chalk_ir::{AdtId, TyKind};
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
Abi, FieldsShape, Integer, Layout, LayoutCalculator, LayoutError, Primitive, ReprOptions,
RustcEnumVariantIdx, Scalar, Size, StructKind, TargetDataLayout, Variants, WrappingRange,
Abi, FieldsShape, Integer, LayoutCalculator, LayoutS, Primitive, ReprOptions, Scalar, Size,
StructKind, TargetDataLayout, WrappingRange,
},
LocalFieldId,
LocalEnumVariantId, LocalFieldId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
use triomphe::Arc;
use crate::{consteval::try_const_usize, db::HirDatabase, Interner, Substitution, Ty};
use crate::{
consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx,
utils::ClosureSubst, Interner, Substitution, TraitEnvironment, Ty,
};
use self::adt::struct_variant_idx;
pub use self::{
adt::{layout_of_adt_query, layout_of_adt_recover},
target::target_data_layout_query,
@ -28,6 +32,34 @@ macro_rules! user_error {
mod adt;
mod target;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct RustcEnumVariantIdx(pub LocalEnumVariantId);
impl rustc_index::vec::Idx for RustcEnumVariantIdx {
fn new(idx: usize) -> Self {
RustcEnumVariantIdx(Idx::from_raw(RawIdx::from(idx as u32)))
}
fn index(self) -> usize {
u32::from(self.0.into_raw()) as usize
}
}
pub type Layout = LayoutS<RustcEnumVariantIdx>;
pub type TagEncoding = hir_def::layout::TagEncoding<RustcEnumVariantIdx>;
pub type Variants = hir_def::layout::Variants<RustcEnumVariantIdx>;
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LayoutError {
UserError(String),
SizeOverflow,
TargetLayoutNotAvailable,
HasPlaceholder,
HasErrorType,
NotImplemented,
Unknown,
}
struct LayoutCx<'a> {
krate: CrateId,
target: &'a TargetDataLayout,
@ -45,20 +77,18 @@ impl<'a> LayoutCalculator for LayoutCx<'a> {
}
}
fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar {
Scalar::Initialized { value, valid_range: WrappingRange::full(value.size(dl)) }
}
fn scalar(dl: &TargetDataLayout, value: Primitive) -> Layout {
Layout::scalar(dl, scalar_unit(dl, value))
}
pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Layout, LayoutError> {
pub fn layout_of_ty_query(
db: &dyn HirDatabase,
ty: Ty,
krate: CrateId,
) -> Result<Arc<Layout>, LayoutError> {
let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
let cx = LayoutCx { krate, target: &target };
let dl = &*cx.current_data_layout();
Ok(match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => db.layout_of_adt(*def, subst.clone())?,
let trait_env = Arc::new(TraitEnvironment::empty(krate));
let ty = normalize(db, trait_env, ty.clone());
let result = match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => return db.layout_of_adt(*def, subst.clone(), krate),
TyKind::Scalar(s) => match s {
chalk_ir::Scalar::Bool => Layout::scalar(
dl,
@ -78,12 +108,12 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
dl,
Primitive::Int(
match i {
chalk_ir::IntTy::Isize => dl.ptr_sized_integer(),
chalk_ir::IntTy::I8 => Integer::I8,
chalk_ir::IntTy::I16 => Integer::I16,
chalk_ir::IntTy::I32 => Integer::I32,
chalk_ir::IntTy::I64 => Integer::I64,
chalk_ir::IntTy::I128 => Integer::I128,
IntTy::Isize => dl.ptr_sized_integer(),
IntTy::I8 => Integer::I8,
IntTy::I16 => Integer::I16,
IntTy::I32 => Integer::I32,
IntTy::I64 => Integer::I64,
IntTy::I128 => Integer::I128,
},
true,
),
@ -92,12 +122,12 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
dl,
Primitive::Int(
match i {
chalk_ir::UintTy::Usize => dl.ptr_sized_integer(),
chalk_ir::UintTy::U8 => Integer::I8,
chalk_ir::UintTy::U16 => Integer::I16,
chalk_ir::UintTy::U32 => Integer::I32,
chalk_ir::UintTy::U64 => Integer::I64,
chalk_ir::UintTy::U128 => Integer::I128,
UintTy::Usize => dl.ptr_sized_integer(),
UintTy::U8 => Integer::I8,
UintTy::U16 => Integer::I16,
UintTy::U32 => Integer::I32,
UintTy::U64 => Integer::I64,
UintTy::U128 => Integer::I128,
},
false,
),
@ -105,8 +135,8 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
chalk_ir::Scalar::Float(f) => scalar(
dl,
match f {
chalk_ir::FloatTy::F32 => Primitive::F32,
chalk_ir::FloatTy::F64 => Primitive::F64,
FloatTy::F32 => Primitive::F32,
FloatTy::F64 => Primitive::F64,
},
),
},
@ -115,17 +145,17 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
let fields = tys
.iter(Interner)
.map(|k| layout_of_ty(db, k.assert_ty_ref(Interner), krate))
.map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), krate))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().collect::<Vec<_>>();
let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
let count = try_const_usize(&count).ok_or(LayoutError::UserError(
"mismatched type of const generic parameter".to_string(),
let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(
"unevaluated or mistyped const generic parameter".to_string(),
))? as u64;
let element = layout_of_ty(db, element, krate)?;
let element = db.layout_of_ty(element.clone(), krate)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
@ -146,7 +176,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
}
}
TyKind::Slice(element) => {
let element = layout_of_ty(db, element, krate)?;
let element = db.layout_of_ty(element.clone(), krate)?;
Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: element.size, count: 0 },
@ -180,7 +210,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
}
_ => {
// pointee is sized
return Ok(Layout::scalar(dl, data_ptr));
return Ok(Arc::new(Layout::scalar(dl, data_ptr)));
}
};
@ -222,23 +252,51 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = db.infer(func.into());
layout_of_ty(db, &infer.type_of_rpit[idx], krate)?
return db.layout_of_ty(infer.type_of_rpit[idx].clone(), krate);
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
return Err(LayoutError::NotImplemented)
}
}
}
TyKind::Closure(_, _) | TyKind::Generator(_, _) | TyKind::GeneratorWitness(_, _) => {
TyKind::Closure(c, subst) => {
let (def, _) = db.lookup_intern_closure((*c).into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(c);
let fields = captures
.iter()
.map(|x| {
db.layout_of_ty(
x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
krate,
)
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
.ok_or(LayoutError::Unknown)?
}
TyKind::Generator(_, _) | TyKind::GeneratorWitness(_, _) => {
return Err(LayoutError::NotImplemented)
}
TyKind::Error => return Err(LayoutError::HasErrorType),
TyKind::AssociatedType(_, _)
| TyKind::Error
| TyKind::Alias(_)
| TyKind::Placeholder(_)
| TyKind::BoundVar(_)
| TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder),
})
};
Ok(Arc::new(result))
}
pub fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Ty,
_: &CrateId,
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}
fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {
@ -274,5 +332,13 @@ fn field_ty(
db.field_types(def)[fd].clone().substitute(Interner, subst)
}
fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar {
Scalar::Initialized { value, valid_range: WrappingRange::full(value.size(dl)) }
}
fn scalar(dl: &TargetDataLayout, value: Primitive) -> Layout {
Layout::scalar(dl, scalar_unit(dl, value))
}
#[cfg(test)]
mod tests;


@ -1,18 +1,25 @@
//! Compute the binary representation of structs, unions and enums
use std::ops::Bound;
use std::{cmp, ops::Bound};
use base_db::CrateId;
use hir_def::{
adt::VariantData,
layout::{Integer, IntegerExt, Layout, LayoutCalculator, LayoutError, RustcEnumVariantIdx},
AdtId, EnumVariantId, HasModule, LocalEnumVariantId, VariantId,
data::adt::VariantData,
layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout},
AdtId, EnumVariantId, LocalEnumVariantId, VariantId,
};
use la_arena::RawIdx;
use smallvec::SmallVec;
use triomphe::Arc;
use crate::{db::HirDatabase, lang_items::is_unsafe_cell, layout::field_ty, Substitution};
use crate::{
db::HirDatabase,
lang_items::is_unsafe_cell,
layout::{field_ty, Layout, LayoutError, RustcEnumVariantIdx},
Substitution,
};
use super::{layout_of_ty, LayoutCx};
use super::LayoutCx;
pub(crate) fn struct_variant_idx() -> RustcEnumVariantIdx {
RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from(0)))
@ -22,29 +29,29 @@ pub fn layout_of_adt_query(
db: &dyn HirDatabase,
def: AdtId,
subst: Substitution,
) -> Result<Layout, LayoutError> {
let krate = def.module(db.upcast()).krate();
krate: CrateId,
) -> Result<Arc<Layout>, LayoutError> {
let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
let cx = LayoutCx { krate, target: &target };
let dl = cx.current_data_layout();
let handle_variant = |def: VariantId, var: &VariantData| {
var.fields()
.iter()
.map(|(fd, _)| layout_of_ty(db, &field_ty(db, def, fd, &subst), cx.krate))
.map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), cx.krate))
.collect::<Result<Vec<_>, _>>()
};
let (variants, is_enum, is_union, repr) = match def {
let (variants, repr) = match def {
AdtId::StructId(s) => {
let data = db.struct_data(s);
let mut r = SmallVec::<[_; 1]>::new();
r.push(handle_variant(s.into(), &data.variant_data)?);
(r, false, false, data.repr.unwrap_or_default())
(r, data.repr.unwrap_or_default())
}
AdtId::UnionId(id) => {
let data = db.union_data(id);
let mut r = SmallVec::new();
r.push(handle_variant(id.into(), &data.variant_data)?);
(r, false, true, data.repr.unwrap_or_default())
(r, data.repr.unwrap_or_default())
}
AdtId::EnumId(e) => {
let data = db.enum_data(e);
@ -58,22 +65,24 @@ pub fn layout_of_adt_query(
)
})
.collect::<Result<SmallVec<_>, _>>()?;
(r, true, false, data.repr.unwrap_or_default())
(r, data.repr.unwrap_or_default())
}
};
let variants =
variants.iter().map(|x| x.iter().collect::<Vec<_>>()).collect::<SmallVec<[_; 1]>>();
let variants = variants
.iter()
.map(|x| x.iter().map(|x| &**x).collect::<Vec<_>>())
.collect::<SmallVec<[_; 1]>>();
let variants = variants.iter().map(|x| x.iter().collect()).collect();
if is_union {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)
let result = if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
} else {
cx.layout_of_struct_or_enum(
&repr,
&variants,
is_enum,
is_unsafe_cell(def, db),
matches!(def, AdtId::EnumId(..)),
is_unsafe_cell(db, def),
layout_scalar_valid_range(db, def),
|min, max| Integer::repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)),
|min, max| repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)),
variants.iter_enumerated().filter_map(|(id, _)| {
let AdtId::EnumId(e) = def else { return None };
let d =
@ -90,15 +99,16 @@ pub fn layout_of_adt_query(
// .iter_enumerated()
// .any(|(i, v)| v.discr != ty::VariantDiscr::Relative(i.as_u32()))
repr.inhibit_enum_layout_opt(),
!is_enum
!matches!(def, AdtId::EnumId(..))
&& variants
.iter()
.next()
.and_then(|x| x.last().map(|x| x.is_unsized()))
.unwrap_or(true),
)
.ok_or(LayoutError::SizeOverflow)
}
.ok_or(LayoutError::SizeOverflow)?
};
Ok(Arc::new(result))
}
fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, Bound<u128>) {
@ -122,6 +132,54 @@ pub fn layout_of_adt_recover(
_: &[String],
_: &AdtId,
_: &Substitution,
) -> Result<Layout, LayoutError> {
_: &CrateId,
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}
/// Finds the appropriate Integer type and signedness for the given
/// signed discriminant range and `#[repr]` attribute.
/// N.B.: `u128` values above `i128::MAX` will be treated as signed, but
/// that shouldn't affect anything, other than maybe debuginfo.
fn repr_discr(
dl: &TargetDataLayout,
repr: &ReprOptions,
min: i128,
max: i128,
) -> Result<(Integer, bool), LayoutError> {
// Theoretically, negative values could be larger in unsigned representation
// than the unsigned representation of the signed minimum. However, if there
// are any negative values, the only valid unsigned representation is u128
// which can fit all i128 values, so the result remains unaffected.
let unsigned_fit = Integer::fit_unsigned(cmp::max(min as u128, max as u128));
let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max));
if let Some(ity) = repr.int {
let discr = Integer::from_attr(dl, ity);
let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
if discr < fit {
return Err(LayoutError::UserError(
"Integer::repr_discr: `#[repr]` hint too small for \
discriminant range of enum "
.to_string(),
));
}
return Ok((discr, ity.is_signed()));
}
let at_least = if repr.c() {
// This is usually I32; however, it can be different on some platforms,
// notably Hexagon and arm-none/thumb-none.
dl.c_enum_min_size
} else {
// repr(Rust) enums try to be as small as possible
Integer::I8
};
// If there are no negative values, we can use the unsigned fit.
Ok(if min >= 0 {
(cmp::max(unsigned_fit, at_least), false)
} else {
(cmp::max(signed_fit, at_least), true)
})
}
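// Worked example (editor's addition): with no `#[repr]`, discriminants 0..=200
// fit unsigned in I8, yielding `(I8, false)`; widening the range to -1..=200
// forces the signed fit I16, yielding `(I16, true)`.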


@ -1,9 +1,8 @@
//! Target dependent parameters needed for layouts
use std::sync::Arc;
use base_db::CrateId;
use hir_def::layout::TargetDataLayout;
use triomphe::Arc;
use crate::db::HirDatabase;


@ -2,49 +2,55 @@ use std::collections::HashMap;
use base_db::fixture::WithFixture;
use chalk_ir::{AdtId, TyKind};
use hir_def::{
db::DefDatabase,
use hir_def::db::DefDatabase;
use triomphe::Arc;
use crate::{
db::HirDatabase,
layout::{Layout, LayoutError},
test_db::TestDB,
Interner, Substitution,
};
use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
use super::layout_of_ty;
mod closure;
fn current_machine_data_layout() -> String {
project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap()
}
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}",
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let adt_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::AdtId(x) => {
let name = match x {
hir_def::AdtId::StructId(x) => db.struct_data(x).name.to_smol_str(),
hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(),
hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(),
};
(name == "Goal").then_some(x)
}
_ => None,
let (db, file_ids) = TestDB::with_many_files(&ra_fixture);
let (adt_id, module_id) = file_ids
.into_iter()
.find_map(|file_id| {
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let adt_id = scope.declarations().find_map(|x| match x {
hir_def::ModuleDefId::AdtId(x) => {
let name = match x {
hir_def::AdtId::StructId(x) => db.struct_data(x).name.to_smol_str(),
hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(),
hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(),
};
(name == "Goal").then_some(x)
}
_ => None,
})?;
Some((adt_id, module_id))
})
.unwrap();
let goal_ty = TyKind::Adt(AdtId(adt_id), Substitution::empty(Interner)).intern(Interner);
layout_of_ty(&db, &goal_ty, module_id.krate())
db.layout_of_ty(goal_ty, module_id.krate())
}
/// A version of `eval_goal` for types that cannot be expressed in ADTs, like closures and `impl Trait`
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}",
@ -68,7 +74,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0;
let infer = db.infer(adt_id.into());
let goal_ty = infer.type_of_binding[b].clone();
layout_of_ty(&db, &goal_ty, module_id.krate())
db.layout_of_ty(goal_ty, module_id.krate())
}
#[track_caller]
@ -81,8 +87,8 @@ fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64)
#[track_caller]
fn check_size_and_align_expr(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
let l = eval_expr(ra_fixture, minicore).unwrap();
assert_eq!(l.size.bytes(), size);
assert_eq!(l.align.abi.bytes(), align);
assert_eq!(l.size.bytes(), size, "size mismatch");
assert_eq!(l.align.abi.bytes(), align, "align mismatch");
}
#[track_caller]
@ -118,13 +124,31 @@ macro_rules! size_and_align {
};
}
#[macro_export]
macro_rules! size_and_align_expr {
(minicore: $($x:tt),*; stmts: [$($s:tt)*] $($t:tt)*) => {
{
#[allow(dead_code)]
#[allow(unused_must_use)]
#[allow(path_statements)]
{
$($s)*
let val = { $($t)* };
$crate::layout::tests::check_size_and_align_expr(
&format!("{{ {} let val = {{ {} }}; val }}", stringify!($($s)*), stringify!($($t)*)),
&format!("//- minicore: {}\n", stringify!($($x),*)),
::std::mem::size_of_val(&val) as u64,
::std::mem::align_of_val(&val) as u64,
);
}
}
};
($($t:tt)*) => {
{
#[allow(dead_code)]
{
let val = { $($t)* };
check_size_and_align_expr(
$crate::layout::tests::check_size_and_align_expr(
stringify!($($t)*),
"",
::std::mem::size_of_val(&val) as u64,
@ -196,6 +220,44 @@ fn generic() {
}
}
#[test]
fn associated_types() {
size_and_align! {
trait Tr {
type Ty;
}
impl Tr for i32 {
type Ty = i64;
}
struct Foo<A: Tr>(<A as Tr>::Ty);
struct Bar<A: Tr>(A::Ty);
struct Goal(Foo<i32>, Bar<i32>, <i32 as Tr>::Ty);
}
check_size_and_align(
r#"
//- /b/mod.rs crate:b
pub trait Tr {
type Ty;
}
pub struct Foo<A: Tr>(<A as Tr>::Ty);
//- /a/mod.rs crate:a deps:b
use b::{Tr, Foo};
struct S;
impl Tr for S {
type Ty = i64;
}
struct Goal(Foo<S>);
"#,
"",
8,
8,
);
}
#[test]
fn return_position_impl_trait() {
size_and_align_expr! {
@ -212,6 +274,45 @@ fn return_position_impl_trait() {
fn foo() -> (impl T, impl T, impl T) { (2i64, 5i32, 7i32) }
foo()
}
size_and_align_expr! {
minicore: iterators;
stmts: []
trait Tr {}
impl Tr for i32 {}
fn foo() -> impl Iterator<Item = impl Tr> {
[1, 2, 3].into_iter()
}
let mut iter = foo();
let item = iter.next();
(iter, item)
}
size_and_align_expr! {
minicore: future;
stmts: []
use core::{future::Future, task::{Poll, Context}, pin::pin};
use std::{task::Wake, sync::Arc};
trait Tr {}
impl Tr for i32 {}
async fn f() -> impl Tr {
2
}
fn unwrap_fut<T>(inp: impl Future<Output = T>) -> Poll<T> {
// In a normal test we could use `loop {}` or `panic!()` here,
// but rustc actually runs this code.
let pinned = pin!(inp);
struct EmptyWaker;
impl Wake for EmptyWaker {
fn wake(self: Arc<Self>) {
}
}
let waker = Arc::new(EmptyWaker).into();
let mut context = Context::from_waker(&waker);
let x = pinned.poll(&mut context);
x
}
let x = unwrap_fut(f());
x
}
size_and_align_expr! {
struct Foo<T>(T, T, (T, T));
trait T {}
@ -276,6 +377,14 @@ fn niche_optimization() {
}
}
#[test]
fn const_eval() {
size_and_align! {
const X: usize = 5;
struct Goal([i32; X]);
}
}
#[test]
fn enums_with_discriminants() {
size_and_align! {

View file

@ -0,0 +1,257 @@
use crate::size_and_align_expr;
#[test]
fn zero_capture_simple() {
size_and_align_expr! {
|x: i32| x + 2
}
}
#[test]
fn move_simple() {
size_and_align_expr! {
minicore: copy;
stmts: []
let y: i32 = 5;
move |x: i32| {
x + y
}
}
}
#[test]
fn ref_simple() {
size_and_align_expr! {
minicore: copy;
stmts: [
let y: i32 = 5;
]
|x: i32| {
x + y
}
}
size_and_align_expr! {
minicore: copy;
stmts: [
let mut y: i32 = 5;
]
|x: i32| {
y = y + x;
y
}
}
size_and_align_expr! {
minicore: copy, deref_mut;
stmts: [
let y: &mut i32 = &mut 5;
]
|x: i32| {
*y += x;
}
}
size_and_align_expr! {
minicore: copy;
stmts: [
struct X(i32, i64);
let x: X = X(2, 6);
]
|| {
x
}
}
size_and_align_expr! {
minicore: copy, deref_mut;
stmts: [
struct X(i32, i64);
let x: &mut X = &mut X(2, 6);
]
|| {
(*x).0 as i64 + x.1
}
}
}
#[test]
fn ref_then_mut_then_move() {
size_and_align_expr! {
minicore: copy;
stmts: [
struct X(i32, i64);
let mut x: X = X(2, 6);
]
|| {
&x;
&mut x;
x;
}
}
}
#[test]
fn nested_closures() {
size_and_align_expr! {
|| {
|| {
|| {
let x = 2;
move || {
move || {
x
}
}
}
}
}
}
}
#[test]
fn capture_specific_fields2() {
size_and_align_expr! {
minicore: copy;
stmts: [
let x = &mut 2;
]
|| {
*x = 5;
&x;
}
}
}
#[test]
fn capture_specific_fields() {
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
y.0 + x + (y.2 .0 as i64)
}
}
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
let _ = &y;
y.0 + x + (y.2 .0 as i64)
}
}
size_and_align_expr! {
minicore: copy;
stmts: [
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
]
let y = &y;
move |x: i64| {
y.0 + x + (y.2 .0 as i64)
}
}
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
let X(a, _, (b, _)) = y;
a + x + (b as i64)
}
}
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y = &&X(2, 5, (7, 3));
move |x: i64| {
let X(a, _, (b, _)) = y;
*a + x + (*b as i64)
}
}
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
match y {
X(a, _, (b, _)) => a + x + (b as i64),
}
}
}
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
let X(a @ 2, _, (b, _)) = y else { return 5 };
a + x + (b as i64)
}
}
}
#[test]
fn match_pattern() {
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
move |x: i64| {
match y {
_ => x,
}
}
}
size_and_align_expr! {
minicore: copy;
stmts: [
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
]
|x: i64| {
match y {
X(_a, _, _c) => x,
}
}
}
size_and_align_expr! {
minicore: copy;
stmts: [
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
]
|x: i64| {
match y {
_y => x,
}
}
}
size_and_align_expr! {
minicore: copy;
stmts: [
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
]
|x: i64| {
match y {
ref _y => x,
}
}
}
}
#[test]
fn ellipsis_pattern() {
size_and_align_expr! {
struct X(i8, u16, i32, u64, i128, u8);
let y: X = X(1, 2, 3, 4, 5, 6);
move |_: i64| {
let X(_a, .., _b, _c) = y;
}
}
size_and_align_expr! {
struct X { a: i32, b: u8, c: i128}
let y: X = X { a: 1, b: 2, c: 3 };
move |_: i64| {
let X { a, b, .. } = y;
_ = (a, b);
}
}
size_and_align_expr! {
let y: (&&&(i8, u16, i32, u64, i128, u8), u16, i32, u64, i128, u8) = (&&&(1, 2, 3, 4, 5, 6), 2, 3, 4, 5, 6);
move |_: i64| {
let ((_a, .., _b, _c), .., _e, _f) = y;
}
}
}

View file

@ -1,6 +1,5 @@
//! The type system. We currently use this to infer types for completion, hover
//! information and various assists.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#[allow(unused)]
@ -8,12 +7,9 @@ macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
}
mod autoderef;
mod builder;
mod chalk_db;
mod chalk_ext;
pub mod consteval;
pub mod mir;
mod infer;
mod inhabitedness;
mod interner;
@ -21,21 +17,28 @@ mod lower;
mod mapping;
mod tls;
mod utils;
pub mod autoderef;
pub mod consteval;
pub mod db;
pub mod diagnostics;
pub mod display;
pub mod lang_items;
pub mod layout;
pub mod method_resolution;
pub mod mir;
pub mod primitive;
pub mod traits;
pub mod layout;
pub mod lang_items;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod test_db;
use std::{collections::HashMap, hash::Hash, sync::Arc};
use std::{
collections::{hash_map::Entry, HashMap},
hash::Hash,
};
use chalk_ir::{
fold::{Shift, TypeFoldable},
@ -44,12 +47,13 @@ use chalk_ir::{
NoSolution, TyData,
};
use either::Either;
use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId};
use hir_expand::name;
use la_arena::{Arena, Idx};
use mir::MirEvalError;
use mir::{MirEvalError, VTableMap};
use rustc_hash::FxHashSet;
use traits::FnTrait;
use triomphe::Arc;
use utils::Generics;
use crate::{
@ -60,6 +64,7 @@ pub use autoderef::autoderef;
pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use infer::{
closure::{CaptureKind, CapturedItem},
could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
InferenceResult, OverloadedDeref, PointerCast,
};
@ -148,14 +153,26 @@ pub type Guidance = chalk_solve::Guidance<Interner>;
pub type WhereClause = chalk_ir::WhereClause<Interner>;
/// A constant can have references to other things. The memory map's job is to hold
/// the neccessary bits of memory of the const eval session to keep the constant
/// the necessary bits of memory of the const eval session to keep the constant
/// meaningful.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct MemoryMap(pub HashMap<usize, Vec<u8>>);
pub struct MemoryMap {
pub memory: HashMap<usize, Vec<u8>>,
pub vtable: VTableMap,
}
impl MemoryMap {
fn insert(&mut self, addr: usize, x: Vec<u8>) {
self.0.insert(addr, x);
match self.memory.entry(addr) {
Entry::Occupied(mut e) => {
if e.get().len() < x.len() {
e.insert(x);
}
}
Entry::Vacant(e) => {
e.insert(x);
}
}
}
/// This function converts each address via a function `f`, which gets the byte intervals and assigns an address
@ -165,7 +182,15 @@ impl MemoryMap {
&self,
mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
) -> Result<HashMap<usize, usize>, MirEvalError> {
self.0.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
self.memory.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
}
fn get<'a>(&'a self, addr: usize, size: usize) -> Option<&'a [u8]> {
if size == 0 {
Some(&[])
} else {
self.memory.get(&addr)?.get(0..size)
}
}
}
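
The `insert` above keeps the longest byte run seen at each address; a self-contained sketch of just that policy over a plain `HashMap` (a hypothetical helper, not part of this diff):

use std::collections::{hash_map::Entry, HashMap};

// Keep-the-longest insertion: a later, larger allocation at the same address
// supersedes a smaller one, while a smaller one never shrinks what is stored.
fn insert_keep_longest(memory: &mut HashMap<usize, Vec<u8>>, addr: usize, x: Vec<u8>) {
    match memory.entry(addr) {
        Entry::Occupied(mut e) => {
            if e.get().len() < x.len() {
                e.insert(x);
            }
        }
        Entry::Vacant(e) => {
            e.insert(x);
        }
    }
}

fn main() {
    let mut m = HashMap::new();
    insert_keep_longest(&mut m, 0x10, vec![1, 2]);
    insert_keep_longest(&mut m, 0x10, vec![9]); // shorter: ignored
    insert_keep_longest(&mut m, 0x10, vec![3, 4, 5]); // longer: replaces
    assert_eq!(m[&0x10], vec![3, 4, 5]);
}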
@ -173,6 +198,9 @@ impl MemoryMap {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstScalar {
Bytes(Vec<u8>, MemoryMap),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
UnevaluatedConst(GeneralConstId, Substitution),
/// Case of an unknown value that rustc might know but we don't
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
@ -283,16 +311,19 @@ impl CallableSig {
pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
CallableSig {
// FIXME: what to do about lifetime params? -> return PolyFnSig
params_and_return: fn_ptr
.substitution
.clone()
.shifted_out_to(Interner, DebruijnIndex::ONE)
.expect("unexpected lifetime vars in fn ptr")
.0
.as_slice(Interner)
.iter()
.map(|arg| arg.assert_ty_ref(Interner).clone())
.collect(),
// FIXME: use `Arc::from_iter` when it becomes available
params_and_return: Arc::from(
fn_ptr
.substitution
.clone()
.shifted_out_to(Interner, DebruijnIndex::ONE)
.expect("unexpected lifetime vars in fn ptr")
.0
.as_slice(Interner)
.iter()
.map(|arg| arg.assert_ty_ref(Interner).clone())
.collect::<Vec<_>>(),
),
is_varargs: fn_ptr.sig.variadic,
safety: fn_ptr.sig.safety,
}
@ -576,15 +607,19 @@ where
}
pub fn callable_sig_from_fnonce(
self_ty: &Ty,
mut self_ty: &Ty,
env: Arc<TraitEnvironment>,
db: &dyn HirDatabase,
) -> Option<CallableSig> {
if let Some((ty, _, _)) = self_ty.as_reference() {
// This will happen when the type implements `Fn` or `FnMut`, since we add an autoborrow adjustment
self_ty = ty;
}
let krate = env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
let mut table = InferenceTable::new(db, env.clone());
let mut table = InferenceTable::new(db, env);
let b = TyBuilder::trait_ref(db, fn_once_trait);
if b.remaining() != 2 {
return None;

View file

@ -8,7 +8,6 @@
use std::{
cell::{Cell, RefCell, RefMut},
iter,
sync::Arc,
};
use base_db::CrateId;
@ -18,19 +17,20 @@ use chalk_ir::{
use either::Either;
use hir_def::{
adt::StructKind,
body::{Expander, LowerCtx},
builtin_type::BuiltinType,
data::adt::StructKind,
expander::Expander,
generics::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
},
lang_item::{lang_attr, LangItem},
path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
nameres::MacroSubNs,
path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs},
type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef},
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId,
TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
GenericDefId, HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId,
StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
};
use hir_expand::{name::Name, ExpandResult};
use intern::Interned;
@ -39,6 +39,7 @@ use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use stdx::{impl_from, never};
use syntax::ast;
use triomphe::Arc;
use crate::{
all_super_traits,
@ -103,7 +104,7 @@ impl ImplTraitLoweringState {
#[derive(Debug)]
pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
pub resolver: &'a Resolver,
resolver: &'a Resolver,
in_binders: DebruijnIndex,
/// Note: Conceptually, it's conceivable that we could be in a location where
/// some type params should be represented as placeholders, and others
@ -378,10 +379,19 @@ impl<'a> TyLoweringContext<'a> {
};
let ty = {
let macro_call = macro_call.to_node(self.db.upcast());
match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
let resolver = |path| {
self.resolver.resolve_path_as_macro(
self.db.upcast(),
&path,
Some(MacroSubNs::Bang),
)
};
match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver)
{
Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
let ctx = LowerCtx::new(self.db.upcast(), expander.current_file_id());
let type_ref = TypeRef::from_ast(&ctx, expanded);
let ctx = expander.ctx(self.db.upcast());
// FIXME: Report syntax errors in expansion here
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
drop(expander);
let ty = self.lower_ty(&type_ref);
@ -425,11 +435,10 @@ impl<'a> TyLoweringContext<'a> {
if path.segments().len() > 1 {
return None;
}
let resolution =
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
Some((it, None)) => it,
_ => return None,
};
let resolution = match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some((it, None)) => it,
_ => return None,
};
match resolution {
TypeNs::GenericParam(param_id) => Some(param_id.into()),
_ => None,
@ -608,7 +617,7 @@ impl<'a> TyLoweringContext<'a> {
}
let (resolution, remaining_index) =
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some(it) => it,
None => return (TyKind::Error.intern(Interner), None),
};
@ -716,7 +725,7 @@ impl<'a> TyLoweringContext<'a> {
resolved: ValueTyDefId,
infer_args: bool,
) -> Substitution {
let last = path.segments().last().expect("path should have at least one segment");
let last = path.segments().last();
let (segment, generic_def) = match resolved {
ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
ValueTyDefId::StructId(it) => (last, Some(it.into())),
@ -732,13 +741,20 @@ impl<'a> TyLoweringContext<'a> {
let len = path.segments().len();
let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
let segment = match penultimate {
Some(segment) if segment.args_and_bindings.is_some() => segment,
Some(segment) if segment.args_and_bindings.is_some() => Some(segment),
_ => last,
};
(segment, Some(var.parent.into()))
}
};
self.substs_from_path_segment(segment, generic_def, infer_args, None)
if let Some(segment) = segment {
self.substs_from_path_segment(segment, generic_def, infer_args, None)
} else if let Some(generic_def) = generic_def {
// lang item
self.substs_from_args_and_bindings(None, Some(generic_def), infer_args, None)
} else {
Substitution::empty(Interner)
}
}
fn substs_from_path_segment(
@ -747,6 +763,21 @@ impl<'a> TyLoweringContext<'a> {
def: Option<GenericDefId>,
infer_args: bool,
explicit_self_ty: Option<Ty>,
) -> Substitution {
self.substs_from_args_and_bindings(
segment.args_and_bindings,
def,
infer_args,
explicit_self_ty,
)
}
fn substs_from_args_and_bindings(
&self,
args_and_bindings: Option<&GenericArgs>,
def: Option<GenericDefId>,
infer_args: bool,
explicit_self_ty: Option<Ty>,
) -> Substitution {
// Remember that the item's own generic args come before its parent's.
let mut substs = Vec::new();
@ -780,7 +811,7 @@ impl<'a> TyLoweringContext<'a> {
};
let mut had_explicit_args = false;
if let Some(generic_args) = &segment.args_and_bindings {
if let Some(generic_args) = &args_and_bindings {
if !generic_args.has_self_type {
fill_self_params();
}
@ -879,12 +910,11 @@ impl<'a> TyLoweringContext<'a> {
path: &Path,
explicit_self_ty: Option<Ty>,
) -> Option<TraitRef> {
let resolved =
match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
// FIXME(trait_alias): We need to handle trait alias here.
TypeNs::TraitId(tr) => tr,
_ => return None,
};
let resolved = match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path)? {
// FIXME(trait_alias): We need to handle trait alias here.
TypeNs::TraitId(tr) => tr,
_ => return None,
};
let segment = path.segments().last().expect("path should have at least one segment");
Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
}
@ -968,7 +998,7 @@ impl<'a> TyLoweringContext<'a> {
// ignore `T: Drop` or `T: Destruct` bounds.
// - `T: ~const Drop` has a special meaning in Rust 1.61 that we don't implement.
// (So ideally, we'd only ignore `~const Drop` here)
// - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
// - `Destruct` impls are built-in in 1.62 (current nightly as of 08-04-2022), so until
// the builtin impls are supported by Chalk, we ignore them here.
if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
if matches!(lang, LangItem::Drop | LangItem::Destruct) {
@ -1062,23 +1092,23 @@ impl<'a> TyLoweringContext<'a> {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution,
};
let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity(
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
);
if let Some(type_ref) = &binding.type_ref {
let ty = self.lower_ty(type_ref);
let alias_eq =
AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
}
for bound in binding.bounds.iter() {
preds.extend(self.lower_type_bound(
predicates.extend(self.lower_type_bound(
bound,
TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
false,
));
}
preds
predicates
})
}
@ -1145,7 +1175,7 @@ impl<'a> TyLoweringContext<'a> {
return None;
}
// As multiple occurrences of the same auto traits *are* permitted, we dedulicate the
// As multiple occurrences of the same auto traits *are* permitted, we deduplicate the
// bounds. We shouldn't have repeated elements besides auto traits at this point.
bounds.dedup();
@ -1381,9 +1411,7 @@ pub(crate) fn generic_predicates_for_param_query(
Some(it) => it,
None => return true,
};
let tr = match resolver
.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
{
let tr = match resolver.resolve_path_in_type_ns_fully(db.upcast(), path) {
Some(TypeNs::TraitId(tr)) => tr,
_ => return false,
};
@ -1420,7 +1448,19 @@ pub(crate) fn generic_predicates_for_param_recover(
_param_id: &TypeOrConstParamId,
_assoc_name: &Option<Name>,
) -> Arc<[Binders<QuantifiedWhereClause>]> {
Arc::new([])
// FIXME: use `Arc::from_iter` when it becomes available
Arc::from(vec![])
}
pub(crate) fn trait_environment_for_body_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Arc<TraitEnvironment> {
let Some(def) = def.as_generic_def_id() else {
let krate = def.module(db.upcast()).krate();
return Arc::new(TraitEnvironment::empty(krate));
};
db.trait_environment(def)
}
pub(crate) fn trait_environment_query(
@ -1478,7 +1518,7 @@ pub(crate) fn trait_environment_query(
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
Arc::new(TraitEnvironment { krate, traits_from_clauses: traits_in_scope, env })
Arc::new(TraitEnvironment { krate, block: None, traits_from_clauses: traits_in_scope, env })
}
/// Resolve the where clause(s) of an item with generics.
@ -1547,30 +1587,33 @@ pub(crate) fn generic_defaults_query(
let generic_params = generics(db.upcast(), def);
let parent_start_idx = generic_params.len_self();
let defaults = generic_params
.iter()
.enumerate()
.map(|(idx, (id, p))| {
let p = match p {
TypeOrConstParamData::TypeParamData(p) => p,
TypeOrConstParamData::ConstParamData(_) => {
// FIXME: implement const generic defaults
let val = unknown_const_as_generic(
db.const_param_ty(ConstParamId::from_unchecked(id)),
);
return make_binders(db, &generic_params, val);
}
};
let mut ty =
p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
let defaults = Arc::from(
generic_params
.iter()
.enumerate()
.map(|(idx, (id, p))| {
let p = match p {
TypeOrConstParamData::TypeParamData(p) => p,
TypeOrConstParamData::ConstParamData(_) => {
// FIXME: implement const generic defaults
let val = unknown_const_as_generic(
db.const_param_ty(ConstParamId::from_unchecked(id)),
);
return make_binders(db, &generic_params, val);
}
};
let mut ty =
p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
// Each default can only refer to previous parameters.
// Type variable default referring to parameter coming
// after it is forbidden (FIXME: report diagnostic)
ty = fallback_bound_vars(ty, idx, parent_start_idx);
crate::make_binders(db, &generic_params, ty.cast(Interner))
})
.collect();
// Each default can only refer to previous parameters.
// Type variable default referring to parameter coming
// after it is forbidden (FIXME: report diagnostic)
ty = fallback_bound_vars(ty, idx, parent_start_idx);
crate::make_binders(db, &generic_params, ty.cast(Interner))
})
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
);
defaults
}
@ -1583,18 +1626,21 @@ pub(crate) fn generic_defaults_recover(
let generic_params = generics(db.upcast(), *def);
// FIXME: this code is not covered in tests.
// we still need one default per parameter
let defaults = generic_params
.iter_id()
.map(|id| {
let val = match id {
Either::Left(_) => {
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
};
crate::make_binders(db, &generic_params, val)
})
.collect();
let defaults = Arc::from(
generic_params
.iter_id()
.map(|id| {
let val = match id {
Either::Left(_) => {
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
};
crate::make_binders(db, &generic_params, val)
})
// FIXME: use `Arc::from_iter` when it becomes available
.collect::<Vec<_>>(),
);
defaults
}
@ -1605,7 +1651,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
let ctx_params = TyLoweringContext::new(db, &resolver)
.with_impl_trait_mode(ImplTraitLoweringMode::Variable)
.with_type_param_mode(ParamLoweringMode::Variable);
let params = data.params.iter().map(|(_, tr)| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
let params = data.params.iter().map(|tr| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
let ctx_ret = TyLoweringContext::new(db, &resolver)
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable);
@ -1948,7 +1994,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
// as types. Maybe here is not the best place to do it, but
// it works.
if let TypeRef::Path(p) = t {
let p = p.mod_path();
let p = p.mod_path()?;
if p.kind == PathKind::Plain {
if let [n] = p.segments() {
let c = ConstRefOrPath::Path(n.clone());
@ -1977,8 +2023,16 @@ pub(crate) fn const_or_path_to_chalk(
ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
ConstRefOrPath::Path(n) => {
let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
path_to_const(db, resolver, &path, mode, args, debruijn)
.unwrap_or_else(|| unknown_const(expected_ty))
path_to_const(
db,
resolver,
&Path::from_known_path_with_no_generic(path),
mode,
args,
debruijn,
expected_ty.clone(),
)
.unwrap_or_else(|| unknown_const(expected_ty))
}
}
}

View file

@ -2,25 +2,28 @@
//! For details about how this works in rustc, see the method lookup page in the
//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
//! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs.
use std::{ops::ControlFlow, sync::Arc};
use std::ops::ControlFlow;
use base_db::{CrateId, Edition};
use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex};
use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex, WhereClause};
use hir_def::{
data::ImplData, item_scope::ItemScope, lang_item::LangItem, nameres::DefMap, AssocItemId,
BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId,
ModuleId, TraitId,
data::{adt::StructFlags, ImplData},
item_scope::ItemScope,
nameres::DefMap,
AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup,
ModuleDefId, ModuleId, TraitId,
};
use hir_expand::name::Name;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use stdx::never;
use triomphe::Arc;
use crate::{
autoderef::{self, AutoderefKind},
db::HirDatabase,
from_chalk_trait_id, from_foreign_def_id,
infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
infer::{unify::InferenceTable, Adjust, Adjustment, OverloadedDeref, PointerCast},
primitive::{FloatTy, IntTy, UintTy},
static_lifetime, to_chalk_trait_id,
utils::all_super_traits,
@ -147,31 +150,30 @@ impl TraitImpls {
Arc::new(impls)
}
pub(crate) fn trait_impls_in_block_query(
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
pub(crate) fn trait_impls_in_block_query(db: &dyn HirDatabase, block: BlockId) -> Arc<Self> {
let _p = profile::span("trait_impls_in_block_query");
let mut impls = Self { map: FxHashMap::default() };
let block_def_map = db.block_def_map(block)?;
let block_def_map = db.block_def_map(block);
impls.collect_def_map(db, &block_def_map);
impls.shrink_to_fit();
Some(Arc::new(impls))
Arc::new(impls)
}
pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
pub(crate) fn trait_impls_in_deps_query(
db: &dyn HirDatabase,
krate: CrateId,
) -> Arc<[Arc<Self>]> {
let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
let crate_graph = db.crate_graph();
let mut res = Self { map: FxHashMap::default() };
for krate in crate_graph.transitive_deps(krate) {
res.merge(&db.trait_impls_in_crate(krate));
}
res.shrink_to_fit();
Arc::new(res)
// FIXME: use `Arc::from_iter` when it becomes available
Arc::from(
crate_graph
.transitive_deps(krate)
.map(|krate| db.trait_impls_in_crate(krate))
.collect::<Vec<_>>(),
)
}
fn shrink_to_fit(&mut self) {
@ -185,6 +187,15 @@ impl TraitImpls {
fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
for (_module_id, module_data) in def_map.modules() {
for impl_id in module_data.scope.impls() {
// Reservation impls should be ignored during trait resolution, so we never need
// them during type analysis. See rust-lang/rust#64631 for details.
//
// FIXME: Reservation impls should be considered during coherence checks. If we are
// (ever) to implement coherence checks, this filtering should be done by the trait
// solver.
if db.attrs(impl_id.into()).by_key("rustc_reservation_impl").exists() {
continue;
}
let target_trait = match db.impl_trait(impl_id) {
Some(tr) => tr.skip_binders().hir_trait_id(),
None => continue,
@ -210,15 +221,6 @@ impl TraitImpls {
}
}
fn merge(&mut self, other: &Self) {
for (trait_, other_map) in &other.map {
let map = self.map.entry(*trait_).or_default();
for (fp, impls) in other_map {
map.entry(*fp).or_default().extend(impls);
}
}
}
/// Queries all trait impls for the given type.
pub fn for_self_ty_without_blanket_impls(
&self,
@ -271,6 +273,7 @@ pub struct InherentImpls {
impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("inherent_impls_in_crate_query").detail(|| format!("{krate:?}"));
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate);
@ -280,17 +283,15 @@ impl InherentImpls {
Arc::new(impls)
}
pub(crate) fn inherent_impls_in_block_query(
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
pub(crate) fn inherent_impls_in_block_query(db: &dyn HirDatabase, block: BlockId) -> Arc<Self> {
let _p = profile::span("inherent_impls_in_block_query");
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
if let Some(block_def_map) = db.block_def_map(block) {
impls.collect_def_map(db, &block_def_map);
impls.shrink_to_fit();
return Some(Arc::new(impls));
}
None
let block_def_map = db.block_def_map(block);
impls.collect_def_map(db, &block_def_map);
impls.shrink_to_fit();
Arc::new(impls)
}
fn shrink_to_fit(&mut self) {
@ -404,12 +405,14 @@ pub fn def_crates(
match ty.kind(Interner) {
&TyKind::Adt(AdtId(def_id), _) => {
let rustc_has_incoherent_inherent_impls = match def_id {
hir_def::AdtId::StructId(id) => {
db.struct_data(id).rustc_has_incoherent_inherent_impls
}
hir_def::AdtId::UnionId(id) => {
db.union_data(id).rustc_has_incoherent_inherent_impls
}
hir_def::AdtId::StructId(id) => db
.struct_data(id)
.flags
.contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
hir_def::AdtId::UnionId(id) => db
.union_data(id)
.flags
.contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
hir_def::AdtId::EnumId(id) => db.enum_data(id).rustc_has_incoherent_inherent_impls,
};
Some(if rustc_has_incoherent_inherent_impls {
@ -449,55 +452,6 @@ pub fn def_crates(
}
}
pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> {
use hir_expand::name;
use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
Some(match op {
BinaryOp::LogicOp(_) => return None,
BinaryOp::ArithOp(aop) => match aop {
ArithOp::Add => (name![add], LangItem::Add),
ArithOp::Mul => (name![mul], LangItem::Mul),
ArithOp::Sub => (name![sub], LangItem::Sub),
ArithOp::Div => (name![div], LangItem::Div),
ArithOp::Rem => (name![rem], LangItem::Rem),
ArithOp::Shl => (name![shl], LangItem::Shl),
ArithOp::Shr => (name![shr], LangItem::Shr),
ArithOp::BitXor => (name![bitxor], LangItem::BitXor),
ArithOp::BitOr => (name![bitor], LangItem::BitOr),
ArithOp::BitAnd => (name![bitand], LangItem::BitAnd),
},
BinaryOp::Assignment { op: Some(aop) } => match aop {
ArithOp::Add => (name![add_assign], LangItem::AddAssign),
ArithOp::Mul => (name![mul_assign], LangItem::MulAssign),
ArithOp::Sub => (name![sub_assign], LangItem::SubAssign),
ArithOp::Div => (name![div_assign], LangItem::DivAssign),
ArithOp::Rem => (name![rem_assign], LangItem::RemAssign),
ArithOp::Shl => (name![shl_assign], LangItem::ShlAssign),
ArithOp::Shr => (name![shr_assign], LangItem::ShrAssign),
ArithOp::BitXor => (name![bitxor_assign], LangItem::BitXorAssign),
ArithOp::BitOr => (name![bitor_assign], LangItem::BitOrAssign),
ArithOp::BitAnd => (name![bitand_assign], LangItem::BitAndAssign),
},
BinaryOp::CmpOp(cop) => match cop {
CmpOp::Eq { negated: false } => (name![eq], LangItem::PartialEq),
CmpOp::Eq { negated: true } => (name![ne], LangItem::PartialEq),
CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
(name![le], LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
(name![lt], LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
(name![ge], LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
(name![gt], LangItem::PartialOrd)
}
},
BinaryOp::Assignment { op: None } => return None,
})
}
/// Look up the method with the given name.
pub(crate) fn lookup_method(
db: &dyn HirDatabase,
@ -600,9 +554,9 @@ impl ReceiverAdjustments {
}
}
if let Some(m) = self.autoref {
ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
adjust
.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
let a = Adjustment::borrow(m, ty);
ty = a.target.clone();
adjust.push(a);
}
if self.unsize_array {
ty = 'x: {
@ -692,6 +646,39 @@ pub fn lookup_impl_const(
.unwrap_or((const_id, subs))
}
/// Checks whether the self parameter of a `Trait` method is `dyn Trait`, in which case the
/// method should be called through the vtable.
pub fn is_dyn_method(
db: &dyn HirDatabase,
_env: Arc<TraitEnvironment>,
func: FunctionId,
fn_subst: Substitution,
) -> Option<usize> {
let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
return None;
};
let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
let fn_params = fn_subst.len(Interner) - trait_params;
let trait_ref = TraitRef {
trait_id: to_chalk_trait_id(trait_id),
substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)),
};
let self_ty = trait_ref.self_type_parameter(Interner);
if let TyKind::Dyn(d) = self_ty.kind(Interner) {
let is_my_trait_in_bounds =
d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() {
// rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter
// what the generics are, we can be sure that the method comes from the vtable.
WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id,
_ => false,
});
if is_my_trait_in_bounds {
return Some(fn_params);
}
}
None
}
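
A worked example of the substitution split, sketched below with a hypothetical trait; this assumes the convention (noted in lower.rs above) that an item's own generic args come before its parent's:

// trait Foo<T> { fn m<U>(&self); }
//
// fn_subst         = [U, Self, T]   // the method's own arg, then the trait's
// trait_params     = 2              // implicit Self + T
// fn_params        = 3 - 2 = 1
// fn_subst.skip(1) = [Self, T]      // exactly the TraitRef substitution,
//                                   // whose self type is checked for `dyn`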
/// Looks up the impl method that actually runs for the trait method `func`.
///
/// Returns `func` if it's not a method defined in a trait or the lookup failed.
@ -701,9 +688,8 @@ pub fn lookup_impl_method(
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution) {
let trait_id = match func.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
_ => return (func, fn_subst),
let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
return (func, fn_subst)
};
let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
let fn_params = fn_subst.len(Interner) - trait_params;
@ -713,7 +699,7 @@ pub fn lookup_impl_method(
};
let name = &db.function_data(func).name;
lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
.and_then(|assoc| {
if let (AssocItemId::FunctionId(id), subst) = assoc {
Some((id, subst))
@ -721,7 +707,16 @@ pub fn lookup_impl_method(
None
}
})
.unwrap_or((func, fn_subst))
else {
return (func, fn_subst);
};
(
impl_fn,
Substitution::from_iter(
Interner,
fn_subst.iter(Interner).take(fn_params).chain(impl_subst.iter(Interner)),
),
)
}
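
Continuing the same hypothetical example, the tail swap performed here keeps the method's own arguments and replaces the trait-level arguments with the impl's substitution:

// Resolving `<S as Foo<i32>>::m::<u8>` against `impl Foo<i32> for S`:
//
// fn_subst   = [u8, S, i32]          fn_params = 1
// impl_subst = []                    (this impl has no generics of its own)
// result     = take(1) ++ impl_subst = [u8], the substitution for the impl's `m`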
fn lookup_impl_assoc_item_for_trait_ref(
@ -730,10 +725,20 @@ fn lookup_impl_assoc_item_for_trait_ref(
env: Arc<TraitEnvironment>,
name: &Name,
) -> Option<(AssocItemId, Substitution)> {
let hir_trait_id = trait_ref.hir_trait_id();
let self_ty = trait_ref.self_type_parameter(Interner);
let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
let impls = db.trait_impls_in_deps(env.krate);
let impls = impls.for_trait_and_self_ty(trait_ref.hir_trait_id(), self_ty_fp);
let self_impls = match self_ty.kind(Interner) {
TyKind::Adt(id, _) => {
id.0.module(db.upcast()).containing_block().map(|x| db.trait_impls_in_block(x))
}
_ => None,
};
let impls = impls
.iter()
.chain(self_impls.as_ref())
.flat_map(|impls| impls.for_trait_and_self_ty(hir_trait_id, self_ty_fp));
let table = InferenceTable::new(db, env);
@ -759,9 +764,8 @@ fn find_matching_impl(
actual_trait_ref: TraitRef,
) -> Option<(Arc<ImplData>, Substitution)> {
let db = table.db;
loop {
let impl_ = impls.next()?;
let r = table.run_in_snapshot(|table| {
impls.find_map(|impl_| {
table.run_in_snapshot(|table| {
let impl_data = db.impl_data(impl_);
let impl_substs =
TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build();
@ -778,12 +782,11 @@ fn find_matching_impl(
.into_iter()
.map(|b| b.cast(Interner));
let goal = crate::Goal::all(Interner, wcs);
table.try_obligation(goal).map(|_| (impl_data, table.resolve_completely(impl_substs)))
});
if r.is_some() {
break r;
}
}
table.try_obligation(goal.clone())?;
table.register_obligation(goal);
Some((impl_data, table.resolve_completely(impl_substs)))
})
})
}
fn is_inherent_impl_coherent(
@ -824,12 +827,14 @@ fn is_inherent_impl_coherent(
| TyKind::Scalar(_) => true,
&TyKind::Adt(AdtId(adt), _) => match adt {
hir_def::AdtId::StructId(it) => {
db.struct_data(it).rustc_has_incoherent_inherent_impls
}
hir_def::AdtId::UnionId(it) => {
db.union_data(it).rustc_has_incoherent_inherent_impls
}
hir_def::AdtId::StructId(id) => db
.struct_data(id)
.flags
.contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
hir_def::AdtId::UnionId(id) => db
.union_data(id)
.flags
.contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls,
},
TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
@ -963,7 +968,14 @@ fn iterate_method_candidates_with_autoref(
)
};
iterate_method_candidates_by_receiver(receiver_ty, first_adjustment.clone())?;
let mut maybe_reborrowed = first_adjustment.clone();
if let Some((_, _, m)) = receiver_ty.value.as_reference() {
// Prefer reborrowing references over moving them
maybe_reborrowed.autoref = Some(m);
maybe_reborrowed.autoderefs += 1;
}
iterate_method_candidates_by_receiver(receiver_ty, maybe_reborrowed)?;
let refed = Canonical {
value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
@ -1108,7 +1120,7 @@ fn iterate_trait_method_candidates(
};
if !known_implemented {
let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty);
if db.trait_solve(env.krate, goal.cast(Interner)).is_none() {
if db.trait_solve(env.krate, env.block, goal.cast(Interner)).is_none() {
continue 'traits;
}
}
@ -1180,23 +1192,19 @@ fn iterate_inherent_methods(
};
while let Some(block_id) = block {
if let Some(impls) = db.inherent_impls_in_block(block_id) {
impls_for_self_ty(
&impls,
self_ty,
table,
name,
receiver_ty,
receiver_adjustments.clone(),
module,
callback,
)?;
}
let impls = db.inherent_impls_in_block(block_id);
impls_for_self_ty(
&impls,
self_ty,
table,
name,
receiver_ty,
receiver_adjustments.clone(),
module,
callback,
)?;
block = db
.block_def_map(block_id)
.and_then(|map| map.parent())
.and_then(|module| module.containing_block());
block = db.block_def_map(block_id).parent().and_then(|module| module.containing_block());
}
for krate in def_crates {
@ -1274,7 +1282,7 @@ fn iterate_inherent_methods(
}
/// Returns the receiver type for the index trait call.
pub fn resolve_indexing_op(
pub(crate) fn resolve_indexing_op(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
ty: Canonical<Ty>,
@ -1284,8 +1292,11 @@ pub fn resolve_indexing_op(
let ty = table.instantiate_canonical(ty);
let deref_chain = autoderef_method_receiver(&mut table, ty);
for (ty, adj) in deref_chain {
let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
let goal = generic_implements_goal(db, table.trait_env.clone(), index_trait, &ty);
if db
.trait_solve(table.trait_env.krate, table.trait_env.block, goal.cast(Interner))
.is_some()
{
return Some(adj);
}
}
@ -1310,14 +1321,12 @@ fn is_valid_candidate(
) -> IsValidCandidate {
let db = table.db;
match item {
AssocItemId::FunctionId(m) => {
is_valid_fn_candidate(table, m, name, receiver_ty, self_ty, visible_from_module)
AssocItemId::FunctionId(f) => {
is_valid_fn_candidate(table, f, name, receiver_ty, self_ty, visible_from_module)
}
AssocItemId::ConstId(c) => {
let data = db.const_data(c);
check_that!(receiver_ty.is_none());
check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n)));
check_that!(name.map_or(true, |n| db.const_data(c).name.as_ref() == Some(n)));
if let Some(from_module) = visible_from_module {
if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) {
@ -1441,7 +1450,7 @@ pub fn implements_trait(
trait_: TraitId,
) -> bool {
let goal = generic_implements_goal(db, env.clone(), trait_, ty);
let solution = db.trait_solve(env.krate, goal.cast(Interner));
let solution = db.trait_solve(env.krate, env.block, goal.cast(Interner));
solution.is_some()
}
@ -1453,7 +1462,7 @@ pub fn implements_trait_unique(
trait_: TraitId,
) -> bool {
let goal = generic_implements_goal(db, env.clone(), trait_, ty);
let solution = db.trait_solve(env.krate, goal.cast(Interner));
let solution = db.trait_solve(env.krate, env.block, goal.cast(Interner));
matches!(solution, Some(crate::Solution::Unique(_)))
}

View file

@ -3,12 +3,15 @@
use std::{fmt::Display, iter};
use crate::{
infer::PointerCast, Const, ConstScalar, InferenceResult, Interner, MemoryMap, Substitution, Ty,
consteval::usize_const, db::HirDatabase, display::HirDisplay, infer::PointerCast,
lang_items::is_box, mapping::ToChalk, CallableDefId, ClosureId, Const, ConstScalar,
InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
use hir_def::{
expr::{BindingId, Expr, ExprId, Ordering, PatId},
DefWithBodyId, FieldId, UnionId, VariantId,
hir::{BindingId, Expr, ExprId, Ordering, PatId},
DefWithBodyId, FieldId, StaticId, UnionId, VariantId,
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
@ -16,12 +19,19 @@ mod eval;
mod lower;
mod borrowck;
mod pretty;
mod monomorphization;
pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError};
pub use lower::{lower_to_mir, mir_body_query, mir_body_recover, MirLowerError};
pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap};
pub use lower::{
lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
pub use monomorphization::{
monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query,
monomorphized_mir_body_query, monomorphized_mir_body_recover,
};
use smallvec::{smallvec, SmallVec};
use stdx::impl_from;
use stdx::{impl_from, never};
use super::consteval::{intern_const_scalar, try_const_usize};
@ -32,7 +42,7 @@ fn return_slot() -> LocalId {
LocalId::from_raw(RawIdx::from(0))
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Local {
pub ty: Ty,
}
@ -52,7 +62,7 @@ pub struct Local {
/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
/// something we can even decide without knowing more about Rust's memory model?
///
/// **Needs clarifiation:** Is loading a place that has its variant index set well-formed? Miri
/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Debug, PartialEq, Eq, Clone)]
@ -73,6 +83,9 @@ pub enum Operand {
Move(Place),
/// Constants are already semantically values, and remain unchanged.
Constant(Const),
/// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc
/// handles it with the `Constant` variant somehow.
Static(StaticId),
}
impl Operand {
@ -87,31 +100,141 @@ impl Operand {
fn const_zst(ty: Ty) -> Operand {
Self::from_bytes(vec![], ty)
}
fn from_fn(
db: &dyn HirDatabase,
func_id: hir_def::FunctionId,
generic_args: Substitution,
) -> Operand {
let ty =
chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args)
.intern(Interner);
Operand::from_bytes(vec![], ty)
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectionElem<V, T> {
Deref,
Field(FieldId),
TupleField(usize),
// FIXME: get rid of this, and use FieldId for tuples and closures
TupleOrClosureField(usize),
Index(V),
ConstantIndex { offset: u64, min_length: u64, from_end: bool },
Subslice { from: u64, to: u64, from_end: bool },
ConstantIndex { offset: u64, from_end: bool },
Subslice { from: u64, to: u64 },
//Downcast(Option<Symbol>, VariantIdx),
OpaqueCast(T),
}
impl<V, T> ProjectionElem<V, T> {
pub fn projected_ty(
&self,
base: Ty,
db: &dyn HirDatabase,
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
krate: CrateId,
) -> Ty {
match self {
ProjectionElem::Deref => match &base.data(Interner).kind {
TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
TyKind::Adt(adt, subst) if is_box(db, adt.0) => {
subst.at(Interner, 0).assert_ty_ref(Interner).clone()
}
_ => {
never!("Overloaded deref on type {} is not a projection", base.display(db));
return TyKind::Error.intern(Interner);
}
},
ProjectionElem::Field(f) => match &base.data(Interner).kind {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
}
_ => {
never!("Only adt has field");
return TyKind::Error.intern(Interner);
}
},
ProjectionElem::TupleOrClosureField(f) => match &base.data(Interner).kind {
TyKind::Tuple(_, subst) => subst
.as_slice(Interner)
.get(*f)
.map(|x| x.assert_ty_ref(Interner))
.cloned()
.unwrap_or_else(|| {
never!("Out of bound tuple field");
TyKind::Error.intern(Interner)
}),
TyKind::Closure(id, subst) => closure_field(*id, subst, *f),
_ => {
never!("Only tuple or closure has tuple or closure field");
return TyKind::Error.intern(Interner);
}
},
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => {
match &base.data(Interner).kind {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => {
never!("Overloaded index is not a projection");
return TyKind::Error.intern(Interner);
}
}
}
&ProjectionElem::Subslice { from, to } => match &base.data(Interner).kind {
TyKind::Array(inner, c) => {
let next_c = usize_const(
db,
match try_const_usize(db, c) {
None => None,
Some(x) => x.checked_sub(u128::from(from + to)),
},
krate,
);
TyKind::Array(inner.clone(), next_c).intern(Interner)
}
TyKind::Slice(_) => base.clone(),
_ => {
never!("Subslice projection should only happen on slice and array");
return TyKind::Error.intern(Interner);
}
},
ProjectionElem::OpaqueCast(_) => {
never!("We don't emit these yet");
return TyKind::Error.intern(Interner);
}
}
}
}
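
The projection rules above can be mirrored with ordinary Rust types; a small runnable sketch (illustrative only, using real tuples and arrays instead of `Ty`):

fn main() {
    let base: (i32, [u8; 4]) = (7, [1, 2, 3, 4]);
    let field: [u8; 4] = base.1;       // TupleOrClosureField(1): the field's type
    let elem: u8 = field[0];           // Index(_) / ConstantIndex: the element type
    let sub: &[u8] = &field[1..4 - 1]; // Subslice { from: 1, to: 1 }: len 4 - (1 + 1) = 2
    assert_eq!(sub.len(), 2);
    let _ = elem;
}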
type PlaceElem = ProjectionElem<LocalId, Ty>;
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Place {
pub local: LocalId,
pub projection: Vec<PlaceElem>,
pub projection: Box<[PlaceElem]>,
}
impl Place {
fn is_parent(&self, child: &Place) -> bool {
self.local == child.local && child.projection.starts_with(&self.projection)
}
fn iterate_over_parents(&self) -> impl Iterator<Item = Place> + '_ {
(0..self.projection.len())
.map(|x| &self.projection[0..x])
.map(|x| Place { local: self.local, projection: x.to_vec().into() })
}
fn project(&self, projection: PlaceElem) -> Place {
Place {
local: self.local,
projection: self.projection.iter().cloned().chain([projection]).collect(),
}
}
}
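
A minimal sketch of how these helpers relate, using simplified stand-ins (`u32` in place of `LocalId` and `PlaceElem`; not the actual types):

#[derive(Clone, Debug, PartialEq)]
struct DemoPlace { local: u32, projection: Vec<u32> }

impl DemoPlace {
    fn is_parent(&self, child: &DemoPlace) -> bool {
        self.local == child.local && child.projection.starts_with(&self.projection)
    }
    fn project(&self, p: u32) -> DemoPlace {
        DemoPlace {
            local: self.local,
            projection: self.projection.iter().copied().chain([p]).collect(),
        }
    }
}

fn main() {
    let base = DemoPlace { local: 1, projection: vec![] };
    let child = base.project(0).project(1);       // roughly `_1.0.1`
    assert!(base.is_parent(&child));              // `_1` is a parent
    assert!(base.project(0).is_parent(&child));   // so is `_1.0`
    assert!(!base.project(1).is_parent(&child));  // `_1.1` is not
}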
impl From<LocalId> for Place {
fn from(local: LocalId) -> Self {
Self { local, projection: vec![] }
Self { local, projection: vec![].into() }
}
}
@ -123,7 +246,7 @@ pub enum AggregateKind {
Tuple(Ty),
Adt(VariantId, Substitution),
Union(UnionId, FieldId),
//Closure(LocalDefId, SubstsRef),
Closure(Ty),
//Generator(LocalDefId, SubstsRef, Movability),
}
@ -197,7 +320,13 @@ impl SwitchTargets {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Terminator {
pub struct Terminator {
span: MirSpan,
kind: TerminatorKind,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TerminatorKind {
/// Block has one successor; we continue execution there.
Goto { target: BasicBlockId },
@ -320,7 +449,7 @@ pub enum Terminator {
/// These are owned by the callee, which is free to modify them.
/// This allows the memory occupied by "by-value" arguments to be
/// reused across function calls without duplicating the contents.
args: Vec<Operand>,
args: Box<[Operand]>,
/// Where the returned value will be written
destination: Place,
/// Where to go after this call returns. If none, the call necessarily diverges.
@ -418,7 +547,7 @@ pub enum Terminator {
},
}
#[derive(Debug, PartialEq, Eq, Clone)]
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub enum BorrowKind {
/// Data must be immutable and is aliasable.
Shared,
@ -564,6 +693,20 @@ pub enum BinOp {
Offset,
}
impl BinOp {
fn run_compare<T: PartialEq + PartialOrd>(&self, l: T, r: T) -> bool {
match self {
BinOp::Ge => l >= r,
BinOp::Gt => l > r,
BinOp::Le => l <= r,
BinOp::Lt => l < r,
BinOp::Eq => l == r,
BinOp::Ne => l != r,
x => panic!("`run_compare` called on operator {x:?}"),
}
}
}
impl Display for BinOp {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(match self {
@ -588,32 +731,32 @@ impl Display for BinOp {
}
}
impl From<hir_def::expr::ArithOp> for BinOp {
fn from(value: hir_def::expr::ArithOp) -> Self {
impl From<hir_def::hir::ArithOp> for BinOp {
fn from(value: hir_def::hir::ArithOp) -> Self {
match value {
hir_def::expr::ArithOp::Add => BinOp::Add,
hir_def::expr::ArithOp::Mul => BinOp::Mul,
hir_def::expr::ArithOp::Sub => BinOp::Sub,
hir_def::expr::ArithOp::Div => BinOp::Div,
hir_def::expr::ArithOp::Rem => BinOp::Rem,
hir_def::expr::ArithOp::Shl => BinOp::Shl,
hir_def::expr::ArithOp::Shr => BinOp::Shr,
hir_def::expr::ArithOp::BitXor => BinOp::BitXor,
hir_def::expr::ArithOp::BitOr => BinOp::BitOr,
hir_def::expr::ArithOp::BitAnd => BinOp::BitAnd,
hir_def::hir::ArithOp::Add => BinOp::Add,
hir_def::hir::ArithOp::Mul => BinOp::Mul,
hir_def::hir::ArithOp::Sub => BinOp::Sub,
hir_def::hir::ArithOp::Div => BinOp::Div,
hir_def::hir::ArithOp::Rem => BinOp::Rem,
hir_def::hir::ArithOp::Shl => BinOp::Shl,
hir_def::hir::ArithOp::Shr => BinOp::Shr,
hir_def::hir::ArithOp::BitXor => BinOp::BitXor,
hir_def::hir::ArithOp::BitOr => BinOp::BitOr,
hir_def::hir::ArithOp::BitAnd => BinOp::BitAnd,
}
}
}
impl From<hir_def::expr::CmpOp> for BinOp {
fn from(value: hir_def::expr::CmpOp) -> Self {
impl From<hir_def::hir::CmpOp> for BinOp {
fn from(value: hir_def::hir::CmpOp) -> Self {
match value {
hir_def::expr::CmpOp::Eq { negated: false } => BinOp::Eq,
hir_def::expr::CmpOp::Eq { negated: true } => BinOp::Ne,
hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: false } => BinOp::Ge,
hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: true } => BinOp::Gt,
hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: false } => BinOp::Le,
hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: true } => BinOp::Lt,
hir_def::hir::CmpOp::Eq { negated: false } => BinOp::Eq,
hir_def::hir::CmpOp::Eq { negated: true } => BinOp::Ne,
hir_def::hir::CmpOp::Ord { ordering: Ordering::Greater, strict: false } => BinOp::Ge,
hir_def::hir::CmpOp::Ord { ordering: Ordering::Greater, strict: true } => BinOp::Gt,
hir_def::hir::CmpOp::Ord { ordering: Ordering::Less, strict: false } => BinOp::Le,
hir_def::hir::CmpOp::Ord { ordering: Ordering::Less, strict: true } => BinOp::Lt,
}
}
}
@ -642,7 +785,6 @@ pub enum CastKind {
FloatToInt,
FloatToFloat,
IntToFloat,
PtrToPtr,
FnPtrToPtr,
}
@ -653,13 +795,8 @@ pub enum Rvalue {
/// Creates an array where each element is the value of the operand.
///
/// This is the cause of a bug in the case where the repetition count is zero because the value
/// is not dropped, see [#74836].
///
/// Corresponds to source code like `[x; 32]`.
///
/// [#74836]: https://github.com/rust-lang/rust/issues/74836
//Repeat(Operand, ty::Const),
Repeat(Operand, Const),
/// Creates a reference of the indicated kind to the place.
///
@ -768,7 +905,7 @@ pub enum Rvalue {
///
/// Disallowed after deaggregation for all aggregate kinds except `Array` and `Generator`. After
/// generator lowering, `Generator` aggregate kinds are disallowed too.
Aggregate(AggregateKind, Vec<Operand>),
Aggregate(AggregateKind, Box<[Operand]>),
/// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
///
@ -777,6 +914,9 @@ pub enum Rvalue {
/// affects alias analysis.
ShallowInitBox(Operand, Ty),
/// NON STANDARD: allocates memory with the type's layout, and shallow-initializes the box with the resulting pointer.
ShallowInitBoxWithAlloc(Ty),
/// A CopyForDeref is equivalent to a read from a place at the
/// codegen level, but is treated specially by drop elaboration. When such a read happens, it
/// is guaranteed (via nature of the mir_opt `Derefer` in rustc_mir_transform/src/deref_separator)
@ -816,7 +956,7 @@ pub struct Statement {
pub span: MirSpan,
}
#[derive(Debug, Default, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct BasicBlock {
/// List of statements in this block.
pub statements: Vec<Statement>,
@ -838,19 +978,118 @@ pub struct BasicBlock {
pub is_cleanup: bool,
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody {
pub basic_blocks: Arena<BasicBlock>,
pub locals: Arena<Local>,
pub start_block: BasicBlockId,
pub owner: DefWithBodyId,
pub arg_count: usize,
pub binding_locals: ArenaMap<BindingId, LocalId>,
pub param_locals: Vec<LocalId>,
/// This field stores the closures directly owned by this body. It is used
/// when traversing every MIR body.
pub closures: Vec<ClosureId>,
}
fn const_as_usize(c: &Const) -> usize {
try_const_usize(c).unwrap() as usize
impl MirBody {
fn walk_places(&mut self, mut f: impl FnMut(&mut Place)) {
fn for_operand(op: &mut Operand, f: &mut impl FnMut(&mut Place)) {
match op {
Operand::Copy(p) | Operand::Move(p) => {
f(p);
}
Operand::Constant(_) | Operand::Static(_) => (),
}
}
for (_, block) in self.basic_blocks.iter_mut() {
for statement in &mut block.statements {
match &mut statement.kind {
StatementKind::Assign(p, r) => {
f(p);
match r {
Rvalue::ShallowInitBoxWithAlloc(_) => (),
Rvalue::ShallowInitBox(o, _)
| Rvalue::UnaryOp(_, o)
| Rvalue::Cast(_, o, _)
| Rvalue::Repeat(o, _)
| Rvalue::Use(o) => for_operand(o, &mut f),
Rvalue::CopyForDeref(p)
| Rvalue::Discriminant(p)
| Rvalue::Len(p)
| Rvalue::Ref(_, p) => f(p),
Rvalue::CheckedBinaryOp(_, o1, o2) => {
for_operand(o1, &mut f);
for_operand(o2, &mut f);
}
Rvalue::Aggregate(_, ops) => {
for op in ops.iter_mut() {
for_operand(op, &mut f);
}
}
}
}
StatementKind::Deinit(p) => f(p),
StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
}
match &mut block.terminator {
Some(x) => match &mut x.kind {
TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, &mut f),
TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::GeneratorDrop
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable => (),
TerminatorKind::Drop { place, .. } => {
f(place);
}
TerminatorKind::DropAndReplace { place, value, .. } => {
f(place);
for_operand(value, &mut f);
}
TerminatorKind::Call { func, args, destination, .. } => {
for_operand(func, &mut f);
args.iter_mut().for_each(|x| for_operand(x, &mut f));
f(destination);
}
TerminatorKind::Assert { cond, .. } => {
for_operand(cond, &mut f);
}
TerminatorKind::Yield { value, resume_arg, .. } => {
for_operand(value, &mut f);
f(resume_arg);
}
},
None => (),
}
}
}
fn shrink_to_fit(&mut self) {
let MirBody {
basic_blocks,
locals,
start_block: _,
owner: _,
binding_locals,
param_locals,
closures,
} = self;
basic_blocks.shrink_to_fit();
locals.shrink_to_fit();
binding_locals.shrink_to_fit();
param_locals.shrink_to_fit();
closures.shrink_to_fit();
for (_, b) in basic_blocks.iter_mut() {
let BasicBlock { statements, terminator: _, is_cleanup: _ } = b;
statements.shrink_to_fit();
}
}
}
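A quick usage sketch of the `walk_places` visitor above (hypothetical crate-internal helper; assumes the `rustc_hash` dependency that hir-ty already uses): collect every local mentioned in any place of the body.
// Hypothetical helper, a minimal sketch of driving walk_places: gather the
// set of locals that occur in any place of the body.
fn locals_mentioned(body: &mut MirBody) -> rustc_hash::FxHashSet<LocalId> {
    let mut seen = rustc_hash::FxHashSet::default();
    body.walk_places(|place| {
        seen.insert(place.local);
    });
    seen
}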
#[derive(Debug, PartialEq, Eq, Clone, Copy)]

View file

@ -3,17 +3,20 @@
// Currently it is an ad-hoc implementation, only useful for mutability analysis. Feel free to remove all of these
// if needed for implementing a proper borrow checker.
use std::sync::Arc;
use std::iter;
use hir_def::DefWithBodyId;
use hir_def::{DefWithBodyId, HasModule};
use la_arena::ArenaMap;
use stdx::never;
use triomphe::Arc;
use crate::db::HirDatabase;
use crate::{
db::HirDatabase, mir::Operand, utils::ClosureSubst, ClosureId, Interner, Ty, TyExt, TypeFlags,
};
use super::{
BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, Place, ProjectionElem,
Rvalue, StatementKind, Terminator,
Rvalue, StatementKind, TerminatorKind,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@ -23,26 +26,167 @@ pub enum MutabilityReason {
Not,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MovedOutOfRef {
pub ty: Ty,
pub span: MirSpan,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult {
pub mir_body: Arc<MirBody>,
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef>,
}
fn all_mir_bodies(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Box<dyn Iterator<Item = Result<Arc<MirBody>, MirLowerError>> + '_> {
fn for_closure(
db: &dyn HirDatabase,
c: ClosureId,
) -> Box<dyn Iterator<Item = Result<Arc<MirBody>, MirLowerError>> + '_> {
match db.mir_body_for_closure(c) {
Ok(body) => {
let closures = body.closures.clone();
Box::new(
iter::once(Ok(body))
.chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
)
}
Err(e) => Box::new(iter::once(Err(e))),
}
}
match db.mir_body(def) {
Ok(body) => {
let closures = body.closures.clone();
Box::new(
iter::once(Ok(body)).chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
)
}
Err(e) => Box::new(iter::once(Err(e))),
}
}
pub fn borrowck_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Result<Arc<BorrowckResult>, MirLowerError> {
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
let _p = profile::span("borrowck_query");
let body = db.mir_body(def)?;
let r = BorrowckResult { mutability_of_locals: mutability_of_locals(&body), mir_body: body };
Ok(Arc::new(r))
let r = all_mir_bodies(db, def)
.map(|body| {
let body = body?;
Ok(BorrowckResult {
mutability_of_locals: mutability_of_locals(db, &body),
moved_out_of_ref: moved_out_of_ref(db, &body),
mir_body: body,
})
})
.collect::<Result<Vec<_>, MirLowerError>>()?;
Ok(r.into())
}
fn is_place_direct(lvalue: &Place) -> bool {
!lvalue.projection.iter().any(|x| *x == ProjectionElem::Deref)
fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> {
let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op {
Operand::Copy(p) | Operand::Move(p) => {
let mut ty: Ty = body.locals[p.local].ty.clone();
let mut is_dereference_of_ref = false;
for proj in &*p.projection {
if *proj == ProjectionElem::Deref && ty.as_reference().is_some() {
is_dereference_of_ref = true;
}
ty = proj.projected_ty(
ty,
db,
|c, subst, f| {
let (def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
},
body.owner.module(db.upcast()).krate(),
);
}
if is_dereference_of_ref
&& !ty.clone().is_copy(db, body.owner)
&& !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
{
result.push(MovedOutOfRef { span, ty });
}
}
Operand::Constant(_) | Operand::Static(_) => (),
};
for (_, block) in body.basic_blocks.iter() {
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(_, r) => match r {
Rvalue::ShallowInitBoxWithAlloc(_) => (),
Rvalue::ShallowInitBox(o, _)
| Rvalue::UnaryOp(_, o)
| Rvalue::Cast(_, o, _)
| Rvalue::Repeat(o, _)
| Rvalue::Use(o) => for_operand(o, statement.span),
Rvalue::CopyForDeref(_)
| Rvalue::Discriminant(_)
| Rvalue::Len(_)
| Rvalue::Ref(_, _) => (),
Rvalue::CheckedBinaryOp(_, o1, o2) => {
for_operand(o1, statement.span);
for_operand(o2, statement.span);
}
Rvalue::Aggregate(_, ops) => {
for op in ops.iter() {
for_operand(op, statement.span);
}
}
},
StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
}
match &block.terminator {
Some(terminator) => match &terminator.kind {
TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span),
TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::GeneratorDrop
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. } => (),
TerminatorKind::DropAndReplace { value, .. } => {
for_operand(value, terminator.span);
}
TerminatorKind::Call { func, args, .. } => {
for_operand(func, terminator.span);
args.iter().for_each(|x| for_operand(x, terminator.span));
}
TerminatorKind::Assert { cond, .. } => {
for_operand(cond, terminator.span);
}
TerminatorKind::Yield { value, .. } => {
for_operand(value, terminator.span);
}
},
None => (),
}
}
result
}
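For orientation, a sketch of the source pattern `moved_out_of_ref` reports: reading a non-`Copy` value out from behind a reference, which is exactly a `Deref` projection on a reference type with a non-`Copy` projected type.
// Sketch: `*s` moves the non-Copy String out from behind a shared reference,
// so the walk above records a MovedOutOfRef at this span.
fn take(s: &String) -> String {
    *s // rustc: cannot move out of `*s` which is behind a shared reference
}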
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProjectionCase {
/// Projection is a local
Direct,
@ -52,20 +196,39 @@ enum ProjectionCase {
Indirect,
}
fn place_case(lvalue: &Place) -> ProjectionCase {
fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> ProjectionCase {
let mut is_part_of = false;
for proj in lvalue.projection.iter().rev() {
let mut ty = body.locals[lvalue.local].ty.clone();
for proj in lvalue.projection.iter() {
match proj {
ProjectionElem::Deref => return ProjectionCase::Indirect, // It's indirect
ProjectionElem::ConstantIndex { .. }
ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in the case of references and raw pointers
ProjectionElem::Deref // It's direct in the case of `Box<T>`
| ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. }
| ProjectionElem::Field(_)
| ProjectionElem::TupleField(_)
| ProjectionElem::TupleOrClosureField(_)
| ProjectionElem::Index(_) => {
is_part_of = true;
}
ProjectionElem::OpaqueCast(_) => (),
}
ty = proj.projected_ty(
ty,
db,
|c, subst, f| {
let (def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
},
body.owner.module(db.upcast()).krate(),
);
}
if is_part_of {
ProjectionCase::DirectPart
@ -76,7 +239,7 @@ fn place_case(lvalue: &Place) -> ProjectionCase {
/// Returns a map from basic blocks to the set of locals that might be ever initialized before
/// the start of the block. Only `StorageDead` can remove something from this map, and we ignore
/// `Uninit` and `drop` and similars after initialization.
/// `Uninit` and `drop` and similar after initialization.
fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
@ -107,26 +270,28 @@ fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<Local
never!("Terminator should be none only in construction");
return;
};
let targets = match terminator {
Terminator::Goto { target } => vec![*target],
Terminator::SwitchInt { targets, .. } => targets.all_targets().to_vec(),
Terminator::Resume
| Terminator::Abort
| Terminator::Return
| Terminator::Unreachable => vec![],
Terminator::Call { target, cleanup, destination, .. } => {
let targets = match &terminator.kind {
TerminatorKind::Goto { target } => vec![*target],
TerminatorKind::SwitchInt { targets, .. } => targets.all_targets().to_vec(),
TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable => vec![],
TerminatorKind::Call { target, cleanup, destination, .. } => {
if destination.projection.len() == 0 && destination.local == l {
is_ever_initialized = true;
}
target.into_iter().chain(cleanup.into_iter()).copied().collect()
}
Terminator::Drop { .. }
| Terminator::DropAndReplace { .. }
| Terminator::Assert { .. }
| Terminator::Yield { .. }
| Terminator::GeneratorDrop
| Terminator::FalseEdge { .. }
| Terminator::FalseUnwind { .. } => {
TerminatorKind::Drop { target, unwind, place: _ } => {
Some(target).into_iter().chain(unwind.into_iter()).copied().collect()
}
TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => {
never!("We don't emit these MIR terminators yet");
vec![]
}
@ -151,7 +316,10 @@ fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<Local
result
}
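An illustration of the property this map tracks (a sketch): an assignment mutates a local only if the local might already be initialized at that point, which is what `mutability_of_locals` below checks against this map.
// Sketch: the first assignment on each branch initializes `x`, so it is not
// a mutation; the assignment after the `if` sees `x` as ever-initialized on
// both incoming edges and is therefore recorded as requiring `mut`.
fn demo(c: bool) {
    let mut x;
    if c { x = 1; } else { x = 2; } // first initialization: not a mutation
    x = 3; // `x` is ever-initialized here, so this marks it as mutated
    let _ = x;
}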
fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
fn mutability_of_locals(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let mut result: ArenaMap<LocalId, MutabilityReason> =
body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
let mut push_mut_span = |local, span| match &mut result[local] {
@ -164,7 +332,7 @@ fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(place, value) => {
match place_case(place) {
match place_case(db, body, place) {
ProjectionCase::Direct => {
if ever_init_map.get(place.local).copied().unwrap_or_default() {
push_mut_span(place.local, statement.span);
@ -179,7 +347,7 @@ fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
ProjectionCase::Indirect => (),
}
if let Rvalue::Ref(BorrowKind::Mut { .. }, p) = value {
if is_place_direct(p) {
if place_case(db, body, p) != ProjectionCase::Indirect {
push_mut_span(p.local, statement.span);
}
}
@ -194,21 +362,21 @@ fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
never!("Terminator should be none only in construction");
continue;
};
match terminator {
Terminator::Goto { .. }
| Terminator::Resume
| Terminator::Abort
| Terminator::Return
| Terminator::Unreachable
| Terminator::FalseEdge { .. }
| Terminator::FalseUnwind { .. }
| Terminator::GeneratorDrop
| Terminator::SwitchInt { .. }
| Terminator::Drop { .. }
| Terminator::DropAndReplace { .. }
| Terminator::Assert { .. }
| Terminator::Yield { .. } => (),
Terminator::Call { destination, .. } => {
match &terminator.kind {
TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. } => (),
TerminatorKind::Call { destination, .. } => {
if destination.projection.len() == 0 {
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
push_mut_span(destination.local, MirSpan::Unknown);

File diff suppressed because it is too large

View file

@ -0,0 +1,792 @@
//! Interpret intrinsics, lang items and well-known `extern "C"` functions whose implementation
//! is not available.
use std::cmp;
use super::*;
macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
Ok(x) => x,
Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
}))
};
}
macro_rules! not_supported {
($x: expr) => {
return Err(MirEvalError::NotSupported(format!($x)))
};
}
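A standalone sketch of what `from_bytes!` expands to (assuming a little-endian target of the usual pointer width): a checked slice-to-array conversion where a length mismatch surfaces as an error rather than a panic.
// Sketch of from_bytes!(usize, bytes): try_into converts the slice into a
// fixed-size array, and a size mismatch becomes an error value.
fn usize_from_le(bytes: &[u8]) -> Result<usize, &'static str> {
    let arr: [u8; std::mem::size_of::<usize>()] =
        bytes.try_into().map_err(|_| "mismatched size")?;
    Ok(usize::from_le_bytes(arr))
}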
impl Evaluator<'_> {
pub(super) fn detect_and_exec_special_function(
&mut self,
def: FunctionId,
args: &[IntervalAndTy],
generic_args: &Substitution,
locals: &Locals<'_>,
destination: Interval,
span: MirSpan,
) -> Result<bool> {
let function_data = self.db.function_data(def);
let is_intrinsic = match &function_data.abi {
Some(abi) => *abi == Interned::new_str("rust-intrinsic"),
None => match def.lookup(self.db.upcast()).container {
hir_def::ItemContainerId::ExternBlockId(block) => {
let id = block.lookup(self.db.upcast()).id;
id.item_tree(self.db.upcast())[id.value].abi.as_deref()
== Some("rust-intrinsic")
}
_ => false,
},
};
if is_intrinsic {
self.exec_intrinsic(
function_data.name.as_text().unwrap_or_default().as_str(),
args,
generic_args,
destination,
&locals,
span,
)?;
return Ok(true);
}
let is_extern_c = match def.lookup(self.db.upcast()).container {
hir_def::ItemContainerId::ExternBlockId(block) => {
let id = block.lookup(self.db.upcast()).id;
id.item_tree(self.db.upcast())[id.value].abi.as_deref() == Some("C")
}
_ => false,
};
if is_extern_c {
self.exec_extern_c(
function_data.name.as_text().unwrap_or_default().as_str(),
args,
generic_args,
destination,
&locals,
span,
)?;
return Ok(true);
}
let alloc_fn = function_data
.attrs
.iter()
.filter_map(|x| x.path().as_ident())
.filter_map(|x| x.as_str())
.find(|x| {
[
"rustc_allocator",
"rustc_deallocator",
"rustc_reallocator",
"rustc_allocator_zeroed",
]
.contains(x)
});
if let Some(alloc_fn) = alloc_fn {
self.exec_alloc_fn(alloc_fn, args, destination)?;
return Ok(true);
}
if let Some(x) = self.detect_lang_function(def) {
let arg_bytes =
args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
let result = self.exec_lang_item(x, generic_args, &arg_bytes, locals, span)?;
destination.write_from_bytes(self, &result)?;
return Ok(true);
}
Ok(false)
}
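For reference, the declaration shapes this detection routes to the special-case executors (a sketch; `extern "rust-intrinsic"` is a nightly-only ABI in rustc, shown here only to illustrate what the ABI checks above look for):
// Sketch of declarations matched above:
extern "rust-intrinsic" {            // routed to exec_intrinsic
    fn size_of<T>() -> usize;
}
extern "C" {                         // routed to exec_extern_c
    fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}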
fn exec_alloc_fn(
&mut self,
alloc_fn: &str,
args: &[IntervalAndTy],
destination: Interval,
) -> Result<()> {
match alloc_fn {
"rustc_allocator_zeroed" | "rustc_allocator" => {
let [size, align] = args else {
return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
};
let size = from_bytes!(usize, size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
let result = self.heap_allocate(size, align);
destination.write_from_bytes(self, &result.to_bytes())?;
}
"rustc_deallocator" => { /* no-op for now */ }
"rustc_reallocator" => {
let [ptr, old_size, align, new_size] = args else {
return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
};
let ptr = Address::from_bytes(ptr.get(self)?)?;
let old_size = from_bytes!(usize, old_size.get(self)?);
let new_size = from_bytes!(usize, new_size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
let result = self.heap_allocate(new_size, align);
Interval { addr: result, size: old_size }
.write_from_interval(self, Interval { addr: ptr, size: old_size })?;
destination.write_from_bytes(self, &result.to_bytes())?;
}
_ => not_supported!("unknown alloc function"),
}
Ok(())
}
fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
use LangItem::*;
let candidate = lang_attr(self.db.upcast(), def)?;
// We want to execute these functions with special logic
if [PanicFmt, BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
return Some(candidate);
}
None
}
fn exec_lang_item(
&mut self,
x: LangItem,
generic_args: &Substitution,
args: &[Vec<u8>],
locals: &Locals<'_>,
span: MirSpan,
) -> Result<Vec<u8>> {
use LangItem::*;
let mut args = args.iter();
match x {
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
PanicFmt => {
let message = (|| {
let arguments_struct =
self.db.lang_item(self.crate_id, LangItem::FormatArguments)?.as_struct()?;
let arguments_layout = self
.layout_adt(arguments_struct.into(), Substitution::empty(Interner))
.ok()?;
let arguments_field_pieces =
self.db.struct_data(arguments_struct).variant_data.field(&name![pieces])?;
let pieces_offset = arguments_layout
.fields
.offset(u32::from(arguments_field_pieces.into_raw()) as usize)
.bytes_usize();
let ptr_size = self.ptr_size();
let arg = args.next()?;
let pieces_array_addr =
Address::from_bytes(&arg[pieces_offset..pieces_offset + ptr_size]).ok()?;
let pieces_array_len = usize::from_le_bytes(
(&arg[pieces_offset + ptr_size..pieces_offset + 2 * ptr_size])
.try_into()
.ok()?,
);
let mut message = "".to_string();
for i in 0..pieces_array_len {
let piece_ptr_addr = pieces_array_addr.offset(2 * i * ptr_size);
let piece_addr =
Address::from_bytes(self.read_memory(piece_ptr_addr, ptr_size).ok()?)
.ok()?;
let piece_len = usize::from_le_bytes(
self.read_memory(piece_ptr_addr.offset(ptr_size), ptr_size)
.ok()?
.try_into()
.ok()?,
);
let piece_data = self.read_memory(piece_addr, piece_len).ok()?;
message += &std::string::String::from_utf8_lossy(piece_data);
}
Some(message)
})()
.unwrap_or_else(|| "<format-args-evaluation-failed>".to_string());
Err(MirEvalError::Panic(message))
}
SliceLen => {
let arg = args
.next()
.ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?;
let ptr_size = arg.len() / 2;
Ok(arg[ptr_size..].into())
}
DropInPlace => {
let ty =
generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)).ok_or(
MirEvalError::TypeError(
"generic argument of drop_in_place is not provided",
),
)?;
let arg = args
.next()
.ok_or(MirEvalError::TypeError("argument of drop_in_place is not provided"))?;
self.run_drop_glue_deep(
ty.clone(),
locals,
Address::from_bytes(&arg[0..self.ptr_size()])?,
&arg[self.ptr_size()..],
span,
)?;
Ok(vec![])
}
x => not_supported!("Executing lang item {x:?}"),
}
}
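The `SliceLen` branch relies on the standard fat-pointer layout; a minimal sketch of that assumption (data pointer followed by length, each `ptr_size` bytes):
// Sketch: a &[T] argument arrives as ptr_size bytes of data pointer followed
// by ptr_size bytes of length, so the length is the second half of the bytes.
fn slice_len_from_raw(arg: &[u8]) -> usize {
    let ptr_size = arg.len() / 2;
    usize::from_le_bytes(arg[ptr_size..].try_into().expect("bad fat pointer"))
}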
fn exec_extern_c(
&mut self,
as_str: &str,
args: &[IntervalAndTy],
_generic_args: &Substitution,
destination: Interval,
locals: &Locals<'_>,
_span: MirSpan,
) -> Result<()> {
match as_str {
"memcmp" => {
let [ptr1, ptr2, size] = args else {
return Err(MirEvalError::TypeError("memcmp args are not provided"));
};
let addr1 = Address::from_bytes(ptr1.get(self)?)?;
let addr2 = Address::from_bytes(ptr2.get(self)?)?;
let size = from_bytes!(usize, size.get(self)?);
let slice1 = self.read_memory(addr1, size)?;
let slice2 = self.read_memory(addr2, size)?;
let r: i128 = match slice1.cmp(slice2) {
cmp::Ordering::Less => -1,
cmp::Ordering::Equal => 0,
cmp::Ordering::Greater => 1,
};
destination.write_from_bytes(self, &r.to_le_bytes()[..destination.size])
}
"write" => {
let [fd, ptr, len] = args else {
return Err(MirEvalError::TypeError("libc::write args are not provided"));
};
let fd = u128::from_le_bytes(pad16(fd.get(self)?, false));
let interval = Interval {
addr: Address::from_bytes(ptr.get(self)?)?,
size: from_bytes!(usize, len.get(self)?),
};
match fd {
1 => {
self.write_to_stdout(interval)?;
}
2 => {
self.write_to_stderr(interval)?;
}
_ => not_supported!("write to arbitrary file descriptor"),
}
destination.write_from_interval(self, len.interval)?;
Ok(())
}
"pthread_key_create" => {
let key = self.thread_local_storage.create_key();
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("pthread_key_create arg0 is not provided"));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let key_ty = if let Some((ty, ..)) = arg0.ty.as_reference_or_ptr() {
ty
} else {
return Err(MirEvalError::TypeError(
"pthread_key_create arg0 is not a pointer",
));
};
let arg0_interval = Interval::new(
arg0_addr,
self.size_of_sized(key_ty, locals, "pthread_key_create key arg")?,
);
arg0_interval.write_from_bytes(self, &key.to_le_bytes()[0..arg0_interval.size])?;
// return 0 as success
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
"pthread_getspecific" => {
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("pthread_getspecific arg0 is not provided"));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let value = self.thread_local_storage.get_key(key)?;
destination.write_from_bytes(self, &value.to_le_bytes()[0..destination.size])?;
Ok(())
}
"pthread_setspecific" => {
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("pthread_setspecific arg0 is not provided"));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let Some(arg1) = args.get(1) else {
return Err(MirEvalError::TypeError("pthread_setspecific arg1 is not provided"));
};
let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
self.thread_local_storage.set_key(key, value)?;
// return 0 as success
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
"pthread_key_delete" => {
// we ignore this currently
// return 0 as success
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
_ => not_supported!("unknown external function {as_str}"),
}
}
fn exec_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
destination: Interval,
locals: &Locals<'_>,
span: MirSpan,
) -> Result<()> {
if let Some(name) = name.strip_prefix("atomic_") {
return self.exec_atomic_intrinsic(name, args, generic_args, destination, locals, span);
}
if let Some(name) = name.strip_suffix("f64") {
let result = match name {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64) -> f64"));
};
let arg = from_bytes!(f64, arg.get(self)?);
match name {
"sqrt" => arg.sqrt(),
"sin" => arg.sin(),
"cos" => arg.cos(),
"exp" => arg.exp(),
"exp2" => arg.exp2(),
"log" => arg.ln(),
"log10" => arg.log10(),
"log2" => arg.log2(),
"fabs" => arg.abs(),
"floor" => arg.floor(),
"ceil" => arg.ceil(),
"trunc" => arg.trunc(),
// FIXME: these rounds should be different, but only `.round()` is stable now.
"rint" => arg.round(),
"nearbyint" => arg.round(),
"round" => arg.round(),
"roundeven" => arg.round(),
_ => unreachable!(),
}
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64, f64) -> f64"));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(f64, arg2.get(self)?);
match name {
"pow" => arg1.powf(arg2),
"minnum" => arg1.min(arg2),
"maxnum" => arg1.max(arg2),
"copysign" => arg1.copysign(arg2),
_ => unreachable!(),
}
}
"powi" => {
let [arg1, arg2] = args else {
return Err(MirEvalError::TypeError("powif64 signature doesn't match fn (f64, i32) -> f64"));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(i32, arg2.get(self)?);
arg1.powi(arg2)
}
"fma" => {
let [arg1, arg2, arg3] = args else {
return Err(MirEvalError::TypeError("fmaf64 signature doesn't match fn (f64, f64, f64) -> f64"));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(f64, arg2.get(self)?);
let arg3 = from_bytes!(f64, arg3.get(self)?);
arg1.mul_add(arg2, arg3)
}
_ => not_supported!("unknown f64 intrinsic {name}"),
};
return destination.write_from_bytes(self, &result.to_le_bytes());
}
if let Some(name) = name.strip_suffix("f32") {
let result = match name {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32) -> f32"));
};
let arg = from_bytes!(f32, arg.get(self)?);
match name {
"sqrt" => arg.sqrt(),
"sin" => arg.sin(),
"cos" => arg.cos(),
"exp" => arg.exp(),
"exp2" => arg.exp2(),
"log" => arg.ln(),
"log10" => arg.log10(),
"log2" => arg.log2(),
"fabs" => arg.abs(),
"floor" => arg.floor(),
"ceil" => arg.ceil(),
"trunc" => arg.trunc(),
// FIXME: these rounds should be different, but only `.round()` is stable now.
"rint" => arg.round(),
"nearbyint" => arg.round(),
"round" => arg.round(),
"roundeven" => arg.round(),
_ => unreachable!(),
}
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32, f32) -> f32"));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(f32, arg2.get(self)?);
match name {
"pow" => arg1.powf(arg2),
"minnum" => arg1.min(arg2),
"maxnum" => arg1.max(arg2),
"copysign" => arg1.copysign(arg2),
_ => unreachable!(),
}
}
"powi" => {
let [arg1, arg2] = args else {
return Err(MirEvalError::TypeError("powif32 signature doesn't match fn (f32, i32) -> f32"));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(i32, arg2.get(self)?);
arg1.powi(arg2)
}
"fma" => {
let [arg1, arg2, arg3] = args else {
return Err(MirEvalError::TypeError("fmaf32 signature doesn't match fn (f32, f32, f32) -> f32"));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(f32, arg2.get(self)?);
let arg3 = from_bytes!(f32, arg3.get(self)?);
arg1.mul_add(arg2, arg3)
}
_ => not_supported!("unknown f32 intrinsic {name}"),
};
return destination.write_from_bytes(self, &result.to_le_bytes());
}
match name {
"size_of" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let size = self.size_of_sized(ty, locals, "size_of arg")?;
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
}
"min_align_of" | "pref_align_of" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
};
let align = self.layout(ty)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
"needs_drop" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let result = !ty.clone().is_copy(self.db, locals.body.owner);
destination.write_from_bytes(self, &[u8::from(result)])
}
"ptr_guaranteed_cmp" => {
// FIXME: this is wrong for const eval, it should return 2 in some
// cases.
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
};
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
"saturating_add" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("saturating_add args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.saturating_add(rhs);
let bits = destination.size * 8;
// FIXME: signed
let is_signed = false;
let mx: u128 = if is_signed { (1 << (bits - 1)) - 1 } else { (1 << bits) - 1 };
// FIXME: signed
let mn: u128 = 0;
let ans = cmp::min(mx, cmp::max(mn, ans));
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_add" | "unchecked_add" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_add(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_sub" | "unchecked_sub" | "ptr_offset_from_unsigned" | "ptr_offset_from" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_sub(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_mul" | "unchecked_mul" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_mul args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_mul(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"unchecked_rem" => {
// FIXME: signed
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("unchecked_rem args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.checked_rem(rhs).ok_or_else(|| {
MirEvalError::UndefinedBehavior("unchecked_rem with bad inputs".to_owned())
})?;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"unchecked_div" | "exact_div" => {
// FIXME: signed
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("unchecked_div args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.checked_div(rhs).ok_or_else(|| {
MirEvalError::UndefinedBehavior("unchecked_rem with bad inputs".to_owned())
})?;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
};
let result_ty = TyKind::Tuple(
2,
Substitution::from_iter(Interner, [lhs.ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
let op_size =
self.size_of_sized(&lhs.ty, locals, "operand of add_with_overflow")?;
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let (ans, u128overflow) = match name {
"add_with_overflow" => lhs.overflowing_add(rhs),
"sub_with_overflow" => lhs.overflowing_sub(rhs),
"mul_with_overflow" => lhs.overflowing_mul(rhs),
_ => unreachable!(),
};
let is_overflow = u128overflow
|| ans.to_le_bytes()[op_size..].iter().any(|&x| x != 0 && x != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
let result = self.make_by_layout(
layout.size.bytes_usize(),
&layout,
None,
[ans.to_le_bytes()[0..op_size].to_vec(), is_overflow]
.into_iter()
.map(IntervalOrOwned::Owned),
)?;
destination.write_from_bytes(self, &result)
}
"copy" | "copy_nonoverlapping" => {
let [src, dst, offset] = args else {
return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided"));
};
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided"));
};
let src = Address::from_bytes(src.get(self)?)?;
let dst = Address::from_bytes(dst.get(self)?)?;
let offset = from_bytes!(usize, offset.get(self)?);
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
let size = offset * size;
let src = Interval { addr: src, size };
let dst = Interval { addr: dst, size };
dst.write_from_interval(self, src)
}
"offset" | "arith_offset" => {
let [ptr, offset] = args else {
return Err(MirEvalError::TypeError("offset args are not provided"));
};
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
};
let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
let offset = u128::from_le_bytes(pad16(offset.get(self)?, false));
let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128;
let ans = ptr + offset * size;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" | "assume" => {
// FIXME: We should actually implement these checks
Ok(())
}
"forget" => {
// We don't call any drop glue yet, so there is nothing here
Ok(())
}
"transmute" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("trasmute arg is not provided"));
};
destination.write_from_interval(self, arg.interval)
}
"likely" | "unlikely" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("likely arg is not provided"));
};
destination.write_from_interval(self, arg.interval)
}
"ctpop" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("likely arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
"cttz" | "cttz_nonzero" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("likely arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
};
let mut args = vec![const_fn.clone()];
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple"));
};
let layout = self.layout(&tuple.ty)?;
for (i, field) in fields.iter(Interner).enumerate() {
let field = field.assert_ty_ref(Interner).clone();
let offset = layout.fields.offset(i).bytes_usize();
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
self.exec_fn_trait(&args, destination, locals, span)
}
_ => not_supported!("unknown intrinsic {name}"),
}
}
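What the `*_with_overflow` branch computes matches the standard `overflowing_*` operations; a minimal sketch specialized to `u32` (hypothetical helper name):
// Sketch of the (value, overflow-flag) pair built for add_with_overflow; the
// interpreter works in u128 and derives the flag from the truncated bytes,
// which agrees with plain overflowing_add for a 4-byte operand.
fn add_with_overflow_u32(a: u32, b: u32) -> (u32, bool) {
    a.overflowing_add(b)
}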
fn exec_atomic_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
destination: Interval,
locals: &Locals<'_>,
_span: MirSpan,
) -> Result<()> {
// We are a single-threaded runtime with no UB checking and no optimization, so
// we can implement these as normal functions.
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
};
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided"));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let arg0_interval =
Interval::new(arg0_addr, self.size_of_sized(ty, locals, "atomic intrinsic type arg")?);
if name.starts_with("load_") {
return destination.write_from_interval(self, arg0_interval);
}
let Some(arg1) = args.get(1) else {
return Err(MirEvalError::TypeError("atomic intrinsic arg1 is not provided"));
};
if name.starts_with("store_") {
return arg0_interval.write_from_interval(self, arg1.interval);
}
if name.starts_with("xchg_") {
destination.write_from_interval(self, arg0_interval)?;
return arg0_interval.write_from_interval(self, arg1.interval);
}
if name.starts_with("xadd_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs.wrapping_add(rhs);
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("xsub_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs.wrapping_sub(rhs);
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("and_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs & rhs;
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("or_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs | rhs;
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("xor_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = lhs ^ rhs;
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
if name.starts_with("nand_") {
destination.write_from_interval(self, arg0_interval)?;
let lhs = u128::from_le_bytes(pad16(arg0_interval.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(arg1.get(self)?, false));
let ans = !(lhs & rhs);
return arg0_interval.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]);
}
let Some(arg2) = args.get(2) else {
return Err(MirEvalError::TypeError("atomic intrinsic arg2 is not provided"));
};
if name.starts_with("cxchg_") || name.starts_with("cxchgweak_") {
let dest = if arg1.get(self)? == arg0_interval.get(self)? {
arg0_interval.write_from_interval(self, arg2.interval)?;
(arg1.interval, true)
} else {
(arg0_interval, false)
};
let result_ty = TyKind::Tuple(
2,
Substitution::from_iter(Interner, [ty.clone(), TyBuilder::bool()]),
)
.intern(Interner);
let layout = self.layout(&result_ty)?;
let result = self.make_by_layout(
layout.size.bytes_usize(),
&layout,
None,
[IntervalOrOwned::Borrowed(dest.0), IntervalOrOwned::Owned(vec![u8::from(dest.1)])]
.into_iter(),
)?;
return destination.write_from_bytes(self, &result);
}
not_supported!("unknown atomic intrinsic {name}");
}
}
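A source-level view of what reaches the atomic branches (a sketch): because the interpreter is single-threaded, `fetch_add` and friends reduce to a plain read, a wrapping arithmetic step, and a write back, which is exactly the `xadd_` branch above.
use std::sync::atomic::{AtomicU32, Ordering};

// Sketch: this fetch_add lowers to an atomic_xadd_* intrinsic, which the
// evaluator handles by returning the old value and storing old + rhs.
fn bump(counter: &AtomicU32) -> u32 {
    counter.fetch_add(1, Ordering::SeqCst)
}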

View file

@ -0,0 +1,676 @@
use base_db::{fixture::WithFixture, FileId};
use hir_def::db::DefDatabase;
use syntax::{TextRange, TextSize};
use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
use super::{interpret_mir, MirEvalError};
fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalError> {
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let func_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
if db.function_data(x).name.display(db).to_string() == "main" {
Some(x)
} else {
None
}
}
_ => None,
})
.expect("no main function found");
let body = db
.monomorphized_mir_body(
func_id.into(),
Substitution::empty(Interner),
db.trait_environment(func_id.into()),
)
.map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
let (result, stdout, stderr) = interpret_mir(db, &body, false);
result?;
Ok((stdout, stderr))
}
fn check_pass(ra_fixture: &str) {
check_pass_and_stdio(ra_fixture, "", "");
}
fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr: &str) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let x = eval_main(&db, file_id);
match x {
Err(e) => {
let mut err = String::new();
let line_index = |size: TextSize| {
let mut size = u32::from(size) as usize;
let mut lines = ra_fixture.lines().enumerate();
while let Some((i, l)) = lines.next() {
if let Some(x) = size.checked_sub(l.len()) {
size = x;
} else {
return (i, size);
}
}
(usize::MAX, size)
};
let span_formatter = |file, range: TextRange| {
format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
};
e.pretty_print(&mut err, &db, span_formatter).unwrap();
panic!("Error in interpreting: {err}");
}
Ok((stdout, stderr)) => {
assert_eq!(stdout, expected_stdout);
assert_eq!(stderr, expected_stderr);
}
}
}
#[test]
fn function_with_extern_c_abi() {
check_pass(
r#"
extern "C" fn foo(a: i32, b: i32) -> i32 {
a + b
}
fn main() {
let x = foo(2, 3);
}
"#,
);
}
#[test]
fn drop_basic() {
check_pass(
r#"
//- minicore: drop, add
struct X<'a>(&'a mut i32);
impl<'a> Drop for X<'a> {
fn drop(&mut self) {
*self.0 += 1;
}
}
struct NestedX<'a> { f1: X<'a>, f2: X<'a> }
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn my_drop2(x: X<'_>) {
return;
}
fn my_drop(x: X<'_>) {
drop(x);
}
fn main() {
let mut s = 10;
let mut x = X(&mut s);
my_drop(x);
x = X(&mut s);
my_drop2(x);
X(&mut s); // dropped immediately
let x = X(&mut s);
NestedX { f1: x, f2: X(&mut s) };
if s != 15 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn drop_if_let() {
check_pass(
r#"
//- minicore: drop, add, option, cell, builtin_impls
use core::cell::Cell;
struct X<'a>(&'a Cell<i32>);
impl<'a> Drop for X<'a> {
fn drop(&mut self) {
self.0.set(self.0.get() + 1)
}
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
#[test]
fn main() {
let s = Cell::new(0);
let x = Some(X(&s));
if let Some(y) = x {
if s.get() != 0 {
should_not_reach();
}
if s.get() != 0 {
should_not_reach();
}
} else {
should_not_reach();
}
if s.get() != 1 {
should_not_reach();
}
let x = Some(X(&s));
if let None = x {
should_not_reach();
} else {
if s.get() != 1 {
should_not_reach();
}
}
if s.get() != 1 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn drop_in_place() {
check_pass(
r#"
//- minicore: drop, add, coerce_unsized
use core::ptr::drop_in_place;
struct X<'a>(&'a mut i32);
impl<'a> Drop for X<'a> {
fn drop(&mut self) {
*self.0 += 1;
}
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let mut s = 2;
let x = X(&mut s);
drop_in_place(&mut x);
drop(x);
if s != 4 {
should_not_reach();
}
let p: &mut [X] = &mut [X(&mut 2)];
drop_in_place(p);
}
"#,
);
}
#[test]
fn manually_drop() {
check_pass(
r#"
//- minicore: manually_drop
use core::mem::ManuallyDrop;
struct X;
impl Drop for X {
fn drop(&mut self) {
should_not_reach();
}
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let x = ManuallyDrop::new(X);
}
"#,
);
}
#[test]
fn generic_impl_for_trait_with_generic_method() {
check_pass(
r#"
//- minicore: drop
struct S<T>(T);
trait Tr {
fn f<F>(&self, x: F);
}
impl<T> Tr for S<T> {
fn f<F>(&self, x: F) {
}
}
fn main() {
let s = S(1u8);
s.f(5i64);
}
"#,
);
}
#[test]
fn index_of_slice_should_preserve_len() {
check_pass(
r#"
//- minicore: index, slice, coerce_unsized
struct X;
impl core::ops::Index<X> for [i32] {
type Output = i32;
fn index(&self, _: X) -> &i32 {
if self.len() != 3 {
should_not_reach();
}
&self[0]
}
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let x: &[i32] = &[1, 2, 3];
&x[X];
}
"#,
);
}
#[test]
fn memcmp() {
check_pass(
r#"
//- minicore: slice, coerce_unsized, index
fn should_not_reach() -> bool {
_ // FIXME: replace this function with panic when that works
}
extern "C" {
fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
fn my_cmp(x: &[u8], y: &[u8]) -> i32 {
memcmp(x as *const u8, y as *const u8, x.len())
}
fn main() {
if my_cmp(&[1, 2, 3], &[1, 2, 3]) != 0 {
should_not_reach();
}
if my_cmp(&[1, 20, 3], &[1, 2, 3]) <= 0 {
should_not_reach();
}
if my_cmp(&[1, 2, 3], &[1, 20, 3]) >= 0 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn unix_write_stdout() {
check_pass_and_stdio(
r#"
//- minicore: slice, index, coerce_unsized
type pthread_key_t = u32;
type c_void = u8;
type c_int = i32;
extern "C" {
pub fn write(fd: i32, buf: *const u8, count: usize) -> usize;
}
fn main() {
let stdout = b"stdout";
let stderr = b"stderr";
write(1, &stdout[0], 6);
write(2, &stderr[0], 6);
}
"#,
"stdout",
"stderr",
);
}
#[test]
fn closure_layout_in_rpit() {
check_pass(
r#"
//- minicore: fn
fn f<F: Fn()>(x: F) {
fn g(x: impl Fn()) -> impl FnOnce() {
move || {
x();
}
}
g(x)();
}
fn main() {
f(|| {});
}
"#,
);
}
#[test]
fn from_fn() {
check_pass(
r#"
//- minicore: fn, iterator
struct FromFn<F>(F);
impl<T, F: FnMut() -> Option<T>> Iterator for FromFn<F> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
(self.0)()
}
}
fn main() {
let mut tokenize = {
FromFn(move || Some(2))
};
let s = tokenize.next();
}
"#,
);
}
#[test]
fn for_loop() {
check_pass(
r#"
//- minicore: iterator, add
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
struct X;
struct XIter(i32);
impl IntoIterator for X {
type Item = i32;
type IntoIter = XIter;
fn into_iter(self) -> Self::IntoIter {
XIter(0)
}
}
impl Iterator for XIter {
type Item = i32;
fn next(&mut self) -> Option<Self::Item> {
if self.0 == 5 {
None
} else {
self.0 += 1;
Some(self.0)
}
}
}
fn main() {
let mut s = 0;
for x in X {
s += x;
}
if s != 15 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn field_with_associated_type() {
check_pass(
r#"
//- /b/mod.rs crate:b
pub trait Tr {
fn f(self);
}
pub trait Tr2 {
type Ty: Tr;
}
pub struct S<T: Tr2> {
pub t: T::Ty,
}
impl<T: Tr2> S<T> {
pub fn g(&self) {
let k = (self.t, self.t);
self.t.f();
}
}
//- /a/mod.rs crate:a deps:b
use b::{Tr, Tr2, S};
struct A(i32);
struct B(u8);
impl Tr for A {
fn f(&self) {
}
}
impl Tr2 for B {
type Ty = A;
}
#[test]
fn main() {
let s: S<B> = S { t: A(2) };
s.g();
}
"#,
);
}
#[test]
fn specialization_array_clone() {
check_pass(
r#"
//- minicore: copy, derive, slice, index, coerce_unsized
impl<T: Clone, const N: usize> Clone for [T; N] {
#[inline]
fn clone(&self) -> Self {
SpecArrayClone::clone(self)
}
}
trait SpecArrayClone: Clone {
fn clone<const N: usize>(array: &[Self; N]) -> [Self; N];
}
impl<T: Clone> SpecArrayClone for T {
#[inline]
default fn clone<const N: usize>(array: &[T; N]) -> [T; N] {
// FIXME: panic here when we actually implement specialization.
from_slice(array)
}
}
fn from_slice<T, const N: usize>(s: &[T]) -> [T; N] {
[s[0]; N]
}
impl<T: Copy> SpecArrayClone for T {
#[inline]
fn clone<const N: usize>(array: &[T; N]) -> [T; N] {
*array
}
}
#[derive(Clone, Copy)]
struct X(i32);
fn main() {
let ar = [X(1), X(2)];
ar.clone();
}
"#,
);
}
#[test]
fn short_circuit_operator() {
check_pass(
r#"
fn should_not_reach() -> bool {
_ // FIXME: replace this function with panic when that works
}
fn main() {
if false && should_not_reach() {
should_not_reach();
}
true || should_not_reach();
}
"#,
);
}
#[test]
fn closure_state() {
check_pass(
r#"
//- minicore: fn, add, copy
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let mut x = 2;
let mut c = move || {
x += 1;
x
};
c();
c();
c();
if x != 2 {
should_not_reach();
}
if c() != 6 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn closure_capture_array_const_generic() {
check_pass(
r#"
//- minicore: fn, add, copy
struct X(i32);
fn f<const N: usize>(mut x: [X; N]) { // -> impl FnOnce() {
let c = || {
x;
};
c();
}
fn main() {
let s = f([X(1)]);
//s();
}
"#,
);
}
#[test]
fn posix_tls() {
check_pass(
r#"
//- minicore: option
type pthread_key_t = u32;
type c_void = u8;
type c_int = i32;
extern "C" {
pub fn pthread_key_create(
key: *mut pthread_key_t,
dtor: Option<unsafe extern "C" fn(*mut c_void)>,
) -> c_int;
pub fn pthread_key_delete(key: pthread_key_t) -> c_int;
pub fn pthread_getspecific(key: pthread_key_t) -> *mut c_void;
pub fn pthread_setspecific(key: pthread_key_t, value: *const c_void) -> c_int;
}
fn main() {
let mut key = 2;
pthread_key_create(&mut key, None);
}
"#,
);
}
#[test]
fn regression_14966() {
check_pass(
r#"
//- minicore: fn, copy, coerce_unsized
trait A<T> {
fn a(&self) {}
}
impl A<()> for () {}
struct B;
impl B {
pub fn b<T>(s: &dyn A<T>) -> Self {
B
}
}
struct C;
impl C {
fn c<T>(a: &dyn A<T>) -> Self {
let mut c = C;
let b = B::b(a);
c.d(|| a.a());
c
}
fn d(&mut self, f: impl FnOnce()) {}
}
fn main() {
C::c(&());
}
"#,
);
}

File diff suppressed because it is too large

View file

@ -1,6 +1,7 @@
//! MIR lowering for places
use super::*;
use hir_def::{lang_item::lang_attr, FunctionId};
use hir_expand::name;
macro_rules! not_supported {
@ -15,8 +16,8 @@ impl MirLowerCtx<'_> {
expr_id: ExprId,
prev_block: BasicBlockId,
) -> Result<Option<(Place, BasicBlockId)>> {
let ty = self.expr_ty(expr_id);
let place = self.temp(ty)?;
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
return Ok(None);
};
@ -29,9 +30,11 @@ impl MirLowerCtx<'_> {
prev_block: BasicBlockId,
adjustments: &[Adjustment],
) -> Result<Option<(Place, BasicBlockId)>> {
let ty =
adjustments.last().map(|x| x.target.clone()).unwrap_or_else(|| self.expr_ty(expr_id));
let place = self.temp(ty)?;
let ty = adjustments
.last()
.map(|x| x.target.clone())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
return Ok(None);
};
@ -62,7 +65,7 @@ impl MirLowerCtx<'_> {
)? else {
return Ok(None);
};
x.0.projection.push(ProjectionElem::Deref);
x.0 = x.0.project(ProjectionElem::Deref);
Ok(Some(x))
}
Adjust::Deref(Some(od)) => {
@ -79,7 +82,7 @@ impl MirLowerCtx<'_> {
r,
rest.last()
.map(|x| x.target.clone())
.unwrap_or_else(|| self.expr_ty(expr_id)),
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
last.target.clone(),
expr_id.into(),
match od.0 {
@ -125,35 +128,74 @@ impl MirLowerCtx<'_> {
match &self.body.exprs[expr_id] {
Expr::Path(p) => {
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) else {
return Err(MirLowerError::unresolved_path(self.db, p));
};
let pr = match pr {
ResolveValueResult::ValueNs(v) => v,
ResolveValueResult::Partial(..) => return try_rvalue(self),
let Some(pr) = resolver.resolve_path_in_value_ns_fully(self.db.upcast(), p) else {
return try_rvalue(self);
};
match pr {
ValueNs::LocalBinding(pat_id) => {
Ok(Some((self.result.binding_locals[pat_id].into(), current)))
Ok(Some((self.binding_local(pat_id)?.into(), current)))
}
ValueNs::StaticId(s) => {
let ty = self.expr_ty_without_adjust(expr_id);
let ref_ty =
TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner);
let temp: Place = self.temp(ref_ty, current, expr_id.into())?.into();
self.push_assignment(
current,
temp.clone(),
Operand::Static(s).into(),
expr_id.into(),
);
Ok(Some((temp.project(ProjectionElem::Deref), current)))
}
_ => try_rvalue(self),
}
}
Expr::UnaryOp { expr, op } => match op {
hir_def::expr::UnaryOp::Deref => {
if !matches!(
self.expr_ty(*expr).kind(Interner),
TyKind::Ref(..) | TyKind::Raw(..)
) {
let Some(_) = self.lower_expr_as_place(current, *expr, true)? else {
hir_def::hir::UnaryOp::Deref => {
let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) {
TyKind::Ref(..) | TyKind::Raw(..) => true,
TyKind::Adt(id, _) => {
if let Some(lang_item) = lang_attr(self.db.upcast(), id.0) {
lang_item == LangItem::OwnedBox
} else {
false
}
}
_ => false,
};
if !is_builtin {
let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
return Ok(None);
};
not_supported!("explicit overloaded deref");
return self.lower_overloaded_deref(
current,
p,
self.expr_ty_after_adjustments(*expr),
self.expr_ty_without_adjust(expr_id),
expr_id.into(),
'b: {
if let Some((f, _)) = self.infer.method_resolution(expr_id) {
if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut)?.as_trait()
{
if let Some(deref_fn) = self
.db
.trait_data(deref_trait)
.method_by_name(&name![deref_mut])
{
break 'b deref_fn == f;
}
}
}
false
},
);
}
let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
return Ok(None);
};
r.projection.push(ProjectionElem::Deref);
r = r.project(ProjectionElem::Deref);
Ok(Some((r, current)))
}
_ => try_rvalue(self),
@ -169,25 +211,84 @@ impl MirLowerCtx<'_> {
let base_ty = self.expr_ty_after_adjustments(*base);
let index_ty = self.expr_ty_after_adjustments(*index);
if index_ty != TyBuilder::usize()
|| !matches!(base_ty.kind(Interner), TyKind::Array(..) | TyKind::Slice(..))
|| !matches!(
base_ty.strip_reference().kind(Interner),
TyKind::Array(..) | TyKind::Slice(..)
)
{
not_supported!("overloaded index");
let Some(index_fn) = self.infer.method_resolution(expr_id) else {
return Err(MirLowerError::UnresolvedMethod("[overloaded index]".to_string()));
};
let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else {
return Ok(None);
};
let Some((index_operand, current)) = self.lower_expr_to_some_operand(*index, current)? else {
return Ok(None);
};
return self.lower_overloaded_index(
current,
base_place,
base_ty,
self.expr_ty_without_adjust(expr_id),
index_operand,
expr_id.into(),
index_fn,
);
}
let adjusts = self
.infer
.expr_adjustments
.get(base)
.and_then(|x| x.split_last())
.map(|x| x.1)
.unwrap_or(&[]);
let Some((mut p_base, current)) =
self.lower_expr_as_place(current, *base, true)? else {
self.lower_expr_as_place_with_adjust(current, *base, true, adjusts)?
else {
return Ok(None);
};
let l_index = self.temp(self.expr_ty_after_adjustments(*index))?;
let l_index =
self.temp(self.expr_ty_after_adjustments(*index), current, expr_id.into())?;
let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
return Ok(None);
};
p_base.projection.push(ProjectionElem::Index(l_index));
p_base = p_base.project(ProjectionElem::Index(l_index));
Ok(Some((p_base, current)))
}
_ => try_rvalue(self),
}
}
fn lower_overloaded_index(
&mut self,
current: BasicBlockId,
place: Place,
base_ty: Ty,
result_ty: Ty,
index_operand: Operand,
span: MirSpan,
index_fn: (FunctionId, Substitution),
) -> Result<Option<(Place, BasicBlockId)>> {
let mutability = match base_ty.as_reference() {
Some((_, _, mutability)) => mutability,
None => Mutability::Not,
};
let result_ref = TyKind::Ref(mutability, static_lifetime(), result_ty).intern(Interner);
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(
TyKind::FnDef(
self.db.intern_callable_def(CallableDefId::FunctionId(index_fn.0)).into(),
index_fn.1,
)
.intern(Interner),
);
let Some(current) = self.lower_call(index_fn_op, Box::new([Operand::Copy(place), index_operand]), result.clone(), current, false, span)? else {
return Ok(None);
};
result = result.project(ProjectionElem::Deref);
Ok(Some((result, current)))
}
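The lowering above mirrors the usual desugaring of overloaded indexing; a sketch of the surface semantics it implements:
use std::ops::Index;

// Sketch: `c[i]` with an overloaded Index impl becomes a call to
// Index::index (yielding a reference) followed by a Deref projection,
// matching the result.project(ProjectionElem::Deref) step above.
fn subscript<C: Index<usize>>(c: &C, i: usize) -> &C::Output {
    Index::index(c, i)
}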
fn lower_overloaded_deref(
&mut self,
current: BasicBlockId,
@ -209,7 +310,7 @@ impl MirLowerCtx<'_> {
};
let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
let ref_place: Place = self.temp(ty_ref)?.into();
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span);
let deref_trait = self
.resolve_lang_item(trait_lang_item)?
@ -227,11 +328,11 @@ impl MirLowerCtx<'_> {
)
.intern(Interner),
);
let mut result: Place = self.temp(target_ty_ref)?.into();
let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false)? else {
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
let Some(current) = self.lower_call(deref_fn_op, Box::new([Operand::Copy(ref_place)]), result.clone(), current, false, span)? else {
return Ok(None);
};
result.projection.push(ProjectionElem::Deref);
result = result.project(ProjectionElem::Deref);
Ok(Some((result, current)))
}
}
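Overloaded deref follows the same shape; a sketch of the surface semantics `lower_overloaded_deref` implements (the mutability flag selects `deref_mut` where the place is used mutably):
use std::ops::Deref;

// Sketch: `*x` on a non-builtin type becomes `*Deref::deref(&x)`, i.e. a
// borrow, a trait-method call, and a final Deref projection on the result.
fn read_through<T: Deref>(x: &T) -> &T::Target {
    Deref::deref(x)
}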

View file

@ -0,0 +1,617 @@
//! MIR lowering for patterns
use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId};
use crate::BindingMode;
use super::*;
macro_rules! not_supported {
($x: expr) => {
return Err(MirLowerError::NotSupported(format!($x)))
};
}
pub(super) enum AdtPatternShape<'a> {
Tuple { args: &'a [PatId], ellipsis: Option<usize> },
Record { args: &'a [RecordFieldPat] },
Unit,
}
/// We need to do pattern matching in two phases: One to check if the pattern matches, and one to fill the bindings
/// of patterns. This is necessary to prevent double moves and similar problems. For example:
/// ```ignore
/// struct X;
/// match (X, 3) {
/// (b, 2) | (b, 3) => {},
/// _ => {}
/// }
/// ```
/// If we did everything in one pass, we would move `X` into the first `b`, then discover that the second field of the
/// tuple doesn't match and have to move `X` into the second `b` (which here happens to be the same binding, but need
/// not be); the second pattern might not match either, in which case `X` should not be moved at all.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MatchingMode {
/// Check that if this pattern matches
Check,
/// Assume that this pattern matches, fill bindings
Bind,
}
impl MirLowerCtx<'_> {
/// Takes a `current` unterminated block, appends statements (and possibly a terminator) that check whether
/// the pattern matches and write the bindings, and returns two unterminated blocks: one for the matched path
/// (which may be the `current` block) and one for the mismatched path. If the input pattern is irrefutable,
/// the mismatched-path block is `None`.
///
/// By default it creates a new block for the mismatched path. If you already have one, you can pass it via the
/// `current_else` argument to save an unnecessary jump. If `current_else` isn't `None`, the resulting mismatched
/// path won't be `None` either. Note that this function adds jumps to the beginning of the `current_else` block,
/// so it should be an empty block.
pub(super) fn pattern_match(
&mut self,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: Place,
pattern: PatId,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let (current, current_else) = self.pattern_match_inner(
current,
current_else,
cond_place.clone(),
pattern,
MatchingMode::Check,
)?;
let (current, current_else) = self.pattern_match_inner(
current,
current_else,
cond_place,
pattern,
MatchingMode::Bind,
)?;
Ok((current, current_else))
}
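    // A sketch of the calling convention described above (illustrative pseudo-usage with
    // hypothetical names; the real call sites live in the match/let lowering code):
    //
    //     let (matched, unmatched) =
    //         ctx.pattern_match(current, None, scrutinee_place.clone(), arm_pat)?;
    //     // `matched` continues with the arm's body. For an irrefutable pattern,
    //     // `unmatched` is `None`; otherwise it becomes the `current` block in which
    //     // the next arm's check is emitted.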
fn pattern_match_inner(
&mut self,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
mut cond_place: Place,
pattern: PatId,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default();
cond_place.projection = cond_place
.projection
.iter()
.cloned()
.chain((0..cnt).map(|_| ProjectionElem::Deref))
.collect::<Vec<_>>()
.into();
Ok(match &self.body.pats[pattern] {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
let subst = match self.infer[pattern].kind(Interner) {
TyKind::Tuple(_, s) => s,
_ => {
return Err(MirLowerError::TypeError(
"non tuple type matched with tuple pattern",
))
}
};
self.pattern_match_tuple_like(
current,
current_else,
args,
*ellipsis,
                    (0..subst.len(Interner)).map(PlaceElem::TupleOrClosureField),
                    &cond_place,
mode,
)?
}
Pat::Or(pats) => {
let then_target = self.new_basic_block();
let mut finished = false;
for pat in &**pats {
let (mut next, next_else) = self.pattern_match_inner(
current,
None,
                    cond_place.clone(),
*pat,
MatchingMode::Check,
)?;
if mode == MatchingMode::Bind {
(next, _) = self.pattern_match_inner(
next,
None,
                            cond_place.clone(),
*pat,
MatchingMode::Bind,
)?;
}
self.set_goto(next, then_target, pattern.into());
match next_else {
Some(t) => {
current = t;
}
None => {
finished = true;
break;
}
}
}
if !finished {
if mode == MatchingMode::Bind {
self.set_terminator(current, TerminatorKind::Unreachable, pattern.into());
} else {
let ce = *current_else.get_or_insert_with(|| self.new_basic_block());
self.set_goto(current, ce, pattern.into());
}
}
(then_target, current_else)
}
Pat::Record { args, .. } => {
let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
not_supported!("unresolved variant for record");
};
self.pattern_matching_variant(
cond_place,
variant,
current,
pattern.into(),
current_else,
AdtPatternShape::Record { args: &*args },
mode,
)?
}
Pat::Range { start, end } => {
let mut add_check = |l: &LiteralOrConst, binop| -> Result<()> {
let lv =
self.lower_literal_or_const_to_operand(self.infer[pattern].clone(), l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
let next = self.new_basic_block();
let discr: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
Rvalue::CheckedBinaryOp(
binop,
lv,
                            Operand::Copy(cond_place.clone()),
),
pattern.into(),
);
let discr = Operand::Copy(discr);
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, next, else_target),
},
pattern.into(),
);
current = next;
Ok(())
};
if mode == MatchingMode::Check {
if let Some(start) = start {
add_check(start, BinOp::Le)?;
}
if let Some(end) = end {
add_check(end, BinOp::Ge)?;
}
}
(current, current_else)
}
Pat::Slice { prefix, slice, suffix } => {
if mode == MatchingMode::Check {
// emit runtime length check for slice
if let TyKind::Slice(_) = self.infer[pattern].kind(Interner) {
let pattern_len = prefix.len() + suffix.len();
let place_len: Place =
self.temp(TyBuilder::usize(), current, pattern.into())?.into();
self.push_assignment(
current,
place_len.clone(),
                            Rvalue::Len(cond_place.clone()),
pattern.into(),
);
let else_target =
*current_else.get_or_insert_with(|| self.new_basic_block());
let next = self.new_basic_block();
if slice.is_none() {
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: Operand::Copy(place_len),
targets: SwitchTargets::static_if(
pattern_len as u128,
next,
else_target,
),
},
pattern.into(),
);
} else {
let c = Operand::from_concrete_const(
pattern_len.to_le_bytes().to_vec(),
MemoryMap::default(),
TyBuilder::usize(),
);
let discr: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
Rvalue::CheckedBinaryOp(BinOp::Le, c, Operand::Copy(place_len)),
pattern.into(),
);
let discr = Operand::Copy(discr);
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, next, else_target),
},
pattern.into(),
);
}
current = next;
}
}
for (i, &pat) in prefix.iter().enumerate() {
                    let next_place = cond_place.project(ProjectionElem::ConstantIndex {
offset: i as u64,
from_end: false,
});
(current, current_else) =
self.pattern_match_inner(current, current_else, next_place, pat, mode)?;
}
if let Some(slice) = slice {
if mode == MatchingMode::Bind {
if let Pat::Bind { id, subpat: _ } = self.body[*slice] {
                            let next_place = cond_place.project(ProjectionElem::Subslice {
from: prefix.len() as u64,
to: suffix.len() as u64,
});
(current, current_else) = self.pattern_match_binding(
id,
next_place,
(*slice).into(),
current,
current_else,
)?;
}
}
}
for (i, &pat) in suffix.iter().enumerate() {
                    let next_place = cond_place.project(ProjectionElem::ConstantIndex {
offset: i as u64,
from_end: true,
});
(current, current_else) =
self.pattern_match_inner(current, current_else, next_place, pat, mode)?;
}
(current, current_else)
}
Pat::Path(p) => match self.infer.variant_resolution_for_pat(pattern) {
Some(variant) => self.pattern_matching_variant(
cond_place,
variant,
current,
pattern.into(),
current_else,
AdtPatternShape::Unit,
mode,
)?,
None => {
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), p)
.ok_or_else(unresolved_name)?;
let (c, subst) = 'b: {
if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) {
if let AssocItemId::ConstId(c) = x.0 {
break 'b (c, x.1);
}
}
if let ResolveValueResult::ValueNs(v) = pr {
if let ValueNs::ConstId(c) = v {
break 'b (c, Substitution::empty(Interner));
}
}
not_supported!("path in pattern position that is not const or variant")
};
let tmp: Place =
self.temp(self.infer[pattern].clone(), current, pattern.into())?.into();
let span = pattern.into();
self.lower_const(
c.into(),
current,
tmp.clone(),
subst,
span,
self.infer[pattern].clone(),
)?;
let tmp2: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
tmp2.clone(),
Rvalue::CheckedBinaryOp(
BinOp::Eq,
Operand::Copy(tmp),
Operand::Copy(cond_place),
),
span,
);
let next = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: Operand::Copy(tmp2),
targets: SwitchTargets::static_if(1, next, else_target),
},
span,
);
(next, Some(else_target))
}
},
Pat::Lit(l) => match &self.body.exprs[*l] {
Expr::Literal(l) => {
let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
if mode == MatchingMode::Check {
self.pattern_match_const(current_else, current, c, cond_place, pattern)?
} else {
(current, current_else)
}
}
_ => not_supported!("expression path literal"),
},
Pat::Bind { id, subpat } => {
if let Some(subpat) = subpat {
(current, current_else) = self.pattern_match_inner(
current,
current_else,
                        cond_place.clone(),
*subpat,
mode,
)?
}
if mode == MatchingMode::Bind {
self.pattern_match_binding(
*id,
cond_place,
pattern.into(),
current,
current_else,
)?
} else {
(current, current_else)
}
}
Pat::TupleStruct { path: _, args, ellipsis } => {
let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
not_supported!("unresolved variant");
};
self.pattern_matching_variant(
cond_place,
variant,
current,
pattern.into(),
current_else,
AdtPatternShape::Tuple { args, ellipsis: *ellipsis },
mode,
)?
}
Pat::Ref { pat, mutability: _ } => self.pattern_match_inner(
current,
current_else,
cond_place.project(ProjectionElem::Deref),
*pat,
mode,
)?,
Pat::Box { .. } => not_supported!("box pattern"),
Pat::ConstBlock(_) => not_supported!("const block pattern"),
})
}
fn pattern_match_binding(
&mut self,
id: BindingId,
cond_place: Place,
span: MirSpan,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let target_place = self.binding_local(id)?;
let mode = self.infer.binding_modes[id];
self.push_storage_live(id, current)?;
self.push_assignment(
current,
target_place.into(),
match mode {
BindingMode::Move => Operand::Copy(cond_place).into(),
BindingMode::Ref(Mutability::Not) => Rvalue::Ref(BorrowKind::Shared, cond_place),
BindingMode::Ref(Mutability::Mut) => {
Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, cond_place)
}
},
span,
);
Ok((current, current_else))
}
fn pattern_match_const(
&mut self,
current_else: Option<BasicBlockId>,
current: BasicBlockId,
c: Operand,
cond_place: Place,
pattern: Idx<Pat>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let then_target = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
let discr: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
Rvalue::CheckedBinaryOp(BinOp::Eq, c, Operand::Copy(cond_place)),
pattern.into(),
);
let discr = Operand::Copy(discr);
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, then_target, else_target),
},
pattern.into(),
);
Ok((then_target, Some(else_target)))
}
fn pattern_matching_variant(
&mut self,
cond_place: Place,
variant: VariantId,
mut current: BasicBlockId,
span: MirSpan,
mut current_else: Option<BasicBlockId>,
shape: AdtPatternShape<'_>,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
Ok(match variant {
VariantId::EnumVariantId(v) => {
if mode == MatchingMode::Check {
let e = self.const_eval_discriminant(v)? as u128;
let tmp = self.discr_temp_place(current);
self.push_assignment(
current,
tmp.clone(),
Rvalue::Discriminant(cond_place.clone()),
span,
);
let next = self.new_basic_block();
let else_target = current_else.get_or_insert_with(|| self.new_basic_block());
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: Operand::Copy(tmp),
targets: SwitchTargets::static_if(e, next, *else_target),
},
span,
);
current = next;
}
let enum_data = self.db.enum_data(v.parent);
self.pattern_matching_variant_fields(
shape,
&enum_data.variants[v.local_id].variant_data,
variant,
current,
current_else,
&cond_place,
mode,
)?
}
VariantId::StructId(s) => {
let struct_data = self.db.struct_data(s);
self.pattern_matching_variant_fields(
shape,
&struct_data.variant_data,
variant,
current,
current_else,
&cond_place,
mode,
)?
}
VariantId::UnionId(_) => {
return Err(MirLowerError::TypeError("pattern matching on union"))
}
})
}
fn pattern_matching_variant_fields(
&mut self,
shape: AdtPatternShape<'_>,
variant_data: &VariantData,
v: VariantId,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: &Place,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
Ok(match shape {
AdtPatternShape::Record { args } => {
let it = args
.iter()
.map(|x| {
let field_id =
variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?;
Ok((
PlaceElem::Field(FieldId { parent: v.into(), local_id: field_id }),
x.pat,
))
})
.collect::<Result<Vec<_>>>()?;
self.pattern_match_adt(current, current_else, it.into_iter(), cond_place, mode)?
}
AdtPatternShape::Tuple { args, ellipsis } => {
let fields = variant_data
.fields()
.iter()
.map(|(x, _)| PlaceElem::Field(FieldId { parent: v.into(), local_id: x }));
self.pattern_match_tuple_like(
current,
current_else,
args,
ellipsis,
fields,
cond_place,
mode,
)?
}
AdtPatternShape::Unit => (current, current_else),
})
}
fn pattern_match_adt(
&mut self,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
args: impl Iterator<Item = (PlaceElem, PatId)>,
cond_place: &Place,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
for (proj, arg) in args {
let cond_place = cond_place.project(proj);
(current, current_else) =
self.pattern_match_inner(current, current_else, cond_place, arg, mode)?;
}
Ok((current, current_else))
}
fn pattern_match_tuple_like(
&mut self,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
args: &[PatId],
ellipsis: Option<usize>,
fields: impl DoubleEndedIterator<Item = PlaceElem> + Clone,
cond_place: &Place,
mode: MatchingMode,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len()));
let it = al
.iter()
.zip(fields.clone())
.chain(ar.iter().rev().zip(fields.rev()))
.map(|(x, y)| (y, *x));
self.pattern_match_adt(current, current_else, it, cond_place, mode)
}
}

View file

@@ -0,0 +1,351 @@
//! Monomorphization of mir, which is used in mir interpreter and const eval.
//!
//! The job of monomorphization is:
//! * Monomorphization proper. That is, replacing `Option<T>` with `Option<i32>` where the
//!   `T := i32` substitution is provided.
//! * Normalizing types, for example replacing the RPIT (return-position `impl Trait`) of other
//!   functions called in this body.
//!
//! So monomorphization should be run even if the substitution is empty.
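//!
//! For example (an illustrative source-level sketch, not an API of this module):
//!
//! ```
//! fn first<T>(v: Vec<T>) -> Option<T> { v.into_iter().next() }
//! // With the substitution `T := i32`, the monomorphized body behaves as if it
//! // had been written as:
//! fn first_i32(v: Vec<i32>) -> Option<i32> { v.into_iter().next() }
//! ```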
use std::mem;
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
};
use hir_def::{DefWithBodyId, GeneralConstId};
use triomphe::Arc;
use crate::{
consteval::unknown_const,
db::HirDatabase,
from_placeholder_idx,
infer::normalize,
method_resolution::lookup_impl_const,
utils::{generics, Generics},
ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
};
use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};
macro_rules! not_supported {
($x: expr) => {
return Err(MirLowerError::NotSupported(format!($x)))
};
}
struct Filler<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
subst: &'a Substitution,
generics: Option<Generics>,
owner: DefWithBodyId,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = MirLowerError;
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_ty(
&mut self,
ty: Ty,
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
match ty.kind(Interner) {
TyKind::AssociatedType(id, subst) => {
                // I don't know exactly whether and why this is needed, but it looks like
                // `normalize_ty` likes this kind of associated type.
Ok(TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
associated_ty_id: *id,
substitution: subst.clone().try_fold_with(self, outer_binder)?,
}))
.intern(Interner))
}
TyKind::OpaqueType(id, subst) => {
let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into());
let subst = subst.clone().try_fold_with(self.as_dyn(), outer_binder)?;
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = self.db.infer(func.into());
let filler = &mut Filler {
db: self.db,
owner: self.owner,
trait_env: self.trait_env.clone(),
subst: &subst,
generics: Some(generics(self.db.upcast(), func.into())),
};
filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
not_supported!("async block impl trait");
}
}
}
_ => ty.try_super_fold_with(self.as_dyn(), outer_binder),
}
}
fn try_fold_free_placeholder_const(
&mut self,
_ty: chalk_ir::Ty<Interner>,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
.and_then(|x| x.constant(Interner))
.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
.clone())
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
.and_then(|x| x.ty(Interner))
.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
.clone())
}
fn try_fold_const(
&mut self,
constant: chalk_ir::Const<Interner>,
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let next_ty = normalize(
self.db,
self.trait_env.clone(),
constant.data(Interner).ty.clone().try_fold_with(self, outer_binder)?,
);
ConstData { ty: next_ty, value: constant.data(Interner).value.clone() }
.intern(Interner)
.try_super_fold_with(self, outer_binder)
}
}
impl Filler<'_> {
fn fill_ty(&mut self, ty: &mut Ty) -> Result<(), MirLowerError> {
let tmp = mem::replace(ty, TyKind::Error.intern(Interner));
*ty = normalize(
self.db,
self.trait_env.clone(),
tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?,
);
Ok(())
}
fn fill_const(&mut self, c: &mut Const) -> Result<(), MirLowerError> {
let tmp = mem::replace(c, unknown_const(c.data(Interner).ty.clone()));
*c = tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?;
Ok(())
}
fn fill_subst(&mut self, ty: &mut Substitution) -> Result<(), MirLowerError> {
let tmp = mem::replace(ty, Substitution::empty(Interner));
*ty = tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?;
Ok(())
}
fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> {
match op {
Operand::Constant(c) => {
match &c.data(Interner).value {
chalk_ir::ConstValue::BoundVar(b) => {
let resolved = self
.subst
.as_slice(Interner)
.get(b.index)
.ok_or_else(|| {
MirLowerError::GenericArgNotProvided(
self.generics
.as_ref()
.and_then(|x| x.iter().nth(b.index))
.unwrap()
.0,
self.subst.clone(),
)
})?
.assert_const_ref(Interner);
*c = resolved.clone();
}
chalk_ir::ConstValue::InferenceVar(_)
| chalk_ir::ConstValue::Placeholder(_) => {}
chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
crate::ConstScalar::UnevaluatedConst(const_id, subst) => {
let mut const_id = *const_id;
let mut subst = subst.clone();
self.fill_subst(&mut subst)?;
if let GeneralConstId::ConstId(c) = const_id {
let (c, s) = lookup_impl_const(
self.db,
self.db.trait_environment_for_body(self.owner),
c,
subst,
);
const_id = GeneralConstId::ConstId(c);
subst = s;
}
let result =
self.db.const_eval(const_id.into(), subst).map_err(|e| {
let name = const_id.name(self.db.upcast());
MirLowerError::ConstEvalError(name, Box::new(e))
})?;
*c = result;
}
crate::ConstScalar::Bytes(_, _) | crate::ConstScalar::Unknown => (),
},
}
self.fill_const(c)?;
}
Operand::Copy(_) | Operand::Move(_) | Operand::Static(_) => (),
}
Ok(())
}
fn fill_body(&mut self, body: &mut MirBody) -> Result<(), MirLowerError> {
for (_, l) in body.locals.iter_mut() {
self.fill_ty(&mut l.ty)?;
}
for (_, bb) in body.basic_blocks.iter_mut() {
for statement in &mut bb.statements {
match &mut statement.kind {
StatementKind::Assign(_, r) => match r {
Rvalue::Aggregate(ak, ops) => {
for op in &mut **ops {
self.fill_operand(op)?;
}
match ak {
super::AggregateKind::Array(ty)
| super::AggregateKind::Tuple(ty)
| super::AggregateKind::Closure(ty) => self.fill_ty(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill_subst(subst)?,
super::AggregateKind::Union(_, _) => (),
}
}
Rvalue::ShallowInitBox(_, ty) | Rvalue::ShallowInitBoxWithAlloc(ty) => {
self.fill_ty(ty)?;
}
Rvalue::Use(op) => {
self.fill_operand(op)?;
}
Rvalue::Repeat(op, len) => {
self.fill_operand(op)?;
self.fill_const(len)?;
}
Rvalue::Ref(_, _)
| Rvalue::Len(_)
| Rvalue::Cast(_, _, _)
| Rvalue::CheckedBinaryOp(_, _, _)
| Rvalue::UnaryOp(_, _)
| Rvalue::Discriminant(_)
| Rvalue::CopyForDeref(_) => (),
},
StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
}
if let Some(terminator) = &mut bb.terminator {
match &mut terminator.kind {
TerminatorKind::Call { func, args, .. } => {
self.fill_operand(func)?;
for op in &mut **args {
self.fill_operand(op)?;
}
}
TerminatorKind::SwitchInt { discr, .. } => {
self.fill_operand(discr)?;
}
TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => (),
}
}
}
Ok(())
}
}
pub fn monomorphized_mir_body_query(
db: &dyn HirDatabase,
owner: DefWithBodyId,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
let body = db.mir_body(owner)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;
Ok(Arc::new(body))
}
pub fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
_: &[String],
_: &DefWithBodyId,
_: &Substitution,
_: &Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
    Err(MirLowerError::Loop)
}
pub fn monomorphized_mir_body_for_closure_query(
db: &dyn HirDatabase,
closure: ClosureId,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
let (owner, _) = db.lookup_intern_closure(closure.into());
let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
let body = db.mir_body_for_closure(closure)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;
Ok(Arc::new(body))
}
// FIXME: remove this function. Monomorphization is a time-consuming job and should always be a query.
pub fn monomorphize_mir_body_bad(
db: &dyn HirDatabase,
mut body: MirBody,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<MirBody, MirLowerError> {
let owner = body.owner;
let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
filler.fill_body(&mut body)?;
Ok(body)
}

View file

@@ -1,39 +1,25 @@
//! A pretty-printer for MIR.
use std::fmt::{Display, Write};
use std::{
fmt::{Debug, Display, Write},
mem,
};
use hir_def::{body::Body, expr::BindingId};
use hir_def::{body::Body, hir::BindingId};
use hir_expand::name::Name;
use la_arena::ArenaMap;
use crate::{
db::HirDatabase,
display::HirDisplay,
mir::{PlaceElem, ProjectionElem, StatementKind, Terminator},
display::{ClosureStyle, HirDisplay},
mir::{PlaceElem, ProjectionElem, StatementKind, TerminatorKind},
ClosureId,
};
use super::{
AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, Operand, Place, Rvalue, UnOp,
};
impl MirBody {
pub fn pretty_print(&self, db: &dyn HirDatabase) -> String {
let hir_body = db.body(self.owner);
let mut ctx = MirPrettyCtx::new(self, &hir_body, db);
ctx.for_body();
ctx.result
}
}
struct MirPrettyCtx<'a> {
body: &'a MirBody,
hir_body: &'a Body,
db: &'a dyn HirDatabase,
result: String,
ident: String,
local_to_binding: ArenaMap<LocalId, BindingId>,
}
macro_rules! w {
($dst:expr, $($arg:tt)*) => {
{ let _ = write!($dst, $($arg)*); }
@@ -49,6 +35,57 @@ macro_rules! wln {
};
}
impl MirBody {
pub fn pretty_print(&self, db: &dyn HirDatabase) -> String {
let hir_body = db.body(self.owner);
let mut ctx = MirPrettyCtx::new(self, &hir_body, db);
        ctx.for_body(|this| match this.body.owner {
hir_def::DefWithBodyId::FunctionId(id) => {
let data = db.function_data(id);
w!(this, "fn {}() ", data.name.display(db.upcast()));
}
hir_def::DefWithBodyId::StaticId(id) => {
let data = db.static_data(id);
w!(this, "static {}: _ = ", data.name.display(db.upcast()));
}
hir_def::DefWithBodyId::ConstId(id) => {
let data = db.const_data(id);
w!(
this,
"const {}: _ = ",
data.name.as_ref().unwrap_or(&Name::missing()).display(db.upcast())
);
}
hir_def::DefWithBodyId::VariantId(id) => {
let data = db.enum_data(id.parent);
w!(this, "enum {} = ", data.name.display(db.upcast()));
}
});
ctx.result
}
    // Strings with multiple lines render poorly in `dbg!` output, which I use a lot, so this
    // function exists to work around that.
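    // Illustrative usage: `dbg!(body.dbg(db))` prints the MIR with real line breaks
    // instead of a single line of escaped `\n`s.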
pub fn dbg(&self, db: &dyn HirDatabase) -> impl Debug {
struct StringDbg(String);
impl Debug for StringDbg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
StringDbg(self.pretty_print(db))
}
}
struct MirPrettyCtx<'a> {
body: &'a MirBody,
hir_body: &'a Body,
db: &'a dyn HirDatabase,
result: String,
indent: String,
local_to_binding: ArenaMap<LocalId, BindingId>,
}
impl Write for MirPrettyCtx<'_> {
fn write_str(&mut self, s: &str) -> std::fmt::Result {
        let mut it = s.split('\n'); // note: `.lines()` is wrong here, as it swallows a trailing newline
@@ -66,31 +103,62 @@ enum LocalName {
Binding(Name, LocalId),
}
impl Display for LocalName {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
impl HirDisplay for LocalName {
fn hir_fmt(
&self,
f: &mut crate::display::HirFormatter<'_>,
) -> Result<(), crate::display::HirDisplayError> {
match self {
LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
LocalName::Binding(n, l) => write!(f, "{n}_{}", u32::from(l.into_raw())),
LocalName::Binding(n, l) => {
write!(f, "{}_{}", n.display(f.db.upcast()), u32::from(l.into_raw()))
}
}
}
}
impl<'a> MirPrettyCtx<'a> {
fn for_body(&mut self) {
fn for_body(&mut self, name: impl FnOnce(&mut MirPrettyCtx<'_>)) {
name(self);
self.with_block(|this| {
this.locals();
wln!(this);
this.blocks();
});
for &closure in &self.body.closures {
self.for_closure(closure);
}
}
fn for_closure(&mut self, closure: ClosureId) {
let body = match self.db.mir_body_for_closure(closure) {
Ok(x) => x,
Err(e) => {
wln!(self, "// error in {closure:?}: {e:?}");
return;
}
};
let result = mem::take(&mut self.result);
let indent = mem::take(&mut self.indent);
let mut ctx = MirPrettyCtx {
body: &body,
local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(),
result,
indent,
..*self
};
ctx.for_body(|this| wln!(this, "// Closure: {:?}", closure));
self.result = ctx.result;
self.indent = ctx.indent;
}
fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_>)) {
self.ident += " ";
self.indent += " ";
wln!(self, "{{");
f(self);
for _ in 0..4 {
self.result.pop();
self.ident.pop();
self.indent.pop();
}
wln!(self, "}}");
}
@@ -101,7 +169,7 @@ impl<'a> MirPrettyCtx<'a> {
body,
db,
result: String::new(),
ident: String::new(),
indent: String::new(),
local_to_binding,
hir_body,
}
@@ -109,7 +177,7 @@ impl<'a> MirPrettyCtx<'a> {
fn write_line(&mut self) {
self.result.push('\n');
self.result += &self.ident;
self.result += &self.indent;
}
fn write(&mut self, line: &str) {
@@ -118,7 +186,12 @@ impl<'a> MirPrettyCtx<'a> {
fn locals(&mut self) {
for (id, local) in self.body.locals.iter() {
wln!(self, "let {}: {};", self.local_name(id), local.ty.display(self.db));
wln!(
self,
"let {}: {};",
self.local_name(id).display(self.db),
self.hir_display(&local.ty)
);
}
}
@@ -147,10 +220,10 @@ impl<'a> MirPrettyCtx<'a> {
wln!(this, ";");
}
StatementKind::StorageDead(p) => {
wln!(this, "StorageDead({})", this.local_name(*p));
                        wln!(this, "StorageDead({})", this.local_name(*p).display(this.db));
}
StatementKind::StorageLive(p) => {
wln!(this, "StorageLive({})", this.local_name(*p));
                        wln!(this, "StorageLive({})", this.local_name(*p).display(this.db));
}
StatementKind::Deinit(p) => {
w!(this, "Deinit(");
@@ -161,11 +234,11 @@ impl<'a> MirPrettyCtx<'a> {
}
}
match &block.terminator {
Some(terminator) => match terminator {
Terminator::Goto { target } => {
Some(terminator) => match &terminator.kind {
TerminatorKind::Goto { target } => {
wln!(this, "goto 'bb{};", u32::from(target.into_raw()))
}
Terminator::SwitchInt { discr, targets } => {
TerminatorKind::SwitchInt { discr, targets } => {
w!(this, "switch ");
this.operand(discr);
w!(this, " ");
@@ -176,7 +249,7 @@ impl<'a> MirPrettyCtx<'a> {
wln!(this, "_ => {},", this.basic_block_id(targets.otherwise()));
});
}
Terminator::Call { func, args, destination, target, .. } => {
TerminatorKind::Call { func, args, destination, target, .. } => {
w!(this, "Call ");
this.with_block(|this| {
w!(this, "func: ");
@@ -208,7 +281,7 @@ impl<'a> MirPrettyCtx<'a> {
fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
let Some((last, head)) = projections.split_last() else {
// no projection
w!(this, "{}", this.local_name(local));
w!(this, "{}", this.local_name(local).display(this.db));
return;
};
match last {
@@ -226,21 +299,26 @@ impl<'a> MirPrettyCtx<'a> {
f(this, local, head);
let variant_name =
&this.db.enum_data(e.parent).variants[e.local_id].name;
w!(this, " as {}).{}", variant_name, name);
w!(
this,
" as {}).{}",
variant_name.display(this.db.upcast()),
name.display(this.db.upcast())
);
}
hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
f(this, local, head);
w!(this, ".{name}");
w!(this, ".{}", name.display(this.db.upcast()));
}
}
}
ProjectionElem::TupleField(x) => {
ProjectionElem::TupleOrClosureField(x) => {
f(this, local, head);
w!(this, ".{}", x);
}
ProjectionElem::Index(l) => {
f(this, local, head);
w!(this, "[{}]", this.local_name(*l));
w!(this, "[{}]", this.local_name(*l).display(this.db));
}
x => {
f(this, local, head);
@@ -258,7 +336,8 @@ impl<'a> MirPrettyCtx<'a> {
// equally. Feel free to change it.
self.place(p);
}
Operand::Constant(c) => w!(self, "Const({})", c.display(self.db)),
Operand::Constant(c) => w!(self, "Const({})", self.hir_display(c)),
Operand::Static(s) => w!(self, "Static({:?})", s),
}
}
@@ -284,11 +363,21 @@ impl<'a> MirPrettyCtx<'a> {
self.operand_list(x);
w!(self, "]");
}
Rvalue::Repeat(op, len) => {
w!(self, "[");
self.operand(op);
w!(self, "; {}]", len.display(self.db));
}
Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
w!(self, "Adt(");
self.operand_list(x);
w!(self, ")");
}
Rvalue::Aggregate(AggregateKind::Closure(_), x) => {
w!(self, "Closure(");
self.operand_list(x);
w!(self, ")");
}
Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
w!(self, "Union(");
self.operand_list(x);
@@ -300,9 +389,9 @@ impl<'a> MirPrettyCtx<'a> {
w!(self, ")");
}
Rvalue::Cast(ck, op, ty) => {
w!(self, "Discriminant({ck:?}");
w!(self, "Cast({ck:?}, ");
self.operand(op);
w!(self, "{})", ty.display(self.db));
w!(self, ", {})", self.hir_display(ty));
}
Rvalue::CheckedBinaryOp(b, o1, o2) => {
self.operand(o1);
@@ -322,6 +411,7 @@ impl<'a> MirPrettyCtx<'a> {
self.place(p);
w!(self, ")");
}
Rvalue::ShallowInitBoxWithAlloc(_) => w!(self, "ShallowInitBoxWithAlloc"),
Rvalue::ShallowInitBox(op, _) => {
w!(self, "ShallowInitBox(");
self.operand(op);
@@ -345,4 +435,8 @@ impl<'a> MirPrettyCtx<'a> {
}
}
}
fn hir_display<T: HirDisplay>(&self, ty: &'a T) -> impl Display + 'a {
ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithSubst)
}
}

View file

@@ -1,18 +1,18 @@
//! Database used for testing `hir`.
use std::{
fmt, panic,
sync::{Arc, Mutex},
};
use std::{fmt, panic, sync::Mutex};
use base_db::{
salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
salsa::{self, Durability},
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::ExpandDatabase;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use nohash_hasher::IntMap;
use rustc_hash::FxHashSet;
use syntax::TextRange;
use test_utils::extract_annotations;
use triomphe::Arc;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
@@ -30,7 +30,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.set_enable_proc_attr_macros(true);
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
}
@@ -74,13 +74,13 @@ impl salsa::ParallelDatabase for TestDB {
impl panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn file_text(&self, file_id: FileId) -> Arc<String> {
fn file_text(&self, file_id: FileId) -> Arc<str> {
FileLoaderDelegate(self).file_text(file_id)
}
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}
@@ -102,7 +102,7 @@ impl TestDB {
self.module_for_file_opt(file_id).unwrap()
}
pub(crate) fn extract_annotations(&self) -> NoHashHashMap<FileId, Vec<(TextRange, String)>> {
pub(crate) fn extract_annotations(&self) -> IntMap<FileId, Vec<(TextRange, String)>> {
let mut files = Vec::new();
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {

View file

@@ -10,14 +10,14 @@ mod display_source_code;
mod incremental;
mod diagnostics;
use std::{collections::HashMap, env, sync::Arc};
use std::{collections::HashMap, env};
use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt};
use expect_test::Expect;
use hir_def::{
body::{Body, BodySourceMap, SyntheticSyntax},
db::{DefDatabase, InternDatabase},
expr::{ExprId, PatId},
hir::{ExprId, Pat, PatId},
item_scope::ItemScope,
nameres::DefMap,
src::HasSource,
@@ -32,6 +32,7 @@ use syntax::{
};
use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
use tracing_tree::HierarchicalLayer;
use triomphe::Arc;
use crate::{
db::HirDatabase,
@@ -148,10 +149,13 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
});
let mut unexpected_type_mismatches = String::new();
for def in defs {
let (_body, body_source_map) = db.body_with_source_map(def);
let (body, body_source_map) = db.body_with_source_map(def);
let inference_result = db.infer(def);
for (pat, ty) in inference_result.type_of_pat.iter() {
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body.pats[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match pat_node(&body_source_map, pat, &db) {
Some(value) => value,
None => continue,
@@ -159,7 +163,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let range = node.as_ref().original_file_range(&db);
if let Some(expected) = types.remove(&range) {
let actual = if display_source {
ty.display_source_code(&db, def.module(&db)).unwrap()
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db).to_string()
};
@@ -175,7 +179,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let range = node.as_ref().original_file_range(&db);
if let Some(expected) = types.remove(&range) {
let actual = if display_source {
ty.display_source_code(&db, def.module(&db)).unwrap()
ty.display_source_code(&db, def.module(&db), true).unwrap()
} else {
ty.display_test(&db).to_string()
};
@@ -198,8 +202,8 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
for (expr_or_pat, mismatch) in inference_result.type_mismatches() {
let Some(node) = (match expr_or_pat {
hir_def::expr::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
hir_def::expr::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
hir_def::hir::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
hir_def::hir::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
}) else { continue; };
let range = node.as_ref().original_file_range(&db);
let actual = format!(
@@ -246,7 +250,7 @@ fn expr_node(
) -> Option<InFile<SyntaxNode>> {
Some(match body_source_map.expr_syntax(expr) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => return None,
@@ -260,7 +264,7 @@ fn pat_node(
) -> Option<InFile<SyntaxNode>> {
Some(match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| {
ptr.either(
|it| it.to_node(&root).syntax().clone(),
@@ -283,14 +287,18 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let mut buf = String::new();
let mut infer_def = |inference_result: Arc<InferenceResult>,
body: Arc<Body>,
body_source_map: Arc<BodySourceMap>| {
let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
for (pat, ty) in inference_result.type_of_pat.iter() {
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body.pats[pat] {
ty = &inference_result.type_of_binding[id];
}
let syntax_ptr = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| {
ptr.either(
|it| it.to_node(&root).syntax().clone(),
@@ -309,7 +317,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
for (expr, ty) in inference_result.type_of_expr.iter() {
let node = match body_source_map.expr_syntax(expr) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
@@ -385,9 +393,9 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
});
for def in defs {
let (_body, source_map) = db.body_with_source_map(def);
let (body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
infer_def(infer, source_map);
infer_def(infer, body, source_map);
}
buf.truncate(buf.trim_end().len());
@@ -572,10 +580,9 @@ fn salsa_bug() {
let x = 1;
x.push(1);
}
"
.to_string();
";
db.set_file_text(pos.file_id, Arc::new(new_text));
db.set_file_text(pos.file_id, Arc::from(new_text));
let module = db.module_for_file(pos.file_id);
let crate_def_map = module.def_map(&db);

View file

@@ -258,7 +258,6 @@ fn test() {
#[test]
fn coerce_autoderef_block() {
// FIXME: We should know mutability in overloaded deref
check_no_mismatches(
r#"
//- minicore: deref
@@ -268,7 +267,7 @@ fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(None))), Borrow(Ref(Not))
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
}
"#,
);
@@ -396,10 +395,40 @@ fn test() {
);
}
#[test]
fn coerce_fn_item_to_fn_ptr_in_array() {
check_no_mismatches(
r"
fn foo(x: u32) -> isize { 1 }
fn bar(x: u32) -> isize { 1 }
fn test() {
let f = [foo, bar];
// ^^^ adjustments: Pointer(ReifyFnPointer)
}",
);
}
#[test]
fn coerce_fn_items_in_match_arms() {
cov_mark::check!(coerce_fn_reification);
check_no_mismatches(
r"
fn foo1(x: u32) -> isize { 1 }
fn foo2(x: u32) -> isize { 2 }
fn foo3(x: u32) -> isize { 3 }
fn test() {
let x = match 1 {
1 => foo1,
// ^^^^ adjustments: Pointer(ReifyFnPointer)
2 => foo2,
// ^^^^ adjustments: Pointer(ReifyFnPointer)
_ => foo3,
// ^^^^ adjustments: Pointer(ReifyFnPointer)
};
x;
}",
);
check_types(
r"
fn foo1(x: u32) -> isize { 1 }
@@ -507,7 +536,6 @@ fn test() {
#[test]
fn coerce_unsize_generic() {
// FIXME: fix the type mismatches here
check(
r#"
//- minicore: coerce_unsized
@@ -516,9 +544,9 @@ struct Bar<T>(Foo<T>);
fn test() {
let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
//^^^^^^^^^ expected [usize], got [usize; 3]
//^^^^^^^^^^^^^^^^^^^^^ expected &Foo<[usize]>, got &Foo<[i32; 3]>
let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
//^^^^^^^^^ expected [usize], got [usize; 3]
//^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &Bar<[usize]>, got &Bar<[i32; 3]>
}
"#,
);
@@ -547,7 +575,7 @@ fn two_closures_lub() {
fn foo(c: i32) {
let add = |a: i32, b: i32| a + b;
let sub = |a, b| a - b;
//^^^^^^^^^^^^ |i32, i32| -> i32
//^^^^^^^^^^^^ impl Fn(i32, i32) -> i32
if c > 42 { add } else { sub };
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ fn(i32, i32) -> i32
}
@@ -842,3 +870,74 @@ fn test() {
}",
);
}
#[test]
fn adjust_index() {
check_no_mismatches(
r"
//- minicore: index, slice, coerce_unsized
fn test() {
let x = [1, 2, 3];
x[2] = 6;
// ^ adjustments: Borrow(Ref(Mut))
}
",
);
check_no_mismatches(
r"
//- minicore: index
struct Struct;
impl core::ops::Index<usize> for Struct {
type Output = ();
fn index(&self, index: usize) -> &Self::Output { &() }
}
struct StructMut;
impl core::ops::Index<usize> for StructMut {
type Output = ();
fn index(&self, index: usize) -> &Self::Output { &() }
}
impl core::ops::IndexMut<usize> for StructMut {
fn index_mut(&mut self, index: usize) -> &mut Self::Output { &mut () }
}
fn test() {
Struct[0];
// ^^^^^^ adjustments: Borrow(Ref(Not))
StructMut[0];
// ^^^^^^^^^ adjustments: Borrow(Ref(Not))
&mut StructMut[0];
// ^^^^^^^^^ adjustments: Borrow(Ref(Mut))
}",
);
}
#[test]
fn regression_14443_dyn_coercion_block_impls() {
check_no_mismatches(
r#"
//- minicore: coerce_unsized
trait T {}
fn dyn_t(d: &dyn T) {}
fn main() {
struct A;
impl T for A {}
let a = A;
let b = {
struct B;
impl T for B {}
B
};
dyn_t(&a);
dyn_t(&b);
}
"#,
)
}

View file

@@ -1,6 +1,5 @@
use std::sync::Arc;
use base_db::{fixture::WithFixture, SourceDatabaseExt};
use triomphe::Arc;
use crate::{db::HirDatabase, test_db::TestDB};
@@ -33,10 +32,9 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
+
1
}
"
.to_string();
";
db.set_file_text(pos.file_id, Arc::new(new_text));
db.set_file_text(pos.file_id, Arc::from(new_text));
{
let events = db.log_executed(|| {

View file

@@ -140,6 +140,7 @@ fn infer_path_qualified_macros_expanded() {
fn expr_macro_def_expanded_in_various_places() {
check_infer(
r#"
//- minicore: iterator
macro spam() {
1isize
}
@@ -195,10 +196,19 @@ fn expr_macro_def_expanded_in_various_places() {
!0..6 '1isize': isize
39..442 '{ ...!(); }': ()
73..94 'spam!(...am!())': {unknown}
100..119 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': !
100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
100..119 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
100..119 'for _ ...!() {}': ()
104..105 '_': {unknown}
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
104..105 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
117..119 '{}': ()
124..134 '|| spam!()': || -> isize
124..134 '|| spam!()': impl Fn() -> isize
140..156 'while ...!() {}': ()
154..156 '{}': ()
161..174 'break spam!()': !
@@ -221,6 +231,7 @@ fn expr_macro_def_expanded_in_various_places() {
fn expr_macro_rules_expanded_in_various_places() {
check_infer(
r#"
//- minicore: iterator
macro_rules! spam {
() => (1isize);
}
@@ -276,10 +287,19 @@ fn expr_macro_rules_expanded_in_various_places() {
!0..6 '1isize': isize
53..456 '{ ...!(); }': ()
87..108 'spam!(...am!())': {unknown}
114..133 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': !
114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
114..133 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
114..133 'for _ ...!() {}': ()
118..119 '_': {unknown}
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
118..119 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
131..133 '{}': ()
138..148 '|| spam!()': || -> isize
138..148 '|| spam!()': impl Fn() -> isize
154..170 'while ...!() {}': ()
168..170 '{}': ()
175..188 'break spam!()': !
@@ -661,8 +681,9 @@ fn infer_builtin_macros_line() {
"#,
expect![[r#"
!0..1 '0': i32
!0..6 '0asu32': u32
63..87 '{ ...!(); }': ()
73..74 'x': i32
73..74 'x': u32
"#]],
);
}
@@ -699,8 +720,9 @@ fn infer_builtin_macros_column() {
"#,
expect![[r#"
!0..1 '0': i32
!0..6 '0asu32': u32
65..91 '{ ...!(); }': ()
75..76 'x': i32
75..76 'x': u32
"#]],
);
}
@@ -945,7 +967,7 @@ fn infer_builtin_macros_concat_with_lazy() {
#[test]
fn infer_builtin_macros_env() {
check_infer(
check_types(
r#"
//- /main.rs env:foo=bar
#[rustc_builtin_macro]
@@ -953,13 +975,26 @@ fn main() {
fn main() {
let x = env!("foo");
//^ &str
}
"#,
);
}
#[test]
fn infer_builtin_macros_option_env() {
check_types(
r#"
//- minicore: option
//- /main.rs env:foo=bar
#[rustc_builtin_macro]
macro_rules! option_env {() => {}}
fn main() {
let x = option_env!("foo");
//^ Option<&str>
}
"#,
expect![[r#"
!0..22 '"__RA_...TED__"': &str
62..90 '{ ...o"); }': ()
72..73 'x': &str
"#]],
);
}

View file

@@ -388,6 +388,24 @@ mod bar_test {
);
}
#[test]
fn infer_trait_method_multiple_mutable_reference() {
check_types(
r#"
trait Trait {
fn method(&mut self) -> i32 { 5 }
}
struct S;
impl Trait for &mut &mut S {}
fn test() {
let s = &mut &mut &mut S;
s.method();
//^^^^^^^^^^ i32
}
"#,
);
}
#[test]
fn infer_trait_method_generic_1() {
// the trait implementation is intentionally incomplete -- it shouldn't matter
@@ -1255,7 +1273,6 @@ fn foo<T: Trait>(a: &T) {
#[test]
fn autoderef_visibility_field() {
// FIXME: We should know mutability in overloaded deref
check(
r#"
//- minicore: deref
@@ -1277,7 +1294,7 @@ mod a {
mod b {
fn foo() {
let x = super::a::Bar::new().0;
// ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(None)))
// ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not))))
// ^^^^^^^^^^^^^^^^^^^^^^ type: char
}
}
@@ -1723,7 +1740,7 @@ fn test() {
Foo.foo();
//^^^ adjustments: Borrow(Ref(Not))
(&Foo).foo();
// ^^^^ adjustments: ,
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not))
}
"#,
);
@@ -1922,3 +1939,54 @@ fn foo() {
"#,
);
}
#[test]
fn box_deref_is_builtin() {
check(
r#"
//- minicore: deref
use core::ops::Deref;
#[lang = "owned_box"]
struct Box<T>(*mut T);
impl<T> Box<T> {
fn new(t: T) -> Self {
loop {}
}
}
impl<T> Deref for Box<T> {
type Target = T;
fn deref(&self) -> &Self::Target;
}
struct Foo;
impl Foo {
fn foo(&self) {}
}
fn test() {
Box::new(Foo).foo();
//^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref(Not))
}
"#,
);
}
#[test]
fn manually_drop_deref_is_not_builtin() {
check(
r#"
//- minicore: manually_drop, deref
struct Foo;
impl Foo {
fn foo(&self) {}
}
use core::mem::ManuallyDrop;
fn test() {
ManuallyDrop::new(Foo).foo();
//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
}
"#,
);
}

View file

@@ -327,6 +327,7 @@ fn diverging_expression_2() {
fn diverging_expression_3_break() {
check_infer_with_mismatches(
r"
//- minicore: iterator
//- /main.rs
fn test1() {
// should give type mismatch
@@ -360,6 +361,15 @@ fn diverging_expression_3_break() {
97..343 '{ ...; }; }': ()
140..141 'x': u32
149..175 '{ for ...; }; }': u32
151..172 'for a ...eak; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
151..172 'for a ...eak; }': {unknown}
151..172 'for a ...eak; }': !
151..172 'for a ...eak; }': {unknown}
151..172 'for a ...eak; }': &mut {unknown}
151..172 'for a ...eak; }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
151..172 'for a ...eak; }': Option<{unknown}>
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
155..156 'a': {unknown}
160..161 'b': {unknown}
@@ -367,12 +377,30 @@ fn diverging_expression_3_break() {
164..169 'break': !
226..227 'x': u32
235..253 '{ for ... {}; }': u32
237..250 'for a in b {}': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
237..250 'for a in b {}': {unknown}
237..250 'for a in b {}': !
237..250 'for a in b {}': {unknown}
237..250 'for a in b {}': &mut {unknown}
237..250 'for a in b {}': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
237..250 'for a in b {}': Option<{unknown}>
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
241..242 'a': {unknown}
246..247 'b': {unknown}
248..250 '{}': ()
304..305 'x': u32
313..340 '{ for ...; }; }': u32
315..337 'for a ...urn; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
315..337 'for a ...urn; }': {unknown}
315..337 'for a ...urn; }': !
315..337 'for a ...urn; }': {unknown}
315..337 'for a ...urn; }': &mut {unknown}
315..337 'for a ...urn; }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
315..337 'for a ...urn; }': Option<{unknown}>
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
319..320 'a': {unknown}
324..325 'b': {unknown}
@@ -483,3 +511,22 @@ fn example() -> bool {
"#,
);
}
#[test]
fn reservation_impl_should_be_ignored() {
// See rust-lang/rust#64631.
check_types(
r#"
//- minicore: from
struct S;
#[rustc_reservation_impl]
impl<T> From<!> for T {}
fn foo<T, U: From<T>>(_: U) -> T { loop {} }
fn test() {
let s = foo(S);
//^ S
}
"#,
);
}

View file

@@ -1,11 +1,12 @@
use expect_test::expect;
use super::{check, check_infer, check_infer_with_mismatches, check_types};
use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches, check_types};
#[test]
fn infer_pattern() {
check_infer(
r#"
//- minicore: iterator
fn test(x: &i32) {
let y = x;
let &z = x;
@@ -46,6 +47,15 @@ fn infer_pattern() {
82..94 '(1, "hello")': (i32, &str)
83..84 '1': i32
86..93 '"hello"': &str
101..151 'for (e... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
101..151 'for (e... }': {unknown}
101..151 'for (e... }': !
101..151 'for (e... }': {unknown}
101..151 'for (e... }': &mut {unknown}
101..151 'for (e... }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
101..151 'for (e... }': Option<({unknown}, {unknown})>
101..151 'for (e... }': ()
101..151 'for (e... }': ()
101..151 'for (e... }': ()
105..111 '(e, f)': ({unknown}, {unknown})
106..107 'e': {unknown}
@@ -70,8 +80,8 @@ fn infer_pattern() {
228..233 '&true': &bool
229..233 'true': bool
234..236 '{}': ()
246..252 'lambda': |u64, u64, i32| -> i32
255..287 '|a: u6...b; c }': |u64, u64, i32| -> i32
246..252 'lambda': impl Fn(u64, u64, i32) -> i32
255..287 '|a: u6...b; c }': impl Fn(u64, u64, i32) -> i32
256..257 'a': u64
264..265 'b': u64
267..268 'c': i32
@@ -240,6 +250,21 @@ fn infer_pattern_match_ergonomics_ref() {
);
}
#[test]
fn ref_pat_with_inference_variable() {
check_no_mismatches(
r#"
enum E { A }
fn test() {
let f = |e| match e {
&E::A => {}
};
f(&E::A);
}
"#,
);
}
#[test]
fn infer_pattern_match_slice() {
check_infer(
@@ -476,7 +501,7 @@ fn infer_adt_pattern() {
183..184 'x': usize
190..191 'x': usize
201..205 'E::B': E
209..212 'foo': {unknown}
209..212 'foo': bool
216..217 '1': usize
227..231 'E::B': E
235..237 '10': usize
@@ -677,25 +702,25 @@ fn test() {
51..58 'loop {}': !
56..58 '{}': ()
72..171 '{ ... x); }': ()
78..81 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
78..81 'foo': fn foo<&(i32, &str), i32, impl Fn(&(i32, &str)) -> i32>(&(i32, &str), impl Fn(&(i32, &str)) -> i32) -> i32
78..105 'foo(&(...y)| x)': i32
82..91 '&(1, "a")': &(i32, &str)
83..91 '(1, "a")': (i32, &str)
84..85 '1': i32
87..90 '"a"': &str
93..104 '|&(x, y)| x': |&(i32, &str)| -> i32
93..104 '|&(x, y)| x': impl Fn(&(i32, &str)) -> i32
94..101 '&(x, y)': &(i32, &str)
95..101 '(x, y)': (i32, &str)
96..97 'x': i32
99..100 'y': &str
103..104 'x': i32
142..145 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
142..145 'foo': fn foo<&(i32, &str), &i32, impl Fn(&(i32, &str)) -> &i32>(&(i32, &str), impl Fn(&(i32, &str)) -> &i32) -> &i32
142..168 'foo(&(...y)| x)': &i32
146..155 '&(1, "a")': &(i32, &str)
147..155 '(1, "a")': (i32, &str)
148..149 '1': i32
151..154 '"a"': &str
157..167 '|(x, y)| x': |&(i32, &str)| -> &i32
157..167 '|(x, y)| x': impl Fn(&(i32, &str)) -> &i32
158..164 '(x, y)': (i32, &str)
159..160 'x': &i32
162..163 'y': &&str
@@ -1084,7 +1109,7 @@ fn var_args() {
#[lang = "va_list"]
pub struct VaListImpl<'f>;
fn my_fn(foo: ...) {}
//^^^ VaListImpl
//^^^ VaListImpl<'_>
"#,
);
}

View file

@@ -246,6 +246,7 @@ fn infer_std_crash_5() {
// taken from rustc
check_infer(
r#"
//- minicore: iterator
fn extra_compiler_flags() {
for content in doesnt_matter {
let name = if doesnt_matter {
@@ -264,13 +265,22 @@ fn infer_std_crash_5() {
"#,
expect![[r#"
26..322 '{ ... } }': ()
32..320 'for co... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
32..320 'for co... }': {unknown}
32..320 'for co... }': !
32..320 'for co... }': {unknown}
32..320 'for co... }': &mut {unknown}
32..320 'for co... }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
32..320 'for co... }': Option<{unknown}>
32..320 'for co... }': ()
32..320 'for co... }': ()
32..320 'for co... }': ()
36..43 'content': {unknown}
47..60 'doesnt_matter': {unknown}
61..320 '{ ... }': ()
75..79 'name': &{unknown}
82..166 'if doe... }': &{unknown}
85..98 'doesnt_matter': {unknown}
85..98 'doesnt_matter': bool
99..128 '{ ... }': &{unknown}
113..118 'first': &{unknown}
134..166 '{ ... }': &{unknown}
@@ -279,7 +289,7 @@ fn infer_std_crash_5() {
181..188 'content': &{unknown}
191..313 'if ICE... }': &{unknown}
194..231 'ICE_RE..._VALUE': {unknown}
194..247 'ICE_RE...&name)': {unknown}
194..247 'ICE_RE...&name)': bool
241..246 '&name': &&{unknown}
242..246 'name': &{unknown}
248..276 '{ ... }': &{unknown}
@@ -805,19 +815,19 @@ fn issue_4966() {
225..229 'iter': T
244..246 '{}': Vec<A>
258..402 '{ ...r(); }': ()
268..273 'inner': Map<|&f64| -> f64>
276..300 'Map { ... 0.0 }': Map<|&f64| -> f64>
285..298 '|_: &f64| 0.0': |&f64| -> f64
268..273 'inner': Map<impl Fn(&f64) -> f64>
276..300 'Map { ... 0.0 }': Map<impl Fn(&f64) -> f64>
285..298 '|_: &f64| 0.0': impl Fn(&f64) -> f64
286..287 '_': &f64
295..298 '0.0': f64
311..317 'repeat': Repeat<Map<|&f64| -> f64>>
320..345 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
338..343 'inner': Map<|&f64| -> f64>
356..359 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
362..371 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
362..379 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
372..378 'repeat': Repeat<Map<|&f64| -> f64>>
386..389 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
311..317 'repeat': Repeat<Map<impl Fn(&f64) -> f64>>
320..345 'Repeat...nner }': Repeat<Map<impl Fn(&f64) -> f64>>
338..343 'inner': Map<impl Fn(&f64) -> f64>
356..359 'vec': Vec<IntoIterator::Item<Repeat<Map<impl Fn(&f64) -> f64>>>>
362..371 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<impl Fn(&f64) -> f64>>>, Repeat<Map<impl Fn(&f64) -> f64>>>(Repeat<Map<impl Fn(&f64) -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<impl Fn(&f64) -> f64>>>>
362..379 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<impl Fn(&f64) -> f64>>>>
372..378 'repeat': Repeat<Map<impl Fn(&f64) -> f64>>
386..389 'vec': Vec<IntoIterator::Item<Repeat<Map<impl Fn(&f64) -> f64>>>>
386..399 'vec.foo_bar()': {unknown}
"#]],
);
@@ -852,7 +862,7 @@ fn main() {
123..126 'S()': S<i32>
132..133 's': S<i32>
132..144 's.g(|_x| {})': ()
136..143 '|_x| {}': |&i32| -> ()
136..143 '|_x| {}': impl Fn(&i32)
137..139 '_x': &i32
141..143 '{}': ()
150..151 's': S<i32>
@@ -886,13 +896,13 @@ fn flush(&self) {
"#,
expect![[r#"
123..127 'self': &Mutex<T>
150..152 '{}': MutexGuard<T>
150..152 '{}': MutexGuard<'_, T>
234..238 'self': &{unknown}
240..290 '{ ...()); }': ()
250..251 'w': &Mutex<BufWriter>
276..287 '*(w.lock())': BufWriter
278..279 'w': &Mutex<BufWriter>
278..286 'w.lock()': MutexGuard<BufWriter>
278..286 'w.lock()': MutexGuard<'_, BufWriter>
"#]],
);
}
@ -1060,13 +1070,30 @@ fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
loop {}
}
fn parse_arule() {
fn parse_a_rule() {
infix_parse((), &(|_recurse| ()))
}
"#,
)
}
#[test]
fn nested_closure() {
check_types(
r#"
//- minicore: fn, option
fn map<T, U>(o: Option<T>, f: impl FnOnce(T) -> U) -> Option<U> { loop {} }
fn test() {
let o = Some(Some(2));
map(o, |s| map(s, |x| x));
// ^ i32
}
"#,
);
}
#[test]
fn call_expected_type_closure() {
check_types(
@ -1198,6 +1225,7 @@ fn mamba(a: U32!(), p: u32) -> u32 {
fn for_loop_block_expr_iterable() {
check_infer(
r#"
//- minicore: iterator
fn test() {
for _ in { let x = 0; } {
let y = 0;
@ -1206,8 +1234,17 @@ fn test() {
"#,
expect![[r#"
10..68 '{ ... } }': ()
16..66 'for _ ... }': fn into_iter<()>(()) -> <() as IntoIterator>::IntoIter
16..66 'for _ ... }': IntoIterator::IntoIter<()>
16..66 'for _ ... }': !
16..66 'for _ ... }': IntoIterator::IntoIter<()>
16..66 'for _ ... }': &mut IntoIterator::IntoIter<()>
16..66 'for _ ... }': fn next<IntoIterator::IntoIter<()>>(&mut IntoIterator::IntoIter<()>) -> Option<<IntoIterator::IntoIter<()> as Iterator>::Item>
16..66 'for _ ... }': Option<Iterator::Item<IntoIterator::IntoIter<()>>>
16..66 'for _ ... }': ()
20..21 '_': {unknown}
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
20..21 '_': Iterator::Item<IntoIterator::IntoIter<()>>
25..39 '{ let x = 0; }': ()
31..32 'x': i32
35..36 '0': i32
@ -1458,13 +1495,12 @@ fn regression_11688_3() {
struct Ar<T, const N: u8>(T);
fn f<const LEN: usize, T, const BASE: u8>(
num_zeros: usize,
) -> dyn Iterator<Item = [Ar<T, BASE>; LEN]> {
) -> &dyn Iterator<Item = [Ar<T, BASE>; LEN]> {
loop {}
}
fn dynamic_programming() {
for board in f::<9, u8, 7>(1) {
//^^^^^ [Ar<u8, 7>; 9]
}
let board = f::<9, u8, 7>(1).next();
//^^^^^ Option<[Ar<u8, 7>; 9]>
}
"#,
);
@ -1757,6 +1793,21 @@ const C: usize = 2 + 2;
);
}
#[test]
fn regression_14456() {
check_types(
r#"
//- minicore: future
async fn x() {}
fn f() {
let fut = x();
let t = [0u8; { let a = 2 + 2; a }];
//^ [u8; 4]
}
"#,
);
}
#[test]
fn regression_14164() {
check_types(
@ -1788,3 +1839,119 @@ where
"#,
);
}
#[test]
fn match_ergonomics_with_binding_modes_interaction() {
check_types(
r"
enum E { A }
fn foo() {
match &E::A {
b @ (x @ E::A | x) => {
b;
//^ &E
x;
//^ &E
}
}
}",
);
}
#[test]
fn regression_14844() {
check_no_mismatches(
r#"
pub type Ty = Unknown;
pub struct Inner<T>();
pub struct Outer {
pub inner: Inner<Ty>,
}
fn main() {
_ = Outer {
inner: Inner::<i32>(),
};
}
"#,
);
check_no_mismatches(
r#"
pub const ONE: usize = 1;
pub struct Inner<const P: usize>();
pub struct Outer {
pub inner: Inner<ONE>,
}
fn main() {
_ = Outer {
inner: Inner::<1>(),
};
}
"#,
);
check_no_mismatches(
r#"
pub const ONE: usize = unknown();
pub struct Inner<const P: usize>();
pub struct Outer {
pub inner: Inner<ONE>,
}
fn main() {
_ = Outer {
inner: Inner::<1>(),
};
}
"#,
);
check_no_mismatches(
r#"
pub const N: usize = 2 + 2;
fn f(t: [u8; N]) {}
fn main() {
let a = [1, 2, 3, 4];
f(a);
let b = [1; 4];
let c: [u8; N] = b;
let d = [1; N];
let e: [u8; N] = d;
let f = [1; N];
let g = match f {
[a, b, c, d] => a + b + c + d,
};
}
"#,
);
}
#[test]
fn regression_14844_2() {
check_no_mismatches(
r#"
//- minicore: fn
pub const ONE: usize = 1;
pub type MyInner = Inner<ONE>;
pub struct Inner<const P: usize>();
impl Inner<1> {
fn map<F>(&self, func: F) -> bool
where
F: Fn(&MyInner) -> bool,
{
func(self)
}
}
"#,
);
}

View file

@ -854,9 +854,9 @@ fn test2(a1: *const A, a2: *mut A) {
237..239 'a2': *mut A
249..272 '{ ...2.b; }': ()
255..257 'a1': *const A
255..259 'a1.b': B
255..259 'a1.b': {unknown}
265..267 'a2': *mut A
265..269 'a2.b': B
265..269 'a2.b': {unknown}
"#]],
);
}
@ -1812,6 +1812,20 @@ fn main() {
//^ [(); 7]
}"#,
);
check_types(
r#"
trait Foo {
fn x(self);
}
impl Foo for u8 {
fn x(self) {
let t = [0; 4 + 2];
//^ [i32; 6]
}
}
"#,
);
}
#[test]
@ -1906,8 +1920,8 @@ fn closure_return() {
"#,
expect![[r#"
16..58 '{ ...; }; }': u32
26..27 'x': || -> usize
30..55 '|| -> ...n 1; }': || -> usize
26..27 'x': impl Fn() -> usize
30..55 '|| -> ...n 1; }': impl Fn() -> usize
42..55 '{ return 1; }': usize
44..52 'return 1': !
51..52 '1': usize
@ -1925,8 +1939,8 @@ fn closure_return_unit() {
"#,
expect![[r#"
16..47 '{ ...; }; }': u32
26..27 'x': || -> ()
30..44 '|| { return; }': || -> ()
26..27 'x': impl Fn()
30..44 '|| { return; }': impl Fn()
33..44 '{ return; }': ()
35..41 'return': !
"#]],
@ -1943,8 +1957,8 @@ fn closure_return_inferred() {
"#,
expect![[r#"
16..46 '{ ..." }; }': u32
26..27 'x': || -> &str
30..43 '|| { "test" }': || -> &str
26..27 'x': impl Fn() -> &str
30..43 '|| { "test" }': impl Fn() -> &str
33..43 '{ "test" }': &str
35..41 '"test"': &str
"#]],
@ -2033,6 +2047,56 @@ fn test() {
);
}
#[test]
fn tuple_pattern_nested_match_ergonomics() {
check_no_mismatches(
r#"
fn f(x: (&i32, &i32)) -> i32 {
match x {
(3, 4) => 5,
_ => 12,
}
}
"#,
);
check_types(
r#"
fn f(x: (&&&&i32, &&&i32)) {
let f = match x {
t @ (3, 4) => t,
_ => loop {},
};
f;
//^ (&&&&i32, &&&i32)
}
"#,
);
check_types(
r#"
fn f() {
let x = &&&(&&&2, &&&&&3);
let (y, z) = x;
//^ &&&&i32
let t @ (y, z) = x;
t;
//^ &&&(&&&i32, &&&&&i32)
}
"#,
);
}
#[test]
fn fn_pointer_return() {
check_infer(
@ -2050,7 +2114,7 @@ fn fn_pointer_return() {
47..120 '{ ...hod; }': ()
57..63 'vtable': Vtable
66..90 'Vtable...| {} }': Vtable
83..88 '|| {}': || -> ()
83..88 '|| {}': impl Fn()
86..88 '{}': ()
100..101 'm': fn()
104..110 'vtable': Vtable
@ -2087,6 +2151,7 @@ async fn main() {
136..138 '()': ()
150..151 'w': i32
154..166 'const { 92 }': i32
154..166 'const { 92 }': i32
162..164 '92': i32
176..177 't': i32
180..190 ''a: { 92 }': i32
@ -2094,6 +2159,24 @@ async fn main() {
"#]],
)
}
#[test]
fn async_fn_and_try_operator() {
check_no_mismatches(
r#"
//- minicore: future, result, fn, try, from
async fn foo() -> Result<(), ()> {
Ok(())
}
async fn bar() -> Result<(), ()> {
let x = foo().await?;
Ok(x)
}
"#,
)
}
#[test]
fn async_block_early_return() {
check_infer(
@ -2124,9 +2207,9 @@ fn main() {
149..151 'Ok': Ok<(), ()>(()) -> Result<(), ()>
149..155 'Ok(())': Result<(), ()>
152..154 '()': ()
167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
167..171 'test': fn test<(), (), impl Fn() -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(impl Fn() -> impl Future<Output = Result<(), ()>>)
167..228 'test(|... })': ()
172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
172..227 '|| asy... }': impl Fn() -> impl Future<Output = Result<(), ()>>
175..227 'async ... }': impl Future<Output = Result<(), ()>>
191..205 'return Err(())': !
198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
@ -2252,8 +2335,8 @@ fn infer_labelled_break_with_val() {
"#,
expect![[r#"
9..335 '{ ... }; }': ()
19..21 '_x': || -> bool
24..332 '|| 'ou... }': || -> bool
19..21 '_x': impl Fn() -> bool
24..332 '|| 'ou... }': impl Fn() -> bool
27..332 ''outer... }': bool
40..332 '{ ... }': ()
54..59 'inner': i8
@ -2677,6 +2760,179 @@ impl B for Astruct {}
)
}
#[test]
fn capture_kinds_simple() {
check_types(
r#"
struct S;
impl S {
fn read(&self) -> &S { self }
fn write(&mut self) -> &mut S { self }
fn consume(self) -> S { self }
}
fn f() {
let x = S;
let c1 = || x.read();
//^^ impl Fn() -> &S
let c2 = || x.write();
//^^ impl FnMut() -> &mut S
let c3 = || x.consume();
//^^ impl FnOnce() -> S
let c3 = || x.consume().consume().consume();
//^^ impl FnOnce() -> S
let c3 = || x.consume().write().read();
//^^ impl FnOnce() -> &S
let x = &mut x;
let c1 = || x.write();
//^^ impl FnMut() -> &mut S
let x = S;
let c1 = || { let ref t = x; t };
//^^ impl Fn() -> &S
let c2 = || { let ref mut t = x; t };
//^^ impl FnMut() -> &mut S
let c3 = || { let t = x; t };
//^^ impl FnOnce() -> S
}
"#,
)
}
#[test]
fn capture_kinds_closure() {
check_types(
r#"
//- minicore: copy, fn
fn f() {
let mut x = 2;
x = 5;
let mut c1 = || { x = 3; x };
//^^^^^^ impl FnMut() -> i32
let mut c2 = || { c1() };
//^^^^^^ impl FnMut() -> i32
let mut c1 = || { x };
//^^^^^^ impl Fn() -> i32
let mut c2 = || { c1() };
//^^^^^^ impl Fn() -> i32
struct X;
let x = X;
let mut c1 = || { x };
//^^^^^^ impl FnOnce() -> X
let mut c2 = || { c1() };
//^^^^^^ impl FnOnce() -> X
}
"#,
);
}
#[test]
fn capture_kinds_overloaded_deref() {
check_types(
r#"
//- minicore: fn, deref_mut
use core::ops::{Deref, DerefMut};
struct Foo;
impl Deref for Foo {
type Target = (i32, u8);
fn deref(&self) -> &(i32, u8) {
&(5, 2)
}
}
impl DerefMut for Foo {
fn deref_mut(&mut self) -> &mut (i32, u8) {
&mut (5, 2)
}
}
fn test() {
let mut x = Foo;
let c1 = || *x;
//^^ impl Fn() -> (i32, u8)
let c2 = || { *x = (2, 5); };
//^^ impl FnMut()
let c3 = || { x.1 };
//^^ impl Fn() -> u8
let c4 = || { x.1 = 6; };
//^^ impl FnMut()
}
"#,
);
}
#[test]
fn capture_kinds_with_copy_types() {
check_types(
r#"
//- minicore: copy, clone, derive
#[derive(Clone, Copy)]
struct Copy;
struct NotCopy;
#[derive(Clone, Copy)]
struct Generic<T>(T);
trait Tr {
type Assoc;
}
impl Tr for Copy {
type Assoc = NotCopy;
}
#[derive(Clone, Copy)]
struct AssocGeneric<T: Tr>(T::Assoc);
fn f() {
let a = Copy;
let b = NotCopy;
let c = Generic(Copy);
let d = Generic(NotCopy);
let e: AssocGeneric<Copy> = AssocGeneric(NotCopy);
let c1 = || a;
//^^ impl Fn() -> Copy
let c2 = || b;
//^^ impl FnOnce() -> NotCopy
let c3 = || c;
//^^ impl Fn() -> Generic<Copy>
let c3 = || d;
//^^ impl FnOnce() -> Generic<NotCopy>
let c3 = || e;
//^^ impl FnOnce() -> AssocGeneric<Copy>
}
"#,
)
}
#[test]
fn derive_macro_should_work_for_associated_type() {
check_types(
r#"
//- minicore: copy, clone, derive
#[derive(Clone)]
struct X;
#[derive(Clone)]
struct Y;
trait Tr {
type Assoc;
}
impl Tr for X {
type Assoc = Y;
}
#[derive(Clone)]
struct AssocGeneric<T: Tr>(T::Assoc);
fn f() {
let e: AssocGeneric<X> = AssocGeneric(Y);
let e_clone = e.clone();
//^^^^^^^ AssocGeneric<X>
}
"#,
)
}
#[test]
fn cfgd_out_assoc_items() {
check_types(
@ -2696,6 +2952,21 @@ fn f() {
)
}
#[test]
fn infer_ref_to_raw_cast() {
check_types(
r#"
struct S;
fn f() {
let s = &mut S;
let s = s as *mut _;
//^ *mut S
}
"#,
);
}
#[test]
fn infer_missing_type() {
check_types(
@ -3258,35 +3529,60 @@ fn f<T>(t: Ark<T>) {
);
}
// FIXME
#[test]
fn castable_to2() {
check_infer(
fn const_dependent_on_local() {
check_types(
r#"
fn func() {
let x = &0u32 as *const _;
fn main() {
let s = 5;
let t = [2; s];
//^ [i32; _]
}
"#,
expect![[r#"
10..44 '{ ...t _; }': ()
20..21 'x': *const {unknown}
24..29 '&0u32': &u32
24..41 '&0u32 ...onst _': *const {unknown}
25..29 '0u32': u32
"#]],
);
}
#[test]
fn issue_14275() {
// FIXME: evaluate const generic
check_types(
r#"
struct Foo<const T: bool>;
fn main() {
const B: bool = false;
let foo = Foo::<B>;
//^^^ Foo<_>
//^^^ Foo<false>
}
"#,
);
check_types(
r#"
struct Foo<const T: bool>;
impl Foo<true> {
fn foo(self) -> u8 { 2 }
}
impl Foo<false> {
fn foo(self) -> u16 { 5 }
}
fn main() {
const B: bool = false;
let foo: Foo<B> = Foo;
let x = foo.foo();
//^ u16
}
"#,
);
}
#[test]
fn cstring_literals() {
check_types(
r#"
#[lang = "CStr"]
pub struct CStr;
fn main() {
c"ello";
//^^^^^^^ &CStr
}
"#,
);

View file

@ -90,7 +90,7 @@ fn infer_async_closure() {
async fn test() {
let f = async move |x: i32| x + 42;
f;
// ^ |i32| -> impl Future<Output = i32>
// ^ impl Fn(i32) -> impl Future<Output = i32>
let a = f(4);
a;
// ^ impl Future<Output = i32>
@ -99,7 +99,7 @@ async fn test() {
// ^ i32
let f = async move || 42;
f;
// ^ || -> impl Future<Output = i32>
// ^ impl Fn() -> impl Future<Output = i32>
let a = f();
a;
// ^ impl Future<Output = i32>
@ -116,7 +116,7 @@ async fn test() {
};
let _: Option<u64> = c().await;
c;
// ^ || -> impl Future<Output = Option<u64>>
// ^ impl Fn() -> impl Future<Output = Option<u64>>
}
"#,
);
@ -206,19 +206,27 @@ fn test() {
fn infer_try_trait() {
check_types(
r#"
//- minicore: try, result
//- minicore: try, result, from
fn test() {
let r: Result<i32, u64> = Result::Ok(1);
let v = r?;
v;
} //^ i32
impl<O, E> core::ops::Try for Result<O, E> {
type Output = O;
type Error = Result<core::convert::Infallible, E>;
"#,
);
}
impl<T, E, F: From<E>> core::ops::FromResidual<Result<core::convert::Infallible, E>> for Result<T, F> {}
#[test]
fn infer_try_block() {
// FIXME: We should test more cases, but it currently doesn't work, since
// our labeled block type inference is broken.
check_types(
r#"
//- minicore: try, option
fn test() {
let x: Option<_> = try { Some(2)?; };
//^ Option<()>
}
"#,
);
}
@ -542,7 +550,7 @@ fn test() -> u64 {
53..54 'a': S
57..58 'S': S(fn(u32) -> u64) -> S
57..74 'S(|i| ...s u64)': S
59..73 '|i| 2*i as u64': |u32| -> u64
59..73 '|i| 2*i as u64': impl Fn(u32) -> u64
60..61 'i': u32
63..64 '2': u64
63..73 '2*i as u64': u64
@ -1325,9 +1333,9 @@ fn foo<const C: u8, T>() -> (impl FnOnce(&str, T), impl Trait<u8>) {
}
"#,
expect![[r#"
134..165 '{ ...(C)) }': (|&str, T| -> (), Bar<u8>)
140..163 '(|inpu...ar(C))': (|&str, T| -> (), Bar<u8>)
141..154 '|input, t| {}': |&str, T| -> ()
134..165 '{ ...(C)) }': (impl Fn(&str, T), Bar<u8>)
140..163 '(|inpu...ar(C))': (impl Fn(&str, T), Bar<u8>)
141..154 '|input, t| {}': impl Fn(&str, T)
142..147 'input': &str
149..150 't': T
152..154 '{}': ()
@ -1498,8 +1506,8 @@ fn main() {
71..105 '{ ...()); }': ()
77..78 'f': fn f(&dyn Fn(S))
77..102 'f(&|nu...foo())': ()
79..101 '&|numb....foo()': &|S| -> ()
80..101 '|numbe....foo()': |S| -> ()
79..101 '&|numb....foo()': &impl Fn(S)
80..101 '|numbe....foo()': impl Fn(S)
81..87 'number': S
89..95 'number': S
89..101 'number.foo()': ()
@ -1904,13 +1912,13 @@ fn test() {
131..132 'f': F
151..153 '{}': Lazy<T, F>
251..497 '{ ...o(); }': ()
261..266 'lazy1': Lazy<Foo, || -> Foo>
283..292 'Lazy::new': fn new<Foo, || -> Foo>(|| -> Foo) -> Lazy<Foo, || -> Foo>
283..300 'Lazy::...| Foo)': Lazy<Foo, || -> Foo>
293..299 '|| Foo': || -> Foo
261..266 'lazy1': Lazy<Foo, impl Fn() -> Foo>
283..292 'Lazy::new': fn new<Foo, impl Fn() -> Foo>(impl Fn() -> Foo) -> Lazy<Foo, impl Fn() -> Foo>
283..300 'Lazy::...| Foo)': Lazy<Foo, impl Fn() -> Foo>
293..299 '|| Foo': impl Fn() -> Foo
296..299 'Foo': Foo
310..312 'r1': usize
315..320 'lazy1': Lazy<Foo, || -> Foo>
315..320 'lazy1': Lazy<Foo, impl Fn() -> Foo>
315..326 'lazy1.foo()': usize
368..383 'make_foo_fn_ptr': fn() -> Foo
399..410 'make_foo_fn': fn make_foo_fn() -> Foo
@ -1955,20 +1963,20 @@ fn test() {
163..167 '1u32': u32
174..175 'x': Option<u32>
174..190 'x.map(...v + 1)': Option<u32>
180..189 '|v| v + 1': |u32| -> u32
180..189 '|v| v + 1': impl Fn(u32) -> u32
181..182 'v': u32
184..185 'v': u32
184..189 'v + 1': u32
188..189 '1': u32
196..197 'x': Option<u32>
196..212 'x.map(... 1u64)': Option<u64>
202..211 '|_v| 1u64': |u32| -> u64
202..211 '|_v| 1u64': impl Fn(u32) -> u64
203..205 '_v': u32
207..211 '1u64': u64
222..223 'y': Option<i64>
239..240 'x': Option<u32>
239..252 'x.map(|_v| 1)': Option<i64>
245..251 '|_v| 1': |u32| -> i64
245..251 '|_v| 1': impl Fn(u32) -> i64
246..248 '_v': u32
250..251 '1': i64
"#]],
@ -1997,11 +2005,11 @@ fn test<F: FnOnce(u32) -> u64>(f: F) {
//^^^^ u64
let g = |v| v + 1;
//^^^^^ u64
//^^^^^^^^^ |u64| -> u64
//^^^^^^^^^ impl Fn(u64) -> u64
g(1u64);
//^^^^^^^ u64
let h = |v| 1u128 + v;
//^^^^^^^^^^^^^ |u128| -> u128
//^^^^^^^^^^^^^ impl Fn(u128) -> u128
}"#,
);
}
@ -2054,17 +2062,17 @@ fn test() {
312..314 '{}': ()
330..489 '{ ... S); }': ()
340..342 'x1': u64
345..349 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
345..349 'foo1': fn foo1<S, u64, impl Fn(S) -> u64>(S, impl Fn(S) -> u64) -> u64
345..368 'foo1(S...hod())': u64
350..351 'S': S
353..367 '|s| s.method()': |S| -> u64
353..367 '|s| s.method()': impl Fn(S) -> u64
354..355 's': S
357..358 's': S
357..367 's.method()': u64
378..380 'x2': u64
383..387 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
383..387 'foo2': fn foo2<S, u64, impl Fn(S) -> u64>(impl Fn(S) -> u64, S) -> u64
383..406 'foo2(|...(), S)': u64
388..402 '|s| s.method()': |S| -> u64
388..402 '|s| s.method()': impl Fn(S) -> u64
389..390 's': S
392..393 's': S
392..402 's.method()': u64
@ -2073,14 +2081,14 @@ fn test() {
421..422 'S': S
421..446 'S.foo1...hod())': u64
428..429 'S': S
431..445 '|s| s.method()': |S| -> u64
431..445 '|s| s.method()': impl Fn(S) -> u64
432..433 's': S
435..436 's': S
435..445 's.method()': u64
456..458 'x4': u64
461..462 'S': S
461..486 'S.foo2...(), S)': u64
468..482 '|s| s.method()': |S| -> u64
468..482 '|s| s.method()': impl Fn(S) -> u64
469..470 's': S
472..473 's': S
472..482 's.method()': u64
@ -2554,9 +2562,9 @@ fn main() {
72..74 '_v': F
117..120 '{ }': ()
132..163 '{ ... }); }': ()
138..148 'f::<(), _>': fn f<(), |&()| -> ()>(|&()| -> ())
138..148 'f::<(), _>': fn f<(), impl Fn(&())>(impl Fn(&()))
138..160 'f::<()... z; })': ()
149..159 '|z| { z; }': |&()| -> ()
149..159 '|z| { z; }': impl Fn(&())
150..151 'z': &()
153..159 '{ z; }': ()
155..156 'z': &()
@ -2713,9 +2721,9 @@ fn main() {
983..998 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
983..1000 'Vec::<...:new()': Vec<i32>
983..1012 'Vec::<...iter()': IntoIter<i32>
983..1075 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
983..1075 'Vec::<...one })': FilterMap<IntoIter<i32>, impl Fn(i32) -> Option<u32>>
983..1101 'Vec::<... y; })': ()
1029..1074 '|x| if...None }': |i32| -> Option<u32>
1029..1074 '|x| if...None }': impl Fn(i32) -> Option<u32>
1030..1031 'x': i32
1033..1074 'if x >...None }': Option<u32>
1036..1037 'x': i32
@ -2728,7 +2736,7 @@ fn main() {
1049..1057 'x as u32': u32
1066..1074 '{ None }': Option<u32>
1068..1072 'None': Option<u32>
1090..1100 '|y| { y; }': |u32| -> ()
1090..1100 '|y| { y; }': impl Fn(u32)
1091..1092 'y': u32
1094..1100 '{ y; }': ()
1096..1097 'y': u32
@ -2971,13 +2979,13 @@ fn foo() {
52..126 '{ ...)(s) }': ()
62..63 's': Option<i32>
66..78 'Option::None': Option<i32>
88..89 'f': |Option<i32>| -> ()
92..111 '|x: Op...2>| {}': |Option<i32>| -> ()
88..89 'f': impl Fn(Option<i32>)
92..111 '|x: Op...2>| {}': impl Fn(Option<i32>)
93..94 'x': Option<i32>
109..111 '{}': ()
117..124 '(&f)(s)': ()
118..120 '&f': &|Option<i32>| -> ()
119..120 'f': |Option<i32>| -> ()
118..120 '&f': &impl Fn(Option<i32>)
119..120 'f': impl Fn(Option<i32>)
122..123 's': Option<i32>
"#]],
);
@ -3043,7 +3051,7 @@ impl<T: ?Sized> core::ops::Deref for Box<T> {
type Target = T;
fn deref(&self) -> &T {
&self.inner
unsafe { &*self.inner }
}
}
@ -3054,23 +3062,25 @@ fn foo() {
}"#,
expect![[r#"
154..158 'self': &Box<T>
166..193 '{ ... }': &T
176..187 '&self.inner': &*mut T
177..181 'self': &Box<T>
177..187 'self.inner': *mut T
206..296 '{ ...&s); }': ()
216..217 's': Option<i32>
220..224 'None': Option<i32>
234..235 'f': Box<dyn FnOnce(&Option<i32>)>
269..282 'box (|ps| {})': Box<|&Option<i32>| -> ()>
274..281 '|ps| {}': |&Option<i32>| -> ()
275..277 'ps': &Option<i32>
279..281 '{}': ()
288..289 'f': Box<dyn FnOnce(&Option<i32>)>
288..293 'f(&s)': ()
290..292 '&s': &Option<i32>
291..292 's': Option<i32>
269..282: expected Box<dyn FnOnce(&Option<i32>)>, got Box<|&Option<i32>| -> ()>
166..205 '{ ... }': &T
176..199 'unsafe...nner }': &T
185..197 '&*self.inner': &T
186..197 '*self.inner': T
187..191 'self': &Box<T>
187..197 'self.inner': *mut T
218..308 '{ ...&s); }': ()
228..229 's': Option<i32>
232..236 'None': Option<i32>
246..247 'f': Box<dyn FnOnce(&Option<i32>)>
281..294 'box (|ps| {})': Box<impl Fn(&Option<i32>)>
286..293 '|ps| {}': impl Fn(&Option<i32>)
287..289 'ps': &Option<i32>
291..293 '{}': ()
300..301 'f': Box<dyn FnOnce(&Option<i32>)>
300..305 'f(&s)': ()
302..304 '&s': &Option<i32>
303..304 's': Option<i32>
281..294: expected Box<dyn FnOnce(&Option<i32>)>, got Box<impl Fn(&Option<i32>)>
"#]],
);
}
@ -3709,7 +3719,6 @@ async fn get_accounts() -> Result<u32, ()> {
#[test]
fn local_impl_1() {
check!(block_local_impls);
check_types(
r#"
trait Trait<T> {
@ -3731,7 +3740,6 @@ fn test() {
#[test]
fn local_impl_2() {
check!(block_local_impls);
check_types(
r#"
struct S;
@ -3753,7 +3761,6 @@ fn test() {
#[test]
fn local_impl_3() {
check!(block_local_impls);
check_types(
r#"
trait Trait<T> {
@ -3777,6 +3784,62 @@ fn test() {
);
}
#[test]
fn foreign_trait_with_local_trait_impl() {
check!(block_local_impls);
check(
r#"
mod module {
pub trait T {
const C: usize;
fn f(&self);
}
}
fn f() {
use module::T;
impl T for usize {
const C: usize = 0;
fn f(&self) {}
}
0usize.f();
//^^^^^^^^^^ type: ()
usize::C;
//^^^^^^^^type: usize
}
"#,
);
}
#[test]
fn regression_14443_trait_solve() {
check_no_mismatches(
r#"
trait T {
fn f(&self) {}
}
fn main() {
struct A;
impl T for A {}
let a = A;
let b = {
struct B;
impl T for B {}
B
};
a.f();
b.f();
}
"#,
)
}
#[test]
fn associated_type_sized_bounds() {
check_infer(
@ -4149,3 +4212,201 @@ fn test() {
"#,
);
}
#[test]
fn associated_type_in_struct_expr_path() {
// FIXME: All annotations should be resolvable.
// For lines marked as unstable, see rust-lang/rust#86935.
// FIXME: Remove the comments once stabilized.
check_types(
r#"
trait Trait {
type Assoc;
fn f();
}
struct S { x: u32 }
impl Trait for () {
type Assoc = S;
fn f() {
let x = 42;
let a = Self::Assoc { x };
// ^ S
let a = <Self>::Assoc { x }; // unstable
// ^ {unknown}
// should be `Copy` but we don't track ownership anyway.
let value = S { x };
if let Self::Assoc { x } = value {}
// ^ u32
if let <Self>::Assoc { x } = value {} // unstable
// ^ {unknown}
}
}
"#,
);
}
#[test]
fn associated_type_in_struct_expr_path_enum() {
// FIXME: All annotations should be resolvable.
// For lines marked as unstable, see rust-lang/rust#86935.
// FIXME: Remove the comments once stabilized.
check_types(
r#"
trait Trait {
type Assoc;
fn f();
}
enum E {
Unit,
Struct { x: u32 },
}
impl Trait for () {
type Assoc = E;
fn f() {
let a = Self::Assoc::Unit;
// ^ E
let a = <Self>::Assoc::Unit;
// ^ E
let a = <Self::Assoc>::Unit;
// ^ E
let a = <<Self>::Assoc>::Unit;
// ^ E
// should be `Copy` but we don't track ownership anyway.
let value = E::Unit;
if let Self::Assoc::Unit = value {}
// ^^^^^^^^^^^^^^^^^ E
if let <Self>::Assoc::Unit = value {}
// ^^^^^^^^^^^^^^^^^^^ E
if let <Self::Assoc>::Unit = value {}
// ^^^^^^^^^^^^^^^^^^^ E
if let <<Self>::Assoc>::Unit = value {}
// ^^^^^^^^^^^^^^^^^^^^^ E
let x = 42;
let a = Self::Assoc::Struct { x };
// ^ E
let a = <Self>::Assoc::Struct { x }; // unstable
// ^ {unknown}
let a = <Self::Assoc>::Struct { x }; // unstable
// ^ {unknown}
let a = <<Self>::Assoc>::Struct { x }; // unstable
// ^ {unknown}
// should be `Copy` but we don't track ownership anyway.
let value = E::Struct { x: 42 };
if let Self::Assoc::Struct { x } = value {}
// ^ u32
if let <Self>::Assoc::Struct { x } = value {} // unstable
// ^ {unknown}
if let <Self::Assoc>::Struct { x } = value {} // unstable
// ^ {unknown}
if let <<Self>::Assoc>::Struct { x } = value {} // unstable
// ^ {unknown}
}
}
"#,
);
}
#[test]
fn derive_macro_bounds() {
check_types(
r#"
//- minicore: clone, derive
#[derive(Clone)]
struct Copy;
struct NotCopy;
#[derive(Clone)]
struct Generic<T>(T);
trait Tr {
type Assoc;
}
impl Tr for Copy {
type Assoc = NotCopy;
}
#[derive(Clone)]
struct AssocGeneric<T: Tr>(T::Assoc);
#[derive(Clone)]
struct AssocGeneric2<T: Tr>(<T as Tr>::Assoc);
#[derive(Clone)]
struct AssocGeneric3<T: Tr>(Generic<T::Assoc>);
#[derive(Clone)]
struct Vec<T>();
#[derive(Clone)]
struct R1(Vec<R2>);
#[derive(Clone)]
struct R2(R1);
fn f() {
let x = (&Copy).clone();
//^ Copy
let x = (&NotCopy).clone();
//^ &NotCopy
let x = (&Generic(Copy)).clone();
//^ Generic<Copy>
let x = (&Generic(NotCopy)).clone();
//^ &Generic<NotCopy>
let x: &AssocGeneric<Copy> = &AssocGeneric(NotCopy);
let x = x.clone();
//^ &AssocGeneric<Copy>
let x: &AssocGeneric2<Copy> = &AssocGeneric2(NotCopy);
let x = x.clone();
//^ &AssocGeneric2<Copy>
let x: &AssocGeneric3<Copy> = &AssocGeneric3(Generic(NotCopy));
let x = x.clone();
//^ &AssocGeneric3<Copy>
let x = (&R1(Vec())).clone();
//^ R1
let x = (&R2(R1(Vec()))).clone();
//^ R2
}
"#,
);
}
#[test]
fn trait_obligations_should_be_registered_during_path_inference() {
check_types(
r#"
//- minicore: fn, from
struct S<T>(T);
fn map<T, U, F: FnOnce(T) -> S<U>>(_: T, _: F) -> U { loop {} }
fn test(v: S<i32>) {
let res = map(v, Into::into);
//^^^ i32
}
"#,
);
}
#[test]
fn fn_obligation_should_be_registered_during_path_inference() {
check_types(
r#"
//- minicore: fn, from
struct S<T>(T);
impl<T> S<T> {
fn foo<U: Into<S<T>>>(_: U) -> Self { loop {} }
}
fn map<T, U, F: FnOnce(T) -> U>(_: T, _: F) -> U { loop {} }
fn test(v: S<i32>) {
let res = map(v, S::foo);
//^^^ S<i32>
}
"#,
);
}

View file

@ -24,7 +24,8 @@ impl DebugContext<'_> {
AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
};
name.fmt(f)
name.display(self.0.upcast()).fmt(f)?;
Ok(())
}
pub(crate) fn debug_trait_id(
@ -34,7 +35,8 @@ impl DebugContext<'_> {
) -> Result<(), fmt::Error> {
let trait_: hir_def::TraitId = from_chalk_trait_id(id);
let trait_data = self.0.trait_data(trait_);
trait_data.name.fmt(f)
trait_data.name.display(self.0.upcast()).fmt(f)?;
Ok(())
}
pub(crate) fn debug_assoc_type_id(
@ -49,7 +51,13 @@ impl DebugContext<'_> {
_ => panic!("associated type not in trait"),
};
let trait_data = self.0.trait_data(trait_);
write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
write!(
fmt,
"{}::{}",
trait_data.name.display(self.0.upcast()),
type_alias_data.name.display(self.0.upcast())
)?;
Ok(())
}
pub(crate) fn debug_projection_ty(
@ -67,7 +75,7 @@ impl DebugContext<'_> {
let trait_ref = projection_ty.trait_ref(self.0);
let trait_params = trait_ref.substitution.as_slice(Interner);
let self_ty = trait_ref.self_type_parameter(Interner);
write!(fmt, "<{self_ty:?} as {trait_name}")?;
write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast()))?;
if trait_params.len() > 1 {
write!(
fmt,
@ -75,7 +83,7 @@ impl DebugContext<'_> {
trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))),
)?;
}
write!(fmt, ">::{}", type_alias_data.name)?;
write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast()))?;
let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len();
let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count];
@ -105,9 +113,9 @@ impl DebugContext<'_> {
}
};
match def {
CallableDefId::FunctionId(_) => write!(fmt, "{{fn {name}}}"),
CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name.display(self.0.upcast())),
CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
write!(fmt, "{{ctor {name}}}")
write!(fmt, "{{ctor {}}}", name.display(self.0.upcast()))
}
}
}

View file

@ -1,22 +1,24 @@
//! Trait solving using Chalk.
use std::{env::var, sync::Arc};
use std::env::var;
use chalk_ir::GoalData;
use chalk_ir::{fold::TypeFoldable, DebruijnIndex, GoalData};
use chalk_recursive::Cache;
use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver};
use chalk_solve::{logging_db::LoggingRustIrDatabase, rust_ir, Solver};
use base_db::CrateId;
use hir_def::{
lang_item::{LangItem, LangItemTarget},
TraitId,
BlockId, TraitId,
};
use hir_expand::name::{name, Name};
use stdx::panic_context;
use triomphe::Arc;
use crate::{
db::HirDatabase, infer::unify::InferenceTable, AliasEq, AliasTy, Canonical, DomainGoal, Goal,
Guidance, InEnvironment, Interner, ProjectionTy, ProjectionTyExt, Solution, TraitRefExt, Ty,
TyKind, WhereClause,
db::HirDatabase, infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, AliasEq,
AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy,
ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, WhereClause,
};
/// This controls how much 'time' we give the Chalk solver before giving up.
@ -26,6 +28,7 @@ const CHALK_SOLVER_FUEL: i32 = 1000;
pub(crate) struct ChalkContext<'a> {
pub(crate) db: &'a dyn HirDatabase,
pub(crate) krate: CrateId,
pub(crate) block: Option<BlockId>,
}
fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
@ -43,6 +46,7 @@ fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TraitEnvironment {
pub krate: CrateId,
pub block: Option<BlockId>,
// FIXME make this a BTreeMap
pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>,
pub env: chalk_ir::Environment<Interner>,
@ -52,6 +56,7 @@ impl TraitEnvironment {
pub fn empty(krate: CrateId) -> Self {
TraitEnvironment {
krate,
block: None,
traits_from_clauses: Vec::new(),
env: chalk_ir::Environment::new(Interner),
}
@ -78,11 +83,12 @@ pub(crate) fn normalize_projection_query(
pub(crate) fn trait_solve_query(
db: &dyn HirDatabase,
krate: CrateId,
block: Option<BlockId>,
goal: Canonical<InEnvironment<Goal>>,
) -> Option<Solution> {
let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) {
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
db.trait_data(it.hir_trait_id()).name.to_string()
db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
}
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
_ => "??".to_string(),
@ -100,18 +106,25 @@ pub(crate) fn trait_solve_query(
}
}
// Chalk sees `UnevaluatedConst` as a unique concrete value, but we see it as an alias
// for another const, so we should get rid of it before talking to Chalk.
let goal = goal
.try_fold_with(&mut UnevaluatedConstEvaluatorFolder { db }, DebruijnIndex::INNERMOST)
.unwrap();
// We currently don't deal with universes (I think / hope they're not yet
// relevant for our use cases?)
let u_canonical = chalk_ir::UCanonical { canonical: goal, universes: 1 };
solve(db, krate, &u_canonical)
solve(db, krate, block, &u_canonical)
}
fn solve(
db: &dyn HirDatabase,
krate: CrateId,
block: Option<BlockId>,
goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
) -> Option<chalk_solve::Solution<Interner>> {
let context = ChalkContext { db, krate };
let context = ChalkContext { db, krate, block };
tracing::debug!("solve goal: {:?}", goal);
let mut solver = create_chalk_solver();
@ -171,8 +184,10 @@ fn is_chalk_print() -> bool {
std::env::var("CHALK_PRINT").is_ok()
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum FnTrait {
// Warning: The order is important. If something implements `x`, it should also
// implement `y` if `y <= x`.
FnOnce,
FnMut,
Fn,
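// A hedged illustration of the contract above (hypothetical assertions, not
// part of this change): with the derived `Ord` on the declaration order,
// `FnOnce < FnMut < Fn`, so a closure implementing `Fn` also implements
// `FnMut` and `FnOnce`.
//
//     assert!(FnTrait::FnOnce < FnTrait::FnMut);
//     assert!(FnTrait::FnMut < FnTrait::Fn);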
@ -187,7 +202,23 @@ impl FnTrait {
}
}
pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
pub const fn to_chalk_ir(self) -> rust_ir::ClosureKind {
match self {
FnTrait::FnOnce => rust_ir::ClosureKind::FnOnce,
FnTrait::FnMut => rust_ir::ClosureKind::FnMut,
FnTrait::Fn => rust_ir::ClosureKind::Fn,
}
}
pub fn method_name(self) -> Name {
match self {
FnTrait::FnOnce => name!(call_once),
FnTrait::FnMut => name!(call_mut),
FnTrait::Fn => name!(call),
}
}
pub fn get_id(self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
let target = db.lang_item(krate, self.lang_item())?;
match target {
LangItemTarget::Trait(t) => Some(t),

View file

@ -4,7 +4,11 @@
use std::iter;
use base_db::CrateId;
use chalk_ir::{cast::Cast, fold::Shift, BoundVar, DebruijnIndex};
use chalk_ir::{
cast::Cast,
fold::{FallibleTypeFolder, Shift},
BoundVar, DebruijnIndex,
};
use either::Either;
use hir_def::{
db::DefDatabase,
@ -15,16 +19,22 @@ use hir_def::{
lang_item::LangItem,
resolver::{HasResolver, TypeNs},
type_ref::{TraitBoundModifier, TypeRef},
ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId,
TypeOrConstParamId, TypeParamId,
ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, ItemContainerId,
LocalEnumVariantId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
};
use hir_expand::name::Name;
use intern::Interned;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
use stdx::never;
use crate::{
db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, WhereClause,
consteval::unknown_const,
db::HirDatabase,
layout::{Layout, TagEncoding},
mir::pad16,
ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
Ty, WhereClause,
};
pub(crate) fn fn_traits(
@ -69,9 +79,7 @@ pub(super) fn all_super_trait_refs<T>(
cb: impl FnMut(TraitRef) -> Option<T>,
) -> Option<T> {
let seen = iter::once(trait_ref.trait_id).collect();
let mut stack = Vec::new();
stack.push(trait_ref);
SuperTraits { db, seen, stack }.find_map(cb)
SuperTraits { db, seen, stack: vec![trait_ref] }.find_map(cb)
}
struct SuperTraits<'a> {
@ -130,7 +138,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra
WherePredicate::Lifetime { .. } => None,
})
.filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None))
.filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
.filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path) {
Some(TypeNs::TraitId(t)) => Some(t),
_ => None,
})
@ -176,6 +184,37 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
Generics { def, params: db.generic_params(def), parent_generics }
}
/// This is a bit different from the rustc equivalent. Currently it stores:
/// - 0: the function signature, encoded as a function pointer type
/// - 1..n: generics of the parent
///
/// and it doesn't store the closure types and fields.
///
/// Code should not assume this ordering, and should always use the methods available
/// on this struct for retrieval, and `TyBuilder::substs_for_closure` for creation.
pub(crate) struct ClosureSubst<'a>(pub(crate) &'a Substitution);
impl<'a> ClosureSubst<'a> {
pub(crate) fn parent_subst(&self) -> &'a [GenericArg] {
match self.0.as_slice(Interner) {
[_, x @ ..] => x,
_ => {
never!("Closure missing parameter");
&[]
}
}
}
pub(crate) fn sig_ty(&self) -> &'a Ty {
match self.0.as_slice(Interner) {
[x, ..] => x.assert_ty_ref(Interner),
_ => {
unreachable!("Closure missing sig_ty parameter");
}
}
}
}
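// A minimal usage sketch (hypothetical caller; `closure_subst` is assumed):
// consumers go through the accessors above instead of indexing the
// substitution slots directly.
//
//     let subst = ClosureSubst(&closure_subst);
//     let sig: &Ty = subst.sig_ty();                     // slot 0: the signature
//     let parent: &[GenericArg] = subst.parent_subst();  // slots 1..n: parent generics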
#[derive(Debug)]
pub(crate) struct Generics {
def: GenericDefId,
@ -354,3 +393,74 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
_ => false,
}
}
pub(crate) struct UnevaluatedConstEvaluatorFolder<'a> {
pub(crate) db: &'a dyn HirDatabase,
}
impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
type Error = ();
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = ()> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_const(
&mut self,
constant: Const,
_outer_binder: DebruijnIndex,
) -> Result<Const, Self::Error> {
if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value {
if let ConstScalar::UnevaluatedConst(id, subst) = &c.interned {
if let Ok(eval) = self.db.const_eval(*id, subst.clone()) {
return Ok(eval);
} else {
return Ok(unknown_const(constant.data(Interner).ty.clone()));
}
}
}
Ok(constant)
}
}
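// A minimal sketch of driving this folder, mirroring its use in
// `trait_solve_query` (the `goal` value here is assumed): every
// `ConstScalar::UnevaluatedConst` reachable from the goal is replaced by its
// evaluated result, or by an unknown const if evaluation fails.
//
//     let goal = goal
//         .try_fold_with(&mut UnevaluatedConstEvaluatorFolder { db }, DebruijnIndex::INNERMOST)
//         .unwrap();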
pub(crate) fn detect_variant_from_bytes<'a>(
layout: &'a Layout,
db: &dyn HirDatabase,
krate: CrateId,
b: &[u8],
e: EnumId,
) -> Option<(LocalEnumVariantId, &'a Layout)> {
let (var_id, var_layout) = match &layout.variants {
hir_def::layout::Variants::Single { index } => (index.0, &*layout),
hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
let target_data_layout = db.target_data_layout(krate)?;
let size = tag.size(&*target_data_layout).bytes_usize();
let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
let tag = i128::from_le_bytes(pad16(&b[offset..offset + size], false));
match tag_encoding {
TagEncoding::Direct => {
let x = variants.iter_enumerated().find(|x| {
db.const_eval_discriminant(EnumVariantId { parent: e, local_id: x.0 .0 })
== Ok(tag)
})?;
(x.0 .0, x.1)
}
TagEncoding::Niche { untagged_variant, niche_start, .. } => {
let candidate_tag = tag.wrapping_sub(*niche_start as i128) as usize;
let variant = variants
.iter_enumerated()
.map(|(x, _)| x)
.filter(|x| x != untagged_variant)
.nth(candidate_tag)
.unwrap_or(*untagged_variant);
(variant.0, &variants[variant])
}
}
}
};
Some((var_id, var_layout))
}
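// A worked example of the niche decoding above, with hypothetical values: for
// variants [Untagged, B, C] where `untagged_variant` is Untagged and
// `niche_start` is 2, a stored tag of 3 gives candidate_tag = 3 - 2 = 1, and
// `nth(1)` over the tagged variants [B, C] selects C. Tags outside the niche
// range make `nth` return `None`, falling back to `untagged_variant`.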