Rewrite method resolution to follow rustc more closely

It cannot be exactly the same, because we have needs that rustc doesn't have (namely, accurate enumeration of all methods — not just those with a specific name — for completions etc., while rustc needs only a best-effort implementation for diagnostics), but it is closer than the previous implementation.

In addition, we rewrite the closely related handling of operator inference and impl collection.

This in turn necessitates changing some other parts of inference in order to retain behavior. As a result, the behavior more closely matches rustc's and is also more correct.

This fixes 2 type mismatches on self (1 remains) and 4 diagnostics (1 remains), plus some unknown types.
This commit is contained in:
Chayim Refael Friedman 2025-11-05 18:17:17 +02:00
parent 5ffe3f45ce
commit 50384460c6
90 changed files with 7394 additions and 4231 deletions

View file

@ -273,7 +273,7 @@ pub trait RootQueryDb: SourceDatabase + salsa::Database {
fn transitive_rev_deps(&self, of: Crate) -> FxHashSet<Crate>;
}
pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Crate> {
fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Crate> {
// There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible
// and removing that is a bit difficult.
let mut worklist = vec![crate_id];

View file

@ -708,6 +708,20 @@ impl<'db> Resolver<'db> {
self.item_scope_().0
}
#[inline]
pub fn top_level_def_map(&self) -> &'db DefMap {
self.module_scope.def_map
}
#[inline]
pub fn is_visible(&self, db: &dyn DefDatabase, visibility: Visibility) -> bool {
visibility.is_visible_from_def_map(
db,
self.module_scope.def_map,
self.module_scope.module_id,
)
}
pub fn generic_def(&self) -> Option<GenericDefId> {
self.scopes().find_map(|scope| match scope {
Scope::GenericParams { def, .. } => Some(*def),

View file

@ -13,11 +13,11 @@ use triomphe::Arc;
use crate::{
TraitEnvironment,
db::HirDatabase,
infer::unify::InferenceTable,
infer::InferenceContext,
next_solver::{
Canonical, TraitRef, Ty, TyKind,
Canonical, DbInterner, ParamEnv, TraitRef, Ty, TyKind, TypingMode,
infer::{
InferOk,
DbInternerInferExt, InferCtxt,
traits::{Obligation, ObligationCause, PredicateObligations},
},
obligation_ctxt::ObligationCtxt,
@ -38,14 +38,15 @@ pub fn autoderef<'db>(
env: Arc<TraitEnvironment<'db>>,
ty: Canonical<'db, Ty<'db>>,
) -> impl Iterator<Item = Ty<'db>> + use<'db> {
let mut table = InferenceTable::new(db, env, None);
let ty = table.instantiate_canonical(ty);
let mut autoderef = Autoderef::new_no_tracking(&mut table, ty);
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let (ty, _) = infcx.instantiate_canonical(&ty);
let autoderef = Autoderef::new(&infcx, &env, ty);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
for (ty, _steps) in autoderef {
// `ty` may contain unresolved inference variables. Since there's no chance they would be
// resolved, just replace with fallback type.
let resolved = autoderef.table.resolve_completely(ty);
let resolved = infcx.resolve_vars_if_possible(ty).replace_infer_with_error(interner);
// If the deref chain contains a cycle (e.g. `A` derefs to `B` and `B` derefs to `A`), we
// would revisit some already visited types. Stop here to avoid duplication.
@ -105,13 +106,48 @@ struct AutoderefTraits {
trait_target: TypeAliasId,
}
// We use a trait here and a generic implementation unfortunately, because sometimes (specifically
// in place_op.rs), you need to have mutable access to the `InferenceContext` while the `Autoderef`
// borrows it.
pub(crate) trait AutoderefCtx<'db> {
    /// The inference context used to resolve inference variables and check
    /// trait obligations during autoderef.
    fn infcx(&self) -> &InferCtxt<'db>;
    /// The trait environment (param-env, crate and block info) that autoderef
    /// lang-item resolution and normalization run against.
    fn env(&self) -> &TraitEnvironment<'db>;
}
/// The plain [`AutoderefCtx`]: borrows an `InferCtxt` and a `TraitEnvironment`
/// directly, for callers that are not inside an `InferenceContext`.
pub(crate) struct DefaultAutoderefCtx<'a, 'db> {
    infcx: &'a InferCtxt<'db>,
    env: &'a TraitEnvironment<'db>,
}
// Trivial pass-through accessors for the borrowed-references case.
impl<'db> AutoderefCtx<'db> for DefaultAutoderefCtx<'_, 'db> {
    #[inline]
    fn infcx(&self) -> &InferCtxt<'db> {
        self.infcx
    }
    #[inline]
    fn env(&self) -> &TraitEnvironment<'db> {
        self.env
    }
}
/// An [`AutoderefCtx`] that owns a mutable borrow of the whole `InferenceContext`,
/// so callers (specifically place_op.rs — see the module comment above) can get
/// mutable access to the context back while the `Autoderef` is alive.
pub(crate) struct InferenceContextAutoderefCtx<'a, 'b, 'db>(&'a mut InferenceContext<'b, 'db>);
// Projects the infcx/env out of the wrapped `InferenceContext`'s inference table.
impl<'db> AutoderefCtx<'db> for InferenceContextAutoderefCtx<'_, '_, 'db> {
    #[inline]
    fn infcx(&self) -> &InferCtxt<'db> {
        &self.0.table.infer_ctxt
    }
    #[inline]
    fn env(&self) -> &TraitEnvironment<'db> {
        &self.0.table.trait_env
    }
}
/// Recursively dereference a type, considering both built-in
/// dereferences (`*`) and the `Deref` trait.
/// Although called `Autoderef` it can be configured to use the
/// `Receiver` trait instead of the `Deref` trait.
pub(crate) struct Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> {
pub(crate) struct GeneralAutoderef<'db, Ctx, Steps = Vec<(Ty<'db>, AutoderefKind)>> {
// Meta infos:
pub(crate) table: &'a mut InferenceTable<'db>,
ctx: Ctx,
traits: Option<AutoderefTraits>,
// Current state:
@ -122,7 +158,16 @@ pub(crate) struct Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> {
use_receiver_trait: bool,
}
impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Iterator for Autoderef<'a, 'db, Steps> {
pub(crate) type Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> =
GeneralAutoderef<'db, DefaultAutoderefCtx<'a, 'db>, Steps>;
pub(crate) type InferenceContextAutoderef<'a, 'b, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> =
GeneralAutoderef<'db, InferenceContextAutoderefCtx<'a, 'b, 'db>, Steps>;
impl<'db, Ctx, Steps> Iterator for GeneralAutoderef<'db, Ctx, Steps>
where
Ctx: AutoderefCtx<'db>,
Steps: TrackAutoderefSteps<'db>,
{
type Item = (Ty<'db>, usize);
fn next(&mut self) -> Option<Self::Item> {
@ -148,26 +193,26 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Iterator for Autoderef<'a, 'db, S
// be better to skip this clause and use the Overloaded case only, since &T
// and &mut T implement Receiver. But built-in derefs apply equally to Receiver
// and Deref, and this has benefits for const and the emitted MIR.
let (kind, new_ty) = if let Some(ty) =
self.state.cur_ty.builtin_deref(self.table.db, self.include_raw_pointers)
{
debug_assert_eq!(ty, self.table.infer_ctxt.resolve_vars_if_possible(ty));
// NOTE: we may still need to normalize the built-in deref in case
// we have some type like `&<Ty as Trait>::Assoc`, since users of
// autoderef expect this type to have been structurally normalized.
if let TyKind::Alias(..) = ty.kind() {
let (normalized_ty, obligations) = structurally_normalize_ty(self.table, ty)?;
self.state.obligations.extend(obligations);
(AutoderefKind::Builtin, normalized_ty)
let (kind, new_ty) =
if let Some(ty) = self.state.cur_ty.builtin_deref(self.include_raw_pointers) {
debug_assert_eq!(ty, self.infcx().resolve_vars_if_possible(ty));
// NOTE: we may still need to normalize the built-in deref in case
// we have some type like `&<Ty as Trait>::Assoc`, since users of
// autoderef expect this type to have been structurally normalized.
if let TyKind::Alias(..) = ty.kind() {
let (normalized_ty, obligations) =
structurally_normalize_ty(self.infcx(), self.env().env, ty)?;
self.state.obligations.extend(obligations);
(AutoderefKind::Builtin, normalized_ty)
} else {
(AutoderefKind::Builtin, ty)
}
} else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) {
// The overloaded deref check already normalizes the pointee type.
(AutoderefKind::Overloaded, ty)
} else {
(AutoderefKind::Builtin, ty)
}
} else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) {
// The overloaded deref check already normalizes the pointee type.
(AutoderefKind::Overloaded, ty)
} else {
return None;
};
return None;
};
self.state.steps.push(self.state.cur_ty, kind);
debug!(
@ -183,34 +228,84 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Iterator for Autoderef<'a, 'db, S
}
impl<'a, 'db> Autoderef<'a, 'db> {
pub(crate) fn new(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
Self::new_impl(table, base_ty)
#[inline]
pub(crate) fn new_with_tracking(
infcx: &'a InferCtxt<'db>,
env: &'a TraitEnvironment<'db>,
base_ty: Ty<'db>,
) -> Self {
Self::new_impl(DefaultAutoderefCtx { infcx, env }, base_ty)
}
}
impl<'a, 'b, 'db> InferenceContextAutoderef<'a, 'b, 'db> {
#[inline]
pub(crate) fn new_from_inference_context(
ctx: &'a mut InferenceContext<'b, 'db>,
base_ty: Ty<'db>,
) -> Self {
Self::new_impl(InferenceContextAutoderefCtx(ctx), base_ty)
}
#[inline]
pub(crate) fn ctx(&mut self) -> &mut InferenceContext<'b, 'db> {
self.ctx.0
}
}
impl<'a, 'db> Autoderef<'a, 'db, usize> {
pub(crate) fn new_no_tracking(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
Self::new_impl(table, base_ty)
#[inline]
pub(crate) fn new(
infcx: &'a InferCtxt<'db>,
env: &'a TraitEnvironment<'db>,
base_ty: Ty<'db>,
) -> Self {
Self::new_impl(DefaultAutoderefCtx { infcx, env }, base_ty)
}
}
impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
fn new_impl(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
Autoderef {
impl<'db, Ctx, Steps> GeneralAutoderef<'db, Ctx, Steps>
where
Ctx: AutoderefCtx<'db>,
Steps: TrackAutoderefSteps<'db>,
{
#[inline]
fn new_impl(ctx: Ctx, base_ty: Ty<'db>) -> Self {
GeneralAutoderef {
state: AutoderefSnapshot {
steps: Steps::default(),
cur_ty: table.infer_ctxt.resolve_vars_if_possible(base_ty),
cur_ty: ctx.infcx().resolve_vars_if_possible(base_ty),
obligations: PredicateObligations::new(),
at_start: true,
reached_recursion_limit: false,
},
table,
ctx,
traits: None,
include_raw_pointers: false,
use_receiver_trait: false,
}
}
#[inline]
fn infcx(&self) -> &InferCtxt<'db> {
self.ctx.infcx()
}
#[inline]
fn env(&self) -> &TraitEnvironment<'db> {
self.ctx.env()
}
#[inline]
fn interner(&self) -> DbInterner<'db> {
self.infcx().interner
}
#[inline]
fn db(&self) -> &'db dyn HirDatabase {
self.interner().db
}
fn autoderef_traits(&mut self) -> Option<AutoderefTraits> {
match &mut self.traits {
Some(it) => Some(*it),
@ -219,25 +314,23 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
(|| {
Some(AutoderefTraits {
trait_: LangItem::Receiver
.resolve_trait(self.table.db, self.table.trait_env.krate)?,
.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::ReceiverTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
.resolve_type_alias(self.db(), self.env().krate)?,
})
})()
.or_else(|| {
Some(AutoderefTraits {
trait_: LangItem::Deref
.resolve_trait(self.table.db, self.table.trait_env.krate)?,
trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::DerefTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
.resolve_type_alias(self.db(), self.env().krate)?,
})
})?
} else {
AutoderefTraits {
trait_: LangItem::Deref
.resolve_trait(self.table.db, self.table.trait_env.krate)?,
trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::DerefTarget
.resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
.resolve_type_alias(self.db(), self.env().krate)?,
}
};
Some(*self.traits.insert(traits))
@ -247,31 +340,32 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
fn overloaded_deref_ty(&mut self, ty: Ty<'db>) -> Option<Ty<'db>> {
debug!("overloaded_deref_ty({:?})", ty);
let interner = self.table.interner();
let interner = self.interner();
// <ty as Deref>, or whatever the equivalent trait is that we've been asked to walk.
let AutoderefTraits { trait_, trait_target } = self.autoderef_traits()?;
let trait_ref = TraitRef::new(interner, trait_.into(), [ty]);
let obligation =
Obligation::new(interner, ObligationCause::new(), self.table.trait_env.env, trait_ref);
Obligation::new(interner, ObligationCause::new(), self.env().env, trait_ref);
// We detect whether the self type implements `Deref` before trying to
// structurally normalize. We use `predicate_may_hold_opaque_types_jank`
// to support not-yet-defined opaque types. It will succeed for `impl Deref`
// but fail for `impl OtherTrait`.
if !self.table.infer_ctxt.predicate_may_hold_opaque_types_jank(&obligation) {
if !self.infcx().predicate_may_hold_opaque_types_jank(&obligation) {
debug!("overloaded_deref_ty: cannot match obligation");
return None;
}
let (normalized_ty, obligations) = structurally_normalize_ty(
self.table,
self.infcx(),
self.env().env,
Ty::new_projection(interner, trait_target.into(), [ty]),
)?;
debug!("overloaded_deref_ty({:?}) = ({:?}, {:?})", ty, normalized_ty, obligations);
self.state.obligations.extend(obligations);
Some(self.table.infer_ctxt.resolve_vars_if_possible(normalized_ty))
Some(self.infcx().resolve_vars_if_possible(normalized_ty))
}
/// Returns the final type we ended up with, which may be an unresolved
@ -292,7 +386,6 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
&self.state.steps
}
#[expect(dead_code)]
pub(crate) fn reached_recursion_limit(&self) -> bool {
self.state.reached_recursion_limit
}
@ -316,12 +409,12 @@ impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
}
fn structurally_normalize_ty<'db>(
table: &InferenceTable<'db>,
infcx: &InferCtxt<'db>,
param_env: ParamEnv<'db>,
ty: Ty<'db>,
) -> Option<(Ty<'db>, PredicateObligations<'db>)> {
let mut ocx = ObligationCtxt::new(&table.infer_ctxt);
let Ok(normalized_ty) =
ocx.structurally_normalize_ty(&ObligationCause::misc(), table.trait_env.env, ty)
let mut ocx = ObligationCtxt::new(infcx);
let Ok(normalized_ty) = ocx.structurally_normalize_ty(&ObligationCause::misc(), param_env, ty)
else {
// We shouldn't have errors here in the old solver, except for
// evaluate/fulfill mismatches, but that's not a reason for an ICE.
@ -334,17 +427,3 @@ fn structurally_normalize_ty<'db>(
Some((normalized_ty, ocx.into_pending_obligations()))
}
pub(crate) fn overloaded_deref_ty<'db>(
table: &InferenceTable<'db>,
ty: Ty<'db>,
) -> Option<InferOk<'db, Ty<'db>>> {
let interner = table.interner();
let trait_target = LangItem::DerefTarget.resolve_type_alias(table.db, table.trait_env.krate)?;
let (normalized_ty, obligations) =
structurally_normalize_ty(table, Ty::new_projection(interner, trait_target.into(), [ty]))?;
Some(InferOk { value: normalized_ty, obligations })
}

View file

@ -851,6 +851,7 @@ fn ifs() {
fn loops() {
check_number(
r#"
//- minicore: add, builtin_impls
const GOAL: u8 = {
let mut x = 0;
loop {
@ -871,6 +872,7 @@ fn loops() {
);
check_number(
r#"
//- minicore: add, builtin_impls
const GOAL: u8 = {
let mut x = 0;
loop {
@ -885,6 +887,7 @@ fn loops() {
);
check_number(
r#"
//- minicore: add, builtin_impls
const GOAL: u8 = {
'a: loop {
let x = 'b: loop {
@ -907,7 +910,7 @@ fn loops() {
);
check_number(
r#"
//- minicore: add
//- minicore: add, builtin_impls
const GOAL: u8 = {
let mut x = 0;
'a: loop {
@ -1277,7 +1280,7 @@ fn pattern_matching_ergonomics() {
fn destructing_assignment() {
check_number(
r#"
//- minicore: add
//- minicore: add, builtin_impls
const fn f(i: &mut u8) -> &mut u8 {
*i += 1;
i
@ -1469,11 +1472,11 @@ fn result_layout_niche_optimization() {
fn options() {
check_number(
r#"
//- minicore: option
//- minicore: option, add, builtin_impls
const GOAL: u8 = {
let x = Some(2);
match x {
Some(y) => 2 * y,
Some(y) => 2 + y,
_ => 10,
}
};
@ -1482,7 +1485,7 @@ fn options() {
);
check_number(
r#"
//- minicore: option
//- minicore: option, add, builtin_impls
fn f(x: Option<Option<i32>>) -> i32 {
if let Some(y) = x && let Some(z) = y {
z
@ -1498,11 +1501,11 @@ fn options() {
);
check_number(
r#"
//- minicore: option
//- minicore: option, add, builtin_impls
const GOAL: u8 = {
let x = None;
match x {
Some(y) => 2 * y,
Some(y) => 2 + y,
_ => 10,
}
};

View file

@ -3,15 +3,12 @@
use base_db::{Crate, target::TargetLoadError};
use hir_def::{
AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId,
TypeAliasId, TypeOrConstParamId, VariantId, db::DefDatabase, hir::ExprId,
layout::TargetDataLayout,
AdtId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId,
GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId,
TypeOrConstParamId, VariantId, db::DefDatabase, hir::ExprId, layout::TargetDataLayout,
};
use hir_expand::name::Name;
use la_arena::ArenaMap;
use salsa::plumbing::AsId;
use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
@ -19,8 +16,7 @@ use crate::{
consteval::ConstEvalError,
dyn_compatibility::DynCompatibilityViolation,
layout::{Layout, LayoutError},
lower::{Diagnostics, GenericDefaults, GenericPredicates, ImplTraits},
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
lower::{Diagnostics, GenericDefaults},
mir::{BorrowckResult, MirBody, MirLowerError},
next_solver::{Const, EarlyBinder, GenericArgs, PolyFnSig, TraitRef, Ty, VariancesOf},
};
@ -190,43 +186,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
def: CallableDefId,
) -> EarlyBinder<'db, PolyFnSig<'db>>;
#[salsa::invoke(crate::lower::return_type_impl_traits)]
fn return_type_impl_traits<'db>(
&'db self,
def: FunctionId,
) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>;
#[salsa::invoke(crate::lower::type_alias_impl_traits)]
fn type_alias_impl_traits<'db>(
&'db self,
def: TypeAliasId,
) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>;
#[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
fn generic_predicates_without_parent_with_diagnostics<'db>(
&'db self,
def: GenericDefId,
) -> (GenericPredicates<'db>, Diagnostics);
#[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
#[salsa::transparent]
fn generic_predicates_without_parent<'db>(
&'db self,
def: GenericDefId,
) -> GenericPredicates<'db>;
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
#[salsa::cycle(cycle_result = crate::lower::generic_predicates_for_param_cycle_result)]
fn generic_predicates_for_param<'db>(
&'db self,
def: GenericDefId,
param_id: TypeOrConstParamId,
assoc_name: Option<Name>,
) -> GenericPredicates<'db>;
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates<'db>(&'db self, def: GenericDefId) -> GenericPredicates<'db>;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId)
@ -249,32 +208,6 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>;
#[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>;
#[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
/// Collects all crates in the dependency graph that have impls for the
/// given fingerprint. This is only used for primitive types and types
/// annotated with `rustc_has_incoherent_inherent_impls`; for other types
/// we just look at the crate where the type is defined.
#[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
fn incoherent_inherent_impl_crates(
&self,
krate: Crate,
fp: TyFingerprint,
) -> SmallVec<[Crate; 2]>;
#[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>;
#[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
#[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>;
// Interned IDs for solver integration
#[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId;

View file

@ -52,6 +52,7 @@ use crate::{
db::{HirDatabase, InternedClosure, InternedCoroutine},
generics::generics,
layout::Layout,
lower::GenericPredicates,
mir::pad16,
next_solver::{
AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder,
@ -625,23 +626,20 @@ fn write_projection<'db>(
{
// FIXME: We shouldn't use `param.id`, it should be removed. We should know the
// `GenericDefId` from the formatted type (store it inside the `HirFormatter`).
let bounds =
f.db.generic_predicates(param.id.parent())
.instantiate_identity()
.into_iter()
.flatten()
.filter(|wc| {
let ty = match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty(),
ClauseKind::TypeOutlives(t) => t.0,
_ => return false,
};
let TyKind::Alias(AliasTyKind::Projection, a) = ty.kind() else {
return false;
};
a == *alias
})
.collect::<Vec<_>>();
let bounds = GenericPredicates::query_all(f.db, param.id.parent())
.iter_identity_copied()
.filter(|wc| {
let ty = match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty(),
ClauseKind::TypeOutlives(t) => t.0,
_ => return false,
};
let TyKind::Alias(AliasTyKind::Projection, a) = ty.kind() else {
return false;
};
a == *alias
})
.collect::<Vec<_>>();
if !bounds.is_empty() {
return f.format_bounds_with(*alias, |f| {
write_bounds_like_dyn_trait_with_prefix(
@ -1122,13 +1120,8 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
_ => unreachable!(),
};
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id);
if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id {
let datas = db
.return_type_impl_traits(func)
.expect("impl trait id without data");
let data = (*datas)
.as_ref()
.map_bound(|rpit| &rpit.impl_traits[idx].predicates);
if let ImplTraitId::ReturnTypeImplTrait(func, _) = impl_trait_id {
let data = impl_trait_id.predicates(db);
let bounds =
|| data.iter_instantiated_copied(f.interner, ty.args.as_slice());
let mut len = bounds().count();
@ -1354,43 +1347,24 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
));
}
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id);
match impl_trait_id {
ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let datas =
db.return_type_impl_traits(func).expect("impl trait id without data");
let data =
(*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
let bounds = data
.iter_instantiated_copied(interner, alias_ty.args.as_slice())
.collect::<Vec<_>>();
let krate = func.krate(db);
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(*self),
&bounds,
SizedByDefault::Sized { anchor: krate },
)?;
let data = impl_trait_id.predicates(db);
let bounds = data
.iter_instantiated_copied(interner, alias_ty.args.as_slice())
.collect::<Vec<_>>();
let krate = match impl_trait_id {
ImplTraitId::ReturnTypeImplTrait(func, _) => {
func.krate(db)
// FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
}
ImplTraitId::TypeAliasImplTrait(alias, idx) => {
let datas =
db.type_alias_impl_traits(alias).expect("impl trait id without data");
let data =
(*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
let bounds = data
.iter_instantiated_copied(interner, alias_ty.args.as_slice())
.collect::<Vec<_>>();
let krate = alias.krate(db);
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(*self),
&bounds,
SizedByDefault::Sized { anchor: krate },
)?;
}
}
ImplTraitId::TypeAliasImplTrait(alias, _) => alias.krate(db),
};
write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(*self),
&bounds,
SizedByDefault::Sized { anchor: krate },
)?;
}
TyKind::Closure(id, substs) => {
let id = id.0;
@ -1541,11 +1515,8 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
)?
}
TypeParamProvenance::ArgumentImplTrait => {
let bounds = db
.generic_predicates(param.id.parent())
.instantiate_identity()
.into_iter()
.flatten()
let bounds = GenericPredicates::query_all(f.db, param.id.parent())
.iter_identity_copied()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == *self,
ClauseKind::Projection(proj) => proj.self_ty() == *self,

View file

@ -9,9 +9,9 @@ use triomphe::Arc;
use crate::{
TraitEnvironment, consteval,
db::HirDatabase,
method_resolution::TyFingerprint,
method_resolution::TraitImpls,
next_solver::{
Ty, TyKind,
SimplifiedType, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -27,13 +27,13 @@ fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
return false;
};
let impls = match module.containing_block() {
Some(block) => match db.trait_impls_in_block(block) {
Some(block) => match TraitImpls::for_block(db, block) {
Some(it) => it,
None => return false,
},
None => db.trait_impls_in_crate(module.krate()),
None => &**TraitImpls::for_crate(db, module.krate()),
};
impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some()
!impls.for_trait_and_self_ty(drop_trait, &SimplifiedType::Adt(adt.into())).is_empty()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]

View file

@ -18,7 +18,7 @@ use smallvec::SmallVec;
use crate::{
ImplTraitId,
db::{HirDatabase, InternedOpaqueTyId},
lower::associated_ty_item_bounds,
lower::{GenericPredicates, associated_ty_item_bounds},
next_solver::{
Binder, Clause, Clauses, DbInterner, EarlyBinder, GenericArgs, Goal, ParamEnv, ParamTy,
SolverDefId, TraitPredicate, TraitRef, Ty, TypingMode, infer::DbInternerInferExt, mk_param,
@ -136,11 +136,11 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b
};
let interner = DbInterner::new_with(db, Some(krate), None);
let predicates = db.generic_predicates(def);
let predicates = GenericPredicates::query_explicit(db, def);
// FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented to
// rust-analyzer yet
// https://github.com/rust-lang/rust/blob/ddaf12390d3ffb7d5ba74491a48f3cd528e5d777/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L490
elaborate::elaborate(interner, predicates.iter().copied()).any(|pred| {
elaborate::elaborate(interner, predicates.iter_identity_copied()).any(|pred| {
match pred.kind().skip_binder() {
ClauseKind::Trait(trait_pred) => {
if sized == trait_pred.def_id().0
@ -162,8 +162,8 @@ pub fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> b
// but we don't have good way to render such locations.
// So, just return single boolean value for existence of such `Self` reference
fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
db.generic_predicates(trait_.into())
.iter()
GenericPredicates::query_explicit(db, trait_.into())
.iter_identity_copied()
.any(|pred| predicate_references_self(db, trait_, pred, AllowSelfProjection::No))
}
@ -199,7 +199,7 @@ enum AllowSelfProjection {
fn predicate_references_self<'db>(
db: &'db dyn HirDatabase,
trait_: TraitId,
predicate: &Clause<'db>,
predicate: Clause<'db>,
allow_self_projection: AllowSelfProjection,
) -> bool {
match predicate.kind().skip_binder() {
@ -363,8 +363,8 @@ where
cb(MethodViolationCode::UndispatchableReceiver)?;
}
let predicates = &*db.generic_predicates_without_parent(func.into());
for pred in predicates {
let predicates = GenericPredicates::query_own(db, func.into());
for pred in predicates.iter_identity_copied() {
let pred = pred.kind().skip_binder();
if matches!(pred, ClauseKind::TypeOutlives(_)) {
@ -440,7 +440,7 @@ fn receiver_is_dispatchable<'db>(
let unsized_receiver_ty = receiver_for_self_ty(interner, func, receiver_ty, unsized_self_ty);
let param_env = {
let generic_predicates = &*db.generic_predicates(func.into());
let generic_predicates = GenericPredicates::query_all(db, func.into());
// Self: Unsize<U>
let unsize_predicate =
@ -458,7 +458,7 @@ fn receiver_is_dispatchable<'db>(
ParamEnv {
clauses: Clauses::new_from_iter(
interner,
generic_predicates.iter().copied().chain([
generic_predicates.iter_identity_copied().chain([
unsize_predicate.upcast(interner),
trait_predicate.upcast(interner),
meta_sized_predicate.upcast(interner),

View file

@ -21,9 +21,11 @@ pub(crate) mod diagnostics;
mod expr;
mod fallback;
mod mutability;
mod op;
mod opaques;
mod pat;
mod path;
mod place_op;
pub(crate) mod unify;
use std::{cell::OnceCell, convert::identity, iter, ops::Index};
@ -45,12 +47,14 @@ use hir_expand::{mod_path::ModPath, name::Name};
use indexmap::IndexSet;
use intern::sym;
use la_arena::ArenaMap;
use macros::{TypeFoldable, TypeVisitable};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
AliasTyKind, TypeFoldable,
inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
};
use span::Edition;
use stdx::never;
use triomphe::Arc;
@ -65,10 +69,13 @@ use crate::{
lower::{
ImplTraitIdx, ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic,
},
method_resolution::{CandidateId, MethodResolutionUnstableFeatures},
mir::MirSpan,
next_solver::{
AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind,
Tys, abi::Safety, infer::traits::ObligationCause,
Tys,
abi::Safety,
infer::{InferCtxt, traits::ObligationCause},
},
traits::FnTrait,
utils::TargetFeatureIsSafeInTarget,
@ -330,16 +337,21 @@ pub struct TypeMismatch<'db> {
/// At some point, of course, `Box` should move out of the compiler, in which
/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub struct Adjustment<'db> {
pub kind: Adjust<'db>,
#[type_visitable(ignore)]
#[type_foldable(identity)]
pub kind: Adjust,
pub target: Ty<'db>,
}
impl<'db> Adjustment<'db> {
pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self {
let ty = Ty::new_ref(interner, lt, ty, m);
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt, m)), target: ty }
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))),
target: ty,
}
}
}
@ -357,20 +369,20 @@ impl<'db> Adjustment<'db> {
/// capable mutable borrows.
/// See #49434 for tracking.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub(crate) enum AllowTwoPhase {
pub enum AllowTwoPhase {
// FIXME: We should use this when appropriate.
Yes,
No,
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Adjust<'db> {
pub enum Adjust {
/// Go from ! to any type.
NeverToAny,
/// Dereference once, producing a place.
Deref(Option<OverloadedDeref>),
/// Take the address and produce either a `&` or `*` pointer.
Borrow(AutoBorrow<'db>),
Borrow(AutoBorrow),
Pointer(PointerCast),
}
@ -381,18 +393,47 @@ pub enum Adjust<'db> {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct OverloadedDeref(pub Option<Mutability>);
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum AutoBorrow<'db> {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
/// Mutability of an auto-borrow adjustment, carrying — for mutable borrows
/// only — whether a two-phase borrow is allowed.
pub enum AutoBorrowMutability {
    /// A mutable auto-borrow (`&mut`), with its two-phase-borrow allowance.
    Mut { allow_two_phase_borrow: AllowTwoPhase },
    /// An immutable auto-borrow (`&`).
    Not,
}
impl AutoBorrowMutability {
    /// Creates an `AutoBorrowMutability` from a mutability and allowance of two phase borrows.
    ///
    /// Note that when `mutbl.is_not()`, `allow_two_phase_borrow` is ignored
    pub fn new(mutbl: Mutability, allow_two_phase_borrow: AllowTwoPhase) -> Self {
        // Only a mutable borrow can carry a two-phase allowance; for shared
        // borrows the flag is irrelevant and dropped.
        if let Mutability::Mut = mutbl {
            Self::Mut { allow_two_phase_borrow }
        } else {
            Self::Not
        }
    }
}
impl From<AutoBorrowMutability> for Mutability {
    /// Forgets the two-phase-borrow allowance, keeping only the mutability.
    fn from(m: AutoBorrowMutability) -> Self {
        let is_mut = matches!(m, AutoBorrowMutability::Mut { .. });
        if is_mut { Mutability::Mut } else { Mutability::Not }
    }
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AutoBorrow {
/// Converts from T to &T.
Ref(Region<'db>, Mutability),
Ref(AutoBorrowMutability),
/// Converts from T to *T.
RawPtr(Mutability),
}
impl<'db> AutoBorrow<'db> {
fn mutability(&self) -> Mutability {
let (AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) = self;
*m
impl AutoBorrow {
fn mutability(self) -> Mutability {
match self {
AutoBorrow::Ref(mutbl) => mutbl.into(),
AutoBorrow::RawPtr(mutbl) => mutbl,
}
}
}
@ -442,7 +483,7 @@ pub struct InferenceResult<'db> {
/// For each struct literal or pattern, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, (AssocItemId, GenericArgs<'db>)>,
assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, GenericArgs<'db>)>,
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
@ -457,7 +498,7 @@ pub struct InferenceResult<'db> {
pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>,
pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>,
pub(crate) type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
// `TyKind::Error`.
@ -535,16 +576,16 @@ impl<'db> InferenceResult<'db> {
pub fn assoc_resolutions_for_expr(
&self,
id: ExprId,
) -> Option<(AssocItemId, GenericArgs<'db>)> {
) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(AssocItemId, GenericArgs<'db>)> {
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_expr_or_pat(
&self,
id: ExprOrPatId,
) -> Option<(AssocItemId, GenericArgs<'db>)> {
) -> Option<(CandidateId, GenericArgs<'db>)> {
match id {
ExprOrPatId::ExprId(id) => self.assoc_resolutions_for_expr(id),
ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id),
@ -769,8 +810,10 @@ pub(crate) struct InferenceContext<'body, 'db> {
/// and resolve the path via its methods. This will ensure proper error reporting.
pub(crate) resolver: Resolver<'db>,
target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
pub(crate) unstable_features: MethodResolutionUnstableFeatures,
pub(crate) edition: Edition,
pub(crate) generic_def: GenericDefId,
table: unify::InferenceTable<'db>,
pub(crate) table: unify::InferenceTable<'db>,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult<'db>,
@ -873,6 +916,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
return_ty: types.error, // set in collect_* calls
types,
target_features: OnceCell::new(),
unstable_features: MethodResolutionUnstableFeatures::from_def_map(
resolver.top_level_def_map(),
),
edition: resolver.krate().data(db).edition,
table,
tuple_field_accesses_rev: Default::default(),
resume_yield_tys: None,
@ -906,18 +953,15 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.resolver.krate()
}
fn target_features<'a>(
db: &dyn HirDatabase,
target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
owner: DefWithBodyId,
krate: Crate,
) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) {
let (target_features, target_feature_is_safe) = target_features.get_or_init(|| {
let target_features = match owner {
DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())),
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| {
let target_features = match self.owner {
DefWithBodyId::FunctionId(id) => {
TargetFeatures::from_attrs(&self.db.attrs(id.into()))
}
_ => TargetFeatures::default(),
};
let target_feature_is_safe = match &krate.workspace_data(db).target {
let target_feature_is_safe = match &self.krate().workspace_data(self.db).target {
Ok(target) => crate::utils::target_feature_is_safe_in_target(target),
Err(_) => TargetFeatureIsSafeInTarget::No,
};
@ -927,7 +971,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
#[inline]
pub(crate) fn set_tainted_by_errors(&mut self) {
fn set_tainted_by_errors(&mut self) {
self.result.has_errors = true;
}
@ -1162,6 +1206,11 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.table.interner()
}
#[inline]
pub(crate) fn infcx(&self) -> &InferCtxt<'db> {
&self.table.infer_ctxt
}
fn infer_body(&mut self) {
match self.return_coercion {
Some(_) => self.infer_return(self.body.body_expr),
@ -1179,7 +1228,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.result.type_of_expr.insert(expr, ty);
}
fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) {
pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) {
if adjustments.is_empty() {
return;
}
@ -1212,7 +1261,12 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.result.pat_adjustments.entry(pat).or_default().extend(adjustments);
}
fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: GenericArgs<'db>) {
pub(crate) fn write_method_resolution(
&mut self,
expr: ExprId,
func: FunctionId,
subst: GenericArgs<'db>,
) {
self.result.method_resolutions.insert(expr, (func, subst));
}
@ -1223,7 +1277,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
fn write_assoc_resolution(
&mut self,
id: ExprOrPatId,
item: AssocItemId,
item: CandidateId,
subs: GenericArgs<'db>,
) {
self.result.assoc_resolutions.insert(id, (item, subs));
@ -1237,7 +1291,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.result.type_of_binding.insert(id, ty);
}
fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) {
pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) {
self.diagnostics.push(diagnostic);
}
@ -1284,7 +1338,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.process_user_written_ty(ty)
}
fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> {
pub(crate) fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> {
self.make_ty(
type_ref,
self.body,
@ -1293,7 +1347,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
)
}
fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> {
pub(crate) fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> {
let const_ = self.with_ty_lowering(
self.body,
InferenceTyDiagnosticSource::Body,
@ -1303,7 +1357,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.insert_type_vars(const_)
}
fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> {
pub(crate) fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> {
let const_ = self.with_ty_lowering(
self.body,
InferenceTyDiagnosticSource::Body,
@ -1317,7 +1371,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.types.error
}
fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> {
pub(crate) fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> {
let lt = self.with_ty_lowering(
self.body,
InferenceTyDiagnosticSource::Body,
@ -1399,19 +1453,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
/// Whenever you lower a user-written type, you should call this.
fn process_user_written_ty<T>(&mut self, ty: T) -> T
where
T: TypeFoldable<DbInterner<'db>>,
{
fn process_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
self.table.process_user_written_ty(ty)
}
/// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation,
/// while `process_user_written_ty()` should (but doesn't currently).
fn process_remote_user_written_ty<T>(&mut self, ty: T) -> T
where
T: TypeFoldable<DbInterner<'db>>,
{
fn process_remote_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
self.table.process_remote_user_written_ty(ty)
}
@ -1430,8 +1478,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
fn demand_eqtype(&mut self, expected: Ty<'db>, actual: Ty<'db>) {
let result = self
.table
.infer_ctxt
.at(&ObligationCause::new(), self.table.trait_env.env)
.at(&ObligationCause::new())
.eq(expected, actual)
.map(|infer_ok| self.table.register_infer_ok(infer_ok));
if let Err(_err) = result {
@ -1439,6 +1486,46 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
}
fn demand_suptype(&mut self, expected: Ty<'db>, actual: Ty<'db>) {
let result = self
.table
.at(&ObligationCause::new())
.sup(expected, actual)
.map(|infer_ok| self.table.register_infer_ok(infer_ok));
if let Err(_err) = result {
// FIXME: Emit diagnostic.
}
}
fn demand_coerce(
&mut self,
expr: ExprId,
checked_ty: Ty<'db>,
expected: Ty<'db>,
allow_two_phase: AllowTwoPhase,
expr_is_read: ExprIsRead,
) -> Ty<'db> {
let result = self.coerce(expr.into(), checked_ty, expected, allow_two_phase, expr_is_read);
if let Err(_err) = result {
// FIXME: Emit diagnostic.
}
result.unwrap_or(self.types.error)
}
fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
self.result[expr]
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
let mut ty = None;
if let Some(it) = self.result.expr_adjustments.get(&e)
&& let Some(it) = it.last()
{
ty = Some(it.target);
}
ty.unwrap_or_else(|| self.expr_ty(e))
}
fn resolve_associated_type_with_params(
&mut self,
inner_ty: Ty<'db>,
@ -1596,9 +1683,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
(ty, _) = path_ctx.lower_partly_resolved_path(resolution, true);
tried_resolving_once = true;
ty = self.table.insert_type_vars(ty);
ty = self.table.normalize_associated_types_in(ty);
ty = self.table.structurally_resolve_type(ty);
ty = self.table.process_user_written_ty(ty);
if ty.is_ty_error() {
return (self.err_ty(), None);
}
@ -1709,18 +1794,6 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output))
}
fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> {
self.resolve_lang_item(lang)?.as_trait()
}
fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?)
}
fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
self.resolve_output_on(self.resolve_lang_trait(LangItem::Not)?)
}
fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
let ItemContainerId::TraitId(trait_) = self
.resolve_lang_item(LangItem::IntoFutureIntoFuture)?
@ -1768,24 +1841,17 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
Some(struct_.into())
}
fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
self.resolve_output_on(self.resolve_lang_trait(LangItem::Index)?)
}
fn resolve_va_list(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
Some(struct_.into())
}
fn get_traits_in_scope<'a>(
resolver: &Resolver<'db>,
traits_in_scope: &'a FxHashSet<TraitId>,
) -> Either<FxHashSet<TraitId>, &'a FxHashSet<TraitId>> {
let mut b_traits = resolver.traits_in_scope_from_block_scopes().peekable();
pub(crate) fn get_traits_in_scope(&self) -> Either<FxHashSet<TraitId>, &FxHashSet<TraitId>> {
let mut b_traits = self.resolver.traits_in_scope_from_block_scopes().peekable();
if b_traits.peek().is_some() {
Either::Left(traits_in_scope.iter().copied().chain(b_traits).collect())
Either::Left(self.traits_in_scope.iter().copied().chain(b_traits).collect())
} else {
Either::Right(traits_in_scope)
Either::Right(&self.traits_in_scope)
}
}
}

View file

@ -6,7 +6,7 @@ use rustc_ast_ir::Mutability;
use crate::{
Adjust, Adjustment, OverloadedDeref,
autoderef::{Autoderef, AutoderefKind},
autoderef::{Autoderef, AutoderefCtx, AutoderefKind, GeneralAutoderef},
infer::unify::InferenceTable,
next_solver::{
Ty,
@ -15,18 +15,16 @@ use crate::{
};
impl<'db> InferenceTable<'db> {
pub(crate) fn autoderef(&mut self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> {
Autoderef::new(self, base_ty)
pub(crate) fn autoderef(&self, base_ty: Ty<'db>) -> Autoderef<'_, 'db, usize> {
Autoderef::new(&self.infer_ctxt, &self.trait_env, base_ty)
}
pub(crate) fn autoderef_with_tracking(&self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> {
Autoderef::new_with_tracking(&self.infer_ctxt, &self.trait_env, base_ty)
}
}
impl<'db> Autoderef<'_, 'db> {
/// Returns the adjustment steps.
pub(crate) fn adjust_steps(mut self) -> Vec<Adjustment<'db>> {
let infer_ok = self.adjust_steps_as_infer_ok();
self.table.register_infer_ok(infer_ok)
}
impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> {
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment<'db>>> {
let steps = self.steps();
if steps.is_empty() {

View file

@ -11,7 +11,7 @@ use stdx::never;
use crate::{
InferenceDiagnostic,
db::HirDatabase,
infer::{AllowTwoPhase, InferenceContext, coerce::CoerceNever},
infer::{AllowTwoPhase, InferenceContext, expr::ExprIsRead},
next_solver::{BoundExistentialPredicates, DbInterner, ParamTy, Ty, TyKind},
};
@ -120,7 +120,7 @@ impl<'db> CastCheck<'db> {
self.expr_ty,
self.cast_ty,
AllowTwoPhase::No,
CoerceNever::Yes,
ExprIsRead::Yes,
)
.is_ok()
{
@ -167,7 +167,7 @@ impl<'db> CastCheck<'db> {
self.expr_ty,
fn_ptr,
AllowTwoPhase::No,
CoerceNever::Yes,
ExprIsRead::Yes,
)
.is_ok()
{
@ -248,7 +248,7 @@ impl<'db> CastCheck<'db> {
self.expr_ty,
array_ptr_type,
AllowTwoPhase::No,
CoerceNever::Yes,
ExprIsRead::Yes,
)
.is_ok()
{
@ -263,7 +263,7 @@ impl<'db> CastCheck<'db> {
// This is a less strict condition than rustc's `demand_eqtype`,
// but false negative is better than false positive
if ctx
.coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, CoerceNever::Yes)
.coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, ExprIsRead::Yes)
.is_ok()
{
return Ok(());

View file

@ -31,7 +31,6 @@ use crate::{
BoundRegionConversionTime, InferOk, InferResult,
traits::{ObligationCause, PredicateObligations},
},
util::explicit_item_bounds,
},
traits::FnTrait,
};
@ -255,8 +254,10 @@ impl<'db> InferenceContext<'_, 'db> {
.deduce_closure_signature_from_predicates(
expected_ty,
closure_kind,
explicit_item_bounds(self.interner(), def_id)
.iter_instantiated(self.interner(), args)
def_id
.expect_opaque_ty()
.predicates(self.db)
.iter_instantiated_copied(self.interner(), args.as_slice())
.map(|clause| clause.as_predicate()),
),
TyKind::Dynamic(object_type, ..) => {

View file

@ -11,11 +11,8 @@ use hir_def::{
Statement, UnaryOp,
},
item_tree::FieldsShape,
lang_item::LangItem,
resolver::ValueNs,
};
use hir_expand::name::Name;
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
@ -351,10 +348,12 @@ impl<'db> InferenceContext<'_, 'db> {
return Some(place);
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if matches!(
self.expr_ty_after_adjustments(*expr).kind(),
TyKind::Ref(..) | TyKind::RawPtr(..)
) {
let is_builtin_deref = match self.expr_ty(*expr).kind() {
TyKind::Ref(..) | TyKind::RawPtr(..) => true,
TyKind::Adt(adt_def, _) if adt_def.is_box() => true,
_ => false,
};
if is_builtin_deref {
let mut place = self.place_of_expr(*expr)?;
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
place.projections.push(ProjectionElem::Deref);
@ -609,28 +608,19 @@ impl<'db> InferenceContext<'_, 'db> {
}
Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if matches!(
self.expr_ty_after_adjustments(*expr).kind(),
TyKind::Ref(..) | TyKind::RawPtr(..)
) {
self.select_from_expr(*expr);
} else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
let mutability = 'b: {
if let Some(deref_trait) =
self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
&& let Some(deref_fn) = deref_trait
.trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
break 'b deref_fn == f;
if self.result.method_resolution(tgt_expr).is_some() {
// Overloaded deref.
match self.expr_ty_after_adjustments(*expr).kind() {
TyKind::Ref(_, _, mutability) => {
let place = self.place_of_expr(*expr);
match mutability {
Mutability::Mut => self.mutate_expr(*expr, place),
Mutability::Not => self.ref_expr(*expr, place),
}
}
false
};
let place = self.place_of_expr(*expr);
if mutability {
self.mutate_expr(*expr, place);
} else {
self.ref_expr(*expr, place);
// FIXME: Is this correct wrt. raw pointer derefs?
TyKind::RawPtr(..) => self.select_from_expr(*expr),
_ => never!("deref adjustments should include taking a mutable reference"),
}
} else {
self.select_from_expr(*expr);
@ -806,20 +796,6 @@ impl<'db> InferenceContext<'_, 'db> {
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
}
fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
self.result[expr]
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
let mut ty = None;
if let Some(it) = self.result.expr_adjustments.get(&e)
&& let Some(it) = it.last()
{
ty = Some(it.target);
}
ty.unwrap_or_else(|| self.expr_ty(e))
}
fn is_upvar(&self, place: &HirPlace<'db>) -> bool {
if let Some(c) = self.current_closure {
let InternedClosure(_, root) = self.db.lookup_intern_closure(c);

View file

@ -44,7 +44,8 @@ use hir_def::{
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
BoundVar, TypeAndMut,
BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable,
TypeVisitableExt,
error::TypeError,
inherent::{Const as _, GenericArg as _, IntoKind, Safety, SliceLike, Ty as _},
};
@ -56,13 +57,16 @@ use crate::{
Adjust, Adjustment, AutoBorrow, PointerCast, TargetFeatures, TraitEnvironment,
autoderef::Autoderef,
db::{HirDatabase, InternedClosureId},
infer::{AllowTwoPhase, InferenceContext, TypeMismatch, unify::InferenceTable},
infer::{
AllowTwoPhase, AutoBorrowMutability, InferenceContext, TypeMismatch, expr::ExprIsRead,
},
next_solver::{
Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, CallableIdWrapper,
Canonical, ClauseKind, CoercePredicate, Const, ConstKind, DbInterner, ErrorGuaranteed,
GenericArgs, PolyFnSig, PredicateKind, Region, RegionKind, TraitRef, Ty, TyKind,
TypingMode,
infer::{
InferCtxt, InferOk, InferResult,
DbInternerInferExt, InferCtxt, InferOk, InferResult,
relate::RelateResult,
select::{ImplSource, SelectionError},
traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations},
@ -72,10 +76,20 @@ use crate::{
utils::TargetFeatureIsSafeInTarget,
};
struct Coerce<'a, 'b, 'db> {
table: &'a mut InferenceTable<'db>,
has_errors: &'a mut bool,
target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget),
trait CoerceDelegate<'db> {
fn infcx(&self) -> &InferCtxt<'db>;
fn env(&self) -> &TraitEnvironment<'db>;
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget);
fn set_diverging(&mut self, diverging_ty: Ty<'db>);
fn set_tainted_by_errors(&mut self);
fn type_var_is_sized(&mut self, var: TyVid) -> bool;
}
struct Coerce<D> {
delegate: D,
use_lub: bool,
/// Determines whether or not allow_two_phase_borrow is set on any
/// autoref adjustments we create while coercing. We don't want to
@ -109,43 +123,56 @@ fn success<'db>(
Ok(InferOk { value: (adj, target), obligations })
}
impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
impl<'db, D> Coerce<D>
where
D: CoerceDelegate<'db>,
{
#[inline]
fn set_tainted_by_errors(&mut self) {
*self.has_errors = true;
self.delegate.set_tainted_by_errors();
}
#[inline]
fn infcx(&self) -> &InferCtxt<'db> {
self.delegate.infcx()
}
#[inline]
fn env(&self) -> &TraitEnvironment<'db> {
self.delegate.env()
}
#[inline]
fn interner(&self) -> DbInterner<'db> {
self.table.interner()
self.infcx().interner
}
#[inline]
fn infer_ctxt(&self) -> &InferCtxt<'db> {
&self.table.infer_ctxt
fn db(&self) -> &'db dyn HirDatabase {
self.interner().db
}
pub(crate) fn commit_if_ok<T, E>(
&mut self,
f: impl FnOnce(&mut Self) -> Result<T, E>,
) -> Result<T, E> {
let snapshot = self.table.snapshot();
let snapshot = self.infcx().start_snapshot();
let result = f(self);
match result {
Ok(_) => {}
Err(_) => {
self.table.rollback_to(snapshot);
self.infcx().rollback_to(snapshot);
}
}
result
}
fn unify_raw(&mut self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> {
fn unify_raw(&self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> {
debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub);
self.commit_if_ok(|this| {
let at = this.infer_ctxt().at(&this.cause, this.table.trait_env.env);
self.infcx().commit_if_ok(|_| {
let at = self.infcx().at(&self.cause, self.env().env);
let res = if this.use_lub {
let res = if self.use_lub {
at.lub(b, a)
} else {
at.sup(b, a)
@ -157,7 +184,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
// Filter these cases out to make sure our coercion is more accurate.
match res {
Ok(InferOk { value, obligations }) => {
let mut ocx = ObligationCtxt::new(this.infer_ctxt());
let mut ocx = ObligationCtxt::new(self.infcx());
ocx.register_obligations(obligations);
if ocx.try_evaluate_obligations().is_empty() {
Ok(InferOk { value, obligations: ocx.into_pending_obligations() })
@ -182,7 +209,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
a: Ty<'db>,
b: Ty<'db>,
adjustments: impl IntoIterator<Item = Adjustment<'db>>,
final_adjustment: Adjust<'db>,
final_adjustment: Adjust,
) -> CoerceResult<'db> {
self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| {
success(
@ -199,15 +226,15 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
#[instrument(skip(self))]
fn coerce(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
// First, remove any resolved type variables (at the top level, at least):
let a = self.table.shallow_resolve(a);
let b = self.table.shallow_resolve(b);
let a = self.infcx().shallow_resolve(a);
let b = self.infcx().shallow_resolve(b);
debug!("Coerce.tys({:?} => {:?})", a, b);
// Coercing from `!` to any type is allowed:
if a.is_never() {
// If we're coercing into an inference var, mark it as possibly diverging.
if b.is_infer() {
self.table.set_diverging(b);
self.delegate.set_diverging(b);
}
if self.coerce_never {
@ -290,12 +317,12 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
/// fall back to subtyping (`unify_and`).
fn coerce_from_inference_variable(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b);
debug_assert!(a.is_infer() && self.table.shallow_resolve(a) == a);
debug_assert!(self.table.shallow_resolve(b) == b);
debug_assert!(a.is_infer() && self.infcx().shallow_resolve(a) == a);
debug_assert!(self.infcx().shallow_resolve(b) == b);
if b.is_infer() {
// Two unresolved type variables: create a `Coerce` predicate.
let target_ty = if self.use_lub { self.table.next_ty_var() } else { b };
let target_ty = if self.use_lub { self.infcx().next_ty_var() } else { b };
let mut obligations = PredicateObligations::with_capacity(2);
for &source_ty in &[a, b] {
@ -303,7 +330,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
obligations.push(Obligation::new(
self.interner(),
self.cause.clone(),
self.table.trait_env.env,
self.env().env,
Binder::dummy(PredicateKind::Coerce(CoercePredicate {
a: source_ty,
b: target_ty,
@ -335,8 +362,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
mutbl_b: Mutability,
) -> CoerceResult<'db> {
debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b);
debug_assert!(self.table.shallow_resolve(a) == a);
debug_assert!(self.table.shallow_resolve(b) == b);
debug_assert!(self.infcx().shallow_resolve(a) == a);
debug_assert!(self.infcx().shallow_resolve(b) == b);
// If we have a parameter of type `&M T_a` and the value
// provided is `expr`, we will be adding an implicit borrow,
@ -355,10 +382,10 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
let mut first_error = None;
let mut r_borrow_var = None;
let mut autoderef = Autoderef::new(self.table, a);
let mut autoderef = Autoderef::new_with_tracking(self.infcx(), self.env(), a);
let mut found = None;
while let Some((referent_ty, autoderefs)) = autoderef.next() {
for (referent_ty, autoderefs) in autoderef.by_ref() {
if autoderefs == 0 {
// Don't let this pass, otherwise it would cause
// &T to autoref to &&T.
@ -442,28 +469,18 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
} else {
if r_borrow_var.is_none() {
// create var lazily, at most once
let r = autoderef.table.next_region_var();
let r = self.infcx().next_region_var();
r_borrow_var = Some(r); // [4] above
}
r_borrow_var.unwrap()
};
let derefd_ty_a = Ty::new_ref(
autoderef.table.interner(),
self.interner(),
r,
referent_ty,
mutbl_b, // [1] above
);
// We need to construct a new `Coerce` because of lifetimes.
let mut coerce = Coerce {
table: autoderef.table,
has_errors: self.has_errors,
target_features: self.target_features,
use_lub: self.use_lub,
allow_two_phase: self.allow_two_phase,
coerce_never: self.coerce_never,
cause: self.cause.clone(),
};
match coerce.unify_raw(derefd_ty_a, b) {
match self.unify_raw(derefd_ty_a, b) {
Ok(ok) => {
found = Some(ok);
break;
@ -515,15 +532,9 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
autoderef.adjust_steps_as_infer_ok();
obligations.extend(o);
// Now apply the autoref. We have to extract the region out of
// the final ref type we got.
let TyKind::Ref(region, _, _) = ty.kind() else {
panic!("expected a ref type, got {:?}", ty);
};
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl_b)),
target: ty,
});
// Now apply the autoref.
let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase);
adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty });
debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments);
@ -538,8 +549,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
#[instrument(skip(self), level = "debug")]
fn coerce_unsized(&mut self, source: Ty<'db>, target: Ty<'db>) -> CoerceResult<'db> {
debug!(?source, ?target);
debug_assert!(self.table.shallow_resolve(source) == source);
debug_assert!(self.table.shallow_resolve(target) == target);
debug_assert!(self.infcx().shallow_resolve(source) == source);
debug_assert!(self.infcx().shallow_resolve(target) == target);
// We don't apply any coercions incase either the source or target
// aren't sufficiently well known but tend to instead just equate
@ -602,8 +613,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
}
let traits = (
LangItem::Unsize.resolve_trait(self.table.db, self.table.trait_env.krate),
LangItem::CoerceUnsized.resolve_trait(self.table.db, self.table.trait_env.krate),
LangItem::Unsize.resolve_trait(self.db(), self.env().krate),
LangItem::CoerceUnsized.resolve_trait(self.db(), self.env().krate),
);
let (Some(unsize_did), Some(coerce_unsized_did)) = traits else {
debug!("missing Unsize or CoerceUnsized traits");
@ -620,18 +631,17 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
(TyKind::Ref(_, ty_a, mutbl_a), TyKind::Ref(_, _, mutbl_b)) => {
coerce_mutbls(mutbl_a, mutbl_b)?;
let r_borrow = self.table.next_region_var();
let r_borrow = self.infcx().next_region_var();
// We don't allow two-phase borrows here, at least for initial
// implementation. If it happens that this coercion is a function argument,
// the reborrow in coerce_borrowed_ptr will pick it up.
// let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
let mutbl = mutbl_b;
let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)),
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b),
},
))
@ -655,7 +665,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
// the `CoerceUnsized` target type and the expected type.
// We only have the latter, so we use an inference variable
// for the former and let type inference do the rest.
let coerce_target = self.table.next_ty_var();
let coerce_target = self.infcx().next_ty_var();
let mut coercion = self.unify_and(
coerce_target,
@ -677,7 +687,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
let mut queue: SmallVec<[PredicateObligation<'db>; 4]> = smallvec![Obligation::new(
self.interner(),
cause,
self.table.trait_env.env,
self.env().env,
TraitRef::new(
self.interner(),
coerce_unsized_did.into(),
@ -694,14 +704,14 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
Some(PredicateKind::Clause(ClauseKind::Trait(trait_pred)))
if traits.contains(&trait_pred.def_id().0) =>
{
self.infer_ctxt().resolve_vars_if_possible(trait_pred)
self.infcx().resolve_vars_if_possible(trait_pred)
}
// Eagerly process alias-relate obligations in new trait solver,
// since these can be emitted in the process of solving trait goals,
// but we need to constrain vars before processing goals mentioning
// them.
Some(PredicateKind::AliasRelate(..)) => {
let mut ocx = ObligationCtxt::new(self.infer_ctxt());
let mut ocx = ObligationCtxt::new(self.infcx());
ocx.register_obligation(obligation);
if !ocx.try_evaluate_obligations().is_empty() {
return Err(TypeError::Mismatch);
@ -715,7 +725,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
}
};
debug!("coerce_unsized resolve step: {:?}", trait_pred);
match self.infer_ctxt().select(&obligation.with(self.interner(), trait_pred)) {
match self.infcx().select(&obligation.with(self.interner(), trait_pred)) {
// Uncertain or unimplemented.
Ok(None) => {
if trait_pred.def_id().0 == unsize_did {
@ -724,7 +734,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred);
match (self_ty.kind(), unsize_ty.kind()) {
(TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..))
if self.table.type_var_is_sized(v) =>
if self.delegate.type_var_is_sized(v) =>
{
debug!("coerce_unsized: have sized infer {:?}", v);
coercion.obligations.push(obligation);
@ -794,9 +804,9 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
&mut self,
fn_ty_a: PolyFnSig<'db>,
b: Ty<'db>,
adjustment: Option<Adjust<'db>>,
adjustment: Option<Adjust>,
) -> CoerceResult<'db> {
debug_assert!(self.table.shallow_resolve(b) == b);
debug_assert!(self.infcx().shallow_resolve(b) == b);
self.commit_if_ok(|this| {
if let TyKind::FnPtr(_, hdr_b) = b.kind()
@ -825,15 +835,15 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
fn coerce_from_fn_pointer(&mut self, fn_ty_a: PolyFnSig<'db>, b: Ty<'db>) -> CoerceResult<'db> {
debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer");
debug_assert!(self.table.shallow_resolve(b) == b);
debug_assert!(self.infcx().shallow_resolve(b) == b);
self.coerce_from_safe_fn(fn_ty_a, b, None)
}
fn coerce_from_fn_item(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b);
debug_assert!(self.table.shallow_resolve(a) == a);
debug_assert!(self.table.shallow_resolve(b) == b);
debug_assert!(self.infcx().shallow_resolve(a) == a);
debug_assert!(self.infcx().shallow_resolve(b) == b);
match b.kind() {
TyKind::FnPtr(_, b_hdr) => {
@ -841,11 +851,11 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
if let TyKind::FnDef(def_id, _) = a.kind() {
// Intrinsics are not coercible to function pointers
if let CallableDefId::FunctionId(def_id) = def_id.0 {
if FunctionSignature::is_intrinsic(self.table.db, def_id) {
if FunctionSignature::is_intrinsic(self.db(), def_id) {
return Err(TypeError::IntrinsicCast);
}
let attrs = self.table.db.attrs(def_id.into());
let attrs = self.db().attrs(def_id.into());
if attrs.by_key(sym::rustc_force_inline).exists() {
return Err(TypeError::ForceInlineCast);
}
@ -856,7 +866,7 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
// Allow the coercion if the current function has all the features that would be
// needed to call the coercee safely.
let (target_features, target_feature_is_safe) =
(self.target_features)();
self.delegate.target_features();
if target_feature_is_safe == TargetFeatureIsSafeInTarget::No
&& !target_features.enabled.is_superset(&fn_target_features.enabled)
{
@ -887,8 +897,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
args_a: GenericArgs<'db>,
b: Ty<'db>,
) -> CoerceResult<'db> {
debug_assert!(self.table.shallow_resolve(a) == a);
debug_assert!(self.table.shallow_resolve(b) == b);
debug_assert!(self.infcx().shallow_resolve(a) == a);
debug_assert!(self.infcx().shallow_resolve(b) == b);
match b.kind() {
// FIXME: We need to have an `upvars_mentioned()` query:
@ -930,8 +940,8 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
fn coerce_raw_ptr(&mut self, a: Ty<'db>, b: Ty<'db>, mutbl_b: Mutability) -> CoerceResult<'db> {
debug!("coerce_raw_ptr(a={:?}, b={:?})", a, b);
debug_assert!(self.table.shallow_resolve(a) == a);
debug_assert!(self.table.shallow_resolve(b) == b);
debug_assert!(self.infcx().shallow_resolve(a) == a);
debug_assert!(self.infcx().shallow_resolve(b) == b);
let (is_ref, mt_a) = match a.kind() {
TyKind::Ref(_, ty, mutbl) => (true, TypeAndMut::<DbInterner<'db>> { ty, mutbl }),
@ -960,10 +970,36 @@ impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum CoerceNever {
No,
Yes,
struct InferenceCoercionDelegate<'a, 'b, 'db>(&'a mut InferenceContext<'b, 'db>);
impl<'db> CoerceDelegate<'db> for InferenceCoercionDelegate<'_, '_, 'db> {
#[inline]
fn infcx(&self) -> &InferCtxt<'db> {
&self.0.table.infer_ctxt
}
#[inline]
fn env(&self) -> &TraitEnvironment<'db> {
&self.0.table.trait_env
}
#[inline]
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
self.0.target_features()
}
#[inline]
fn set_diverging(&mut self, diverging_ty: Ty<'db>) {
self.0.table.set_diverging(diverging_ty);
}
#[inline]
fn set_tainted_by_errors(&mut self) {
self.0.set_tainted_by_errors();
}
#[inline]
fn type_var_is_sized(&mut self, var: TyVid) -> bool {
self.0.table.type_var_is_sized(var)
}
}
impl<'db> InferenceContext<'_, 'db> {
@ -977,24 +1013,26 @@ impl<'db> InferenceContext<'_, 'db> {
expr_ty: Ty<'db>,
mut target: Ty<'db>,
allow_two_phase: AllowTwoPhase,
coerce_never: CoerceNever,
expr_is_read: ExprIsRead,
) -> RelateResult<'db, Ty<'db>> {
let source = self.table.try_structurally_resolve_type(expr_ty);
target = self.table.try_structurally_resolve_type(target);
debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target);
let cause = ObligationCause::new();
let krate = self.krate();
let coerce_never = match expr {
ExprOrPatId::ExprId(idx) => {
self.expr_guaranteed_to_constitute_read_for_never(idx, expr_is_read)
}
// `PatId` is passed for `PatKind::Path`.
ExprOrPatId::PatId(_) => false,
};
let mut coerce = Coerce {
table: &mut self.table,
has_errors: &mut self.result.has_errors,
delegate: InferenceCoercionDelegate(self),
cause,
allow_two_phase,
coerce_never: matches!(coerce_never, CoerceNever::Yes),
coerce_never,
use_lub: false,
target_features: &mut || {
Self::target_features(self.db, &self.target_features, self.owner, krate)
},
};
let ok = coerce.commit_if_ok(|coerce| coerce.coerce(source, target))?;
@ -1157,23 +1195,18 @@ impl<'db> InferenceContext<'_, 'db> {
//
// NOTE: we set `coerce_never` to `true` here because coercion LUBs only
// operate on values and not places, so a never coercion is valid.
let krate = self.krate();
let mut coerce = Coerce {
table: &mut self.table,
has_errors: &mut self.result.has_errors,
delegate: InferenceCoercionDelegate(self),
cause: ObligationCause::new(),
allow_two_phase: AllowTwoPhase::No,
coerce_never: true,
use_lub: true,
target_features: &mut || {
Self::target_features(self.db, &self.target_features, self.owner, krate)
},
};
// First try to coerce the new expression to the type of the previous ones,
// but only if the new expression has no coercion already applied to it.
let mut first_error = None;
if !self.result.expr_adjustments.contains_key(&new) {
if !coerce.delegate.0.result.expr_adjustments.contains_key(&new) {
let result = coerce.commit_if_ok(|coerce| coerce.coerce(new_ty, prev_ty));
match result {
Ok(ok) => {
@ -1335,8 +1368,9 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
cause: &ObligationCause,
expression: ExprId,
expression_ty: Ty<'db>,
expr_is_read: ExprIsRead,
) {
self.coerce_inner(icx, cause, expression, expression_ty, false, false)
self.coerce_inner(icx, cause, expression, expression_ty, false, false, expr_is_read)
}
/// Indicates that one of the inputs is a "forced unit". This
@ -1357,8 +1391,17 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
expr: ExprId,
cause: &ObligationCause,
label_unit_as_expected: bool,
expr_is_read: ExprIsRead,
) {
self.coerce_inner(icx, cause, expr, icx.types.unit, true, label_unit_as_expected)
self.coerce_inner(
icx,
cause,
expr,
icx.types.unit,
true,
label_unit_as_expected,
expr_is_read,
)
}
/// The inner coercion "engine". If `expression` is `None`, this
@ -1372,6 +1415,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
mut expression_ty: Ty<'db>,
force_unit: bool,
label_expression_as_expected: bool,
expr_is_read: ExprIsRead,
) {
// Incorporate whatever type inference information we have
// until now; in principle we might also want to process
@ -1408,7 +1452,7 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
expression_ty,
self.expected_ty,
AllowTwoPhase::No,
CoerceNever::Yes,
expr_is_read,
)
} else {
match self.expressions {
@ -1504,88 +1548,170 @@ pub fn could_coerce<'db>(
coerce(db, env, tys).is_ok()
}
struct HirCoercionDelegate<'a, 'db> {
infcx: &'a InferCtxt<'db>,
env: &'a TraitEnvironment<'db>,
target_features: &'a TargetFeatures,
}
impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
#[inline]
fn infcx(&self) -> &InferCtxt<'db> {
self.infcx
}
#[inline]
fn env(&self) -> &TraitEnvironment<'db> {
self.env
}
fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
(self.target_features, TargetFeatureIsSafeInTarget::No)
}
fn set_diverging(&mut self, _diverging_ty: Ty<'db>) {}
fn set_tainted_by_errors(&mut self) {}
fn type_var_is_sized(&mut self, _var: TyVid) -> bool {
false
}
}
fn coerce<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> {
let mut table = InferenceTable::new(db, env, None);
let interner = table.interner();
let ((ty1_with_vars, ty2_with_vars), vars) = table.infer_ctxt.instantiate_canonical(tys);
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys);
let cause = ObligationCause::new();
// FIXME: Target features.
let target_features = TargetFeatures::default();
let mut coerce = Coerce {
table: &mut table,
has_errors: &mut false,
delegate: HirCoercionDelegate {
infcx: &infcx,
env: &env,
target_features: &target_features,
},
cause,
allow_two_phase: AllowTwoPhase::No,
coerce_never: true,
use_lub: false,
target_features: &mut || (&target_features, TargetFeatureIsSafeInTarget::No),
};
let InferOk { value: (adjustments, ty), obligations } =
coerce.coerce(ty1_with_vars, ty2_with_vars)?;
table.register_predicates(obligations);
let infer_ok = coerce.coerce(ty1_with_vars, ty2_with_vars)?;
let mut ocx = ObligationCtxt::new(&infcx);
let (adjustments, ty) = ocx.register_infer_ok_obligations(infer_ok);
_ = ocx.try_evaluate_obligations();
let (adjustments, ty) = infcx.resolve_vars_if_possible((adjustments, ty));
// default any type vars that weren't unified back to their original bound vars
// (kind of hacky)
let mut fallback_ty = |debruijn, infer| {
let var = vars.var_values.iter().position(|arg| {
arg.as_type().is_some_and(|ty| match ty.kind() {
TyKind::Infer(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Ty::new_error(interner, ErrorGuaranteed),
|i| {
Ty::new_bound(
interner,
debruijn,
BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_usize(i) },
struct Resolver<'db> {
interner: DbInterner<'db>,
debruijn: DebruijnIndex,
var_values: GenericArgs<'db>,
}
impl<'db> TypeFolder<DbInterner<'db>> for Resolver<'db> {
fn cx(&self) -> DbInterner<'db> {
self.interner
}
fn fold_binder<T>(&mut self, t: Binder<'db, T>) -> Binder<'db, T>
where
T: TypeFoldable<DbInterner<'db>>,
{
self.debruijn.shift_in(1);
let result = t.super_fold_with(self);
self.debruijn.shift_out(1);
result
}
fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
if !t.has_infer() {
return t;
}
if let TyKind::Infer(infer) = t.kind() {
let var = self.var_values.iter().position(|arg| {
arg.as_type().is_some_and(|ty| match ty.kind() {
TyKind::Infer(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Ty::new_error(self.interner, ErrorGuaranteed),
|i| {
Ty::new_bound(
self.interner,
self.debruijn,
BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_usize(i) },
)
},
)
},
)
};
let mut fallback_const = |debruijn, infer| {
let var = vars.var_values.iter().position(|arg| {
arg.as_const().is_some_and(|ty| match ty.kind() {
ConstKind::Infer(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Const::new_error(interner, ErrorGuaranteed),
|i| Const::new_bound(interner, debruijn, BoundConst { var: BoundVar::from_usize(i) }),
)
};
let mut fallback_region = |debruijn, infer| {
let var = vars.var_values.iter().position(|arg| {
arg.as_region().is_some_and(|ty| match ty.kind() {
RegionKind::ReVar(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Region::error(interner),
|i| {
Region::new_bound(
interner,
debruijn,
BoundRegion { kind: BoundRegionKind::Anon, var: BoundVar::from_usize(i) },
} else {
t.super_fold_with(self)
}
}
fn fold_const(&mut self, c: Const<'db>) -> Const<'db> {
if !c.has_infer() {
return c;
}
if let ConstKind::Infer(infer) = c.kind() {
let var = self.var_values.iter().position(|arg| {
arg.as_const().is_some_and(|ty| match ty.kind() {
ConstKind::Infer(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Const::new_error(self.interner, ErrorGuaranteed),
|i| {
Const::new_bound(
self.interner,
self.debruijn,
BoundConst { var: BoundVar::from_usize(i) },
)
},
)
},
)
};
// FIXME also map the types in the adjustments
// FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferenceTable`.
let ty = table.resolve_with_fallback(
ty,
&mut fallback_ty,
&mut fallback_const,
&mut fallback_region,
);
} else {
c.super_fold_with(self)
}
}
fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
if let RegionKind::ReVar(infer) = r.kind() {
let var = self.var_values.iter().position(|arg| {
arg.as_region().is_some_and(|ty| match ty.kind() {
RegionKind::ReVar(it) => infer == it,
_ => false,
})
});
var.map_or_else(
|| Region::error(self.interner),
|i| {
Region::new_bound(
self.interner,
self.debruijn,
BoundRegion {
kind: BoundRegionKind::Anon,
var: BoundVar::from_usize(i),
},
)
},
)
} else {
r
}
}
}
// FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferCtxt`.
let (adjustments, ty) = (adjustments, ty).fold_with(&mut Resolver {
interner,
debruijn: DebruijnIndex::ZERO,
var_values: vars.var_values,
});
Ok((adjustments, ty))
}

File diff suppressed because it is too large Load diff

View file

@ -1,25 +1,15 @@
//! Finds if an expression is an immutable context or a mutable context, which is used in selecting
//! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar.
use hir_def::{
hir::{
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement,
UnaryOp,
},
lang_item::LangItem,
use hir_def::hir::{
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, UnaryOp,
};
use hir_expand::name::Name;
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::IntoKind;
use crate::next_solver::infer::traits::{Obligation, ObligationCause};
use crate::next_solver::{GenericArgs, TraitRef};
use crate::{
Adjust, Adjustment, AutoBorrow, OverloadedDeref,
infer::{Expectation, InferenceContext, expr::ExprIsRead},
Adjust, AutoBorrow, OverloadedDeref,
infer::{InferenceContext, place_op::PlaceOp},
lower::lower_mutability,
next_solver::TyKind,
};
impl<'db> InferenceContext<'_, 'db> {
@ -28,13 +18,33 @@ impl<'db> InferenceContext<'_, 'db> {
}
fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) {
let krate = self.krate();
if let Some(adjustments) = self.result.expr_adjustments.get_mut(&tgt_expr) {
for adj in adjustments.iter_mut().rev() {
let mut adjustments = adjustments.iter_mut().rev().peekable();
while let Some(adj) = adjustments.next() {
match &mut adj.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (),
Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)),
Adjust::Deref(Some(d)) => {
if mutability == Mutability::Mut {
let source_ty = match adjustments.peek() {
Some(prev_adj) => prev_adj.target,
None => self.result.type_of_expr[tgt_expr],
};
if let Some(infer_ok) = Self::try_mutable_overloaded_place_op(
&self.table,
krate,
source_ty,
None,
PlaceOp::Deref,
) {
self.table.register_predicates(infer_ok.obligations);
}
*d = OverloadedDeref(Some(mutability));
}
}
Adjust::Borrow(b) => match b {
AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m) => mutability = *m,
AutoBorrow::Ref(m) => mutability = (*m).into(),
AutoBorrow::RawPtr(m) => mutability = *m,
},
}
}
@ -128,75 +138,15 @@ impl<'db> InferenceContext<'_, 'db> {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
}
&Expr::Index { base, index } => {
if mutability == Mutability::Mut
&& let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr)
&& let Some(index_trait) =
LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate)
&& let Some(index_fn) = index_trait
.trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::index_mut))
{
*f = index_fn;
let mut base_ty = None;
let base_adjustments =
self.result.expr_adjustments.get_mut(&base).and_then(|it| it.last_mut());
if let Some(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)),
target,
}) = base_adjustments
{
if let TyKind::Ref(_, ty, _) = target.kind() {
base_ty = Some(ty);
}
*mutability = Mutability::Mut;
}
// Apply `IndexMut` obligation for non-assignee expr
if let Some(base_ty) = base_ty {
let index_ty = if let Some(ty) = self.result.type_of_expr.get(index) {
*ty
} else {
self.infer_expr(index, &Expectation::none(), ExprIsRead::Yes)
};
let trait_ref = TraitRef::new(
self.interner(),
index_trait.into(),
GenericArgs::new_from_iter(
self.interner(),
[base_ty.into(), index_ty.into()],
),
);
self.table.register_predicate(Obligation::new(
self.interner(),
ObligationCause::new(),
self.table.trait_env.env,
trait_ref,
));
}
if mutability == Mutability::Mut {
self.convert_place_op_to_mutable(PlaceOp::Index, tgt_expr, base, Some(index));
}
self.infer_mut_expr(base, mutability);
self.infer_mut_expr(index, Mutability::Not);
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
let mut mutability = mutability;
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr)
&& mutability == Mutability::Mut
&& let Some(deref_trait) =
LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate)
{
let ty = self.result.type_of_expr.get(*expr);
let is_mut_ptr = ty.is_some_and(|ty| {
let ty = self.table.shallow_resolve(*ty);
matches!(ty.kind(), TyKind::RawPtr(_, Mutability::Mut))
});
if is_mut_ptr {
mutability = Mutability::Not;
} else if let Some(deref_fn) = deref_trait
.trait_items(self.db)
.method_by_name(&Name::new_symbol_root(sym::deref_mut))
{
*f = deref_fn;
}
if mutability == Mutability::Mut {
self.convert_place_op_to_mutable(PlaceOp::Deref, tgt_expr, *expr, None);
}
self.infer_mut_expr(*expr, mutability);
}

View file

@ -0,0 +1,468 @@
//! Inference of binary and unary operators.
use std::collections::hash_map;
use hir_def::{GenericParamId, TraitId, hir::ExprId, lang_item::LangItem};
use intern::{Symbol, sym};
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use syntax::ast::{ArithOp, BinaryOp, UnaryOp};
use tracing::debug;
use crate::{
Adjust, Adjustment, AutoBorrow,
infer::{AllowTwoPhase, AutoBorrowMutability, Expectation, InferenceContext, expr::ExprIsRead},
method_resolution::{MethodCallee, TreatNotYetDefinedOpaques},
next_solver::{
GenericArgs, TraitRef, Ty, TyKind,
fulfill::NextSolverError,
infer::traits::{Obligation, ObligationCause},
obligation_ctxt::ObligationCtxt,
},
};
impl<'a, 'db> InferenceContext<'a, 'db> {
    /// Checks a `a <op>= b`
    ///
    /// The operands are first inferred through the overloaded-operator path;
    /// if both operand types resolve to types for which the operation is
    /// builtin, the builtin typing rules are enforced instead and the whole
    /// expression types as `()`.
    pub(crate) fn infer_assign_op_expr(
        &mut self,
        expr: ExprId,
        op: ArithOp,
        lhs: ExprId,
        rhs: ExprId,
    ) -> Ty<'db> {
        let (lhs_ty, rhs_ty, return_ty) =
            self.infer_overloaded_binop(expr, lhs, rhs, BinaryOp::Assignment { op: Some(op) });
        let category = BinOpCategory::from(op);
        // Only apply the builtin rules once both sides are resolved enough
        // (not inference variables) to tell this really is a builtin op.
        let ty = if !lhs_ty.is_ty_var()
            && !rhs_ty.is_ty_var()
            && is_builtin_binop(lhs_ty, rhs_ty, category)
        {
            self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category);
            self.types.unit
        } else {
            return_ty
        };
        self.check_lhs_assignable(lhs);
        ty
    }

    /// Checks a potentially overloaded binary operator.
    pub(crate) fn infer_binop_expr(
        &mut self,
        expr: ExprId,
        op: BinaryOp,
        lhs_expr: ExprId,
        rhs_expr: ExprId,
    ) -> Ty<'db> {
        debug!(
            "check_binop(expr.hir_id={:?}, expr={:?}, op={:?}, lhs_expr={:?}, rhs_expr={:?})",
            expr, expr, op, lhs_expr, rhs_expr
        );
        match op {
            BinaryOp::LogicOp(_) => {
                // && and || are a simple case.
                self.infer_expr_coerce(
                    lhs_expr,
                    &Expectation::HasType(self.types.bool),
                    ExprIsRead::Yes,
                );
                let lhs_diverges = self.diverges;
                self.infer_expr_coerce(
                    rhs_expr,
                    &Expectation::HasType(self.types.bool),
                    ExprIsRead::Yes,
                );
                // Depending on the LHS' value, the RHS can never execute.
                self.diverges = lhs_diverges;
                self.types.bool
            }
            _ => {
                // Otherwise, we always treat operators as if they are
                // overloaded. This is the way to be most flexible w/r/t
                // types that get inferred.
                let (lhs_ty, rhs_ty, return_ty) =
                    self.infer_overloaded_binop(expr, lhs_expr, rhs_expr, op);

                // Supply type inference hints if relevant. Probably these
                // hints should be enforced during select as part of the
                // `consider_unification_despite_ambiguity` routine, but this
                // more convenient for now.
                //
                // The basic idea is to help type inference by taking
                // advantage of things we know about how the impls for
                // scalar types are arranged. This is important in a
                // scenario like `1_u32 << 2`, because it lets us quickly
                // deduce that the result type should be `u32`, even
                // though we don't know yet what type 2 has and hence
                // can't pin this down to a specific impl.
                let category = BinOpCategory::from(op);
                if !lhs_ty.is_ty_var()
                    && !rhs_ty.is_ty_var()
                    && is_builtin_binop(lhs_ty, rhs_ty, category)
                {
                    let builtin_return_ty =
                        self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category);
                    self.demand_eqtype(builtin_return_ty, return_ty);
                    builtin_return_ty
                } else {
                    return_ty
                }
            }
        }
    }

    /// Unifies the operand types according to the builtin rules for
    /// `category` and returns the builtin result type.
    ///
    /// Callers must have established `is_builtin_binop(lhs_ty, rhs_ty,
    /// category)` first (checked via the `debug_assert!`).
    fn enforce_builtin_binop_types(
        &mut self,
        lhs_ty: Ty<'db>,
        rhs_ty: Ty<'db>,
        category: BinOpCategory,
    ) -> Ty<'db> {
        debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, category));

        // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work.
        // (See https://github.com/rust-lang/rust/issues/57447.)
        let (lhs_ty, rhs_ty) = (deref_ty_if_possible(lhs_ty), deref_ty_if_possible(rhs_ty));

        match category {
            BinOpCategory::Shortcircuit => {
                self.demand_suptype(self.types.bool, lhs_ty);
                self.demand_suptype(self.types.bool, rhs_ty);
                self.types.bool
            }
            BinOpCategory::Shift => {
                // result type is same as LHS always
                lhs_ty
            }
            BinOpCategory::Math | BinOpCategory::Bitwise => {
                // both LHS and RHS and result will have the same type
                self.demand_suptype(lhs_ty, rhs_ty);
                lhs_ty
            }
            BinOpCategory::Comparison => {
                // both LHS and RHS and result will have the same type
                self.demand_suptype(lhs_ty, rhs_ty);
                self.types.bool
            }
        }
    }

    /// Infers both operands of `op` through the overloaded-operator trait
    /// (e.g. `core::ops::Add`), recording autoref adjustments and the method
    /// resolution on success.
    ///
    /// Returns `(lhs_ty, rhs_ty, return_ty)`; `return_ty` is the error type
    /// when no operator method was found.
    fn infer_overloaded_binop(
        &mut self,
        expr: ExprId,
        lhs_expr: ExprId,
        rhs_expr: ExprId,
        op: BinaryOp,
    ) -> (Ty<'db>, Ty<'db>, Ty<'db>) {
        debug!("infer_overloaded_binop(expr.hir_id={:?}, op={:?})", expr, op);

        let lhs_ty = match op {
            BinaryOp::Assignment { .. } => {
                // rust-lang/rust#52126: We have to use strict
                // equivalence on the LHS of an assign-op like `+=`;
                // overwritten or mutably-borrowed places cannot be
                // coerced to a supertype.
                self.infer_expr_no_expect(lhs_expr, ExprIsRead::Yes)
            }
            _ => {
                // Find a suitable supertype of the LHS expression's type, by coercing to
                // a type variable, to pass as the `Self` to the trait, avoiding invariant
                // trait matching creating lifetime constraints that are too strict.
                // e.g., adding `&'a T` and `&'b T`, given `&'x T: Add<&'x T>`, will result
                // in `&'a T <: &'x T` and `&'b T <: &'x T`, instead of `'a = 'b = 'x`.
                let lhs_ty = self.infer_expr_no_expect(lhs_expr, ExprIsRead::No);
                let fresh_var = self.table.next_ty_var();
                self.demand_coerce(lhs_expr, lhs_ty, fresh_var, AllowTwoPhase::No, ExprIsRead::No)
            }
        };
        let lhs_ty = self.table.resolve_vars_with_obligations(lhs_ty);

        // N.B., as we have not yet type-checked the RHS, we don't have the
        // type at hand. Make a variable to represent it. The whole reason
        // for this indirection is so that, below, we can check the expr
        // using this variable as the expected type, which sometimes lets
        // us do better coercions than we would be able to do otherwise,
        // particularly for things like `String + &String`.
        let rhs_ty_var = self.table.next_ty_var();

        let result = self.lookup_op_method(
            lhs_ty,
            Some((rhs_expr, rhs_ty_var)),
            self.lang_item_for_bin_op(op),
        );

        // see `NB` above
        let rhs_ty =
            self.infer_expr_coerce(rhs_expr, &Expectation::HasType(rhs_ty_var), ExprIsRead::No);
        let rhs_ty = self.table.resolve_vars_with_obligations(rhs_ty);

        let return_ty = match result {
            Ok(method) => {
                let by_ref_binop = !is_op_by_value(op);
                // Autoref the LHS when the operator method takes `&self`/`&mut self`.
                if (matches!(op, BinaryOp::Assignment { .. }) || by_ref_binop)
                    && let TyKind::Ref(_, _, mutbl) =
                        method.sig.inputs_and_output.inputs()[0].kind()
                {
                    let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
                    let autoref = Adjustment {
                        kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
                        target: method.sig.inputs_and_output.inputs()[0],
                    };
                    self.write_expr_adj(lhs_expr, Box::new([autoref]));
                }
                if by_ref_binop
                    && let TyKind::Ref(_, _, mutbl) =
                        method.sig.inputs_and_output.inputs()[1].kind()
                {
                    // Allow two-phase borrows for binops in initial deployment
                    // since they desugar to methods
                    let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
                    let autoref = Adjustment {
                        kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
                        target: method.sig.inputs_and_output.inputs()[1],
                    };
                    // HACK(eddyb) Bypass checks due to reborrows being in
                    // some cases applied on the RHS, on top of which we need
                    // to autoref, which is not allowed by write_expr_adj.
                    // self.write_expr_adj(rhs_expr, Box::new([autoref]));
                    match self.result.expr_adjustments.entry(rhs_expr) {
                        hash_map::Entry::Occupied(mut entry) => {
                            // Append the autoref to the RHS's existing adjustments.
                            let mut adjustments = Vec::from(std::mem::take(entry.get_mut()));
                            adjustments.reserve_exact(1);
                            adjustments.push(autoref);
                            entry.insert(adjustments.into_boxed_slice());
                        }
                        hash_map::Entry::Vacant(entry) => {
                            entry.insert(Box::new([autoref]));
                        }
                    };
                }
                self.write_method_resolution(expr, method.def_id, method.args);

                method.sig.output()
            }
            Err(_errors) => {
                // FIXME: Report diagnostic.
                self.types.error
            }
        };

        (lhs_ty, rhs_ty, return_ty)
    }

    /// Infers an overloadable unary operator (`!` or `-`) applied to
    /// `operand_ty`, recording the method resolution on success and yielding
    /// the error type when no operator method was found.
    pub(crate) fn infer_user_unop(
        &mut self,
        ex: ExprId,
        operand_ty: Ty<'db>,
        op: UnaryOp,
    ) -> Ty<'db> {
        match self.lookup_op_method(operand_ty, None, self.lang_item_for_unop(op)) {
            Ok(method) => {
                self.write_method_resolution(ex, method.def_id, method.args);
                method.sig.output()
            }
            Err(_errors) => {
                // FIXME: Report diagnostic.
                self.types.error
            }
        }
    }

    /// Looks up the operator method `opname` of `trait_did` for the given
    /// operand types.
    ///
    /// On failure, the RHS expression (if given and still unresolved) is
    /// checked anyway to guide inference, and the trait obligation is
    /// re-evaluated to collect the solver errors returned to the caller.
    fn lookup_op_method(
        &mut self,
        lhs_ty: Ty<'db>,
        opt_rhs: Option<(ExprId, Ty<'db>)>,
        (opname, trait_did): (Symbol, Option<TraitId>),
    ) -> Result<MethodCallee<'db>, Vec<NextSolverError<'db>>> {
        let Some(trait_did) = trait_did else {
            // Bail if the operator trait is not defined.
            return Err(vec![]);
        };

        debug!(
            "lookup_op_method(lhs_ty={:?}, opname={:?}, trait_did={:?})",
            lhs_ty, opname, trait_did
        );

        let opt_rhs_ty = opt_rhs.map(|it| it.1);
        let cause = ObligationCause::new();

        // We don't consider any other candidates if this lookup fails
        // so we can freely treat opaque types as inference variables here
        // to allow more code to compile.
        let treat_opaques = TreatNotYetDefinedOpaques::AsInfer;
        let method = self.table.lookup_method_for_operator(
            cause.clone(),
            opname,
            trait_did,
            lhs_ty,
            opt_rhs_ty,
            treat_opaques,
        );
        match method {
            Some(ok) => {
                let method = self.table.register_infer_ok(ok);
                self.table.select_obligations_where_possible();
                Ok(method)
            }
            None => {
                // Guide inference for the RHS expression if it's provided --
                // this will allow us to better error reporting, at the expense
                // of making some error messages a bit more specific.
                if let Some((rhs_expr, rhs_ty)) = opt_rhs
                    && rhs_ty.is_ty_var()
                {
                    self.infer_expr_coerce(rhs_expr, &Expectation::HasType(rhs_ty), ExprIsRead::No);
                }

                // Construct an obligation `self_ty : Trait<input_tys>`
                let args = GenericArgs::for_item(
                    self.interner(),
                    trait_did.into(),
                    |param_idx, param_id, _| match param_id {
                        GenericParamId::LifetimeParamId(_) | GenericParamId::ConstParamId(_) => {
                            unreachable!("did not expect operand trait to have lifetime/const args")
                        }
                        GenericParamId::TypeParamId(_) => {
                            if param_idx == 0 {
                                lhs_ty.into()
                            } else {
                                opt_rhs_ty.expect("expected RHS for binop").into()
                            }
                        }
                    },
                );
                let obligation = Obligation::new(
                    self.interner(),
                    cause,
                    self.table.trait_env.env,
                    TraitRef::new_from_args(self.interner(), trait_did.into(), args),
                );
                // Re-evaluate the failed obligation to produce concrete solver
                // errors for the caller.
                let mut ocx = ObligationCtxt::new(self.infcx());
                ocx.register_obligation(obligation);
                Err(ocx.evaluate_obligations_error_on_ambiguity())
            }
        }
    }

    /// Returns the method name and (optionally resolved) lang-item trait for
    /// a binary operator, e.g. `+` -> (`add`, `core::ops::Add`).
    fn lang_item_for_bin_op(&self, op: BinaryOp) -> (Symbol, Option<TraitId>) {
        let (method_name, trait_lang_item) =
            crate::lang_items::lang_items_for_bin_op(op).expect("invalid operator provided");
        (method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
    }

    /// Returns the method name and (optionally resolved) lang-item trait for
    /// an overloadable unary operator. `*` is handled by the place-op
    /// machinery, not here.
    fn lang_item_for_unop(&self, op: UnaryOp) -> (Symbol, Option<TraitId>) {
        let (method_name, trait_lang_item) = match op {
            UnaryOp::Not => (sym::not, LangItem::Not),
            UnaryOp::Neg => (sym::neg, LangItem::Neg),
            UnaryOp::Deref => panic!("Deref is not overloadable"),
        };
        (method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
    }
}
/// Binary operator categories. These categories summarize the behavior
/// with respect to the builtin operations supported.
#[derive(Clone, Copy)]
enum BinOpCategory {
    /// &&, || -- cannot be overridden
    Shortcircuit,

    /// <<, >> -- when shifting a single integer, rhs can be any
    /// integer type. For simd, types must match.
    Shift,

    /// +, -, etc -- takes equal types, produces same type as input,
    /// applicable to ints/floats/simd
    Math,

    /// &, |, ^ -- takes equal types, produces same type as input,
    /// applicable to ints/floats/simd/bool
    Bitwise,

    /// ==, !=, etc -- takes equal types, produces bools, except for simd,
    /// which produce the input type
    Comparison,
}
/// Maps a HIR binary operator onto the builtin-behavior category it
/// belongs to. Arithmetic operators and their compound-assignment forms
/// share a category.
impl From<BinaryOp> for BinOpCategory {
    fn from(op: BinaryOp) -> BinOpCategory {
        match op {
            BinaryOp::CmpOp(_) => BinOpCategory::Comparison,
            BinaryOp::LogicOp(_) => BinOpCategory::Shortcircuit,
            BinaryOp::ArithOp(op) => BinOpCategory::from(op),
            BinaryOp::Assignment { op: Some(op) } => BinOpCategory::from(op),
            BinaryOp::Assignment { op: None } => unreachable!(
                "assignment is lowered into `Expr::Assignment`, not into `Expr::BinaryOp`"
            ),
        }
    }
}
impl From<ArithOp> for BinOpCategory {
fn from(op: ArithOp) -> BinOpCategory {
use ArithOp::*;
match op {
Shl | Shr => BinOpCategory::Shift,
Add | Sub | Mul | Div | Rem => BinOpCategory::Math,
BitXor | BitAnd | BitOr => BinOpCategory::Bitwise,
}
}
}
/// Returns `true` if the binary operator takes its arguments by value.
fn is_op_by_value(op: BinaryOp) -> bool {
    match op {
        // Comparison operators borrow their operands by reference.
        BinaryOp::CmpOp(_) => false,
        BinaryOp::LogicOp(_) | BinaryOp::ArithOp(_) | BinaryOp::Assignment { .. } => true,
    }
}
/// Strips a single level of shared (`&`) referencing; mutable references
/// and non-reference types are returned unchanged.
fn deref_ty_if_possible(ty: Ty<'_>) -> Ty<'_> {
    if let TyKind::Ref(_, pointee, Mutability::Not) = ty.kind() { pointee } else { ty }
}
/// Returns `true` if this operation on these operand types is a built-in
/// arithmetic operation (e.g., `u32 + u32`, `i16x4 == i16x4`) and `false`
/// if the types would need an overloaded trait impl to be legal. There are
/// two reasons we distinguish builtin operations from overloaded ones
/// (instead of driving everything uniformly through the trait system and
/// intrinsics or something like that):
///
/// 1. Builtin operations can trivially be evaluated in constants.
/// 2. For comparison operators applied to SIMD types the result is
///    not of type `bool`. For example, `i16x4 == i16x4` yields a
///    type like `i16x4`. This means that the overloaded trait
///    `PartialEq` is not applicable.
///
/// Reason #2 is the killer. I tried for a while to always use
/// overloaded logic and just check the types in constants/codegen after
/// the fact, and it worked fine, except for SIMD types. -nmatsakis
fn is_builtin_binop<'db>(lhs: Ty<'db>, rhs: Ty<'db>, category: BinOpCategory) -> bool {
    // Look through one layer of shared referencing so that things like
    // `5.0 + &6.0f32` still count as builtin.
    // (See https://github.com/rust-lang/rust/issues/57447.)
    let lhs = deref_ty_if_possible(lhs);
    let rhs = deref_ty_if_possible(rhs);

    let both_integral = lhs.is_integral() && rhs.is_integral();
    let both_float = lhs.is_floating_point() && rhs.is_floating_point();
    match category {
        BinOpCategory::Shortcircuit => true,
        BinOpCategory::Shift => both_integral,
        BinOpCategory::Math => both_integral || both_float,
        BinOpCategory::Bitwise => both_integral || both_float || (lhs.is_bool() && rhs.is_bool()),
        BinOpCategory::Comparison => lhs.is_scalar() && rhs.is_scalar(),
    }
}

View file

@ -16,8 +16,7 @@ use crate::{
DeclContext, DeclOrigin, InferenceDiagnostic,
consteval::{self, try_const_usize, usize_const},
infer::{
AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch,
coerce::CoerceNever, expr::ExprIsRead,
AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch, expr::ExprIsRead,
},
lower::lower_mutability,
next_solver::{GenericArgs, Ty, TyKind},
@ -306,7 +305,7 @@ impl<'db> InferenceContext<'_, 'db> {
expected,
ty_inserted_vars,
AllowTwoPhase::No,
CoerceNever::Yes,
ExprIsRead::No,
) {
Ok(coerced_ty) => {
self.write_pat_ty(pat, coerced_ty);
@ -374,16 +373,17 @@ impl<'db> InferenceContext<'_, 'db> {
Pat::Expr(expr) => {
let old_inside_assign = std::mem::replace(&mut self.inside_assignment, false);
// LHS of assignment doesn't constitute reads.
let expr_is_read = ExprIsRead::No;
let result =
self.infer_expr_coerce(*expr, &Expectation::has_type(expected), ExprIsRead::No);
self.infer_expr_coerce(*expr, &Expectation::has_type(expected), expr_is_read);
// We are returning early to avoid the unifiability check below.
let lhs_ty = self.insert_type_vars_shallow(result);
let ty = match self.coerce(
pat.into(),
(*expr).into(),
expected,
lhs_ty,
AllowTwoPhase::No,
CoerceNever::Yes,
expr_is_read,
) {
Ok(ty) => ty,
Err(_) => {

View file

@ -13,11 +13,12 @@ use crate::{
InferenceDiagnostic, ValueTyDefId,
generics::generics,
infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext,
lower::LifetimeElisionKind,
method_resolution::{self, VisibleFromModule},
lower::{GenericPredicates, LifetimeElisionKind},
method_resolution::{self, CandidateId, MethodError},
next_solver::{
GenericArg, GenericArgs, TraitRef, Ty,
infer::traits::{Obligation, ObligationCause},
util::clauses_as_obligations,
},
};
@ -31,7 +32,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
ValuePathResolution::NonGeneric(ty) => return Some(ty),
};
let args = self.process_remote_user_written_ty(substs);
let args = self.insert_type_vars(substs);
self.add_required_obligations_for_value_path(generic_def, args);
@ -221,14 +222,14 @@ impl<'db> InferenceContext<'_, 'db> {
def: GenericDefId,
subst: GenericArgs<'db>,
) {
let predicates = self.db.generic_predicates(def);
let interner = self.interner();
let predicates = GenericPredicates::query_all(self.db, def);
let param_env = self.table.trait_env.env;
if let Some(predicates) = predicates.instantiate(self.interner(), subst) {
self.table.register_predicates(predicates.map(|predicate| {
Obligation::new(interner, ObligationCause::new(), param_env, predicate)
}));
}
self.table.register_predicates(clauses_as_obligations(
predicates.iter_instantiated_copied(interner, subst.as_slice()),
ObligationCause::new(),
param_env,
));
// We need to add `Self: Trait` obligation when `def` is a trait assoc item.
let container = match def {
@ -265,7 +266,7 @@ impl<'db> InferenceContext<'_, 'db> {
match item {
AssocItemId::FunctionId(func) => {
if segment.name == &self.db.function_signature(func).name {
Some(AssocItemId::FunctionId(func))
Some(CandidateId::FunctionId(func))
} else {
None
}
@ -273,7 +274,7 @@ impl<'db> InferenceContext<'_, 'db> {
AssocItemId::ConstId(konst) => {
if self.db.const_signature(konst).name.as_ref() == Some(segment.name) {
Some(AssocItemId::ConstId(konst))
Some(CandidateId::ConstId(konst))
} else {
None
}
@ -282,9 +283,8 @@ impl<'db> InferenceContext<'_, 'db> {
}
})?;
let def = match item {
AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
AssocItemId::ConstId(c) => ValueNs::ConstId(c),
AssocItemId::TypeAliasId(_) => unreachable!(),
CandidateId::FunctionId(f) => ValueNs::FunctionId(f),
CandidateId::ConstId(c) => ValueNs::ConstId(c),
};
self.write_assoc_resolution(id, item, trait_ref.args);
@ -305,39 +305,23 @@ impl<'db> InferenceContext<'_, 'db> {
return Some(result);
}
let canonical_ty = self.canonicalize(ty);
let mut not_visible = None;
let res = method_resolution::iterate_method_candidates(
&canonical_ty,
&mut self.table,
Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope)
.as_ref()
.left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
Some(name),
method_resolution::LookupMode::Path,
|_ty, item, visible| {
if visible {
Some((item, true))
} else {
if not_visible.is_none() {
not_visible = Some((item, false));
}
None
let res = self.with_method_resolution(|ctx| {
ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), ty)
});
let (item, visible) = match res {
Ok(res) => (res.item, true),
Err(error) => match error {
MethodError::PrivateMatch(candidate_id) => (candidate_id.item, false),
_ => {
self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id });
return None;
}
},
);
let res = res.or(not_visible);
if res.is_none() {
self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id });
}
let (item, visible) = res?;
};
let (def, container) = match item {
AssocItemId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container),
AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
AssocItemId::TypeAliasId(_) => unreachable!(),
CandidateId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container),
CandidateId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
};
let substs = match container {
ItemContainerId::ImplId(impl_id) => {
@ -372,6 +356,10 @@ impl<'db> InferenceContext<'_, 'db> {
self.write_assoc_resolution(id, item, substs);
if !visible {
let item = match item {
CandidateId::FunctionId(it) => it.into(),
CandidateId::ConstId(it) => it.into(),
};
self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item });
}
Some((def, substs))

View file

@ -0,0 +1,329 @@
//! Inference of *place operators*: deref and indexing (operators that create places, as opposed to values).
use base_db::Crate;
use hir_def::{hir::ExprId, lang_item::LangItem};
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use tracing::debug;
use crate::{
Adjust, Adjustment, AutoBorrow, PointerCast,
autoderef::InferenceContextAutoderef,
infer::{AllowTwoPhase, AutoBorrowMutability, InferenceContext, unify::InferenceTable},
method_resolution::{MethodCallee, TreatNotYetDefinedOpaques},
next_solver::{
ClauseKind, Ty, TyKind,
infer::{
InferOk,
traits::{Obligation, ObligationCause},
},
},
};
#[derive(Debug, Copy, Clone)]
/// The two overloadable *place* operators handled by this module: `*x`
/// (dispatched via `Deref`/`DerefMut`) and `x[i]` (via `Index`/`IndexMut`).
pub(super) enum PlaceOp {
    // `*expr` — resolved through the `Deref` family of lang items.
    Deref,
    // `expr[idx]` — resolved through the `Index` family of lang items.
    Index,
}
impl<'a, 'db> InferenceContext<'a, 'db> {
    /// Looks up an overloaded `Deref` impl for `base_ty` (the immutable
    /// variant only; see `convert_place_op_to_mutable` for the upgrade path).
    /// Returns `None` when no `Deref` candidate is found or the `Deref` lang
    /// item is missing.
    pub(super) fn try_overloaded_deref(
        &self,
        base_ty: Ty<'db>,
    ) -> Option<InferOk<'db, MethodCallee<'db>>> {
        self.try_overloaded_place_op(base_ty, None, PlaceOp::Deref)
    }

    /// For the overloaded place expressions (`*x`, `x[3]`), the trait
    /// returns a type of `&T`, but the actual type we assign to the
    /// *expression* is `T`. So this function just peels off the return
    /// type by one layer to yield `T`.
    fn make_overloaded_place_return_type(&self, method: MethodCallee<'db>) -> Ty<'db> {
        // extract method return type, which will be &T;
        let ret_ty = method.sig.output();

        // method returns &T, but the type as visible to user is T, so deref
        ret_ty.builtin_deref(true).unwrap()
    }

    /// Type-check `*oprnd_expr` with `oprnd_expr` type-checked already.
    ///
    /// First tries the builtin deref (`&T`/`&mut T`/`*const T`/`*mut T`);
    /// failing that, resolves an overloaded `Deref` impl, records the
    /// auto-ref adjustment on the operand and the method resolution on the
    /// whole deref expression. Returns `None` if neither applies.
    pub(super) fn lookup_derefing(
        &mut self,
        expr: ExprId,
        oprnd_expr: ExprId,
        oprnd_ty: Ty<'db>,
    ) -> Option<Ty<'db>> {
        // Builtin deref needs no adjustment bookkeeping.
        if let Some(ty) = oprnd_ty.builtin_deref(true) {
            return Some(ty);
        }

        let ok = self.try_overloaded_deref(oprnd_ty)?;
        let method = self.table.register_infer_ok(ok);
        // `Deref::deref` takes `&self`, so the resolved method's first input
        // must be an immutable reference; record the matching auto-borrow.
        if let TyKind::Ref(_, _, Mutability::Not) = method.sig.inputs_and_output.inputs()[0].kind()
        {
            self.write_expr_adj(
                oprnd_expr,
                Box::new([Adjustment {
                    kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
                    target: method.sig.inputs_and_output.inputs()[0],
                }]),
            );
        } else {
            panic!("input to deref is not a ref?");
        }
        let ty = self.make_overloaded_place_return_type(method);
        self.write_method_resolution(expr, method.def_id, method.args);
        Some(ty)
    }

    /// Type-check `*base_expr[index_expr]` with `base_expr` and `index_expr` type-checked already.
    ///
    /// Autoderefs `base_ty` step by step, attempting `try_index_step` at each
    /// level until one succeeds. Returns the `(index type, element type)`
    /// pair on success.
    pub(super) fn lookup_indexing(
        &mut self,
        expr: ExprId,
        base_expr: ExprId,
        base_ty: Ty<'db>,
        idx_ty: Ty<'db>,
    ) -> Option<(/*index type*/ Ty<'db>, /*element type*/ Ty<'db>)> {
        // FIXME(#18741) -- this is almost but not quite the same as the
        // autoderef that normal method probing does. They could likely be
        // consolidated.

        let mut autoderef = InferenceContextAutoderef::new_from_inference_context(self, base_ty);
        let mut result = None;
        while result.is_none() && autoderef.next().is_some() {
            result = Self::try_index_step(expr, base_expr, &mut autoderef, idx_ty);
        }
        result
    }

    /// To type-check `base_expr[index_expr]`, we progressively autoderef
    /// (and otherwise adjust) `base_expr`, looking for a type which either
    /// supports builtin indexing or overloaded indexing.
    /// This loop implements one step in that search; the autoderef loop
    /// is implemented by `lookup_indexing`.
    fn try_index_step(
        expr: ExprId,
        base_expr: ExprId,
        autoderef: &mut InferenceContextAutoderef<'_, 'a, 'db>,
        index_ty: Ty<'db>,
    ) -> Option<(/*index type*/ Ty<'db>, /*element type*/ Ty<'db>)> {
        let ty = autoderef.final_ty();
        let adjusted_ty = autoderef.ctx().table.structurally_resolve_type(ty);
        debug!(
            "try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \
             index_ty={:?})",
            expr, base_expr, adjusted_ty, index_ty
        );

        // First pass tries the type as-is; second pass retries after
        // unsizing `[T; N]` to `[T]` (the only unsize coercion done here).
        for unsize in [false, true] {
            let mut self_ty = adjusted_ty;
            if unsize {
                // We only unsize arrays here.
                if let TyKind::Array(element_ty, ct) = adjusted_ty.kind() {
                    let ctx = autoderef.ctx();
                    // Array lengths must be `usize`; register that constraint
                    // so the const `ct` is checked against it.
                    ctx.table.register_predicate(Obligation::new(
                        ctx.interner(),
                        ObligationCause::new(),
                        ctx.table.trait_env.env,
                        ClauseKind::ConstArgHasType(ct, ctx.types.usize),
                    ));
                    self_ty = Ty::new_slice(ctx.interner(), element_ty);
                } else {
                    continue;
                }
            }

            // If some lookup succeeds, write callee into table and extract index/element
            // type from the method signature.
            // If some lookup succeeded, install method in table
            // A fresh inference var stands in for the `Index<T>` type
            // parameter; the actual index expression's type is unified with
            // it by the caller, not taken directly (see rustc #72002/#74933).
            let input_ty = autoderef.ctx().table.next_ty_var();
            let method =
                autoderef.ctx().try_overloaded_place_op(self_ty, Some(input_ty), PlaceOp::Index);

            if let Some(result) = method {
                debug!("try_index_step: success, using overloaded indexing");
                let method = autoderef.ctx().table.register_infer_ok(result);

                // Record the autoderef steps taken so far as adjustments on
                // the base expression, then append the auto-borrow (and
                // optional unsize) required by the `index` receiver.
                let infer_ok = autoderef.adjust_steps_as_infer_ok();
                let mut adjustments = autoderef.ctx().table.register_infer_ok(infer_ok);
                if let TyKind::Ref(region, _, Mutability::Not) =
                    method.sig.inputs_and_output.inputs()[0].kind()
                {
                    adjustments.push(Adjustment {
                        kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
                        target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty),
                    });
                } else {
                    panic!("input to index is not a ref?");
                }
                if unsize {
                    adjustments.push(Adjustment {
                        kind: Adjust::Pointer(PointerCast::Unsize),
                        target: method.sig.inputs_and_output.inputs()[0],
                    });
                }
                autoderef.ctx().write_expr_adj(base_expr, adjustments.into_boxed_slice());

                autoderef.ctx().write_method_resolution(expr, method.def_id, method.args);

                return Some((input_ty, autoderef.ctx().make_overloaded_place_return_type(method)));
            }
        }

        None
    }

    /// Try to resolve an overloaded place op. We only deal with the immutable
    /// variant here (Deref/Index). In some contexts we would need the mutable
    /// variant (DerefMut/IndexMut); those would be later converted by
    /// `convert_place_derefs_to_mutable`.
    pub(super) fn try_overloaded_place_op(
        &self,
        base_ty: Ty<'db>,
        opt_rhs_ty: Option<Ty<'db>>,
        op: PlaceOp,
    ) -> Option<InferOk<'db, MethodCallee<'db>>> {
        debug!("try_overloaded_place_op({:?},{:?})", base_ty, op);

        // Pick the lang-item trait and method symbol for the immutable form
        // of the operator.
        let (Some(imm_tr), imm_op) = (match op {
            PlaceOp::Deref => (LangItem::Deref.resolve_trait(self.db, self.krate()), sym::deref),
            PlaceOp::Index => (LangItem::Index.resolve_trait(self.db, self.krate()), sym::index),
        }) else {
            // Bail if `Deref` or `Index` isn't defined.
            return None;
        };

        // FIXME(trait-system-refactor-initiative#231): we may want to treat
        // opaque types as rigid here to support `impl Deref<Target = impl Index<usize>>`.
        let treat_opaques = TreatNotYetDefinedOpaques::AsInfer;
        self.table.lookup_method_for_operator(
            ObligationCause::new(),
            imm_op,
            imm_tr,
            base_ty,
            opt_rhs_ty,
            treat_opaques,
        )
    }

    /// Mutable counterpart of [`Self::try_overloaded_place_op`]: resolves the
    /// `DerefMut`/`IndexMut` method instead. Taken as an associated fn (no
    /// `&self`) because the caller only has split borrows of the context.
    pub(super) fn try_mutable_overloaded_place_op(
        table: &InferenceTable<'db>,
        krate: Crate,
        base_ty: Ty<'db>,
        opt_rhs_ty: Option<Ty<'db>>,
        op: PlaceOp,
    ) -> Option<InferOk<'db, MethodCallee<'db>>> {
        debug!("try_mutable_overloaded_place_op({:?},{:?})", base_ty, op);

        let (Some(mut_tr), mut_op) = (match op {
            PlaceOp::Deref => (LangItem::DerefMut.resolve_trait(table.db, krate), sym::deref_mut),
            PlaceOp::Index => (LangItem::IndexMut.resolve_trait(table.db, krate), sym::index_mut),
        }) else {
            // Bail if `DerefMut` or `IndexMut` isn't defined.
            return None;
        };

        // We have to replace the operator with the mutable variant for the
        // program to compile, so we don't really have a choice here and want
        // to just try using `DerefMut` even if its not in the item bounds
        // of the opaque.
        let treat_opaques = TreatNotYetDefinedOpaques::AsInfer;
        table.lookup_method_for_operator(
            ObligationCause::new(),
            mut_op,
            mut_tr,
            base_ty,
            opt_rhs_ty,
            treat_opaques,
        )
    }

    /// Upgrades a previously-resolved *immutable* place op on `expr`
    /// (`Deref::deref` / `Index::index`) to its mutable variant
    /// (`DerefMut::deref_mut` / `IndexMut::index_mut`) and rewrites the
    /// recorded auto-borrow adjustments on `base_expr` to be `&mut`.
    ///
    /// Called when a place produced by an overloaded op turns out to be used
    /// mutably. Does nothing when the op resolved to the builtin form, and
    /// keeps the immutable resolution when no mutable impl can be found.
    pub(super) fn convert_place_op_to_mutable(
        &mut self,
        op: PlaceOp,
        expr: ExprId,
        base_expr: ExprId,
        index_expr: Option<ExprId>,
    ) {
        debug!("convert_place_op_to_mutable({:?}, {:?}, {:?})", op, expr, base_expr);
        if !self.result.method_resolutions.contains_key(&expr) {
            debug!("convert_place_op_to_mutable - builtin, nothing to do");
            return;
        }

        // Need to deref because overloaded place ops take self by-reference.
        let base_ty = self
            .expr_ty_after_adjustments(base_expr)
            .builtin_deref(false)
            .expect("place op takes something that is not a ref");

        let arg_ty = match op {
            PlaceOp::Deref => None,
            PlaceOp::Index => {
                // We would need to recover the `T` used when we resolve `<_ as Index<T>>::index`
                // in try_index_step. This is the arg at index 1.
                //
                // FIXME: rustc does not use the type of `index_expr` with the following explanation.
                //
                // Note: we should *not* use `expr_ty` of index_expr here because autoderef
                // during coercions can cause type of index_expr to differ from `T` (#72002).
                // We also could not use `expr_ty_adjusted` of index_expr because reborrowing
                // during coercions can also cause type of index_expr to differ from `T`,
                // which can potentially cause regionck failure (#74933).
                Some(self.expr_ty_after_adjustments(
                    index_expr.expect("`PlaceOp::Index` should have `index_expr`"),
                ))
            }
        };
        let method =
            Self::try_mutable_overloaded_place_op(&self.table, self.krate(), base_ty, arg_ty, op);
        let method = match method {
            Some(ok) => self.table.register_infer_ok(ok),
            // Couldn't find the mutable variant of the place op, keep the
            // current, immutable version.
            None => return,
        };
        debug!("convert_place_op_to_mutable: method={:?}", method);
        self.result.method_resolutions.insert(expr, (method.def_id, method.args));

        // The mutable op's receiver must be `&mut`; extract its region so
        // the rewritten auto-borrow below uses the same one.
        let TyKind::Ref(region, _, Mutability::Mut) =
            method.sig.inputs_and_output.inputs()[0].kind()
        else {
            panic!("input to mutable place op is not a mut ref?");
        };

        // Convert the autoref in the base expr to mutable with the correct
        // region and mutability.
        let base_expr_ty = self.expr_ty(base_expr);
        let interner = self.interner();
        if let Some(adjustments) = self.result.expr_adjustments.get_mut(&base_expr) {
            // Walk the adjustment chain, tracking the source type of each
            // step, and flip every `&` auto-borrow to `&mut`.
            let mut source = base_expr_ty;
            for adjustment in &mut adjustments[..] {
                if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind {
                    debug!("convert_place_op_to_mutable: converting autoref {:?}", adjustment);
                    let mutbl = AutoBorrowMutability::Mut {
                        // Deref/indexing can be desugared to a method call,
                        // so maybe we could use two-phase here.
                        // See the documentation of AllowTwoPhase for why that's
                        // not the case today.
                        allow_two_phase_borrow: AllowTwoPhase::No,
                    };
                    adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(mutbl));
                    adjustment.target = Ty::new_ref(interner, region, source, mutbl.into());
                }
                source = adjustment.target;
            }
            // If we have an autoref followed by unsizing at the end, fix the unsize target.
            if let [
                ..,
                Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. },
                Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target },
            ] = adjustments[..]
            {
                *target = method.sig.inputs_and_output.inputs()[0];
            }
        }
    }
}

View file

@ -7,9 +7,8 @@ use hir_expand::name::Name;
use intern::sym;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
DebruijnIndex, InferConst, InferTy, RegionVid, TyVid, TypeFoldable, TypeFolder,
TypeSuperFoldable, TypeVisitableExt, UpcastFrom,
inherent::{Const as _, IntoKind, Ty as _},
TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom,
inherent::{Const as _, GenericArg as _, IntoKind, SliceLike, Ty as _},
solve::{Certainty, GoalSource},
};
use smallvec::SmallVec;
@ -18,15 +17,14 @@ use triomphe::Arc;
use crate::{
TraitEnvironment,
db::HirDatabase,
infer::InferenceContext,
next_solver::{
self, AliasTy, Binder, Canonical, ClauseKind, Const, ConstKind, DbInterner,
ErrorGuaranteed, GenericArg, GenericArgs, Predicate, PredicateKind, Region, RegionKind,
SolverDefId, TraitRef, Ty, TyKind, TypingMode,
AliasTy, Canonical, ClauseKind, Const, DbInterner, ErrorGuaranteed, GenericArg,
GenericArgs, Goal, Predicate, PredicateKind, Region, SolverDefId, Term, TraitRef, Ty,
TyKind, TypingMode,
fulfill::{FulfillmentCtxt, NextSolverError},
infer::{
DbInternerInferExt, InferCtxt, InferOk, InferResult,
at::ToTrace,
at::{At, ToTrace},
snapshot::CombinedSnapshot,
traits::{Obligation, ObligationCause, PredicateObligation},
},
@ -38,15 +36,6 @@ use crate::{
},
};
impl<'db> InferenceContext<'_, 'db> {
pub(super) fn canonicalize<T>(&mut self, t: T) -> rustc_type_ir::Canonical<DbInterner<'db>, T>
where
T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
self.table.canonicalize(t)
}
}
struct NestedObligationsForSelfTy<'a, 'db> {
ctx: &'a InferenceTable<'db>,
self_ty: TyVid,
@ -292,10 +281,7 @@ impl<'db> InferenceTable<'db> {
T: TypeFoldable<DbInterner<'db>> + Clone,
{
let ty = self.resolve_vars_with_obligations(ty);
self.infer_ctxt
.at(&ObligationCause::new(), self.trait_env.env)
.deeply_normalize(ty.clone())
.unwrap_or(ty)
self.at(&ObligationCause::new()).deeply_normalize(ty.clone()).unwrap_or(ty)
}
/// Works almost same as [`Self::normalize_associated_types_in`], but this also resolves shallow
@ -316,19 +302,19 @@ impl<'db> InferenceTable<'db> {
.unwrap_or(alias)
}
pub(crate) fn next_ty_var(&mut self) -> Ty<'db> {
pub(crate) fn next_ty_var(&self) -> Ty<'db> {
self.infer_ctxt.next_ty_var()
}
pub(crate) fn next_const_var(&mut self) -> Const<'db> {
pub(crate) fn next_const_var(&self) -> Const<'db> {
self.infer_ctxt.next_const_var()
}
pub(crate) fn next_int_var(&mut self) -> Ty<'db> {
pub(crate) fn next_int_var(&self) -> Ty<'db> {
self.infer_ctxt.next_int_var()
}
pub(crate) fn next_float_var(&mut self) -> Ty<'db> {
pub(crate) fn next_float_var(&self) -> Ty<'db> {
self.infer_ctxt.next_float_var()
}
@ -338,101 +324,12 @@ impl<'db> InferenceTable<'db> {
var
}
pub(crate) fn next_region_var(&mut self) -> Region<'db> {
pub(crate) fn next_region_var(&self) -> Region<'db> {
self.infer_ctxt.next_region_var()
}
pub(crate) fn next_var_for_param(&mut self, id: GenericParamId) -> GenericArg<'db> {
match id {
GenericParamId::TypeParamId(_) => self.next_ty_var().into(),
GenericParamId::ConstParamId(_) => self.next_const_var().into(),
GenericParamId::LifetimeParamId(_) => self.next_region_var().into(),
}
}
pub(crate) fn resolve_with_fallback<T>(
&mut self,
t: T,
fallback_ty: &mut dyn FnMut(DebruijnIndex, InferTy) -> Ty<'db>,
fallback_const: &mut dyn FnMut(DebruijnIndex, InferConst) -> Const<'db>,
fallback_region: &mut dyn FnMut(DebruijnIndex, RegionVid) -> Region<'db>,
) -> T
where
T: TypeFoldable<DbInterner<'db>>,
{
struct Resolver<'a, 'db> {
table: &'a mut InferenceTable<'db>,
binder: DebruijnIndex,
fallback_ty: &'a mut dyn FnMut(DebruijnIndex, InferTy) -> Ty<'db>,
fallback_const: &'a mut dyn FnMut(DebruijnIndex, InferConst) -> Const<'db>,
fallback_region: &'a mut dyn FnMut(DebruijnIndex, RegionVid) -> Region<'db>,
}
impl<'db> TypeFolder<DbInterner<'db>> for Resolver<'_, 'db> {
fn cx(&self) -> DbInterner<'db> {
self.table.interner()
}
fn fold_binder<T>(&mut self, t: Binder<'db, T>) -> Binder<'db, T>
where
T: TypeFoldable<DbInterner<'db>>,
{
self.binder.shift_in(1);
let result = t.super_fold_with(self);
self.binder.shift_out(1);
result
}
fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
if !t.has_infer() {
return t;
}
if let TyKind::Infer(infer) = t.kind() {
(self.fallback_ty)(self.binder, infer)
} else {
t.super_fold_with(self)
}
}
fn fold_const(&mut self, c: Const<'db>) -> Const<'db> {
if !c.has_infer() {
return c;
}
if let ConstKind::Infer(infer) = c.kind() {
(self.fallback_const)(self.binder, infer)
} else {
c.super_fold_with(self)
}
}
fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
if let RegionKind::ReVar(infer) = r.kind() {
(self.fallback_region)(self.binder, infer)
} else {
r
}
}
}
t.fold_with(&mut Resolver {
table: self,
binder: DebruijnIndex::ZERO,
fallback_ty,
fallback_const,
fallback_region,
})
}
pub(crate) fn instantiate_canonical<T>(
&mut self,
canonical: rustc_type_ir::Canonical<DbInterner<'db>, T>,
) -> T
where
T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
{
self.infer_ctxt.instantiate_canonical(&canonical).0
pub(crate) fn next_var_for_param(&self, id: GenericParamId) -> GenericArg<'db> {
self.infer_ctxt.next_var_for_param(id)
}
pub(crate) fn resolve_completely<T>(&mut self, value: T) -> T
@ -456,7 +353,11 @@ impl<'db> InferenceTable<'db> {
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them.
pub(crate) fn try_unify<T: ToTrace<'db>>(&mut self, t1: T, t2: T) -> InferResult<'db, ()> {
self.infer_ctxt.at(&ObligationCause::new(), self.trait_env.env).eq(t1, t2)
self.at(&ObligationCause::new()).eq(t1, t2)
}
pub(crate) fn at<'a>(&'a self, cause: &'a ObligationCause) -> At<'a, 'db> {
self.infer_ctxt.at(cause, self.trait_env.env)
}
pub(crate) fn shallow_resolve(&self, ty: Ty<'db>) -> Ty<'db> {
@ -486,15 +387,6 @@ impl<'db> InferenceTable<'db> {
self.infer_ctxt.fresh_args_for_item(def)
}
/// Like `fresh_args_for_item()`, but first uses the args from `first`.
pub(crate) fn fill_rest_fresh_args(
&self,
def_id: SolverDefId,
first: impl IntoIterator<Item = GenericArg<'db>>,
) -> GenericArgs<'db> {
self.infer_ctxt.fill_rest_fresh_args(def_id, first)
}
/// Try to resolve `ty` to a structural type, normalizing aliases.
///
/// In case there is still ambiguity, the returned type may be an inference
@ -535,17 +427,6 @@ impl<'db> InferenceTable<'db> {
self.fulfillment_cx = snapshot.obligations;
}
#[tracing::instrument(skip_all)]
pub(crate) fn run_in_snapshot<T>(
&mut self,
f: impl FnOnce(&mut InferenceTable<'db>) -> T,
) -> T {
let snapshot = self.snapshot();
let result = f(self);
self.rollback_to(snapshot);
result
}
pub(crate) fn commit_if_ok<T, E>(
&mut self,
f: impl FnOnce(&mut InferenceTable<'db>) -> Result<T, E>,
@ -566,22 +447,19 @@ impl<'db> InferenceTable<'db> {
/// choice (during e.g. method resolution or deref).
#[tracing::instrument(level = "debug", skip(self))]
pub(crate) fn try_obligation(&mut self, predicate: Predicate<'db>) -> NextTraitSolveResult {
let goal = next_solver::Goal { param_env: self.trait_env.env, predicate };
let goal = Goal { param_env: self.trait_env.env, predicate };
let canonicalized = self.canonicalize(goal);
next_trait_solve_canonical_in_ctxt(&self.infer_ctxt, canonicalized)
}
pub(crate) fn register_obligation(&mut self, predicate: Predicate<'db>) {
let goal = next_solver::Goal { param_env: self.trait_env.env, predicate };
let goal = Goal { param_env: self.trait_env.env, predicate };
self.register_obligation_in_env(goal)
}
#[tracing::instrument(level = "debug", skip(self))]
fn register_obligation_in_env(
&mut self,
goal: next_solver::Goal<'db, next_solver::Predicate<'db>>,
) {
fn register_obligation_in_env(&mut self, goal: Goal<'db, Predicate<'db>>) {
let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal);
tracing::debug!(?result);
match result {
@ -619,7 +497,7 @@ impl<'db> InferenceTable<'db> {
self.fulfillment_cx.register_predicate_obligation(&self.infer_ctxt, obligation);
}
pub(super) fn register_predicates<I>(&mut self, obligations: I)
pub(crate) fn register_predicates<I>(&mut self, obligations: I)
where
I: IntoIterator<Item = PredicateObligation<'db>>,
{
@ -628,6 +506,23 @@ impl<'db> InferenceTable<'db> {
});
}
/// checking later, during regionck, that `arg` is well-formed.
pub(crate) fn register_wf_obligation(&mut self, term: Term<'db>, cause: ObligationCause) {
self.register_predicate(Obligation::new(
self.interner(),
cause,
self.trait_env.env,
ClauseKind::WellFormed(term),
));
}
/// Registers obligations that all `args` are well-formed.
pub(crate) fn add_wf_bounds(&mut self, args: GenericArgs<'db>) {
for term in args.iter().filter_map(|it| it.as_term()) {
self.register_wf_obligation(term, ObligationCause::new());
}
}
pub(crate) fn callable_sig(
&mut self,
ty: Ty<'db>,
@ -714,26 +609,20 @@ impl<'db> InferenceTable<'db> {
}
/// Whenever you lower a user-written type, you should call this.
pub(crate) fn process_user_written_ty<T>(&mut self, ty: T) -> T
where
T: TypeFoldable<DbInterner<'db>>,
{
pub(crate) fn process_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
self.process_remote_user_written_ty(ty)
// FIXME: Register a well-formed obligation.
}
/// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation,
/// while `process_user_written_ty()` should (but doesn't currently).
pub(crate) fn process_remote_user_written_ty<T>(&mut self, ty: T) -> T
where
T: TypeFoldable<DbInterner<'db>>,
{
pub(crate) fn process_remote_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
let ty = self.insert_type_vars(ty);
// See https://github.com/rust-lang/rust/blob/cdb45c87e2cd43495379f7e867e3cc15dcee9f93/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs#L487-L495:
// Even though the new solver only lazily normalizes usually, here we eagerly normalize so that not everything needs
// to normalize before inspecting the `TyKind`.
// FIXME(next-solver): We should not deeply normalize here, only shallowly.
self.normalize_associated_types_in(ty)
self.try_structurally_resolve_type(ty)
}
/// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
@ -852,7 +741,7 @@ mod resolve_completely {
{
let value = if self.should_normalize {
let cause = ObligationCause::new();
let at = self.ctx.infer_ctxt.at(&cause, self.ctx.trait_env.env);
let at = self.ctx.at(&cause);
let universes = vec![None; outer_exclusive_binder(value).as_usize()];
match deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
at, value, universes,

View file

@ -1,8 +1,7 @@
//! Functions to detect special lang items
use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
use hir_expand::name::Name;
use intern::sym;
use intern::{Symbol, sym};
use crate::db::HirDatabase;
@ -11,48 +10,48 @@ pub fn is_box(db: &dyn HirDatabase, adt: AdtId) -> bool {
db.struct_signature(id).flags.contains(StructFlags::IS_BOX)
}
pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> {
pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Symbol, LangItem)> {
use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
Some(match op {
BinaryOp::LogicOp(_) => return None,
BinaryOp::ArithOp(aop) => match aop {
ArithOp::Add => (Name::new_symbol_root(sym::add), LangItem::Add),
ArithOp::Mul => (Name::new_symbol_root(sym::mul), LangItem::Mul),
ArithOp::Sub => (Name::new_symbol_root(sym::sub), LangItem::Sub),
ArithOp::Div => (Name::new_symbol_root(sym::div), LangItem::Div),
ArithOp::Rem => (Name::new_symbol_root(sym::rem), LangItem::Rem),
ArithOp::Shl => (Name::new_symbol_root(sym::shl), LangItem::Shl),
ArithOp::Shr => (Name::new_symbol_root(sym::shr), LangItem::Shr),
ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor), LangItem::BitXor),
ArithOp::BitOr => (Name::new_symbol_root(sym::bitor), LangItem::BitOr),
ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand), LangItem::BitAnd),
ArithOp::Add => (sym::add, LangItem::Add),
ArithOp::Mul => (sym::mul, LangItem::Mul),
ArithOp::Sub => (sym::sub, LangItem::Sub),
ArithOp::Div => (sym::div, LangItem::Div),
ArithOp::Rem => (sym::rem, LangItem::Rem),
ArithOp::Shl => (sym::shl, LangItem::Shl),
ArithOp::Shr => (sym::shr, LangItem::Shr),
ArithOp::BitXor => (sym::bitxor, LangItem::BitXor),
ArithOp::BitOr => (sym::bitor, LangItem::BitOr),
ArithOp::BitAnd => (sym::bitand, LangItem::BitAnd),
},
BinaryOp::Assignment { op: Some(aop) } => match aop {
ArithOp::Add => (Name::new_symbol_root(sym::add_assign), LangItem::AddAssign),
ArithOp::Mul => (Name::new_symbol_root(sym::mul_assign), LangItem::MulAssign),
ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign), LangItem::SubAssign),
ArithOp::Div => (Name::new_symbol_root(sym::div_assign), LangItem::DivAssign),
ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign), LangItem::RemAssign),
ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign), LangItem::ShlAssign),
ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign), LangItem::ShrAssign),
ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor_assign), LangItem::BitXorAssign),
ArithOp::BitOr => (Name::new_symbol_root(sym::bitor_assign), LangItem::BitOrAssign),
ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand_assign), LangItem::BitAndAssign),
ArithOp::Add => (sym::add_assign, LangItem::AddAssign),
ArithOp::Mul => (sym::mul_assign, LangItem::MulAssign),
ArithOp::Sub => (sym::sub_assign, LangItem::SubAssign),
ArithOp::Div => (sym::div_assign, LangItem::DivAssign),
ArithOp::Rem => (sym::rem_assign, LangItem::RemAssign),
ArithOp::Shl => (sym::shl_assign, LangItem::ShlAssign),
ArithOp::Shr => (sym::shr_assign, LangItem::ShrAssign),
ArithOp::BitXor => (sym::bitxor_assign, LangItem::BitXorAssign),
ArithOp::BitOr => (sym::bitor_assign, LangItem::BitOrAssign),
ArithOp::BitAnd => (sym::bitand_assign, LangItem::BitAndAssign),
},
BinaryOp::CmpOp(cop) => match cop {
CmpOp::Eq { negated: false } => (Name::new_symbol_root(sym::eq), LangItem::PartialEq),
CmpOp::Eq { negated: true } => (Name::new_symbol_root(sym::ne), LangItem::PartialEq),
CmpOp::Eq { negated: false } => (sym::eq, LangItem::PartialEq),
CmpOp::Eq { negated: true } => (sym::ne, LangItem::PartialEq),
CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
(Name::new_symbol_root(sym::le), LangItem::PartialOrd)
(sym::le, LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
(Name::new_symbol_root(sym::lt), LangItem::PartialOrd)
(sym::lt, LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
(Name::new_symbol_root(sym::ge), LangItem::PartialOrd)
(sym::ge, LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
(Name::new_symbol_root(sym::gt), LangItem::PartialOrd)
(sym::gt, LangItem::PartialOrd)
}
},
BinaryOp::Assignment { op: None } => return None,

View file

@ -89,13 +89,12 @@ pub use infer::{
could_coerce, could_unify, could_unify_deeply,
};
pub use lower::{
LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId,
GenericPredicates, ImplTraits, LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId,
associated_type_shorthand_candidates, diagnostics::*,
};
pub use method_resolution::check_orphan_rules;
pub use next_solver::interner::{attach_db, attach_db_allow_change, with_attached_db};
pub use target_feature::TargetFeatures;
pub use traits::TraitEnvironment;
pub use traits::{TraitEnvironment, check_orphan_rules};
pub use utils::{
TargetFeatureIsSafeInTarget, Unsafety, all_super_traits, direct_super_traits,
is_fn_unsafe_to_call, target_feature_is_safe_in_target,

View file

@ -8,12 +8,9 @@
pub(crate) mod diagnostics;
pub(crate) mod path;
use std::{
cell::OnceCell,
iter, mem,
ops::{self, Deref, Not as _},
};
use std::{cell::OnceCell, iter, mem};
use arrayvec::ArrayVec;
use base_db::Crate;
use either::Either;
use hir_def::{
@ -45,7 +42,7 @@ use rustc_type_ir::{
AliasTyKind, BoundVarIndexKind, ConstKind, DebruijnIndex, ExistentialPredicate,
ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate,
TyKind::{self},
TypeVisitableExt,
TypeVisitableExt, Upcast,
inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _},
};
use salsa::plumbing::AsId;
@ -56,7 +53,7 @@ use triomphe::{Arc, ThinArc};
use crate::{
FnAbi, ImplTraitId, TraitEnvironment, TyLoweringDiagnostic, TyLoweringDiagnosticKind,
consteval::intern_const_ref,
db::HirDatabase,
db::{HirDatabase, InternedOpaqueTyId},
generics::{Generics, generics, trait_self_param_idx},
next_solver::{
AliasTy, Binder, BoundExistentialPredicates, Clause, Clauses, Const, DbInterner,
@ -75,7 +72,7 @@ pub struct ImplTraits<'db> {
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTrait<'db> {
pub(crate) predicates: Vec<Clause<'db>>,
pub(crate) predicates: Box<[Clause<'db>]>,
}
pub type ImplTraitIdx<'db> = Idx<ImplTrait<'db>>;
@ -473,7 +470,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
let idx = self
.impl_trait_mode
.opaque_type_data
.alloc(ImplTrait { predicates: Vec::default() });
.alloc(ImplTrait { predicates: Box::default() });
let impl_trait_id = origin.either(
|f| ImplTraitId::ReturnTypeImplTrait(f, idx),
@ -916,8 +913,7 @@ impl<'db, 'a> TyLoweringContext<'db, 'a> {
});
predicates.extend(sized_clause);
}
predicates.shrink_to_fit();
predicates
predicates.into_boxed_slice()
});
ImplTrait { predicates }
}
@ -982,50 +978,89 @@ pub(crate) fn impl_trait_with_diagnostics_query<'db>(
Some((trait_ref, create_diagnostics(ctx.diagnostics)))
}
pub(crate) fn return_type_impl_traits<'db>(
db: &'db dyn HirDatabase,
def: hir_def::FunctionId,
) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>> {
// FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
let data = db.function_signature(def);
let resolver = def.resolver(db);
let mut ctx_ret =
TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer)
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
if let Some(ret_type) = data.ret_type {
let _ret = ctx_ret.lower_ty(ret_type);
}
let return_type_impl_traits =
ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data };
if return_type_impl_traits.impl_traits.is_empty() {
None
} else {
Some(Arc::new(EarlyBinder::bind(return_type_impl_traits)))
impl<'db> ImplTraitId<'db> {
    /// Returns the (still-bound) predicate clauses of the opaque type this id
    /// refers to, looking them up from the per-owner `ImplTraits` query for
    /// either a return-position or a type-alias impl trait.
    ///
    /// Panics if the owner has no recorded opaque type data (the id should
    /// only exist when lowering produced one).
    #[inline]
    pub fn predicates(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> {
        // Dispatch to the query matching the opaque's origin; `idx` selects
        // this particular opaque among the owner's impl traits.
        let (impl_traits, idx) = match self {
            ImplTraitId::ReturnTypeImplTrait(owner, idx) => {
                (ImplTraits::return_type_impl_traits(db, owner), idx)
            }
            ImplTraitId::TypeAliasImplTrait(owner, idx) => {
                (ImplTraits::type_alias_impl_traits(db, owner), idx)
            }
        };
        impl_traits
            .as_deref()
            .expect("owner should have opaque type")
            .as_ref()
            .map_bound(|it| &*it.impl_traits[idx].predicates)
    }
}
pub(crate) fn type_alias_impl_traits<'db>(
db: &'db dyn HirDatabase,
def: hir_def::TypeAliasId,
) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>> {
let data = db.type_alias_signature(def);
let resolver = def.resolver(db);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
&data.store,
def.into(),
LifetimeElisionKind::AnonymousReportError,
)
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
if let Some(type_ref) = data.ty {
let _ty = ctx.lower_ty(type_ref);
impl InternedOpaqueTyId {
#[inline]
pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> {
self.loc(db).predicates(db)
}
let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data };
if type_alias_impl_traits.impl_traits.is_empty() {
None
} else {
Some(Arc::new(EarlyBinder::bind(type_alias_impl_traits)))
}
#[salsa::tracked]
impl<'db> ImplTraits<'db> {
    /// Collects the opaque types (`impl Trait`) appearing in the return type of `def`.
    ///
    /// Returns `None` when the return type contains no `impl Trait`, so callers can
    /// cheaply distinguish "no opaques" from "has opaques".
    #[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
    pub(crate) fn return_type_impl_traits(
        db: &'db dyn HirDatabase,
        def: hir_def::FunctionId,
    ) -> Option<Box<EarlyBinder<'db, ImplTraits<'db>>>> {
        // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
        let data = db.function_signature(def);
        let resolver = def.resolver(db);
        let mut ctx_ret = TyLoweringContext::new(
            db,
            &resolver,
            &data.store,
            def.into(),
            LifetimeElisionKind::Infer,
        )
        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
        // Lowering the return type populates `opaque_type_data` as a side effect;
        // the lowered type itself is discarded.
        if let Some(ret_type) = data.ret_type {
            let _ret = ctx_ret.lower_ty(ret_type);
        }
        let mut return_type_impl_traits =
            ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data };
        if return_type_impl_traits.impl_traits.is_empty() {
            None
        } else {
            return_type_impl_traits.impl_traits.shrink_to_fit();
            Some(Box::new(EarlyBinder::bind(return_type_impl_traits)))
        }
    }

    /// Collects the opaque types (`impl Trait`) appearing in the aliased type of `def`.
    ///
    /// Returns `None` when the aliased type contains no `impl Trait`.
    #[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
    pub(crate) fn type_alias_impl_traits(
        db: &'db dyn HirDatabase,
        def: hir_def::TypeAliasId,
    ) -> Option<Box<EarlyBinder<'db, ImplTraits<'db>>>> {
        let data = db.type_alias_signature(def);
        let resolver = def.resolver(db);
        let mut ctx = TyLoweringContext::new(
            db,
            &resolver,
            &data.store,
            def.into(),
            LifetimeElisionKind::AnonymousReportError,
        )
        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
        // Lowering the aliased type populates `opaque_type_data` as a side effect.
        if let Some(type_ref) = data.ty {
            let _ty = ctx.lower_ty(type_ref);
        }
        let mut type_alias_impl_traits =
            ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data };
        if type_alias_impl_traits.impl_traits.is_empty() {
            None
        } else {
            type_alias_impl_traits.impl_traits.shrink_to_fit();
            Some(Box::new(EarlyBinder::bind(type_alias_impl_traits)))
        }
    }
}
@ -1331,12 +1366,13 @@ pub(crate) fn field_types_with_diagnostics_query<'db>(
/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
#[tracing::instrument(skip(db), ret)]
pub(crate) fn generic_predicates_for_param_query<'db>(
#[salsa::tracked(returns(ref), unsafe(non_update_return_type), cycle_result = generic_predicates_for_param_cycle_result)]
pub(crate) fn generic_predicates_for_param<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
param_id: TypeOrConstParamId,
assoc_name: Option<Name>,
) -> GenericPredicates<'db> {
) -> EarlyBinder<'db, Box<[Clause<'db>]>> {
let generics = generics(db, def);
let interner = DbInterner::new_with(db, None, None);
let resolver = def.resolver(db);
@ -1436,44 +1472,140 @@ pub(crate) fn generic_predicates_for_param_query<'db>(
predicates.extend(implicitly_sized_predicates);
};
}
GenericPredicates(predicates.is_empty().not().then(|| predicates.into()))
EarlyBinder::bind(predicates.into_boxed_slice())
}
pub(crate) fn generic_predicates_for_param_cycle_result(
_db: &dyn HirDatabase,
pub(crate) fn generic_predicates_for_param_cycle_result<'db>(
_db: &'db dyn HirDatabase,
_def: GenericDefId,
_param_id: TypeOrConstParamId,
_assoc_name: Option<Name>,
) -> GenericPredicates<'_> {
GenericPredicates(None)
) -> EarlyBinder<'db, Box<[Clause<'db>]>> {
EarlyBinder::bind(Box::new([]))
}
/// Returns the lowered bounds of a type alias, without its diagnostics.
///
/// Thin wrapper over [`type_alias_bounds_with_diagnostics`] that reborrows the
/// cached boxed slice as a plain slice.
#[inline]
pub(crate) fn type_alias_bounds<'db>(
    db: &'db dyn HirDatabase,
    type_alias: TypeAliasId,
) -> EarlyBinder<'db, &'db [Clause<'db>]> {
    type_alias_bounds_with_diagnostics(db, type_alias).0.as_ref().map_bound(|it| &**it)
}
/// Lowers the bounds written on a type alias (e.g. the bounds of an associated type)
/// into clauses about the alias's own projection type, together with the lowering
/// diagnostics.
///
/// An implicit `Sized` bound is appended unless the alias was explicitly marked
/// `?Sized` during lowering.
#[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
pub fn type_alias_bounds_with_diagnostics<'db>(
    db: &'db dyn HirDatabase,
    type_alias: TypeAliasId,
) -> (EarlyBinder<'db, Box<[Clause<'db>]>>, Diagnostics) {
    let type_alias_data = db.type_alias_signature(type_alias);
    let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
    let mut ctx = TyLoweringContext::new(
        db,
        &resolver,
        &type_alias_data.store,
        type_alias.into(),
        LifetimeElisionKind::AnonymousReportError,
    );
    let interner = ctx.interner;
    let def_id = type_alias.into();
    // The self type the bounds apply to: the alias's own projection with identity args.
    let item_args = GenericArgs::identity_for_item(interner, def_id);
    let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args);
    let mut bounds = Vec::new();
    for bound in &type_alias_data.bounds {
        ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| {
            bounds.push(pred);
        });
    }
    // Add the implicit `Sized` bound unless a `?Sized` bound was recorded above.
    if !ctx.unsized_types.contains(&interner_ty) {
        let sized_trait = LangItem::Sized
            .resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate"));
        if let Some(sized_trait) = sized_trait {
            let trait_ref = TraitRef::new_from_args(
                interner,
                sized_trait.into(),
                GenericArgs::new_from_iter(interner, [interner_ty.into()]),
            );
            bounds.push(trait_ref.upcast(interner));
        };
    }
    (EarlyBinder::bind(bounds.into_boxed_slice()), create_diagnostics(ctx.diagnostics))
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericPredicates<'db>(Option<Arc<[Clause<'db>]>>);
pub struct GenericPredicates<'db> {
    // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the
    // parent. Then come the explicit predicates for the parent, then the explicit predicates for the child,
    // then the implicit trait predicate for the child, if `is_trait` is `true`.
    // All predicates for the item (parent's first), bound over the item's generics.
    predicates: EarlyBinder<'db, Box<[Clause<'db>]>>,
    // Index into `predicates` where the item's own (non-parent) predicates begin.
    own_predicates_start: u32,
    // Whether the last predicate is the implicit `Self: Trait` predicate of the item.
    is_trait: bool,
    // Whether the first predicate is the implicit `Self: Trait` predicate of the parent.
    parent_is_trait: bool,
}
#[salsa::tracked]
impl<'db> GenericPredicates<'db> {
#[inline]
pub fn instantiate(
&self,
interner: DbInterner<'db>,
args: GenericArgs<'db>,
) -> Option<impl Iterator<Item = Clause<'db>>> {
self.0
.as_ref()
.map(|it| EarlyBinder::bind(it.iter().copied()).iter_instantiated(interner, args))
}
#[inline]
pub fn instantiate_identity(&self) -> Option<impl Iterator<Item = Clause<'db>>> {
self.0.as_ref().map(|it| it.iter().copied())
/// Resolve the where clause(s) of an item with generics.
///
/// Diagnostics are computed only for this item's predicates, not for parents.
#[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
pub fn query_with_diagnostics(
db: &'db dyn HirDatabase,
def: GenericDefId,
) -> (GenericPredicates<'db>, Diagnostics) {
generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true)
}
}
impl<'db> ops::Deref for GenericPredicates<'db> {
type Target = [Clause<'db>];
impl<'db> GenericPredicates<'db> {
    /// Returns the cached predicates for `def`, discarding the diagnostics.
    #[inline]
    pub fn query(db: &'db dyn HirDatabase, def: GenericDefId) -> &'db GenericPredicates<'db> {
        &Self::query_with_diagnostics(db, def).0
    }
fn deref(&self) -> &Self::Target {
self.0.as_deref().unwrap_or(&[])
    /// Convenience for `query(..).all_predicates()`: every predicate, including
    /// parent predicates and implicit `Self: Trait` predicates.
    #[inline]
    pub fn query_all(
        db: &'db dyn HirDatabase,
        def: GenericDefId,
    ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
        Self::query(db, def).all_predicates()
    }
    /// Convenience for `query(..).own_predicates()`: only the predicates starting
    /// at the item's own portion, excluding those inherited from the parent.
    #[inline]
    pub fn query_own(
        db: &'db dyn HirDatabase,
        def: GenericDefId,
    ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
        Self::query(db, def).own_predicates()
    }
    /// Convenience for `query(..).explicit_predicates()`: all predicates minus the
    /// implicit `Self: Trait` predicates at either end.
    #[inline]
    pub fn query_explicit(
        db: &'db dyn HirDatabase,
        def: GenericDefId,
    ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
        Self::query(db, def).explicit_predicates()
    }
    /// Returns every stored predicate (parent's, own, and implicit trait predicates).
    #[inline]
    pub fn all_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> {
        self.predicates.as_ref().map_bound(|it| &**it)
    }
    /// Returns only the predicates from the item's own portion onwards,
    /// skipping those belonging to the parent item.
    #[inline]
    pub fn own_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> {
        self.predicates.as_ref().map_bound(|it| &it[self.own_predicates_start as usize..])
    }
    /// Returns the predicates, minus the implicit `Self: Trait` predicate for a trait.
    ///
    /// Relies on the storage order documented on the struct: the parent's implicit
    /// trait predicate (if any) is first and the item's own (if any) is last.
    #[inline]
    pub fn explicit_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> {
        self.predicates.as_ref().map_bound(|it| {
            &it[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)]
        })
    }
}
@ -1591,37 +1723,12 @@ pub(crate) fn trait_environment_query<'db>(
TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
}
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum PredicateFilter {
    // NOTE(review): presumably restricts lowering to `Self: Trait`-related bounds —
    // confirm against the use sites of `generic_predicates_filtered_by`.
    SelfTrait,
    // Lower all predicates; also enables the implicit `Self: Trait` predicate
    // (see `add_implicit_trait_predicate`, which checks for `All`).
    All,
}
/// Resolve the where clause(s) of an item with generics.
#[tracing::instrument(skip(db))]
pub(crate) fn generic_predicates_query<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
) -> GenericPredicates<'db> {
generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true).0
}
pub(crate) fn generic_predicates_without_parent_query<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
) -> GenericPredicates<'db> {
generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def).0
}
/// Resolve the where clause(s) of an item with generics,
/// except the ones inherited from the parent
pub(crate) fn generic_predicates_without_parent_with_diagnostics_query<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
) -> (GenericPredicates<'db>, Diagnostics) {
generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def)
}
/// Resolve the where clause(s) of an item with generics,
/// with a given filter
#[tracing::instrument(skip(db, filter), ret)]
@ -1644,15 +1751,35 @@ where
def,
LifetimeElisionKind::AnonymousReportError,
);
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
let mut predicates = Vec::new();
for maybe_parent_generics in
let all_generics =
std::iter::successors(Some(&generics), |generics| generics.parent_generics())
{
ctx.store = maybe_parent_generics.store();
for pred in maybe_parent_generics.where_predicates() {
tracing::debug!(?pred);
if filter(maybe_parent_generics.def()) {
.collect::<ArrayVec<_, 2>>();
let mut is_trait = false;
let mut parent_is_trait = false;
if all_generics.len() > 1 {
add_implicit_trait_predicate(
interner,
all_generics.last().unwrap().def(),
predicate_filter,
&mut predicates,
&mut parent_is_trait,
);
}
// We need to lower parent predicates first - see the comment below lowering of implicit `Sized` predicates
// for why.
let mut own_predicates_start = 0;
for &maybe_parent_generics in all_generics.iter().rev() {
let current_def_predicates_start = predicates.len();
// Collect only diagnostics from the child, not including parents.
ctx.diagnostics.clear();
if filter(maybe_parent_generics.def()) {
ctx.store = maybe_parent_generics.store();
for pred in maybe_parent_generics.where_predicates() {
tracing::debug!(?pred);
predicates.extend(ctx.lower_where_predicate(
pred,
false,
@ -1660,66 +1787,110 @@ where
predicate_filter,
));
}
if let Some(sized_trait) = sized_trait {
let mut add_sized_clause = |param_idx, param_id, param_data| {
let (
GenericParamId::TypeParamId(param_id),
GenericParamDataRef::TypeParamData(param_data),
) = (param_id, param_data)
else {
return;
};
if param_data.provenance == TypeParamProvenance::TraitSelf {
return;
}
let param_ty = Ty::new_param(interner, param_id, param_idx);
if ctx.unsized_types.contains(&param_ty) {
return;
}
let trait_ref = TraitRef::new_from_args(
interner,
sized_trait.into(),
GenericArgs::new_from_iter(interner, [param_ty.into()]),
);
let clause = Clause(Predicate::new(
interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Trait(TraitPredicate {
trait_ref,
polarity: rustc_type_ir::PredicatePolarity::Positive,
}),
)),
));
predicates.push(clause);
};
if generics.parent_generics().is_some_and(|parent| filter(parent.def())) {
generics.iter_parent().enumerate().for_each(
|(param_idx, (param_id, param_data))| {
add_sized_clause(param_idx as u32, param_id, param_data);
},
);
}
if filter(def) {
let parent_params_len = generics.len_parent();
generics.iter_self().enumerate().for_each(
|(param_idx, (param_id, param_data))| {
add_sized_clause(
(param_idx + parent_params_len) as u32,
param_id,
param_data,
);
},
);
}
}
// We do not clear `ctx.unsized_types`, as the `?Sized` clause of a child (e.g. an associated type) can
// be declared on the parent (e.g. the trait). It is nevertheless fine to register the implicit `Sized`
// predicates before lowering the child, as a child cannot define a `?Sized` predicate for its parent.
// But we do have to lower the parent first.
}
if maybe_parent_generics.def() == def {
own_predicates_start = current_def_predicates_start as u32;
}
}
let explicitly_unsized_tys = ctx.unsized_types;
add_implicit_trait_predicate(interner, def, predicate_filter, &mut predicates, &mut is_trait);
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
if let Some(sized_trait) = sized_trait {
let mut add_sized_clause = |param_idx, param_id, param_data| {
let (
GenericParamId::TypeParamId(param_id),
GenericParamDataRef::TypeParamData(param_data),
) = (param_id, param_data)
else {
return;
};
let diagnostics = create_diagnostics(ctx.diagnostics);
let predicates = GenericPredicates {
own_predicates_start,
is_trait,
parent_is_trait,
predicates: EarlyBinder::bind(predicates.into_boxed_slice()),
};
return (predicates, diagnostics);
if param_data.provenance == TypeParamProvenance::TraitSelf {
return;
}
let param_ty = Ty::new_param(interner, param_id, param_idx);
if explicitly_unsized_tys.contains(&param_ty) {
return;
}
let trait_ref = TraitRef::new_from_args(
interner,
sized_trait.into(),
GenericArgs::new_from_iter(interner, [param_ty.into()]),
);
let clause = Clause(Predicate::new(
interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Trait(TraitPredicate {
trait_ref,
polarity: rustc_type_ir::PredicatePolarity::Positive,
}),
)),
));
predicates.push(clause);
};
if generics.parent_generics().is_some_and(|parent| filter(parent.def())) {
generics.iter_parent().enumerate().for_each(|(param_idx, (param_id, param_data))| {
add_sized_clause(param_idx as u32, param_id, param_data);
});
}
if filter(def) {
let parent_params_len = generics.len_parent();
generics.iter_self().enumerate().for_each(|(param_idx, (param_id, param_data))| {
add_sized_clause((param_idx + parent_params_len) as u32, param_id, param_data);
});
/// If `def` is a trait (and all predicates were requested), pushes the implicit
/// `Self: Trait` predicate onto `predicates` and records that fact in `set_is_trait`.
/// For non-traits (or `PredicateFilter::SelfTrait`) this is a no-op.
fn add_implicit_trait_predicate<'db>(
    interner: DbInterner<'db>,
    def: GenericDefId,
    predicate_filter: PredicateFilter,
    predicates: &mut Vec<Clause<'db>>,
    set_is_trait: &mut bool,
) {
    // For traits, add `Self: Trait` predicate. This is
    // not part of the predicates that a user writes, but it
    // is something that one must prove in order to invoke a
    // method or project an associated type.
    //
    // In the chalk setup, this predicate is not part of the
    // "predicates" for a trait item. But it is useful in
    // rustc because if you directly (e.g.) invoke a trait
    // method like `Trait::method(...)`, you must naturally
    // prove that the trait applies to the types that were
    // used, and adding the predicate into this list ensures
    // that this is done.
    if let GenericDefId::TraitId(def_id) = def
        && predicate_filter == PredicateFilter::All
    {
        *set_is_trait = true;
        predicates.push(TraitRef::identity(interner, def_id.into()).upcast(interner));
    }
}
// FIXME: rustc gathers more predicates by recursing through resulting trait predicates.
// See https://github.com/rust-lang/rust/blob/76c5ed2847cdb26ef2822a3a165d710f6b772217/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L689-L715
(
GenericPredicates(predicates.is_empty().not().then(|| predicates.into())),
create_diagnostics(ctx.diagnostics),
)
}
/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound.
@ -2112,7 +2283,8 @@ fn named_associated_type_shorthand_candidates<'db, R>(
|pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0),
)
.0
.deref()
.predicates
.instantiate_identity()
{
tracing::debug!(?pred);
let sup_trait_ref = match pred.kind().skip_binder() {
@ -2158,10 +2330,11 @@ fn named_associated_type_shorthand_candidates<'db, R>(
}
let predicates =
db.generic_predicates_for_param(def, param_id.into(), assoc_name.clone());
generic_predicates_for_param(db, def, param_id.into(), assoc_name.clone());
predicates
.iter()
.find_map(|pred| match (*pred).kind().skip_binder() {
.as_ref()
.iter_identity_copied()
.find_map(|pred| match pred.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate),
_ => None,
})

View file

@ -774,7 +774,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
}
}
fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db> {
fn parent_arg(&mut self, _param_idx: u32, param_id: GenericParamId) -> GenericArg<'db> {
match param_id {
GenericParamId::TypeParamId(_) => {
Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into()
@ -992,7 +992,7 @@ pub(crate) trait GenericArgsLowerer<'db> {
preceding_args: &[GenericArg<'db>],
) -> GenericArg<'db>;
fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db>;
fn parent_arg(&mut self, param_idx: u32, param_id: GenericParamId) -> GenericArg<'db>;
}
/// Returns true if there was an error.
@ -1129,7 +1129,9 @@ pub(crate) fn substs_from_args_and_bindings<'db>(
let mut substs = Vec::with_capacity(def_generics.len());
substs.extend(def_generics.iter_parent_id().map(|id| ctx.parent_arg(id)));
substs.extend(
def_generics.iter_parent_id().enumerate().map(|(idx, id)| ctx.parent_arg(idx as u32, id)),
);
let mut args = args_slice.iter().enumerate().peekable();
let mut params = def_generics.iter_self().peekable();

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,616 @@
//! Confirmation step of method selection, meaning ensuring the selected candidate
//! is valid and registering all obligations.
use hir_def::{
FunctionId, GenericDefId, GenericParamId, ItemContainerId, TraitId,
expr_store::path::{GenericArg as HirGenericArg, GenericArgs as HirGenericArgs},
hir::{ExprId, generics::GenericParamDataRef},
lang_item::LangItem,
};
use rustc_type_ir::{
TypeFoldable,
elaborate::elaborate,
inherent::{BoundExistentialPredicates, IntoKind, SliceLike, Ty as _},
};
use tracing::debug;
use crate::{
Adjust, Adjustment, AutoBorrow, IncorrectGenericsLenKind, InferenceDiagnostic,
LifetimeElisionKind, PointerCast,
db::HirDatabase,
infer::{AllowTwoPhase, AutoBorrowMutability, InferenceContext, TypeMismatch},
lower::{
GenericPredicates,
path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings},
},
method_resolution::{CandidateId, MethodCallee, probe},
next_solver::{
Binder, Clause, ClauseKind, Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, FnSig,
GenericArg, GenericArgs, ParamConst, PolyExistentialTraitRef, PolyTraitRef, Region,
TraitRef, Ty, TyKind,
infer::{
BoundRegionConversionTime, InferCtxt,
traits::{ObligationCause, PredicateObligation},
},
util::{clauses_as_obligations, upcast_choices},
},
};
/// Short-lived state for confirming a single picked method candidate.
struct ConfirmContext<'a, 'b, 'db> {
    // The inference context the confirmation writes its results into.
    ctx: &'a mut InferenceContext<'b, 'db>,
    // The method being confirmed.
    candidate: FunctionId,
    // The method-call expression, used for diagnostics and mismatch recording.
    expr: ExprId,
}
/// Outcome of confirming a method pick.
#[derive(Debug)]
pub(crate) struct ConfirmResult<'db> {
    // The resolved method together with its instantiated args and signature.
    pub(crate) callee: MethodCallee<'db>,
    // Set when the method's predicates require an illegal `Self: Sized` bound
    // on a trait object (see `predicates_require_illegal_sized_bound`).
    pub(crate) illegal_sized_bound: bool,
    // Receiver adjustments (autoderefs, autoref, pointer casts) to record.
    pub(crate) adjustments: Box<[Adjustment<'db>]>,
}
impl<'a, 'db> InferenceContext<'a, 'db> {
    /// Confirms a method `pick` produced by probing: adjusts the receiver,
    /// instantiates the method's generics and signature, and registers the
    /// resulting obligations.
    ///
    /// Panics if the pick is not a function (confirmation only applies to
    /// method calls, not path lookups).
    pub(crate) fn confirm_method(
        &mut self,
        pick: &probe::Pick<'db>,
        unadjusted_self_ty: Ty<'db>,
        expr: ExprId,
        generic_args: Option<&HirGenericArgs>,
    ) -> ConfirmResult<'db> {
        debug!(
            "confirm(unadjusted_self_ty={:?}, pick={:?}, generic_args={:?})",
            unadjusted_self_ty, pick, generic_args,
        );

        let CandidateId::FunctionId(candidate) = pick.item else {
            panic!("confirmation is only done for method calls, not path lookups");
        };
        let mut confirm_cx = ConfirmContext::new(self, candidate, expr);
        confirm_cx.confirm(unadjusted_self_ty, pick, generic_args)
    }
}
impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
    /// Creates a confirmation context for one candidate of one method-call expression.
    fn new(
        ctx: &'a mut InferenceContext<'b, 'db>,
        candidate: FunctionId,
        expr: ExprId,
    ) -> ConfirmContext<'a, 'b, 'db> {
        ConfirmContext { ctx, candidate, expr }
    }
    /// Shorthand for the database backing the inference context.
    #[inline]
    fn db(&self) -> &'db dyn HirDatabase {
        self.ctx.table.infer_ctxt.interner.db
    }

    /// Shorthand for the interner of the underlying inference context.
    #[inline]
    fn interner(&self) -> DbInterner<'db> {
        self.ctx.table.infer_ctxt.interner
    }

    /// Shorthand for the underlying `InferCtxt`.
    #[inline]
    fn infcx(&self) -> &InferCtxt<'db> {
        &self.ctx.table.infer_ctxt
    }
    /// Performs the actual confirmation: receiver adjustment, generic-arg
    /// instantiation, signature instantiation, receiver unification, and
    /// obligation registration, in that order (the order is significant — see
    /// the SUBTLE comment below).
    fn confirm(
        &mut self,
        unadjusted_self_ty: Ty<'db>,
        pick: &probe::Pick<'db>,
        generic_args: Option<&HirGenericArgs>,
    ) -> ConfirmResult<'db> {
        // Adjust the self expression the user provided and obtain the adjusted type.
        let (self_ty, adjustments) = self.adjust_self_ty(unadjusted_self_ty, pick);

        // Create generic args for the method's type parameters.
        let rcvr_args = self.fresh_receiver_args(self_ty, pick);
        let all_args = self.instantiate_method_args(generic_args, rcvr_args);
        debug!("rcvr_args={rcvr_args:?}, all_args={all_args:?}");

        // Create the final signature for the method, replacing late-bound regions.
        let (method_sig, method_predicates) =
            self.instantiate_method_sig(pick, all_args.as_slice());

        // If there is a `Self: Sized` bound and `Self` is a trait object, it is possible that
        // something which derefs to `Self` actually implements the trait and the caller
        // wanted to make a static dispatch on it but forgot to import the trait.
        // See test `tests/ui/issues/issue-35976.rs`.
        //
        // In that case, we'll error anyway, but we'll also re-run the search with all traits
        // in scope, and if we find another method which can be used, we'll output an
        // appropriate hint suggesting to import the trait.
        //
        // Fill the method's own (non-receiver) params with identity args so the
        // sized-bound check below can instantiate the full predicate list.
        let filler_args = GenericArgs::fill_rest(
            self.interner(),
            self.candidate.into(),
            rcvr_args,
            |index, id, _| match id {
                GenericParamId::TypeParamId(id) => Ty::new_param(self.interner(), id, index).into(),
                GenericParamId::ConstParamId(id) => {
                    Const::new_param(self.interner(), ParamConst { id, index }).into()
                }
                GenericParamId::LifetimeParamId(id) => {
                    Region::new_early_param(self.interner(), EarlyParamRegion { id, index }).into()
                }
            },
        );
        let illegal_sized_bound = self.predicates_require_illegal_sized_bound(
            GenericPredicates::query_all(self.db(), self.candidate.into())
                .iter_instantiated_copied(self.interner(), filler_args.as_slice()),
        );

        // Unify the (adjusted) self type with what the method expects.
        //
        // SUBTLE: if we want good error messages, because of "guessing" while matching
        // traits, no trait system method can be called before this point because they
        // could alter our Self-type, except for normalizing the receiver from the
        // signature (which is also done during probing).
        let method_sig_rcvr = method_sig.inputs().as_slice()[0];
        debug!(
            "confirm: self_ty={:?} method_sig_rcvr={:?} method_sig={:?}",
            self_ty, method_sig_rcvr, method_sig
        );
        self.unify_receivers(self_ty, method_sig_rcvr, pick);

        // Make sure nobody calls `drop()` explicitly.
        self.check_for_illegal_method_calls();

        // Lint when an item is shadowing a supertrait item.
        self.lint_shadowed_supertrait_items(pick);

        // Add any trait/regions obligations specified on the method's type parameters.
        // We won't add these if we encountered an illegal sized bound, so that we can use
        // a custom error in that case.
        if !illegal_sized_bound {
            self.add_obligations(method_sig, all_args, method_predicates);
        }

        // Create the final `MethodCallee`.
        let callee = MethodCallee { def_id: self.candidate, args: all_args, sig: method_sig };
        ConfirmResult { callee, illegal_sized_bound, adjustments }
    }
///////////////////////////////////////////////////////////////////////////
// ADJUSTMENTS
    /// Replays the autoderef steps recorded in `pick` on the receiver type and
    /// applies the pick's autoref / raw-pointer adjustment, returning the final
    /// receiver type together with the adjustment list to record.
    fn adjust_self_ty(
        &mut self,
        unadjusted_self_ty: Ty<'db>,
        pick: &probe::Pick<'db>,
    ) -> (Ty<'db>, Box<[Adjustment<'db>]>) {
        // Commit the autoderefs by calling `autoderef` again, but this
        // time writing the results into the various typeck results.
        let mut autoderef = self.ctx.table.autoderef_with_tracking(unadjusted_self_ty);
        let Some((mut target, n)) = autoderef.nth(pick.autoderefs) else {
            // Autoderef no longer reaches as deep as during probing; bail with an error type.
            return (Ty::new_error(self.interner(), ErrorGuaranteed), Box::new([]));
        };
        assert_eq!(n, pick.autoderefs);

        let mut adjustments =
            self.ctx.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok());
        match pick.autoref_or_ptr_adjustment {
            Some(probe::AutorefOrPtrAdjustment::Autoref { mutbl, unsize }) => {
                let region = self.infcx().next_region_var();
                // Type we're wrapping in a reference, used later for unsizing
                let base_ty = target;

                target = Ty::new_ref(self.interner(), region, target, mutbl);

                // Method call receivers are the primary use case
                // for two-phase borrows.
                let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);

                adjustments
                    .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target });

                if unsize {
                    // `&[T; N]` receiver unsized to `&[T]`; only arrays may carry this flag.
                    let unsized_ty = if let TyKind::Array(elem_ty, _) = base_ty.kind() {
                        Ty::new_slice(self.interner(), elem_ty)
                    } else {
                        panic!(
                            "AutorefOrPtrAdjustment's unsize flag should only be set for array ty, found {:?}",
                            base_ty
                        )
                    };
                    target = Ty::new_ref(self.interner(), region, unsized_ty, mutbl.into());
                    adjustments
                        .push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target });
                }
            }
            Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => {
                // `*mut T` receiver adjusted to `*const T`.
                target = match target.kind() {
                    TyKind::RawPtr(ty, mutbl) => {
                        assert!(mutbl.is_mut());
                        Ty::new_imm_ptr(self.interner(), ty)
                    }
                    other => panic!("Cannot adjust receiver type {other:?} to const ptr"),
                };
                adjustments.push(Adjustment {
                    kind: Adjust::Pointer(PointerCast::MutToConstPointer),
                    target,
                });
            }
            None => {}
        }

        (target, adjustments.into_boxed_slice())
    }
    /// Returns a set of generic parameters for the method *receiver* where all type and region
    /// parameters are instantiated with fresh variables. These generic parameters do not include
    /// any parameters declared on the method itself.
    ///
    /// Note that these generic parameters may include late-bound regions from the impl level. If
    /// so, these are instantiated later in the `instantiate_method_sig` routine.
    fn fresh_receiver_args(
        &mut self,
        self_ty: Ty<'db>,
        pick: &probe::Pick<'db>,
    ) -> GenericArgs<'db> {
        match pick.kind {
            probe::InherentImplPick(impl_def_id) => {
                self.infcx().fresh_args_for_item(impl_def_id.into())
            }
            probe::ObjectPick(trait_def_id) => {
                // If the trait is not object safe (specifically, we care about when
                // the receiver is not valid), then there's a chance that we will not
                // actually be able to recover the object by derefing the receiver like
                // we should if it were valid.
                if self.db().dyn_compatibility_of_trait(trait_def_id).is_some() {
                    return GenericArgs::error_for_item(self.interner(), trait_def_id.into());
                }
                self.extract_existential_trait_ref(self_ty, |this, object_ty, principal| {
                    // The object data has no entry for the Self
                    // Type. For the purposes of this method call, we
                    // instantiate the object type itself. This
                    // wouldn't be a sound instantiation in all cases,
                    // since each instance of the object type is a
                    // different existential and hence could match
                    // distinct types (e.g., if `Self` appeared as an
                    // argument type), but those cases have already
                    // been ruled out when we deemed the trait to be
                    // "dyn-compatible".
                    let original_poly_trait_ref =
                        principal.with_self_ty(this.interner(), object_ty);
                    let upcast_poly_trait_ref = this.upcast(original_poly_trait_ref, trait_def_id);
                    let upcast_trait_ref =
                        this.instantiate_binder_with_fresh_vars(upcast_poly_trait_ref);
                    debug!(
                        "original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}",
                        original_poly_trait_ref, upcast_trait_ref, trait_def_id
                    );
                    upcast_trait_ref.args
                })
            }
            probe::TraitPick(trait_def_id) => {
                // Make a trait reference `$0 : Trait<$1...$n>`
                // consisting entirely of type variables. Later on in
                // the process we will unify the transformed-self-type
                // of the method with the actual type in order to
                // unify some of these variables.
                self.infcx().fresh_args_for_item(trait_def_id.into())
            }
            probe::WhereClausePick(poly_trait_ref) => {
                // Where clauses can have bound regions in them. We need to instantiate
                // those to convert from a poly-trait-ref to a trait-ref.
                self.instantiate_binder_with_fresh_vars(poly_trait_ref).args
            }
        }
    }
    /// Autoderefs `self_ty` until a trait-object type is found and invokes
    /// `closure` with that object type and its principal trait ref.
    ///
    /// Panics when no step of the deref chain is a trait object — callers only
    /// use this for `ObjectPick`s, where one must exist.
    fn extract_existential_trait_ref<R, F>(&self, self_ty: Ty<'db>, mut closure: F) -> R
    where
        F: FnMut(&ConfirmContext<'a, 'b, 'db>, Ty<'db>, PolyExistentialTraitRef<'db>) -> R,
    {
        // If we specified that this is an object method, then the
        // self-type ought to be something that can be dereferenced to
        // yield an object-type (e.g., `&Object` or `Box<Object>`
        // etc).
        let mut autoderef = self.ctx.table.autoderef(self_ty);

        // We don't need to gate this behind arbitrary self types
        // per se, but it does make things a bit more gated.
        if self.ctx.unstable_features.arbitrary_self_types
            || self.ctx.unstable_features.arbitrary_self_types_pointers
        {
            autoderef = autoderef.use_receiver_trait();
        }

        autoderef
            .include_raw_pointers()
            .find_map(|(ty, _)| match ty.kind() {
                TyKind::Dynamic(data, ..) => Some(closure(
                    self,
                    ty,
                    data.principal().expect("calling trait method on empty object?"),
                )),
                _ => None,
            })
            .unwrap_or_else(|| {
                panic!("self-type `{:?}` for ObjectPick never dereferenced to an object", self_ty)
            })
    }
    /// Builds the full generic-arg list for the method: `parent_args` (from the
    /// receiver's impl/trait) followed by the method's own args, lowered from the
    /// user-written `generic_args` or filled in with fresh inference variables.
    fn instantiate_method_args(
        &mut self,
        generic_args: Option<&HirGenericArgs>,
        parent_args: GenericArgs<'db>,
    ) -> GenericArgs<'db> {
        // Adapter that feeds diagnostics into the inference result and resolves
        // parent params positionally from `parent_args`.
        struct LowererCtx<'a, 'b, 'db> {
            ctx: &'a mut InferenceContext<'b, 'db>,
            expr: ExprId,
            parent_args: &'a [GenericArg<'db>],
        }

        impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, 'db> {
            fn report_len_mismatch(
                &mut self,
                def: GenericDefId,
                provided_count: u32,
                expected_count: u32,
                kind: IncorrectGenericsLenKind,
            ) {
                self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsLen {
                    expr: self.expr,
                    provided_count,
                    expected_count,
                    kind,
                    def,
                });
            }

            fn report_arg_mismatch(
                &mut self,
                param_id: GenericParamId,
                arg_idx: u32,
                has_self_arg: bool,
            ) {
                self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsOrder {
                    expr: self.expr,
                    param_id,
                    arg_idx,
                    has_self_arg,
                });
            }

            // Lower an explicitly-written generic arg against its matching param.
            fn provided_kind(
                &mut self,
                param_id: GenericParamId,
                param: GenericParamDataRef<'_>,
                arg: &HirGenericArg,
            ) -> GenericArg<'db> {
                match (param, arg) {
                    (
                        GenericParamDataRef::LifetimeParamData(_),
                        HirGenericArg::Lifetime(lifetime),
                    ) => self.ctx.make_body_lifetime(*lifetime).into(),
                    (GenericParamDataRef::TypeParamData(_), HirGenericArg::Type(type_ref)) => {
                        self.ctx.make_body_ty(*type_ref).into()
                    }
                    (GenericParamDataRef::ConstParamData(_), HirGenericArg::Const(konst)) => {
                        let GenericParamId::ConstParamId(const_id) = param_id else {
                            unreachable!("non-const param ID for const param");
                        };
                        let const_ty = self.ctx.db.const_param_ty_ns(const_id);
                        self.ctx.make_body_const(*konst, const_ty).into()
                    }
                    _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"),
                }
            }

            fn provided_type_like_const(
                &mut self,
                const_ty: Ty<'db>,
                arg: TypeLikeConst<'_>,
            ) -> Const<'db> {
                match arg {
                    TypeLikeConst::Path(path) => self.ctx.make_path_as_body_const(path, const_ty),
                    TypeLikeConst::Infer => self.ctx.table.next_const_var(),
                }
            }

            fn inferred_kind(
                &mut self,
                _def: GenericDefId,
                param_id: GenericParamId,
                _param: GenericParamDataRef<'_>,
                _infer_args: bool,
                _preceding_args: &[GenericArg<'db>],
            ) -> GenericArg<'db> {
                // Always create an inference var, even when `infer_args == false`. This helps with diagnostics,
                // and I think it's also required in the presence of `impl Trait` (that must be inferred).
                self.ctx.table.next_var_for_param(param_id)
            }

            fn parent_arg(&mut self, param_idx: u32, _param_id: GenericParamId) -> GenericArg<'db> {
                self.parent_args[param_idx as usize]
            }

            fn report_elided_lifetimes_in_path(
                &mut self,
                _def: GenericDefId,
                _expected_count: u32,
                _hard_error: bool,
            ) {
                unreachable!("we set `LifetimeElisionKind::Infer`")
            }

            fn report_elision_failure(&mut self, _def: GenericDefId, _expected_count: u32) {
                unreachable!("we set `LifetimeElisionKind::Infer`")
            }

            fn report_missing_lifetime(&mut self, _def: GenericDefId, _expected_count: u32) {
                unreachable!("we set `LifetimeElisionKind::Infer`")
            }
        }

        substs_from_args_and_bindings(
            self.db(),
            self.ctx.body,
            generic_args,
            self.candidate.into(),
            true,
            LifetimeElisionKind::Infer,
            false,
            None,
            &mut LowererCtx { ctx: self.ctx, expr: self.expr, parent_args: parent_args.as_slice() },
        )
    }
/// Unifies the adjusted receiver type (`self_ty`) with the method's declared
/// `self` type (`method_self_ty`) for the picked candidate, registering the
/// resulting obligations on success.
fn unify_receivers(
    &mut self,
    self_ty: Ty<'db>,
    method_self_ty: Ty<'db>,
    pick: &probe::Pick<'db>,
) {
    debug!(
        "unify_receivers: self_ty={:?} method_self_ty={:?} pick={:?}",
        self_ty, method_self_ty, pick
    );
    let cause = ObligationCause::new();
    // `sup`: the declared self type may be a supertype of the actual receiver.
    match self.ctx.table.at(&cause).sup(method_self_ty, self_ty) {
        Ok(infer_ok) => {
            self.ctx.table.register_infer_ok(infer_ok);
        }
        Err(_) => {
            // NOTE(review): the mismatch is only recorded when the
            // `arbitrary_self_types` feature is enabled — presumably because
            // without the feature such a failure is surfaced through another
            // path; confirm against the probe/diagnostics code.
            if self.ctx.unstable_features.arbitrary_self_types {
                self.ctx.result.type_mismatches.insert(
                    self.expr.into(),
                    TypeMismatch { expected: method_self_ty, actual: self_ty },
                );
            }
        }
    }
}
// NOTE: this returns the *unnormalized* predicates and method sig. Because of
// inference guessing, the predicates and method signature can't be normalized
// until we unify the `Self` type.
/// Instantiates the picked method's signature and where-clauses with
/// `all_args` (parent args + args for the method's own generics), turning the
/// where-clauses into obligations and replacing the signature's late-bound
/// lifetimes with fresh inference variables.
fn instantiate_method_sig<'c>(
    &mut self,
    pick: &probe::Pick<'db>,
    all_args: &'c [GenericArg<'db>],
) -> (FnSig<'db>, impl Iterator<Item = PredicateObligation<'db>> + use<'c, 'db>) {
    debug!("instantiate_method_sig(pick={:?}, all_args={:?})", pick, all_args);
    // Instantiate the bounds on the method with the
    // type/early-bound-regions instantiations performed. There can
    // be no late-bound regions appearing here.
    let def_id = self.candidate;
    let method_predicates = clauses_as_obligations(
        GenericPredicates::query_all(self.db(), def_id.into())
            .iter_instantiated_copied(self.interner(), all_args),
        ObligationCause::new(),
        self.ctx.table.trait_env.env,
    );
    let sig =
        self.db().callable_item_signature(def_id.into()).instantiate(self.interner(), all_args);
    debug!("type scheme instantiated, sig={:?}", sig);
    // Late-bound lifetimes (the binder of the signature) become fresh vars.
    let sig = self.instantiate_binder_with_fresh_vars(sig);
    debug!("late-bound lifetimes from method instantiated, sig={:?}", sig);
    (sig, method_predicates)
}
/// Registers the method's where-clause obligations plus well-formedness
/// obligations for its generic args and signature types.
fn add_obligations(
    &mut self,
    sig: FnSig<'db>,
    all_args: GenericArgs<'db>,
    method_predicates: impl Iterator<Item = PredicateObligation<'db>>,
) {
    debug!("add_obligations: sig={:?} all_args={:?}", sig, all_args);
    self.ctx.table.register_predicates(method_predicates);
    // this is a projection from a trait reference, so we have to
    // make sure that the trait reference inputs are well-formed.
    self.ctx.table.add_wf_bounds(all_args);
    // the function type must also be well-formed (this is not
    // implied by the args being well-formed because of inherent
    // impls and late-bound regions - see issue #28609).
    for ty in sig.inputs_and_output {
        self.ctx.table.register_wf_obligation(ty.into(), ObligationCause::new());
    }
}
///////////////////////////////////////////////////////////////////////////
// MISCELLANY
/// Returns `true` if the elaborated predicate set contains a `dyn ...: Sized`
/// bound, which can never hold and hence makes the call illegal.
fn predicates_require_illegal_sized_bound(
    &self,
    predicates: impl Iterator<Item = Clause<'db>>,
) -> bool {
    // Without a `Sized` lang item there can be no such bound at all.
    let Some(sized_def_id) =
        LangItem::Sized.resolve_trait(self.db(), self.ctx.resolver.krate())
    else {
        return false;
    };
    // Walk the elaborated predicates (regions are irrelevant here) and look
    // for a `Sized` trait predicate whose self type is a trait object.
    elaborate(self.interner(), predicates).any(|pred| match pred.kind().skip_binder() {
        ClauseKind::Trait(trait_pred) if trait_pred.def_id().0 == sized_def_id => {
            matches!(trait_pred.self_ty().kind(), TyKind::Dynamic(..))
        }
        _ => false,
    })
}
fn check_for_illegal_method_calls(&self) {
// Disallow calls to the method `drop` defined in the `Drop` trait.
if let ItemContainerId::TraitId(trait_def_id) = self.candidate.loc(self.db()).container
&& LangItem::Drop
.resolve_trait(self.db(), self.ctx.resolver.krate())
.is_some_and(|drop_trait| drop_trait == trait_def_id)
{
// FIXME: Report an error.
}
}
/// Lints picks that shadowed supertrait items of the same name; currently the
/// shadowing is detected by probing but the lint itself is not yet emitted.
fn lint_shadowed_supertrait_items(&self, pick: &probe::Pick<'_>) {
    if !pick.shadowed_candidates.is_empty() {
        // FIXME: Emit the lint.
    }
}
/// Upcasts `source_trait_ref` to a reference to `target_trait_def_id` (one of
/// its supertraits). Probing has already ruled out ambiguity, so exactly one
/// choice is expected; otherwise we recover with an error-args trait ref.
fn upcast(
    &self,
    source_trait_ref: PolyTraitRef<'db>,
    target_trait_def_id: TraitId,
) -> PolyTraitRef<'db> {
    let choices = upcast_choices(self.interner(), source_trait_ref, target_trait_def_id);
    // must be exactly one trait ref or we'd get an ambig error etc
    match choices.as_slice() {
        &[only_choice] => only_choice,
        _ => Binder::dummy(TraitRef::new_from_args(
            self.interner(),
            target_trait_def_id.into(),
            GenericArgs::error_for_item(self.interner(), target_trait_def_id.into()),
        )),
    }
}
/// Replaces the late-bound variables of `value` (e.g. a method signature's
/// late-bound lifetimes) with fresh inference variables, removing the binder.
fn instantiate_binder_with_fresh_vars<T>(&self, value: Binder<'db, T>) -> T
where
    T: TypeFoldable<DbInterner<'db>> + Copy,
{
    self.infcx().instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, value)
}
}

File diff suppressed because it is too large Load diff

View file

@ -12,7 +12,7 @@ use hir_def::{
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_ast_ir::Mutability;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{impl_from, never};
@ -22,7 +22,6 @@ use crate::{
db::{HirDatabase, InternedClosureId},
display::{DisplayTarget, HirDisplay},
infer::PointerCast,
lang_items::is_box,
next_solver::{
Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
@ -185,7 +184,7 @@ impl<V, T> ProjectionElem<V, T> {
match self {
ProjectionElem::Deref => match base.kind() {
TyKind::RawPtr(inner, _) | TyKind::Ref(_, inner, _) => inner,
TyKind::Adt(adt_def, subst) if is_box(db, adt_def.def_id().0) => subst.type_at(0),
TyKind::Adt(adt_def, subst) if adt_def.is_box() => subst.type_at(0),
_ => {
never!(
"Overloaded deref on type {} is not a projection",

View file

@ -544,7 +544,7 @@ fn main() {
fn for_loop() {
check_pass(
r#"
//- minicore: iterator, add
//- minicore: iterator, add, builtin_impls
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
@ -706,7 +706,7 @@ fn main() {
fn closure_state() {
check_pass(
r#"
//- minicore: fn, add, copy
//- minicore: fn, add, copy, builtin_impls
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}

View file

@ -33,6 +33,7 @@ use crate::{
infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy},
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
method_resolution::CandidateId,
mir::{
AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, BorrowKind, CastKind, Either, Expr,
FieldId, GenericArgs, Idx, InferenceResult, Local, LocalId, MemoryMap, MirBody, MirSpan,
@ -388,15 +389,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
);
Ok(Some(current))
}
Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => {
let Some((p, current)) =
self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
else {
return Ok(None);
};
let bk = BorrowKind::from_rustc(*m);
self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
Ok(Some(current))
Adjust::Borrow(AutoBorrow::Ref(m)) => self.lower_expr_to_place_with_borrow_adjust(
expr_id,
place,
current,
rest,
(*m).into(),
),
Adjust::Borrow(AutoBorrow::RawPtr(m)) => {
self.lower_expr_to_place_with_borrow_adjust(expr_id, place, current, rest, *m)
}
Adjust::Pointer(cast) => {
let Some((p, current)) =
@ -421,6 +422,24 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
}
fn lower_expr_to_place_with_borrow_adjust(
&mut self,
expr_id: ExprId,
place: Place<'db>,
current: BasicBlockId<'db>,
rest: &[Adjustment<'db>],
m: Mutability,
) -> Result<'db, Option<BasicBlockId<'db>>> {
let Some((p, current)) =
self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
else {
return Ok(None);
};
let bk = BorrowKind::from_rustc(m);
self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
Ok(Some(current))
}
fn lower_expr_to_place(
&mut self,
expr_id: ExprId,
@ -460,18 +479,14 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
let pr =
if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) {
match assoc {
hir_def::AssocItemId::ConstId(c) => {
CandidateId::ConstId(c) => {
self.lower_const(c.into(), current, place, subst, expr_id.into())?;
return Ok(Some(current));
}
hir_def::AssocItemId::FunctionId(_) => {
CandidateId::FunctionId(_) => {
// FnDefs are zero sized, no action is needed.
return Ok(Some(current));
}
hir_def::AssocItemId::TypeAliasId(_) => {
// FIXME: If it is unreachable, use proper error instead of `not_supported`.
not_supported!("associated functions and types")
}
}
} else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
match variant {

View file

@ -193,7 +193,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
return self.lower_overloaded_deref(
current,
p,
self.expr_ty_after_adjustments(*expr),
self.expr_ty_without_adjust(*expr),
self.expr_ty_without_adjust(expr_id),
expr_id.into(),
'b: {

View file

@ -1,9 +1,8 @@
//! MIR lowering for patterns
use hir_def::{AssocItemId, hir::ExprId, signatures::VariantFields};
use hir_def::{hir::ExprId, signatures::VariantFields};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
use crate::next_solver::GenericArgs;
use crate::{
BindingMode,
mir::{
@ -16,6 +15,7 @@ use crate::{
},
},
};
use crate::{method_resolution::CandidateId, next_solver::GenericArgs};
macro_rules! not_supported {
($x: expr) => {
@ -393,7 +393,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
}
let (c, subst) = 'b: {
if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern)
&& let AssocItemId::ConstId(c) = x.0
&& let CandidateId::ConstId(c) = x.0
{
break 'b (c, x.1);
}

View file

@ -1,8 +1,9 @@
//! Definition of `SolverDefId`
use hir_def::{
AdtId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
GeneralConstId, GenericDefId, ImplId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
AdtId, AttrDefId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
GeneralConstId, GenericDefId, HasModule, ImplId, ModuleId, StaticId, StructId, TraitId,
TypeAliasId, UnionId, db::DefDatabase,
};
use rustc_type_ir::inherent;
use stdx::impl_from;
@ -154,6 +155,28 @@ impl From<DefWithBodyId> for SolverDefId {
}
}
/// Conversion from a solver-level def id to an attribute-carrying def id.
/// Synthetic items (closures, coroutines, opaque types) carry no attributes,
/// so they convert to `Err(())`.
impl TryFrom<SolverDefId> for AttrDefId {
    type Error = ();
    #[inline]
    fn try_from(value: SolverDefId) -> Result<Self, Self::Error> {
        match value {
            SolverDefId::AdtId(it) => Ok(it.into()),
            SolverDefId::ConstId(it) => Ok(it.into()),
            SolverDefId::FunctionId(it) => Ok(it.into()),
            SolverDefId::ImplId(it) => Ok(it.into()),
            SolverDefId::StaticId(it) => Ok(it.into()),
            SolverDefId::TraitId(it) => Ok(it.into()),
            SolverDefId::TypeAliasId(it) => Ok(it.into()),
            SolverDefId::EnumVariantId(it) => Ok(it.into()),
            // Both constructor flavors map to their defining ADT.
            SolverDefId::Ctor(Ctor::Struct(it)) => Ok(it.into()),
            SolverDefId::Ctor(Ctor::Enum(it)) => Ok(it.into()),
            SolverDefId::InternedClosureId(_)
            | SolverDefId::InternedCoroutineId(_)
            | SolverDefId::InternedOpaqueTyId(_) => Err(()),
        }
    }
}
impl TryFrom<SolverDefId> for DefWithBodyId {
type Error = ();
@ -218,6 +241,28 @@ impl SolverDefId {
}
}
/// Module resolution for solver def ids; delegates to the underlying id, and
/// for body-local items (closures, coroutines, opaque types) resolves to the
/// module of the owning item.
impl HasModule for SolverDefId {
    fn module(&self, db: &dyn DefDatabase) -> ModuleId {
        match *self {
            SolverDefId::AdtId(id) => id.module(db),
            SolverDefId::ConstId(id) => id.module(db),
            SolverDefId::FunctionId(id) => id.module(db),
            SolverDefId::ImplId(id) => id.module(db),
            SolverDefId::StaticId(id) => id.module(db),
            SolverDefId::TraitId(id) => id.module(db),
            SolverDefId::TypeAliasId(id) => id.module(db),
            // `.loc(db).0` is the enclosing body owner.
            SolverDefId::InternedClosureId(id) => id.loc(db).0.module(db),
            SolverDefId::InternedCoroutineId(id) => id.loc(db).0.module(db),
            SolverDefId::InternedOpaqueTyId(id) => match id.loc(db) {
                crate::ImplTraitId::ReturnTypeImplTrait(owner, _) => owner.module(db),
                crate::ImplTraitId::TypeAliasImplTrait(owner, _) => owner.module(db),
            },
            // An enum-variant ctor and the variant itself share a module.
            SolverDefId::Ctor(Ctor::Enum(id)) | SolverDefId::EnumVariantId(id) => id.module(db),
            SolverDefId::Ctor(Ctor::Struct(id)) => id.module(db),
        }
    }
}
impl<'db> inherent::DefId<DbInterner<'db>> for SolverDefId {
fn as_local(self) -> Option<SolverDefId> {
Some(self)

View file

@ -5,7 +5,7 @@ use rustc_type_ir::{
TypeVisitableExt, inherent::IntoKind,
};
use crate::next_solver::BoundConst;
use crate::next_solver::{BoundConst, FxIndexMap};
use super::{
Binder, BoundRegion, BoundTy, Const, ConstKind, DbInterner, Predicate, Region, Ty, TyKind,
@ -158,3 +158,65 @@ pub fn fold_tys<'db, T: TypeFoldable<DbInterner<'db>>>(
t.fold_with(&mut Folder { interner, callback })
}
impl<'db> DbInterner<'db> {
    /// Replaces all regions bound by the given `Binder` with the
    /// results returned by the closure; the closure is expected to
    /// return a free region (relative to this binder), and hence the
    /// binder is removed in the return type. The closure is invoked
    /// once for each unique `BoundRegionKind`; multiple references to the
    /// same `BoundRegionKind` will reuse the previous result. A map is
    /// returned at the end with each bound region and the free region
    /// that replaced it.
    ///
    /// # Panics
    ///
    /// This method only replaces late bound regions. Any types or
    /// constants bound by `value` will cause an ICE.
    pub fn instantiate_bound_regions<T, F>(
        self,
        value: Binder<'db, T>,
        mut fld_r: F,
    ) -> (T, FxIndexMap<BoundRegion, Region<'db>>)
    where
        F: FnMut(BoundRegion) -> Region<'db>,
        T: TypeFoldable<DbInterner<'db>>,
    {
        let mut region_map = FxIndexMap::default();
        // Memoize: each distinct bound region is replaced at most once.
        let real_fld_r = |br: BoundRegion| *region_map.entry(br).or_insert_with(|| fld_r(br));
        let value = self.instantiate_bound_regions_uncached(value, real_fld_r);
        (value, region_map)
    }
    /// Like [`Self::instantiate_bound_regions`] but without memoization and
    /// without returning the replacement map; panics on bound types/consts.
    pub fn instantiate_bound_regions_uncached<T, F>(
        self,
        value: Binder<'db, T>,
        mut replace_regions: F,
    ) -> T
    where
        F: FnMut(BoundRegion) -> Region<'db>,
        T: TypeFoldable<DbInterner<'db>>,
    {
        let value = value.skip_binder();
        if !value.has_escaping_bound_vars() {
            // Fast path: nothing under the binder actually references it.
            value
        } else {
            let delegate = FnMutDelegate {
                regions: &mut replace_regions,
                types: &mut |b| panic!("unexpected bound ty in binder: {b:?}"),
                consts: &mut |b| panic!("unexpected bound ct in binder: {b:?}"),
            };
            let mut replacer = BoundVarReplacer::new(self, delegate);
            value.fold_with(&mut replacer)
        }
    }
    /// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also
    /// method lookup and a few other places where precise region relationships are not required.
    pub fn instantiate_bound_regions_with_erased<T>(self, value: Binder<'db, T>) -> T
    where
        T: TypeFoldable<DbInterner<'db>>,
    {
        self.instantiate_bound_regions(value, |_| Region::new_erased(self)).0
    }
}

View file

@ -10,8 +10,8 @@ use rustc_index::Idx;
use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar};
use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _};
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, CanonicalQueryInput, DebruijnIndex, Flags, InferConst, RegionKind,
TyVid, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags,
TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
};
use smallvec::SmallVec;
use tracing::debug;
@ -19,7 +19,7 @@ use tracing::debug;
use crate::next_solver::infer::InferCtxt;
use crate::next_solver::{
Binder, Canonical, CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, GenericArg,
ParamEnvAnd, Placeholder, Region, Ty, TyKind,
Placeholder, Region, Ty, TyKind,
};
/// When we canonicalize a value to form a query, we wind up replacing
@ -66,33 +66,19 @@ impl<'db> InferCtxt<'db> {
/// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#canonicalizing-the-query
pub fn canonicalize_query<V>(
&self,
value: ParamEnvAnd<'db, V>,
value: V,
query_state: &mut OriginalQueryValues<'db>,
) -> CanonicalQueryInput<DbInterner<'db>, ParamEnvAnd<'db, V>>
) -> Canonical<'db, V>
where
V: TypeFoldable<DbInterner<'db>>,
{
let (param_env, value) = value.into_parts();
// FIXME(#118965): We don't canonicalize the static lifetimes that appear in the
// `param_env` because they are treated differently by trait selection.
let canonical_param_env = Canonicalizer::canonicalize(
param_env,
self,
self.interner,
&CanonicalizeFreeRegionsOtherThanStatic,
query_state,
);
let canonical = Canonicalizer::canonicalize_with_base(
canonical_param_env,
Canonicalizer::canonicalize(
value,
self,
self.interner,
&CanonicalizeAllFreeRegions,
query_state,
)
.unchecked_map(|(param_env, value)| ParamEnvAnd { param_env, value });
CanonicalQueryInput { canonical, typing_mode: self.typing_mode() }
}
/// Canonicalizes a query *response* `V`. When we canonicalize a
@ -285,26 +271,6 @@ impl CanonicalizeMode for CanonicalizeAllFreeRegions {
}
}
struct CanonicalizeFreeRegionsOtherThanStatic;
impl CanonicalizeMode for CanonicalizeFreeRegionsOtherThanStatic {
fn canonicalize_free_region<'db>(
&self,
canonicalizer: &mut Canonicalizer<'_, 'db>,
r: Region<'db>,
) -> Region<'db> {
if r.is_static() { r } else { canonicalizer.canonical_var_for_region_in_root_universe(r) }
}
fn any(&self) -> bool {
true
}
fn preserve_universes(&self) -> bool {
false
}
}
struct Canonicalizer<'cx, 'db> {
/// Set to `None` to disable the resolution of inference variables.
infcx: &'cx InferCtxt<'db>,

View file

@ -1,21 +1,31 @@
//! This module contains code to instantiate new values into a
//! `Canonical<'tcx, T>`.
//! `Canonical<'db, T>`.
//!
//! For an overview of what canonicalization is and how it fits into
//! rustc, check out the [chapter in the rustc dev guide][c].
//!
//! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html
use std::{fmt::Debug, iter};
use crate::next_solver::{
BoundConst, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Clauses, Const, ConstKind,
DbInterner, GenericArg, Predicate, Region, RegionKind, Ty, TyKind, fold::FnMutDelegate,
BoundConst, BoundRegion, BoundTy, Canonical, CanonicalVarKind, CanonicalVarValues, Clauses,
Const, ConstKind, DbInterner, GenericArg, ParamEnv, Predicate, Region, RegionKind, Ty, TyKind,
fold::FnMutDelegate,
infer::{
InferCtxt, InferOk, InferResult,
canonical::{QueryRegionConstraints, QueryResponse, canonicalizer::OriginalQueryValues},
traits::{ObligationCause, PredicateObligations},
},
};
use rustc_hash::FxHashMap;
use rustc_index::{Idx as _, IndexVec};
use rustc_type_ir::{
BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable,
TypeVisitableExt,
BoundVar, BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder,
TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
inherent::{GenericArg as _, IntoKind, SliceLike},
};
use tracing::{debug, instrument};
pub trait CanonicalExt<'db, V> {
fn instantiate(&self, tcx: DbInterner<'db>, var_values: &CanonicalVarValues<'db>) -> V
@ -169,3 +179,331 @@ impl<'db, 'a> TypeFolder<DbInterner<'db>> for CanonicalInstantiator<'db, 'a> {
c.super_fold_with(self)
}
}
impl<'db> InferCtxt<'db> {
    /// A version of `make_canonicalized_query_response` that does
    /// not pack in obligations, for contexts that want to drop
    /// pending obligations instead of treating them as an ambiguity (e.g.
    /// typeck "probing" contexts).
    ///
    /// If you DO want to keep track of pending obligations (which
    /// include all region obligations, so this includes all cases
    /// that care about regions) with this function, you have to
    /// do it yourself, by e.g., having them be a part of the answer.
    pub fn make_query_response_ignoring_pending_obligations<T>(
        &self,
        inference_vars: CanonicalVarValues<'db>,
        answer: T,
    ) -> Canonical<'db, QueryResponse<'db, T>>
    where
        T: TypeFoldable<DbInterner<'db>>,
    {
        // While we ignore region constraints and pending obligations,
        // we do return constrained opaque types to avoid unconstrained
        // inference variables in the response. This is important as we want
        // to check that opaques in deref steps stay unconstrained.
        //
        // This doesn't handle the more general case for non-opaques as
        // ambiguous `Projection` obligations have same the issue.
        let opaque_types = self
            .inner
            .borrow_mut()
            .opaque_type_storage
            .iter_opaque_types()
            .map(|(k, v)| (k, v.ty))
            .collect();
        self.canonicalize_response(QueryResponse {
            var_values: inference_vars,
            region_constraints: QueryRegionConstraints::default(),
            opaque_types,
            value: answer,
        })
    }

    /// Given the (canonicalized) result to a canonical query,
    /// instantiates the result so it can be used, plugging in the
    /// values from the canonical query. (Note that the result may
    /// have been ambiguous; you should check the certainty level of
    /// the query before applying this function.)
    ///
    /// To get a good understanding of what is happening here, check
    /// out the [chapter in the rustc dev guide][c].
    ///
    /// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#processing-the-canonicalized-query-result
    pub fn instantiate_query_response_and_region_obligations<R>(
        &self,
        cause: &ObligationCause,
        param_env: ParamEnv<'db>,
        original_values: &OriginalQueryValues<'db>,
        query_response: &Canonical<'db, QueryResponse<'db, R>>,
    ) -> InferResult<'db, R>
    where
        // `Debug` is required because `query_response_instantiation` (called
        // below) bounds `R: Debug` for its `{:#?}` logging; without it this
        // method cannot compile.
        R: Debug + TypeFoldable<DbInterner<'db>>,
    {
        let InferOk { value: result_args, obligations } =
            self.query_response_instantiation(cause, param_env, original_values, query_response)?;

        // Replay the region constraints the query recorded, instantiated into
        // the caller's inference context.
        for predicate in &query_response.value.region_constraints.outlives {
            let predicate = instantiate_value(self.interner, &result_args, *predicate);
            self.register_outlives_constraint(predicate);
        }
        for assumption in &query_response.value.region_constraints.assumptions {
            let assumption = instantiate_value(self.interner, &result_args, *assumption);
            self.register_region_assumption(assumption);
        }

        let user_result: R =
            query_response
                .instantiate_projected(self.interner, &result_args, |q_r| q_r.value.clone());

        Ok(InferOk { value: user_result, obligations })
    }

    /// Given the original values and the (canonicalized) result from
    /// computing a query, returns an instantiation that can be applied
    /// to the query result to convert the result back into the
    /// original namespace.
    ///
    /// The instantiation also comes accompanied with subobligations
    /// that arose from unification; these might occur if (for
    /// example) we are doing lazy normalization and the value
    /// assigned to a type variable is unified with an unnormalized
    /// projection.
    fn query_response_instantiation<R>(
        &self,
        cause: &ObligationCause,
        param_env: ParamEnv<'db>,
        original_values: &OriginalQueryValues<'db>,
        query_response: &Canonical<'db, QueryResponse<'db, R>>,
    ) -> InferResult<'db, CanonicalVarValues<'db>>
    where
        R: Debug + TypeFoldable<DbInterner<'db>>,
    {
        debug!(
            "query_response_instantiation(original_values={:#?}, query_response={:#?})",
            original_values, query_response,
        );

        // First take a cheap guess at the instantiation ...
        let mut value = self.query_response_instantiation_guess(
            cause,
            param_env,
            original_values,
            query_response,
        )?;

        // ... then unify the guess against the actual values to make it sound.
        value.obligations.extend(
            self.unify_query_response_instantiation_guess(
                cause,
                param_env,
                original_values,
                &value.value,
                query_response,
            )?
            .into_obligations(),
        );

        Ok(value)
    }

    /// Given the original values and the (canonicalized) result from
    /// computing a query, returns a **guess** at an instantiation that
    /// can be applied to the query result to convert the result back
    /// into the original namespace. This is called a **guess**
    /// because it uses a quick heuristic to find the values for each
    /// canonical variable; if that quick heuristic fails, then we
    /// will instantiate fresh inference variables for each canonical
    /// variable instead. Therefore, the result of this method must be
    /// properly unified
    #[instrument(level = "debug", skip(self, param_env))]
    fn query_response_instantiation_guess<R>(
        &self,
        cause: &ObligationCause,
        param_env: ParamEnv<'db>,
        original_values: &OriginalQueryValues<'db>,
        query_response: &Canonical<'db, QueryResponse<'db, R>>,
    ) -> InferResult<'db, CanonicalVarValues<'db>>
    where
        R: Debug + TypeFoldable<DbInterner<'db>>,
    {
        // For each new universe created in the query result that did
        // not appear in the original query, create a local
        // superuniverse.
        let mut universe_map = original_values.universe_map.clone();
        let num_universes_in_query = original_values.universe_map.len();
        let num_universes_in_response = query_response.max_universe.as_usize() + 1;
        for _ in num_universes_in_query..num_universes_in_response {
            universe_map.push(self.create_next_universe());
        }
        assert!(!universe_map.is_empty()); // always have the root universe
        assert_eq!(universe_map[UniverseIndex::ROOT.as_usize()], UniverseIndex::ROOT);

        // Every canonical query result includes values for each of
        // the inputs to the query. Therefore, we begin by unifying
        // these values with the original inputs that were
        // canonicalized.
        let result_values = &query_response.value.var_values;
        assert_eq!(original_values.var_values.len(), result_values.len());

        // Quickly try to find initial values for the canonical
        // variables in the result in terms of the query. We do this
        // by iterating down the values that the query gave to each of
        // the canonical inputs. If we find that one of those values
        // is directly equal to one of the canonical variables in the
        // result, then we can type the corresponding value from the
        // input. See the example above.
        let mut opt_values: IndexVec<BoundVar, Option<GenericArg<'db>>> =
            IndexVec::from_elem_n(None, query_response.variables.len());

        for (original_value, result_value) in iter::zip(&original_values.var_values, result_values)
        {
            match result_value.kind() {
                GenericArgKind::Type(result_value) => {
                    // We disable the instantiation guess for inference variables
                    // and only use it for placeholders. We need to handle the
                    // `sub_root` of type inference variables which would make this
                    // more involved. They are also a lot rarer than region variables.
                    if let TyKind::Bound(index_kind, b) = result_value.kind()
                        && !matches!(
                            query_response.variables.as_slice()[b.var.as_usize()],
                            CanonicalVarKind::Ty { .. }
                        )
                    {
                        // We only allow a `Canonical` index in generic parameters.
                        assert!(matches!(index_kind, BoundVarIndexKind::Canonical));
                        opt_values[b.var] = Some(*original_value);
                    }
                }
                GenericArgKind::Lifetime(result_value) => {
                    if let RegionKind::ReBound(index_kind, b) = result_value.kind() {
                        // We only allow a `Canonical` index in generic parameters.
                        assert!(matches!(index_kind, BoundVarIndexKind::Canonical));
                        opt_values[b.var] = Some(*original_value);
                    }
                }
                GenericArgKind::Const(result_value) => {
                    if let ConstKind::Bound(index_kind, b) = result_value.kind() {
                        // We only allow a `Canonical` index in generic parameters.
                        assert!(matches!(index_kind, BoundVarIndexKind::Canonical));
                        opt_values[b.var] = Some(*original_value);
                    }
                }
            }
        }

        // Create result arguments: if we found a value for a
        // given variable in the loop above, use that. Otherwise, use
        // a fresh inference variable.
        let interner = self.interner;
        let variables = query_response.variables;
        let var_values =
            CanonicalVarValues::instantiate(interner, variables, |var_values, kind| {
                if kind.universe() != UniverseIndex::ROOT {
                    // A variable from inside a binder of the query. While ideally these shouldn't
                    // exist at all, we have to deal with them for now.
                    self.instantiate_canonical_var(kind, var_values, |u| universe_map[u.as_usize()])
                } else if kind.is_existential() {
                    match opt_values[BoundVar::new(var_values.len())] {
                        Some(k) => k,
                        None => self.instantiate_canonical_var(kind, var_values, |u| {
                            universe_map[u.as_usize()]
                        }),
                    }
                } else {
                    // For placeholders which were already part of the input, we simply map this
                    // universal bound variable back the placeholder of the input.
                    opt_values[BoundVar::new(var_values.len())]
                        .expect("expected placeholder to be unified with itself during response")
                }
            });

        let mut obligations = PredicateObligations::new();

        // Carry all newly resolved opaque types to the caller's scope
        for &(a, b) in &query_response.value.opaque_types {
            let a = instantiate_value(self.interner, &var_values, a);
            let b = instantiate_value(self.interner, &var_values, b);
            debug!(?a, ?b, "constrain opaque type");
            // We use equate here instead of, for example, just registering the
            // opaque type's hidden value directly, because the hidden type may have been an inference
            // variable that got constrained to the opaque type itself. In that case we want to equate
            // the generic args of the opaque with the generic params of its hidden type version.
            obligations.extend(
                self.at(cause, param_env)
                    .eq(Ty::new_opaque(self.interner, a.def_id, a.args), b)?
                    .obligations,
            );
        }

        Ok(InferOk { value: var_values, obligations })
    }

    /// Given a "guess" at the values for the canonical variables in
    /// the input, try to unify with the *actual* values found in the
    /// query result. Often, but not always, this is a no-op, because
    /// we already found the mapping in the "guessing" step.
    ///
    /// See also: [`Self::query_response_instantiation_guess`]
    fn unify_query_response_instantiation_guess<R>(
        &self,
        cause: &ObligationCause,
        param_env: ParamEnv<'db>,
        original_values: &OriginalQueryValues<'db>,
        result_args: &CanonicalVarValues<'db>,
        query_response: &Canonical<'db, QueryResponse<'db, R>>,
    ) -> InferResult<'db, ()>
    where
        R: Debug + TypeFoldable<DbInterner<'db>>,
    {
        // A closure that yields the result value for the given
        // canonical variable; this is taken from
        // `query_response.var_values` after applying the instantiation
        // by `result_args`.
        let instantiated_query_response = |index: BoundVar| -> GenericArg<'db> {
            query_response
                .instantiate_projected(self.interner, result_args, |v| v.var_values[index])
        };

        // Unify the original value for each variable with the value
        // taken from `query_response` (after applying `result_args`).
        self.unify_canonical_vars(cause, param_env, original_values, instantiated_query_response)
    }

    /// Given two sets of values for the same set of canonical variables, unify them.
    /// The second set is produced lazily by supplying indices from the first set.
    fn unify_canonical_vars(
        &self,
        cause: &ObligationCause,
        param_env: ParamEnv<'db>,
        variables1: &OriginalQueryValues<'db>,
        variables2: impl Fn(BoundVar) -> GenericArg<'db>,
    ) -> InferResult<'db, ()> {
        let mut obligations = PredicateObligations::new();
        for (index, value1) in variables1.var_values.iter().enumerate() {
            let value2 = variables2(BoundVar::new(index));

            match (value1.kind(), value2.kind()) {
                (GenericArgKind::Type(v1), GenericArgKind::Type(v2)) => {
                    obligations.extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
                }
                (GenericArgKind::Lifetime(re1), GenericArgKind::Lifetime(re2))
                    if re1.is_erased() && re2.is_erased() =>
                {
                    // no action needed
                }
                (GenericArgKind::Lifetime(v1), GenericArgKind::Lifetime(v2)) => {
                    self.inner.borrow_mut().unwrap_region_constraints().make_eqregion(v1, v2);
                }
                (GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => {
                    let ok = self.at(cause, param_env).eq(v1, v2)?;
                    obligations.extend(ok.into_obligations());
                }
                _ => {
                    panic!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
                }
            }
        }
        Ok(InferOk { value: (), obligations })
    }
}

View file

@ -22,10 +22,12 @@
//! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html
use crate::next_solver::{
Canonical, CanonicalVarValues, Const, DbInterner, GenericArg, PlaceholderConst,
PlaceholderRegion, PlaceholderTy, Region, Ty, TyKind, infer::InferCtxt,
ArgOutlivesPredicate, Canonical, CanonicalVarValues, Const, DbInterner, GenericArg,
OpaqueTypeKey, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Region, Ty, TyKind,
infer::InferCtxt,
};
use instantiate::CanonicalExt;
use macros::{TypeFoldable, TypeVisitable};
use rustc_index::IndexVec;
use rustc_type_ir::inherent::IntoKind;
use rustc_type_ir::{CanonicalVarKind, InferTy, TypeFoldable, UniverseIndex, inherent::Ty as _};
@ -135,3 +137,22 @@ impl<'db> InferCtxt<'db> {
}
}
}
/// After we execute a query with a canonicalized key, we get back a
/// `Canonical<QueryResponse<..>>`. You can use
/// `instantiate_query_result` to access the data in this result.
#[derive(Clone, Debug, TypeVisitable, TypeFoldable)]
pub struct QueryResponse<'db, R> {
    // Values inferred for the query's canonical variables.
    pub var_values: CanonicalVarValues<'db>,
    // Region outlives constraints and assumptions accumulated by the query.
    pub region_constraints: QueryRegionConstraints<'db>,
    // Opaque types constrained during the query, with their hidden types.
    pub opaque_types: Vec<(OpaqueTypeKey<'db>, Ty<'db>)>,
    // The actual answer to the query.
    pub value: R,
}

/// Region constraints collected while executing a canonical query.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub struct QueryRegionConstraints<'db> {
    pub outlives: Vec<QueryOutlivesConstraint<'db>>,
    pub assumptions: Vec<ArgOutlivesPredicate<'db>>,
}

pub type QueryOutlivesConstraint<'tcx> = ArgOutlivesPredicate<'tcx>;

View file

@ -29,9 +29,10 @@ use type_variable::TypeVariableOrigin;
use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey};
use crate::next_solver::{
BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, SolverContext,
ArgOutlivesPredicate, BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, Predicate,
SolverContext,
fold::BoundVarReplacerDelegate,
infer::{select::EvaluationResult, traits::PredicateObligation},
infer::{at::ToTrace, select::EvaluationResult, traits::PredicateObligation},
obligation_ctxt::ObligationCtxt,
};
@ -47,6 +48,7 @@ pub mod at;
pub mod canonical;
mod context;
pub mod opaque_types;
mod outlives;
pub mod region_constraints;
pub mod relate;
pub mod resolve;
@ -141,7 +143,14 @@ pub struct InferCtxtInner<'db> {
/// for each body-id in this map, which will process the
/// obligations within. This is expected to be done 'late enough'
/// that all type inference variables have been bound and so forth.
pub(crate) region_obligations: Vec<RegionObligation<'db>>,
pub(crate) region_obligations: Vec<TypeOutlivesConstraint<'db>>,
/// The outlives bounds that we assume must hold about placeholders that
/// come from instantiating the binder of coroutine-witnesses. These bounds
/// are deduced from the well-formedness of the witness's types, and are
/// necessary because of the way we anonymize the regions in a coroutine,
/// which may cause types to no longer be considered well-formed.
region_assumptions: Vec<ArgOutlivesPredicate<'db>>,
/// Caches for opaque type inference.
pub(crate) opaque_type_storage: OpaqueTypeStorage<'db>,
@ -158,12 +167,13 @@ impl<'db> InferCtxtInner<'db> {
float_unification_storage: Default::default(),
region_constraint_storage: Some(Default::default()),
region_obligations: vec![],
region_assumptions: Default::default(),
opaque_type_storage: Default::default(),
}
}
#[inline]
pub fn region_obligations(&self) -> &[RegionObligation<'db>] {
pub fn region_obligations(&self) -> &[TypeOutlivesConstraint<'db>] {
&self.region_obligations
}
@ -318,7 +328,7 @@ impl fmt::Display for FixupError {
/// See the `region_obligations` field for more information.
#[derive(Clone, Debug)]
pub struct RegionObligation<'db> {
pub struct TypeOutlivesConstraint<'db> {
pub sub_region: Region<'db>,
pub sup_type: Ty<'db>,
}
@ -387,6 +397,12 @@ impl<'db> InferCtxt<'db> {
self.typing_mode
}
/// Evaluates whether the predicate can be satisfied (by any means)
/// in the given `ParamEnv`.
pub fn predicate_may_hold(&self, obligation: &PredicateObligation<'db>) -> bool {
    // Delegate to obligation evaluation and ask whether the verdict
    // leaves any possibility of success.
    let evaluation = self.evaluate_obligation(obligation);
    evaluation.may_apply()
}
/// See the comment on [OpaqueTypesJank](crate::solve::OpaqueTypesJank)
/// for more details.
pub fn predicate_may_hold_opaque_types_jank(
@ -507,6 +523,22 @@ impl<'db> InferCtxt<'db> {
})
}
/// Returns whether `a` and `b` can be equated in `param_env`.
///
/// Runs inside a `probe`, so no inference side effects leak out of the check.
pub fn can_eq<T: ToTrace<'db>>(&self, param_env: ParamEnv<'db>, a: T, b: T) -> bool {
    self.probe(|_| {
        let mut ocx = ObligationCtxt::new(self);
        match ocx.eq(&ObligationCause::dummy(), param_env, a, b) {
            // Structural equation succeeded; the values are equatable only
            // if every resulting obligation can also be discharged.
            Ok(()) => ocx.try_evaluate_obligations().is_empty(),
            Err(_) => false,
        }
    })
}
/// See the comment on [OpaqueTypesJank](crate::solve::OpaqueTypesJank)
/// for more details.
pub fn goal_may_hold_opaque_types_jank(&self, goal: Goal<'db, Predicate<'db>>) -> bool {
    // Convert this `InferCtxt` into a solver context view and delegate to
    // the root-goal evaluation that applies the opaque-types workaround.
    <&SolverContext<'db>>::from(self).root_goal_may_hold_opaque_types_jank(goal)
}
pub fn type_is_copy_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>) -> bool {
let ty = self.resolve_vars_if_possible(ty);
@ -632,6 +664,14 @@ impl<'db> InferCtxt<'db> {
self.inner.borrow_mut().type_variables().num_vars()
}
/// Creates a fresh inference variable whose kind (type, const, or lifetime)
/// matches the kind of the given generic parameter, returned as a `GenericArg`.
pub fn next_var_for_param(&self, id: GenericParamId) -> GenericArg<'db> {
    match id {
        GenericParamId::TypeParamId(_) => self.next_ty_var().into(),
        GenericParamId::ConstParamId(_) => self.next_const_var().into(),
        GenericParamId::LifetimeParamId(_) => self.next_region_var().into(),
    }
}
pub fn next_ty_var(&self) -> Ty<'db> {
self.next_ty_var_with_origin(TypeVariableOrigin { param_def_id: None })
}
@ -846,6 +886,22 @@ impl<'db> InferCtxt<'db> {
self.inner.borrow_mut().opaque_type_storage.iter_opaque_types().collect()
}
/// Returns whether any opaque type currently in storage has a hidden type
/// that is a type inference variable whose sub-unification root coincides
/// with the sub-unification root of `ty_vid`.
pub fn has_opaques_with_sub_unified_hidden_type(&self, ty_vid: TyVid) -> bool {
    // Resolve the query variable to its sub-unification root up front so the
    // comparison below is root-to-root.
    let needle_root = self.sub_unification_table_root_var(ty_vid);
    let inner = &mut *self.inner.borrow_mut();
    // Borrow the type-variable storage together with the undo log; this is a
    // disjoint field borrow from `opaque_type_storage` below.
    let mut type_variables = inner.type_variable_storage.with_log(&mut inner.undo_log);
    inner.opaque_type_storage.iter_opaque_types().any(|(_, hidden_ty)| {
        match hidden_ty.ty.kind() {
            TyKind::Infer(InferTy::TyVar(hidden_vid)) => {
                type_variables.sub_unification_table_root_var(hidden_vid) == needle_root
            }
            // Hidden types that are not bare type variables can never
            // sub-unify with `ty_vid`.
            _ => false,
        }
    })
}
#[inline(always)]
pub fn can_define_opaque_ty(&self, id: impl Into<SolverDefId>) -> bool {
match self.typing_mode_unchecked() {

View file

@ -0,0 +1 @@
mod obligations;

View file

@ -0,0 +1,68 @@
use ena::undo_log::UndoLogs;
use rustc_type_ir::{OutlivesPredicate, TypeVisitableExt};
use tracing::{debug, instrument};

use crate::next_solver::{
    ArgOutlivesPredicate, GenericArg, Region, RegionOutlivesPredicate, Ty,
    infer::{InferCtxt, TypeOutlivesConstraint, snapshot::undo_log::UndoLog},
};

impl<'db> InferCtxt<'db> {
    /// Registers an outlives constraint `arg: 'r2`, dispatching on the kind
    /// of `arg` (a region or a type; consts cannot appear here).
    pub fn register_outlives_constraint(
        &self,
        OutlivesPredicate(arg, r2): ArgOutlivesPredicate<'db>,
    ) {
        match arg {
            GenericArg::Lifetime(r1) => {
                self.register_region_outlives_constraint(OutlivesPredicate(r1, r2));
            }
            GenericArg::Ty(ty1) => {
                self.register_type_outlives_constraint(ty1, r2);
            }
            // `const: 'r` is not a meaningful outlives predicate.
            GenericArg::Const(_) => unreachable!(),
        }
    }

    /// Registers a region-outlives constraint `'r_a: 'r_b` with the region
    /// constraint collector.
    pub fn register_region_outlives_constraint(
        &self,
        OutlivesPredicate(r_a, r_b): RegionOutlivesPredicate<'db>,
    ) {
        // `'a: 'b` ==> `'b <= 'a`
        self.sub_regions(r_b, r_a);
    }

    /// Registers that the given region obligation must be resolved
    /// from within the scope of `body_id`. These regions are enqueued
    /// and later processed by regionck, when full type information is
    /// available (see `region_obligations` field for more
    /// information).
    #[instrument(level = "debug", skip(self))]
    pub fn register_type_outlives_constraint_inner(&self, obligation: TypeOutlivesConstraint<'db>) {
        let mut inner = self.inner.borrow_mut();
        // The undo-log entry must be pushed alongside the obligation so that
        // snapshot rollback pops exactly one obligation per entry.
        inner.undo_log.push(UndoLog::PushTypeOutlivesConstraint);
        inner.region_obligations.push(obligation);
    }

    /// Registers a type-outlives constraint `sup_type: 'sub_region`, skipping
    /// constraints that can never affect lifetime checking.
    pub fn register_type_outlives_constraint(&self, sup_type: Ty<'db>, sub_region: Region<'db>) {
        // `is_global` means the type has no params, infer, placeholder, or non-`'static`
        // free regions. If the type has none of these things, then we can skip registering
        // this outlives obligation since it has no components which affect lifetime
        // checking in an interesting way.
        if sup_type.is_global() {
            return;
        }

        debug!(?sup_type, ?sub_region);
        self.register_type_outlives_constraint_inner(TypeOutlivesConstraint {
            sup_type,
            sub_region,
        });
    }

    /// Records an outlives assumption (see the `region_assumptions` field),
    /// together with an undo-log entry so it is rolled back on snapshot undo.
    pub fn register_region_assumption(&self, assumption: ArgOutlivesPredicate<'db>) {
        let mut inner = self.inner.borrow_mut();
        inner.undo_log.push(UndoLog::PushRegionAssumption);
        inner.region_assumptions.push(assumption);
    }
}

View file

@ -28,8 +28,8 @@ pub(crate) enum UndoLog<'db> {
FloatUnificationTable(sv::UndoLog<ut::Delegate<FloatVid>>),
RegionConstraintCollector(region_constraints::UndoLog<'db>),
RegionUnificationTable(sv::UndoLog<ut::Delegate<RegionVidKey<'db>>>),
#[expect(dead_code, reason = "this is used in rustc")]
PushRegionObligation,
PushTypeOutlivesConstraint,
PushRegionAssumption,
}
macro_rules! impl_from {
@ -75,8 +75,13 @@ impl<'db> Rollback<UndoLog<'db>> for InferCtxtInner<'db> {
UndoLog::RegionUnificationTable(undo) => {
self.region_constraint_storage.as_mut().unwrap().unification_table.reverse(undo)
}
UndoLog::PushRegionObligation => {
self.region_obligations.pop();
UndoLog::PushTypeOutlivesConstraint => {
let popped = self.region_obligations.pop();
assert!(popped.is_some(), "pushed region constraint but could not pop it");
}
UndoLog::PushRegionAssumption => {
let popped = self.region_assumptions.pop();
assert!(popped.is_some(), "pushed region assumption but could not pop it");
}
}
}

View file

@ -1,14 +1,15 @@
//! Things related to the Interner in the next-trait-solver.
use std::{fmt, ops::ControlFlow};
use std::fmt;
use rustc_ast_ir::{FloatTy, IntTy, UintTy};
pub use tls_cache::clear_tls_solver_cache;
pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db};
use base_db::Crate;
use hir_def::{
AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, ItemContainerId,
StructId, UnionId, VariantId,
AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule,
ItemContainerId, StructId, UnionId, VariantId,
lang_item::LangItem,
signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
};
@ -22,6 +23,7 @@ use rustc_type_ir::{
TypeVisitableExt, UniverseIndex, Upcast, Variance,
elaborate::elaborate,
error::TypeError,
fast_reject,
inherent::{self, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _},
lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem},
solve::SizedTraitKind,
@ -30,12 +32,13 @@ use rustc_type_ir::{
use crate::{
FnAbi,
db::{HirDatabase, InternedCoroutine, InternedCoroutineId},
method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint},
lower::GenericPredicates,
method_resolution::TraitImpls,
next_solver::{
AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, OpaqueTypeKey,
RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls},
RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds, TraitIdWrapper,
TypeAliasIdWrapper, util::explicit_item_bounds,
},
};
@ -583,6 +586,10 @@ impl AdtDef {
self.inner().flags.is_enum
}
/// Returns `true` if this ADT is the `Box` type (per its precomputed flags).
pub fn is_box(&self) -> bool {
    self.inner().flags.is_box
}
#[inline]
pub fn repr(self) -> ReprOptions {
self.inner().repr
@ -1264,27 +1271,21 @@ impl<'db> Interner for DbInterner<'db> {
})
}
#[tracing::instrument(skip(self), ret)]
#[tracing::instrument(skip(self))]
fn item_bounds(
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
explicit_item_bounds(self, def_id).map_bound(|bounds| {
Clauses::new_from_iter(self, elaborate(self, bounds).collect::<Vec<_>>())
})
explicit_item_bounds(self, def_id).map_bound(|bounds| elaborate(self, bounds))
}
#[tracing::instrument(skip(self), ret)]
#[tracing::instrument(skip(self))]
fn item_self_bounds(
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
explicit_item_bounds(self, def_id).map_bound(|bounds| {
Clauses::new_from_iter(
self,
elaborate(self, bounds).filter_only_self().collect::<Vec<_>>(),
)
})
explicit_item_bounds(self, def_id)
.map_bound(|bounds| elaborate(self, bounds).filter_only_self())
}
fn item_non_self_bounds(
@ -1309,9 +1310,8 @@ impl<'db> Interner for DbInterner<'db> {
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
let predicates = self.db().generic_predicates(def_id.try_into().unwrap());
let predicates: Vec<_> = predicates.iter().cloned().collect();
EarlyBinder::bind(predicates.into_iter())
GenericPredicates::query_all(self.db, def_id.try_into().unwrap())
.map_bound(|it| it.iter().copied())
}
#[tracing::instrument(level = "debug", skip(self), ret)]
@ -1319,9 +1319,8 @@ impl<'db> Interner for DbInterner<'db> {
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
let predicates = self.db().generic_predicates_without_parent(def_id.try_into().unwrap());
let predicates: Vec<_> = predicates.iter().cloned().collect();
EarlyBinder::bind(predicates.into_iter())
GenericPredicates::query_own(self.db, def_id.try_into().unwrap())
.map_bound(|it| it.iter().copied())
}
#[tracing::instrument(skip(self), ret)]
@ -1334,23 +1333,21 @@ impl<'db> Interner for DbInterner<'db> {
_ => false,
};
let predicates: Vec<(Clause<'db>, Span)> = self
.db()
.generic_predicates(def_id.0.into())
.iter()
.filter(|p| match p.kind().skip_binder() {
// rustc has the following assertion:
// https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608
rustc_type_ir::ClauseKind::Trait(it) => is_self(it.self_ty()),
rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self(it.0),
rustc_type_ir::ClauseKind::Projection(it) => is_self(it.self_ty()),
rustc_type_ir::ClauseKind::HostEffect(it) => is_self(it.self_ty()),
_ => false,
})
.cloned()
.map(|p| (p, Span::dummy()))
.collect();
EarlyBinder::bind(predicates)
GenericPredicates::query_explicit(self.db, def_id.0.into()).map_bound(move |predicates| {
predicates
.iter()
.copied()
.filter(move |p| match p.kind().skip_binder() {
// rustc has the following assertion:
// https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608
ClauseKind::Trait(it) => is_self(it.self_ty()),
ClauseKind::TypeOutlives(it) => is_self(it.0),
ClauseKind::Projection(it) => is_self(it.self_ty()),
ClauseKind::HostEffect(it) => is_self(it.self_ty()),
_ => false,
})
.map(|p| (p, Span::dummy()))
})
}
#[tracing::instrument(skip(self), ret)]
@ -1368,25 +1365,25 @@ impl<'db> Interner for DbInterner<'db> {
}
}
let predicates: Vec<(Clause<'db>, Span)> = self
.db()
.generic_predicates(def_id.try_into().unwrap())
.iter()
.filter(|p| match p.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()),
rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0),
rustc_type_ir::ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()),
rustc_type_ir::ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()),
// FIXME: Not sure is this correct to allow other clauses but we might replace
// `generic_predicates_ns` query here with something closer to rustc's
// `implied_bounds_with_filter`, which is more granular lowering than this
// "lower at once and then filter" implementation.
_ => true,
})
.cloned()
.map(|p| (p, Span::dummy()))
.collect();
EarlyBinder::bind(predicates)
GenericPredicates::query_explicit(self.db, def_id.try_into().unwrap()).map_bound(
|predicates| {
predicates
.iter()
.copied()
.filter(|p| match p.kind().skip_binder() {
ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()),
ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0),
ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()),
ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()),
// FIXME: Not sure is this correct to allow other clauses but we might replace
// `generic_predicates_ns` query here with something closer to rustc's
// `implied_bounds_with_filter`, which is more granular lowering than this
// "lower at once and then filter" implementation.
_ => true,
})
.map(|p| (p, Span::dummy()))
},
)
}
fn impl_super_outlives(
@ -1396,15 +1393,12 @@ impl<'db> Interner for DbInterner<'db> {
let trait_ref = self.db().impl_trait(impl_id.0).expect("expected an impl of trait");
trait_ref.map_bound(|trait_ref| {
let clause: Clause<'_> = trait_ref.upcast(self);
Clauses::new_from_iter(
self,
rustc_type_ir::elaborate::elaborate(self, [clause]).filter(|clause| {
matches!(
clause.kind().skip_binder(),
ClauseKind::TypeOutlives(_) | ClauseKind::RegionOutlives(_)
)
}),
)
elaborate(self, [clause]).filter(|clause| {
matches!(
clause.kind().skip_binder(),
ClauseKind::TypeOutlives(_) | ClauseKind::RegionOutlives(_)
)
})
})
}
@ -1609,79 +1603,152 @@ impl<'db> Interner for DbInterner<'db> {
fn for_each_relevant_impl(
self,
trait_: Self::TraitId,
trait_def_id: Self::TraitId,
self_ty: Self::Ty,
mut f: impl FnMut(Self::ImplId),
) {
let trait_ = trait_.0;
let self_ty_fp = TyFingerprint::for_trait_impl(self_ty);
let fps: &[TyFingerprint] = match self_ty.kind() {
TyKind::Infer(InferTy::IntVar(..)) => &ALL_INT_FPS,
TyKind::Infer(InferTy::FloatVar(..)) => &ALL_FLOAT_FPS,
_ => self_ty_fp.as_slice(),
let krate = self.krate.expect("trait solving requires setting `DbInterner::krate`");
let trait_block = trait_def_id.0.loc(self.db).container.containing_block();
let mut consider_impls_for_simplified_type = |simp: SimplifiedType| {
let type_block = simp.def().and_then(|def_id| {
let module = match def_id {
SolverDefId::AdtId(AdtId::StructId(id)) => id.module(self.db),
SolverDefId::AdtId(AdtId::EnumId(id)) => id.module(self.db),
SolverDefId::AdtId(AdtId::UnionId(id)) => id.module(self.db),
SolverDefId::TraitId(id) => id.module(self.db),
SolverDefId::TypeAliasId(id) => id.module(self.db),
SolverDefId::ConstId(_)
| SolverDefId::FunctionId(_)
| SolverDefId::ImplId(_)
| SolverDefId::StaticId(_)
| SolverDefId::InternedClosureId(_)
| SolverDefId::InternedCoroutineId(_)
| SolverDefId::InternedOpaqueTyId(_)
| SolverDefId::EnumVariantId(_)
| SolverDefId::Ctor(_) => return None,
};
module.containing_block()
});
TraitImpls::for_each_crate_and_block_trait_and_type(
self.db,
krate,
type_block,
trait_block,
&mut |impls| {
for &impl_ in impls.for_trait_and_self_ty(trait_def_id.0, &simp) {
f(impl_.into());
}
},
);
};
if fps.is_empty() {
_ = for_trait_impls(
self.db(),
self.krate.expect("Must have self.krate"),
self.block,
trait_,
self_ty_fp,
|impls| {
for i in impls.for_trait(trait_) {
use rustc_type_ir::TypeVisitable;
let contains_errors = self.db().impl_trait(i).map_or(false, |b| {
b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break()
});
if contains_errors {
continue;
}
match self_ty.kind() {
TyKind::Bool
| TyKind::Char
| TyKind::Int(_)
| TyKind::Uint(_)
| TyKind::Float(_)
| TyKind::Adt(_, _)
| TyKind::Foreign(_)
| TyKind::Str
| TyKind::Array(_, _)
| TyKind::Pat(_, _)
| TyKind::Slice(_)
| TyKind::RawPtr(_, _)
| TyKind::Ref(_, _, _)
| TyKind::FnDef(_, _)
| TyKind::FnPtr(..)
| TyKind::Dynamic(_, _)
| TyKind::Closure(..)
| TyKind::CoroutineClosure(..)
| TyKind::Coroutine(_, _)
| TyKind::Never
| TyKind::Tuple(_)
| TyKind::UnsafeBinder(_) => {
let simp =
fast_reject::simplify_type(self, self_ty, fast_reject::TreatParams::AsRigid)
.unwrap();
consider_impls_for_simplified_type(simp);
}
f(i.into());
}
ControlFlow::Continue(())
},
);
} else {
_ = for_trait_impls(
self.db(),
self.krate.expect("Must have self.krate"),
self.block,
trait_,
self_ty_fp,
|impls| {
for fp in fps {
for i in impls.for_trait_and_self_ty(trait_, *fp) {
use rustc_type_ir::TypeVisitable;
let contains_errors = self.db().impl_trait(i).map_or(false, |b| {
b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break()
});
if contains_errors {
continue;
}
// HACK: For integer and float variables we have to manually look at all impls
// which have some integer or float as a self type.
TyKind::Infer(InferTy::IntVar(_)) => {
use IntTy::*;
use UintTy::*;
// This causes a compiler error if any new integer kinds are added.
let (I8 | I16 | I32 | I64 | I128 | Isize): IntTy;
let (U8 | U16 | U32 | U64 | U128 | Usize): UintTy;
let possible_integers = [
// signed integers
SimplifiedType::Int(I8),
SimplifiedType::Int(I16),
SimplifiedType::Int(I32),
SimplifiedType::Int(I64),
SimplifiedType::Int(I128),
SimplifiedType::Int(Isize),
// unsigned integers
SimplifiedType::Uint(U8),
SimplifiedType::Uint(U16),
SimplifiedType::Uint(U32),
SimplifiedType::Uint(U64),
SimplifiedType::Uint(U128),
SimplifiedType::Uint(Usize),
];
for simp in possible_integers {
consider_impls_for_simplified_type(simp);
}
}
f(i.into());
}
}
ControlFlow::Continue(())
},
);
TyKind::Infer(InferTy::FloatVar(_)) => {
// This causes a compiler error if any new float kinds are added.
let (FloatTy::F16 | FloatTy::F32 | FloatTy::F64 | FloatTy::F128);
let possible_floats = [
SimplifiedType::Float(FloatTy::F16),
SimplifiedType::Float(FloatTy::F32),
SimplifiedType::Float(FloatTy::F64),
SimplifiedType::Float(FloatTy::F128),
];
for simp in possible_floats {
consider_impls_for_simplified_type(simp);
}
}
// The only traits applying to aliases and placeholders are blanket impls.
//
// Impls which apply to an alias after normalization are handled by
// `assemble_candidates_after_normalizing_self_ty`.
TyKind::Alias(_, _) | TyKind::Placeholder(..) | TyKind::Error(_) => (),
// FIXME: These should ideally not exist as a self type. It would be nice for
// the builtin auto trait impls of coroutines to instead directly recurse
// into the witness.
TyKind::CoroutineWitness(..) => (),
// These variants should not exist as a self type.
TyKind::Infer(
InferTy::TyVar(_)
| InferTy::FreshTy(_)
| InferTy::FreshIntTy(_)
| InferTy::FreshFloatTy(_),
)
| TyKind::Param(_)
| TyKind::Bound(_, _) => panic!("unexpected self type: {self_ty:?}"),
}
self.for_each_blanket_impl(trait_def_id, f)
}
fn for_each_blanket_impl(self, trait_def_id: Self::TraitId, mut f: impl FnMut(Self::ImplId)) {
let Some(krate) = self.krate else { return };
let block = trait_def_id.0.loc(self.db).container.containing_block();
for impls in self.db.trait_impls_in_deps(krate).iter() {
for impl_id in impls.for_trait(trait_def_id.0) {
let impl_data = self.db.impl_signature(impl_id);
let self_ty_ref = &impl_data.store[impl_data.self_ty];
if matches!(self_ty_ref, hir_def::type_ref::TypeRef::TypeParam(_)) {
f(impl_id.into());
}
TraitImpls::for_each_crate_and_block(self.db, krate, block, &mut |impls| {
for &impl_ in impls.blanket_impls(trait_def_id.0) {
f(impl_.into());
}
}
});
}
fn has_item_definition(self, _def_id: Self::DefId) -> bool {

View file

@ -13,7 +13,7 @@ use rustc_type_ir::{
};
use smallvec::SmallVec;
use crate::next_solver::{InternedWrapperNoDebug, TraitIdWrapper};
use crate::next_solver::{GenericArg, InternedWrapperNoDebug, TraitIdWrapper};
use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db};
@ -43,6 +43,7 @@ pub type PolyProjectionPredicate<'db> = Binder<'db, ProjectionPredicate<'db>>;
pub type PolyTraitRef<'db> = Binder<'db, TraitRef<'db>>;
pub type PolyExistentialTraitRef<'db> = Binder<'db, ExistentialTraitRef<'db>>;
pub type PolyExistentialProjection<'db> = Binder<'db, ExistentialProjection<'db>>;
/// An outlives predicate over a generic argument (type or region): `arg: 'r`.
pub type ArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>;
/// Compares via an ordering that will not change if modules are reordered or other changes are
/// made to the tree. In particular, this ordering is preserved across incremental compilations.

View file

@ -79,6 +79,10 @@ impl<'db> Region<'db> {
matches!(self.inner(), RegionKind::ReStatic)
}
/// Returns `true` if this region is `ReErased` (lifetime information dropped).
pub fn is_erased(&self) -> bool {
    matches!(self.inner(), RegionKind::ReErased)
}

/// Returns `true` if this region is an inference variable (`ReVar`).
pub fn is_var(&self) -> bool {
    matches!(self.inner(), RegionKind::ReVar(_))
}

View file

@ -11,13 +11,10 @@ use rustc_type_ir::{
};
use tracing::debug;
use crate::{
ImplTraitId,
next_solver::{
AliasTy, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs, ImplIdWrapper,
ParamEnv, Predicate, PredicateKind, SubtypePredicate, Ty, TyKind, fold::fold_tys,
util::sizedness_fast_path,
},
use crate::next_solver::{
AliasTy, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs, ImplIdWrapper,
ParamEnv, Predicate, PredicateKind, SubtypePredicate, Ty, TyKind, fold::fold_tys,
util::sizedness_fast_path,
};
use super::{
@ -163,20 +160,7 @@ impl<'db> SolverDelegate for SolverContext<'db> {
})
};
let db = interner.db;
let (opaques_table, opaque_idx) = match opaque_id.loc(db) {
ImplTraitId::ReturnTypeImplTrait(func, opaque_idx) => {
(db.return_type_impl_traits(func), opaque_idx)
}
ImplTraitId::TypeAliasImplTrait(type_alias, opaque_idx) => {
(db.type_alias_impl_traits(type_alias), opaque_idx)
}
};
let item_bounds = opaques_table
.as_deref()
.unwrap()
.as_ref()
.map_bound(|table| &table.impl_traits[opaque_idx].predicates);
let item_bounds = opaque_id.predicates(interner.db);
for predicate in item_bounds.iter_instantiated_copied(interner, args.as_slice()) {
let predicate = replace_opaques_in(predicate);

View file

@ -25,8 +25,8 @@ use rustc_type_ir::{
};
use crate::{
ImplTraitId,
db::{HirDatabase, InternedCoroutine},
lower::GenericPredicates,
next_solver::{
AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
@ -41,6 +41,7 @@ use super::{
util::{FloatExt, IntegerExt},
};
pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType<SolverDefId>;
pub type TyKind<'db> = rustc_type_ir::TyKind<DbInterner<'db>>;
pub type FnHeader<'db> = rustc_type_ir::FnHeader<DbInterner<'db>>;
@ -127,6 +128,22 @@ impl<'db> Ty<'db> {
Ty::new_tup(interner, &[])
}
/// Creates a `*const ty` raw pointer type.
pub fn new_imm_ptr(interner: DbInterner<'db>, ty: Ty<'db>) -> Self {
    Ty::new_ptr(interner, ty, Mutability::Not)
}

/// Creates a `&'region ty` shared reference type.
pub fn new_imm_ref(interner: DbInterner<'db>, region: Region<'db>, ty: Ty<'db>) -> Self {
    Ty::new_ref(interner, region, ty, Mutability::Not)
}

/// Creates an opaque alias type (`impl Trait`) for `def_id` with the given args.
pub fn new_opaque(
    interner: DbInterner<'db>,
    def_id: SolverDefId,
    args: GenericArgs<'db>,
) -> Self {
    Ty::new_alias(interner, AliasTyKind::Opaque, AliasTy::new_from_args(interner, def_id, args))
}
/// Returns the `Size` for primitive types (bool, uint, int, char, float).
pub fn primitive_size(self, interner: DbInterner<'db>) -> Size {
match self.kind() {
@ -326,11 +343,40 @@ impl<'db> Ty<'db> {
matches!(self.kind(), TyKind::Never)
}
/// Returns `true` if this type is `bool`.
#[inline]
pub fn is_bool(self) -> bool {
    matches!(self.kind(), TyKind::Bool)
}

/// A scalar type is one that denotes an atomic datum, with no sub-components.
/// (A RawPtr is scalar because it represents a non-managed pointer, so its
/// contents are abstract to rustc.)
#[inline]
pub fn is_scalar(self) -> bool {
    matches!(
        self.kind(),
        TyKind::Bool
            | TyKind::Char
            | TyKind::Int(_)
            | TyKind::Float(_)
            | TyKind::Uint(_)
            | TyKind::FnDef(..)
            | TyKind::FnPtr(..)
            | TyKind::RawPtr(_, _)
            // Unresolved int/float inference variables will resolve to scalars.
            | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_))
    )
}

/// Returns `true` if this type is any kind of inference variable.
#[inline]
pub fn is_infer(self) -> bool {
    matches!(self.kind(), TyKind::Infer(..))
}

/// Returns `true` if this type is integral or floating-point.
#[inline]
pub fn is_numeric(self) -> bool {
    self.is_integral() || self.is_floating_point()
}
#[inline]
pub fn is_str(self) -> bool {
matches!(self.kind(), TyKind::Str)
@ -346,10 +392,27 @@ impl<'db> Ty<'db> {
matches!(self.kind(), TyKind::RawPtr(..))
}
/// Returns `true` if this type is an array `[T; N]`.
#[inline]
pub fn is_array(self) -> bool {
    matches!(self.kind(), TyKind::Array(..))
}

/// Returns `true` if this type is a slice `[T]`.
#[inline]
pub fn is_slice(self) -> bool {
    matches!(self.kind(), TyKind::Slice(..))
}

/// Returns `true` if this type is a `union` ADT.
pub fn is_union(self) -> bool {
    self.as_adt().is_some_and(|(adt, _)| matches!(adt, AdtId::UnionId(_)))
}

/// If this type is `Box<T>`, returns `Some(T)`; otherwise `None`.
pub fn boxed_ty(self) -> Option<Ty<'db>> {
    match self.kind() {
        TyKind::Adt(adt_def, args) if adt_def.is_box() => Some(args.type_at(0)),
        _ => None,
    }
}
#[inline]
pub fn as_adt(self) -> Option<(AdtId, GenericArgs<'db>)> {
match self.kind() {
@ -378,11 +441,9 @@ impl<'db> Ty<'db> {
///
/// The parameter `explicit` indicates if this is an *explicit* dereference.
/// Some types -- notably raw ptrs -- can only be dereferenced explicitly.
pub fn builtin_deref(self, db: &dyn HirDatabase, explicit: bool) -> Option<Ty<'db>> {
pub fn builtin_deref(self, explicit: bool) -> Option<Ty<'db>> {
match self.kind() {
TyKind::Adt(adt, substs) if crate::lang_items::is_box(db, adt.def_id().0) => {
Some(substs.as_slice()[0].expect_ty())
}
TyKind::Adt(adt, substs) if adt.is_box() => Some(substs.as_slice()[0].expect_ty()),
TyKind::Ref(_, ty, _) => Some(ty),
TyKind::RawPtr(ty, _) if explicit => Some(ty),
_ => None,
@ -562,26 +623,14 @@ impl<'db> Ty<'db> {
let interner = DbInterner::new_with(db, None, None);
match self.kind() {
TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => {
match db.lookup_intern_impl_trait_id(opaque_ty.def_id.expect_opaque_ty()) {
ImplTraitId::ReturnTypeImplTrait(func, idx) => {
db.return_type_impl_traits(func).map(|it| {
let data =
(*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
.collect()
})
}
ImplTraitId::TypeAliasImplTrait(alias, idx) => {
db.type_alias_impl_traits(alias).map(|it| {
let data =
(*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
.collect()
})
}
}
}
TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => Some(
opaque_ty
.def_id
.expect_opaque_ty()
.predicates(db)
.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
.collect(),
),
TyKind::Param(param) => {
// FIXME: We shouldn't use `param.id` here.
let generic_params = db.generic_params(param.id.parent());
@ -589,11 +638,8 @@ impl<'db> Ty<'db> {
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::ArgumentImplTrait => {
let predicates = db
.generic_predicates(param.id.parent())
.instantiate_identity()
.into_iter()
.flatten()
let predicates = GenericPredicates::query_all(db, param.id.parent())
.iter_identity_copied()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == self,
ClauseKind::Projection(pred) => pred.self_ty() == self,

View file

@ -1,39 +1,30 @@
//! Various utilities for the next-trait-solver.
use std::{
iter,
ops::{self, ControlFlow},
};
use std::ops::ControlFlow;
use base_db::Crate;
use hir_def::{BlockId, HasModule, lang_item::LangItem};
use la_arena::Idx;
use hir_def::TraitId;
use rustc_abi::{Float, HasDataLayout, Integer, IntegerType, Primitive, ReprOptions};
use rustc_type_ir::{
ConstKind, CoroutineArgs, DebruijnIndex, FloatTy, INNERMOST, IntTy, Interner,
PredicatePolarity, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable,
TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex,
inherent::{
AdtDef, GenericArg as _, GenericArgs as _, IntoKind, ParamEnv as _, SliceLike, Ty as _,
},
TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex, elaborate,
inherent::{AdtDef, GenericArg as _, IntoKind, ParamEnv as _, SliceLike, Ty as _},
lang_items::SolverTraitLangItem,
solve::SizedTraitKind,
};
use crate::{
db::HirDatabase,
lower::{LifetimeElisionKind, TyLoweringContext},
method_resolution::{TraitImpls, TyFingerprint},
next_solver::{
BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion,
infer::InferCtxt,
use crate::next_solver::{
BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion,
PolyTraitRef,
infer::{
InferCtxt,
traits::{Obligation, ObligationCause, PredicateObligation},
},
};
use super::{
Binder, BoundRegion, BoundTy, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder,
GenericArgs, Predicate, PredicateKind, Region, SolverDefId, TraitPredicate, TraitRef, Ty,
TyKind,
Binder, BoundRegion, BoundTy, Clause, ClauseKind, Const, DbInterner, EarlyBinder, GenericArgs,
Predicate, PredicateKind, Region, SolverDefId, Ty, TyKind,
fold::{BoundVarReplacer, FnMutDelegate},
};
@ -388,54 +379,6 @@ where
}
}
pub(crate) fn for_trait_impls(
db: &dyn HirDatabase,
krate: Crate,
block: Option<BlockId>,
trait_id: hir_def::TraitId,
self_ty_fp: Option<TyFingerprint>,
mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>,
) -> ControlFlow<()> {
// Note: Since we're using `impls_for_trait` and `impl_provided_for`,
// only impls where the trait can be resolved should ever reach Chalk.
// `impl_datum` relies on that and will panic if the trait can't be resolved.
let in_self_and_deps = db.trait_impls_in_deps(krate);
let trait_module = trait_id.module(db);
let type_module = match self_ty_fp {
Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(db)),
Some(TyFingerprint::ForeignType(type_id)) => Some(type_id.module(db)),
Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(db)),
_ => None,
};
let mut def_blocks =
[trait_module.containing_block(), type_module.and_then(|it| it.containing_block())];
let block_impls = iter::successors(block, |&block_id| {
cov_mark::hit!(block_local_impls);
block_id.loc(db).module.containing_block()
})
.inspect(|&block_id| {
// make sure we don't search the same block twice
def_blocks.iter_mut().for_each(|block| {
if *block == Some(block_id) {
*block = None;
}
});
})
.filter_map(|block_id| db.trait_impls_in_block(block_id));
for it in in_self_and_deps.iter().map(ops::Deref::deref) {
f(it)?;
}
for it in block_impls {
f(&it)?;
}
for it in def_blocks.into_iter().flatten().filter_map(|it| db.trait_impls_in_block(it)) {
f(&it)?;
}
ControlFlow::Continue(())
}
// FIXME(next-trait-solver): uplift
pub fn sizedness_constraint_for_ty<'db>(
interner: DbInterner<'db>,
@ -507,79 +450,14 @@ pub fn apply_args_to_binder<'db, T: TypeFoldable<DbInterner<'db>>>(
pub fn explicit_item_bounds<'db>(
interner: DbInterner<'db>,
def_id: SolverDefId,
) -> EarlyBinder<'db, Clauses<'db>> {
) -> EarlyBinder<'db, impl DoubleEndedIterator<Item = Clause<'db>> + ExactSizeIterator> {
let db = interner.db();
match def_id {
SolverDefId::TypeAliasId(type_alias) => {
// Lower bounds -- we could/should maybe move this to a separate query in `lower`
let type_alias_data = db.type_alias_signature(type_alias);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
let mut ctx = TyLoweringContext::new(
db,
&resolver,
&type_alias_data.store,
type_alias.into(),
LifetimeElisionKind::AnonymousReportError,
);
let item_args = GenericArgs::identity_for_item(interner, def_id);
let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args);
let mut bounds = Vec::new();
for bound in &type_alias_data.bounds {
ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| {
bounds.push(pred);
});
}
if !ctx.unsized_types.contains(&interner_ty) {
let sized_trait = LangItem::Sized
.resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate"));
let sized_bound = sized_trait.map(|trait_id| {
let trait_ref = TraitRef::new_from_args(
interner,
trait_id.into(),
GenericArgs::new_from_iter(interner, [interner_ty.into()]),
);
Clause(Predicate::new(
interner,
Binder::dummy(rustc_type_ir::PredicateKind::Clause(
rustc_type_ir::ClauseKind::Trait(TraitPredicate {
trait_ref,
polarity: rustc_type_ir::PredicatePolarity::Positive,
}),
)),
))
});
bounds.extend(sized_bound);
bounds.shrink_to_fit();
}
rustc_type_ir::EarlyBinder::bind(Clauses::new_from_iter(interner, bounds))
}
SolverDefId::InternedOpaqueTyId(id) => {
let full_id = db.lookup_intern_impl_trait_id(id);
match full_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let datas = db
.return_type_impl_traits(func)
.expect("impl trait id without impl traits");
let datas = (*datas).as_ref().skip_binder();
let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())];
EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone()))
}
crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => {
let datas = db
.type_alias_impl_traits(alias)
.expect("impl trait id without impl traits");
let datas = (*datas).as_ref().skip_binder();
let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())];
EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone()))
}
}
}
let clauses = match def_id {
SolverDefId::TypeAliasId(type_alias) => crate::lower::type_alias_bounds(db, type_alias),
SolverDefId::InternedOpaqueTyId(id) => id.predicates(db),
_ => panic!("Unexpected GenericDefId"),
}
};
clauses.map_bound(|clauses| clauses.iter().copied())
}
pub struct ContainsTypeErrors;
@ -792,3 +670,34 @@ pub fn sizedness_fast_path<'db>(
false
}
/// Casts a trait reference into a reference to one of its super
/// traits; returns an empty vector if `target_trait_def_id` is not a
/// supertrait.
pub(crate) fn upcast_choices<'db>(
    interner: DbInterner<'db>,
    source_trait_ref: PolyTraitRef<'db>,
    target_trait_def_id: TraitId,
) -> Vec<PolyTraitRef<'db>> {
    // Shortcut the most common case: the source trait already is the target.
    if source_trait_ref.def_id().0 == target_trait_def_id {
        return vec![source_trait_ref];
    }
    let mut choices = Vec::new();
    for supertrait_ref in elaborate::supertraits(interner, source_trait_ref) {
        if supertrait_ref.def_id().0 == target_trait_def_id {
            choices.push(supertrait_ref);
        }
    }
    choices
}
/// Turns an iterator of clauses into fresh obligations (depth 0), all sharing
/// the given `cause` and `param_env`.
#[inline]
pub(crate) fn clauses_as_obligations<'db>(
    clauses: impl IntoIterator<Item = Clause<'db>>,
    cause: ObligationCause,
    param_env: ParamEnv<'db>,
) -> impl Iterator<Item = PredicateObligation<'db>> {
    clauses.into_iter().map(move |clause| {
        let predicate = clause.as_predicate();
        // Each clause yields an independent root obligation.
        Obligation { cause: cause.clone(), param_env, predicate, recursion_depth: 0 }
    })
}

View file

@ -7,7 +7,6 @@ use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_type_ir::inherent::Ty as _;
use syntax::ast;
use triomphe::Arc;
use crate::{
ImplTraitId,
@ -29,7 +28,7 @@ pub(crate) fn opaque_types_defined_by(
// A function may define its own RPITs.
extend_with_opaques(
db,
db.return_type_impl_traits(func),
ImplTraits::return_type_impl_traits(db, func),
|opaque_idx| ImplTraitId::ReturnTypeImplTrait(func, opaque_idx),
result,
);
@ -38,7 +37,7 @@ pub(crate) fn opaque_types_defined_by(
let extend_with_taits = |type_alias| {
extend_with_opaques(
db,
db.type_alias_impl_traits(type_alias),
ImplTraits::type_alias_impl_traits(db, type_alias),
|opaque_idx| ImplTraitId::TypeAliasImplTrait(type_alias, opaque_idx),
result,
);
@ -75,12 +74,12 @@ pub(crate) fn opaque_types_defined_by(
fn extend_with_opaques<'db>(
db: &'db dyn HirDatabase,
opaques: Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>,
opaques: &Option<Box<EarlyBinder<'db, ImplTraits<'db>>>>,
mut make_impl_trait: impl FnMut(ImplTraitIdx<'db>) -> ImplTraitId<'db>,
result: &mut Vec<SolverDefId>,
) {
if let Some(opaques) = opaques {
for (opaque_idx, _) in (*opaques).as_ref().skip_binder().impl_traits.iter() {
for (opaque_idx, _) in (**opaques).as_ref().skip_binder().impl_traits.iter() {
let opaque_id = InternedOpaqueTyId::new(db, make_impl_trait(opaque_idx));
result.push(opaque_id.into());
}
@ -109,6 +108,14 @@ pub(crate) fn tait_hidden_types<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
// Call this first, to not perform redundant work if there are no TAITs.
let Some(taits_count) = ImplTraits::type_alias_impl_traits(db, type_alias)
.as_deref()
.map(|taits| taits.as_ref().skip_binder().impl_traits.len())
else {
return ArenaMap::new();
};
let loc = type_alias.loc(db);
let module = loc.module(db);
let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
@ -119,10 +126,6 @@ pub(crate) fn tait_hidden_types<'db>(
let defining_bodies = tait_defining_bodies(db, &loc);
let taits_count = db
.type_alias_impl_traits(type_alias)
.map_or(0, |taits| (*taits).as_ref().skip_binder().impl_traits.len());
let mut result = ArenaMap::with_capacity(taits_count);
for defining_body in defining_bodies {
let infer = db.infer(defining_body);

View file

@ -2,17 +2,17 @@
use hir_def::{ImplId, nameres::crate_def_map};
use intern::sym;
use rustc_type_ir::inherent::SliceLike;
use tracing::debug;
use crate::{
db::HirDatabase,
lower::GenericPredicates,
next_solver::{
DbInterner, TypingMode,
infer::{
DbInternerInferExt,
traits::{Obligation, ObligationCause},
},
infer::{DbInternerInferExt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
util::clauses_as_obligations,
},
};
@ -102,14 +102,12 @@ fn specializes_query(
// Now check that the source trait ref satisfies all the where clauses of the target impl.
// This is not just for correctness; we also need this to constrain any params that may
// only be referenced via projection predicates.
if let Some(predicates) =
db.generic_predicates(parent_impl_def_id.into()).instantiate(interner, parent_args)
{
ocx.register_obligations(
predicates
.map(|predicate| Obligation::new(interner, cause.clone(), param_env, predicate)),
);
}
ocx.register_obligations(clauses_as_obligations(
GenericPredicates::query_all(db, parent_impl_def_id.into())
.iter_instantiated_copied(interner, parent_args.as_slice()),
cause.clone(),
param_env,
));
let errors = ocx.evaluate_obligations_error_on_ambiguity();
if !errors.is_empty() {

View file

@ -49,7 +49,7 @@ fn let_stmt_coerce() {
//- minicore: coerce_unsized
fn test() {
let x: &[isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
let x: *const [isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
}
@ -96,7 +96,7 @@ fn foo<T>(x: &[T]) -> &[T] { x }
fn test() {
let x = if true {
foo(&[1])
// ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
} else {
&[1]
};
@ -148,7 +148,7 @@ fn foo<T>(x: &[T]) -> &[T] { x }
fn test(i: i32) {
let x = match i {
2 => foo(&[2]),
// ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
1 => &[1],
_ => &[3],
};
@ -268,7 +268,7 @@ fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{region error}, Not))
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
}
"#,
);
@ -854,8 +854,8 @@ impl core::cmp::PartialEq for Struct {
}
fn test() {
Struct == Struct;
// ^^^^^^ adjustments: Borrow(Ref('{region error}, Not))
// ^^^^^^ adjustments: Borrow(Ref('{region error}, Not))
// ^^^^^^ adjustments: Borrow(Ref(Not))
// ^^^^^^ adjustments: Borrow(Ref(Not))
}",
);
}
@ -871,7 +871,7 @@ impl core::ops::AddAssign for Struct {
}
fn test() {
Struct += Struct;
// ^^^^^^ adjustments: Borrow(Ref('{region error}, Mut))
// ^^^^^^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: Yes }))
// ^^^^^^ adjustments:
}",
);
@ -885,7 +885,7 @@ fn adjust_index() {
fn test() {
let x = [1, 2, 3];
x[2] = 6;
// ^ adjustments: Borrow(Ref('?0, Mut))
// ^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: No }))
}
",
);
@ -910,11 +910,11 @@ impl core::ops::IndexMut<usize> for StructMut {
}
fn test() {
Struct[0];
// ^^^^^^ adjustments: Borrow(Ref('?0, Not))
// ^^^^^^ adjustments: Borrow(Ref(Not))
StructMut[0];
// ^^^^^^^^^ adjustments: Borrow(Ref('?1, Not))
// ^^^^^^^^^ adjustments: Borrow(Ref(Not))
&mut StructMut[0];
// ^^^^^^^^^ adjustments: Borrow(Ref('?2, Mut))
// ^^^^^^^^^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: No }))
}",
);
}

View file

@ -4,7 +4,7 @@ use hir_def::{DefWithBodyId, ModuleDefId};
use salsa::EventKind;
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB};
use crate::{db::HirDatabase, method_resolution::TraitImpls, test_db::TestDB};
use super::visit_module;
@ -44,7 +44,7 @@ fn foo() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"lang_item",
"crate_lang_items",
@ -131,7 +131,7 @@ fn baz() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"lang_item",
"crate_lang_items",
@ -143,7 +143,7 @@ fn baz() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"infer_shim",
"function_signature_shim",
@ -151,7 +151,7 @@ fn baz() -> i32 {
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
]
"#]],
@ -230,9 +230,9 @@ $0",
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@ -241,7 +241,7 @@ $0",
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
]
"#]],
);
@ -267,9 +267,9 @@ pub struct NewStruct {
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@ -277,7 +277,7 @@ pub struct NewStruct {
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
]
"#]],
);
@ -302,9 +302,9 @@ $0",
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@ -313,7 +313,7 @@ $0",
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
]
"#]],
);
@ -340,9 +340,9 @@ pub enum SomeEnum {
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@ -350,7 +350,7 @@ pub enum SomeEnum {
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
]
"#]],
);
@ -375,9 +375,9 @@ $0",
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@ -386,7 +386,7 @@ $0",
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
]
"#]],
);
@ -410,9 +410,9 @@ fn bar() -> f32 {
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@ -420,7 +420,7 @@ fn bar() -> f32 {
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
]
"#]],
);
@ -449,9 +449,9 @@ $0",
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@ -460,7 +460,7 @@ $0",
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
]
"#]],
);
@ -492,9 +492,9 @@ impl SomeStruct {
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
db.trait_impls_in_crate(module.krate());
TraitImpls::for_crate(&db, module.krate());
},
&[("trait_impls_in_crate_shim", 1)],
&[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@ -502,7 +502,7 @@ impl SomeStruct {
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
"attrs_shim",
"impl_trait_with_diagnostics_shim",
"impl_signature_shim",
@ -585,33 +585,32 @@ fn main() {
"crate_lang_items",
"attrs_shim",
"attrs_shim",
"generic_predicates_shim",
"return_type_impl_traits_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
"trait_environment_shim",
"return_type_impl_traits_shim",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
"generic_predicates_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"value_ty_shim",
"VariantFields::firewall_",
"VariantFields::query_",
"lang_item",
"lang_item",
"inherent_impls_in_crate_shim",
"InherentImpls::for_crate_",
"impl_signature_shim",
"impl_signature_with_source_map_shim",
"callable_item_signature_shim",
"trait_impls_in_deps_shim",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_and_deps_",
"TraitImpls::for_crate_",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"generic_predicates_shim",
"value_ty_shim",
"generic_predicates_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"lang_item",
]
"#]],
);
@ -683,24 +682,24 @@ fn main() {
"attrs_shim",
"attrs_shim",
"attrs_shim",
"generic_predicates_shim",
"return_type_impl_traits_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"infer_shim",
"function_signature_with_source_map_shim",
"return_type_impl_traits_shim",
"ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
"generic_predicates_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"VariantFields::query_",
"inherent_impls_in_crate_shim",
"InherentImpls::for_crate_",
"impl_signature_with_source_map_shim",
"impl_signature_shim",
"callable_item_signature_shim",
"trait_impls_in_crate_shim",
"TraitImpls::for_crate_",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"generic_predicates_shim",
"generic_predicates_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
]
"#]],
);

View file

@ -8,6 +8,7 @@ use super::{check_infer, check_no_mismatches, check_types};
fn infer_slice_method() {
check_types(
r#"
//- /core.rs crate:core
impl<T> [T] {
#[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
@ -27,13 +28,13 @@ fn test(x: &[u8]) {
fn cross_crate_primitive_method() {
check_types(
r#"
//- /main.rs crate:main deps:other_crate
//- /main.rs crate:main deps:core
fn test() {
let x = 1f32;
x.foo();
} //^^^^^^^ f32
//- /lib.rs crate:other_crate
//- /lib.rs crate:core
mod foo {
impl f32 {
#[rustc_allow_incoherent_impl]
@ -48,6 +49,7 @@ mod foo {
fn infer_array_inherent_impl() {
check_types(
r#"
//- /core.rs crate:core
impl<T, const N: usize> [T; N] {
#[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
@ -981,7 +983,6 @@ fn main() {
#[test]
fn method_resolution_overloaded_const() {
cov_mark::check!(const_candidate_self_type_mismatch);
check_types(
r#"
struct Wrapper<T>(T);
@ -1376,7 +1377,6 @@ mod b {
#[test]
fn autoderef_visibility_method() {
cov_mark::check!(autoderef_candidate_not_visible);
check(
r#"
//- minicore: receiver
@ -1415,7 +1415,6 @@ mod b {
#[test]
fn trait_vs_private_inherent_const() {
cov_mark::check!(const_candidate_not_visible);
check(
r#"
mod a {
@ -1505,6 +1504,7 @@ fn f() {
fn resolve_const_generic_array_methods() {
check_types(
r#"
//- /core.rs crate:core
#[lang = "array"]
impl<T, const N: usize> [T; N] {
#[rustc_allow_incoherent_impl]
@ -1536,6 +1536,7 @@ fn f() {
fn resolve_const_generic_method() {
check_types(
r#"
//- /core.rs crate:core
struct Const<const N: usize>;
#[lang = "array"]
@ -1714,8 +1715,8 @@ fn f<S: Sized, T, U: ?Sized>() {
95..103 'u32::foo': fn foo<u32>() -> u8
109..115 'S::foo': fn foo<S>() -> u8
121..127 'T::foo': fn foo<T>() -> u8
133..139 'U::foo': {unknown}
145..157 '<[u32]>::foo': {unknown}
133..139 'U::foo': fn foo<U>() -> u8
145..157 '<[u32]>::foo': fn foo<[u32]>() -> u8
"#]],
);
}
@ -1869,6 +1870,7 @@ fn main() {
"#,
);
}
#[test]
fn receiver_adjustment_autoref() {
check(
@ -1879,9 +1881,9 @@ impl Foo {
}
fn test() {
Foo.foo();
//^^^ adjustments: Borrow(Ref('?0, Not))
//^^^ adjustments: Borrow(Ref(Not))
(&Foo).foo();
// ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not))
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not))
}
"#,
);
@ -1895,7 +1897,7 @@ fn receiver_adjustment_unsize_array() {
fn test() {
let a = [1, 2, 3];
a.len();
} //^ adjustments: Borrow(Ref('?0, Not)), Pointer(Unsize)
} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize)
"#,
);
}
@ -2036,6 +2038,7 @@ fn incoherent_impls() {
check(
r#"
//- minicore: error, send
//- /std.rs crate:std
pub struct Box<T>(T);
use core::error::Error;
@ -2108,7 +2111,7 @@ impl Foo {
}
fn test() {
Box::new(Foo).foo();
//^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?0, Not))
//^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref(Not))
}
"#,
);
@ -2126,7 +2129,7 @@ impl Foo {
use core::mem::ManuallyDrop;
fn test() {
ManuallyDrop::new(Foo).foo();
//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?0, Not))
//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
}
"#,
);
@ -2176,6 +2179,8 @@ fn receiver_without_deref_impl() {
check(
r#"
//- minicore: receiver
#![feature(arbitrary_self_types)]
use core::ops::Receiver;
struct Foo;

View file

@ -6,7 +6,7 @@ use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches
fn infer_pattern() {
check_infer(
r#"
//- minicore: iterator
//- minicore: iterator, add, builtin_impls
fn test(x: &i32) {
let y = x;
let &z = x;

View file

@ -292,7 +292,7 @@ fn infer_std_crash_5() {
149..156 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
181..188 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
191..313 'if ICE... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
194..231 'ICE_RE..._VALUE': bool
194..231 'ICE_RE..._VALUE': {unknown}
194..247 'ICE_RE...&name)': bool
241..246 '&name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
242..246 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
@ -629,7 +629,7 @@ fn issue_4053_diesel_where_clauses() {
65..69 'self': Self
267..271 'self': Self
466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
488..522 '{ ... }': <SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> as BoxedDsl<DB>>::Output
488..522 '{ ... }': {unknown}
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
498..508 'self.order': O
498..515 'self.o...into()': dyn QueryFragment<DB> + 'static
@ -725,7 +725,7 @@ fn issue_4885() {
138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
142..145 'key': &'? K
162..165 'key': &'? K
224..227 '{ }': impl Future<Output = <K as Foo<R>>::Bar>
224..227 '{ }': ()
"#]],
);
}

View file

@ -180,7 +180,7 @@ impl<'a> IntoIterator for &'a Grid {
"#,
expect![[r#"
150..154 'self': &'a Grid
174..181 '{ }': <&'a Grid as IntoIterator>::IntoIter
174..181 '{ }': ()
"#]],
);
}
@ -414,7 +414,7 @@ fn foo() {
244..246 '_x': {unknown}
249..257 'to_bytes': fn to_bytes() -> [u8; _]
249..259 'to_bytes()': [u8; _]
249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item>
249..268 'to_byt..._vec()': {unknown}
"#]],
);
}

View file

@ -645,10 +645,13 @@ impl E {
fn infer_binary_op() {
check_infer(
r#"
//- minicore: add, builtin_impls
fn f(x: bool) -> i32 {
0i32
}
const CONST_2: isize = 0;
fn test() -> bool {
let x = a && b;
let y = true || false;
@ -658,8 +661,9 @@ fn test() -> bool {
let h = minus_forty <= CONST_2;
let c = f(z || y) + 5;
let d = b;
let g = minus_forty ^= i;
let g = minus_forty += i;
let ten: usize = 10;
let some_num = 0usize;
let ten_is_eleven = ten == some_num;
ten < 3
@ -669,53 +673,56 @@ fn test() -> bool {
5..6 'x': bool
21..33 '{ 0i32 }': i32
27..31 '0i32': i32
53..369 '{ ... < 3 }': bool
63..64 'x': bool
67..68 'a': bool
67..73 'a && b': bool
72..73 'b': bool
83..84 'y': bool
87..91 'true': bool
87..100 'true || false': bool
95..100 'false': bool
110..111 'z': bool
114..115 'x': bool
114..120 'x == y': bool
119..120 'y': bool
130..131 't': bool
134..135 'x': bool
134..140 'x != y': bool
139..140 'y': bool
150..161 'minus_forty': isize
171..179 '-40isize': isize
172..179 '40isize': isize
189..190 'h': bool
193..204 'minus_forty': isize
193..215 'minus_...ONST_2': bool
208..215 'CONST_2': isize
225..226 'c': i32
229..230 'f': fn f(bool) -> i32
229..238 'f(z || y)': i32
229..242 'f(z || y) + 5': i32
231..232 'z': bool
231..237 'z || y': bool
236..237 'y': bool
241..242 '5': i32
252..253 'd': {unknown}
256..257 'b': {unknown}
267..268 'g': ()
271..282 'minus_forty': isize
271..287 'minus_...y ^= i': ()
286..287 'i': isize
297..300 'ten': usize
310..312 '10': usize
322..335 'ten_is_eleven': bool
338..341 'ten': usize
338..353 'ten == some_num': bool
345..353 'some_num': usize
360..363 'ten': usize
360..367 'ten < 3': bool
366..367 '3': usize
58..59 '0': isize
80..423 '{ ... < 3 }': bool
90..91 'x': bool
94..95 'a': bool
94..100 'a && b': bool
99..100 'b': bool
110..111 'y': bool
114..118 'true': bool
114..127 'true || false': bool
122..127 'false': bool
137..138 'z': bool
141..142 'x': bool
141..147 'x == y': bool
146..147 'y': bool
157..158 't': bool
161..162 'x': bool
161..167 'x != y': bool
166..167 'y': bool
177..188 'minus_forty': isize
198..206 '-40isize': isize
199..206 '40isize': isize
216..217 'h': bool
220..231 'minus_forty': isize
220..242 'minus_...ONST_2': bool
235..242 'CONST_2': isize
252..253 'c': i32
256..257 'f': fn f(bool) -> i32
256..265 'f(z || y)': i32
256..269 'f(z || y) + 5': i32
258..259 'z': bool
258..264 'z || y': bool
263..264 'y': bool
268..269 '5': i32
279..280 'd': {unknown}
283..284 'b': {unknown}
294..295 'g': ()
298..309 'minus_forty': isize
298..314 'minus_...y += i': ()
313..314 'i': isize
324..327 'ten': usize
337..339 '10': usize
349..357 'some_num': usize
360..366 '0usize': usize
376..389 'ten_is_eleven': bool
392..395 'ten': usize
392..407 'ten == some_num': bool
399..407 'some_num': usize
414..417 'ten': usize
414..421 'ten < 3': bool
420..421 '3': usize
"#]],
);
}
@ -1071,6 +1078,7 @@ fn infer_inherent_method() {
fn infer_inherent_method_str() {
check_infer(
r#"
//- /core.rs crate:core
#![rustc_coherence_is_core]
#[lang = "str"]
impl str {
@ -2691,6 +2699,7 @@ fn inner_use_enum_rename() {
fn box_into_vec() {
check_infer(
r#"
//- /core.rs crate:core
#[lang = "sized"]
pub trait Sized {}

View file

@ -1,4 +1,3 @@
use cov_mark::check;
use expect_test::expect;
use crate::tests::infer_with_mismatches;
@ -278,11 +277,11 @@ pub mod collections {
fn infer_ops_neg() {
check_types(
r#"
//- /main.rs crate:main deps:std
//- minicore:unary_ops
struct Bar;
struct Foo;
impl std::ops::Neg for Bar {
impl core::ops::Neg for Bar {
type Output = Foo;
}
@ -291,15 +290,6 @@ fn test() {
let b = -a;
b;
} //^ Foo
//- /std.rs crate:std
#[prelude_import] use ops::*;
mod ops {
#[lang = "neg"]
pub trait Neg {
type Output;
}
}
"#,
);
}
@ -308,11 +298,11 @@ mod ops {
fn infer_ops_not() {
check_types(
r#"
//- /main.rs crate:main deps:std
//- minicore:unary_ops
struct Bar;
struct Foo;
impl std::ops::Not for Bar {
impl core::ops::Not for Bar {
type Output = Foo;
}
@ -321,15 +311,6 @@ fn test() {
let b = !a;
b;
} //^ Foo
//- /std.rs crate:std
#[prelude_import] use ops::*;
mod ops {
#[lang = "not"]
pub trait Not {
type Output;
}
}
"#,
);
}
@ -1211,7 +1192,7 @@ fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
expect![[r#"
29..33 'self': &'? Self
54..58 'self': &'? Self
98..100 '{}': impl Trait<u64>
98..100 '{}': ()
110..111 'x': impl Trait<u64>
130..131 'y': &'? impl Trait<u64>
151..268 '{ ...2(); }': ()
@ -2982,13 +2963,13 @@ fn test() {
140..146 'IsCopy': IsCopy
140..153 'IsCopy.test()': bool
159..166 'NotCopy': NotCopy
159..173 'NotCopy.test()': {unknown}
159..173 'NotCopy.test()': bool
179..195 '(IsCop...sCopy)': (IsCopy, IsCopy)
179..202 '(IsCop...test()': bool
180..186 'IsCopy': IsCopy
188..194 'IsCopy': IsCopy
208..225 '(IsCop...tCopy)': (IsCopy, NotCopy)
208..232 '(IsCop...test()': {unknown}
208..232 '(IsCop...test()': bool
209..215 'IsCopy': IsCopy
217..224 'NotCopy': NotCopy
"#]],
@ -3081,7 +3062,7 @@ fn test() {
79..194 '{ ...ized }': ()
85..88 '1u8': u8
85..95 '1u8.test()': bool
101..116 '(*"foo").test()': {unknown}
101..116 '(*"foo").test()': bool
102..108 '*"foo"': str
103..108 '"foo"': &'static str
135..145 '(1u8, 1u8)': (u8, u8)
@ -3089,7 +3070,7 @@ fn test() {
136..139 '1u8': u8
141..144 '1u8': u8
158..171 '(1u8, *"foo")': (u8, str)
158..178 '(1u8, ...test()': {unknown}
158..178 '(1u8, ...test()': bool
159..162 '1u8': u8
164..170 '*"foo"': str
165..170 '"foo"': &'static str
@ -3944,7 +3925,6 @@ fn test() {
#[test]
fn foreign_trait_with_local_trait_impl() {
check!(block_local_impls);
check(
r#"
mod module {
@ -3955,15 +3935,16 @@ mod module {
}
fn f() {
struct Foo;
use module::T;
impl T for usize {
impl T for Foo {
const C: usize = 0;
fn f(&self) {}
}
0usize.f();
//^^^^^^^^^^ type: ()
usize::C;
//^^^^^^^^type: usize
Foo.f();
//^^^^^^^ type: ()
Foo::C;
//^^^^^^ type: usize
}
"#,
);
@ -4023,7 +4004,7 @@ fn f<F: Foo>() {
212..295 '{ ...ZED; }': ()
218..239 'F::Exp..._SIZED': Yes
245..266 'F::Imp..._SIZED': Yes
272..292 'F::Rel..._SIZED': {unknown}
272..292 'F::Rel..._SIZED': Yes
"#]],
);
}
@ -4274,7 +4255,7 @@ fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
164..195 '{ ...f(); }': ()
170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
170..184 'v.get::<i32>()': <dyn Trait<Assoc<i32> = &'a i32> + 'static as Trait>::Assoc<i32>
170..184 'v.get::<i32>()': <{unknown} as Trait>::Assoc<i32>
170..192 'v.get:...eref()': {unknown}
"#]],
);

View file

@ -4,13 +4,18 @@ use core::fmt;
use std::hash::Hash;
use base_db::Crate;
use hir_def::{BlockId, TraitId, lang_item::LangItem};
use hir_def::{
AdtId, AssocItemId, BlockId, HasModule, ImplId, Lookup, TraitId,
lang_item::LangItem,
nameres::DefMap,
signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags},
};
use hir_expand::name::Name;
use intern::sym;
use rustc_next_trait_solver::solve::{HasChanged, SolverDelegateEvalExt};
use rustc_type_ir::{
TypingMode,
inherent::{IntoKind, Span as _},
inherent::{AdtDef, BoundExistentialPredicates, IntoKind, Span as _},
solve::Certainty,
};
use triomphe::Arc;
@ -263,3 +268,147 @@ fn implements_trait_unique_impl<'db>(
let result = crate::traits::next_trait_solve_in_ctxt(&infcx, goal);
matches!(result, Ok((_, Certainty::Yes)))
}
pub fn is_inherent_impl_coherent(db: &dyn HirDatabase, def_map: &DefMap, impl_id: ImplId) -> bool {
let self_ty = db.impl_self_ty(impl_id).instantiate_identity();
let self_ty = self_ty.kind();
let impl_allowed = match self_ty {
TyKind::Tuple(_)
| TyKind::FnDef(_, _)
| TyKind::Array(_, _)
| TyKind::Never
| TyKind::RawPtr(_, _)
| TyKind::Ref(_, _, _)
| TyKind::Slice(_)
| TyKind::Str
| TyKind::Bool
| TyKind::Char
| TyKind::Int(_)
| TyKind::Uint(_)
| TyKind::Float(_) => def_map.is_rustc_coherence_is_core(),
TyKind::Adt(adt_def, _) => adt_def.def_id().0.module(db).krate() == def_map.krate(),
TyKind::Dynamic(it, _) => it
.principal_def_id()
.is_some_and(|trait_id| trait_id.0.module(db).krate() == def_map.krate()),
_ => true,
};
impl_allowed || {
let rustc_has_incoherent_inherent_impls = match self_ty {
TyKind::Tuple(_)
| TyKind::FnDef(_, _)
| TyKind::Array(_, _)
| TyKind::Never
| TyKind::RawPtr(_, _)
| TyKind::Ref(_, _, _)
| TyKind::Slice(_)
| TyKind::Str
| TyKind::Bool
| TyKind::Char
| TyKind::Int(_)
| TyKind::Uint(_)
| TyKind::Float(_) => true,
TyKind::Adt(adt_def, _) => match adt_def.def_id().0 {
hir_def::AdtId::StructId(id) => db
.struct_signature(id)
.flags
.contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
hir_def::AdtId::UnionId(id) => db
.union_signature(id)
.flags
.contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
hir_def::AdtId::EnumId(it) => db
.enum_signature(it)
.flags
.contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
},
TyKind::Dynamic(it, _) => it.principal_def_id().is_some_and(|trait_id| {
db.trait_signature(trait_id.0)
.flags
.contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
}),
_ => false,
};
let items = impl_id.impl_items(db);
rustc_has_incoherent_inherent_impls
&& !items.items.is_empty()
&& items.items.iter().all(|&(_, assoc)| match assoc {
AssocItemId::FunctionId(it) => {
db.function_signature(it).flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
}
AssocItemId::ConstId(it) => {
db.const_signature(it).flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
}
AssocItemId::TypeAliasId(it) => db
.type_alias_signature(it)
.flags
.contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL),
})
}
}
/// Checks whether the impl satisfies the orphan rules.
///
/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl` is valid only if at least one of the following is true:
/// - Trait is a local trait
/// - All of
///   - At least one of the types `T0..=Tn` must be a local type. Let `Ti` be the first such type.
///   - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti` (excluding `Ti`)
pub fn check_orphan_rules<'db>(db: &'db dyn HirDatabase, impl_: ImplId) -> bool {
    let Some(impl_trait) = db.impl_trait(impl_) else {
        // Inherent impls are not subject to orphan rules.
        return true;
    };

    let local_crate = impl_.lookup(db).container.krate();
    let is_local = |tgt_crate| tgt_crate == local_crate;

    let trait_ref = impl_trait.instantiate_identity();
    if is_local(trait_ref.def_id.0.module(db).krate()) {
        // The implemented trait is local: always allowed.
        return true;
    }

    // Peel references and `#[fundamental]` ADTs down to the first
    // non-fundamental type (following the first generic argument at each step).
    let peel_fundamental = |ty: Ty<'db>| {
        let mut current = ty;
        loop {
            current = match current.kind() {
                TyKind::Ref(_, pointee, _) => pointee,
                TyKind::Adt(adt_def, args) => {
                    let AdtId::StructId(strukt) = adt_def.def_id().0 else {
                        return current;
                    };
                    if !db.struct_signature(strukt).flags.contains(StructFlags::FUNDAMENTAL) {
                        return current;
                    }
                    match args.types().next() {
                        Some(inner) => inner,
                        None => return current,
                    }
                }
                _ => return current,
            };
        }
    };

    // - At least one of the types `T0..=Tn` must be a local type. Let `Ti` be the first such type.
    // FIXME: param coverage
    //   - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti` (excluding `Ti`)
    trait_ref.args.types().any(|ty| match peel_fundamental(ty).kind() {
        TyKind::Adt(adt_def, _) => is_local(adt_def.def_id().0.module(db).krate()),
        // Error types are treated as local to suppress cascading diagnostics.
        TyKind::Error(_) => true,
        TyKind::Dynamic(preds, _) => preds
            .principal_def_id()
            .is_some_and(|trait_id| is_local(trait_id.0.module(db).krate())),
        _ => false,
    })
}

View file

@ -1,7 +1,5 @@
//! Attributes & documentation for hir types.
use std::ops::ControlFlow;
use hir_def::{
AssocItemId, AttrDefId, ModuleDefId,
attr::AttrsWithOwner,
@ -14,7 +12,13 @@ use hir_expand::{
mod_path::{ModPath, PathKind},
name::Name,
};
use hir_ty::{db::HirDatabase, method_resolution};
use hir_ty::{
db::HirDatabase,
method_resolution::{
self, CandidateId, MethodError, MethodResolutionContext, MethodResolutionUnstableFeatures,
},
next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt},
};
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@ -242,7 +246,7 @@ fn resolve_assoc_item<'db>(
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
ty.iterate_assoc_items(db, ty.krate(db), move |assoc_item| {
ty.iterate_assoc_items(db, move |assoc_item| {
if assoc_item.name(db)? != *name {
return None;
}
@ -257,37 +261,39 @@ fn resolve_impl_trait_item<'db>(
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let canonical = ty.canonical(db);
let krate = ty.krate(db);
let environment = resolver
.generic_def()
.map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
let traits_in_scope = resolver.traits_in_scope(db);
let mut result = None;
// `ty.iterate_path_candidates()` require a scope, which is not available when resolving
// attributes here. Use path resolution directly instead.
//
// FIXME: resolve type aliases (which are not yielded by iterate_path_candidates)
_ = method_resolution::iterate_path_candidates(
&canonical,
db,
environment,
&traits_in_scope,
method_resolution::VisibleFromModule::None,
Some(name),
&mut |_, assoc_item_id: AssocItemId, _| {
// If two traits in scope define the same item, Rustdoc links to no specific trait (for
// instance, given two methods `a`, Rustdoc simply links to `method.a` with no
// disambiguation) so we just pick the first one we find as well.
result = as_module_def_if_namespace_matches(assoc_item_id.into(), ns);
if result.is_some() { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
},
);
result
let interner = DbInterner::new_with(db, Some(environment.krate), environment.block);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let unstable_features =
MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map());
let ctx = MethodResolutionContext {
infcx: &infcx,
resolver: &resolver,
env: &environment,
traits_in_scope: &traits_in_scope,
edition: krate.edition(db),
unstable_features: &unstable_features,
};
let resolution = ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), ty.ty);
let resolution = match resolution {
Ok(resolution) => resolution.item,
Err(MethodError::PrivateMatch(resolution)) => resolution.item,
_ => return None,
};
let resolution = match resolution {
CandidateId::FunctionId(id) => AssocItem::Function(id.into()),
CandidateId::ConstId(id) => AssocItem::Const(id.into()),
};
as_module_def_if_namespace_matches(resolution, ns)
}
fn resolve_field(

View file

@ -11,6 +11,7 @@ use hir_def::{
type_ref::{TypeBound, TypeRef, TypeRefId},
};
use hir_ty::{
GenericPredicates,
db::HirDatabase,
display::{
HirDisplay, HirDisplayError, HirDisplayWithExpressionStore, HirFormatter, SizedByDefault,
@ -484,11 +485,9 @@ impl<'db> HirDisplay<'db> for TypeParam {
let param_data = &params[self.id.local_id()];
let krate = self.id.parent().krate(f.db).id;
let ty = self.ty(f.db).ty;
let predicates = f.db.generic_predicates(self.id.parent());
let predicates = GenericPredicates::query_all(f.db, self.id.parent());
let predicates = predicates
.instantiate_identity()
.into_iter()
.flatten()
.iter_identity_copied()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == ty,
ClauseKind::Projection(proj) => proj.self_ty() == ty,

View file

@ -72,26 +72,28 @@ use hir_expand::{
proc_macro::ProcMacroKind,
};
use hir_ty::{
TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, all_super_traits, autoderef,
check_orphan_rules,
GenericPredicates, TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId,
all_super_traits, autoderef, check_orphan_rules,
consteval::try_const_usize,
db::{InternedClosureId, InternedCoroutineId},
diagnostics::BodyValidationDiagnostic,
direct_super_traits, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution,
method_resolution::{
self, InherentImpls, MethodResolutionContext, MethodResolutionUnstableFeatures,
},
mir::{MutBorrowKind, interpret_mir},
next_solver::{
AliasTy, Canonical, ClauseKind, ConstKind, DbInterner, ErrorGuaranteed, GenericArg,
GenericArgs, PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode,
AliasTy, ClauseKind, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, InferCtxt},
},
traits::{self, FnTrait, structurally_normalize_ty},
traits::{self, FnTrait, is_inherent_impl_coherent, structurally_normalize_ty},
};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor,
AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor, fast_reject,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _},
};
use smallvec::SmallVec;
@ -168,11 +170,12 @@ pub use {
drop::DropGlue,
dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode},
layout::LayoutError,
method_resolution::TyFingerprint,
mir::{MirEvalError, MirLowerError},
next_solver::abi::Safety,
next_solver::clear_tls_solver_cache,
},
// FIXME: These are needed for import assets, properly encapsulate them.
hir_ty::{method_resolution::TraitImpls, next_solver::SimplifiedType},
intern::{Symbol, sym},
};
@ -751,8 +754,6 @@ impl Module {
}
self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m));
let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
let interner = DbInterner::new_with(db, Some(self.id.krate()), self.id.containing_block());
let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
@ -781,7 +782,9 @@ impl Module {
emit_def_diagnostic(db, acc, diag, edition);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
if impl_signature.target_trait.is_none()
&& !is_inherent_impl_coherent(db, def_map, impl_def.id)
{
acc.push(IncoherentImpl { impl_: ast_id_map.get(loc.id.value), file_id }.into())
}
@ -3347,6 +3350,15 @@ pub enum AssocItem {
TypeAlias(TypeAlias),
}
/// Method-resolution candidates are always functions or associated consts
/// (never type aliases), so this conversion into `AssocItem` is total over
/// `CandidateId`'s variants.
impl From<method_resolution::CandidateId> for AssocItem {
    fn from(value: method_resolution::CandidateId) -> Self {
        match value {
            method_resolution::CandidateId::FunctionId(id) => AssocItem::Function(Function { id }),
            method_resolution::CandidateId::ConstId(id) => AssocItem::Const(Const { id }),
        }
    }
}
#[derive(Debug, Clone)]
pub enum AssocItemContainer {
Trait(Trait),
@ -3698,7 +3710,7 @@ impl GenericDef {
push_ty_diagnostics(
db,
acc,
db.generic_predicates_without_parent_with_diagnostics(def).1,
GenericPredicates::query_with_diagnostics(db, def).1.clone(),
&source_map,
);
for (param_id, param) in generics.iter_type_or_consts() {
@ -4193,10 +4205,13 @@ impl TypeParam {
/// parameter, not additional bounds that might be added e.g. by a method if
/// the parameter comes from an impl!
pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
.iter()
let self_ty = self.ty(db).ty;
GenericPredicates::query_explicit(db, self.id.parent())
.iter_identity_copied()
.filter_map(|pred| match &pred.kind().skip_binder() {
ClauseKind::Trait(trait_ref) => Some(Trait::from(trait_ref.def_id().0)),
ClauseKind::Trait(trait_ref) if trait_ref.self_ty() == self_ty => {
Some(Trait::from(trait_ref.def_id().0))
}
_ => None,
})
.collect()
@ -4358,90 +4373,81 @@ pub struct Impl {
impl Impl {
pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> {
let inherent = db.inherent_impls_in_crate(krate.id);
let trait_ = db.trait_impls_in_crate(krate.id);
let mut result = Vec::new();
extend_with_def_map(db, crate_def_map(db, krate.id), &mut result);
return result;
inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
fn extend_with_def_map(db: &dyn HirDatabase, def_map: &DefMap, result: &mut Vec<Impl>) {
for (_, module) in def_map.modules() {
result.extend(module.scope.impls().map(Impl::from));
for unnamed_const in module.scope.unnamed_consts() {
for (_, block_def_map) in db.body(unnamed_const.into()).blocks(db) {
extend_with_def_map(db, block_def_map, result);
}
}
}
}
}
pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec<Impl> {
module.id.def_map(db)[module.id.local_id].scope.impls().map(Into::into).collect()
}
/// **Note:** This is an **approximation** that strives to give the *human-perceived notion* of an "impl for type",
/// **not** answer the technical question "what are all impls applying to this type". In particular, it excludes
/// blanket impls, and only does a shallow type constructor check. In fact, this should've probably been on `Adt`
/// etc., and not on `Type`. If you would want to create a precise list of all impls applying to a type,
/// you would need to include blanket impls, and try to prove to predicates for each candidate.
pub fn all_for_type<'db>(db: &'db dyn HirDatabase, Type { ty, env }: Type<'db>) -> Vec<Impl> {
let def_crates = match method_resolution::def_crates(db, ty, env.krate) {
Some(def_crates) => def_crates,
None => return Vec::new(),
let mut result = Vec::new();
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let Some(simplified_ty) =
fast_reject::simplify_type(interner, ty, fast_reject::TreatParams::AsRigid)
else {
return Vec::new();
};
let filter = |impl_def: &Impl| {
let self_ty = impl_def.self_ty(db);
let rref = self_ty.remove_ref();
ty.equals_ctor(rref.as_ref().map_or(self_ty.ty, |it| it.ty))
};
let fp = TyFingerprint::for_inherent_impl(ty);
let fp = match fp {
Some(fp) => fp,
None => return Vec::new(),
};
let mut all = Vec::new();
def_crates.iter().for_each(|&id| {
all.extend(
db.inherent_impls_in_crate(id)
.for_self_ty(ty)
.iter()
.cloned()
.map(Self::from)
.filter(filter),
)
});
for id in def_crates
.iter()
.flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
.map(|Crate { id }| id)
{
all.extend(
db.trait_impls_in_crate(id)
.for_self_ty_without_blanket_impls(fp)
.map(Self::from)
.filter(filter),
let mut extend_with_impls =
|impls: &[ImplId]| result.extend(impls.iter().copied().map(Impl::from));
extend_with_impls(method_resolution::incoherent_inherent_impls(db, simplified_ty));
if let Some(module) = method_resolution::simplified_type_module(db, &simplified_ty) {
InherentImpls::for_each_crate_and_block(
db,
module.krate(),
module.containing_block(),
&mut |impls| extend_with_impls(impls.for_self_ty(&simplified_ty)),
);
}
if let Some(block) = ty.as_adt().and_then(|(def, _)| def.module(db).containing_block()) {
if let Some(inherent_impls) = db.inherent_impls_in_block(block) {
all.extend(
inherent_impls.for_self_ty(ty).iter().cloned().map(Self::from).filter(filter),
);
std::iter::successors(module.containing_block(), |block| {
block.loc(db).module.containing_block()
})
.filter_map(|block| TraitImpls::for_block(db, block).as_deref())
.for_each(|impls| impls.for_self_ty(&simplified_ty, &mut extend_with_impls));
for &krate in &**db.all_crates() {
TraitImpls::for_crate(db, krate)
.for_self_ty(&simplified_ty, &mut extend_with_impls);
}
if let Some(trait_impls) = db.trait_impls_in_block(block) {
all.extend(
trait_impls
.for_self_ty_without_blanket_impls(fp)
.map(Self::from)
.filter(filter),
);
} else {
for &krate in &**db.all_crates() {
TraitImpls::for_crate(db, krate)
.for_self_ty(&simplified_ty, &mut extend_with_impls);
}
}
all
result
}
pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
let module = trait_.module(db);
let krate = module.krate();
let module = trait_.module(db).id;
let mut all = Vec::new();
for Crate { id } in krate.transitive_reverse_dependencies(db) {
let impls = db.trait_impls_in_crate(id);
all.extend(impls.for_trait(trait_.id).map(Self::from))
let mut handle_impls = |impls: &TraitImpls| {
impls.for_trait(trait_.id, |impls| all.extend(impls.iter().copied().map(Impl::from)));
};
for krate in db.transitive_rev_deps(module.krate()) {
handle_impls(TraitImpls::for_crate(db, krate));
}
if let Some(block) = module.id.containing_block()
&& let Some(trait_impls) = db.trait_impls_in_block(block)
if let Some(block) = module.containing_block()
&& let Some(impls) = TraitImpls::for_block(db, block)
{
all.extend(trait_impls.for_trait(trait_.id).map(Self::from));
handle_impls(impls);
}
all
}
@ -5262,13 +5268,12 @@ impl<'db> Type<'db> {
}
}
pub fn fingerprint_for_trait_impl(&self) -> Option<TyFingerprint> {
TyFingerprint::for_trait_impl(self.ty)
}
pub(crate) fn canonical(&self, db: &'db dyn HirDatabase) -> Canonical<'db, Ty<'db>> {
let interner = DbInterner::new_with(db, None, None);
hir_ty::replace_errors_with_variables(interner, &self.ty)
pub fn fingerprint_for_trait_impl(&self) -> Option<SimplifiedType> {
fast_reject::simplify_type(
DbInterner::conjure(),
self.ty,
fast_reject::TreatParams::AsRigid,
)
}
/// Returns types that this type dereferences to (including this type itself). The returned
@ -5292,11 +5297,10 @@ impl<'db> Type<'db> {
pub fn iterate_assoc_items<T>(
&self,
db: &'db dyn HirDatabase,
krate: Crate,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
let mut slot = None;
self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| {
self.iterate_assoc_items_dyn(db, &mut |assoc_item_id| {
slot = callback(assoc_item_id.into());
slot.is_some()
});
@ -5306,24 +5310,36 @@ impl<'db> Type<'db> {
fn iterate_assoc_items_dyn(
&self,
db: &'db dyn HirDatabase,
krate: Crate,
callback: &mut dyn FnMut(AssocItemId) -> bool,
) {
let ty_ns = self.ty;
let def_crates = match method_resolution::def_crates(db, ty_ns, krate.id) {
Some(it) => it,
None => return,
};
for krate in def_crates {
let impls = db.inherent_impls_in_crate(krate);
for impl_def in impls.for_self_ty(ty_ns) {
let mut handle_impls = |impls: &[ImplId]| {
for &impl_def in impls {
for &(_, item) in impl_def.impl_items(db).items.iter() {
if callback(item) {
return;
}
}
}
};
let interner = DbInterner::new_with(db, None, None);
let Some(simplified_type) =
fast_reject::simplify_type(interner, self.ty, fast_reject::TreatParams::AsRigid)
else {
return;
};
handle_impls(method_resolution::incoherent_inherent_impls(db, simplified_type));
if let Some(module) = method_resolution::simplified_type_module(db, &simplified_type) {
InherentImpls::for_each_crate_and_block(
db,
module.krate(),
module.containing_block(),
&mut |impls| {
handle_impls(impls.for_self_ty(&simplified_type));
},
);
}
}
@ -5414,26 +5430,20 @@ impl<'db> Type<'db> {
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
let _p = tracing::info_span!("iterate_method_candidates_with_traits").entered();
let mut slot = None;
self.iterate_method_candidates_split_inherent(
db,
scope,
traits_in_scope,
with_local_impls,
name,
|f| match callback(f) {
self.iterate_method_candidates_split_inherent(db, scope, traits_in_scope, name, |f| {
match callback(f) {
it @ Some(_) => {
slot = it;
ControlFlow::Break(())
}
None => ControlFlow::Continue(()),
},
);
}
});
slot
}
@ -5441,7 +5451,6 @@ impl<'db> Type<'db> {
&self,
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
with_local_impls: Option<Module>,
name: Option<&Name>,
callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
@ -5449,12 +5458,37 @@ impl<'db> Type<'db> {
db,
scope,
&scope.visible_traits().0,
with_local_impls,
name,
callback,
)
}
fn with_method_resolution<R>(
&self,
db: &'db dyn HirDatabase,
resolver: &Resolver<'db>,
traits_in_scope: &FxHashSet<TraitId>,
f: impl FnOnce(&MethodResolutionContext<'_, 'db>) -> R,
) -> R {
let module = resolver.module();
let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let unstable_features =
MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map());
let environment = resolver
.generic_def()
.map_or_else(|| TraitEnvironment::empty(module.krate()), |d| db.trait_environment(d));
let ctx = MethodResolutionContext {
infcx: &infcx,
resolver,
env: &environment,
traits_in_scope,
edition: resolver.krate().data(db).edition,
unstable_features: &unstable_features,
};
f(&ctx)
}
/// Allows you to treat inherent and non-inherent methods differently.
///
/// Note that inherent methods may actually be trait methods! For example, in `dyn Trait`, the trait's methods
@ -5464,67 +5498,77 @@ impl<'db> Type<'db> {
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
name: Option<&Name>,
callback: impl MethodCandidateCallback,
mut callback: impl MethodCandidateCallback,
) {
struct Callback<T>(T);
impl<T: MethodCandidateCallback> method_resolution::MethodCandidateCallback for Callback<T> {
fn on_inherent_method(
&mut self,
_adjustments: method_resolution::ReceiverAdjustments,
item: AssocItemId,
_is_visible: bool,
) -> ControlFlow<()> {
if let AssocItemId::FunctionId(func) = item {
self.0.on_inherent_method(func.into())
} else {
ControlFlow::Continue(())
}
}
fn on_trait_method(
&mut self,
_adjustments: method_resolution::ReceiverAdjustments,
item: AssocItemId,
_is_visible: bool,
) -> ControlFlow<()> {
if let AssocItemId::FunctionId(func) = item {
self.0.on_trait_method(func.into())
} else {
ControlFlow::Continue(())
}
}
}
let _p = tracing::info_span!(
"iterate_method_candidates_dyn",
with_local_impls = traits_in_scope.len(),
"iterate_method_candidates_split_inherent",
traits_in_scope = traits_in_scope.len(),
?name,
)
.entered();
let interner = DbInterner::new_with(db, None, None);
// There should be no inference vars in types passed here
let canonical = hir_ty::replace_errors_with_variables(interner, &self.ty);
let krate = scope.krate();
let environment = scope
.resolver()
.generic_def()
.map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
self.with_method_resolution(db, scope.resolver(), traits_in_scope, |ctx| {
// There should be no inference vars in types passed here
let canonical = hir_ty::replace_errors_with_variables(ctx.infcx.interner, &self.ty);
let (self_ty, _) = ctx.infcx.instantiate_canonical(&canonical);
_ = method_resolution::iterate_method_candidates_dyn(
&canonical,
db,
environment,
traits_in_scope,
with_local_impls.and_then(|b| b.id.containing_block()).into(),
name,
method_resolution::LookupMode::MethodCall,
&mut Callback(callback),
);
match name {
Some(name) => {
match ctx.probe_for_name(
method_resolution::Mode::MethodCall,
name.clone(),
self_ty,
) {
Ok(candidate)
| Err(method_resolution::MethodError::PrivateMatch(candidate)) => {
let method_resolution::CandidateId::FunctionId(id) = candidate.item
else {
unreachable!("`Mode::MethodCall` can only return functions");
};
let id = Function { id };
match candidate.kind {
method_resolution::PickKind::InherentImplPick(_)
| method_resolution::PickKind::ObjectPick(..)
| method_resolution::PickKind::WhereClausePick(..) => {
// Candidates from where clauses and trait objects are considered inherent.
_ = callback.on_inherent_method(id);
}
method_resolution::PickKind::TraitPick(..) => {
_ = callback.on_trait_method(id);
}
}
}
Err(_) => {}
};
}
None => {
_ = ctx.probe_all(method_resolution::Mode::MethodCall, self_ty).try_for_each(
|candidate| {
let method_resolution::CandidateId::FunctionId(id) =
candidate.candidate.item
else {
unreachable!("`Mode::MethodCall` can only return functions");
};
let id = Function { id };
match candidate.candidate.kind {
method_resolution::CandidateKind::InherentImplCandidate {
..
}
| method_resolution::CandidateKind::ObjectCandidate(..)
| method_resolution::CandidateKind::WhereClauseCandidate(..) => {
// Candidates from where clauses and trait objects are considered inherent.
callback.on_inherent_method(id)
}
method_resolution::CandidateKind::TraitCandidate(..) => {
callback.on_trait_method(id)
}
}
},
);
}
}
})
}
#[tracing::instrument(skip_all, fields(name = ?name))]
@ -5533,27 +5577,21 @@ impl<'db> Type<'db> {
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
let _p = tracing::info_span!("iterate_path_candidates").entered();
let mut slot = None;
self.iterate_path_candidates_split_inherent(
db,
scope,
traits_in_scope,
with_local_impls,
name,
|item| match callback(item) {
self.iterate_path_candidates_split_inherent(db, scope, traits_in_scope, name, |item| {
match callback(item) {
it @ Some(_) => {
slot = it;
ControlFlow::Break(())
}
None => ControlFlow::Continue(()),
},
);
}
});
slot
}
@ -5568,50 +5606,68 @@ impl<'db> Type<'db> {
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
with_local_impls: Option<Module>,
name: Option<&Name>,
callback: impl PathCandidateCallback,
mut callback: impl PathCandidateCallback,
) {
struct Callback<T>(T);
let _p = tracing::info_span!(
"iterate_path_candidates_split_inherent",
traits_in_scope = traits_in_scope.len(),
?name,
)
.entered();
impl<T: PathCandidateCallback> method_resolution::MethodCandidateCallback for Callback<T> {
fn on_inherent_method(
&mut self,
_adjustments: method_resolution::ReceiverAdjustments,
item: AssocItemId,
_is_visible: bool,
) -> ControlFlow<()> {
self.0.on_inherent_item(item.into())
self.with_method_resolution(db, scope.resolver(), traits_in_scope, |ctx| {
// There should be no inference vars in types passed here
let canonical = hir_ty::replace_errors_with_variables(ctx.infcx.interner, &self.ty);
let (self_ty, _) = ctx.infcx.instantiate_canonical(&canonical);
match name {
Some(name) => {
match ctx.probe_for_name(
method_resolution::Mode::MethodCall,
name.clone(),
self_ty,
) {
Ok(candidate)
| Err(method_resolution::MethodError::PrivateMatch(candidate)) => {
let id = candidate.item.into();
match candidate.kind {
method_resolution::PickKind::InherentImplPick(_)
| method_resolution::PickKind::ObjectPick(..)
| method_resolution::PickKind::WhereClausePick(..) => {
// Candidates from where clauses and trait objects are considered inherent.
_ = callback.on_inherent_item(id);
}
method_resolution::PickKind::TraitPick(..) => {
_ = callback.on_trait_item(id);
}
}
}
Err(_) => {}
};
}
None => {
_ = ctx.probe_all(method_resolution::Mode::Path, self_ty).try_for_each(
|candidate| {
let id = candidate.candidate.item.into();
match candidate.candidate.kind {
method_resolution::CandidateKind::InherentImplCandidate {
..
}
| method_resolution::CandidateKind::ObjectCandidate(..)
| method_resolution::CandidateKind::WhereClauseCandidate(..) => {
// Candidates from where clauses and trait objects are considered inherent.
callback.on_inherent_item(id)
}
method_resolution::CandidateKind::TraitCandidate(..) => {
callback.on_trait_item(id)
}
}
},
);
}
}
fn on_trait_method(
&mut self,
_adjustments: method_resolution::ReceiverAdjustments,
item: AssocItemId,
_is_visible: bool,
) -> ControlFlow<()> {
self.0.on_trait_item(item.into())
}
}
let interner = DbInterner::new_with(db, None, None);
let canonical = hir_ty::replace_errors_with_variables(interner, &self.ty);
let krate = scope.krate();
let environment = scope
.resolver()
.generic_def()
.map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
_ = method_resolution::iterate_path_candidates(
&canonical,
db,
environment,
traits_in_scope,
with_local_impls.and_then(|b| b.id.containing_block()).into(),
name,
&mut Callback(callback),
);
})
}
pub fn as_adt(&self) -> Option<Adt> {

View file

@ -1581,9 +1581,9 @@ impl<'db> SemanticsImpl<'db> {
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
}
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(_, m)) => {
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
// FIXME: Handle lifetimes here
Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
Adjust::Borrow(AutoBorrow::Ref(mutability(m.into())))
}
hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
};

View file

@ -35,7 +35,7 @@ use hir_ty::{
unsafe_operations,
},
lang_items::lang_items_for_bin_op,
method_resolution,
method_resolution::{self, CandidateId},
next_solver::{
DbInterner, ErrorGuaranteed, GenericArgs, Ty, TyKind, TypingMode, infer::DbInternerInferExt,
},
@ -651,8 +651,9 @@ impl<'db> SourceAnalyzer<'db> {
let lhs = self.ty_of_expr(binop_expr.lhs()?)?;
let rhs = self.ty_of_expr(binop_expr.rhs()?)?;
let (_op_trait, op_fn) = lang_items_for_bin_op(op)
.and_then(|(name, lang_item)| self.lang_trait_fn(db, lang_item, &name))?;
let (_op_trait, op_fn) = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
self.lang_trait_fn(db, lang_item, &Name::new_symbol_root(name))
})?;
// HACK: subst for `index()` coincides with that for `Index` because `index()` itself
// doesn't have any generic parameters, so we skip building another subst for `index()`.
let interner = DbInterner::new_with(db, None, None);
@ -861,7 +862,7 @@ impl<'db> SourceAnalyzer<'db> {
let expr_id = self.expr_id(path_expr.into())?;
if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_id) {
let (assoc, subst) = match assoc {
AssocItemId::FunctionId(f_in_trait) => {
CandidateId::FunctionId(f_in_trait) => {
match infer.type_of_expr_or_pat(expr_id) {
None => {
let subst = GenericSubstitution::new(
@ -869,7 +870,7 @@ impl<'db> SourceAnalyzer<'db> {
subs,
self.trait_environment(db),
);
(assoc, subst)
(AssocItemId::from(f_in_trait), subst)
}
Some(func_ty) => {
if let TyKind::FnDef(_fn_def, subs) = func_ty.kind() {
@ -889,12 +890,12 @@ impl<'db> SourceAnalyzer<'db> {
subs,
self.trait_environment(db),
);
(assoc, subst)
(f_in_trait.into(), subst)
}
}
}
}
AssocItemId::ConstId(const_id) => {
CandidateId::ConstId(const_id) => {
let (konst, subst) =
self.resolve_impl_const_or_trait_def_with_subst(db, const_id, subs);
let subst = GenericSubstitution::new(
@ -904,14 +905,6 @@ impl<'db> SourceAnalyzer<'db> {
);
(konst.into(), subst)
}
AssocItemId::TypeAliasId(type_alias) => (
assoc,
GenericSubstitution::new(
type_alias.into(),
subs,
self.trait_environment(db),
),
),
};
return Some((PathResolution::Def(AssocItem::from(assoc).into()), Some(subst)));
@ -927,7 +920,7 @@ impl<'db> SourceAnalyzer<'db> {
if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_or_pat_id)
{
let (assoc, subst) = match assoc {
AssocItemId::ConstId(const_id) => {
CandidateId::ConstId(const_id) => {
let (konst, subst) =
self.resolve_impl_const_or_trait_def_with_subst(db, const_id, subs);
let subst = GenericSubstitution::new(
@ -935,12 +928,12 @@ impl<'db> SourceAnalyzer<'db> {
subst,
self.trait_environment(db),
);
(konst.into(), subst)
(AssocItemId::from(konst), subst)
}
assoc => (
assoc,
CandidateId::FunctionId(function_id) => (
function_id.into(),
GenericSubstitution::new(
assoc.into(),
function_id.into(),
subs,
self.trait_environment(db),
),

View file

@ -129,7 +129,7 @@ fn is_ref_and_impls_iter_method(
let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
let has_wanted_method = ty
.iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| {
.iterate_method_candidates(sema.db, &scope, Some(&wanted_method), |func| {
if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
return Some(());
}

View file

@ -165,7 +165,7 @@ fn is_ref_and_impls_iter_method(
let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
let has_wanted_method = ty
.iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| {
.iterate_method_candidates(sema.db, &scope, Some(&wanted_method), |func| {
if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
return Some(());
}

View file

@ -4250,7 +4250,7 @@ fn $0fun_name() -> Result<i32, i64> {
check_assist(
extract_function,
r#"
//- minicore: option
//- minicore: option, add, builtin_impls
fn bar() -> Option<i32> { None }
fn foo() -> Option<()> {
let n = bar()?;
@ -4314,7 +4314,7 @@ fn $0fun_name() -> Option<()> {
check_assist(
extract_function,
r#"
//- minicore: result
//- minicore: result, add, builtin_impls
fn foo() -> Result<(), i64> {
let n = 1;
$0let k = foo()?;
@ -4345,7 +4345,7 @@ fn $0fun_name() -> Result<i32, i64> {
check_assist(
extract_function,
r#"
//- minicore: option
//- minicore: option, add, builtin_impls
fn foo() -> Option<()> {
let n = 1;
$0let k = foo()?;
@ -4382,7 +4382,7 @@ fn $0fun_name() -> Option<i32> {
check_assist(
extract_function,
r#"
//- minicore: result
//- minicore: result, add, builtin_impls
fn foo() -> Result<(), i64> {
let n = 1;
$0let k = foo()?;
@ -4441,7 +4441,7 @@ fn foo() -> Option<()> {
check_assist(
extract_function,
r#"
//- minicore: result
//- minicore: result, add, builtin_impls
fn foo() -> Result<(), i64> {
let n = 1;
$0let k = foo()?;

View file

@ -2189,7 +2189,7 @@ fn foo(s: &S) {
//- minicore: index
struct X;
impl std::ops::Index<usize> for X {
impl core::ops::Index<usize> for X {
type Output = i32;
fn index(&self) -> &Self::Output { 0 }
}
@ -2204,7 +2204,7 @@ fn foo(s: &S) {
r#"
struct X;
impl std::ops::Index<usize> for X {
impl core::ops::Index<usize> for X {
type Output = i32;
fn index(&self) -> &Self::Output { 0 }
}
@ -2214,8 +2214,8 @@ struct S {
}
fn foo(s: &S) {
let $0sub = &s.sub;
sub[0];
let $0x = &s.sub;
x[0];
}"#,
"Extract into variable",
);

View file

@ -1,4 +1,4 @@
use hir::{HasCrate, HasVisibility};
use hir::HasVisibility;
use ide_db::{FxHashSet, path_transform::PathTransform};
use syntax::{
ast::{
@ -79,8 +79,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let mut seen_names = FxHashSet::default();
for ty in sema_field_ty.autoderef(ctx.db()) {
let krate = ty.krate(ctx.db());
ty.iterate_assoc_items(ctx.db(), krate, |item| {
ty.iterate_assoc_items(ctx.db(), |item| {
if let hir::AssocItem::Function(f) = item {
let name = f.name(ctx.db());
if f.self_param(ctx.db()).is_some()

View file

@ -95,7 +95,7 @@ fn get_impl_method(
let scope = ctx.sema.scope(impl_.syntax())?;
let ty = impl_def.self_ty(db);
ty.iterate_method_candidates(db, &scope, None, Some(fn_name), Some)
ty.iterate_method_candidates(db, &scope, Some(fn_name), Some)
}
#[cfg(test)]

View file

@ -1,4 +1,4 @@
use hir::{AsAssocItem, AssocItemContainer, FileRange, HasCrate, HasSource};
use hir::{AsAssocItem, AssocItemContainer, FileRange, HasSource};
use ide_db::{assists::AssistId, defs::Definition, search::SearchScope};
use syntax::{
SyntaxKind,
@ -70,7 +70,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let ty = impl_.self_ty(db);
// If there exists another associated item with the same name, skip the assist.
if ty
.iterate_assoc_items(db, ty.krate(db), |assoc| {
.iterate_assoc_items(db, |assoc| {
// Type aliases wouldn't conflict due to different namespaces, but we're only checking
// the items in inherent impls, so we assume `assoc` is never type alias for the sake
// of brevity (inherent associated types exist in nightly Rust, but it's *very*

View file

@ -240,12 +240,12 @@ fn main() {
replace_arith_with_wrapping,
r#"
fn main() {
let x = 1*x $0+ 2;
let x = 1*3 $0+ 2;
}
"#,
r#"
fn main() {
let x = (1*x).wrapping_add(2);
let x = (1*3).wrapping_add(2);
}
"#,
)

View file

@ -45,7 +45,6 @@ pub(crate) fn replace_with_lazy_method(acc: &mut Assists, ctx: &AssistContext<'_
&scope,
&scope.visible_traits().0,
None,
None,
|func| {
let valid = func.name(ctx.sema.db).as_str() == &*method_name_lazy
&& func.num_params(ctx.sema.db) == n_params
@ -127,7 +126,6 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'
&scope,
&scope.visible_traits().0,
None,
None,
|func| {
let valid = func.name(ctx.sema.db).as_str() == method_name_eager
&& func.num_params(ctx.sema.db) == n_params;

View file

@ -270,7 +270,6 @@ fn complete_methods(
ctx.db,
&ctx.scope,
traits_in_scope,
Some(ctx.module),
None,
Callback { ctx, f, seen_methods: FxHashSet::default() },
);
@ -597,7 +596,6 @@ fn foo(a: A) {
}
"#,
expect![[r#"
me local_method() fn(&self)
me pub_module_method() fn(&self)
"#]],
);
@ -1526,6 +1524,8 @@ async fn bar() {
check_no_kw(
r#"
//- minicore: receiver
#![feature(arbitrary_self_types)]
use core::ops::Receiver;
struct Foo;

View file

@ -126,13 +126,12 @@ pub(crate) fn complete_expr_path(
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
Some(ctx.module),
None,
PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() },
);
// Iterate assoc types separately
ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
@ -196,13 +195,12 @@ pub(crate) fn complete_expr_path(
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
Some(ctx.module),
None,
PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() },
);
// Iterate assoc types separately
ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
@ -232,7 +230,6 @@ pub(crate) fn complete_expr_path(
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
Some(ctx.module),
None,
PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() },
);

View file

@ -67,7 +67,7 @@ pub(crate) fn complete_type_path(
});
// Iterate assoc types separately
ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
@ -110,7 +110,7 @@ pub(crate) fn complete_type_path(
});
// Iterate assoc types separately
ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}

View file

@ -615,21 +615,14 @@ impl CompletionContext<'_> {
mut cb: impl FnMut(hir::AssocItem),
) {
let mut seen = FxHashSet::default();
ty.iterate_path_candidates(
self.db,
&self.scope,
&self.traits_in_scope(),
Some(self.module),
None,
|item| {
// We might iterate candidates of a trait multiple times here, so deduplicate
// them.
if seen.insert(item) {
cb(item)
}
None::<()>
},
);
ty.iterate_path_candidates(self.db, &self.scope, &self.traits_in_scope(), None, |item| {
// We might iterate candidates of a trait multiple times here, so deduplicate
// them.
if seen.insert(item) {
cb(item)
}
None::<()>
});
}
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and

View file

@ -3198,6 +3198,7 @@ fn main() {
fn ambiguous_float_literal() {
check(
r#"
//- /core.rs crate:core
#![rustc_coherence_is_core]
impl i32 {
@ -3232,6 +3233,7 @@ fn foo() {
fn ambiguous_float_literal_in_ambiguous_method_call() {
check(
r#"
//- /core.rs crate:core
#![rustc_coherence_is_core]
impl i32 {

View file

@ -780,9 +780,9 @@ fn main() {
}
"#,
expect![[r#"
me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
"#]],
);
}

View file

@ -653,6 +653,7 @@ fn f(u: U) {
check(
r#"
//- /core.rs crate:core
#![rustc_coherence_is_core]
#[lang = "u32"]
impl u32 {

View file

@ -5,7 +5,7 @@ use std::{convert::Infallible, ops::ControlFlow};
use hir::{
AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, FindPathConfig, HasCrate,
ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
SemanticsScope, Trait, TyFingerprint, Type, db::HirDatabase,
SemanticsScope, Trait, Type,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
@ -500,44 +500,37 @@ fn validate_resolvable(
ModuleDef::Adt(adt) => adt.ty(db),
_ => return SmallVec::new(),
};
ty.iterate_path_candidates::<Infallible>(
db,
scope,
&FxHashSet::default(),
None,
None,
|assoc| {
// FIXME: Support extra trait imports
if assoc.container_or_implemented_trait(db).is_some() {
return None;
ty.iterate_path_candidates::<Infallible>(db, scope, &FxHashSet::default(), None, |assoc| {
// FIXME: Support extra trait imports
if assoc.container_or_implemented_trait(db).is_some() {
return None;
}
let name = assoc.name(db)?;
let is_match = match candidate {
NameToImport::Prefix(text, true) => name.as_str().starts_with(text),
NameToImport::Prefix(text, false) => {
name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| {
name_char.eq_ignore_ascii_case(&candidate_char)
})
}
let name = assoc.name(db)?;
let is_match = match candidate {
NameToImport::Prefix(text, true) => name.as_str().starts_with(text),
NameToImport::Prefix(text, false) => {
name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| {
name_char.eq_ignore_ascii_case(&candidate_char)
})
}
NameToImport::Exact(text, true) => name.as_str() == text,
NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text),
NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)),
NameToImport::Fuzzy(text, false) => text.chars().all(|c| {
name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))
}),
};
if !is_match {
return None;
}
result.push(LocatedImport::new(
import_path_candidate.clone(),
resolved_qualifier,
assoc_to_item(assoc),
complete_in_flyimport,
));
None
},
);
NameToImport::Exact(text, true) => name.as_str() == text,
NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text),
NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)),
NameToImport::Fuzzy(text, false) => text
.chars()
.all(|c| name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))),
};
if !is_match {
return None;
}
result.push(LocatedImport::new(
import_path_candidate.clone(),
resolved_qualifier,
assoc_to_item(assoc),
complete_in_flyimport,
));
None
});
result
}
@ -608,7 +601,6 @@ fn trait_applicable_items<'db>(
deref_chain
.into_iter()
.filter_map(|ty| Some((ty.krate(db).into(), ty.fingerprint_for_trait_impl()?)))
.sorted()
.unique()
.collect::<Vec<_>>()
};
@ -619,11 +611,11 @@ fn trait_applicable_items<'db>(
}
// in order to handle implied bounds through an associated type, keep all traits if any
// type in the deref chain matches `TyFingerprint::Unnameable`. This fingerprint
// type in the deref chain matches `SimplifiedType::Placeholder`. This fingerprint
// won't be in `TraitImpls` anyways, as `TraitImpls` only contains actual implementations.
if !autoderef_method_receiver
.iter()
.any(|(_, fingerprint)| matches!(fingerprint, TyFingerprint::Unnameable))
.any(|(_, fingerprint)| matches!(fingerprint, hir::SimplifiedType::Placeholder))
{
trait_candidates.retain(|&candidate_trait_id| {
// we care about the following cases:
@ -635,17 +627,18 @@ fn trait_applicable_items<'db>(
// a. This is recursive for fundamental types
let defining_crate_for_trait = Trait::from(candidate_trait_id).krate(db);
let trait_impls_in_crate = db.trait_impls_in_crate(defining_crate_for_trait.into());
let trait_impls_in_crate =
hir::TraitImpls::for_crate(db, defining_crate_for_trait.into());
let definitions_exist_in_trait_crate =
autoderef_method_receiver.iter().any(|&(_, fingerprint)| {
autoderef_method_receiver.iter().any(|(_, fingerprint)| {
trait_impls_in_crate
.has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint)
});
// this is a closure for laziness: if `definitions_exist_in_trait_crate` is true,
// we can avoid a second db lookup.
let definitions_exist_in_receiver_crate = || {
autoderef_method_receiver.iter().any(|&(krate, fingerprint)| {
db.trait_impls_in_crate(krate)
autoderef_method_receiver.iter().any(|(krate, fingerprint)| {
hir::TraitImpls::for_crate(db, *krate)
.has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint)
})
};
@ -663,7 +656,6 @@ fn trait_applicable_items<'db>(
scope,
&trait_candidates,
None,
None,
|assoc| {
if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?;
@ -688,7 +680,6 @@ fn trait_applicable_items<'db>(
scope,
&trait_candidates,
None,
None,
|function| {
let assoc = function.as_assoc_item(db)?;
if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {

View file

@ -226,7 +226,7 @@ fn get_default_constructor(
// Look for a ::new() associated function
let has_new_func = ty
.iterate_assoc_items(ctx.sema.db, krate, |assoc_item| {
.iterate_assoc_items(ctx.sema.db, |assoc_item| {
if let AssocItem::Function(func) = assoc_item
&& func.name(ctx.sema.db) == sym::new
&& func.assoc_fn_params(ctx.sema.db).is_empty()

View file

@ -390,7 +390,6 @@ fn main() {
#[test]
fn expr_diverges() {
cov_mark::check_count!(validate_match_bailed_out, 2);
check_diagnostics(
r#"
enum Either { A, B }
@ -401,6 +400,7 @@ fn main() {
Either::B => (),
}
match loop {} {
// ^^^^^^^ error: missing match arm: `B` not covered
Either::A => (),
}
match loop { break Either::A } {

View file

@ -806,7 +806,7 @@ fn f() {
_ = (x, y);
let x = Foo;
let y = &mut *x;
//^^ 💡 error: cannot mutate immutable variable `x`
// ^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let x = Foo;
//^ 💡 warn: unused variable
@ -815,13 +815,13 @@ fn f() {
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let ref mut y = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
// ^ 💡 error: cannot mutate immutable variable `x`
_ = y;
let (ref mut y, _) = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
// ^ 💡 error: cannot mutate immutable variable `x`
_ = y;
match *x {
//^^ 💡 error: cannot mutate immutable variable `x`
// ^ 💡 error: cannot mutate immutable variable `x`
(ref y, 5) => _ = y,
(_, ref mut y) => _ = y,
}
@ -1130,7 +1130,7 @@ fn f() {
//^^^^^^^ 💡 error: cannot mutate immutable variable `x`
let x = Box::new(5);
let closure = || *x = 2;
//^ 💡 error: cannot mutate immutable variable `x`
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
_ = closure;
}
"#,

View file

@ -228,12 +228,10 @@ impl<'db> ResolutionScope<'db> {
let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?;
if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
let name = path.segment()?.name_ref()?;
let module = self.scope.module();
adt.ty(self.scope.db).iterate_path_candidates(
self.scope.db,
&self.scope,
&self.scope.visible_traits().0,
Some(module),
None,
|assoc_item| {
let item_name = assoc_item.name(self.scope.db)?;

View file

@ -350,7 +350,7 @@ fn main() {
fn hover_closure() {
check(
r#"
//- minicore: copy
//- minicore: copy, add, builtin_impls
fn main() {
let x = 2;
let y = $0|z| x + z;
@ -3280,7 +3280,7 @@ fn test_hover_no_memory_layout() {
check_hover_no_memory_layout(
r#"
//- minicore: copy
//- minicore: copy, add, builtin_impls
fn main() {
let x = 2;
let y = $0|z| x + z;

View file

@ -352,7 +352,7 @@ mod tests {
check_with_config(
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
r#"
//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn
//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn, builtin_impls
fn main() {
let _: u32 = loop {};
//^^^^^^^<never-to-any>
@ -466,9 +466,8 @@ impl core::ops::IndexMut for Struct {}
..DISABLED_CONFIG
},
r#"
//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn
//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn, builtin_impls
fn main() {
Struct.consume();
Struct.by_ref();
//^^^^^^.&

View file

@ -519,4 +519,7 @@ define_symbols! {
never_type_fallback,
specialization,
min_specialization,
arbitrary_self_types,
arbitrary_self_types_pointers,
supertrait_item_shadowing,
}

View file

@ -43,7 +43,7 @@ pub const ESCAPED_CURSOR_MARKER: &str = "\\$0";
#[macro_export]
macro_rules! assert_eq_text {
($left:expr, $right:expr) => {
assert_eq_text!($left, $right,)
$crate::assert_eq_text!($left, $right,)
};
($left:expr, $right:expr, $($tt:tt)*) => {{
let left = $left;

View file

@ -68,6 +68,7 @@
//! transmute:
//! try: infallible
//! tuple:
//! unary_ops:
//! unpin: sized
//! unsize: sized
//! write: fmt
@ -591,13 +592,13 @@ pub mod ops {
impl<T: PointeeSized> Deref for &T {
type Target = T;
fn deref(&self) -> &T {
loop {}
*self
}
}
impl<T: PointeeSized> Deref for &mut T {
type Target = T;
fn deref(&self) -> &T {
loop {}
*self
}
}
// region:deref_mut
@ -1056,6 +1057,9 @@ pub mod ops {
type Output = $t;
fn add(self, other: $t) -> $t { self + other }
}
impl AddAssign for $t {
fn add_assign(&mut self, other: $t) { *self += other; }
}
)*)
}
@ -1063,6 +1067,24 @@ pub mod ops {
// endregion:builtin_impls
// endregion:add
// region:unary_ops
/// Unary logical negation operator `!` — minicore mirror of `core::ops::Not`,
/// gated by the `unary_ops` region flag above.
#[lang = "not"]
pub const trait Not {
/// The result type produced by applying `!`.
type Output;
#[must_use]
/// Performs the unary `!` operation.
fn not(self) -> Self::Output;
}
/// Unary arithmetic negation operator `-` — minicore mirror of `core::ops::Neg`,
/// gated by the `unary_ops` region flag above.
#[lang = "neg"]
pub const trait Neg {
/// The result type produced by applying unary `-`.
type Output;
#[must_use = "this returns the result of the operation, without modifying the original"]
/// Performs the unary `-` operation.
fn neg(self) -> Self::Output;
}
// endregion:unary_ops
// region:coroutine
mod coroutine {
use crate::pin::Pin;
@ -1118,6 +1140,12 @@ pub mod cmp {
pub trait Eq: PartialEq<Self> + PointeeSized {}
// region:builtin_impls
// The unit type has exactly one value, so any two `()` values compare equal.
// (`other` is deliberately unused; kept named to match the trait signature.)
impl PartialEq for () {
fn eq(&self, other: &()) -> bool { true }
}
// endregion:builtin_impls
// region:derive
#[rustc_builtin_macro]
pub macro PartialEq($item:item) {}