Merge pull request #21295 from ChayimFriedman2/non-salsa-interneds-v3

perf: Non-Salsa-interned solver types - with GC for them
This commit is contained in:
Chayim Refael Friedman 2025-12-20 15:56:46 +00:00 committed by GitHub
commit fa4ea90fb2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
90 changed files with 4372 additions and 2561 deletions

2
Cargo.lock generated
View file

@ -1214,7 +1214,9 @@ version = "0.0.0"
dependencies = [
"dashmap",
"hashbrown 0.14.5",
"rayon",
"rustc-hash 2.1.1",
"smallvec",
"triomphe",
]

View file

@ -63,7 +63,7 @@ impl DefMap {
return Ok(ResolvedAttr::Other);
}
}
None => return Err(UnresolvedMacro { path: ast_id.path.as_ref().clone() }),
None => return Err(UnresolvedMacro { path: (*ast_id.path).clone() }),
};
Ok(ResolvedAttr::Macro(attr_macro_as_call_id(
@ -145,7 +145,7 @@ pub(super) fn derive_macro_as_call_id(
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
let (macro_id, def_id) = resolver(&item_attr.path)
.filter(|(_, def_id)| def_id.is_derive())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?;
.ok_or_else(|| UnresolvedMacro { path: (*item_attr.path).clone() })?;
let call_id = def_id.make_call(
db,
krate,

View file

@ -1675,7 +1675,7 @@ impl<'db> DefCollector<'db> {
derive_index: *derive_pos as u32,
derive_macro_id: *derive_macro_id,
},
ast_id.path.as_ref().clone(),
(*ast_id.path).clone(),
));
}
// These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them

View file

@ -23,8 +23,9 @@ use crate::{
mir::{MirEvalError, MirLowerError},
next_solver::{
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
ParamEnv, Ty, ValueConst,
ParamEnv, StoredConst, StoredGenericArgs, Ty, ValueConst,
},
traits::StoredParamEnvAndCrate,
};
use super::mir::{interpret_mir, lower_to_mir, pad16};
@ -38,12 +39,12 @@ pub fn unknown_const_as_generic<'db>(ty: Ty<'db>) -> GenericArg<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstEvalError<'db> {
MirLowerError(MirLowerError<'db>),
MirEvalError(MirEvalError<'db>),
pub enum ConstEvalError {
MirLowerError(MirLowerError),
MirEvalError(MirEvalError),
}
impl ConstEvalError<'_> {
impl ConstEvalError {
pub fn pretty_print(
&self,
f: &mut String,
@ -62,8 +63,8 @@ impl ConstEvalError<'_> {
}
}
impl<'db> From<MirLowerError<'db>> for ConstEvalError<'db> {
fn from(value: MirLowerError<'db>) -> Self {
impl From<MirLowerError> for ConstEvalError {
fn from(value: MirLowerError) -> Self {
match value {
MirLowerError::ConstEvalError(_, e) => *e,
_ => ConstEvalError::MirLowerError(value),
@ -71,8 +72,8 @@ impl<'db> From<MirLowerError<'db>> for ConstEvalError<'db> {
}
}
impl<'db> From<MirEvalError<'db>> for ConstEvalError<'db> {
fn from(value: MirEvalError<'db>) -> Self {
impl From<MirEvalError> for ConstEvalError {
fn from(value: MirEvalError) -> Self {
ConstEvalError::MirEvalError(value)
}
}
@ -85,7 +86,8 @@ pub fn intern_const_ref<'a>(
krate: Crate,
) -> Const<'a> {
let interner = DbInterner::new_no_crate(db);
let layout = db.layout_of_ty(ty, ParamEnvAndCrate { param_env: ParamEnv::empty(), krate });
let layout = db
.layout_of_ty(ty.store(), ParamEnvAndCrate { param_env: ParamEnv::empty(), krate }.store());
let kind = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
@ -180,10 +182,10 @@ pub fn try_const_isize<'db>(db: &'db dyn HirDatabase, c: &Const<'db>) -> Option<
}
}
pub(crate) fn const_eval_discriminant_variant<'db>(
db: &'db dyn HirDatabase,
pub(crate) fn const_eval_discriminant_variant(
db: &dyn HirDatabase,
variant_id: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>> {
) -> Result<i128, ConstEvalError> {
let interner = DbInterner::new_no_crate(db);
let def = variant_id.into();
let body = db.body(def);
@ -206,8 +208,9 @@ pub(crate) fn const_eval_discriminant_variant<'db>(
let mir_body = db.monomorphized_mir_body(
def,
GenericArgs::new_from_iter(interner, []),
ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) },
GenericArgs::empty(interner).store(),
ParamEnvAndCrate { param_env: db.trait_environment_for_body(def), krate: def.krate(db) }
.store(),
)?;
let c = interpret_mir(db, mir_body, false, None)?.0?;
let c = if is_signed {
@ -233,7 +236,7 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd
}
if has_closure(ctx.body, expr) {
// Type checking closures needs an isolated body (See the above FIXME). Bail out early to prevent panic.
return unknown_const(infer[expr]);
return Const::error(ctx.interner());
}
if let Expr::Path(p) = &ctx.body[expr] {
let mut ctx = TyLoweringContext::new(
@ -252,63 +255,89 @@ pub(crate) fn eval_to_const<'db>(expr: ExprId, ctx: &mut InferenceContext<'_, 'd
{
return result;
}
unknown_const(infer[expr])
Const::error(ctx.interner())
}
pub(crate) fn const_eval_cycle_result<'db>(
_: &'db dyn HirDatabase,
_: salsa::Id,
_: ConstId,
_: GenericArgs<'db>,
_: Option<ParamEnvAndCrate<'db>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_static_cycle_result<'db>(
_: &'db dyn HirDatabase,
_: salsa::Id,
_: StaticId,
) -> Result<Const<'db>, ConstEvalError<'db>> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_discriminant_cycle_result<'db>(
_: &'db dyn HirDatabase,
pub(crate) fn const_eval_discriminant_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>> {
) -> Result<i128, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_query<'db>(
pub(crate) fn const_eval<'db>(
db: &'db dyn HirDatabase,
def: ConstId,
subst: GenericArgs<'db>,
trait_env: Option<ParamEnvAndCrate<'db>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
let body = db.monomorphized_mir_body(
def.into(),
subst,
ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) },
)?;
let c = interpret_mir(db, body, false, trait_env)?.0?;
Ok(c)
) -> Result<Const<'db>, ConstEvalError> {
return match const_eval_query(db, def, subst.store(), trait_env.map(|env| env.store())) {
Ok(konst) => Ok(konst.as_ref()),
Err(err) => Err(err.clone()),
};
#[salsa::tracked(returns(ref), cycle_result = const_eval_cycle_result)]
pub(crate) fn const_eval_query<'db>(
db: &'db dyn HirDatabase,
def: ConstId,
subst: StoredGenericArgs,
trait_env: Option<StoredParamEnvAndCrate>,
) -> Result<StoredConst, ConstEvalError> {
let body = db.monomorphized_mir_body(
def.into(),
subst,
ParamEnvAndCrate { param_env: db.trait_environment(def.into()), krate: def.krate(db) }
.store(),
)?;
let c = interpret_mir(db, body, false, trait_env.as_ref().map(|env| env.as_ref()))?.0?;
Ok(c.store())
}
pub(crate) fn const_eval_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_: ConstId,
_: StoredGenericArgs,
_: Option<StoredParamEnvAndCrate>,
) -> Result<StoredConst, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
}
pub(crate) fn const_eval_static_query<'db>(
pub(crate) fn const_eval_static<'db>(
db: &'db dyn HirDatabase,
def: StaticId,
) -> Result<Const<'db>, ConstEvalError<'db>> {
let interner = DbInterner::new_no_crate(db);
let body = db.monomorphized_mir_body(
def.into(),
GenericArgs::new_from_iter(interner, []),
ParamEnvAndCrate {
param_env: db.trait_environment_for_body(def.into()),
krate: def.krate(db),
},
)?;
let c = interpret_mir(db, body, false, None)?.0?;
Ok(c)
) -> Result<Const<'db>, ConstEvalError> {
return match const_eval_static_query(db, def) {
Ok(konst) => Ok(konst.as_ref()),
Err(err) => Err(err.clone()),
};
#[salsa::tracked(returns(ref), cycle_result = const_eval_static_cycle_result)]
pub(crate) fn const_eval_static_query<'db>(
db: &'db dyn HirDatabase,
def: StaticId,
) -> Result<StoredConst, ConstEvalError> {
let interner = DbInterner::new_no_crate(db);
let body = db.monomorphized_mir_body(
def.into(),
GenericArgs::empty(interner).store(),
ParamEnvAndCrate {
param_env: db.trait_environment_for_body(def.into()),
krate: def.krate(db),
}
.store(),
)?;
let c = interpret_mir(db, body, false, None)?.0?;
Ok(c.store())
}
pub(crate) fn const_eval_static_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_: StaticId,
) -> Result<StoredConst, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
}

View file

@ -27,7 +27,7 @@ use super::{
mod intrinsics;
fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> {
fn simplify(e: ConstEvalError) -> ConstEvalError {
match e {
ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => {
simplify(ConstEvalError::MirEvalError(*e))
@ -39,7 +39,7 @@ fn simplify(e: ConstEvalError<'_>) -> ConstEvalError<'_> {
#[track_caller]
fn check_fail(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
error: impl FnOnce(ConstEvalError<'_>) -> bool,
error: impl FnOnce(ConstEvalError) -> bool,
) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
crate::attach_db(&db, || match eval_goal(&db, file_id) {
@ -104,7 +104,7 @@ fn check_answer(
});
}
fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String {
fn pretty_print_err(e: ConstEvalError, db: &TestDB) -> String {
let mut err = String::new();
let span_formatter = |file, range| format!("{file:?} {range:?}");
let display_target =
@ -121,7 +121,7 @@ fn pretty_print_err(e: ConstEvalError<'_>, db: &TestDB) -> String {
err
}
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEvalError<'_>> {
fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEvalError> {
let _tracing = setup_tracing();
let interner = DbInterner::new_no_crate(db);
let module_id = db.module_for_file(file_id.file_id(db));
@ -142,7 +142,7 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const<'_>, ConstEv
_ => None,
})
.expect("No const named GOAL found in the test");
db.const_eval(const_id, GenericArgs::new_from_iter(interner, []), None)
db.const_eval(const_id, GenericArgs::empty(interner), None)
}
#[test]

View file

@ -19,9 +19,10 @@ use crate::{
lower::{Diagnostics, GenericDefaults},
mir::{BorrowckResult, MirBody, MirLowerError},
next_solver::{
Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, TraitRef, Ty, VariancesOf,
Const, EarlyBinder, GenericArgs, ParamEnv, PolyFnSig, StoredEarlyBinder, StoredGenericArgs,
StoredTy, TraitRef, Ty, VariancesOf,
},
traits::ParamEnvAndCrate,
traits::{ParamEnvAndCrate, StoredParamEnvAndCrate},
};
#[query_group::query_group]
@ -32,60 +33,48 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
// and `monomorphized_mir_body_for_closure` into `monomorphized_mir_body`
#[salsa::invoke(crate::mir::mir_body_query)]
#[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)]
fn mir_body<'db>(
&'db self,
def: DefWithBodyId,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure<'db>(
&'db self,
def: InternedClosureId,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[salsa::cycle(cycle_result = crate::mir::monomorphized_mir_body_cycle_result)]
fn monomorphized_mir_body<'db>(
&'db self,
fn monomorphized_mir_body(
&self,
def: DefWithBodyId,
subst: GenericArgs<'db>,
env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
subst: StoredGenericArgs,
env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
fn monomorphized_mir_body_for_closure<'db>(
&'db self,
fn monomorphized_mir_body_for_closure(
&self,
def: InternedClosureId,
subst: GenericArgs<'db>,
env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>>;
subst: StoredGenericArgs,
env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::borrowck_query)]
#[salsa::lru(2024)]
fn borrowck<'db>(
&'db self,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>>;
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)]
#[salsa::invoke(crate::consteval::const_eval)]
#[salsa::transparent]
fn const_eval<'db>(
&'db self,
def: ConstId,
subst: GenericArgs<'db>,
trait_env: Option<ParamEnvAndCrate<'db>>,
) -> Result<Const<'db>, ConstEvalError<'db>>;
) -> Result<Const<'db>, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_static_cycle_result)]
fn const_eval_static<'db>(&'db self, def: StaticId) -> Result<Const<'db>, ConstEvalError<'db>>;
#[salsa::invoke(crate::consteval::const_eval_static)]
#[salsa::transparent]
fn const_eval_static<'db>(&'db self, def: StaticId) -> Result<Const<'db>, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(cycle_result = crate::consteval::const_eval_discriminant_cycle_result)]
fn const_eval_discriminant<'db>(
&'db self,
def: EnumVariantId,
) -> Result<i128, ConstEvalError<'db>>;
fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
#[salsa::transparent]
@ -100,19 +89,19 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::layout::layout_of_adt_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_adt_cycle_result)]
fn layout_of_adt<'db>(
&'db self,
fn layout_of_adt(
&self,
def: AdtId,
args: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
args: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(cycle_result = crate::layout::layout_of_ty_cycle_result)]
fn layout_of_ty<'db>(
&'db self,
ty: Ty<'db>,
env: ParamEnvAndCrate<'db>,
fn layout_of_ty(
&self,
ty: StoredTy,
env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
@ -125,8 +114,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn ty<'db>(&'db self, def: TyDefId) -> EarlyBinder<'db, Ty<'db>>;
#[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::type_for_type_alias_with_diagnostics_cycle_result)]
#[salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics)]
#[salsa::transparent]
fn type_for_type_alias_with_diagnostics<'db>(
&'db self,
def: TypeAliasId,
@ -134,11 +123,12 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor.
#[salsa::invoke(crate::lower::value_ty_query)]
#[salsa::invoke(crate::lower::value_ty)]
#[salsa::transparent]
fn value_ty<'db>(&'db self, def: ValueTyDefId) -> Option<EarlyBinder<'db, Ty<'db>>>;
#[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::impl_self_ty_with_diagnostics_cycle_result)]
#[salsa::invoke(crate::lower::impl_self_ty_with_diagnostics)]
#[salsa::transparent]
fn impl_self_ty_with_diagnostics<'db>(
&'db self,
def: ImplId,
@ -148,9 +138,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn impl_self_ty<'db>(&'db self, def: ImplId) -> EarlyBinder<'db, Ty<'db>>;
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)]
#[salsa::invoke(crate::lower::const_param_ty_with_diagnostics)]
#[salsa::transparent]
fn const_param_ty_with_diagnostics<'db>(&'db self, def: ConstParamId)
-> (Ty<'db>, Diagnostics);
@ -158,7 +147,8 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn const_param_ty_ns<'db>(&'db self, def: ConstParamId) -> Ty<'db>;
#[salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
#[salsa::invoke(crate::lower::impl_trait_with_diagnostics)]
#[salsa::transparent]
fn impl_trait_with_diagnostics<'db>(
&'db self,
def: ImplId,
@ -169,19 +159,18 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn impl_trait<'db>(&'db self, def: ImplId) -> Option<EarlyBinder<'db, TraitRef<'db>>>;
#[salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics<'db>(
&'db self,
#[salsa::transparent]
fn field_types_with_diagnostics(
&self,
var: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, EarlyBinder<'db, Ty<'db>>>>, Diagnostics);
) -> &(ArenaMap<LocalFieldId, StoredEarlyBinder<StoredTy>>, Diagnostics);
#[salsa::invoke(crate::lower::field_types_query)]
#[salsa::transparent]
fn field_types<'db>(
&'db self,
var: VariantId,
) -> Arc<ArenaMap<LocalFieldId, EarlyBinder<'db, Ty<'db>>>>;
fn field_types(&self, var: VariantId) -> &ArenaMap<LocalFieldId, StoredEarlyBinder<StoredTy>>;
#[salsa::invoke(crate::lower::callable_item_signature_query)]
#[salsa::invoke(crate::lower::callable_item_signature)]
#[salsa::transparent]
fn callable_item_signature<'db>(
&'db self,
def: CallableDefId,
@ -191,26 +180,27 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::transparent]
fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId) -> ParamEnv<'db>;
#[salsa::invoke(crate::lower::trait_environment_query)]
#[salsa::invoke(crate::lower::trait_environment)]
#[salsa::transparent]
fn trait_environment<'db>(&'db self, def: GenericDefId) -> ParamEnv<'db>;
#[salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(cycle_result = crate::lower::generic_defaults_with_diagnostics_cycle_result)]
fn generic_defaults_with_diagnostics<'db>(
&'db self,
fn generic_defaults_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericDefaults<'db>, Diagnostics);
) -> (GenericDefaults, Diagnostics);
/// This returns an empty list if no parameter has default.
///
/// The binders of the returned defaults are only up to (not including) this parameter.
#[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::transparent]
fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>;
fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
// Interned IDs for solver integration
#[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId;
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
@ -219,11 +209,7 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
#[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(
// cycle_fn = crate::variance::variances_of_cycle_fn,
// cycle_initial = crate::variance::variances_of_cycle_initial,
cycle_result = crate::variance::variances_of_cycle_initial,
)]
#[salsa::transparent]
fn variances_of<'db>(&'db self, def: GenericDefId) -> VariancesOf<'db>;
}
@ -245,10 +231,10 @@ pub struct InternedConstParamId {
pub loc: ConstParamId,
}
#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX, unsafe(non_update_types))]
#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
pub struct InternedOpaqueTyId {
pub loc: ImplTraitId<'db>,
pub loc: ImplTraitId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]

View file

@ -99,7 +99,7 @@ impl BodyValidationDiagnostic {
struct ExprValidator<'db> {
owner: DefWithBodyId,
body: Arc<Body>,
infer: &'db InferenceResult<'db>,
infer: &'db InferenceResult,
env: ParamEnv<'db>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
@ -313,7 +313,7 @@ impl<'db> ExprValidator<'db> {
);
value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_)))
}
Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind() {
Expr::Field { expr, .. } => match self.infer.expr_ty(*expr).kind() {
TyKind::Adt(adt, ..) if matches!(adt.def_id().0, AdtId::UnionId(_)) => false,
_ => self.is_known_valid_scrutinee(*expr),
},
@ -554,7 +554,7 @@ impl<'db> FilterMapNextChecker<'db> {
pub fn record_literal_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult<'_>,
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
@ -584,7 +584,7 @@ pub fn record_literal_missing_fields(
pub fn record_pattern_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult<'_>,
infer: &InferenceResult,
id: PatId,
pat: &Pat,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
@ -612,8 +612,8 @@ pub fn record_pattern_missing_fields(
Some((variant_def, missed_fields, exhaustive))
}
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult<'_>) -> bool {
fn walk(pat: PatId, body: &Body, infer: &InferenceResult<'_>, has_type_mismatches: &mut bool) {
fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
match infer.type_mismatch_for_pat(pat) {
Some(_) => *has_type_mismatches = true,
None if *has_type_mismatches => (),

View file

@ -16,7 +16,7 @@ use hir_def::{
item_tree::FieldsShape,
};
use hir_expand::name::Name;
use rustc_type_ir::inherent::{IntoKind, SliceLike};
use rustc_type_ir::inherent::IntoKind;
use span::Edition;
use stdx::{always, never, variance::PhantomCovariantLifetime};
@ -96,7 +96,7 @@ pub(crate) enum PatKind<'db> {
pub(crate) struct PatCtxt<'a, 'db> {
db: &'db dyn HirDatabase,
infer: &'a InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'a Body,
pub(crate) errors: Vec<PatternError>,
}
@ -104,7 +104,7 @@ pub(crate) struct PatCtxt<'a, 'db> {
impl<'a, 'db> PatCtxt<'a, 'db> {
pub(crate) fn new(
db: &'db dyn HirDatabase,
infer: &'a InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'a Body,
) -> Self {
Self { db, infer, body, errors: Vec::new() }
@ -119,12 +119,15 @@ impl<'a, 'db> PatCtxt<'a, 'db> {
let unadjusted_pat = self.lower_pattern_unadjusted(pat);
self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold(
unadjusted_pat,
|subpattern, ref_ty| Pat { ty: *ref_ty, kind: Box::new(PatKind::Deref { subpattern }) },
|subpattern, ref_ty| Pat {
ty: ref_ty.as_ref(),
kind: Box::new(PatKind::Deref { subpattern }),
},
)
}
fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat<'db> {
let mut ty = self.infer[pat];
let mut ty = self.infer.pat_ty(pat);
let variant = self.infer.variant_resolution_for_pat(pat);
let kind = match self.body[pat] {
@ -151,7 +154,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> {
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[pat];
ty = self.infer[id];
ty = self.infer.binding_ty(id);
let name = &self.body[id].name;
match (bm, ty.kind()) {
(BindingMode::Ref(_), TyKind::Ref(_, rty, _)) => ty = rty,
@ -273,7 +276,7 @@ impl<'a, 'db> PatCtxt<'a, 'db> {
}
fn lower_path(&mut self, pat: PatId, _path: &Path) -> Pat<'db> {
let ty = self.infer[pat];
let ty = self.infer.pat_ty(pat);
let pat_from_kind = |kind| Pat { ty, kind: Box::new(kind) };

View file

@ -11,7 +11,7 @@ use rustc_pattern_analysis::{
constructor::{Constructor, ConstructorSet, VariantVisibility},
usefulness::{PlaceValidity, UsefulnessReport, compute_match_usefulness},
};
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use rustc_type_ir::inherent::{AdtDef, IntoKind};
use smallvec::{SmallVec, smallvec};
use stdx::never;
@ -150,7 +150,7 @@ impl<'a, 'db> MatchCheckCtx<'a, 'db> {
let fields_len = variant.fields(self.db).fields().len() as u32;
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| {
let ty = field_tys[fid].instantiate(self.infcx.interner, substs);
let ty = field_tys[fid].get().instantiate(self.infcx.interner, substs);
let ty = self
.infcx
.at(&ObligationCause::dummy(), self.env)

View file

@ -97,9 +97,9 @@ enum UnsafeDiagnostic {
DeprecatedSafe2024 { node: ExprId, inside_unsafe_block: InsideUnsafeBlock },
}
pub fn unsafe_operations_for_body<'db>(
db: &'db dyn HirDatabase,
infer: &InferenceResult<'db>,
pub fn unsafe_operations_for_body(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
body: &Body,
callback: &mut dyn FnMut(ExprOrPatId),
@ -116,9 +116,9 @@ pub fn unsafe_operations_for_body<'db>(
}
}
pub fn unsafe_operations<'db>(
db: &'db dyn HirDatabase,
infer: &InferenceResult<'db>,
pub fn unsafe_operations(
db: &dyn HirDatabase,
infer: &InferenceResult,
def: DefWithBodyId,
body: &Body,
current: ExprId,
@ -136,7 +136,7 @@ pub fn unsafe_operations<'db>(
struct UnsafeVisitor<'db> {
db: &'db dyn HirDatabase,
infer: &'db InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'db Body,
resolver: Resolver<'db>,
def: DefWithBodyId,
@ -155,7 +155,7 @@ struct UnsafeVisitor<'db> {
impl<'db> UnsafeVisitor<'db> {
fn new(
db: &'db dyn HirDatabase,
infer: &'db InferenceResult<'db>,
infer: &'db InferenceResult,
body: &'db Body,
def: DefWithBodyId,
unsafe_expr_cb: &'db mut dyn FnMut(UnsafeDiagnostic),
@ -260,7 +260,7 @@ impl<'db> UnsafeVisitor<'db> {
match pat {
Pat::Record { .. } => {
if let Some((AdtId::UnionId(_), _)) = self.infer[current].as_adt() {
if let Some((AdtId::UnionId(_), _)) = self.infer.pat_ty(current).as_adt() {
let old_inside_union_destructure =
mem::replace(&mut self.inside_union_destructure, true);
self.body.walk_pats_shallow(current, |pat| self.walk_pat(pat));
@ -286,7 +286,7 @@ impl<'db> UnsafeVisitor<'db> {
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
match expr {
&Expr::Call { callee, .. } => {
let callee = self.infer[callee];
let callee = self.infer.expr_ty(callee);
if let TyKind::FnDef(CallableIdWrapper(CallableDefId::FunctionId(func)), _) =
callee.kind()
{
@ -341,7 +341,7 @@ impl<'db> UnsafeVisitor<'db> {
}
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
if let TyKind::RawPtr(..) = self.infer[*expr].kind() {
if let TyKind::RawPtr(..) = self.infer.expr_ty(*expr).kind() {
self.on_unsafe_op(current.into(), UnsafetyReason::RawPtrDeref);
}
}

View file

@ -38,7 +38,7 @@ use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind, BoundVarIndexKind, CoroutineArgsParts, CoroutineClosureArgsParts, RegionKind,
Upcast,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _, Tys as _},
inherent::{AdtDef, GenericArgs as _, IntoKind, Term as _, Ty as _, Tys as _},
};
use smallvec::SmallVec;
use span::Edition;
@ -52,9 +52,9 @@ use crate::{
lower::GenericPredicates,
mir::pad16,
next_solver::{
AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder,
ExistentialPredicate, FnSig, GenericArg, GenericArgs, ParamEnv, PolyFnSig, Region,
SolverDefId, Term, TraitRef, Ty, TyKind, TypingMode,
AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, ExistentialPredicate, FnSig,
GenericArg, GenericArgKind, GenericArgs, ParamEnv, PolyFnSig, Region, SolverDefId,
StoredEarlyBinder, StoredTy, Term, TermKind, TraitRef, Ty, TyKind, TypingMode,
abi::Safety,
infer::{DbInternerInferExt, traits::ObligationCause},
},
@ -602,7 +602,7 @@ impl<'db, T: HirDisplay<'db>> HirDisplay<'db> for &T {
impl<'db, T: HirDisplay<'db> + Internable> HirDisplay<'db> for Interned<T> {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
HirDisplay::hir_fmt(self.as_ref(), f)
HirDisplay::hir_fmt(&**self, f)
}
}
@ -664,10 +664,10 @@ fn write_projection<'db>(f: &mut HirFormatter<'_, 'db>, alias: &AliasTy<'db>) ->
impl<'db> HirDisplay<'db> for GenericArg<'db> {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
match self {
GenericArg::Ty(ty) => ty.hir_fmt(f),
GenericArg::Lifetime(lt) => lt.hir_fmt(f),
GenericArg::Const(c) => c.hir_fmt(f),
match self.kind() {
GenericArgKind::Type(ty) => ty.hir_fmt(f),
GenericArgKind::Lifetime(lt) => lt.hir_fmt(f),
GenericArgKind::Const(c) => c.hir_fmt(f),
}
}
}
@ -790,7 +790,7 @@ fn render_const_scalar_inner<'db>(
TyKind::Slice(ty) => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@ -824,7 +824,7 @@ fn render_const_scalar_inner<'db>(
let Ok(t) = memory_map.vtable_ty(ty_id) else {
return f.write_str("<ty-missing-in-vtable-map>");
};
let Ok(layout) = f.db.layout_of_ty(t, param_env) else {
let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@ -854,7 +854,7 @@ fn render_const_scalar_inner<'db>(
return f.write_str("<layout-error>");
}
});
let Ok(layout) = f.db.layout_of_ty(t, param_env) else {
let Ok(layout) = f.db.layout_of_ty(t.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@ -866,7 +866,7 @@ fn render_const_scalar_inner<'db>(
}
},
TyKind::Tuple(tys) => {
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
f.write_str("(")?;
@ -878,7 +878,7 @@ fn render_const_scalar_inner<'db>(
f.write_str(", ")?;
}
let offset = layout.fields.offset(id).bytes_usize();
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
f.write_str("<layout-error>")?;
continue;
};
@ -889,7 +889,7 @@ fn render_const_scalar_inner<'db>(
}
TyKind::Adt(def, args) => {
let def = def.def_id().0;
let Ok(layout) = f.db.layout_of_adt(def, args, param_env) else {
let Ok(layout) = f.db.layout_of_adt(def, args.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
match def {
@ -900,7 +900,7 @@ fn render_const_scalar_inner<'db>(
render_variant_after_name(
s.fields(f.db),
f,
&field_types,
field_types,
f.db.trait_environment(def.into()),
&layout,
args,
@ -932,7 +932,7 @@ fn render_const_scalar_inner<'db>(
render_variant_after_name(
var_id.fields(f.db),
f,
&field_types,
field_types,
f.db.trait_environment(def.into()),
var_layout,
args,
@ -952,7 +952,7 @@ fn render_const_scalar_inner<'db>(
let Some(len) = consteval::try_const_usize(f.db, len) else {
return f.write_str("<unknown-array-len>");
};
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@ -992,7 +992,7 @@ fn render_const_scalar_inner<'db>(
fn render_variant_after_name<'db>(
data: &VariantFields,
f: &mut HirFormatter<'_, 'db>,
field_types: &ArenaMap<LocalFieldId, EarlyBinder<'db, Ty<'db>>>,
field_types: &'db ArenaMap<LocalFieldId, StoredEarlyBinder<StoredTy>>,
param_env: ParamEnv<'db>,
layout: &Layout,
args: GenericArgs<'db>,
@ -1004,8 +1004,8 @@ fn render_variant_after_name<'db>(
FieldsShape::Record | FieldsShape::Tuple => {
let render_field = |f: &mut HirFormatter<'_, 'db>, id: LocalFieldId| {
let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize();
let ty = field_types[id].instantiate(f.interner, args);
let Ok(layout) = f.db.layout_of_ty(ty, param_env) else {
let ty = field_types[id].get().instantiate(f.interner, args);
let Ok(layout) = f.db.layout_of_ty(ty.store(), param_env.store()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@ -1223,7 +1223,7 @@ impl<'db> HirDisplay<'db> for Ty<'db> {
};
f.end_location_link();
if args.len() > 0 {
if !args.is_empty() {
let generic_def_id = GenericDefId::from_callable(db, def);
let generics = generics(db, generic_def_id);
let (parent_len, self_param, type_, const_, impl_, lifetime) =
@ -1787,9 +1787,9 @@ impl<'db> HirDisplay<'db> for PolyFnSig<'db> {
impl<'db> HirDisplay<'db> for Term<'db> {
fn hir_fmt(&self, f: &mut HirFormatter<'_, 'db>) -> Result {
match self {
Term::Ty(it) => it.hir_fmt(f),
Term::Const(it) => it.hir_fmt(f),
match self.kind() {
TermKind::Ty(it) => it.hir_fmt(f),
TermKind::Const(it) => it.hir_fmt(f),
}
}
}

View file

@ -2,7 +2,7 @@
use hir_def::{AdtId, signatures::StructFlags};
use rustc_hash::FxHashSet;
use rustc_type_ir::inherent::{AdtDef, IntoKind, SliceLike};
use rustc_type_ir::inherent::{AdtDef, IntoKind};
use stdx::never;
use crate::{
@ -85,7 +85,7 @@ fn has_drop_glue_impl<'db>(
.map(|(_, field_ty)| {
has_drop_glue_impl(
infcx,
field_ty.instantiate(infcx.interner, subst),
field_ty.get().instantiate(infcx.interner, subst),
env,
visited,
)
@ -105,7 +105,7 @@ fn has_drop_glue_impl<'db>(
.map(|(_, field_ty)| {
has_drop_glue_impl(
infcx,
field_ty.instantiate(infcx.interner, subst),
field_ty.get().instantiate(infcx.interner, subst),
env,
visited,
)

View file

@ -10,8 +10,7 @@ use hir_def::{
use rustc_hash::FxHashSet;
use rustc_type_ir::{
AliasTyKind, ClauseKind, PredicatePolarity, TypeSuperVisitable as _, TypeVisitable as _,
Upcast, elaborate,
inherent::{IntoKind, SliceLike},
Upcast, elaborate, inherent::IntoKind,
};
use smallvec::SmallVec;

View file

@ -28,7 +28,7 @@ mod path;
mod place_op;
pub(crate) mod unify;
use std::{cell::OnceCell, convert::identity, iter, ops::Index};
use std::{cell::OnceCell, convert::identity, iter};
use base_db::Crate;
use either::Either;
@ -47,14 +47,12 @@ use hir_expand::{mod_path::ModPath, name::Name};
use indexmap::IndexSet;
use intern::sym;
use la_arena::ArenaMap;
use macros::{TypeFoldable, TypeVisitable};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
AliasTyKind, TypeFoldable,
inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
inherent::{AdtDef, IntoKind, Region as _, Ty as _},
};
use salsa::Update;
use span::Edition;
use stdx::never;
use thin_vec::ThinVec;
@ -74,8 +72,8 @@ use crate::{
method_resolution::{CandidateId, MethodResolutionUnstableFeatures},
mir::MirSpan,
next_solver::{
AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind,
Tys,
AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region,
StoredGenericArgs, StoredTy, StoredTys, Ty, TyKind, Tys,
abi::Safety,
infer::{InferCtxt, traits::ObligationCause},
},
@ -95,7 +93,7 @@ use cast::{CastCheck, CastError};
pub(crate) use closure::analysis::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_> {
fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult {
let _p = tracing::info_span!("infer_query").entered();
let resolver = def.resolver(db);
let body = db.body(def);
@ -162,7 +160,7 @@ fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'_>
ctx.resolve_all()
}
fn infer_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId) -> InferenceResult<'_> {
fn infer_cycle_result(db: &dyn HirDatabase, _: salsa::Id, _: DefWithBodyId) -> InferenceResult {
InferenceResult {
has_errors: true,
..InferenceResult::new(Ty::new_error(DbInterner::new_no_crate(db), ErrorGuaranteed))
@ -196,8 +194,8 @@ pub enum InferenceTyDiagnosticSource {
Signature,
}
#[derive(Debug, PartialEq, Eq, Clone, Update)]
pub enum InferenceDiagnostic<'db> {
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum InferenceDiagnostic {
NoSuchField {
field: ExprOrPatId,
private: Option<LocalFieldId>,
@ -213,16 +211,16 @@ pub enum InferenceDiagnostic<'db> {
},
UnresolvedField {
expr: ExprId,
receiver: Ty<'db>,
receiver: StoredTy,
name: Name,
method_with_same_name_exists: bool,
},
UnresolvedMethodCall {
expr: ExprId,
receiver: Ty<'db>,
receiver: StoredTy,
name: Name,
/// Contains the type the field resolves to
field_with_same_name: Option<Ty<'db>>,
field_with_same_name: Option<StoredTy>,
assoc_func_with_same_name: Option<FunctionId>,
},
UnresolvedAssocItem {
@ -249,21 +247,21 @@ pub enum InferenceDiagnostic<'db> {
},
ExpectedFunction {
call_expr: ExprId,
found: Ty<'db>,
found: StoredTy,
},
TypedHole {
expr: ExprId,
expected: Ty<'db>,
expected: StoredTy,
},
CastToUnsized {
expr: ExprId,
cast_ty: Ty<'db>,
cast_ty: StoredTy,
},
InvalidCast {
expr: ExprId,
error: CastError,
expr_ty: Ty<'db>,
cast_ty: Ty<'db>,
expr_ty: StoredTy,
cast_ty: StoredTy,
},
TyDiagnostic {
source: InferenceTyDiagnosticSource,
@ -290,10 +288,10 @@ pub enum InferenceDiagnostic<'db> {
}
/// A mismatch between an expected and an inferred type.
#[derive(Clone, PartialEq, Eq, Debug, Hash, Update)]
pub struct TypeMismatch<'db> {
pub expected: Ty<'db>,
pub actual: Ty<'db>,
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TypeMismatch {
pub expected: StoredTy,
pub actual: StoredTy,
}
/// Represents coercing a value to a different type of value.
@ -336,20 +334,23 @@ pub struct TypeMismatch<'db> {
/// At some point, of course, `Box` should move out of the compiler, in which
/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, Update)]
pub struct Adjustment<'db> {
#[type_visitable(ignore)]
#[type_foldable(identity)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Adjustment {
pub kind: Adjust,
pub target: Ty<'db>,
pub target: StoredTy,
}
impl<'db> Adjustment<'db> {
pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self {
impl Adjustment {
pub fn borrow<'db>(
interner: DbInterner<'db>,
m: Mutability,
ty: Ty<'db>,
lt: Region<'db>,
) -> Self {
let ty = Ty::new_ref(interner, lt, ty, m);
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))),
target: ty,
target: ty.store(),
}
}
}
@ -473,56 +474,47 @@ pub enum PointerCast {
/// When you add a field that stores types (including `Substitution` and the like), don't forget
/// `resolve_completely()`'ing them in `InferenceContext::resolve_all()`. Inference variables must
/// not appear in the final inference result.
#[derive(Clone, PartialEq, Eq, Debug, Update)]
pub struct InferenceResult<'db> {
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct InferenceResult {
/// For each method call expr, records the function it resolves to.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
method_resolutions: FxHashMap<ExprId, (FunctionId, GenericArgs<'db>)>,
method_resolutions: FxHashMap<ExprId, (FunctionId, StoredGenericArgs)>,
/// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, Either<FieldId, TupleFieldId>>,
/// For each struct literal or pattern, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, GenericArgs<'db>)>,
assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, StoredGenericArgs)>,
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
tuple_field_access_types: ThinVec<Tys<'db>>,
tuple_field_access_types: ThinVec<StoredTys>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
pub(crate) type_of_expr: ArenaMap<ExprId, Ty<'db>>,
pub(crate) type_of_expr: ArenaMap<ExprId, StoredTy>,
/// For each pattern record the type it resolves to.
///
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* binding id is technically update */)))]
pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* type ref id is technically update */)))]
pub(crate) type_of_type_placeholder: FxHashMap<TypeRefId, Ty<'db>>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>,
pub(crate) type_of_pat: ArenaMap<PatId, StoredTy>,
pub(crate) type_of_binding: ArenaMap<BindingId, StoredTy>,
pub(crate) type_of_type_placeholder: FxHashMap<TypeRefId, StoredTy>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, StoredTy>,
pub(crate) type_mismatches: Option<Box<FxHashMap<ExprOrPatId, TypeMismatch<'db>>>>,
pub(crate) type_mismatches: Option<Box<FxHashMap<ExprOrPatId, TypeMismatch>>>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
// `TyKind::Error`.
// Which will then mark this field.
pub(crate) has_errors: bool,
/// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* thinvec is technically update */)))]
diagnostics: ThinVec<InferenceDiagnostic<'db>>,
diagnostics: ThinVec<InferenceDiagnostic>,
/// Interned `Error` type to return references to.
// FIXME: Remove this.
error_ty: Ty<'db>,
error_ty: StoredTy,
#[update(unsafe(with(crate::utils::unsafe_update_eq /* expr id is technically update */)))]
pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment<'db>]>>,
pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
#[update(unsafe(with(crate::utils::unsafe_update_eq /* pat id is technically update */)))]
pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty<'db>>>,
pub(crate) pat_adjustments: FxHashMap<PatId, Vec<StoredTy>>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
/// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
@ -538,7 +530,7 @@ pub struct InferenceResult<'db> {
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem<'db>>, FnTrait)>,
pub(crate) closure_info: FxHashMap<InternedClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
@ -546,15 +538,15 @@ pub struct InferenceResult<'db> {
}
#[salsa::tracked]
impl<'db> InferenceResult<'db> {
#[salsa::tracked(returns(ref), cycle_result = infer_cycle_result, unsafe(non_update_types))]
pub fn for_body(db: &'db dyn HirDatabase, def: DefWithBodyId) -> InferenceResult<'db> {
impl InferenceResult {
#[salsa::tracked(returns(ref), cycle_result = infer_cycle_result)]
pub fn for_body(db: &dyn HirDatabase, def: DefWithBodyId) -> InferenceResult {
infer_query(db, def)
}
}
impl<'db> InferenceResult<'db> {
fn new(error_ty: Ty<'db>) -> Self {
impl InferenceResult {
fn new(error_ty: Ty<'_>) -> Self {
Self {
method_resolutions: Default::default(),
field_resolutions: Default::default(),
@ -569,7 +561,7 @@ impl<'db> InferenceResult<'db> {
type_of_opaque: Default::default(),
type_mismatches: Default::default(),
has_errors: Default::default(),
error_ty,
error_ty: error_ty.store(),
pat_adjustments: Default::default(),
binding_modes: Default::default(),
expr_adjustments: Default::default(),
@ -579,8 +571,8 @@ impl<'db> InferenceResult<'db> {
}
}
pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> {
self.method_resolutions.get(&expr).copied()
pub fn method_resolution<'db>(&self, expr: ExprId) -> Option<(FunctionId, GenericArgs<'db>)> {
self.method_resolutions.get(&expr).map(|(func, args)| (*func, args.as_ref()))
}
pub fn field_resolution(&self, expr: ExprId) -> Option<Either<FieldId, TupleFieldId>> {
self.field_resolutions.get(&expr).copied()
@ -597,16 +589,19 @@ impl<'db> InferenceResult<'db> {
ExprOrPatId::PatId(id) => self.variant_resolution_for_pat(id),
}
}
pub fn assoc_resolutions_for_expr(
pub fn assoc_resolutions_for_expr<'db>(
&self,
id: ExprId,
) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref()))
}
pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
pub fn assoc_resolutions_for_pat<'db>(
&self,
id: PatId,
) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).map(|(assoc, args)| (*assoc, args.as_ref()))
}
pub fn assoc_resolutions_for_expr_or_pat(
pub fn assoc_resolutions_for_expr_or_pat<'db>(
&self,
id: ExprOrPatId,
) -> Option<(CandidateId, GenericArgs<'db>)> {
@ -615,20 +610,20 @@ impl<'db> InferenceResult<'db> {
ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id),
}
}
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch<'db>> {
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
self.type_mismatches.as_deref()?.get(&expr.into())
}
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch<'db>> {
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
self.type_mismatches.as_deref()?.get(&pat.into())
}
pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch<'db>)> {
pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch)> {
self.type_mismatches
.as_deref()
.into_iter()
.flatten()
.map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
}
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch<'db>)> {
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
self.type_mismatches.as_deref().into_iter().flatten().filter_map(
|(expr_or_pat, mismatch)| match *expr_or_pat {
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
@ -636,22 +631,22 @@ impl<'db> InferenceResult<'db> {
},
)
}
pub fn placeholder_types(&self) -> impl Iterator<Item = (TypeRefId, &Ty<'db>)> {
self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty))
pub fn placeholder_types<'db>(&self) -> impl Iterator<Item = (TypeRefId, Ty<'db>)> {
self.type_of_type_placeholder.iter().map(|(&type_ref, ty)| (type_ref, ty.as_ref()))
}
pub fn type_of_type_placeholder(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
self.type_of_type_placeholder.get(&type_ref).copied()
pub fn type_of_type_placeholder<'db>(&self, type_ref: TypeRefId) -> Option<Ty<'db>> {
self.type_of_type_placeholder.get(&type_ref).map(|ty| ty.as_ref())
}
pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem<'db>>, FnTrait) {
pub fn closure_info(&self, closure: InternedClosureId) -> &(Vec<CapturedItem>, FnTrait) {
self.closure_info.get(&closure).unwrap()
}
pub fn type_of_expr_or_pat(&self, id: ExprOrPatId) -> Option<Ty<'db>> {
pub fn type_of_expr_or_pat<'db>(&self, id: ExprOrPatId) -> Option<Ty<'db>> {
match id {
ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).copied(),
ExprOrPatId::PatId(id) => self.type_of_pat.get(id).copied(),
ExprOrPatId::ExprId(id) => self.type_of_expr.get(id).map(|it| it.as_ref()),
ExprOrPatId::PatId(id) => self.type_of_pat.get(id).map(|it| it.as_ref()),
}
}
pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option<Ty<'db>> {
pub fn type_of_expr_with_adjust<'db>(&self, id: ExprId) -> Option<Ty<'db>> {
match self.expr_adjustments.get(&id).and_then(|adjustments| {
adjustments.iter().rfind(|adj| {
// https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140
@ -660,37 +655,37 @@ impl<'db> InferenceResult<'db> {
Adjustment {
kind: Adjust::NeverToAny,
target,
} if target.is_never()
} if target.as_ref().is_never()
)
})
}) {
Some(adjustment) => Some(adjustment.target),
None => self.type_of_expr.get(id).copied(),
Some(adjustment) => Some(adjustment.target.as_ref()),
None => self.type_of_expr.get(id).map(|it| it.as_ref()),
}
}
pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option<Ty<'db>> {
pub fn type_of_pat_with_adjust<'db>(&self, id: PatId) -> Option<Ty<'db>> {
match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
Some(adjusted) => Some(*adjusted),
None => self.type_of_pat.get(id).copied(),
Some(adjusted) => Some(adjusted.as_ref()),
None => self.type_of_pat.get(id).map(|it| it.as_ref()),
}
}
pub fn is_erroneous(&self) -> bool {
self.has_errors && self.type_of_expr.iter().count() == 0
}
pub fn diagnostics(&self) -> &[InferenceDiagnostic<'db>] {
pub fn diagnostics(&self) -> &[InferenceDiagnostic] {
&self.diagnostics
}
pub fn tuple_field_access_type(&self, id: TupleId) -> Tys<'db> {
self.tuple_field_access_types[id.0 as usize]
pub fn tuple_field_access_type<'db>(&self, id: TupleId) -> Tys<'db> {
self.tuple_field_access_types[id.0 as usize].as_ref()
}
pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty<'db>]> {
pub fn pat_adjustment(&self, id: PatId) -> Option<&[StoredTy]> {
self.pat_adjustments.get(&id).map(|it| &**it)
}
pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment<'db>]> {
pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> {
self.expr_adjustments.get(&id).map(|it| &**it)
}
@ -699,66 +694,47 @@ impl<'db> InferenceResult<'db> {
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn expression_types(&self) -> impl Iterator<Item = (ExprId, Ty<'db>)> {
self.type_of_expr.iter().map(|(k, v)| (k, *v))
pub fn expression_types<'db>(&self) -> impl Iterator<Item = (ExprId, Ty<'db>)> {
self.type_of_expr.iter().map(|(k, v)| (k, v.as_ref()))
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn pattern_types(&self) -> impl Iterator<Item = (PatId, Ty<'db>)> {
self.type_of_pat.iter().map(|(k, v)| (k, *v))
pub fn pattern_types<'db>(&self) -> impl Iterator<Item = (PatId, Ty<'db>)> {
self.type_of_pat.iter().map(|(k, v)| (k, v.as_ref()))
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn binding_types(&self) -> impl Iterator<Item = (BindingId, Ty<'db>)> {
self.type_of_binding.iter().map(|(k, v)| (k, *v))
pub fn binding_types<'db>(&self) -> impl Iterator<Item = (BindingId, Ty<'db>)> {
self.type_of_binding.iter().map(|(k, v)| (k, v.as_ref()))
}
// This method is consumed by external tools to run rust-analyzer as a library. Don't remove, please.
pub fn return_position_impl_trait_types(
&self,
pub fn return_position_impl_trait_types<'db>(
&'db self,
db: &'db dyn HirDatabase,
) -> impl Iterator<Item = (ImplTraitIdx<'db>, Ty<'db>)> {
self.type_of_opaque.iter().filter_map(move |(&id, &ty)| {
) -> impl Iterator<Item = (ImplTraitIdx, Ty<'db>)> {
self.type_of_opaque.iter().filter_map(move |(&id, ty)| {
let ImplTraitId::ReturnTypeImplTrait(_, rpit_idx) = id.loc(db) else {
return None;
};
Some((rpit_idx, ty))
Some((rpit_idx, ty.as_ref()))
})
}
}
impl<'db> Index<ExprId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, expr: ExprId) -> &Ty<'db> {
self.type_of_expr.get(expr).unwrap_or(&self.error_ty)
pub fn expr_ty<'db>(&self, id: ExprId) -> Ty<'db> {
self.type_of_expr.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref())
}
}
impl<'db> Index<PatId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, pat: PatId) -> &Ty<'db> {
self.type_of_pat.get(pat).unwrap_or(&self.error_ty)
pub fn pat_ty<'db>(&self, id: PatId) -> Ty<'db> {
self.type_of_pat.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref())
}
}
impl<'db> Index<ExprOrPatId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, id: ExprOrPatId) -> &Ty<'db> {
match id {
ExprOrPatId::ExprId(id) => &self[id],
ExprOrPatId::PatId(id) => &self[id],
}
pub fn expr_or_pat_ty<'db>(&self, id: ExprOrPatId) -> Ty<'db> {
self.type_of_expr_or_pat(id).unwrap_or(self.error_ty.as_ref())
}
}
impl<'db> Index<BindingId> for InferenceResult<'db> {
type Output = Ty<'db>;
fn index(&self, b: BindingId) -> &Ty<'db> {
self.type_of_binding.get(b).unwrap_or(&self.error_ty)
pub fn binding_ty<'db>(&self, id: BindingId) -> Ty<'db> {
self.type_of_binding.get(id).map_or(self.error_ty.as_ref(), |it| it.as_ref())
}
}
@ -826,7 +802,7 @@ impl<'db> InternedStandardTypes<'db> {
re_error: Region::error(interner),
re_erased: Region::new_erased(interner),
empty_args: GenericArgs::new_from_iter(interner, []),
empty_args: GenericArgs::empty(interner),
}
}
}
@ -848,7 +824,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
pub(crate) lang_items: &'db LangItems,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult<'db>,
pub(crate) result: InferenceResult,
tuple_field_accesses_rev:
IndexSet<Tys<'db>, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>,
/// The return type of the function being inferred, the closure or async block if we're
@ -873,7 +849,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
deferred_cast_checks: Vec<CastCheck<'db>>,
// fields related to closure capture
current_captures: Vec<CapturedItemWithoutTy<'db>>,
current_captures: Vec<CapturedItemWithoutTy>,
/// A stack that has an entry for each projection in the current capture.
///
/// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`.
@ -886,7 +862,7 @@ pub(crate) struct InferenceContext<'body, 'db> {
closure_dependencies: FxHashMap<InternedClosureId, Vec<InternedClosureId>>,
deferred_closures: FxHashMap<InternedClosureId, Vec<(Ty<'db>, Ty<'db>, Vec<Ty<'db>>, ExprId)>>,
diagnostics: Diagnostics<'db>,
diagnostics: Diagnostics,
}
#[derive(Clone, Debug)]
@ -1008,7 +984,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
/// Clones `self` and calls `resolve_all()` on it.
// FIXME: Remove this.
pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult<'db> {
pub(crate) fn fixme_resolve_all_clone(&self) -> InferenceResult {
let mut ctx = self.clone();
ctx.type_inference_fallback();
@ -1032,7 +1008,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
// `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
// used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment.
fn resolve_all(self) -> InferenceResult<'db> {
fn resolve_all(self) -> InferenceResult {
let InferenceContext {
mut table, mut result, tuple_field_accesses_rev, diagnostics, ..
} = self;
@ -1066,23 +1042,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
} = &mut result;
for ty in type_of_expr.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_expr.shrink_to_fit();
for ty in type_of_pat.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_pat.shrink_to_fit();
for ty in type_of_binding.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_binding.shrink_to_fit();
for ty in type_of_type_placeholder.values_mut() {
*ty = table.resolve_completely(*ty);
*has_errors = *has_errors || ty.references_non_lt_error();
*ty = table.resolve_completely(ty.as_ref()).store();
*has_errors = *has_errors || ty.as_ref().references_non_lt_error();
}
type_of_type_placeholder.shrink_to_fit();
type_of_opaque.shrink_to_fit();
@ -1090,8 +1066,8 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
if let Some(type_mismatches) = type_mismatches {
*has_errors = true;
for mismatch in type_mismatches.values_mut() {
mismatch.expected = table.resolve_completely(mismatch.expected);
mismatch.actual = table.resolve_completely(mismatch.actual);
mismatch.expected = table.resolve_completely(mismatch.expected.as_ref()).store();
mismatch.actual = table.resolve_completely(mismatch.actual.as_ref()).store();
}
type_mismatches.shrink_to_fit();
}
@ -1101,23 +1077,23 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
ExpectedFunction { found: ty, .. }
| UnresolvedField { receiver: ty, .. }
| UnresolvedMethodCall { receiver: ty, .. } => {
*ty = table.resolve_completely(*ty);
*ty = table.resolve_completely(ty.as_ref()).store();
// FIXME: Remove this when we are on par with rustc in terms of inference
if ty.references_non_lt_error() {
if ty.as_ref().references_non_lt_error() {
return false;
}
if let UnresolvedMethodCall { field_with_same_name, .. } = diagnostic
&& let Some(ty) = field_with_same_name
{
*ty = table.resolve_completely(*ty);
if ty.references_non_lt_error() {
*ty = table.resolve_completely(ty.as_ref()).store();
if ty.as_ref().references_non_lt_error() {
*field_with_same_name = None;
}
}
}
TypedHole { expected: ty, .. } => {
*ty = table.resolve_completely(*ty);
*ty = table.resolve_completely(ty.as_ref()).store();
}
_ => (),
}
@ -1125,30 +1101,33 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
});
diagnostics.shrink_to_fit();
for (_, subst) in method_resolutions.values_mut() {
*subst = table.resolve_completely(*subst);
*has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error());
*subst = table.resolve_completely(subst.as_ref()).store();
*has_errors =
*has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error());
}
method_resolutions.shrink_to_fit();
for (_, subst) in assoc_resolutions.values_mut() {
*subst = table.resolve_completely(*subst);
*has_errors = *has_errors || subst.types().any(|ty| ty.references_non_lt_error());
*subst = table.resolve_completely(subst.as_ref()).store();
*has_errors =
*has_errors || subst.as_ref().types().any(|ty| ty.references_non_lt_error());
}
assoc_resolutions.shrink_to_fit();
for adjustment in expr_adjustments.values_mut().flatten() {
adjustment.target = table.resolve_completely(adjustment.target);
*has_errors = *has_errors || adjustment.target.references_non_lt_error();
adjustment.target = table.resolve_completely(adjustment.target.as_ref()).store();
*has_errors = *has_errors || adjustment.target.as_ref().references_non_lt_error();
}
expr_adjustments.shrink_to_fit();
for adjustment in pat_adjustments.values_mut().flatten() {
*adjustment = table.resolve_completely(*adjustment);
*has_errors = *has_errors || adjustment.references_non_lt_error();
*adjustment = table.resolve_completely(adjustment.as_ref()).store();
*has_errors = *has_errors || adjustment.as_ref().references_non_lt_error();
}
pat_adjustments.shrink_to_fit();
result.tuple_field_access_types = tuple_field_accesses_rev
.into_iter()
.map(|subst| table.resolve_completely(subst))
.map(|subst| table.resolve_completely(subst).store())
.inspect(|subst| {
*has_errors = *has_errors || subst.iter().any(|ty| ty.references_non_lt_error());
*has_errors =
*has_errors || subst.as_ref().iter().any(|ty| ty.references_non_lt_error());
})
.collect();
result.tuple_field_access_types.shrink_to_fit();
@ -1262,10 +1241,10 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
fn write_expr_ty(&mut self, expr: ExprId, ty: Ty<'db>) {
self.result.type_of_expr.insert(expr, ty);
self.result.type_of_expr.insert(expr, ty.store());
}
pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) {
pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) {
if adjustments.is_empty() {
return;
}
@ -1278,7 +1257,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
) => {
// NeverToAny coercion can target any type, so instead of adding a new
// adjustment on top we can change the target.
*target = *new_target;
*target = new_target.clone();
}
_ => {
*entry.get_mut() = adjustments;
@ -1291,7 +1270,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
}
fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[Ty<'db>]>) {
fn write_pat_adj(&mut self, pat: PatId, adjustments: Box<[StoredTy]>) {
if adjustments.is_empty() {
return;
}
@ -1304,7 +1283,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
func: FunctionId,
subst: GenericArgs<'db>,
) {
self.result.method_resolutions.insert(expr, (func, subst));
self.result.method_resolutions.insert(expr, (func, subst.store()));
}
fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
@ -1317,22 +1296,22 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
item: CandidateId,
subs: GenericArgs<'db>,
) {
self.result.assoc_resolutions.insert(id, (item, subs));
self.result.assoc_resolutions.insert(id, (item, subs.store()));
}
fn write_pat_ty(&mut self, pat: PatId, ty: Ty<'db>) {
self.result.type_of_pat.insert(pat, ty);
self.result.type_of_pat.insert(pat, ty.store());
}
fn write_type_placeholder_ty(&mut self, type_ref: TypeRefId, ty: Ty<'db>) {
self.result.type_of_type_placeholder.insert(type_ref, ty);
self.result.type_of_type_placeholder.insert(type_ref, ty.store());
}
fn write_binding_ty(&mut self, id: BindingId, ty: Ty<'db>) {
self.result.type_of_binding.insert(id, ty);
self.result.type_of_binding.insert(id, ty.store());
}
pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) {
pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic) {
self.diagnostics.push(diagnostic);
}
@ -1486,7 +1465,13 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
match ty.kind() {
TyKind::Adt(adt_def, substs) => match adt_def.def_id().0 {
AdtId::StructId(struct_id) => {
match self.db.field_types(struct_id.into()).values().next_back().copied() {
match self
.db
.field_types(struct_id.into())
.values()
.next_back()
.map(|it| it.get())
{
Some(field) => {
ty = field.instantiate(self.interner(), substs);
}
@ -1547,7 +1532,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
self.result
.type_mismatches
.get_or_insert_default()
.insert(id, TypeMismatch { expected, actual });
.insert(id, TypeMismatch { expected: expected.store(), actual: actual.store() });
}
result
}
@ -1592,7 +1577,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
}
fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
self.result[expr]
self.result.expr_ty(expr)
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
@ -1600,7 +1585,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> {
if let Some(it) = self.result.expr_adjustments.get(&e)
&& let Some(it) = it.last()
{
ty = Some(it.target);
ty = Some(it.target.as_ref());
}
ty.unwrap_or_else(|| self.expr_ty(e))
}

View file

@ -25,7 +25,7 @@ impl<'db> InferenceTable<'db> {
}
impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> {
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment<'db>>> {
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment>> {
let steps = self.steps();
if steps.is_empty() {
return InferOk { obligations: PredicateObligations::new(), value: vec![] };
@ -42,7 +42,10 @@ impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> {
}
})
.zip(targets)
.map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
.map(|(autoderef, target)| Adjustment {
kind: Adjust::Deref(autoderef),
target: target.store(),
})
.collect();
InferOk { obligations: self.take_obligations(), value: steps }

View file

@ -4,7 +4,7 @@ use hir_def::{AdtId, hir::ExprId, signatures::TraitFlags};
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
Flags, InferTy, TypeFlags, UintTy,
inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _},
inherent::{AdtDef, BoundExistentialPredicates as _, IntoKind, Ty as _},
};
use stdx::never;
@ -83,8 +83,13 @@ impl CastError {
expr: ExprId,
expr_ty: Ty<'db>,
cast_ty: Ty<'db>,
) -> InferenceDiagnostic<'db> {
InferenceDiagnostic::InvalidCast { expr, error: self, expr_ty, cast_ty }
) -> InferenceDiagnostic {
InferenceDiagnostic::InvalidCast {
expr,
error: self,
expr_ty: expr_ty.store(),
cast_ty: cast_ty.store(),
}
}
}
@ -109,7 +114,7 @@ impl<'db> CastCheck<'db> {
pub(super) fn check(
&mut self,
ctx: &mut InferenceContext<'_, 'db>,
) -> Result<(), InferenceDiagnostic<'db>> {
) -> Result<(), InferenceDiagnostic> {
self.expr_ty = ctx.table.try_structurally_resolve_type(self.expr_ty);
self.cast_ty = ctx.table.try_structurally_resolve_type(self.cast_ty);
@ -137,7 +142,7 @@ impl<'db> CastCheck<'db> {
{
return Err(InferenceDiagnostic::CastToUnsized {
expr: self.expr,
cast_ty: self.cast_ty,
cast_ty: self.cast_ty.store(),
});
}
@ -393,8 +398,9 @@ fn pointer_kind<'db>(
let struct_data = id.fields(ctx.db);
if let Some((last_field, _)) = struct_data.fields().iter().last() {
let last_field_ty =
ctx.db.field_types(id.into())[last_field].instantiate(ctx.interner(), subst);
let last_field_ty = ctx.db.field_types(id.into())[last_field]
.get()
.instantiate(ctx.interner(), subst);
pointer_kind(last_field_ty, ctx)
} else {
Ok(Some(PointerKind::Thin))

View file

@ -15,7 +15,7 @@ use hir_def::{
};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{format_to, never};
use syntax::utils::is_raw_identifier;
@ -25,21 +25,21 @@ use crate::{
db::{HirDatabase, InternedClosure, InternedClosureId},
infer::InferenceContext,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
next_solver::{DbInterner, EarlyBinder, GenericArgs, Ty, TyKind},
next_solver::{DbInterner, GenericArgs, StoredEarlyBinder, StoredTy, Ty, TyKind},
traits::FnTrait,
};
// The below functions handle capture and closure kind (Fn, FnMut, ..)
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub(crate) struct HirPlace<'db> {
pub(crate) struct HirPlace {
pub(crate) local: BindingId,
pub(crate) projections: Vec<ProjectionElem<'db, Infallible>>,
pub(crate) projections: Vec<ProjectionElem<Infallible>>,
}
impl<'db> HirPlace<'db> {
fn ty(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
let mut ty = ctx.table.resolve_completely(ctx.result[self.local]);
impl HirPlace {
fn ty<'db>(&self, ctx: &mut InferenceContext<'_, 'db>) -> Ty<'db> {
let mut ty = ctx.table.resolve_completely(ctx.result.binding_ty(self.local));
for p in &self.projections {
ty = p.projected_ty(
&ctx.table.infer_ctxt,
@ -78,8 +78,8 @@ pub enum CaptureKind {
}
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
pub struct CapturedItem<'db> {
pub(crate) place: HirPlace<'db>,
pub struct CapturedItem {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
/// The inner vec is the stacks; the outer vec is for each capture reference.
///
@ -88,11 +88,10 @@ pub struct CapturedItem<'db> {
/// copy all captures of the inner closure to the outer closure, and then we may
/// truncate them, and we want the correct span to be reported.
span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
#[update(unsafe(with(crate::utils::unsafe_update_eq)))]
pub(crate) ty: EarlyBinder<'db, Ty<'db>>,
pub(crate) ty: StoredEarlyBinder<StoredTy>,
}
impl<'db> CapturedItem<'db> {
impl CapturedItem {
pub fn local(&self) -> BindingId {
self.place.local
}
@ -102,9 +101,9 @@ impl<'db> CapturedItem<'db> {
self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
}
pub fn ty(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
pub fn ty<'db>(&self, db: &'db dyn HirDatabase, subst: GenericArgs<'db>) -> Ty<'db> {
let interner = DbInterner::new_no_crate(db);
self.ty.instantiate(interner, subst.split_closure_args_untupled().parent_args)
self.ty.get().instantiate(interner, subst.split_closure_args_untupled().parent_args)
}
pub fn kind(&self) -> CaptureKind {
@ -273,15 +272,15 @@ impl<'db> CapturedItem<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct CapturedItemWithoutTy<'db> {
pub(crate) place: HirPlace<'db>,
pub(crate) struct CapturedItemWithoutTy {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
/// The inner vec is the stacks; the outer vec is for each capture reference.
pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
}
impl<'db> CapturedItemWithoutTy<'db> {
fn with_ty(self, ctx: &mut InferenceContext<'_, 'db>) -> CapturedItem<'db> {
impl CapturedItemWithoutTy {
fn with_ty(self, ctx: &mut InferenceContext<'_, '_>) -> CapturedItem {
let ty = self.place.ty(ctx);
let ty = match &self.kind {
CaptureKind::ByValue => ty,
@ -297,13 +296,13 @@ impl<'db> CapturedItemWithoutTy<'db> {
place: self.place,
kind: self.kind,
span_stacks: self.span_stacks,
ty: EarlyBinder::bind(ty),
ty: StoredEarlyBinder::bind(ty.store()),
}
}
}
impl<'db> InferenceContext<'_, 'db> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace<'db>> {
fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
let r = self.place_of_expr_without_adjust(tgt_expr)?;
let adjustments =
self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
@ -311,7 +310,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Pushes the span into `current_capture_span_stack`, *without clearing it first*.
fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace<'db>> {
fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option<HirPlace> {
if path.type_anchor().is_some() {
return None;
}
@ -332,7 +331,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace<'db>> {
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
self.current_capture_span_stack.clear();
match &self.body[tgt_expr] {
Expr::Path(p) => {
@ -367,7 +366,7 @@ impl<'db> InferenceContext<'_, 'db> {
None
}
fn push_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) {
fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
self.current_captures.push(CapturedItemWithoutTy {
place,
kind,
@ -375,11 +374,7 @@ impl<'db> InferenceContext<'_, 'db> {
});
}
fn truncate_capture_spans(
&self,
capture: &mut CapturedItemWithoutTy<'db>,
mut truncate_to: usize,
) {
fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
// The first span is the identifier, and it must always remain.
truncate_to += 1;
for span_stack in &mut capture.span_stacks {
@ -404,14 +399,14 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace<'db>>) {
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
}
self.walk_expr(expr);
}
fn add_capture(&mut self, place: HirPlace<'db>, kind: CaptureKind) {
fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
if self.is_upvar(&place) {
self.push_capture(place, kind);
}
@ -427,7 +422,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace<'db>>) {
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(
place,
@ -444,7 +439,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.walk_expr(expr);
}
fn consume_place(&mut self, place: HirPlace<'db>) {
fn consume_place(&mut self, place: HirPlace) {
if self.is_upvar(&place) {
let ty = place.ty(self);
let kind = if self.is_ty_copy(ty) {
@ -456,7 +451,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment<'db>]) {
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
if let Some((last, rest)) = adjustment.split_last() {
match &last.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
@ -477,12 +472,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn ref_capture_with_adjusts(
&mut self,
m: Mutability,
tgt_expr: ExprId,
rest: &[Adjustment<'db>],
) {
fn ref_capture_with_adjusts(&mut self, m: Mutability, tgt_expr: ExprId, rest: &[Adjustment]) {
let capture_kind = match m {
Mutability::Mut => CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
@ -780,7 +770,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
Pat::Bind { id, .. } => match self.result.binding_modes[p] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.type_of_binding[*id]) {
if self.is_ty_copy(self.result.binding_ty(*id)) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
} else {
update_result(CaptureKind::ByValue);
@ -798,7 +788,7 @@ impl<'db> InferenceContext<'_, 'db> {
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
}
fn is_upvar(&self, place: &HirPlace<'db>) -> bool {
fn is_upvar(&self, place: &HirPlace) -> bool {
if let Some(c) = self.current_closure {
let InternedClosure(_, root) = self.db.lookup_intern_closure(c);
return self.body.is_binding_upvar(place.local, root);
@ -830,7 +820,7 @@ impl<'db> InferenceContext<'_, 'db> {
// FIXME: Borrow checker problems without this.
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
let mut ty = self.table.resolve_completely(self.result[capture.place.local]);
let mut ty = self.table.resolve_completely(self.result.binding_ty(capture.place.local));
if ty.is_raw_ptr() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, 0);
@ -875,7 +865,7 @@ impl<'db> InferenceContext<'_, 'db> {
fn minimize_captures(&mut self) {
self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
let mut hash_map = FxHashMap::<HirPlace<'db>, usize>::default();
let mut hash_map = FxHashMap::<HirPlace, usize>::default();
let result = mem::take(&mut self.current_captures);
for mut item in result {
let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
@ -910,7 +900,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
}
fn consume_with_pat(&mut self, mut place: HirPlace<'db>, tgt_pat: PatId) {
fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
let adjustments_count =
self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
@ -921,7 +911,7 @@ impl<'db> InferenceContext<'_, 'db> {
Pat::Missing | Pat::Wild => (),
Pat::Tuple { args, ellipsis } => {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let field_count = match self.result[tgt_pat].kind() {
let field_count = match self.result.pat_ty(tgt_pat).kind() {
TyKind::Tuple(s) => s.len(),
_ => break 'reset_span_stack,
};
@ -1221,11 +1211,11 @@ impl<'db> InferenceContext<'_, 'db> {
}
/// Call this only when the last span in the stack isn't a split.
fn apply_adjusts_to_place<'db>(
fn apply_adjusts_to_place(
current_capture_span_stack: &mut Vec<MirSpan>,
mut r: HirPlace<'db>,
adjustments: &[Adjustment<'db>],
) -> Option<HirPlace<'db>> {
mut r: HirPlace,
adjustments: &[Adjustment],
) -> Option<HirPlace> {
let span = *current_capture_span_stack.last().expect("empty capture span stack");
for adj in adjustments {
match &adj.kind {

View file

@ -104,7 +104,7 @@ struct Coerce<D> {
cause: ObligationCause,
}
type CoerceResult<'db> = InferResult<'db, (Vec<Adjustment<'db>>, Ty<'db>)>;
type CoerceResult<'db> = InferResult<'db, (Vec<Adjustment>, Ty<'db>)>;
/// Coercing a mutable reference to an immutable works, while
/// coercing `&T` to `&mut T` should be forbidden.
@ -114,7 +114,7 @@ fn coerce_mutbls<'db>(from_mutbl: Mutability, to_mutbl: Mutability) -> RelateRes
/// This always returns `Ok(...)`.
fn success<'db>(
adj: Vec<Adjustment<'db>>,
adj: Vec<Adjustment>,
target: Ty<'db>,
obligations: PredicateObligations<'db>,
) -> CoerceResult<'db> {
@ -206,14 +206,17 @@ where
&mut self,
a: Ty<'db>,
b: Ty<'db>,
adjustments: impl IntoIterator<Item = Adjustment<'db>>,
adjustments: impl IntoIterator<Item = Adjustment>,
final_adjustment: Adjust,
) -> CoerceResult<'db> {
self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| {
success(
adjustments
.into_iter()
.chain(std::iter::once(Adjustment { target: ty, kind: final_adjustment }))
.chain(std::iter::once(Adjustment {
target: ty.store(),
kind: final_adjustment,
}))
.collect(),
ty,
obligations,
@ -237,7 +240,7 @@ where
if self.coerce_never {
return success(
vec![Adjustment { kind: Adjust::NeverToAny, target: b }],
vec![Adjustment { kind: Adjust::NeverToAny, target: b.store() }],
b,
PredicateObligations::new(),
);
@ -532,7 +535,8 @@ where
// Now apply the autoref.
let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase);
adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty });
adjustments
.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty.store() });
debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments);
@ -635,10 +639,10 @@ where
let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment { kind: Adjust::Deref(None), target: ty_a.store() },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b).store(),
},
))
}
@ -646,16 +650,16 @@ where
coerce_mutbls(mt_a, mt_b)?;
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment { kind: Adjust::Deref(None), target: ty_a.store() },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)),
target: Ty::new_ptr(self.interner(), ty_a, mt_b),
target: Ty::new_ptr(self.interner(), ty_a, mt_b).store(),
},
))
}
_ => None,
};
let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target);
let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target.as_ref());
// Setup either a subtyping or a LUB relationship between
// the `CoerceUnsized` target type and the expected type.
@ -726,7 +730,7 @@ where
Ok(None) => {
if trait_pred.def_id().0 == unsize_did {
let self_ty = trait_pred.self_ty();
let unsize_ty = trait_pred.trait_ref.args.inner()[1].expect_ty();
let unsize_ty = trait_pred.trait_ref.args[1].expect_ty();
debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred);
match (self_ty.kind(), unsize_ty.kind()) {
(TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..))
@ -815,7 +819,7 @@ where
b,
adjustment.map(|kind| Adjustment {
kind,
target: Ty::new_fn_ptr(this.interner(), fn_ty_a),
target: Ty::new_fn_ptr(this.interner(), fn_ty_a).store(),
}),
Adjust::Pointer(PointerCast::UnsafeFnPointer),
)
@ -955,7 +959,7 @@ where
self.unify_and(
a_raw,
b,
[Adjustment { kind: Adjust::Deref(None), target: mt_a.ty }],
[Adjustment { kind: Adjust::Deref(None), target: mt_a.ty.store() }],
Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)),
)
} else if mt_a.mutbl != mutbl_b {
@ -1170,12 +1174,15 @@ impl<'db> InferenceContext<'_, 'db> {
for &expr in exprs {
self.write_expr_adj(
expr,
Box::new([Adjustment { kind: prev_adjustment.clone(), target: fn_ptr }]),
Box::new([Adjustment {
kind: prev_adjustment.clone(),
target: fn_ptr.store(),
}]),
);
}
self.write_expr_adj(
new,
Box::new([Adjustment { kind: next_adjustment, target: fn_ptr }]),
Box::new([Adjustment { kind: next_adjustment, target: fn_ptr.store() }]),
);
return Ok(fn_ptr);
}
@ -1510,9 +1517,9 @@ impl<'db, 'exprs> CoerceMany<'db, 'exprs> {
icx.result.type_mismatches.get_or_insert_default().insert(
expression.into(),
if label_expression_as_expected {
TypeMismatch { expected: found, actual: expected }
TypeMismatch { expected: found.store(), actual: expected.store() }
} else {
TypeMismatch { expected, actual: found }
TypeMismatch { expected: expected.store(), actual: found.store() }
},
);
}
@ -1570,7 +1577,7 @@ fn coerce<'db>(
db: &'db dyn HirDatabase,
env: ParamEnvAndCrate<'db>,
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> {
) -> Result<(Vec<Adjustment>, Ty<'db>), TypeError<DbInterner<'db>>> {
let interner = DbInterner::new_with(db, env.krate);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys);
@ -1593,7 +1600,6 @@ fn coerce<'db>(
let mut ocx = ObligationCtxt::new(&infcx);
let (adjustments, ty) = ocx.register_infer_ok_obligations(infer_ok);
_ = ocx.try_evaluate_obligations();
let (adjustments, ty) = infcx.resolve_vars_if_possible((adjustments, ty));
// default any type vars that weren't unified back to their original bound vars
// (kind of hacky)
@ -1701,10 +1707,18 @@ fn coerce<'db>(
}
// FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferCtxt`.
let (adjustments, ty) = (adjustments, ty).fold_with(&mut Resolver {
interner,
debruijn: DebruijnIndex::ZERO,
var_values: vars.var_values,
});
let mut resolver =
Resolver { interner, debruijn: DebruijnIndex::ZERO, var_values: vars.var_values };
let ty = infcx.resolve_vars_if_possible(ty).fold_with(&mut resolver);
let adjustments = adjustments
.into_iter()
.map(|adjustment| Adjustment {
kind: adjustment.kind,
target: infcx
.resolve_vars_if_possible(adjustment.target.as_ref())
.fold_with(&mut resolver)
.store(),
})
.collect();
Ok((adjustments, ty))
}

View file

@ -25,10 +25,10 @@ use crate::{
// to our resolver and so we cannot have mutable reference, but we really want to have
// ability to dispatch diagnostics during this work otherwise the code becomes a complete mess.
#[derive(Debug, Default, Clone)]
pub(super) struct Diagnostics<'db>(RefCell<ThinVec<InferenceDiagnostic<'db>>>);
pub(super) struct Diagnostics(RefCell<ThinVec<InferenceDiagnostic>>);
impl<'db> Diagnostics<'db> {
pub(super) fn push(&self, diagnostic: InferenceDiagnostic<'db>) {
impl Diagnostics {
pub(super) fn push(&self, diagnostic: InferenceDiagnostic) {
self.0.borrow_mut().push(diagnostic);
}
@ -42,19 +42,19 @@ impl<'db> Diagnostics<'db> {
);
}
pub(super) fn finish(self) -> ThinVec<InferenceDiagnostic<'db>> {
pub(super) fn finish(self) -> ThinVec<InferenceDiagnostic> {
self.0.into_inner()
}
}
pub(crate) struct PathDiagnosticCallbackData<'a, 'db> {
pub(crate) struct PathDiagnosticCallbackData<'a> {
node: ExprOrPatId,
diagnostics: &'a Diagnostics<'db>,
diagnostics: &'a Diagnostics,
}
pub(super) struct InferenceTyLoweringContext<'db, 'a> {
ctx: TyLoweringContext<'db, 'a>,
diagnostics: &'a Diagnostics<'db>,
diagnostics: &'a Diagnostics,
source: InferenceTyDiagnosticSource,
}
@ -64,7 +64,7 @@ impl<'db, 'a> InferenceTyLoweringContext<'db, 'a> {
db: &'db dyn HirDatabase,
resolver: &'a Resolver<'db>,
store: &'a ExpressionStore,
diagnostics: &'a Diagnostics<'db>,
diagnostics: &'a Diagnostics,
source: InferenceTyDiagnosticSource,
generic_def: GenericDefId,
lifetime_elision: LifetimeElisionKind<'db>,

View file

@ -17,7 +17,7 @@ use hir_expand::name::Name;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
CoroutineArgs, CoroutineArgsParts, InferTy, Interner,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _},
inherent::{AdtDef, GenericArgs as _, IntoKind, Ty as _},
};
use syntax::ast::RangeOp;
use tracing::debug;
@ -35,7 +35,7 @@ use crate::{
lower::{GenericPredicates, lower_mutability},
method_resolution::{self, CandidateId, MethodCallee, MethodError},
next_solver::{
ErrorGuaranteed, FnSig, GenericArgs, TraitRef, Ty, TyKind, TypeError,
ErrorGuaranteed, FnSig, GenericArg, GenericArgs, TraitRef, Ty, TyKind, TypeError,
infer::{
BoundRegionConversionTime, InferOk,
traits::{Obligation, ObligationCause},
@ -68,10 +68,10 @@ impl<'db> InferenceContext<'_, 'db> {
if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
let could_unify = self.unify(ty, expected_ty);
if !could_unify {
self.result
.type_mismatches
.get_or_insert_default()
.insert(tgt_expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
tgt_expr.into(),
TypeMismatch { expected: expected_ty.store(), actual: ty.store() },
);
}
}
ty
@ -98,10 +98,10 @@ impl<'db> InferenceContext<'_, 'db> {
match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, is_read) {
Ok(res) => res,
Err(_) => {
self.result
.type_mismatches
.get_or_insert_default()
.insert(expr.into(), TypeMismatch { expected: target, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
expr.into(),
TypeMismatch { expected: target.store(), actual: ty.store() },
);
target
}
}
@ -276,7 +276,7 @@ impl<'db> InferenceContext<'_, 'db> {
if ty.is_never() {
if let Some(adjustments) = self.result.expr_adjustments.get(&expr) {
return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &**adjustments {
*target
target.as_ref()
} else {
self.err_ty()
};
@ -292,10 +292,10 @@ impl<'db> InferenceContext<'_, 'db> {
if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
let could_unify = self.unify(ty, expected_ty);
if !could_unify {
self.result
.type_mismatches
.get_or_insert_default()
.insert(expr.into(), TypeMismatch { expected: expected_ty, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
expr.into(),
TypeMismatch { expected: expected_ty.store(), actual: ty.store() },
);
}
}
ty
@ -637,7 +637,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
};
let field_ty = field_def.map_or(self.err_ty(), |it| {
field_types[it].instantiate(self.interner(), &substs)
field_types[it].get().instantiate(self.interner(), &substs)
});
// Field type might have some unknown types
@ -780,7 +780,7 @@ impl<'db> InferenceContext<'_, 'db> {
Ty::new_adt(
self.interner(),
adt,
GenericArgs::new_from_iter(self.interner(), [ty.into()]),
GenericArgs::new_from_iter(self.interner(), [GenericArg::from(ty)]),
)
};
match (range_type, lhs_ty, rhs_ty) {
@ -947,7 +947,10 @@ impl<'db> InferenceContext<'_, 'db> {
// Underscore expression is an error, we render a specialized diagnostic
// to let the user know what type is expected though.
let expected = expected.to_option(&mut self.table).unwrap_or_else(|| self.err_ty());
self.push_diagnostic(InferenceDiagnostic::TypedHole { expr: tgt_expr, expected });
self.push_diagnostic(InferenceDiagnostic::TypedHole {
expr: tgt_expr,
expected: expected.store(),
});
expected
}
Expr::OffsetOf(_) => self.types.usize,
@ -1183,10 +1186,10 @@ impl<'db> InferenceContext<'_, 'db> {
match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes) {
Ok(res) => res,
Err(_) => {
this.result
.type_mismatches
.get_or_insert_default()
.insert(tgt_expr.into(), TypeMismatch { expected: target, actual: ty });
this.result.type_mismatches.get_or_insert_default().insert(
tgt_expr.into(),
TypeMismatch { expected: target.store(), actual: ty.store() },
);
target
}
}
@ -1234,7 +1237,7 @@ impl<'db> InferenceContext<'_, 'db> {
&mut self,
fn_x: FnTrait,
derefed_callee: Ty<'db>,
adjustments: &mut Vec<Adjustment<'db>>,
adjustments: &mut Vec<Adjustment>,
callee_ty: Ty<'db>,
params: &[Ty<'db>],
tgt_expr: ExprId,
@ -1249,7 +1252,8 @@ impl<'db> InferenceContext<'_, 'db> {
.unwrap_or(true)
{
// prefer reborrow to move
adjustments.push(Adjustment { kind: Adjust::Deref(None), target: inner });
adjustments
.push(Adjustment { kind: Adjust::Deref(None), target: inner.store() });
adjustments.push(Adjustment::borrow(
self.interner(),
Mutability::Mut,
@ -1282,13 +1286,10 @@ impl<'db> InferenceContext<'_, 'db> {
};
let trait_data = trait_.trait_items(self.db);
if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) {
let subst = GenericArgs::new_from_iter(
self.interner(),
[
callee_ty.into(),
Ty::new_tup_from_iter(self.interner(), params.iter().copied()).into(),
],
);
let subst = GenericArgs::new_from_slice(&[
callee_ty.into(),
Ty::new_tup(self.interner(), params).into(),
]);
self.write_method_resolution(tgt_expr, func, subst);
}
}
@ -1549,7 +1550,10 @@ impl<'db> InferenceContext<'_, 'db> {
{
this.result.type_mismatches.get_or_insert_default().insert(
expr.into(),
TypeMismatch { expected: t, actual: this.types.unit },
TypeMismatch {
expected: t.store(),
actual: this.types.unit.store(),
},
);
}
t
@ -1567,7 +1571,7 @@ impl<'db> InferenceContext<'_, 'db> {
&mut self,
receiver_ty: Ty<'db>,
name: &Name,
) -> Option<(Ty<'db>, Either<FieldId, TupleFieldId>, Vec<Adjustment<'db>>, bool)> {
) -> Option<(Ty<'db>, Either<FieldId, TupleFieldId>, Vec<Adjustment>, bool)> {
let interner = self.interner();
let mut autoderef = self.table.autoderef_with_tracking(receiver_ty);
let mut private_field = None;
@ -1612,6 +1616,7 @@ impl<'db> InferenceContext<'_, 'db> {
return None;
}
let ty = self.db.field_types(field_id.parent)[field_id.local_id]
.get()
.instantiate(interner, parameters);
Some((Either::Left(field_id), ty))
});
@ -1629,6 +1634,7 @@ impl<'db> InferenceContext<'_, 'db> {
let adjustments =
self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok());
let ty = self.db.field_types(field_id.parent)[field_id.local_id]
.get()
.instantiate(self.interner(), subst);
let ty = self.process_remote_user_written_ty(ty);
@ -1679,7 +1685,7 @@ impl<'db> InferenceContext<'_, 'db> {
);
self.push_diagnostic(InferenceDiagnostic::UnresolvedField {
expr: tgt_expr,
receiver: receiver_ty,
receiver: receiver_ty.store(),
name: name.clone(),
method_with_same_name_exists: resolved.is_ok(),
});
@ -1755,7 +1761,7 @@ impl<'db> InferenceContext<'_, 'db> {
None => {
self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
call_expr: tgt_expr,
found: callee_ty,
found: callee_ty.store(),
});
(Vec::new(), Ty::new_error(interner, ErrorGuaranteed))
}
@ -1867,9 +1873,9 @@ impl<'db> InferenceContext<'_, 'db> {
self.push_diagnostic(InferenceDiagnostic::UnresolvedMethodCall {
expr: tgt_expr,
receiver: receiver_ty,
receiver: receiver_ty.store(),
name: method_name.clone(),
field_with_same_name: field_with_same_name_exists,
field_with_same_name: field_with_same_name_exists.map(|it| it.store()),
assoc_func_with_same_name: assoc_func_with_same_name.map(|it| it.def_id),
});
@ -2115,10 +2121,10 @@ impl<'db> InferenceContext<'_, 'db> {
&& args_count_matches
{
// Don't report type mismatches if there is a mismatch in args count.
self.result
.type_mismatches
.get_or_insert_default()
.insert((*arg).into(), TypeMismatch { expected, actual: found });
self.result.type_mismatches.get_or_insert_default().insert(
(*arg).into(),
TypeMismatch { expected: expected.store(), actual: found.store() },
);
}
}
}

View file

@ -26,8 +26,8 @@ impl<'db> InferenceContext<'_, 'db> {
Adjust::Deref(Some(d)) => {
if mutability == Mutability::Mut {
let source_ty = match adjustments.peek() {
Some(prev_adj) => prev_adj.target,
None => self.result.type_of_expr[tgt_expr],
Some(prev_adj) => prev_adj.target.as_ref(),
None => self.result.type_of_expr[tgt_expr].as_ref(),
};
if let Some(infer_ok) = Self::try_mutable_overloaded_place_op(
&self.table,

View file

@ -213,7 +213,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
let autoref = Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: method.sig.inputs_and_output.inputs()[0],
target: method.sig.inputs_and_output.inputs()[0].store(),
};
self.write_expr_adj(lhs_expr, Box::new([autoref]));
}
@ -227,7 +227,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
let autoref = Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: method.sig.inputs_and_output.inputs()[1],
target: method.sig.inputs_and_output.inputs()[1].store(),
};
// HACK(eddyb) Bypass checks due to reborrows being in
// some cases applied on the RHS, on top of which we need

View file

@ -112,12 +112,12 @@ impl<'db> InferenceContext<'_, 'db> {
_ = self.demand_eqtype_fixme_no_diag(expected, hidden_type.ty);
}
self.result.type_of_opaque.insert(def_id, ty.ty);
self.result.type_of_opaque.insert(def_id, ty.ty.store());
continue;
}
self.result.type_of_opaque.insert(def_id, self.types.error);
self.result.type_of_opaque.insert(def_id, self.types.error.store());
}
}

View file

@ -9,7 +9,7 @@ use hir_def::{
};
use hir_expand::name::Name;
use rustc_ast_ir::Mutability;
use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{GenericArg as _, GenericArgs as _, IntoKind, Ty as _};
use stdx::TupleExt;
use crate::{
@ -82,7 +82,7 @@ impl<'db> InferenceContext<'_, 'db> {
{
// FIXME(DIAGNOSE): private tuple field
}
let f = field_types[local_id];
let f = field_types[local_id].get();
let expected_ty = match substs {
Some(substs) => f.instantiate(self.interner(), substs),
None => f.instantiate(self.interner(), &[]),
@ -146,7 +146,7 @@ impl<'db> InferenceContext<'_, 'db> {
variant: def,
});
}
let f = field_types[local_id];
let f = field_types[local_id].get();
let expected_ty = match substs {
Some(substs) => f.instantiate(self.interner(), substs),
None => f.instantiate(self.interner(), &[]),
@ -270,7 +270,7 @@ impl<'db> InferenceContext<'_, 'db> {
} else if self.is_non_ref_pat(self.body, pat) {
let mut pat_adjustments = Vec::new();
while let TyKind::Ref(_lifetime, inner, mutability) = expected.kind() {
pat_adjustments.push(expected);
pat_adjustments.push(expected.store());
expected = self.table.try_structurally_resolve_type(inner);
default_bm = match default_bm {
BindingMode::Move => BindingMode::Ref(mutability),
@ -333,7 +333,10 @@ impl<'db> InferenceContext<'_, 'db> {
Err(_) => {
self.result.type_mismatches.get_or_insert_default().insert(
pat.into(),
TypeMismatch { expected, actual: ty_inserted_vars },
TypeMismatch {
expected: expected.store(),
actual: ty_inserted_vars.store(),
},
);
self.write_pat_ty(pat, ty);
// We return `expected` to prevent cascading errors. I guess an alternative is to
@ -413,10 +416,10 @@ impl<'db> InferenceContext<'_, 'db> {
) {
Ok(ty) => ty,
Err(_) => {
self.result
.type_mismatches
.get_or_insert_default()
.insert(pat.into(), TypeMismatch { expected, actual: lhs_ty });
self.result.type_mismatches.get_or_insert_default().insert(
pat.into(),
TypeMismatch { expected: expected.store(), actual: lhs_ty.store() },
);
// `rhs_ty` is returned so no further type mismatches are
// reported because of this mismatch.
expected
@ -432,22 +435,22 @@ impl<'db> InferenceContext<'_, 'db> {
let ty = self.insert_type_vars_shallow(ty);
// FIXME: This never check is odd, but required with out we do inference right now
if !expected.is_never() && !self.unify(ty, expected) {
self.result
.type_mismatches
.get_or_insert_default()
.insert(pat.into(), TypeMismatch { expected, actual: ty });
self.result.type_mismatches.get_or_insert_default().insert(
pat.into(),
TypeMismatch { expected: expected.store(), actual: ty.store() },
);
}
self.write_pat_ty(pat, ty);
self.pat_ty_after_adjustment(pat)
}
fn pat_ty_after_adjustment(&self, pat: PatId) -> Ty<'db> {
*self
.result
self.result
.pat_adjustments
.get(&pat)
.and_then(|it| it.last())
.unwrap_or(&self.result.type_of_pat[pat])
.unwrap_or_else(|| &self.result.type_of_pat[pat])
.as_ref()
}
fn infer_ref_pat(

View file

@ -64,7 +64,7 @@ impl<'db> InferenceContext<'_, 'db> {
}
ValueNs::LocalBinding(pat) => {
return match self.result.type_of_binding.get(pat) {
Some(ty) => Some(ValuePathResolution::NonGeneric(*ty)),
Some(ty) => Some(ValuePathResolution::NonGeneric(ty.as_ref())),
None => {
never!("uninferred pattern?");
None

View file

@ -65,7 +65,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
oprnd_expr,
Box::new([Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
target: method.sig.inputs_and_output.inputs()[0],
target: method.sig.inputs_and_output.inputs()[0].store(),
}]),
);
} else {
@ -151,7 +151,8 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
{
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty),
target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty)
.store(),
});
} else {
panic!("input to index is not a ref?");
@ -159,7 +160,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
if unsize {
adjustments.push(Adjustment {
kind: Adjust::Pointer(PointerCast::Unsize),
target: method.sig.inputs_and_output.inputs()[0],
target: method.sig.inputs_and_output.inputs()[0].store(),
});
}
autoderef.ctx().write_expr_adj(base_expr, adjustments.into_boxed_slice());
@ -283,7 +284,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
None => return,
};
debug!("convert_place_op_to_mutable: method={:?}", method);
self.result.method_resolutions.insert(expr, (method.def_id, method.args));
self.result.method_resolutions.insert(expr, (method.def_id, method.args.store()));
let TyKind::Ref(region, _, Mutability::Mut) =
method.sig.inputs_and_output.inputs()[0].kind()
@ -308,9 +309,9 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
allow_two_phase_borrow: AllowTwoPhase::No,
};
adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(mutbl));
adjustment.target = Ty::new_ref(interner, region, source, mutbl.into());
adjustment.target = Ty::new_ref(interner, region, source, mutbl.into()).store();
}
source = adjustment.target;
source = adjustment.target.as_ref();
}
// If we have an autoref followed by unsizing at the end, fix the unsize target.
@ -320,7 +321,7 @@ impl<'a, 'db> InferenceContext<'a, 'db> {
Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target },
] = adjustments[..]
{
*target = method.sig.inputs_and_output.inputs()[0];
*target = method.sig.inputs_and_output.inputs()[0].store();
}
}
}

View file

@ -9,7 +9,7 @@ use intern::sym;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom,
inherent::{Const as _, GenericArg as _, IntoKind, SliceLike, Ty as _},
inherent::{Const as _, GenericArg as _, IntoKind, Ty as _},
solve::Certainty,
};
use smallvec::SmallVec;
@ -640,6 +640,7 @@ impl<'db> InferenceTable<'db> {
let struct_data = id.fields(self.db);
if let Some((last_field, _)) = struct_data.fields().iter().next_back() {
let last_field_ty = self.db.field_types(id.into())[last_field]
.get()
.instantiate(self.interner(), subst);
if structs.contains(&ty) {
// A struct recursively contains itself as a tail field somewhere.

View file

@ -154,7 +154,7 @@ impl<'a, 'db> UninhabitedFrom<'a, 'db> {
let field_vis = if is_enum { None } else { Some(self.db().field_visibilities(variant)) };
for (fid, _) in fields.iter() {
self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid], subst)?;
self.visit_field(field_vis.as_ref().map(|it| it[fid]), &field_tys[fid].get(), subst)?;
}
CONTINUE_OPAQUELY_INHABITED
}

View file

@ -14,10 +14,7 @@ use rustc_abi::{
TargetDataLayout, WrappingRange,
};
use rustc_index::IndexVec;
use rustc_type_ir::{
FloatTy, IntTy, UintTy,
inherent::{IntoKind, SliceLike},
};
use rustc_type_ir::{FloatTy, IntTy, UintTy, inherent::IntoKind};
use triomphe::Arc;
use crate::{
@ -25,9 +22,10 @@ use crate::{
consteval::try_const_usize,
db::HirDatabase,
next_solver::{
DbInterner, GenericArgs, Ty, TyKind, TypingMode,
DbInterner, GenericArgs, StoredTy, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, traits::ObligationCause},
},
traits::StoredParamEnvAndCrate,
};
pub(crate) use self::adt::layout_of_adt_cycle_result;
@ -144,22 +142,22 @@ fn layout_of_simd_ty<'db>(
let Some(TyKind::Array(e_ty, e_len)) = fields
.next()
.filter(|_| fields.next().is_none())
.map(|f| (*f.1).instantiate(DbInterner::new_no_crate(db), args).kind())
.map(|f| (*f.1).get().instantiate(DbInterner::new_no_crate(db), args).kind())
else {
return Err(LayoutError::InvalidSimdType);
};
let e_len = try_const_usize(db, e_len).ok_or(LayoutError::HasErrorConst)? as u64;
let e_ly = db.layout_of_ty(e_ty, env)?;
let e_ly = db.layout_of_ty(e_ty.store(), env.store())?;
let cx = LayoutCx::new(dl);
Ok(Arc::new(cx.calc.simd_type(e_ly, e_len, repr_packed)?))
}
pub fn layout_of_ty_query<'db>(
db: &'db dyn HirDatabase,
ty: Ty<'db>,
trait_env: ParamEnvAndCrate<'db>,
pub fn layout_of_ty_query(
db: &dyn HirDatabase,
ty: StoredTy,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let interner = DbInterner::new_with(db, krate);
@ -170,19 +168,29 @@ pub fn layout_of_ty_query<'db>(
let cx = LayoutCx::new(dl);
let infer_ctxt = interner.infer_ctxt().build(TypingMode::PostAnalysis);
let cause = ObligationCause::dummy();
let ty = infer_ctxt.at(&cause, trait_env.param_env).deeply_normalize(ty).unwrap_or(ty);
let ty = infer_ctxt
.at(&cause, trait_env.param_env())
.deeply_normalize(ty.as_ref())
.unwrap_or(ty.as_ref());
let result = match ty.kind() {
TyKind::Adt(def, args) => {
match def.inner().id {
hir_def::AdtId::StructId(s) => {
let repr = AttrFlags::repr(db, s.into()).unwrap_or_default();
if repr.simd() {
return layout_of_simd_ty(db, s, repr.packed(), &args, trait_env, &target);
return layout_of_simd_ty(
db,
s,
repr.packed(),
&args,
trait_env.as_ref(),
&target,
);
}
}
_ => {}
}
return db.layout_of_adt(def.inner().id, args, trait_env);
return db.layout_of_adt(def.inner().id, args.store(), trait_env);
}
TyKind::Bool => Layout::scalar(
dl,
@ -246,21 +254,23 @@ pub fn layout_of_ty_query<'db>(
),
TyKind::Tuple(tys) => {
let kind =
if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
if tys.is_empty() { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
let fields =
tys.iter().map(|k| db.layout_of_ty(k, trait_env)).collect::<Result<Vec<_>, _>>()?;
let fields = tys
.iter()
.map(|k| db.layout_of_ty(k.store(), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.calc.univariant(&fields, &ReprOptions::default(), kind)?
}
TyKind::Array(element, count) => {
let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element, trait_env)?;
let element = db.layout_of_ty(element.store(), trait_env)?;
cx.calc.array_like::<_, _, ()>(&element, Some(count))?
}
TyKind::Slice(element) => {
let element = db.layout_of_ty(element, trait_env)?;
let element = db.layout_of_ty(element.store(), trait_env)?;
cx.calc.array_like::<_, _, ()>(&element, None)?
}
TyKind::Str => {
@ -325,9 +335,11 @@ pub fn layout_of_ty_query<'db>(
let fields = captures
.iter()
.map(|it| {
let ty =
it.ty.instantiate(interner, args.split_closure_args_untupled().parent_args);
db.layout_of_ty(ty, trait_env)
let ty = it
.ty
.get()
.instantiate(interner, args.split_closure_args_untupled().parent_args);
db.layout_of_ty(ty.store(), trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
@ -357,11 +369,11 @@ pub fn layout_of_ty_query<'db>(
Ok(Arc::new(result))
}
pub(crate) fn layout_of_ty_cycle_result<'db>(
pub(crate) fn layout_of_ty_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_: Ty<'db>,
_: ParamEnvAndCrate<'db>,
_: StoredTy,
_: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}
@ -377,7 +389,7 @@ fn struct_tail_erasing_lifetimes<'a>(db: &'a dyn HirDatabase, pointee: Ty<'a>) -
let mut it = data.fields().iter().rev();
match it.next() {
Some((f, _)) => {
let last_field_ty = field_ty(db, struct_id.into(), f, &args);
let last_field_ty = field_ty(db, struct_id.into(), f, args);
struct_tail_erasing_lifetimes(db, last_field_ty)
}
None => pointee,
@ -398,9 +410,9 @@ fn field_ty<'a>(
db: &'a dyn HirDatabase,
def: hir_def::VariantId,
fd: LocalFieldId,
args: &GenericArgs<'a>,
args: GenericArgs<'a>,
) -> Ty<'a> {
db.field_types(def)[fd].instantiate(DbInterner::new_no_crate(db), args)
db.field_types(def)[fd].get().instantiate(DbInterner::new_no_crate(db), args)
}
fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar {

View file

@ -13,17 +13,17 @@ use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
ParamEnvAndCrate,
db::HirDatabase,
layout::{Layout, LayoutCx, LayoutError, field_ty},
next_solver::GenericArgs,
next_solver::StoredGenericArgs,
traits::StoredParamEnvAndCrate,
};
pub fn layout_of_adt_query<'db>(
db: &'db dyn HirDatabase,
pub fn layout_of_adt_query(
db: &dyn HirDatabase,
def: AdtId,
args: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
args: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
let krate = trait_env.krate;
let Ok(target) = db.target_data_layout(krate) else {
@ -34,7 +34,9 @@ pub fn layout_of_adt_query<'db>(
let handle_variant = |def: VariantId, var: &VariantFields| {
var.fields()
.iter()
.map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &args), trait_env))
.map(|(fd, _)| {
db.layout_of_ty(field_ty(db, def, fd, args.as_ref()).store(), trait_env.clone())
})
.collect::<Result<Vec<_>, _>>()
};
let (variants, repr, is_special_no_niche) = match def {
@ -95,12 +97,12 @@ pub fn layout_of_adt_query<'db>(
Ok(Arc::new(result))
}
pub(crate) fn layout_of_adt_cycle_result<'db>(
_: &'db dyn HirDatabase,
pub(crate) fn layout_of_adt_cycle_result(
_: &dyn HirDatabase,
_: salsa::Id,
_def: AdtId,
_args: GenericArgs<'db>,
_trait_env: ParamEnvAndCrate<'db>,
_args: StoredGenericArgs,
_trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}

View file

@ -98,7 +98,7 @@ fn eval_goal(
Either::Left(it) => it.krate(&db),
Either::Right(it) => it.krate(&db),
};
db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate })
db.layout_of_ty(goal_ty.store(), ParamEnvAndCrate { param_env, krate }.store())
})
}
@ -140,10 +140,10 @@ fn eval_expr(
.unwrap()
.0;
let infer = InferenceResult::for_body(&db, function_id.into());
let goal_ty = infer.type_of_binding[b];
let goal_ty = infer.type_of_binding[b].clone();
let param_env = db.trait_environment(function_id.into());
let krate = function_id.krate(&db);
db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate })
db.layout_of_ty(goal_ty, ParamEnvAndCrate { param_env, krate }.store())
})
}

View file

@ -61,11 +61,12 @@ use hir_def::{CallableDefId, TypeOrConstParamId, type_ref::Rawness};
use hir_expand::name::Name;
use indexmap::{IndexMap, map::Entry};
use intern::{Symbol, sym};
use macros::GenericTypeVisitable;
use mir::{MirEvalError, VTableMap};
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use rustc_type_ir::{
BoundVarIndexKind, TypeSuperVisitable, TypeVisitableExt, UpcastFrom,
inherent::{IntoKind, SliceLike, Ty as _},
inherent::{IntoKind, Ty as _},
};
use syntax::ast::{ConstArg, make};
use traits::FnTrait;
@ -104,7 +105,7 @@ pub use utils::{
/// A constant can have reference to other things. Memory map job is holding
/// the necessary bits of memory of the const eval session to keep the constant
/// meaningful.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub enum MemoryMap<'db> {
#[default]
Empty,
@ -112,7 +113,7 @@ pub enum MemoryMap<'db> {
Complex(Box<ComplexMemoryMap<'db>>),
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct ComplexMemoryMap<'db> {
memory: IndexMap<usize, Box<[u8]>, FxBuildHasher>,
vtable: VTableMap<'db>,
@ -134,7 +135,7 @@ impl ComplexMemoryMap<'_> {
}
impl<'db> MemoryMap<'db> {
pub fn vtable_ty(&self, id: usize) -> Result<Ty<'db>, MirEvalError<'db>> {
pub fn vtable_ty(&self, id: usize) -> Result<Ty<'db>, MirEvalError> {
match self {
MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)),
MemoryMap::Complex(cm) => cm.vtable.ty(id),
@ -150,8 +151,8 @@ impl<'db> MemoryMap<'db> {
/// allocator function as `f` and it will return a mapping of old addresses to new addresses.
fn transform_addresses(
&self,
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError<'db>>,
) -> Result<FxHashMap<usize, usize>, MirEvalError<'db>> {
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
) -> Result<FxHashMap<usize, usize>, MirEvalError> {
let mut transform = |(addr, val): (&usize, &[u8])| {
let addr = *addr;
let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
@ -333,9 +334,9 @@ impl FnAbi {
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
pub enum ImplTraitId<'db> {
ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx<'db>),
TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx<'db>),
pub enum ImplTraitId {
ReturnTypeImplTrait(hir_def::FunctionId, next_solver::ImplTraitIdx),
TypeAliasImplTrait(hir_def::TypeAliasId, next_solver::ImplTraitIdx),
}
/// 'Canonicalizes' the `t` by replacing any errors with new variables. Also

File diff suppressed because it is too large Load diff

View file

@ -20,7 +20,7 @@ use hir_def::{
use hir_expand::name::Name;
use rustc_type_ir::{
AliasTerm, AliasTy, AliasTyKind,
inherent::{GenericArgs as _, Region as _, SliceLike, Ty as _},
inherent::{GenericArgs as _, Region as _, Ty as _},
};
use smallvec::SmallVec;
use stdx::never;
@ -45,17 +45,15 @@ use super::{
const_param_ty_query, ty_query,
};
type CallbackData<'a, 'db> = Either<
PathDiagnosticCallbackData,
crate::infer::diagnostics::PathDiagnosticCallbackData<'a, 'db>,
>;
type CallbackData<'a> =
Either<PathDiagnosticCallbackData, crate::infer::diagnostics::PathDiagnosticCallbackData<'a>>;
// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box<dyn FnMut()>`
// because of the allocation, so we create a lifetime-less callback, tailored for our needs.
pub(crate) struct PathDiagnosticCallback<'a, 'db> {
pub(crate) data: CallbackData<'a, 'db>,
pub(crate) data: CallbackData<'a>,
pub(crate) callback:
fn(&CallbackData<'_, 'db>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic),
fn(&CallbackData<'_>, &mut TyLoweringContext<'db, '_>, PathLoweringDiagnostic),
}
pub(crate) struct PathLoweringContext<'a, 'b, 'db> {
@ -555,7 +553,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> {
ValueTyDefId::UnionId(it) => it.into(),
ValueTyDefId::ConstId(it) => it.into(),
ValueTyDefId::StaticId(_) => {
return GenericArgs::new_from_iter(interner, []);
return GenericArgs::empty(interner);
}
ValueTyDefId::EnumVariantId(var) => {
// the generic args for an enum variant may be either specified

View file

@ -26,7 +26,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
TypeVisitableExt,
fast_reject::{TreatParams, simplify_type},
inherent::{BoundExistentialPredicates, IntoKind, SliceLike},
inherent::{BoundExistentialPredicates, IntoKind},
};
use stdx::impl_from;
use triomphe::Arc;

View file

@ -9,7 +9,7 @@ use hir_def::{
use rustc_type_ir::{
TypeFoldable,
elaborate::elaborate,
inherent::{BoundExistentialPredicates, IntoKind, SliceLike, Ty as _},
inherent::{BoundExistentialPredicates, IntoKind, Ty as _},
};
use tracing::debug;
@ -45,7 +45,7 @@ struct ConfirmContext<'a, 'b, 'db> {
pub(crate) struct ConfirmResult<'db> {
pub(crate) callee: MethodCallee<'db>,
pub(crate) illegal_sized_bound: bool,
pub(crate) adjustments: Box<[Adjustment<'db>]>,
pub(crate) adjustments: Box<[Adjustment]>,
}
impl<'a, 'db> InferenceContext<'a, 'db> {
@ -177,7 +177,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
&mut self,
unadjusted_self_ty: Ty<'db>,
pick: &probe::Pick<'db>,
) -> (Ty<'db>, Box<[Adjustment<'db>]>) {
) -> (Ty<'db>, Box<[Adjustment]>) {
// Commit the autoderefs by calling `autoderef` again, but this
// time writing the results into the various typeck results.
let mut autoderef = self.ctx.table.autoderef_with_tracking(unadjusted_self_ty);
@ -200,8 +200,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
// for two-phase borrows.
let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
adjustments
.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target });
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: target.store(),
});
if unsize {
let unsized_ty = if let TyKind::Array(elem_ty, _) = base_ty.kind() {
@ -213,8 +215,10 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
)
};
target = Ty::new_ref(self.interner(), region, unsized_ty, mutbl.into());
adjustments
.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target });
adjustments.push(Adjustment {
kind: Adjust::Pointer(PointerCast::Unsize),
target: target.store(),
});
}
}
Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => {
@ -228,7 +232,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
adjustments.push(Adjustment {
kind: Adjust::Pointer(PointerCast::MutToConstPointer),
target,
target: target.store(),
});
}
None => {}
@ -482,7 +486,7 @@ impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
if self.ctx.unstable_features.arbitrary_self_types {
self.ctx.result.type_mismatches.get_or_insert_default().insert(
self.expr.into(),
TypeMismatch { expected: method_self_ty, actual: self_ty },
TypeMismatch { expected: method_self_ty.store(), actual: self_ty.store() },
);
}
}

View file

@ -14,7 +14,7 @@ use rustc_type_ir::{
InferTy, TypeVisitableExt, Upcast, Variance,
elaborate::{self, supertrait_def_ids},
fast_reject::{DeepRejectCtxt, TreatParams, simplify_type},
inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _},
inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, Ty as _},
};
use smallvec::{SmallVec, smallvec};
use tracing::{debug, instrument};

View file

@ -12,7 +12,7 @@ use hir_def::{
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_ast_ir::Mutability;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{impl_from, never};
@ -23,7 +23,8 @@ use crate::{
display::{DisplayTarget, HirDisplay},
infer::PointerCast,
next_solver::{
Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind,
Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, StoredConst, StoredGenericArgs,
StoredTy, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -49,16 +50,16 @@ pub(crate) use monomorphization::monomorphized_mir_body_cycle_result;
use super::consteval::try_const_usize;
pub type BasicBlockId<'db> = Idx<BasicBlock<'db>>;
pub type LocalId<'db> = Idx<Local<'db>>;
pub type BasicBlockId = Idx<BasicBlock>;
pub type LocalId = Idx<Local>;
fn return_slot<'db>() -> LocalId<'db> {
fn return_slot() -> LocalId {
LocalId::from_raw(RawIdx::from(0))
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Local<'db> {
pub ty: Ty<'db>,
pub struct Local {
pub ty: StoredTy,
}
/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
@ -80,19 +81,19 @@ pub struct Local<'db> {
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Operand<'db> {
kind: OperandKind<'db>,
pub struct Operand {
kind: OperandKind,
// FIXME : This should actually just be of type `MirSpan`.
span: Option<MirSpan>,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum OperandKind<'db> {
pub enum OperandKind {
/// Creates a value by loading the given place.
///
/// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
/// is no such requirement.
Copy(Place<'db>),
Copy(Place),
/// Creates a value by performing loading the place, just like the `Copy` operand.
///
@ -101,21 +102,21 @@ pub enum OperandKind<'db> {
/// place without first re-initializing it.
///
/// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
Move(Place<'db>),
Move(Place),
/// Constants are already semantically values, and remain unchanged.
Constant { konst: Const<'db>, ty: Ty<'db> },
Constant { konst: StoredConst, ty: StoredTy },
/// NON STANDARD: This kind of operand returns an immutable reference to that static memory. Rustc
/// handles it with the `Constant` variant somehow.
Static(StaticId),
}
impl<'db> Operand<'db> {
impl<'db> Operand {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap<'db>, ty: Ty<'db>) -> Self {
let interner = DbInterner::conjure();
Operand {
kind: OperandKind::Constant {
konst: Const::new_valtree(interner, ty, data, memory_map),
ty,
konst: Const::new_valtree(interner, ty, data, memory_map).store(),
ty: ty.store(),
},
span: None,
}
@ -125,7 +126,7 @@ impl<'db> Operand<'db> {
Operand::from_concrete_const(data, MemoryMap::default(), ty)
}
fn const_zst(ty: Ty<'db>) -> Operand<'db> {
fn const_zst(ty: Ty<'db>) -> Operand {
Self::from_bytes(Box::default(), ty)
}
@ -133,28 +134,28 @@ impl<'db> Operand<'db> {
db: &'db dyn HirDatabase,
func_id: hir_def::FunctionId,
generic_args: GenericArgs<'db>,
) -> Operand<'db> {
) -> Operand {
let interner = DbInterner::new_no_crate(db);
let ty = Ty::new_fn_def(interner, CallableDefId::FunctionId(func_id).into(), generic_args);
Operand::from_bytes(Box::default(), ty)
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)]
pub enum ProjectionElem<'db, V: PartialEq> {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectionElem<V: PartialEq> {
Deref,
Field(Either<FieldId, TupleFieldId>),
// FIXME: get rid of this, and use FieldId for tuples and closures
ClosureField(usize),
Index(#[update(unsafe(with(crate::utils::unsafe_update_eq)))] V),
Index(V),
ConstantIndex { offset: u64, from_end: bool },
Subslice { from: u64, to: u64 },
//Downcast(Option<Symbol>, VariantIdx),
OpaqueCast(Ty<'db>),
OpaqueCast(StoredTy),
}
impl<'db, V: PartialEq> ProjectionElem<'db, V> {
pub fn projected_ty(
impl<V: PartialEq> ProjectionElem<V> {
pub fn projected_ty<'db>(
&self,
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
@ -194,7 +195,7 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> {
},
ProjectionElem::Field(Either::Left(f)) => match base.kind() {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].instantiate(interner, subst)
db.field_types(f.parent)[f.local_id].get().instantiate(interner, subst)
}
ty => {
never!("Only adt has field, found {:?}", ty);
@ -253,18 +254,18 @@ impl<'db, V: PartialEq> ProjectionElem<'db, V> {
}
}
type PlaceElem<'db> = ProjectionElem<'db, LocalId<'db>>;
type PlaceElem = ProjectionElem<LocalId>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProjectionId(u32);
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProjectionStore<'db> {
id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem<'db>]>>,
proj_to_id: FxHashMap<Box<[PlaceElem<'db>]>, ProjectionId>,
pub struct ProjectionStore {
id_to_proj: FxHashMap<ProjectionId, Box<[PlaceElem]>>,
proj_to_id: FxHashMap<Box<[PlaceElem]>, ProjectionId>,
}
impl Default for ProjectionStore<'_> {
impl Default for ProjectionStore {
fn default() -> Self {
let mut this = Self { id_to_proj: Default::default(), proj_to_id: Default::default() };
// Ensure that [] will get the id 0 which is used in `ProjectionId::Empty`
@ -273,17 +274,17 @@ impl Default for ProjectionStore<'_> {
}
}
impl<'db> ProjectionStore<'db> {
impl ProjectionStore {
pub fn shrink_to_fit(&mut self) {
self.id_to_proj.shrink_to_fit();
self.proj_to_id.shrink_to_fit();
}
pub fn intern_if_exist(&self, projection: &[PlaceElem<'db>]) -> Option<ProjectionId> {
pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
self.proj_to_id.get(projection).copied()
}
pub fn intern(&mut self, projection: Box<[PlaceElem<'db>]>) -> ProjectionId {
pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
let new_id = ProjectionId(self.proj_to_id.len() as u32);
match self.proj_to_id.entry(projection) {
Entry::Occupied(id) => *id.get(),
@ -304,15 +305,11 @@ impl ProjectionId {
self == ProjectionId::EMPTY
}
pub fn lookup<'a, 'db>(self, store: &'a ProjectionStore<'db>) -> &'a [PlaceElem<'db>] {
pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
store.id_to_proj.get(&self).unwrap()
}
pub fn project<'db>(
self,
projection: PlaceElem<'db>,
store: &mut ProjectionStore<'db>,
) -> ProjectionId {
pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
let mut current = self.lookup(store).to_vec();
current.push(projection);
store.intern(current.into())
@ -320,13 +317,13 @@ impl ProjectionId {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place<'db> {
pub local: LocalId<'db>,
pub struct Place {
pub local: LocalId,
pub projection: ProjectionId,
}
impl<'db> Place<'db> {
fn is_parent(&self, child: &Place<'db>, store: &ProjectionStore<'db>) -> bool {
impl Place {
fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool {
self.local == child.local
&& child.projection.lookup(store).starts_with(self.projection.lookup(store))
}
@ -334,39 +331,39 @@ impl<'db> Place<'db> {
/// The place itself is not included
fn iterate_over_parents<'a>(
&'a self,
store: &'a ProjectionStore<'db>,
) -> impl Iterator<Item = Place<'db>> + 'a {
store: &'a ProjectionStore,
) -> impl Iterator<Item = Place> + 'a {
let projection = self.projection.lookup(store);
(0..projection.len()).map(|x| &projection[0..x]).filter_map(move |x| {
Some(Place { local: self.local, projection: store.intern_if_exist(x)? })
})
}
fn project(&self, projection: PlaceElem<'db>, store: &mut ProjectionStore<'db>) -> Place<'db> {
fn project(&self, projection: PlaceElem, store: &mut ProjectionStore) -> Place {
Place { local: self.local, projection: self.projection.project(projection, store) }
}
}
impl<'db> From<LocalId<'db>> for Place<'db> {
fn from(local: LocalId<'db>) -> Self {
impl From<LocalId> for Place {
fn from(local: LocalId) -> Self {
Self { local, projection: ProjectionId::EMPTY }
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum AggregateKind<'db> {
pub enum AggregateKind {
/// The type is of the element
Array(Ty<'db>),
Array(StoredTy),
/// The type is of the tuple
Tuple(Ty<'db>),
Adt(VariantId, GenericArgs<'db>),
Tuple(StoredTy),
Adt(VariantId, StoredGenericArgs),
Union(UnionId, FieldId),
Closure(Ty<'db>),
Closure(StoredTy),
//Coroutine(LocalDefId, SubstsRef, Movability),
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct SwitchTargets<'db> {
pub struct SwitchTargets {
/// Possible values. The locations to branch to in each case
/// are found in the corresponding indices from the `targets` vector.
values: SmallVec<[u128; 1]>,
@ -383,17 +380,17 @@ pub struct SwitchTargets<'db> {
//
// However weve decided to keep this as-is until we figure a case
// where some other approach seems to be strictly better than other.
targets: SmallVec<[BasicBlockId<'db>; 2]>,
targets: SmallVec<[BasicBlockId; 2]>,
}
impl<'db> SwitchTargets<'db> {
impl SwitchTargets {
/// Creates switch targets from an iterator of values and target blocks.
///
/// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
/// `goto otherwise;`.
pub fn new(
targets: impl Iterator<Item = (u128, BasicBlockId<'db>)>,
otherwise: BasicBlockId<'db>,
targets: impl Iterator<Item = (u128, BasicBlockId)>,
otherwise: BasicBlockId,
) -> Self {
let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
targets.push(otherwise);
@ -402,12 +399,12 @@ impl<'db> SwitchTargets<'db> {
/// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
/// and to `else_` if not.
pub fn static_if(value: u128, then: BasicBlockId<'db>, else_: BasicBlockId<'db>) -> Self {
pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self {
Self { values: smallvec![value], targets: smallvec![then, else_] }
}
/// Returns the fallback target that is jumped to when none of the values match the operand.
pub fn otherwise(&self) -> BasicBlockId<'db> {
pub fn otherwise(&self) -> BasicBlockId {
*self.targets.last().unwrap()
}
@ -417,33 +414,33 @@ impl<'db> SwitchTargets<'db> {
/// including the `otherwise` fallback target.
///
/// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId<'db>)> + '_ {
pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId)> + '_ {
iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
}
/// Returns a slice with all possible jump targets (including the fallback target).
pub fn all_targets(&self) -> &[BasicBlockId<'db>] {
pub fn all_targets(&self) -> &[BasicBlockId] {
&self.targets
}
/// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
/// specific value. This cannot fail, as it'll return the `otherwise`
/// branch if there's not a specific match for the value.
pub fn target_for_value(&self, value: u128) -> BasicBlockId<'db> {
pub fn target_for_value(&self, value: u128) -> BasicBlockId {
self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Terminator<'db> {
pub struct Terminator {
pub span: MirSpan,
pub kind: TerminatorKind<'db>,
pub kind: TerminatorKind,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TerminatorKind<'db> {
pub enum TerminatorKind {
/// Block has one successor; we continue execution there.
Goto { target: BasicBlockId<'db> },
Goto { target: BasicBlockId },
/// Switches based on the computed value.
///
@ -455,9 +452,9 @@ pub enum TerminatorKind<'db> {
/// Target values may not appear more than once.
SwitchInt {
/// The discriminant value being tested.
discr: Operand<'db>,
discr: Operand,
targets: SwitchTargets<'db>,
targets: SwitchTargets,
},
/// Indicates that the landing pad is finished and that the process should continue unwinding.
@ -508,7 +505,7 @@ pub enum TerminatorKind<'db> {
/// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
/// > the place or one of its "parents" occurred more recently than a move out of it. This does not
/// > consider indirect assignments.
Drop { place: Place<'db>, target: BasicBlockId<'db>, unwind: Option<BasicBlockId<'db>> },
Drop { place: Place, target: BasicBlockId, unwind: Option<BasicBlockId> },
/// Drops the place and assigns a new value to it.
///
@ -541,10 +538,10 @@ pub enum TerminatorKind<'db> {
///
/// Disallowed after drop elaboration.
DropAndReplace {
place: Place<'db>,
value: Operand<'db>,
target: BasicBlockId<'db>,
unwind: Option<BasicBlockId<'db>>,
place: Place,
value: Operand,
target: BasicBlockId,
unwind: Option<BasicBlockId>,
},
/// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
@ -559,18 +556,18 @@ pub enum TerminatorKind<'db> {
/// [#71117]: https://github.com/rust-lang/rust/issues/71117
Call {
/// The function thats being called.
func: Operand<'db>,
func: Operand,
/// Arguments the function is called with.
/// These are owned by the callee, which is free to modify them.
/// This allows the memory occupied by "by-value" arguments to be
/// reused across function calls without duplicating the contents.
args: Box<[Operand<'db>]>,
args: Box<[Operand]>,
/// Where the returned value will be written
destination: Place<'db>,
destination: Place,
/// Where to go after this call returns. If none, the call necessarily diverges.
target: Option<BasicBlockId<'db>>,
target: Option<BasicBlockId>,
/// Cleanups to be done if the call unwinds.
cleanup: Option<BasicBlockId<'db>>,
cleanup: Option<BasicBlockId>,
/// `true` if this is from a call in HIR rather than from an overloaded
/// operator. True for overloaded function call.
from_hir_call: bool,
@ -586,11 +583,11 @@ pub enum TerminatorKind<'db> {
/// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
/// assertion does not fail, execution continues at the specified basic block.
Assert {
cond: Operand<'db>,
cond: Operand,
expected: bool,
//msg: AssertMessage,
target: BasicBlockId<'db>,
cleanup: Option<BasicBlockId<'db>>,
target: BasicBlockId,
cleanup: Option<BasicBlockId>,
},
/// Marks a suspend point.
@ -607,13 +604,13 @@ pub enum TerminatorKind<'db> {
/// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
Yield {
/// The value to return.
value: Operand<'db>,
value: Operand,
/// Where to resume to.
resume: BasicBlockId<'db>,
resume: BasicBlockId,
/// The place to store the resume argument in.
resume_arg: Place<'db>,
resume_arg: Place,
/// Cleanup to be done if the coroutine is dropped at this suspend point.
drop: Option<BasicBlockId<'db>>,
drop: Option<BasicBlockId>,
},
/// Indicates the end of dropping a coroutine.
@ -636,10 +633,10 @@ pub enum TerminatorKind<'db> {
/// Disallowed after drop elaboration.
FalseEdge {
/// The target normal control flow will take.
real_target: BasicBlockId<'db>,
real_target: BasicBlockId,
/// A block control flow could conceptually jump to, but won't in
/// practice.
imaginary_target: BasicBlockId<'db>,
imaginary_target: BasicBlockId,
},
/// A terminator for blocks that only take one path in reality, but where we reserve the right
@ -651,14 +648,14 @@ pub enum TerminatorKind<'db> {
/// Disallowed after drop elaboration.
FalseUnwind {
/// The target normal control flow will take.
real_target: BasicBlockId<'db>,
real_target: BasicBlockId,
/// The imaginary cleanup block link. This particular path will never be taken
/// in practice, but in order to avoid fragility we want to always
/// consider it in borrowck. We don't want to accept programs which
/// pass borrowck only when `panic=abort` or some assertions are disabled
/// due to release vs. debug mode builds. This needs to be an `Option` because
/// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
unwind: Option<BasicBlockId<'db>>,
unwind: Option<BasicBlockId>,
},
}
@ -845,8 +842,8 @@ impl From<hir_def::hir::CmpOp> for BinOp {
}
}
impl<'db> From<Operand<'db>> for Rvalue<'db> {
fn from(x: Operand<'db>) -> Self {
impl From<Operand> for Rvalue {
fn from(x: Operand) -> Self {
Self::Use(x)
}
}
@ -875,14 +872,14 @@ pub enum CastKind {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Rvalue<'db> {
pub enum Rvalue {
/// Yields the operand unchanged
Use(Operand<'db>),
Use(Operand),
/// Creates an array where each element is the value of the operand.
///
/// Corresponds to source code like `[x; 32]`.
Repeat(Operand<'db>, Const<'db>),
Repeat(Operand, StoredConst),
/// Creates a reference of the indicated kind to the place.
///
@ -891,7 +888,7 @@ pub enum Rvalue<'db> {
/// exactly what the behavior of this operation should be.
///
/// `Shallow` borrows are disallowed after drop lowering.
Ref(BorrowKind, Place<'db>),
Ref(BorrowKind, Place),
/// Creates a pointer/reference to the given thread local.
///
@ -922,7 +919,7 @@ pub enum Rvalue<'db> {
/// If the type of the place is an array, this is the array length. For slices (`[T]`, not
/// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
/// ill-formed for places of other types.
Len(Place<'db>),
Len(Place),
/// Performs essentially all of the casts that can be performed via `as`.
///
@ -930,7 +927,7 @@ pub enum Rvalue<'db> {
///
/// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
/// `ArrayToPointer` and `MutToConstPointer` are special.
Cast(CastKind, Operand<'db>, Ty<'db>),
Cast(CastKind, Operand, StoredTy),
// FIXME link to `pointer::offset` when it hits stable.
/// * `Offset` has the same semantics as `pointer::offset`, except that the second
@ -962,7 +959,7 @@ pub enum Rvalue<'db> {
/// when the value of right-hand side is negative.
///
/// Other combinations of types and operators are unsupported.
CheckedBinaryOp(BinOp, Operand<'db>, Operand<'db>),
CheckedBinaryOp(BinOp, Operand, Operand),
/// Computes a value as described by the operation.
//NullaryOp(NullOp, Ty),
@ -973,7 +970,7 @@ pub enum Rvalue<'db> {
/// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
/// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
/// return a value with the same type as their operand.
UnaryOp(UnOp, Operand<'db>),
UnaryOp(UnOp, Operand),
/// Computes the discriminant of the place, returning it as an integer of type
/// `discriminant_ty`. Returns zero for types without discriminant.
@ -983,7 +980,7 @@ pub enum Rvalue<'db> {
/// variant index; use `discriminant_for_variant` to convert.
///
/// [#91095]: https://github.com/rust-lang/rust/issues/91095
Discriminant(Place<'db>),
Discriminant(Place),
/// Creates an aggregate value, like a tuple or struct.
///
@ -993,17 +990,17 @@ pub enum Rvalue<'db> {
///
/// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
/// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
Aggregate(AggregateKind<'db>, Box<[Operand<'db>]>),
Aggregate(AggregateKind, Box<[Operand]>),
/// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
///
/// This is different from a normal transmute because dataflow analysis will treat the box as
/// initialized but its content as uninitialized. Like other pointer casts, this in general
/// affects alias analysis.
ShallowInitBox(Operand<'db>, Ty<'db>),
ShallowInitBox(Operand, StoredTy),
/// NON STANDARD: allocates memory with the type's layout, and shallow init the box with the resulting pointer.
ShallowInitBoxWithAlloc(Ty<'db>),
ShallowInitBoxWithAlloc(StoredTy),
/// A CopyForDeref is equivalent to a read from a place at the
/// codegen level, but is treated specially by drop elaboration. When such a read happens, it
@ -1013,41 +1010,41 @@ pub enum Rvalue<'db> {
/// read never happened and just projects further. This allows simplifying various MIR
/// optimizations and codegen backends that previously had to handle deref operations anywhere
/// in a place.
CopyForDeref(Place<'db>),
CopyForDeref(Place),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind<'db> {
Assign(Place<'db>, Rvalue<'db>),
FakeRead(Place<'db>),
pub enum StatementKind {
Assign(Place, Rvalue),
FakeRead(Place),
//SetDiscriminant {
// place: Box<Place>,
// variant_index: VariantIdx,
//},
Deinit(Place<'db>),
StorageLive(LocalId<'db>),
StorageDead(LocalId<'db>),
Deinit(Place),
StorageLive(LocalId),
StorageDead(LocalId),
//Retag(RetagKind, Box<Place>),
//AscribeUserType(Place, UserTypeProjection, Variance),
//Intrinsic(Box<NonDivergingIntrinsic>),
Nop,
}
impl<'db> StatementKind<'db> {
fn with_span(self, span: MirSpan) -> Statement<'db> {
impl StatementKind {
fn with_span(self, span: MirSpan) -> Statement {
Statement { kind: self, span }
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Statement<'db> {
pub kind: StatementKind<'db>,
pub struct Statement {
pub kind: StatementKind,
pub span: MirSpan,
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct BasicBlock<'db> {
pub struct BasicBlock {
/// List of statements in this block.
pub statements: Vec<Statement<'db>>,
pub statements: Vec<Statement>,
/// Terminator for this block.
///
@ -1057,7 +1054,7 @@ pub struct BasicBlock<'db> {
/// exception is that certain passes, such as `simplify_cfg`, swap
/// out the terminator temporarily with `None` while they continue
/// to recurse over the set of basic blocks.
pub terminator: Option<Terminator<'db>>,
pub terminator: Option<Terminator>,
/// If true, this block lies on an unwind path. This is used
/// during codegen where distinct kinds of basic blocks may be
@ -1067,29 +1064,29 @@ pub struct BasicBlock<'db> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody<'db> {
pub projection_store: ProjectionStore<'db>,
pub basic_blocks: Arena<BasicBlock<'db>>,
pub locals: Arena<Local<'db>>,
pub start_block: BasicBlockId<'db>,
pub struct MirBody {
pub projection_store: ProjectionStore,
pub basic_blocks: Arena<BasicBlock>,
pub locals: Arena<Local>,
pub start_block: BasicBlockId,
pub owner: DefWithBodyId,
pub binding_locals: ArenaMap<BindingId, LocalId<'db>>,
pub param_locals: Vec<LocalId<'db>>,
pub binding_locals: ArenaMap<BindingId, LocalId>,
pub param_locals: Vec<LocalId>,
/// This field stores the closures directly owned by this body. It is used
/// in traversing every mir body.
pub closures: Vec<InternedClosureId>,
}
impl<'db> MirBody<'db> {
pub fn local_to_binding_map(&self) -> ArenaMap<LocalId<'db>, BindingId> {
impl MirBody {
pub fn local_to_binding_map(&self) -> ArenaMap<LocalId, BindingId> {
self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
}
fn walk_places(&mut self, mut f: impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>)) {
fn for_operand<'db>(
op: &mut Operand<'db>,
f: &mut impl FnMut(&mut Place<'db>, &mut ProjectionStore<'db>),
store: &mut ProjectionStore<'db>,
fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
fn for_operand(
op: &mut Operand,
f: &mut impl FnMut(&mut Place, &mut ProjectionStore),
store: &mut ProjectionStore,
) {
match &mut op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {

View file

@ -17,7 +17,7 @@ use crate::{
display::DisplayTarget,
mir::OperandKind,
next_solver::{
DbInterner, GenericArgs, ParamEnv, Ty, TypingMode,
DbInterner, GenericArgs, ParamEnv, StoredTy, Ty, TypingMode,
infer::{DbInternerInferExt, InferCtxt},
},
};
@ -36,44 +36,44 @@ pub enum MutabilityReason {
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MovedOutOfRef<'db> {
pub ty: Ty<'db>,
pub struct MovedOutOfRef {
pub ty: StoredTy,
pub span: MirSpan,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PartiallyMoved<'db> {
pub ty: Ty<'db>,
pub struct PartiallyMoved {
pub ty: StoredTy,
pub span: MirSpan,
pub local: LocalId<'db>,
pub local: LocalId,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowRegion<'db> {
pub local: LocalId<'db>,
pub struct BorrowRegion {
pub local: LocalId,
pub kind: BorrowKind,
pub places: Vec<MirSpan>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult<'db> {
pub mir_body: Arc<MirBody<'db>>,
pub mutability_of_locals: ArenaMap<LocalId<'db>, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef<'db>>,
pub partially_moved: Vec<PartiallyMoved<'db>>,
pub borrow_regions: Vec<BorrowRegion<'db>>,
pub struct BorrowckResult {
pub mir_body: Arc<MirBody>,
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef>,
pub partially_moved: Vec<PartiallyMoved>,
pub borrow_regions: Vec<BorrowRegion>,
}
fn all_mir_bodies<'db>(
db: &'db dyn HirDatabase,
fn all_mir_bodies(
db: &dyn HirDatabase,
def: DefWithBodyId,
mut cb: impl FnMut(Arc<MirBody<'db>>),
) -> Result<(), MirLowerError<'db>> {
fn for_closure<'db>(
db: &'db dyn HirDatabase,
mut cb: impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
fn for_closure(
db: &dyn HirDatabase,
c: InternedClosureId,
cb: &mut impl FnMut(Arc<MirBody<'db>>),
) -> Result<(), MirLowerError<'db>> {
cb: &mut impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
match db.mir_body_for_closure(c) {
Ok(body) => {
cb(body.clone());
@ -91,10 +91,10 @@ fn all_mir_bodies<'db>(
}
}
pub fn borrowck_query<'db>(
db: &'db dyn HirDatabase,
pub fn borrowck_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Result<Arc<[BorrowckResult<'db>]>, MirLowerError<'db>> {
) -> Result<Arc<[BorrowckResult]>, MirLowerError> {
let _p = tracing::info_span!("borrowck_query").entered();
let module = def.module(db);
let interner = DbInterner::new_with(db, module.krate(db));
@ -125,20 +125,20 @@ fn make_fetch_closure_field<'db>(
let (captures, _) = infer.closure_info(c);
let parent_subst = subst.split_closure_args_untupled().parent_args;
let interner = DbInterner::new_no_crate(db);
captures.get(f).expect("broken closure field").ty.instantiate(interner, parent_subst)
captures.get(f).expect("broken closure field").ty.get().instantiate(interner, parent_subst)
}
}
fn moved_out_of_ref<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
) -> Vec<MovedOutOfRef<'db>> {
body: &MirBody,
) -> Vec<MovedOutOfRef> {
let db = infcx.interner.db;
let mut result = vec![];
let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind {
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
let mut ty: Ty<'db> = body.locals[p.local].ty;
let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref();
let mut is_dereference_of_ref = false;
for proj in p.projection.lookup(&body.projection_store) {
if *proj == ProjectionElem::Deref && ty.as_reference().is_some() {
@ -156,7 +156,7 @@ fn moved_out_of_ref<'db>(
&& !infcx.type_is_copy_modulo_regions(env, ty)
&& !ty.references_non_lt_error()
{
result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty });
result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty: ty.store() });
}
}
OperandKind::Constant { .. } | OperandKind::Static(_) => (),
@ -233,13 +233,13 @@ fn moved_out_of_ref<'db>(
fn partially_moved<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
) -> Vec<PartiallyMoved<'db>> {
body: &MirBody,
) -> Vec<PartiallyMoved> {
let db = infcx.interner.db;
let mut result = vec![];
let mut for_operand = |op: &Operand<'db>, span: MirSpan| match op.kind {
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
let mut ty: Ty<'db> = body.locals[p.local].ty;
let mut ty: Ty<'db> = body.locals[p.local].ty.as_ref();
for proj in p.projection.lookup(&body.projection_store) {
ty = proj.projected_ty(
infcx,
@ -250,7 +250,7 @@ fn partially_moved<'db>(
);
}
if !infcx.type_is_copy_modulo_regions(env, ty) && !ty.references_non_lt_error() {
result.push(PartiallyMoved { span, ty, local: p.local });
result.push(PartiallyMoved { span, ty: ty.store(), local: p.local });
}
}
OperandKind::Constant { .. } | OperandKind::Static(_) => (),
@ -324,7 +324,7 @@ fn partially_moved<'db>(
result
}
fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec<BorrowRegion<'db>> {
fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
let mut borrows = FxHashMap::default();
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_revision_cancelled();
@ -332,7 +332,7 @@ fn borrow_regions<'db>(db: &'db dyn HirDatabase, body: &MirBody<'db>) -> Vec<Bor
if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
borrows
.entry(p.local)
.and_modify(|it: &mut BorrowRegion<'db>| {
.and_modify(|it: &mut BorrowRegion| {
it.places.push(statement.span);
})
.or_insert_with(|| BorrowRegion {
@ -377,12 +377,12 @@ enum ProjectionCase {
fn place_case<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
lvalue: &Place<'db>,
body: &MirBody,
lvalue: &Place,
) -> ProjectionCase {
let db = infcx.interner.db;
let mut is_part_of = false;
let mut ty = body.locals[lvalue.local].ty;
let mut ty = body.locals[lvalue.local].ty.as_ref();
for proj in lvalue.projection.lookup(&body.projection_store).iter() {
match proj {
ProjectionElem::Deref if ty.as_adt().is_none() => return ProjectionCase::Indirect, // It's indirect in case of reference and raw
@ -410,18 +410,18 @@ fn place_case<'db>(
/// Returns a map from basic blocks to the set of locals that might be ever initialized before
/// the start of the block. Only `StorageDead` can remove something from this map, and we ignore
/// `Uninit` and `drop` and similar after initialization.
fn ever_initialized_map<'db>(
db: &'db dyn HirDatabase,
body: &MirBody<'db>,
) -> ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>> {
let mut result: ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>> =
fn ever_initialized_map(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect();
fn dfs<'db>(
db: &'db dyn HirDatabase,
body: &MirBody<'db>,
l: LocalId<'db>,
stack: &mut Vec<BasicBlockId<'db>>,
result: &mut ArenaMap<BasicBlockId<'db>, ArenaMap<LocalId<'db>, bool>>,
fn dfs(
db: &dyn HirDatabase,
body: &MirBody,
l: LocalId,
stack: &mut Vec<BasicBlockId>,
result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
) {
while let Some(b) = stack.pop() {
let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
@ -509,11 +509,7 @@ fn ever_initialized_map<'db>(
result
}
fn push_mut_span<'db>(
local: LocalId<'db>,
span: MirSpan,
result: &mut ArenaMap<LocalId<'db>, MutabilityReason>,
) {
fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, MutabilityReason>) {
match &mut result[local] {
MutabilityReason::Mut { spans } => spans.push(span),
it @ (MutabilityReason::Not | MutabilityReason::Unused) => {
@ -522,16 +518,13 @@ fn push_mut_span<'db>(
};
}
fn record_usage<'db>(local: LocalId<'db>, result: &mut ArenaMap<LocalId<'db>, MutabilityReason>) {
fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) {
if let it @ MutabilityReason::Unused = &mut result[local] {
*it = MutabilityReason::Not;
};
}
fn record_usage_for_operand<'db>(
arg: &Operand<'db>,
result: &mut ArenaMap<LocalId<'db>, MutabilityReason>,
) {
fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap<LocalId, MutabilityReason>) {
if let OperandKind::Copy(p) | OperandKind::Move(p) = arg.kind {
record_usage(p.local, result);
}
@ -540,10 +533,10 @@ fn record_usage_for_operand<'db>(
fn mutability_of_locals<'db>(
infcx: &InferCtxt<'db>,
env: ParamEnv<'db>,
body: &MirBody<'db>,
) -> ArenaMap<LocalId<'db>, MutabilityReason> {
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let db = infcx.interner.db;
let mut result: ArenaMap<LocalId<'db>, MutabilityReason> =
let mut result: ArenaMap<LocalId, MutabilityReason> =
body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect();
let ever_init_maps = ever_initialized_map(db, body);

View file

@ -17,6 +17,7 @@ use hir_def::{
use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym;
use la_arena::ArenaMap;
use macros::GenericTypeVisitable;
use rustc_abi::TargetDataLayout;
use rustc_apfloat::{
Float,
@ -42,8 +43,8 @@ use crate::{
layout::{Layout, LayoutError, RustcEnumVariantIdx},
method_resolution::{is_dyn_method, lookup_impl_const},
next_solver::{
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, Ty, TyKind,
TypingMode, UnevaluatedConst, ValueConst,
Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region,
StoredConst, StoredTy, Ty, TyKind, TypingMode, UnevaluatedConst, ValueConst,
infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -83,7 +84,7 @@ macro_rules! not_supported {
};
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct VTableMap<'db> {
ty_to_id: FxHashMap<Ty<'db>, usize>,
id_to_ty: Vec<Ty<'db>>,
@ -150,16 +151,16 @@ impl TlsData {
}
}
struct StackFrame<'db> {
locals: Locals<'db>,
destination: Option<BasicBlockId<'db>>,
struct StackFrame {
locals: Locals,
destination: Option<BasicBlockId>,
prev_stack_ptr: usize,
span: (MirSpan, DefWithBodyId),
}
#[derive(Clone)]
enum MirOrDynIndex<'db> {
Mir(Arc<MirBody<'db>>),
enum MirOrDynIndex {
Mir(Arc<MirBody>),
Dyn(usize),
}
@ -169,7 +170,7 @@ pub struct Evaluator<'db> {
target_data_layout: Arc<TargetDataLayout>,
stack: Vec<u8>,
heap: Vec<u8>,
code_stack: Vec<StackFrame<'db>>,
code_stack: Vec<StackFrame>,
/// Stores the global location of the statics. We const evaluate every static first time we need it
/// and see it's missing, then we add it to this to reuse.
static_locations: FxHashMap<StaticId, Address>,
@ -182,13 +183,13 @@ pub struct Evaluator<'db> {
stdout: Vec<u8>,
stderr: Vec<u8>,
layout_cache: RefCell<FxHashMap<Ty<'db>, Arc<Layout>>>,
projected_ty_cache: RefCell<FxHashMap<(Ty<'db>, PlaceElem<'db>), Ty<'db>>>,
projected_ty_cache: RefCell<FxHashMap<(Ty<'db>, PlaceElem), Ty<'db>>>,
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, GenericArgs<'db>), MirOrDynIndex<'db>>>,
/// Constantly dropping and creating `Locals<'db>` is very costly. We store
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, GenericArgs<'db>), MirOrDynIndex>>,
/// Constantly dropping and creating `Locals` is very costly. We store
/// old locals that we normally want to drop here, to reuse their allocations
/// later.
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals<'db>>>>,
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
cached_ptr_size: usize,
cached_fn_trait_func: Option<FunctionId>,
cached_fn_mut_trait_func: Option<FunctionId>,
@ -261,7 +262,7 @@ impl<'db> IntervalAndTy<'db> {
addr: Address,
ty: Ty<'db>,
evaluator: &Evaluator<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, IntervalAndTy<'db>> {
let size = evaluator.size_of_sized(ty, locals, "type of interval")?;
Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
@ -340,22 +341,22 @@ impl Address {
}
#[derive(Clone, PartialEq, Eq)]
pub enum MirEvalError<'db> {
ConstEvalError(String, Box<ConstEvalError<'db>>),
LayoutError(LayoutError, Ty<'db>),
pub enum MirEvalError {
ConstEvalError(String, Box<ConstEvalError>),
LayoutError(LayoutError, StoredTy),
TargetDataLayoutNotAvailable(TargetLoadError),
/// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected
/// then use this type of error.
UndefinedBehavior(String),
Panic(String),
// FIXME: This should be folded into ConstEvalError?
MirLowerError(FunctionId, MirLowerError<'db>),
MirLowerErrorForClosure(InternedClosureId, MirLowerError<'db>),
TypeIsUnsized(Ty<'db>, &'static str),
MirLowerError(FunctionId, MirLowerError),
MirLowerErrorForClosure(InternedClosureId, MirLowerError),
TypeIsUnsized(StoredTy, &'static str),
NotSupported(String),
InvalidConst(Const<'db>),
InvalidConst(StoredConst),
InFunction(
Box<MirEvalError<'db>>,
Box<MirEvalError>,
Vec<(Either<FunctionId, InternedClosureId>, MirSpan, DefWithBodyId)>,
),
ExecutionLimitExceeded,
@ -363,12 +364,12 @@ pub enum MirEvalError<'db> {
/// FIXME: Fold this into InternalError
InvalidVTableId(usize),
/// ?
CoerceUnsizedError(Ty<'db>),
CoerceUnsizedError(StoredTy),
/// These should not occur, usually indicates a bug in mir lowering.
InternalError(Box<str>),
}
impl MirEvalError<'_> {
impl MirEvalError {
pub fn pretty_print(
&self,
f: &mut String,
@ -432,7 +433,9 @@ impl MirEvalError<'_> {
write!(
f,
"Layout for type `{}` is not available due {err:?}",
ty.display(db, display_target).with_closure_style(ClosureStyle::ClosureWithId)
ty.as_ref()
.display(db, display_target)
.with_closure_style(ClosureStyle::ClosureWithId)
)?;
}
MirEvalError::MirLowerError(func, err) => {
@ -495,7 +498,7 @@ impl MirEvalError<'_> {
}
}
impl std::fmt::Debug for MirEvalError<'_> {
impl std::fmt::Debug for MirEvalError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::ConstEvalError(arg0, arg1) => {
@ -534,15 +537,15 @@ impl std::fmt::Debug for MirEvalError<'_> {
}
}
type Result<'db, T> = std::result::Result<T, MirEvalError<'db>>;
type Result<'db, T> = std::result::Result<T, MirEvalError>;
#[derive(Debug, Default)]
struct DropFlags<'db> {
need_drop: FxHashSet<Place<'db>>,
struct DropFlags {
need_drop: FxHashSet<Place>,
}
impl<'db> DropFlags<'db> {
fn add_place(&mut self, p: Place<'db>, store: &ProjectionStore<'db>) {
impl DropFlags {
fn add_place(&mut self, p: Place, store: &ProjectionStore) {
if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) {
return;
}
@ -550,7 +553,7 @@ impl<'db> DropFlags<'db> {
self.need_drop.insert(p);
}
fn remove_place(&mut self, p: &Place<'db>, store: &ProjectionStore<'db>) -> bool {
fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool {
// FIXME: replace parents with parts
if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(it)) {
self.need_drop.remove(&parent);
@ -565,10 +568,10 @@ impl<'db> DropFlags<'db> {
}
#[derive(Debug)]
struct Locals<'db> {
ptr: ArenaMap<LocalId<'db>, Interval>,
body: Arc<MirBody<'db>>,
drop_flags: DropFlags<'db>,
struct Locals {
ptr: ArenaMap<LocalId, Interval>,
body: Arc<MirBody>,
drop_flags: DropFlags,
}
pub struct MirOutput {
@ -587,7 +590,7 @@ impl MirOutput {
pub fn interpret_mir<'db>(
db: &'db dyn HirDatabase,
body: Arc<MirBody<'db>>,
body: Arc<MirBody>,
// FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now
// they share their body with their parent, so in MIR lowering we have locals of the parent body, which
// might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
@ -596,7 +599,7 @@ pub fn interpret_mir<'db>(
assert_placeholder_ty_is_unused: bool,
trait_env: Option<ParamEnvAndCrate<'db>>,
) -> Result<'db, (Result<'db, Const<'db>>, MirOutput)> {
let ty = body.locals[return_slot()].ty;
let ty = body.locals[return_slot()].ty.as_ref();
let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?;
let it: Result<'db, Const<'db>> = (|| {
if evaluator.ptr_size() != size_of::<usize>() {
@ -694,11 +697,11 @@ impl<'db> Evaluator<'db> {
self.infcx.interner.lang_items()
}
fn place_addr(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Address> {
fn place_addr(&self, p: &Place, locals: &Locals) -> Result<'db, Address> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
}
fn place_interval(&self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> {
fn place_interval(&self, p: &Place, locals: &Locals) -> Result<'db, Interval> {
let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
Ok(Interval {
addr: place_addr_and_ty.0,
@ -714,7 +717,7 @@ impl<'db> Evaluator<'db> {
self.cached_ptr_size
}
fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem<'db>) -> Ty<'db> {
fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem) -> Ty<'db> {
let pair = (ty, proj);
if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {
return *r;
@ -733,6 +736,7 @@ impl<'db> Evaluator<'db> {
.get(f)
.expect("broken closure field")
.ty
.get()
.instantiate(self.interner(), parent_subst)
},
self.crate_id,
@ -743,11 +747,11 @@ impl<'db> Evaluator<'db> {
fn place_addr_and_ty_and_metadata<'a>(
&'a self,
p: &Place<'db>,
locals: &'a Locals<'db>,
p: &Place,
locals: &'a Locals,
) -> Result<'db, (Address, Ty<'db>, Option<IntervalOrOwned>)> {
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty<'db> = locals.body.locals[p.local].ty;
let mut ty: Ty<'db> = locals.body.locals[p.local].ty.as_ref();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
for proj in p.projection.lookup(&locals.body.projection_store) {
let prev_ty = ty;
@ -868,8 +872,8 @@ impl<'db> Evaluator<'db> {
}
let r = self
.db
.layout_of_ty(ty, self.param_env)
.map_err(|e| MirEvalError::LayoutError(e, ty))?;
.layout_of_ty(ty.store(), self.param_env.store())
.map_err(|e| MirEvalError::LayoutError(e, ty.store()))?;
self.layout_cache.borrow_mut().insert(ty, r.clone());
Ok(r)
}
@ -878,17 +882,17 @@ impl<'db> Evaluator<'db> {
self.layout(Ty::new_adt(self.interner(), adt, subst))
}
fn place_ty<'a>(&'a self, p: &Place<'db>, locals: &'a Locals<'db>) -> Result<'db, Ty<'db>> {
fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<'db, Ty<'db>> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
}
fn operand_ty(&self, o: &Operand<'db>, locals: &Locals<'db>) -> Result<'db, Ty<'db>> {
fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<'db, Ty<'db>> {
Ok(match &o.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,
OperandKind::Constant { konst: _, ty } => *ty,
OperandKind::Constant { konst: _, ty } => ty.as_ref(),
&OperandKind::Static(s) => {
let ty =
InferenceResult::for_body(self.db, s.into())[self.db.body(s.into()).body_expr];
let ty = InferenceResult::for_body(self.db, s.into())
.expr_ty(self.db.body(s.into()).body_expr);
Ty::new_ref(
self.interner(),
Region::new_static(self.interner()),
@ -901,8 +905,8 @@ impl<'db> Evaluator<'db> {
fn operand_ty_and_eval(
&mut self,
o: &Operand<'db>,
locals: &mut Locals<'db>,
o: &Operand,
locals: &mut Locals,
) -> Result<'db, IntervalAndTy<'db>> {
Ok(IntervalAndTy {
interval: self.eval_operand(o, locals)?,
@ -912,7 +916,7 @@ impl<'db> Evaluator<'db> {
fn interpret_mir(
&mut self,
body: Arc<MirBody<'db>>,
body: Arc<MirBody>,
args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<'db, Interval> {
if let Some(it) = self.stack_depth_limit.checked_sub(1) {
@ -1076,8 +1080,8 @@ impl<'db> Evaluator<'db> {
fn fill_locals_for_body(
&mut self,
body: &MirBody<'db>,
locals: &mut Locals<'db>,
body: &MirBody,
locals: &mut Locals,
args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<'db, ()> {
let mut remain_args = body.param_locals.len();
@ -1100,9 +1104,9 @@ impl<'db> Evaluator<'db> {
fn create_locals_for_body(
&mut self,
body: &Arc<MirBody<'db>>,
body: &Arc<MirBody>,
destination: Option<Interval>,
) -> Result<'db, (Locals<'db>, usize)> {
) -> Result<'db, (Locals, usize)> {
let mut locals =
match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {
None => Locals {
@ -1126,7 +1130,7 @@ impl<'db> Evaluator<'db> {
continue;
}
let (size, align) = self.size_align_of_sized(
it.ty,
it.ty.as_ref(),
&locals,
"no unsized local in extending stack",
)?;
@ -1149,11 +1153,7 @@ impl<'db> Evaluator<'db> {
Ok((locals, prev_stack_pointer))
}
fn eval_rvalue(
&mut self,
r: &Rvalue<'db>,
locals: &mut Locals<'db>,
) -> Result<'db, IntervalOrOwned> {
fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<'db, IntervalOrOwned> {
use IntervalOrOwned::*;
Ok(match r {
Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
@ -1445,7 +1445,7 @@ impl<'db> Evaluator<'db> {
Owned(result.to_le_bytes().to_vec())
}
Rvalue::Repeat(it, len) => {
let len = match try_const_usize(self.db, *len) {
let len = match try_const_usize(self.db, len.as_ref()) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in repeat Rvalue"),
};
@ -1455,7 +1455,7 @@ impl<'db> Evaluator<'db> {
}
Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
Rvalue::ShallowInitBoxWithAlloc(ty) => {
let Some((size, align)) = self.size_align_of(*ty, locals)? else {
let Some((size, align)) = self.size_align_of(ty.as_ref(), locals)? else {
not_supported!("unsized box initialization");
};
let addr = self.heap_allocate(size, align)?;
@ -1477,7 +1477,7 @@ impl<'db> Evaluator<'db> {
Owned(r)
}
AggregateKind::Tuple(ty) => {
let layout = self.layout(*ty)?;
let layout = self.layout(ty.as_ref())?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1486,10 +1486,8 @@ impl<'db> Evaluator<'db> {
)?)
}
AggregateKind::Union(it, f) => {
let layout = self.layout_adt(
(*it).into(),
GenericArgs::new_from_iter(self.interner(), []),
)?;
let layout =
self.layout_adt((*it).into(), GenericArgs::empty(self.interner()))?;
let offset = layout
.fields
.offset(u32::from(f.local_id.into_raw()) as usize)
@ -1501,7 +1499,7 @@ impl<'db> Evaluator<'db> {
}
AggregateKind::Adt(it, subst) => {
let (size, variant_layout, tag) =
self.layout_of_variant(*it, *subst, locals)?;
self.layout_of_variant(*it, subst.as_ref(), locals)?;
Owned(self.construct_with_layout(
size,
&variant_layout,
@ -1510,7 +1508,7 @@ impl<'db> Evaluator<'db> {
)?)
}
AggregateKind::Closure(ty) => {
let layout = self.layout(*ty)?;
let layout = self.layout(ty.as_ref())?;
Owned(self.construct_with_layout(
layout.size.bytes_usize(),
&layout,
@ -1537,7 +1535,7 @@ impl<'db> Evaluator<'db> {
PointerCast::Unsize => {
let current_ty = self.operand_ty(operand, locals)?;
let addr = self.eval_operand(operand, locals)?;
self.coerce_unsized(addr, current_ty, *target_ty)?
self.coerce_unsized(addr, current_ty, target_ty.as_ref())?
}
PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => {
// This is no-op
@ -1556,8 +1554,11 @@ impl<'db> Evaluator<'db> {
let current_ty = self.operand_ty(operand, locals)?;
let is_signed = matches!(current_ty.kind(), TyKind::Int(_));
let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
let dest_size =
self.size_of_sized(*target_ty, locals, "destination of int to int cast")?;
let dest_size = self.size_of_sized(
target_ty.as_ref(),
locals,
"destination of int to int cast",
)?;
Owned(current[0..dest_size].to_vec())
}
CastKind::FloatToInt => {
@ -1579,9 +1580,12 @@ impl<'db> Evaluator<'db> {
not_supported!("unstable floating point type f16 and f128");
}
};
let is_signed = matches!(target_ty.kind(), TyKind::Int(_));
let dest_size =
self.size_of_sized(*target_ty, locals, "destination of float to int cast")?;
let is_signed = matches!(target_ty.as_ref().kind(), TyKind::Int(_));
let dest_size = self.size_of_sized(
target_ty.as_ref(),
locals,
"destination of float to int cast",
)?;
let dest_bits = dest_size * 8;
let (max, min) = if dest_bits == 128 {
(i128::MAX, i128::MIN)
@ -1614,7 +1618,7 @@ impl<'db> Evaluator<'db> {
not_supported!("unstable floating point type f16 and f128");
}
};
let TyKind::Float(target_ty) = target_ty.kind() else {
let TyKind::Float(target_ty) = target_ty.as_ref().kind() else {
not_supported!("invalid float to float cast");
};
match target_ty {
@ -1630,7 +1634,7 @@ impl<'db> Evaluator<'db> {
let is_signed = matches!(current_ty.kind(), TyKind::Int(_));
let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
let value = i128::from_le_bytes(value);
let TyKind::Float(target_ty) = target_ty.kind() else {
let TyKind::Float(target_ty) = target_ty.as_ref().kind() else {
not_supported!("invalid int to float cast");
};
match target_ty {
@ -1709,12 +1713,12 @@ impl<'db> Evaluator<'db> {
{
let field_types = self.db.field_types(struct_id.into());
if let Some(ty) =
field_types.iter().last().map(|it| it.1.instantiate(self.interner(), subst))
field_types.iter().last().map(|it| it.1.get().instantiate(self.interner(), subst))
{
return self.coerce_unsized_look_through_fields(ty, goal);
}
}
Err(MirEvalError::CoerceUnsizedError(ty))
Err(MirEvalError::CoerceUnsizedError(ty.store()))
}
fn coerce_unsized(
@ -1787,8 +1791,10 @@ impl<'db> Evaluator<'db> {
not_supported!("unsizing struct without field");
};
let target_last_field = self.db.field_types(id.into())[last_field]
.get()
.instantiate(self.interner(), target_subst);
let current_last_field = self.db.field_types(id.into())[last_field]
.get()
.instantiate(self.interner(), current_subst);
return self.unsizing_ptr_from_addr(
target_last_field,
@ -1806,7 +1812,7 @@ impl<'db> Evaluator<'db> {
&mut self,
it: VariantId,
subst: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, (usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
let adt = it.adt_id(self.db);
if let DefWithBodyId::VariantId(f) = locals.body.owner
@ -1900,11 +1906,7 @@ impl<'db> Evaluator<'db> {
Ok(result)
}
fn eval_operand(
&mut self,
it: &Operand<'db>,
locals: &mut Locals<'db>,
) -> Result<'db, Interval> {
fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<'db, Interval> {
Ok(match &it.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
locals.drop_flags.remove_place(p, &locals.body.projection_store);
@ -1914,14 +1916,16 @@ impl<'db> Evaluator<'db> {
let addr = self.eval_static(*st, locals)?;
Interval::new(addr, self.ptr_size())
}
OperandKind::Constant { konst, .. } => self.allocate_const_in_heap(locals, *konst)?,
OperandKind::Constant { konst, .. } => {
self.allocate_const_in_heap(locals, konst.as_ref())?
}
})
}
#[allow(clippy::double_parens)]
fn allocate_const_in_heap(
&mut self,
locals: &Locals<'db>,
locals: &Locals,
konst: Const<'db>,
) -> Result<'db, Interval> {
let result_owner;
@ -1971,7 +1975,7 @@ impl<'db> Evaluator<'db> {
} else if size < 16 && v.len() == 16 {
Cow::Borrowed(&v[0..size])
} else {
return Err(MirEvalError::InvalidConst(konst));
return Err(MirEvalError::InvalidConst(konst.store()));
}
} else {
Cow::Borrowed(v)
@ -1993,7 +1997,7 @@ impl<'db> Evaluator<'db> {
Ok(Interval::new(addr, size))
}
fn eval_place(&mut self, p: &Place<'db>, locals: &Locals<'db>) -> Result<'db, Interval> {
fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<'db, Interval> {
let addr = self.place_addr(p, locals)?;
Ok(Interval::new(
addr,
@ -2093,11 +2097,7 @@ impl<'db> Evaluator<'db> {
Ok(())
}
fn size_align_of(
&self,
ty: Ty<'db>,
locals: &Locals<'db>,
) -> Result<'db, Option<(usize, usize)>> {
fn size_align_of(&self, ty: Ty<'db>, locals: &Locals) -> Result<'db, Option<(usize, usize)>> {
if let Some(layout) = self.layout_cache.borrow().get(&ty) {
return Ok(layout
.is_sized()
@ -2126,12 +2126,12 @@ impl<'db> Evaluator<'db> {
fn size_of_sized(
&self,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
what: &'static str,
) -> Result<'db, usize> {
match self.size_align_of(ty, locals)? {
Some(it) => Ok(it.0),
None => Err(MirEvalError::TypeIsUnsized(ty, what)),
None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)),
}
}
@ -2140,12 +2140,12 @@ impl<'db> Evaluator<'db> {
fn size_align_of_sized(
&self,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
what: &'static str,
) -> Result<'db, (usize, usize)> {
match self.size_align_of(ty, locals)? {
Some(it) => Ok(it),
None => Err(MirEvalError::TypeIsUnsized(ty, what)),
None => Err(MirEvalError::TypeIsUnsized(ty.store(), what)),
}
}
@ -2181,13 +2181,13 @@ impl<'db> Evaluator<'db> {
&self,
bytes: &[u8],
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, ComplexMemoryMap<'db>> {
fn rec<'db>(
this: &Evaluator<'db>,
bytes: &[u8],
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
mm: &mut ComplexMemoryMap<'db>,
stack_depth_limit: usize,
) -> Result<'db, ()> {
@ -2288,7 +2288,7 @@ impl<'db> Evaluator<'db> {
.fields
.offset(u32::from(f.into_raw()) as usize)
.bytes_usize();
let ty = field_types[f].instantiate(this.interner(), subst);
let ty = field_types[f].get().instantiate(this.interner(), subst);
let size = this.layout(ty)?.size.bytes_usize();
rec(
this,
@ -2314,7 +2314,7 @@ impl<'db> Evaluator<'db> {
for (f, _) in data.fields().iter() {
let offset =
l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
let ty = field_types[f].instantiate(this.interner(), subst);
let ty = field_types[f].get().instantiate(this.interner(), subst);
let size = this.layout(ty)?.size.bytes_usize();
rec(
this,
@ -2356,7 +2356,7 @@ impl<'db> Evaluator<'db> {
ty_of_bytes: impl Fn(&[u8]) -> Result<'db, Ty<'db>> + Copy,
addr: Address,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, ()> {
// FIXME: support indirect references
let layout = self.layout(ty)?;
@ -2389,7 +2389,7 @@ impl<'db> Evaluator<'db> {
AdtId::StructId(s) => {
for (i, (_, ty)) in self.db.field_types(s.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.instantiate(self.interner(), args);
let ty = ty.get().instantiate(self.interner(), args);
self.patch_addresses(
patch_map,
ty_of_bytes,
@ -2410,7 +2410,7 @@ impl<'db> Evaluator<'db> {
) {
for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.instantiate(self.interner(), args);
let ty = ty.get().instantiate(self.interner(), args);
self.patch_addresses(
patch_map,
ty_of_bytes,
@ -2477,10 +2477,10 @@ impl<'db> Evaluator<'db> {
bytes: Interval,
destination: Interval,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
target_bb: Option<BasicBlockId<'db>>,
locals: &Locals,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
let next_ty = self.vtable_map.ty(id)?;
use rustc_type_ir::TyKind;
@ -2508,19 +2508,23 @@ impl<'db> Evaluator<'db> {
generic_args: GenericArgs<'db>,
destination: Interval,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
let mir_body = self
.db
.monomorphized_mir_body_for_closure(closure, generic_args, self.param_env)
.monomorphized_mir_body_for_closure(
closure,
generic_args.store(),
self.param_env.store(),
)
.map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
{
closure_data.addr.to_bytes().to_vec()
} else {
closure_data.get(self)?.to_owned()
};
let closure_data =
if mir_body.locals[mir_body.param_locals[0]].ty.as_ref().as_reference().is_some() {
closure_data.addr.to_bytes().to_vec()
} else {
closure_data.get(self)?.to_owned()
};
let arg_bytes = iter::once(Ok(closure_data))
.chain(args.iter().map(|it| Ok(it.get(self)?.to_owned())))
.collect::<Result<'db, Vec<_>>>()?;
@ -2542,10 +2546,10 @@ impl<'db> Evaluator<'db> {
generic_args: GenericArgs<'db>,
destination: Interval,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
target_bb: Option<BasicBlockId<'db>>,
locals: &Locals,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
match def {
CallableDefId::FunctionId(def) => {
if self.detect_fn_trait(def).is_some() {
@ -2600,9 +2604,9 @@ impl<'db> Evaluator<'db> {
&self,
def: FunctionId,
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, MirOrDynIndex<'db>> {
) -> Result<'db, MirOrDynIndex> {
let pair = (def, generic_args);
if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) {
return Ok(r.clone());
@ -2621,7 +2625,7 @@ impl<'db> Evaluator<'db> {
let mir_body = self
.db
.monomorphized_mir_body(imp.into(), generic_args, self.param_env)
.monomorphized_mir_body(imp.into(), generic_args.store(), self.param_env.store())
.map_err(|e| {
MirEvalError::InFunction(
Box::new(MirEvalError::MirLowerError(imp, e)),
@ -2639,11 +2643,11 @@ impl<'db> Evaluator<'db> {
mut def: FunctionId,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
target_bb: Option<BasicBlockId<'db>>,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
if self.detect_and_exec_special_function(
def,
args,
@ -2705,14 +2709,14 @@ impl<'db> Evaluator<'db> {
fn exec_looked_up_function(
&mut self,
mir_body: Arc<MirBody<'db>>,
locals: &Locals<'db>,
mir_body: Arc<MirBody>,
locals: &Locals,
def: FunctionId,
arg_bytes: impl Iterator<Item = IntervalOrOwned>,
span: MirSpan,
destination: Interval,
target_bb: Option<BasicBlockId<'db>>,
) -> Result<'db, Option<StackFrame<'db>>> {
target_bb: Option<BasicBlockId>,
) -> Result<'db, Option<StackFrame>> {
Ok(if let Some(target_bb) = target_bb {
let (mut locals, prev_stack_ptr) =
self.create_locals_for_body(&mir_body, Some(destination))?;
@ -2736,11 +2740,11 @@ impl<'db> Evaluator<'db> {
def: FunctionId,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
target_bb: Option<BasicBlockId<'db>>,
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<'db, Option<StackFrame<'db>>> {
) -> Result<'db, Option<StackFrame>> {
let func = args
.first()
.ok_or_else(|| MirEvalError::InternalError("fn trait with no arg".into()))?;
@ -2805,7 +2809,7 @@ impl<'db> Evaluator<'db> {
}
}
fn eval_static(&mut self, st: StaticId, locals: &Locals<'db>) -> Result<'db, Address> {
fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<'db, Address> {
if let Some(o) = self.static_locations.get(&st) {
return Ok(*o);
};
@ -2816,8 +2820,8 @@ impl<'db> Evaluator<'db> {
})?;
self.allocate_const_in_heap(locals, konst)?
} else {
let ty =
InferenceResult::for_body(self.db, st.into())[self.db.body(st.into()).body_expr];
let ty = InferenceResult::for_body(self.db, st.into())
.expr_ty(self.db.body(st.into()).body_expr);
let Some((size, align)) = self.size_align_of(ty, locals)? else {
not_supported!("unsized extern static");
};
@ -2852,12 +2856,7 @@ impl<'db> Evaluator<'db> {
}
}
fn drop_place(
&mut self,
place: &Place<'db>,
locals: &mut Locals<'db>,
span: MirSpan,
) -> Result<'db, ()> {
fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<'db, ()> {
let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
if !locals.drop_flags.remove_place(place, &locals.body.projection_store) {
return Ok(());
@ -2872,7 +2871,7 @@ impl<'db> Evaluator<'db> {
fn run_drop_glue_deep(
&mut self,
ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
addr: Address,
_metadata: &[u8],
span: MirSpan,
@ -2886,7 +2885,7 @@ impl<'db> Evaluator<'db> {
return Ok(());
};
let generic_args = GenericArgs::new_from_iter(self.interner(), [ty.into()]);
let generic_args = GenericArgs::new_from_slice(&[ty.into()]);
if let Ok(MirOrDynIndex::Mir(body)) =
self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span)
{
@ -2920,7 +2919,9 @@ impl<'db> Evaluator<'db> {
.offset(u32::from(field.into_raw()) as usize)
.bytes_usize();
let addr = addr.offset(offset);
let ty = field_types[field].instantiate(self.interner(), subst);
let ty = field_types[field]
.get()
.instantiate(self.interner(), subst);
self.run_drop_glue_deep(ty, locals, addr, &[], span)?;
}
}
@ -3011,7 +3012,7 @@ pub fn render_const_using_debug_impl<'db>(
let debug_fmt_fn_ptr = evaluator.vtable_map.id(Ty::new_fn_def(
evaluator.interner(),
CallableDefId::FunctionId(debug_fmt_fn).into(),
GenericArgs::new_from_iter(evaluator.interner(), [ty.into()]),
GenericArgs::new_from_slice(&[ty.into()]),
));
evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
// a3 = ::core::fmt::Arguments::new_v1(a1, a2)

View file

@ -52,7 +52,7 @@ impl<'db> Evaluator<'db> {
def: FunctionId,
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<'db, bool> {
@ -149,7 +149,7 @@ impl<'db> Evaluator<'db> {
def: FunctionId,
args: &[IntervalAndTy<'db>],
self_ty: Ty<'db>,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<'db, ()> {
@ -195,7 +195,7 @@ impl<'db> Evaluator<'db> {
self.exec_fn_with_args(
def,
args,
GenericArgs::new_from_iter(self.interner(), [self_ty.into()]),
GenericArgs::new_from_slice(&[self_ty.into()]),
locals,
destination,
None,
@ -212,7 +212,7 @@ impl<'db> Evaluator<'db> {
layout: Arc<Layout>,
addr: Address,
def: FunctionId,
locals: &Locals<'db>,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<'db, ()> {
@ -318,7 +318,7 @@ impl<'db> Evaluator<'db> {
it: EvalLangItem,
generic_args: GenericArgs<'db>,
args: &[IntervalAndTy<'db>],
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, Vec<u8>> {
use EvalLangItem::*;
@ -390,7 +390,7 @@ impl<'db> Evaluator<'db> {
id: i64,
args: &[IntervalAndTy<'db>],
destination: Interval,
_locals: &Locals<'db>,
_locals: &Locals,
_span: MirSpan,
) -> Result<'db, ()> {
match id {
@ -421,7 +421,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
_generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
) -> Result<'db, ()> {
match as_str {
@ -587,7 +587,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals<'db>,
locals: &Locals,
span: MirSpan,
needs_override: bool,
) -> Result<'db, bool> {
@ -1235,7 +1235,7 @@ impl<'db> Evaluator<'db> {
def,
&args,
// FIXME: wrong for manual impls of `FnOnce`
GenericArgs::new_from_iter(self.interner(), []),
GenericArgs::empty(self.interner()),
locals,
destination,
None,
@ -1369,7 +1369,7 @@ impl<'db> Evaluator<'db> {
&mut self,
ty: Ty<'db>,
metadata: Interval,
locals: &Locals<'db>,
locals: &Locals,
) -> Result<'db, (usize, usize)> {
Ok(match ty.kind() {
TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
@ -1391,8 +1391,13 @@ impl<'db> Evaluator<'db> {
_ => not_supported!("unsized enum or union"),
};
let field_types = self.db.field_types(id.into());
let last_field_ty =
field_types.iter().next_back().unwrap().1.instantiate(self.interner(), subst);
let last_field_ty = field_types
.iter()
.next_back()
.unwrap()
.1
.get()
.instantiate(self.interner(), subst);
let sized_part_size =
layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
let sized_part_align = layout.align.bytes() as usize;
@ -1423,7 +1428,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
generic_args: GenericArgs<'db>,
destination: Interval,
locals: &Locals<'db>,
locals: &Locals,
_span: MirSpan,
) -> Result<'db, ()> {
// We are a single threaded runtime with no UB checking and no optimization, so

View file

@ -35,6 +35,7 @@ impl<'db> Evaluator<'db> {
not_supported!("simd type with no field");
};
let field_ty = self.db.field_types(id.into())[first_field]
.get()
.instantiate(self.interner(), subst);
return Ok((fields.len(), field_ty));
}
@ -67,7 +68,7 @@ impl<'db> Evaluator<'db> {
args: &[IntervalAndTy<'db>],
_generic_args: GenericArgs<'db>,
destination: Interval,
_locals: &Locals<'db>,
_locals: &Locals,
_span: MirSpan,
) -> Result<'db, ()> {
match name {

View file

@ -15,7 +15,7 @@ use crate::{
use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> {
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError> {
crate::attach_db(db, || {
let interner = DbInterner::new_no_crate(db);
let module_id = db.module_for_file(file_id.file_id(db));
@ -39,11 +39,12 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
let body = db
.monomorphized_mir_body(
func_id.into(),
GenericArgs::new_from_iter(interner, []),
GenericArgs::empty(interner).store(),
crate::ParamEnvAndCrate {
param_env: db.trait_environment(func_id.into()),
krate: func_id.krate(db),
},
}
.store(),
)
.map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
@ -122,7 +123,7 @@ fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic:
fn check_error_with(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
expect_err: impl FnOnce(MirEvalError<'_>) -> bool,
expect_err: impl FnOnce(MirEvalError) -> bool,
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
crate::attach_db(&db, || {

View file

@ -19,7 +19,7 @@ use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_apfloat::Float;
use rustc_hash::FxHashMap;
use rustc_type_ir::inherent::{Const as _, IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{Const as _, IntoKind, Ty as _};
use span::{Edition, FileId};
use syntax::TextRange;
use triomphe::Arc;
@ -42,7 +42,8 @@ use crate::{
TupleFieldId, Ty, UnOp, VariantId, return_slot,
},
next_solver::{
Const, DbInterner, ParamConst, ParamEnv, Region, TyKind, TypingMode, UnevaluatedConst,
Const, DbInterner, ParamConst, ParamEnv, Region, StoredGenericArgs, StoredTy, TyKind,
TypingMode, UnevaluatedConst,
infer::{DbInternerInferExt, InferCtxt},
},
traits::FnTrait,
@ -56,39 +57,39 @@ mod pattern_matching;
mod tests;
#[derive(Debug, Clone)]
struct LoopBlocks<'db> {
begin: BasicBlockId<'db>,
struct LoopBlocks {
begin: BasicBlockId,
/// `None` for loops that are not terminating
end: Option<BasicBlockId<'db>>,
place: Place<'db>,
end: Option<BasicBlockId>,
place: Place,
drop_scope_index: usize,
}
#[derive(Debug, Clone, Default)]
struct DropScope<'db> {
struct DropScope {
/// locals, in order of definition (so we should run drop glues in reverse order)
locals: Vec<LocalId<'db>>,
locals: Vec<LocalId>,
}
struct MirLowerCtx<'a, 'db> {
result: MirBody<'db>,
result: MirBody,
owner: DefWithBodyId,
current_loop_blocks: Option<LoopBlocks<'db>>,
labeled_loop_blocks: FxHashMap<LabelId, LoopBlocks<'db>>,
discr_temp: Option<Place<'db>>,
current_loop_blocks: Option<LoopBlocks>,
labeled_loop_blocks: FxHashMap<LabelId, LoopBlocks>,
discr_temp: Option<Place>,
db: &'db dyn HirDatabase,
body: &'a Body,
infer: &'a InferenceResult<'db>,
infer: &'a InferenceResult,
resolver: Resolver<'db>,
drop_scopes: Vec<DropScope<'db>>,
drop_scopes: Vec<DropScope>,
env: ParamEnv<'db>,
infcx: InferCtxt<'db>,
}
// FIXME: Make this smaller, its stored in database queries
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MirLowerError<'db> {
ConstEvalError(Box<str>, Box<ConstEvalError<'db>>),
pub enum MirLowerError {
ConstEvalError(Box<str>, Box<ConstEvalError>),
LayoutError(LayoutError),
IncompleteExpr,
IncompletePattern,
@ -98,9 +99,9 @@ pub enum MirLowerError<'db> {
RecordLiteralWithoutPath,
UnresolvedMethod(String),
UnresolvedField,
UnsizedTemporary(Ty<'db>),
UnsizedTemporary(StoredTy),
MissingFunctionDefinition(DefWithBodyId, ExprId),
TypeMismatch(TypeMismatch<'db>),
TypeMismatch(TypeMismatch),
HasErrors,
/// This should never happen. Type mismatch should catch everything.
TypeError(&'static str),
@ -113,11 +114,11 @@ pub enum MirLowerError<'db> {
LangItemNotFound,
MutatingRvalue,
UnresolvedLabel,
UnresolvedUpvar(Place<'db>),
UnresolvedUpvar(Place),
InaccessibleLocal,
// monomorphization errors:
GenericArgNotProvided(GenericParamId, GenericArgs<'db>),
GenericArgNotProvided(GenericParamId, StoredGenericArgs),
}
/// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves.
@ -126,9 +127,9 @@ impl DropScopeToken {
fn pop_and_drop<'db>(
self,
ctx: &mut MirLowerCtx<'_, 'db>,
current: BasicBlockId<'db>,
current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId<'db> {
) -> BasicBlockId {
std::mem::forget(self);
ctx.pop_drop_scope_internal(current, span)
}
@ -158,7 +159,7 @@ impl Drop for DropScopeToken {
// }
// }
impl MirLowerError<'_> {
impl MirLowerError {
pub fn pretty_print(
&self,
f: &mut String,
@ -190,8 +191,8 @@ impl MirLowerError<'_> {
MirLowerError::TypeMismatch(e) => writeln!(
f,
"Type mismatch: Expected {}, found {}",
e.expected.display(db, display_target),
e.actual.display(db, display_target),
e.expected.as_ref().display(db, display_target),
e.actual.as_ref().display(db, display_target),
)?,
MirLowerError::GenericArgNotProvided(id, subst) => {
let param_name = match *id {
@ -211,7 +212,7 @@ impl MirLowerError<'_> {
param_name.unwrap_or(Name::missing()).display(db, display_target.edition)
)?;
writeln!(f, "Provided args: [")?;
for g in subst.iter() {
for g in subst.as_ref() {
write!(f, " {},", g.display(db, display_target))?;
}
writeln!(f, "]")?;
@ -254,13 +255,13 @@ macro_rules! implementation_error {
}};
}
impl From<LayoutError> for MirLowerError<'_> {
impl From<LayoutError> for MirLowerError {
fn from(value: LayoutError) -> Self {
MirLowerError::LayoutError(value)
}
}
impl MirLowerError<'_> {
impl MirLowerError {
fn unresolved_path(
db: &dyn HirDatabase,
p: &Path,
@ -273,14 +274,14 @@ impl MirLowerError<'_> {
}
}
type Result<'db, T> = std::result::Result<T, MirLowerError<'db>>;
type Result<'db, T> = std::result::Result<T, MirLowerError>;
impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn new(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
body: &'a Body,
infer: &'a InferenceResult<'db>,
infer: &'a InferenceResult,
) -> Self {
let mut basic_blocks = Arena::new();
let start_block = basic_blocks.alloc(BasicBlock {
@ -289,7 +290,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
is_cleanup: false,
});
let locals = Arena::new();
let binding_locals: ArenaMap<BindingId, LocalId<'db>> = ArenaMap::new();
let binding_locals: ArenaMap<BindingId, LocalId> = ArenaMap::new();
let mir = MirBody {
projection_store: ProjectionStore::default(),
basic_blocks,
@ -332,16 +333,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.infcx.interner.lang_items()
}
fn temp(
&mut self,
ty: Ty<'db>,
current: BasicBlockId<'db>,
span: MirSpan,
) -> Result<'db, LocalId<'db>> {
fn temp(&mut self, ty: Ty<'db>, current: BasicBlockId, span: MirSpan) -> Result<'db, LocalId> {
if matches!(ty.kind(), TyKind::Slice(_) | TyKind::Dynamic(..)) {
return Err(MirLowerError::UnsizedTemporary(ty));
return Err(MirLowerError::UnsizedTemporary(ty.store()));
}
let l = self.result.locals.alloc(Local { ty });
let l = self.result.locals.alloc(Local { ty: ty.store() });
self.push_storage_live_for_local(l, current, span)?;
Ok(l)
}
@ -349,8 +345,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_some_operand(
&mut self,
expr_id: ExprId,
current: BasicBlockId<'db>,
) -> Result<'db, Option<(Operand<'db>, BasicBlockId<'db>)>> {
current: BasicBlockId,
) -> Result<'db, Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id)
&& let Expr::Literal(l) = &self.body[expr_id]
{
@ -366,10 +362,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place_with_adjust(
&mut self,
expr_id: ExprId,
place: Place<'db>,
current: BasicBlockId<'db>,
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<BasicBlockId<'db>>> {
place: Place,
current: BasicBlockId,
adjustments: &[Adjustment],
) -> Result<'db, Option<BasicBlockId>> {
match adjustments.split_last() {
Some((last, rest)) => match &last.kind {
Adjust::NeverToAny => {
@ -416,7 +412,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Rvalue::Cast(
CastKind::PointerCoercion(*cast),
Operand { kind: OperandKind::Copy(p), span: None },
last.target,
last.target.clone(),
),
expr_id.into(),
);
@ -430,11 +426,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place_with_borrow_adjust(
&mut self,
expr_id: ExprId,
place: Place<'db>,
current: BasicBlockId<'db>,
rest: &[Adjustment<'db>],
place: Place,
current: BasicBlockId,
rest: &[Adjustment],
m: Mutability,
) -> Result<'db, Option<BasicBlockId<'db>>> {
) -> Result<'db, Option<BasicBlockId>> {
let Some((p, current)) =
self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
else {
@ -448,9 +444,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place(
&mut self,
expr_id: ExprId,
place: Place<'db>,
prev_block: BasicBlockId<'db>,
) -> Result<'db, Option<BasicBlockId<'db>>> {
place: Place,
prev_block: BasicBlockId,
) -> Result<'db, Option<BasicBlockId>> {
if let Some(adjustments) = self.infer.expr_adjustments.get(&expr_id) {
return self.lower_expr_to_place_with_adjust(expr_id, place, prev_block, adjustments);
}
@ -460,9 +456,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_expr_to_place_without_adjust(
&mut self,
expr_id: ExprId,
place: Place<'db>,
mut current: BasicBlockId<'db>,
) -> Result<'db, Option<BasicBlockId<'db>>> {
place: Place,
mut current: BasicBlockId,
) -> Result<'db, Option<BasicBlockId>> {
match &self.body[expr_id] {
Expr::OffsetOf(_) => {
not_supported!("builtin#offset_of")
@ -537,7 +533,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
const_id.into(),
current,
place,
GenericArgs::new_from_iter(self.interner(), []),
GenericArgs::empty(self.interner()),
expr_id.into(),
)?;
Ok(Some(current))
@ -545,7 +541,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
ValueNs::EnumVariantId(variant_id) => {
let variant_fields = variant_id.fields(self.db);
if variant_fields.shape == FieldsShape::Unit {
let ty = self.infer.type_of_expr[expr_id];
let ty = self.infer.expr_ty(expr_id);
current = self.lower_enum_variant(
variant_id,
current,
@ -575,8 +571,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
konst: Const::new_param(
self.interner(),
ParamConst { id: p, index },
),
ty: self.db.const_param_ty_ns(p),
)
.store(),
ty: self.db.const_param_ty_ns(p).store(),
},
span: None,
}),
@ -907,7 +904,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
operands[u32::from(field_id.into_raw()) as usize] = Some(op);
}
let rvalue = Rvalue::Aggregate(
AggregateKind::Adt(variant_id, subst),
AggregateKind::Adt(variant_id, subst.store()),
match spread_place {
Some(sp) => operands
.into_iter()
@ -978,15 +975,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
let rvalue = if self.infer.coercion_casts.contains(expr) {
Rvalue::Use(it)
} else {
let source_ty = self.infer[*expr];
let target_ty = self.infer[expr_id];
let source_ty = self.infer.expr_ty(*expr);
let target_ty = self.infer.expr_ty(expr_id);
let cast_kind = if source_ty.as_reference().is_some() {
CastKind::PointerCoercion(PointerCast::ArrayToPointer)
} else {
cast_kind(self.db, source_ty, target_ty)?
};
Rvalue::Cast(cast_kind, it, target_ty)
Rvalue::Cast(cast_kind, it, target_ty.store())
};
self.push_assignment(current, place, rvalue, expr_id.into());
Ok(Some(current))
@ -1004,7 +1001,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.push_assignment(
current,
place,
Rvalue::ShallowInitBoxWithAlloc(ty),
Rvalue::ShallowInitBoxWithAlloc(ty.store()),
expr_id.into(),
);
let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
@ -1222,7 +1219,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
current,
place,
Rvalue::Aggregate(
AggregateKind::Adt(st.into(), subst),
AggregateKind::Adt(st.into(), subst.store()),
st.fields(self.db)
.fields()
.iter()
@ -1284,11 +1281,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
};
match &capture.kind {
CaptureKind::ByRef(bk) => {
let tmp_ty = capture.ty.instantiate_identity();
let tmp_ty = capture.ty.get().instantiate_identity();
// FIXME: Handle more than one span.
let capture_spans = capture.spans();
let tmp: Place<'db> =
self.temp(tmp_ty, current, capture_spans[0])?.into();
let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into();
self.push_assignment(
current,
tmp,
@ -1305,7 +1301,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.push_assignment(
current,
place,
Rvalue::Aggregate(AggregateKind::Closure(ty), operands.into()),
Rvalue::Aggregate(AggregateKind::Closure(ty.store()), operands.into()),
expr_id.into(),
);
Ok(Some(current))
@ -1325,7 +1321,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
return Ok(None);
};
let r = Rvalue::Aggregate(
AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id)),
AggregateKind::Tuple(self.expr_ty_without_adjust(expr_id).store()),
values,
);
self.push_assignment(current, place, r, expr_id.into());
@ -1355,7 +1351,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
else {
return Ok(None);
};
let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values);
let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty.store()), values);
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
}
@ -1373,7 +1369,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
));
}
};
let r = Rvalue::Repeat(init, len);
let r = Rvalue::Repeat(init, len.store());
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
}
@ -1388,11 +1384,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
}
fn push_field_projection(
&mut self,
place: &mut Place<'db>,
expr_id: ExprId,
) -> Result<'db, ()> {
fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<'db, ()> {
if let Expr::Field { expr, name } = &self.body[expr_id] {
if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind() {
let index =
@ -1421,7 +1413,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
&mut self,
ty: Ty<'db>,
loc: &ExprId,
) -> Result<'db, Operand<'db>> {
) -> Result<'db, Operand> {
match &self.body[*loc] {
Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
Expr::Path(c) => {
@ -1443,7 +1435,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
ResolveValueResult::ValueNs(v, _) => {
if let ValueNs::ConstId(c) = v {
self.lower_const_to_operand(
GenericArgs::new_from_iter(self.interner(), []),
GenericArgs::empty(self.interner()),
c.into(),
)
} else {
@ -1461,10 +1453,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
}
fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand<'db>> {
fn lower_literal_to_operand(&mut self, ty: Ty<'db>, l: &Literal) -> Result<'db, Operand> {
let size = || {
self.db
.layout_of_ty(ty, ParamEnvAndCrate { param_env: self.env, krate: self.krate() })
.layout_of_ty(
ty.store(),
ParamEnvAndCrate { param_env: self.env, krate: self.krate() }.store(),
)
.map(|it| it.size.bytes_usize())
};
const USIZE_SIZE: usize = size_of::<usize>();
@ -1512,15 +1507,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Ok(Operand::from_concrete_const(bytes, MemoryMap::default(), ty))
}
fn new_basic_block(&mut self) -> BasicBlockId<'db> {
fn new_basic_block(&mut self) -> BasicBlockId {
self.result.basic_blocks.alloc(BasicBlock::default())
}
fn lower_const(
&mut self,
const_id: GeneralConstId,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
subst: GenericArgs<'db>,
span: MirSpan,
) -> Result<'db, ()> {
@ -1533,8 +1528,8 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
&mut self,
subst: GenericArgs<'db>,
const_id: GeneralConstId,
) -> Result<'db, Operand<'db>> {
let konst = if subst.len() != 0 {
) -> Result<'db, Operand> {
let konst = if !subst.is_empty() {
// We can't evaluate constant with substitution now, as generics are not monomorphized in lowering.
Const::new_unevaluated(
self.interner(),
@ -1564,13 +1559,16 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
})
.unwrap()
.instantiate(self.interner(), subst);
Ok(Operand { kind: OperandKind::Constant { konst, ty }, span: None })
Ok(Operand {
kind: OperandKind::Constant { konst: konst.store(), ty: ty.store() },
span: None,
})
}
fn write_bytes_to_place(
&mut self,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
cv: Box<[u8]>,
ty: Ty<'db>,
span: MirSpan,
@ -1582,12 +1580,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_enum_variant(
&mut self,
variant_id: EnumVariantId,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
ty: Ty<'db>,
fields: Box<[Operand<'db>]>,
fields: Box<[Operand]>,
span: MirSpan,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
let subst = match ty.kind() {
TyKind::Adt(_, subst) => subst,
_ => implementation_error!("Non ADT enum"),
@ -1595,7 +1593,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
self.push_assignment(
prev_block,
place,
Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst), fields),
Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst.store()), fields),
span,
);
Ok(prev_block)
@ -1603,13 +1601,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_call_and_args(
&mut self,
func: Operand<'db>,
func: Operand,
args: impl Iterator<Item = ExprId>,
place: Place<'db>,
mut current: BasicBlockId<'db>,
place: Place,
mut current: BasicBlockId,
is_uninhabited: bool,
span: MirSpan,
) -> Result<'db, Option<BasicBlockId<'db>>> {
) -> Result<'db, Option<BasicBlockId>> {
let Some(args) = args
.map(|arg| {
if let Some((temp, c)) = self.lower_expr_to_some_operand(arg, current)? {
@ -1628,13 +1626,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_call(
&mut self,
func: Operand<'db>,
args: Box<[Operand<'db>]>,
place: Place<'db>,
current: BasicBlockId<'db>,
func: Operand,
args: Box<[Operand]>,
place: Place,
current: BasicBlockId,
is_uninhabited: bool,
span: MirSpan,
) -> Result<'db, Option<BasicBlockId<'db>>> {
) -> Result<'db, Option<BasicBlockId>> {
let b = if is_uninhabited { None } else { Some(self.new_basic_block()) };
self.set_terminator(
current,
@ -1651,25 +1649,20 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Ok(b)
}
fn is_unterminated(&mut self, source: BasicBlockId<'db>) -> bool {
fn is_unterminated(&mut self, source: BasicBlockId) -> bool {
self.result.basic_blocks[source].terminator.is_none()
}
fn set_terminator(
&mut self,
source: BasicBlockId<'db>,
terminator: TerminatorKind<'db>,
span: MirSpan,
) {
fn set_terminator(&mut self, source: BasicBlockId, terminator: TerminatorKind, span: MirSpan) {
self.result.basic_blocks[source].terminator = Some(Terminator { span, kind: terminator });
}
fn set_goto(&mut self, source: BasicBlockId<'db>, target: BasicBlockId<'db>, span: MirSpan) {
fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId, span: MirSpan) {
self.set_terminator(source, TerminatorKind::Goto { target }, span);
}
fn expr_ty_without_adjust(&self, e: ExprId) -> Ty<'db> {
self.infer[e]
self.infer.expr_ty(e)
}
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
@ -1677,36 +1670,36 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
if let Some(it) = self.infer.expr_adjustments.get(&e)
&& let Some(it) = it.last()
{
ty = Some(it.target);
ty = Some(it.target.as_ref());
}
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
}
fn push_statement(&mut self, block: BasicBlockId<'db>, statement: Statement<'db>) {
fn push_statement(&mut self, block: BasicBlockId, statement: Statement) {
self.result.basic_blocks[block].statements.push(statement);
}
fn push_fake_read(&mut self, block: BasicBlockId<'db>, p: Place<'db>, span: MirSpan) {
fn push_fake_read(&mut self, block: BasicBlockId, p: Place, span: MirSpan) {
self.push_statement(block, StatementKind::FakeRead(p).with_span(span));
}
fn push_assignment(
&mut self,
block: BasicBlockId<'db>,
place: Place<'db>,
rvalue: Rvalue<'db>,
block: BasicBlockId,
place: Place,
rvalue: Rvalue,
span: MirSpan,
) {
self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span));
}
fn discr_temp_place(&mut self, current: BasicBlockId<'db>) -> Place<'db> {
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
match &self.discr_temp {
Some(it) => *it,
None => {
// FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well
let discr_ty = Ty::new_int(self.interner(), rustc_type_ir::IntTy::I128);
let tmp: Place<'db> = self
let tmp: Place = self
.temp(discr_ty, current, MirSpan::Unknown)
.expect("discr_ty is never unsized")
.into();
@ -1718,12 +1711,12 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_loop(
&mut self,
prev_block: BasicBlockId<'db>,
place: Place<'db>,
prev_block: BasicBlockId,
place: Place,
label: Option<LabelId>,
span: MirSpan,
f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId<'db>) -> Result<'db, ()>,
) -> Result<'db, Option<BasicBlockId<'db>>> {
f: impl FnOnce(&mut MirLowerCtx<'_, 'db>, BasicBlockId) -> Result<'db, ()>,
) -> Result<'db, Option<BasicBlockId>> {
let begin = self.new_basic_block();
let prev = self.current_loop_blocks.replace(LoopBlocks {
begin,
@ -1758,10 +1751,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn merge_blocks(
&mut self,
b1: Option<BasicBlockId<'db>>,
b2: Option<BasicBlockId<'db>>,
b1: Option<BasicBlockId>,
b2: Option<BasicBlockId>,
span: MirSpan,
) -> Option<BasicBlockId<'db>> {
) -> Option<BasicBlockId> {
match (b1, b2) {
(None, None) => None,
(None, Some(b)) | (Some(b), None) => Some(b),
@ -1774,7 +1767,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
}
}
fn current_loop_end(&mut self) -> Result<'db, BasicBlockId<'db>> {
fn current_loop_end(&mut self) -> Result<'db, BasicBlockId> {
let r = match self
.current_loop_blocks
.as_mut()
@ -1801,7 +1794,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn is_uninhabited(&self, expr_id: ExprId) -> bool {
is_ty_uninhabited_from(
&self.infcx,
self.infer[expr_id],
self.infer.expr_ty(expr_id),
self.owner.module(self.db),
self.env,
)
@ -1809,15 +1802,15 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
/// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
/// `Drop` in the appropriated places.
fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId<'db>) -> Result<'db, ()> {
fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<'db, ()> {
let l = self.binding_local(b)?;
self.push_storage_live_for_local(l, current, MirSpan::BindingId(b))
}
fn push_storage_live_for_local(
&mut self,
l: LocalId<'db>,
current: BasicBlockId<'db>,
l: LocalId,
current: BasicBlockId,
span: MirSpan,
) -> Result<'db, ()> {
self.drop_scopes.last_mut().unwrap().locals.push(l);
@ -1828,11 +1821,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn lower_block_to_place(
&mut self,
statements: &[hir_def::hir::Statement],
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
tail: Option<ExprId>,
place: Place<'db>,
place: Place,
span: MirSpan,
) -> Result<'db, Option<Idx<BasicBlock<'db>>>> {
) -> Result<'db, Option<Idx<BasicBlock>>> {
let scope = self.push_drop_scope();
for statement in statements.iter() {
match statement {
@ -1908,11 +1901,11 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
params: impl Iterator<Item = (PatId, Ty<'db>)> + Clone,
self_binding: Option<(BindingId, Ty<'db>)>,
pick_binding: impl Fn(BindingId) -> bool,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
let base_param_count = self.result.param_locals.len();
let self_binding = match self_binding {
Some((self_binding, ty)) => {
let local_id = self.result.locals.alloc(Local { ty });
let local_id = self.result.locals.alloc(Local { ty: ty.store() });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
self.result.binding_locals.insert(self_binding, local_id);
self.result.param_locals.push(local_id);
@ -1921,7 +1914,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
None => None,
};
self.result.param_locals.extend(params.clone().map(|(it, ty)| {
let local_id = self.result.locals.alloc(Local { ty });
let local_id = self.result.locals.alloc(Local { ty: ty.store() });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[it]
&& matches!(
@ -1939,9 +1932,10 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
continue;
}
if !self.result.binding_locals.contains_idx(id) {
self.result
.binding_locals
.insert(id, self.result.locals.alloc(Local { ty: self.infer[id] }));
self.result.binding_locals.insert(
id,
self.result.locals.alloc(Local { ty: self.infer.binding_ty(id).store() }),
);
}
}
let mut current = self.result.start_block;
@ -1976,7 +1970,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
Ok(current)
}
fn binding_local(&self, b: BindingId) -> Result<'db, LocalId<'db>> {
fn binding_local(&self, b: BindingId) -> Result<'db, LocalId> {
match self.result.binding_locals.get(b) {
Some(it) => Ok(*it),
None => {
@ -2025,9 +2019,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn drop_until_scope(
&mut self,
scope_index: usize,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId<'db> {
) -> BasicBlockId {
for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() {
self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span);
}
@ -2047,9 +2041,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
/// Don't call directly
fn pop_drop_scope_internal(
&mut self,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId<'db> {
) -> BasicBlockId {
let scope = self.drop_scopes.pop().unwrap();
self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span);
current
@ -2057,9 +2051,9 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn pop_drop_scope_assert_finished(
&mut self,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
current = self.pop_drop_scope_internal(current, span);
if !self.drop_scopes.is_empty() {
implementation_error!("Mismatched count between drop scope push and pops");
@ -2069,12 +2063,13 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> {
fn emit_drop_and_storage_dead_for_scope(
&mut self,
scope: &DropScope<'db>,
current: &mut Idx<BasicBlock<'db>>,
scope: &DropScope,
current: &mut Idx<BasicBlock>,
span: MirSpan,
) {
for &l in scope.locals.iter().rev() {
if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty) {
if !self.infcx.type_is_copy_modulo_regions(self.env, self.result.locals[l].ty.as_ref())
{
let prev = std::mem::replace(current, self.new_basic_block());
self.set_terminator(
prev,
@ -2112,36 +2107,37 @@ fn cast_kind<'db>(
pub fn mir_body_for_closure_query<'db>(
db: &'db dyn HirDatabase,
closure: InternedClosureId,
) -> Result<'db, Arc<MirBody<'db>>> {
) -> Result<'db, Arc<MirBody>> {
let InternedClosure(owner, expr) = db.lookup_intern_closure(closure);
let body = db.body(owner);
let infer = InferenceResult::for_body(db, owner);
let Expr::Closure { args, body: root, .. } = &body[expr] else {
implementation_error!("closure expression is not closure");
};
let crate::next_solver::TyKind::Closure(_, substs) = infer[expr].kind() else {
let crate::next_solver::TyKind::Closure(_, substs) = infer.expr_ty(expr).kind() else {
implementation_error!("closure expression is not closure");
};
let (captures, kind) = infer.closure_info(closure);
let mut ctx = MirLowerCtx::new(db, owner, &body, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer[*root] });
ctx.result.locals.alloc(Local { ty: infer.expr_ty(*root).store() });
let closure_local = ctx.result.locals.alloc(Local {
ty: match kind {
FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer[expr],
FnTrait::FnOnce | FnTrait::AsyncFnOnce => infer.expr_ty(expr),
FnTrait::FnMut | FnTrait::AsyncFnMut => Ty::new_ref(
ctx.interner(),
Region::error(ctx.interner()),
infer[expr],
infer.expr_ty(expr),
Mutability::Mut,
),
FnTrait::Fn | FnTrait::AsyncFn => Ty::new_ref(
ctx.interner(),
Region::error(ctx.interner()),
infer[expr],
infer.expr_ty(expr),
Mutability::Not,
),
},
}
.store(),
});
ctx.result.param_locals.push(closure_local);
let Some(sig) =
@ -2160,8 +2156,7 @@ pub fn mir_body_for_closure_query<'db>(
let current = ctx.pop_drop_scope_assert_finished(current, root.into())?;
ctx.set_terminator(current, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId<'db>, Vec<(&CapturedItem<'_>, usize)>> =
FxHashMap::default();
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
for (i, capture) in captures.iter().enumerate() {
let local = ctx.binding_local(capture.place.local)?;
upvar_map.entry(local).or_default().push((capture, i));
@ -2226,7 +2221,7 @@ pub fn mir_body_for_closure_query<'db>(
pub fn mir_body_query<'db>(
db: &'db dyn HirDatabase,
def: DefWithBodyId,
) -> Result<'db, Arc<MirBody<'db>>> {
) -> Result<'db, Arc<MirBody>> {
let krate = def.krate(db);
let edition = krate.data(db).edition;
let detail = match def {
@ -2263,7 +2258,7 @@ pub(crate) fn mir_body_cycle_result<'db>(
_db: &'db dyn HirDatabase,
_: salsa::Id,
_def: DefWithBodyId,
) -> Result<'db, Arc<MirBody<'db>>> {
) -> Result<'db, Arc<MirBody>> {
Err(MirLowerError::Loop)
}
@ -2271,17 +2266,17 @@ pub fn lower_to_mir<'db>(
db: &'db dyn HirDatabase,
owner: DefWithBodyId,
body: &Body,
infer: &InferenceResult<'db>,
infer: &InferenceResult,
// FIXME: root_expr should always be the body.body_expr, but since `X` in `[(); X]` doesn't have its own specific body yet, we
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<'db, MirBody<'db>> {
) -> Result<'db, MirBody> {
if infer.type_mismatches().next().is_some() || infer.is_erroneous() {
return Err(MirLowerError::HasErrors);
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr).store() });
let binding_picker = |b: BindingId| {
let owner = ctx.body.binding_owner(b);
if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }

View file

@ -20,8 +20,8 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_expr_to_some_place_without_adjust(
&mut self,
expr_id: ExprId,
prev_block: BasicBlockId<'db>,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
prev_block: BasicBlockId,
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) =
@ -35,12 +35,12 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_expr_to_some_place_with_adjust(
&mut self,
expr_id: ExprId,
prev_block: BasicBlockId<'db>,
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
prev_block: BasicBlockId,
adjustments: &[Adjustment],
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let ty = adjustments
.last()
.map(|it| it.target)
.map(|it| it.target.as_ref())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) =
@ -53,11 +53,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn lower_expr_as_place_with_adjust(
&mut self,
current: BasicBlockId<'db>,
current: BasicBlockId,
expr_id: ExprId,
upgrade_rvalue: bool,
adjustments: &[Adjustment<'db>],
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
adjustments: &[Adjustment],
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| {
if !upgrade_rvalue {
return Err(MirLowerError::MutatingRvalue);
@ -93,9 +93,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
current,
r,
rest.last()
.map(|it| it.target)
.map(|it| it.target.as_ref())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
last.target,
last.target.as_ref(),
expr_id.into(),
match od.0 {
Some(Mutability::Mut) => true,
@ -115,10 +115,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn lower_expr_as_place(
&mut self,
current: BasicBlockId<'db>,
current: BasicBlockId,
expr_id: ExprId,
upgrade_rvalue: bool,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
match self.infer.expr_adjustments.get(&expr_id) {
Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a),
None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue),
@ -127,10 +127,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn lower_expr_as_place_without_adjust(
&mut self,
current: BasicBlockId<'db>,
current: BasicBlockId,
expr_id: ExprId,
upgrade_rvalue: bool,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let try_rvalue = |this: &mut MirLowerCtx<'_, 'db>| {
if !upgrade_rvalue {
return Err(MirLowerError::MutatingRvalue);
@ -159,7 +159,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
ty,
Mutability::Not,
);
let temp: Place<'db> = self.temp(ref_ty, current, expr_id.into())?.into();
let temp: Place = self.temp(ref_ty, current, expr_id.into())?.into();
self.push_assignment(
current,
temp,
@ -279,21 +279,21 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_overloaded_index(
&mut self,
current: BasicBlockId<'db>,
place: Place<'db>,
current: BasicBlockId,
place: Place,
base_ty: Ty<'db>,
result_ty: Ty<'db>,
index_operand: Operand<'db>,
index_operand: Operand,
span: MirSpan,
index_fn: (FunctionId, GenericArgs<'db>),
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let mutability = match base_ty.as_reference() {
Some((_, _, mutability)) => mutability,
None => Mutability::Not,
};
let result_ref =
Ty::new_ref(self.interner(), Region::error(self.interner()), result_ty, mutability);
let mut result: Place<'db> = self.temp(result_ref, current, span)?.into();
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(Ty::new_fn_def(
self.interner(),
CallableDefId::FunctionId(index_fn.0).into(),
@ -316,13 +316,13 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn lower_overloaded_deref(
&mut self,
current: BasicBlockId<'db>,
place: Place<'db>,
current: BasicBlockId,
place: Place,
source_ty: Ty<'db>,
target_ty: Ty<'db>,
span: MirSpan,
mutability: bool,
) -> Result<'db, Option<(Place<'db>, BasicBlockId<'db>)>> {
) -> Result<'db, Option<(Place, BasicBlockId)>> {
let lang_items = self.lang_items();
let (mutability, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
(
@ -342,7 +342,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
let error_region = Region::error(self.interner());
let ty_ref = Ty::new_ref(self.interner(), error_region, source_ty, mutability);
let target_ty_ref = Ty::new_ref(self.interner(), error_region, target_ty, mutability);
let ref_place: Place<'db> = self.temp(ty_ref, current, span)?.into();
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
let deref_trait = trait_lang_item.ok_or(MirLowerError::LangItemNotFound)?;
let deref_fn = deref_trait
@ -352,9 +352,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
let deref_fn_op = Operand::const_zst(Ty::new_fn_def(
self.interner(),
CallableDefId::FunctionId(deref_fn).into(),
GenericArgs::new_from_iter(self.interner(), [source_ty.into()]),
GenericArgs::new_from_slice(&[source_ty.into()]),
));
let mut result: Place<'db> = self.temp(target_ty_ref, current, span)?.into();
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
let Some(current) = self.lower_call(
deref_fn_op,
Box::new([Operand { kind: OperandKind::Copy(ref_place), span: None }]),

View file

@ -1,7 +1,7 @@
//! MIR lowering for patterns
use hir_def::{hir::ExprId, signatures::VariantFields};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
use rustc_type_ir::inherent::{IntoKind, Ty as _};
use crate::{
BindingMode,
@ -63,11 +63,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
/// so it should be an empty block.
pub(super) fn pattern_match(
&mut self,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
cond_place: Place<'db>,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: Place,
pattern: PatId,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let (current, current_else) = self.pattern_match_inner(
current,
current_else,
@ -87,10 +87,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn pattern_match_assignment(
&mut self,
current: BasicBlockId<'db>,
value: Place<'db>,
current: BasicBlockId,
value: Place,
pattern: PatId,
) -> Result<'db, BasicBlockId<'db>> {
) -> Result<'db, BasicBlockId> {
let (current, _) =
self.pattern_match_inner(current, None, value, pattern, MatchingMode::Assign)?;
Ok(current)
@ -99,9 +99,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
pub(super) fn match_self_param(
&mut self,
id: BindingId,
current: BasicBlockId<'db>,
local: LocalId<'db>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
current: BasicBlockId,
local: LocalId,
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
self.pattern_match_binding(
id,
BindingMode::Move,
@ -114,12 +114,12 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_inner(
&mut self,
mut current: BasicBlockId<'db>,
mut current_else: Option<BasicBlockId<'db>>,
mut cond_place: Place<'db>,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
mut cond_place: Place,
pattern: PatId,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let cnt = self.infer.pat_adjustments.get(&pattern).map(|x| x.len()).unwrap_or_default();
cond_place.projection = self.result.projection_store.intern(
cond_place
@ -135,7 +135,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
let subst = match self.infer[pattern].kind() {
let subst = match self.infer.pat_ty(pattern).kind() {
TyKind::Tuple(s) => s,
_ => {
return Err(MirLowerError::TypeError(
@ -209,10 +209,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
}
Pat::Range { start, end, range_type: _ } => {
let mut add_check = |l: &ExprId, binop| -> Result<'db, ()> {
let lv = self.lower_literal_or_const_to_operand(self.infer[pattern], l)?;
let lv =
self.lower_literal_or_const_to_operand(self.infer.pat_ty(pattern), l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
let next = self.new_basic_block();
let discr: Place<'db> =
let discr: Place =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
@ -249,9 +250,9 @@ impl<'db> MirLowerCtx<'_, 'db> {
Pat::Slice { prefix, slice, suffix } => {
if mode == MatchingMode::Check {
// emit runtime length check for slice
if let TyKind::Slice(_) = self.infer[pattern].kind() {
if let TyKind::Slice(_) = self.infer.pat_ty(pattern).kind() {
let pattern_len = prefix.len() + suffix.len();
let place_len: Place<'db> = self
let place_len: Place = self
.temp(Ty::new_usize(self.interner()), current, pattern.into())?
.into();
self.push_assignment(
@ -285,7 +286,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
MemoryMap::default(),
Ty::new_usize(self.interner()),
);
let discr: Place<'db> = self
let discr: Place = self
.temp(Ty::new_bool(self.interner()), current, pattern.into())?
.into();
self.push_assignment(
@ -398,15 +399,15 @@ impl<'db> MirLowerCtx<'_, 'db> {
break 'b (c, x.1);
}
if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr {
break 'b (c, GenericArgs::new_from_iter(self.interner(), []));
break 'b (c, GenericArgs::empty(self.interner()));
}
not_supported!("path in pattern position that is not const or variant")
};
let tmp: Place<'db> =
self.temp(self.infer[pattern], current, pattern.into())?.into();
let tmp: Place =
self.temp(self.infer.pat_ty(pattern), current, pattern.into())?.into();
let span = pattern.into();
self.lower_const(c.into(), current, tmp, subst, span)?;
let tmp2: Place<'db> =
let tmp2: Place =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
@ -434,7 +435,7 @@ impl<'db> MirLowerCtx<'_, 'db> {
Pat::Lit(l) => match &self.body[*l] {
Expr::Literal(l) => {
if mode == MatchingMode::Check {
let c = self.lower_literal_to_operand(self.infer[pattern], l)?;
let c = self.lower_literal_to_operand(self.infer.pat_ty(pattern), l)?;
self.pattern_match_const(current_else, current, c, cond_place, pattern)?
} else {
(current, current_else)
@ -506,11 +507,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
&mut self,
id: BindingId,
mode: BindingMode,
cond_place: Place<'db>,
cond_place: Place,
span: MirSpan,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let target_place = self.binding_local(id)?;
self.push_storage_live(id, current)?;
self.push_match_assignment(current, target_place, mode, cond_place, span);
@ -519,10 +520,10 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn push_match_assignment(
&mut self,
current: BasicBlockId<'db>,
target_place: LocalId<'db>,
current: BasicBlockId,
target_place: LocalId,
mode: BindingMode,
cond_place: Place<'db>,
cond_place: Place,
span: MirSpan,
) {
self.push_assignment(
@ -545,15 +546,15 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_const(
&mut self,
current_else: Option<BasicBlockId<'db>>,
current: BasicBlockId<'db>,
c: Operand<'db>,
cond_place: Place<'db>,
current_else: Option<BasicBlockId>,
current: BasicBlockId,
c: Operand,
cond_place: Place,
pattern: Idx<Pat>,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let then_target = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
let discr: Place<'db> =
let discr: Place =
self.temp(Ty::new_bool(self.interner()), current, pattern.into())?.into();
self.push_assignment(
current,
@ -579,14 +580,14 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_matching_variant(
&mut self,
cond_place: Place<'db>,
cond_place: Place,
variant: VariantId,
mut current: BasicBlockId<'db>,
mut current: BasicBlockId,
span: MirSpan,
mut current_else: Option<BasicBlockId<'db>>,
mut current_else: Option<BasicBlockId>,
shape: AdtPatternShape<'_>,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
Ok(match variant {
VariantId::EnumVariantId(v) => {
if mode == MatchingMode::Check {
@ -635,11 +636,11 @@ impl<'db> MirLowerCtx<'_, 'db> {
shape: AdtPatternShape<'_>,
variant_data: &VariantFields,
v: VariantId,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
cond_place: &Place<'db>,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
cond_place: &Place,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
Ok(match shape {
AdtPatternShape::Record { args } => {
let it = args
@ -678,12 +679,12 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_adt(
&mut self,
mut current: BasicBlockId<'db>,
mut current_else: Option<BasicBlockId<'db>>,
args: impl Iterator<Item = (PlaceElem<'db>, PatId)>,
cond_place: &Place<'db>,
mut current: BasicBlockId,
mut current_else: Option<BasicBlockId>,
args: impl Iterator<Item = (PlaceElem, PatId)>,
cond_place: &Place,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
for (proj, arg) in args {
let cond_place = cond_place.project(proj, &mut self.result.projection_store);
(current, current_else) =
@ -694,14 +695,14 @@ impl<'db> MirLowerCtx<'_, 'db> {
fn pattern_match_tuple_like(
&mut self,
current: BasicBlockId<'db>,
current_else: Option<BasicBlockId<'db>>,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
args: &[PatId],
ellipsis: Option<u32>,
fields: impl DoubleEndedIterator<Item = PlaceElem<'db>> + Clone,
cond_place: &Place<'db>,
fields: impl DoubleEndedIterator<Item = PlaceElem> + Clone,
cond_place: &Place,
mode: MatchingMode,
) -> Result<'db, (BasicBlockId<'db>, Option<BasicBlockId<'db>>)> {
) -> Result<'db, (BasicBlockId, Option<BasicBlockId>)> {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let it = al
.iter()

View file

@ -8,7 +8,7 @@
//! So the monomorphization should be called even if the substitution is empty.
use hir_def::DefWithBodyId;
use rustc_type_ir::inherent::{IntoKind, SliceLike};
use rustc_type_ir::inherent::IntoKind;
use rustc_type_ir::{
FallibleTypeFolder, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeVisitableExt,
};
@ -16,7 +16,8 @@ use triomphe::Arc;
use crate::{
ParamEnvAndCrate,
next_solver::{Const, ConstKind, Region, RegionKind},
next_solver::{Const, ConstKind, Region, RegionKind, StoredConst, StoredGenericArgs, StoredTy},
traits::StoredParamEnvAndCrate,
};
use crate::{
db::{HirDatabase, InternedClosureId},
@ -37,7 +38,7 @@ struct Filler<'db> {
}
impl<'db> FallibleTypeFolder<DbInterner<'db>> for Filler<'db> {
type Error = MirLowerError<'db>;
type Error = MirLowerError;
fn cx(&self) -> DbInterner<'db> {
self.infcx.interner
@ -69,7 +70,7 @@ impl<'db> FallibleTypeFolder<DbInterner<'db>> for Filler<'db> {
.get(param.index as usize)
.and_then(|arg| arg.ty())
.ok_or_else(|| {
MirLowerError::GenericArgNotProvided(param.id.into(), self.subst)
MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store())
})?),
_ => ty.try_super_fold_with(self),
}
@ -79,22 +80,18 @@ impl<'db> FallibleTypeFolder<DbInterner<'db>> for Filler<'db> {
let ConstKind::Param(param) = ct.kind() else {
return ct.try_super_fold_with(self);
};
self.subst
.as_slice()
.get(param.index as usize)
.and_then(|arg| arg.konst())
.ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst))
self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.konst()).ok_or_else(
|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()),
)
}
fn try_fold_region(&mut self, region: Region<'db>) -> Result<Region<'db>, Self::Error> {
let RegionKind::ReEarlyParam(param) = region.kind() else {
return Ok(region);
};
self.subst
.as_slice()
.get(param.index as usize)
.and_then(|arg| arg.region())
.ok_or_else(|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst))
self.subst.as_slice().get(param.index as usize).and_then(|arg| arg.region()).ok_or_else(
|| MirLowerError::GenericArgNotProvided(param.id.into(), self.subst.store()),
)
}
}
@ -105,33 +102,50 @@ impl<'db> Filler<'db> {
Self { infcx, trait_env: env, subst }
}
fn fill<T: TypeFoldable<DbInterner<'db>> + Copy>(
&mut self,
t: &mut T,
) -> Result<(), MirLowerError<'db>> {
fn fill_ty(&mut self, t: &mut StoredTy) -> Result<(), MirLowerError> {
// Can't deep normalized as that'll try to normalize consts and fail.
*t = t.try_fold_with(self)?;
if references_non_lt_error(t) {
*t = t.as_ref().try_fold_with(self)?.store();
if references_non_lt_error(&t.as_ref()) {
Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned()))
} else {
Ok(())
}
}
fn fill_operand(&mut self, op: &mut Operand<'db>) -> Result<(), MirLowerError<'db>> {
fn fill_const(&mut self, t: &mut StoredConst) -> Result<(), MirLowerError> {
// Can't deep normalized as that'll try to normalize consts and fail.
*t = t.as_ref().try_fold_with(self)?.store();
if references_non_lt_error(&t.as_ref()) {
Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned()))
} else {
Ok(())
}
}
fn fill_args(&mut self, t: &mut StoredGenericArgs) -> Result<(), MirLowerError> {
// Can't deep normalized as that'll try to normalize consts and fail.
*t = t.as_ref().try_fold_with(self)?.store();
if references_non_lt_error(&t.as_ref()) {
Err(MirLowerError::NotSupported("monomorphization resulted in errors".to_owned()))
} else {
Ok(())
}
}
fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> {
match &mut op.kind {
OperandKind::Constant { konst, ty } => {
self.fill(konst)?;
self.fill(ty)?;
self.fill_const(konst)?;
self.fill_ty(ty)?;
}
OperandKind::Copy(_) | OperandKind::Move(_) | OperandKind::Static(_) => (),
}
Ok(())
}
fn fill_body(&mut self, body: &mut MirBody<'db>) -> Result<(), MirLowerError<'db>> {
fn fill_body(&mut self, body: &mut MirBody) -> Result<(), MirLowerError> {
for (_, l) in body.locals.iter_mut() {
self.fill(&mut l.ty)?;
self.fill_ty(&mut l.ty)?;
}
for (_, bb) in body.basic_blocks.iter_mut() {
for statement in &mut bb.statements {
@ -144,20 +158,20 @@ impl<'db> Filler<'db> {
match ak {
super::AggregateKind::Array(ty)
| super::AggregateKind::Tuple(ty)
| super::AggregateKind::Closure(ty) => self.fill(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill(subst)?,
| super::AggregateKind::Closure(ty) => self.fill_ty(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill_args(subst)?,
super::AggregateKind::Union(_, _) => (),
}
}
Rvalue::ShallowInitBox(_, ty) | Rvalue::ShallowInitBoxWithAlloc(ty) => {
self.fill(ty)?;
self.fill_ty(ty)?;
}
Rvalue::Use(op) => {
self.fill_operand(op)?;
}
Rvalue::Repeat(op, len) => {
self.fill_operand(op)?;
self.fill(len)?;
self.fill_const(len)?;
}
Rvalue::Ref(_, _)
| Rvalue::Len(_)
@ -208,36 +222,36 @@ impl<'db> Filler<'db> {
}
}
pub fn monomorphized_mir_body_query<'db>(
db: &'db dyn HirDatabase,
pub fn monomorphized_mir_body_query(
db: &dyn HirDatabase,
owner: DefWithBodyId,
subst: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
let mut filler = Filler::new(db, trait_env, subst);
subst: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError> {
let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref());
let body = db.mir_body(owner)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;
Ok(Arc::new(body))
}
pub(crate) fn monomorphized_mir_body_cycle_result<'db>(
_db: &'db dyn HirDatabase,
pub(crate) fn monomorphized_mir_body_cycle_result(
_db: &dyn HirDatabase,
_: salsa::Id,
_: DefWithBodyId,
_: GenericArgs<'db>,
_: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
_: StoredGenericArgs,
_: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError> {
Err(MirLowerError::Loop)
}
pub fn monomorphized_mir_body_for_closure_query<'db>(
db: &'db dyn HirDatabase,
pub fn monomorphized_mir_body_for_closure_query(
db: &dyn HirDatabase,
closure: InternedClosureId,
subst: GenericArgs<'db>,
trait_env: ParamEnvAndCrate<'db>,
) -> Result<Arc<MirBody<'db>>, MirLowerError<'db>> {
let mut filler = Filler::new(db, trait_env, subst);
subst: StoredGenericArgs,
trait_env: StoredParamEnvAndCrate,
) -> Result<Arc<MirBody>, MirLowerError> {
let mut filler = Filler::new(db, trait_env.as_ref(), subst.as_ref());
let body = db.mir_body_for_closure(closure)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;

View file

@ -36,8 +36,8 @@ macro_rules! wln {
};
}
impl<'db> MirBody<'db> {
pub fn pretty_print(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> String {
impl MirBody {
pub fn pretty_print(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> String {
let hir_body = db.body(self.owner);
let mut ctx = MirPrettyCtx::new(self, &hir_body, db, display_target);
ctx.for_body(|this| match ctx.body.owner {
@ -80,7 +80,7 @@ impl<'db> MirBody<'db> {
// String with lines is rendered poorly in `dbg` macros, which I use very much, so this
// function exists to solve that.
pub fn dbg(&self, db: &'db dyn HirDatabase, display_target: DisplayTarget) -> impl Debug {
pub fn dbg(&self, db: &dyn HirDatabase, display_target: DisplayTarget) -> impl Debug {
struct StringDbg(String);
impl Debug for StringDbg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@ -92,12 +92,12 @@ impl<'db> MirBody<'db> {
}
struct MirPrettyCtx<'a, 'db> {
body: &'a MirBody<'db>,
body: &'a MirBody,
hir_body: &'a Body,
db: &'db dyn HirDatabase,
result: String,
indent: String,
local_to_binding: ArenaMap<LocalId<'db>, BindingId>,
local_to_binding: ArenaMap<LocalId, BindingId>,
display_target: DisplayTarget,
}
@ -113,12 +113,12 @@ impl Write for MirPrettyCtx<'_, '_> {
}
}
enum LocalName<'db> {
Unknown(LocalId<'db>),
Binding(Name, LocalId<'db>),
enum LocalName {
Unknown(LocalId),
Binding(Name, LocalId),
}
impl<'db> HirDisplay<'db> for LocalName<'db> {
impl<'db> HirDisplay<'db> for LocalName {
fn hir_fmt(
&self,
f: &mut crate::display::HirFormatter<'_, 'db>,
@ -179,7 +179,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
}
fn new(
body: &'a MirBody<'db>,
body: &'a MirBody,
hir_body: &'a Body,
db: &'db dyn HirDatabase,
display_target: DisplayTarget,
@ -211,19 +211,19 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
self,
"let {}: {};",
self.local_name(id).display_test(self.db, self.display_target),
self.hir_display(&local.ty)
self.hir_display(&local.ty.as_ref())
);
}
}
fn local_name(&self, local: LocalId<'db>) -> LocalName<'db> {
fn local_name(&self, local: LocalId) -> LocalName {
match self.local_to_binding.get(local) {
Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local),
None => LocalName::Unknown(local),
}
}
fn basic_block_id(&self, basic_block_id: BasicBlockId<'db>) -> String {
fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String {
format!("'bb{}", u32::from(basic_block_id.into_raw()))
}
@ -311,12 +311,8 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
}
}
fn place(&mut self, p: &Place<'db>) {
fn f<'db>(
this: &mut MirPrettyCtx<'_, 'db>,
local: LocalId<'db>,
projections: &[PlaceElem<'db>],
) {
fn place(&mut self, p: &Place) {
fn f<'db>(this: &mut MirPrettyCtx<'_, 'db>, local: LocalId, projections: &[PlaceElem]) {
let Some((last, head)) = projections.split_last() else {
// no projection
w!(this, "{}", this.local_name(local).display_test(this.db, this.display_target));
@ -376,19 +372,21 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
f(self, p.local, p.projection.lookup(&self.body.projection_store));
}
fn operand(&mut self, r: &Operand<'db>) {
fn operand(&mut self, r: &Operand) {
match &r.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
// MIR at the time of writing doesn't have difference between move and copy, so we show them
// equally. Feel free to change it.
self.place(p);
}
OperandKind::Constant { konst, .. } => w!(self, "Const({})", self.hir_display(konst)),
OperandKind::Constant { konst, .. } => {
w!(self, "Const({})", self.hir_display(&konst.as_ref()))
}
OperandKind::Static(s) => w!(self, "Static({:?})", s),
}
}
fn rvalue(&mut self, r: &Rvalue<'db>) {
fn rvalue(&mut self, r: &Rvalue) {
match r {
Rvalue::Use(op) => self.operand(op),
Rvalue::Ref(r, p) => {
@ -415,7 +413,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
Rvalue::Repeat(op, len) => {
w!(self, "[");
self.operand(op);
w!(self, "; {}]", len.display_test(self.db, self.display_target));
w!(self, "; {}]", len.as_ref().display_test(self.db, self.display_target));
}
Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
w!(self, "Adt(");
@ -440,7 +438,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
Rvalue::Cast(ck, op, ty) => {
w!(self, "Cast({ck:?}, ");
self.operand(op);
w!(self, ", {})", self.hir_display(ty));
w!(self, ", {})", self.hir_display(&ty.as_ref()));
}
Rvalue::CheckedBinaryOp(b, o1, o2) => {
self.operand(o1);
@ -478,7 +476,7 @@ impl<'a, 'db> MirPrettyCtx<'a, 'db> {
}
}
fn operand_list(&mut self, it: &[Operand<'db>]) {
fn operand_list(&mut self, it: &[Operand]) {
let mut it = it.iter();
if let Some(first) = it.next() {
self.operand(first);

View file

@ -1,6 +1,11 @@
//! Things relevant to the next trait solver.
// Note: in interned types defined in this module, we generally treat the lifetime as advisory
// and transmute it as needed. This is because no real memory unsafety can be caused from an
// incorrect lifetime here.
pub mod abi;
mod binder;
mod consts;
mod def_id;
pub mod fold;
@ -21,6 +26,7 @@ mod structural_normalize;
mod ty;
pub mod util;
pub use binder::*;
pub use consts::*;
pub use def_id::*;
pub use generic_arg::*;

View file

@ -0,0 +1,83 @@
use crate::{
FnAbi,
next_solver::{
Binder, Clauses, EarlyBinder, FnSig, PolyFnSig, StoredBoundVarKinds, StoredClauses,
StoredTy, StoredTys, Ty, abi::Safety,
},
};
/// Early-binder wrapper for *stored* (lifetime-erased, refcounted) solver values.
///
/// Mirrors `EarlyBinder`, but holds the `Stored*` representation of a value so it can
/// outlive the `'db` database lifetime (e.g. inside Salsa query results). `get_with`
/// rebinds the contents into a live `EarlyBinder` for use with the solver.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StoredEarlyBinder<T>(T);
impl<T> StoredEarlyBinder<T> {
    /// Wraps `value`, marking (by convention — this is not checked) that it may contain
    /// unsubstituted early-bound generic parameters.
    #[inline]
    pub fn bind(value: T) -> Self {
        Self(value)
    }
    /// Returns the wrapped value without substituting its generic parameters.
    #[inline]
    pub fn skip_binder(self) -> T {
        self.0
    }
    /// Borrows the contents while keeping the binder wrapper.
    #[inline]
    pub fn as_ref(&self) -> StoredEarlyBinder<&T> {
        StoredEarlyBinder(&self.0)
    }
    /// Converts the stored contents back into a live `EarlyBinder<'db, R>` by applying
    /// `f` — typically a `Stored*` → `*<'db>` conversion such as `as_ref()`.
    #[inline]
    pub fn get_with<'db, 'a, R>(&'a self, f: impl FnOnce(&'a T) -> R) -> EarlyBinder<'db, R> {
        EarlyBinder::bind(f(&self.0))
    }
}
impl StoredEarlyBinder<StoredTy> {
    /// Rebinds the stored type as a live `EarlyBinder` over a `'db`-lifetime `Ty`.
    #[inline]
    pub fn get<'db>(&self) -> EarlyBinder<'db, Ty<'db>> {
        self.get_with(|it| it.as_ref())
    }
}
impl StoredEarlyBinder<StoredClauses> {
    /// Rebinds the stored clauses as a live `EarlyBinder` over `'db`-lifetime `Clauses`.
    #[inline]
    pub fn get<'db>(&self) -> EarlyBinder<'db, Clauses<'db>> {
        self.get_with(|it| it.as_ref())
    }
}
/// Stored (lifetime-erased) form of [`PolyFnSig`].
///
/// The binder's bound vars and the signature's component parts are kept as separate
/// `Stored*` pieces so the whole struct is free of the `'db` lifetime and can be
/// cached; `get()` reassembles them into a live `PolyFnSig`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StoredPolyFnSig {
    // Bound variables of the binder (late-bound lifetimes etc.).
    bound_vars: StoredBoundVarKinds,
    // All input types followed by the output type, as in `FnSig`.
    inputs_and_output: StoredTys,
    // Whether the function is C-variadic (`...`).
    c_variadic: bool,
    safety: Safety,
    abi: FnAbi,
}
impl StoredPolyFnSig {
    /// Decomposes `sig` into its stored (lifetime-erased) parts.
    #[inline]
    pub fn new(sig: PolyFnSig<'_>) -> Self {
        // Capture the binder's vars before peeling it off.
        let bound_vars = sig.bound_vars().store();
        let sig = sig.skip_binder();
        Self {
            bound_vars,
            inputs_and_output: sig.inputs_and_output.store(),
            c_variadic: sig.c_variadic,
            safety: sig.safety,
            abi: sig.abi,
        }
    }
    /// Reassembles the stored parts into a live `PolyFnSig` borrowing from `self`.
    #[inline]
    pub fn get(&self) -> PolyFnSig<'_> {
        Binder::bind_with_vars(
            FnSig {
                inputs_and_output: self.inputs_and_output.as_ref(),
                c_variadic: self.c_variadic,
                safety: self.safety,
                abi: self.abi,
            },
            self.bound_vars.as_ref(),
        )
    }
}

View file

@ -3,19 +3,20 @@
use std::hash::Hash;
use hir_def::ConstParamId;
use macros::{TypeFoldable, TypeVisitable};
use intern::{Interned, InternedRef, impl_internable};
use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable};
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags, InferConst,
TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
WithCachedTypeInfo,
BoundVar, BoundVarIndexKind, ConstVid, DebruijnIndex, FlagComputation, Flags,
GenericTypeVisitable, InferConst, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable,
TypeVisitable, TypeVisitableExt, WithCachedTypeInfo,
inherent::{IntoKind, ParamEnv as _, PlaceholderLike, SliceLike},
relate::Relate,
};
use crate::{
MemoryMap,
next_solver::{ClauseKind, ParamEnv, interner::InternedWrapperNoDebug},
next_solver::{ClauseKind, ParamEnv, impl_stored_interned},
};
use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder, Ty};
@ -23,30 +24,43 @@ use super::{BoundVarKind, DbInterner, ErrorGuaranteed, GenericArgs, Placeholder,
pub type ConstKind<'db> = rustc_type_ir::ConstKind<DbInterner<'db>>;
pub type UnevaluatedConst<'db> = rustc_type_ir::UnevaluatedConst<DbInterner<'db>>;
#[salsa::interned(constructor = new_, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Const<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<WithCachedTypeInfo<ConstKind<'db>>>,
pub(super) interned: InternedRef<'db, ConstInterned>,
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
#[repr(align(4))] // Required for `GenericArg` bit-tagging.
pub(super) struct ConstInterned(pub(super) WithCachedTypeInfo<ConstKind<'static>>);
impl_internable!(gc; ConstInterned);
impl_stored_interned!(ConstInterned, Const, StoredConst);
const _: () = {
const fn is_copy<T: Copy>() {}
is_copy::<Const<'static>>();
};
impl<'db> Const<'db> {
pub fn new(interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self {
pub fn new(_interner: DbInterner<'db>, kind: ConstKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<ConstKind<'db>, ConstKind<'static>>(kind) };
let flags = FlagComputation::for_const_kind(&kind);
let cached = WithCachedTypeInfo {
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
Const::new_(interner.db(), InternedWrapperNoDebug(cached))
Self { interned: Interned::new_gc(ConstInterned(cached)) }
}
pub fn inner(&self) -> &WithCachedTypeInfo<ConstKind<'db>> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Const<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<
&WithCachedTypeInfo<ConstKind<'static>>,
&WithCachedTypeInfo<ConstKind<'db>>,
>(inner)
}
}
pub fn error(interner: DbInterner<'db>) -> Self {
@ -106,12 +120,6 @@ impl<'db> std::fmt::Debug for Const<'db> {
}
}
impl<'db> std::fmt::Debug for InternedWrapperNoDebug<WithCachedTypeInfo<ConstKind<'db>>> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.internee.fmt(f)
}
}
pub type PlaceholderConst = Placeholder<BoundConst>;
#[derive(Copy, Clone, Hash, Eq, PartialEq)]
@ -164,7 +172,9 @@ impl ParamConst {
/// A type-level constant value.
///
/// Represents a typed, fully evaluated constant.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable)]
#[derive(
Debug, Copy, Clone, Eq, PartialEq, Hash, TypeFoldable, TypeVisitable, GenericTypeVisitable,
)]
pub struct ValueConst<'db> {
pub ty: Ty<'db>,
// FIXME: Should we ignore this for TypeVisitable, TypeFoldable?
@ -190,7 +200,7 @@ impl<'db> rustc_type_ir::inherent::ValueConst<DbInterner<'db>> for ValueConst<'d
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct ConstBytes<'db> {
pub memory: Box<[u8]>,
pub memory_map: MemoryMap<'db>,
@ -202,31 +212,52 @@ impl Hash for ConstBytes<'_> {
}
}
#[salsa::interned(constructor = new_, debug, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Valtree<'db> {
#[returns(ref)]
bytes_: ConstBytes<'db>,
interned: InternedRef<'db, ValtreeInterned>,
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Valtree<'db> {
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned(self.interned).is_continue() {
self.inner().generic_visit_with(visitor);
}
}
}
#[derive(Debug, PartialEq, Eq, Hash, GenericTypeVisitable)]
pub(super) struct ValtreeInterned(ConstBytes<'static>);
impl_internable!(gc; ValtreeInterned);
const _: () = {
const fn is_copy<T: Copy>() {}
is_copy::<Valtree<'static>>();
};
impl<'db> Valtree<'db> {
#[inline]
pub fn new(bytes: ConstBytes<'db>) -> Self {
crate::with_attached_db(|db| unsafe {
// SAFETY: ¯\_(ツ)_/¯
std::mem::transmute(Valtree::new_(db, bytes))
})
let bytes = unsafe { std::mem::transmute::<ConstBytes<'db>, ConstBytes<'static>>(bytes) };
Self { interned: Interned::new_gc(ValtreeInterned(bytes)) }
}
#[inline]
pub fn inner(&self) -> &ConstBytes<'db> {
crate::with_attached_db(|db| {
let inner = self.bytes_(db);
// SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe { std::mem::transmute::<&ConstBytes<'static>, &ConstBytes<'db>>(inner) }
}
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)]
impl std::fmt::Debug for Valtree<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.interned.fmt(f)
}
}
#[derive(
Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable,
)]
pub struct ExprConst;
impl rustc_type_ir::inherent::ParamLike for ParamConst {
@ -243,6 +274,14 @@ impl<'db> IntoKind for Const<'db> {
}
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Const<'db> {
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned(self.interned).is_continue() {
self.kind().generic_visit_with(visitor);
}
}
}
impl<'db> TypeVisitable<DbInterner<'db>> for Const<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,

View file

@ -249,7 +249,7 @@ impl<'db> FulfillmentCtxt<'db> {
| TypingMode::PostBorrowckAnalysis { defined_opaque_types: _ }
| TypingMode::PostAnalysis => return Default::default(),
};
let stalled_coroutines = stalled_coroutines.inner();
let stalled_coroutines = stalled_coroutines.as_slice();
if stalled_coroutines.is_empty() {
return Default::default();

View file

@ -9,7 +9,7 @@ use rustc_next_trait_solver::solve::{GoalEvaluation, SolverDelegateEvalExt};
use rustc_type_ir::{
AliasRelationDirection, AliasTermKind, HostEffectPredicate, Interner, PredicatePolarity,
error::ExpectedFound,
inherent::{IntoKind, SliceLike, Span as _},
inherent::{IntoKind, Span as _},
lang_items::SolverTraitLangItem,
solve::{Certainty, GoalSource, MaybeCause, NoSolution},
};

View file

@ -1,41 +1,226 @@
//! Things related to generic args in the next-trait-solver.
//! Things related to generic args in the next-trait-solver (`GenericArg`, `GenericArgs`, `Term`).
//!
//! Implementations of `GenericArg` and `Term` are pointer-tagged instead of an enum (rustc does
//! the same). This is done to save memory (which also helps speed) - one `GenericArg` is a machine
//! word instead of two, while matching on it is basically as cheap. The implementation for both
//! `GenericArg` and `Term` is shared in [`GenericArgImpl`]. This both simplifies the implementation,
//! as well as enables a noop conversion from `Term` to `GenericArg`.
use std::{hint::unreachable_unchecked, marker::PhantomData, ptr::NonNull};
use hir_def::{GenericDefId, GenericParamId};
use macros::{TypeFoldable, TypeVisitable};
use intern::InternedRef;
use rustc_type_ir::{
ClosureArgs, CollectAndApply, ConstVid, CoroutineArgs, CoroutineClosureArgs, FnSigTys,
GenericArgKind, Interner, TermKind, TyKind, TyVid, Variance,
ClosureArgs, ConstVid, CoroutineArgs, CoroutineClosureArgs, FallibleTypeFolder, FnSigTys,
GenericTypeVisitable, Interner, TyKind, TyVid, TypeFoldable, TypeFolder, TypeVisitable,
TypeVisitor, Variance,
inherent::{GenericArg as _, GenericsOf, IntoKind, SliceLike, Term as _, Ty as _},
relate::{Relate, VarianceDiagInfo},
walk::TypeWalker,
};
use smallvec::SmallVec;
use crate::next_solver::{PolyFnSig, interned_vec_db};
use crate::next_solver::{
ConstInterned, PolyFnSig, RegionInterned, TyInterned, impl_foldable_for_interned_slice,
impl_stored_interned_slice, interned_slice,
};
use super::{
Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, ParamConst, Region, SolverDefId, Ty, Tys,
generics::Generics,
};
#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, salsa::Supertype)]
pub enum GenericArg<'db> {
Ty(Ty<'db>),
Lifetime(Region<'db>),
Const(Const<'db>),
pub type GenericArgKind<'db> = rustc_type_ir::GenericArgKind<DbInterner<'db>>;
pub type TermKind<'db> = rustc_type_ir::TermKind<DbInterner<'db>>;
/// Shared pointer-tagged representation backing both [`GenericArg`] and [`Term`].
///
/// A single machine word: the pointer of an `InternedRef` with its kind encoded in the
/// two low alignment bits (see the `*_TAG` consts in the `impl` below).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct GenericArgImpl<'db> {
    /// # Invariant
    ///
    /// Contains an [`InternedRef`] of a [`Ty`], [`Const`] or [`Region`], bit-tagged as per the consts below.
    ptr: NonNull<()>,
    // Ties the erased pointer to the `'db` lifetime and to the owned-variant types
    // for variance/drop-check purposes; no data is stored here.
    _marker: PhantomData<(Ty<'db>, Const<'db>, Region<'db>)>,
}
// SAFETY: We essentially own the `Ty`, `Const` or `Region`, and they are `Send + Sync`.
unsafe impl Send for GenericArgImpl<'_> {}
unsafe impl Sync for GenericArgImpl<'_> {}
impl<'db> GenericArgImpl<'db> {
    // Tagging scheme: the interned payloads are at least 4-byte aligned (see the
    // `#[repr(align(4))]` on the `*Interned` types), so the two low bits of the
    // pointer are free to carry the kind.
    const KIND_MASK: usize = 0b11;
    const PTR_MASK: usize = !Self::KIND_MASK;
    const TY_TAG: usize = 0b00;
    const CONST_TAG: usize = 0b01;
    const REGION_TAG: usize = 0b10;
    /// Tags a `Ty`'s interned pointer with `TY_TAG`. Does not touch refcounts.
    #[inline]
    fn new_ty(ty: Ty<'db>) -> Self {
        Self {
            // SAFETY: We create it from an `InternedRef`, and it's never null.
            ptr: unsafe {
                NonNull::new_unchecked(
                    ty.interned
                        .as_raw()
                        .cast::<()>()
                        .cast_mut()
                        .map_addr(|addr| addr | Self::TY_TAG),
                )
            },
            _marker: PhantomData,
        }
    }
    /// Tags a `Const`'s interned pointer with `CONST_TAG`. Does not touch refcounts.
    // NOTE(review): the parameter is named `ty` but holds a `Const`.
    #[inline]
    fn new_const(ty: Const<'db>) -> Self {
        Self {
            // SAFETY: We create it from an `InternedRef`, and it's never null.
            ptr: unsafe {
                NonNull::new_unchecked(
                    ty.interned
                        .as_raw()
                        .cast::<()>()
                        .cast_mut()
                        .map_addr(|addr| addr | Self::CONST_TAG),
                )
            },
            _marker: PhantomData,
        }
    }
    /// Tags a `Region`'s interned pointer with `REGION_TAG`. Does not touch refcounts.
    // NOTE(review): the parameter is named `ty` but holds a `Region`.
    #[inline]
    fn new_region(ty: Region<'db>) -> Self {
        Self {
            // SAFETY: We create it from an `InternedRef`, and it's never null.
            ptr: unsafe {
                NonNull::new_unchecked(
                    ty.interned
                        .as_raw()
                        .cast::<()>()
                        .cast_mut()
                        .map_addr(|addr| addr | Self::REGION_TAG),
                )
            },
            _marker: PhantomData,
        }
    }
    /// Decodes the tag and reconstructs the borrowed `Ty`/`Const`/`Region`.
    #[inline]
    fn kind(self) -> GenericArgKind<'db> {
        // Strip the tag bits to recover the untagged interned pointer.
        let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK);
        // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match.
        unsafe {
            match self.ptr.addr().get() & Self::KIND_MASK {
                Self::TY_TAG => GenericArgKind::Type(Ty {
                    interned: InternedRef::from_raw(ptr.cast::<TyInterned>()),
                }),
                Self::CONST_TAG => GenericArgKind::Const(Const {
                    interned: InternedRef::from_raw(ptr.cast::<ConstInterned>()),
                }),
                Self::REGION_TAG => GenericArgKind::Lifetime(Region {
                    interned: InternedRef::from_raw(ptr.cast::<RegionInterned>()),
                }),
                _ => unreachable_unchecked(),
            }
        }
    }
    /// Like [`Self::kind`], but for values known to be a `Term` (never a region).
    #[inline]
    fn term_kind(self) -> TermKind<'db> {
        let ptr = self.ptr.as_ptr().map_addr(|addr| addr & Self::PTR_MASK);
        // SAFETY: We can only be created from a `Ty`, a `Const` or a `Region`, and the tag will match.
        // It is the caller's responsibility (encapsulated within this module) to only call this with
        // `Term`, which cannot be constructed from a `Region`.
        unsafe {
            match self.ptr.addr().get() & Self::KIND_MASK {
                Self::TY_TAG => {
                    TermKind::Ty(Ty { interned: InternedRef::from_raw(ptr.cast::<TyInterned>()) })
                }
                Self::CONST_TAG => TermKind::Const(Const {
                    interned: InternedRef::from_raw(ptr.cast::<ConstInterned>()),
                }),
                _ => unreachable_unchecked(),
            }
        }
    }
}
/// Owning (refcounted) counterpart of [`GenericArg`], safe to keep beyond `'db`.
///
/// Holds one strong reference to the underlying interned value; `Clone` bumps that
/// refcount and `Drop` releases it, so the GC for interned solver types sees an
/// accurate count.
#[derive(PartialEq, Eq, Hash)]
pub struct StoredGenericArg {
    ptr: GenericArgImpl<'static>,
}
impl Clone for StoredGenericArg {
    #[inline]
    fn clone(&self) -> Self {
        // `to_owned()` increments the refcount; `forget` keeps that increment alive
        // so the returned copy owns it (the copy shares `self.ptr` bit-for-bit).
        match self.ptr.kind() {
            GenericArgKind::Lifetime(it) => std::mem::forget(it.interned.to_owned()),
            GenericArgKind::Type(it) => std::mem::forget(it.interned.to_owned()),
            GenericArgKind::Const(it) => std::mem::forget(it.interned.to_owned()),
        }
        Self { ptr: self.ptr }
    }
}
impl Drop for StoredGenericArg {
    #[inline]
    fn drop(&mut self) {
        // Release the strong reference taken in `new`/`clone`.
        unsafe {
            match self.ptr.kind() {
                GenericArgKind::Lifetime(it) => it.interned.decrement_refcount(),
                GenericArgKind::Type(it) => it.interned.decrement_refcount(),
                GenericArgKind::Const(it) => it.interned.decrement_refcount(),
            }
        }
    }
}
impl StoredGenericArg {
    /// Takes a strong reference to `value`'s payload and stores its tagged pointer.
    #[inline]
    fn new(value: GenericArg<'_>) -> Self {
        let result = Self { ptr: GenericArgImpl { ptr: value.ptr.ptr, _marker: PhantomData } };
        // Increase refcount: forgetting the clone leaks its +1, which `result` then owns.
        std::mem::forget(result.clone());
        result
    }
    /// Borrows back a lightweight `GenericArg`; the chosen `'db` is advisory
    /// (see the module note in `next_solver`) and bounded by `&self`'s borrow.
    #[inline]
    pub fn as_ref<'db>(&self) -> GenericArg<'db> {
        GenericArg { ptr: self.ptr }
    }
}
impl std::fmt::Debug for StoredGenericArg {
    // Delegates to the borrowed form's `Debug`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.as_ref().fmt(f)
    }
}
/// A generic argument (type, lifetime, or const), one machine word in size.
///
/// Non-owning and `Copy`; construct via the `From` impls for `Ty`/`Region`/`Const`
/// and inspect via [`GenericArg::kind`]. Use [`GenericArg::store`] for an owning,
/// refcounted [`StoredGenericArg`].
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct GenericArg<'db> {
    ptr: GenericArgImpl<'db>,
}
impl<'db> std::fmt::Debug for GenericArg<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Ty(t) => std::fmt::Debug::fmt(t, f),
Self::Lifetime(r) => std::fmt::Debug::fmt(r, f),
Self::Const(c) => std::fmt::Debug::fmt(c, f),
match self.kind() {
GenericArgKind::Type(t) => std::fmt::Debug::fmt(&t, f),
GenericArgKind::Lifetime(r) => std::fmt::Debug::fmt(&r, f),
GenericArgKind::Const(c) => std::fmt::Debug::fmt(&c, f),
}
}
}
impl<'db> GenericArg<'db> {
#[inline]
pub fn store(self) -> StoredGenericArg {
StoredGenericArg::new(self)
}
#[inline]
pub fn kind(self) -> GenericArgKind<'db> {
self.ptr.kind()
}
pub fn ty(self) -> Option<Ty<'db>> {
match self.kind() {
GenericArgKind::Type(ty) => Some(ty),
@ -66,8 +251,8 @@ impl<'db> GenericArg<'db> {
#[inline]
pub(crate) fn expect_region(self) -> Region<'db> {
match self {
GenericArg::Lifetime(region) => region,
match self.kind() {
GenericArgKind::Lifetime(region) => region,
_ => panic!("expected a region, got {self:?}"),
}
}
@ -87,30 +272,32 @@ impl<'db> GenericArg<'db> {
}
impl<'db> From<Term<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Term<'db>) -> Self {
match value {
Term::Ty(ty) => GenericArg::Ty(ty),
Term::Const(c) => GenericArg::Const(c),
}
GenericArg { ptr: value.ptr }
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub enum Term<'db> {
Ty(Ty<'db>),
Const(Const<'db>),
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Term<'db> {
ptr: GenericArgImpl<'db>,
}
impl<'db> std::fmt::Debug for Term<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Ty(t) => std::fmt::Debug::fmt(t, f),
Self::Const(c) => std::fmt::Debug::fmt(c, f),
match self.kind() {
TermKind::Ty(t) => std::fmt::Debug::fmt(&t, f),
TermKind::Const(c) => std::fmt::Debug::fmt(&c, f),
}
}
}
impl<'db> Term<'db> {
#[inline]
pub fn kind(self) -> TermKind<'db> {
self.ptr.term_kind()
}
pub fn expect_type(&self) -> Ty<'db> {
self.as_type().expect("expected a type, but found a const")
}
@ -124,31 +311,108 @@ impl<'db> Term<'db> {
}
impl<'db> From<Ty<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Ty<'db>) -> Self {
Self::Ty(value)
GenericArg { ptr: GenericArgImpl::new_ty(value) }
}
}
impl<'db> From<Region<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Region<'db>) -> Self {
Self::Lifetime(value)
GenericArg { ptr: GenericArgImpl::new_region(value) }
}
}
impl<'db> From<Const<'db>> for GenericArg<'db> {
#[inline]
fn from(value: Const<'db>) -> Self {
Self::Const(value)
GenericArg { ptr: GenericArgImpl::new_const(value) }
}
}
impl<'db> IntoKind for GenericArg<'db> {
type Kind = GenericArgKind<DbInterner<'db>>;
type Kind = GenericArgKind<'db>;
#[inline]
fn kind(self) -> Self::Kind {
match self {
GenericArg::Ty(ty) => GenericArgKind::Type(ty),
GenericArg::Lifetime(region) => GenericArgKind::Lifetime(region),
GenericArg::Const(c) => GenericArgKind::Const(c),
self.ptr.kind()
}
}
// Visiting a `GenericArg` just visits whichever variant it tags.
impl<'db, V> GenericTypeVisitable<V> for GenericArg<'db>
where
    GenericArgKind<'db>: GenericTypeVisitable<V>,
{
    fn generic_visit_with(&self, visitor: &mut V) {
        self.kind().generic_visit_with(visitor);
    }
}
// Likewise for `Term` (type or const only).
impl<'db, V> GenericTypeVisitable<V> for Term<'db>
where
    TermKind<'db>: GenericTypeVisitable<V>,
{
    fn generic_visit_with(&self, visitor: &mut V) {
        self.kind().generic_visit_with(visitor);
    }
}
// Manual `TypeVisitable` impls (the pointer-tagged layout precludes deriving them):
// dispatch on the decoded kind and delegate.
impl<'db> TypeVisitable<DbInterner<'db>> for GenericArg<'db> {
    fn visit_with<V: TypeVisitor<DbInterner<'db>>>(&self, visitor: &mut V) -> V::Result {
        match self.kind() {
            GenericArgKind::Lifetime(it) => it.visit_with(visitor),
            GenericArgKind::Type(it) => it.visit_with(visitor),
            GenericArgKind::Const(it) => it.visit_with(visitor),
        }
    }
}
impl<'db> TypeVisitable<DbInterner<'db>> for Term<'db> {
    fn visit_with<V: TypeVisitor<DbInterner<'db>>>(&self, visitor: &mut V) -> V::Result {
        match self.kind() {
            TermKind::Ty(it) => it.visit_with(visitor),
            TermKind::Const(it) => it.visit_with(visitor),
        }
    }
}
// Manual `TypeFoldable` impls: fold the decoded variant, then re-tag via `.into()`.
impl<'db> TypeFoldable<DbInterner<'db>> for GenericArg<'db> {
    fn try_fold_with<F: FallibleTypeFolder<DbInterner<'db>>>(
        self,
        folder: &mut F,
    ) -> Result<Self, F::Error> {
        Ok(match self.kind() {
            GenericArgKind::Lifetime(it) => it.try_fold_with(folder)?.into(),
            GenericArgKind::Type(it) => it.try_fold_with(folder)?.into(),
            GenericArgKind::Const(it) => it.try_fold_with(folder)?.into(),
        })
    }
    fn fold_with<F: TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
        match self.kind() {
            GenericArgKind::Lifetime(it) => it.fold_with(folder).into(),
            GenericArgKind::Type(it) => it.fold_with(folder).into(),
            GenericArgKind::Const(it) => it.fold_with(folder).into(),
        }
    }
}
impl<'db> TypeFoldable<DbInterner<'db>> for Term<'db> {
    fn try_fold_with<F: FallibleTypeFolder<DbInterner<'db>>>(
        self,
        folder: &mut F,
    ) -> Result<Self, F::Error> {
        Ok(match self.kind() {
            TermKind::Ty(it) => it.try_fold_with(folder)?.into(),
            TermKind::Const(it) => it.try_fold_with(folder)?.into(),
        })
    }
    fn fold_with<F: TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
        match self.kind() {
            TermKind::Ty(it) => it.fold_with(folder).into(),
            TermKind::Const(it) => it.fold_with(folder).into(),
        }
    }
}
@ -182,7 +446,9 @@ impl<'db> Relate<DbInterner<'db>> for GenericArg<'db> {
}
}
interned_vec_db!(GenericArgs, GenericArg);
interned_slice!(GenericArgsStorage, GenericArgs, GenericArg<'db>, GenericArg<'static>,);
impl_foldable_for_interned_slice!(GenericArgs);
impl_stored_interned_slice!(GenericArgsStorage, GenericArgs, StoredGenericArgs);
impl<'db> rustc_type_ir::inherent::GenericArg<DbInterner<'db>> for GenericArg<'db> {}
@ -306,7 +572,7 @@ impl<'db> GenericArgs<'db> {
/// A "sensible" `.split_closure_args()`, where the arguments are not in a tuple.
pub fn split_closure_args_untupled(self) -> rustc_type_ir::ClosureArgsParts<DbInterner<'db>> {
// FIXME: should use `ClosureSubst` when possible
match self.inner().as_slice() {
match self.as_slice() {
[parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
let interner = DbInterner::conjure();
rustc_type_ir::ClosureArgsParts {
@ -341,8 +607,8 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for GenericArgs<'db> {
a: Self,
b: Self,
) -> rustc_type_ir::relate::RelateResult<DbInterner<'db>, Self> {
let interner = relation.cx();
CollectAndApply::collect_and_apply(
GenericArgs::new_from_iter(
relation.cx(),
std::iter::zip(a.iter(), b.iter()).map(|(a, b)| {
relation.relate_with_variance(
Variance::Invariant,
@ -351,7 +617,6 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for GenericArgs<'db> {
b,
)
}),
|g| GenericArgs::new_from_iter(interner, g.iter().cloned()),
)
}
}
@ -397,29 +662,26 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
})
}
fn type_at(self, i: usize) -> <DbInterner<'db> as rustc_type_ir::Interner>::Ty {
self.inner()
.get(i)
self.get(i)
.and_then(|g| g.as_type())
.unwrap_or_else(|| Ty::new_error(DbInterner::conjure(), ErrorGuaranteed))
}
fn region_at(self, i: usize) -> <DbInterner<'db> as rustc_type_ir::Interner>::Region {
self.inner()
.get(i)
self.get(i)
.and_then(|g| g.as_region())
.unwrap_or_else(|| Region::error(DbInterner::conjure()))
}
fn const_at(self, i: usize) -> <DbInterner<'db> as rustc_type_ir::Interner>::Const {
self.inner()
.get(i)
self.get(i)
.and_then(|g| g.as_const())
.unwrap_or_else(|| Const::error(DbInterner::conjure()))
}
fn split_closure_args(self) -> rustc_type_ir::ClosureArgsParts<DbInterner<'db>> {
// FIXME: should use `ClosureSubst` when possible
match self.inner().as_slice() {
match self.as_slice() {
[parent_args @ .., closure_kind_ty, sig_ty, tupled_upvars_ty] => {
let interner = DbInterner::conjure();
// This is stupid, but the next solver expects the first input to actually be a tuple
@ -458,7 +720,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
fn split_coroutine_closure_args(
self,
) -> rustc_type_ir::CoroutineClosureArgsParts<DbInterner<'db>> {
match self.inner().as_slice() {
match self.as_slice() {
[
parent_args @ ..,
closure_kind_ty,
@ -481,7 +743,7 @@ impl<'db> rustc_type_ir::inherent::GenericArgs<DbInterner<'db>> for GenericArgs<
fn split_coroutine_args(self) -> rustc_type_ir::CoroutineArgsParts<DbInterner<'db>> {
let interner = DbInterner::conjure();
match self.inner().as_slice() {
match self.as_slice() {
[parent_args @ .., kind_ty, resume_ty, yield_ty, return_ty, tupled_upvars_ty] => {
rustc_type_ir::CoroutineArgsParts {
parent_args: GenericArgs::new_from_iter(interner, parent_args.iter().cloned()),
@ -518,25 +780,25 @@ pub fn error_for_param_kind<'db>(id: GenericParamId, interner: DbInterner<'db>)
}
impl<'db> IntoKind for Term<'db> {
type Kind = TermKind<DbInterner<'db>>;
type Kind = TermKind<'db>;
#[inline]
fn kind(self) -> Self::Kind {
match self {
Term::Ty(ty) => TermKind::Ty(ty),
Term::Const(c) => TermKind::Const(c),
}
self.ptr.term_kind()
}
}
impl<'db> From<Ty<'db>> for Term<'db> {
#[inline]
fn from(value: Ty<'db>) -> Self {
Self::Ty(value)
Term { ptr: GenericArgImpl::new_ty(value) }
}
}
impl<'db> From<Const<'db>> for Term<'db> {
#[inline]
fn from(value: Const<'db>) -> Self {
Self::Const(value)
Term { ptr: GenericArgImpl::new_const(value) }
}
}
@ -583,7 +845,7 @@ impl From<ConstVid> for TermVid {
impl<'db> DbInterner<'db> {
pub(super) fn mk_args(self, args: &[GenericArg<'db>]) -> GenericArgs<'db> {
GenericArgs::new_from_iter(self, args.iter().cloned())
GenericArgs::new_from_slice(args)
}
pub(super) fn mk_args_from_iter<I, T>(self, iter: I) -> T::Output

View file

@ -28,7 +28,7 @@
use rustc_type_ir::{
FnSig, GenericArgKind, TypeFoldable, TypingMode, Variance,
error::ExpectedFound,
inherent::{IntoKind, Span as _},
inherent::Span as _,
relate::{Relate, TypeRelation, solver_relating::RelateExt},
};

View file

@ -8,7 +8,7 @@
use rustc_hash::FxHashMap;
use rustc_index::Idx;
use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar};
use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _};
use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, Ty as _};
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags,
TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
@ -498,7 +498,7 @@ impl<'cx, 'db> Canonicalizer<'cx, 'db> {
{
let base = Canonical {
max_universe: UniverseIndex::ROOT,
variables: CanonicalVars::new_from_iter(tcx, []),
variables: CanonicalVars::empty(tcx),
value: (),
};
Canonicalizer::canonicalize_with_base(

View file

@ -23,7 +23,7 @@ use rustc_index::{Idx as _, IndexVec};
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder,
TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
inherent::{GenericArg as _, IntoKind, SliceLike},
inherent::{GenericArg as _, IntoKind},
};
use tracing::{debug, instrument};

View file

@ -3,7 +3,7 @@ use rustc_type_ir::{OutlivesPredicate, TypeVisitableExt};
use tracing::{debug, instrument};
use crate::next_solver::{
ArgOutlivesPredicate, GenericArg, Region, RegionOutlivesPredicate, Ty,
ArgOutlivesPredicate, GenericArgKind, Region, RegionOutlivesPredicate, Ty,
infer::{InferCtxt, TypeOutlivesConstraint, snapshot::undo_log::UndoLog},
};
@ -12,14 +12,14 @@ impl<'db> InferCtxt<'db> {
&self,
OutlivesPredicate(arg, r2): ArgOutlivesPredicate<'db>,
) {
match arg {
GenericArg::Lifetime(r1) => {
match arg.kind() {
GenericArgKind::Lifetime(r1) => {
self.register_region_outlives_constraint(OutlivesPredicate(r1, r2));
}
GenericArg::Ty(ty1) => {
GenericArgKind::Type(ty1) => {
self.register_type_outlives_constraint(ty1, r2);
}
GenericArg::Const(_) => unreachable!(),
GenericArgKind::Const(_) => unreachable!(),
}
}

View file

@ -1,7 +1,9 @@
//! Things related to the Interner in the next-trait-solver.
use std::fmt;
use std::{fmt, ops::ControlFlow};
use intern::{Interned, InternedRef, InternedSliceRef, impl_internable};
use macros::GenericTypeVisitable;
use rustc_ast_ir::{FloatTy, IntTy, UintTy};
pub use tls_cache::clear_tls_solver_cache;
pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db};
@ -20,8 +22,8 @@ use rustc_hash::FxHashSet;
use rustc_index::bit_set::DenseBitSet;
use rustc_type_ir::{
AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, CoroutineWitnessTypes, DebruijnIndex,
EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef,
TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance,
EarlyBinder, FlagComputation, Flags, GenericArgKind, GenericTypeVisitable, ImplPolarity,
InferTy, Interner, TraitRef, TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance,
elaborate::elaborate,
error::TypeError,
fast_reject,
@ -57,155 +59,18 @@ use super::{
util::sizedness_constraint_for_ty,
};
#[derive(PartialEq, Eq, Hash, PartialOrd, Ord, Clone)]
pub struct InternedWrapperNoDebug<T>(pub(crate) T);
macro_rules! interned_slice {
($storage:ident, $name:ident, $ty_db:ty, $ty_static:ty $(,)?) => {
const _: () = {
#[allow(unused_lifetimes)]
fn _ensure_correct_types<'db: 'static>(v: $ty_db) -> $ty_static { v }
};
#[macro_export]
#[doc(hidden)]
macro_rules! _interned_vec_nolifetime_salsa {
($name:ident, $ty:ty) => {
interned_vec_nolifetime_salsa!($name, $ty, nofold);
::intern::impl_slice_internable!(gc; $storage, (), $ty_static);
impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for $name<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.try_fold_with(folder)).collect::<Result<_, _>>()?;
Ok($name::new_(folder.cx().db(), inner))
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Self {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.fold_with(folder)).collect();
$name::new_(folder.cx().db(), inner)
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for $name<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::inherent::SliceLike as _;
rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter());
V::Result::output()
}
}
};
($name:ident, $ty:ty, nofold) => {
#[salsa::interned(constructor = new_)]
pub struct $name {
#[returns(ref)]
inner_: smallvec::SmallVec<[$ty; 2]>,
}
impl<'db> $name<'db> {
pub fn new_from_iter(
interner: DbInterner<'db>,
data: impl IntoIterator<Item = $ty>,
) -> Self {
$name::new_(interner.db(), data.into_iter().collect::<smallvec::SmallVec<[_; 2]>>())
}
pub fn inner(&self) -> &smallvec::SmallVec<[$ty; 2]> {
// SAFETY: ¯\_(ツ)_/¯
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
})
}
}
impl<'db> std::fmt::Debug for $name<'db> {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.as_slice().fmt(fmt)
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> {
type Item = $ty;
type IntoIter = <smallvec::SmallVec<[$ty; 2]> as IntoIterator>::IntoIter;
fn iter(self) -> Self::IntoIter {
self.inner().clone().into_iter()
}
fn as_slice(&self) -> &[Self::Item] {
self.inner().as_slice()
}
}
impl<'db> IntoIterator for $name<'db> {
type Item = $ty;
type IntoIter = <Self as rustc_type_ir::inherent::SliceLike>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
rustc_type_ir::inherent::SliceLike::iter(self)
}
}
impl<'db> Default for $name<'db> {
fn default() -> Self {
$name::new_from_iter(DbInterner::conjure(), [])
}
}
};
}
pub use crate::_interned_vec_nolifetime_salsa as interned_vec_nolifetime_salsa;
#[macro_export]
#[doc(hidden)]
macro_rules! _interned_vec_db {
($name:ident, $ty:ident) => {
interned_vec_db!($name, $ty, nofold);
impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for $name<'db> {
fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.try_fold_with(folder)).collect::<Result<_, _>>()?;
Ok($name::new_(folder.cx().db(), inner))
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Self {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.fold_with(folder)).collect();
$name::new_(folder.cx().db(), inner)
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for $name<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
visitor: &mut V,
) -> V::Result {
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::inherent::SliceLike as _;
rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter());
V::Result::output()
}
}
};
($name:ident, $ty:ident, nofold) => {
#[salsa::interned(constructor = new_, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct $name<'db> {
#[returns(ref)]
inner_: smallvec::SmallVec<[$ty<'db>; 2]>,
interned: ::intern::InternedSliceRef<'db, $storage>,
}
impl<'db> std::fmt::Debug for $name<'db> {
@ -215,58 +80,234 @@ macro_rules! _interned_vec_db {
}
impl<'db> $name<'db> {
pub fn empty(interner: DbInterner<'db>) -> Self {
$name::new_(interner.db(), smallvec::SmallVec::new())
#[inline]
pub fn empty(_interner: DbInterner<'db>) -> Self {
// FIXME: Get from a static.
Self::new_from_slice(&[])
}
pub fn new_from_iter(
interner: DbInterner<'db>,
data: impl IntoIterator<Item = $ty<'db>>,
) -> Self {
$name::new_(interner.db(), data.into_iter().collect::<smallvec::SmallVec<[_; 2]>>())
#[inline]
pub fn new_from_slice(slice: &[$ty_db]) -> Self {
let slice = unsafe { ::std::mem::transmute::<&[$ty_db], &[$ty_static]>(slice) };
Self { interned: ::intern::InternedSlice::from_header_and_slice((), slice) }
}
pub fn inner(&self) -> &smallvec::SmallVec<[$ty<'db>; 2]> {
// SAFETY: ¯\_(ツ)_/¯
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
#[inline]
pub fn new_from_iter<I, T>(_interner: DbInterner<'db>, args: I) -> T::Output
where
I: IntoIterator<Item = T>,
T: ::rustc_type_ir::CollectAndApply<$ty_db, Self>,
{
::rustc_type_ir::CollectAndApply::collect_and_apply(args.into_iter(), |g| {
Self::new_from_slice(g)
})
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> {
type Item = $ty<'db>;
type IntoIter = <smallvec::SmallVec<[$ty<'db>; 2]> as IntoIterator>::IntoIter;
fn iter(self) -> Self::IntoIter {
self.inner().clone().into_iter()
#[inline]
pub fn as_slice(self) -> &'db [$ty_db] {
let slice = &self.interned.get().slice;
unsafe { ::std::mem::transmute::<&[$ty_static], &[$ty_db]>(slice) }
}
fn as_slice(&self) -> &[Self::Item] {
self.inner().as_slice()
#[inline]
pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>> {
self.as_slice().iter().copied()
}
#[inline]
pub fn len(self) -> usize {
self.as_slice().len()
}
#[inline]
pub fn is_empty(self) -> bool {
self.as_slice().is_empty()
}
}
impl<'db> IntoIterator for $name<'db> {
type Item = $ty<'db>;
type IntoIter = <Self as rustc_type_ir::inherent::SliceLike>::IntoIter;
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>;
type Item = $ty_db;
#[inline]
fn into_iter(self) -> Self::IntoIter { self.iter() }
}
fn into_iter(self) -> Self::IntoIter {
rustc_type_ir::inherent::SliceLike::iter(self)
impl<'db> ::std::ops::Deref for $name<'db> {
type Target = [$ty_db];
#[inline]
fn deref(&self) -> &Self::Target {
(*self).as_slice()
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for $name<'db> {
type Item = $ty_db;
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, $ty_db>>;
#[inline]
fn iter(self) -> Self::IntoIter {
self.iter()
}
#[inline]
fn as_slice(&self) -> &[Self::Item] {
(*self).as_slice()
}
}
impl<'db> Default for $name<'db> {
#[inline]
fn default() -> Self {
$name::new_from_iter(DbInterner::conjure(), [])
$name::empty(DbInterner::conjure())
}
}
impl<'db, V: $crate::next_solver::interner::WorldExposer>
rustc_type_ir::GenericTypeVisitable<V> for $name<'db>
{
#[inline]
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned_slice(self.interned).is_continue() {
self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor));
}
}
}
};
}
pub(crate) use interned_slice;
pub use crate::_interned_vec_db as interned_vec_db;
/// Implements `rustc_type_ir::TypeVisitable` and `TypeFoldable` for an interned-slice
/// type created by `interned_slice!`, by visiting/folding each element and re-interning
/// the (possibly changed) sequence.
///
/// Kept separate from `interned_slice!` because some slice types (e.g. `nofold` ones)
/// must not be foldable.
macro_rules! impl_foldable_for_interned_slice {
    ($name:ident) => {
        impl<'db> ::rustc_type_ir::TypeVisitable<DbInterner<'db>> for $name<'db> {
            fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
                &self,
                visitor: &mut V,
            ) -> V::Result {
                use rustc_ast_ir::visit::VisitorResult;
                // Visit every element; the walk macro short-circuits on a `Break` result.
                rustc_ast_ir::walk_visitable_list!(visitor, (*self).iter());
                V::Result::output()
            }
        }
        impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for $name<'db> {
            fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
                self,
                folder: &mut F,
            ) -> Result<Self, F::Error> {
                // `new_from_iter` goes through `CollectAndApply`, which threads the
                // per-element `Result`s through and interns only on full success.
                Self::new_from_iter(folder.cx(), self.iter().map(|it| it.try_fold_with(folder)))
            }
            fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
                self,
                folder: &mut F,
            ) -> Self {
                Self::new_from_iter(folder.cx(), self.iter().map(|it| it.fold_with(folder)))
            }
        }
    };
}
pub(crate) use impl_foldable_for_interned_slice;
/// Generates a `$stored_name` owning wrapper around the lifetime-erased interned slice
/// type `$name`, suitable for storing in Salsa queries.
///
/// The borrowed `$name<'db>` must not be stored in queries; `$stored_name` holds an
/// owned `InternedSlice` (refcounted) and can be converted back via `as_ref`.
macro_rules! impl_stored_interned_slice {
    ( $storage:ident, $name:ident, $stored_name:ident $(,)? ) => {
        /// Owned (query-storable) form of the corresponding interned slice type.
        #[derive(Clone, PartialEq, Eq, Hash)]
        pub struct $stored_name {
            interned: ::intern::InternedSlice<$storage>,
        }
        impl $stored_name {
            #[inline]
            fn new(it: $name<'_>) -> Self {
                Self { interned: it.interned.to_owned() }
            }
            /// Borrows back the lightweight `$name` view.
            // NOTE(review): the transmute stretches the borrow's lifetime to an arbitrary
            // `'db`; soundness presumably relies on the interned data living as long as
            // `self` — same pattern as the other interned wrappers in this file.
            #[inline]
            pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> {
                let it = $name { interned: self.interned.as_ref() };
                unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) }
            }
        }
        // SAFETY: It is safe to store this type in queries (but not `$name`).
        unsafe impl salsa::Update for $stored_name {
            unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
                // SAFETY: Comparing by (pointer) equality is safe.
                unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) }
            }
        }
        impl std::fmt::Debug for $stored_name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                // Debug-print through the borrowed view so both forms render identically.
                self.as_ref().fmt(f)
            }
        }
        impl $name<'_> {
            /// Converts the borrowed interned slice into its owned, query-storable form.
            #[inline]
            pub fn store(self) -> $stored_name {
                $stored_name::new(self)
            }
        }
    };
}
pub(crate) use impl_stored_interned_slice;
/// Generates a `$stored_name` owning wrapper around a lifetime-erased interned (scalar,
/// non-slice) type `$name`, suitable for storing in Salsa queries.
///
/// Mirrors `impl_stored_interned_slice!` but for `::intern::Interned` rather than
/// `::intern::InternedSlice`.
macro_rules! impl_stored_interned {
    ( $storage:ident, $name:ident, $stored_name:ident $(,)? ) => {
        /// Owned (query-storable) form of the corresponding interned type.
        #[derive(Clone, PartialEq, Eq, Hash)]
        pub struct $stored_name {
            interned: ::intern::Interned<$storage>,
        }
        impl $stored_name {
            #[inline]
            fn new(it: $name<'_>) -> Self {
                Self { interned: it.interned.to_owned() }
            }
            /// Borrows back the lightweight `$name` view.
            // NOTE(review): lifetime-stretching transmute, same pattern and caveats as in
            // `impl_stored_interned_slice!`.
            #[inline]
            pub fn as_ref<'a, 'db>(&'a self) -> $name<'db> {
                let it = $name { interned: self.interned.as_ref() };
                unsafe { std::mem::transmute::<$name<'a>, $name<'db>>(it) }
            }
        }
        // SAFETY: It is safe to store this type in queries (but not `$name`);
        // comparing by (pointer) equality is safe, as in `impl_stored_interned_slice!`.
        unsafe impl salsa::Update for $stored_name {
            unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
                unsafe { crate::utils::unsafe_update_eq(old_pointer, new_value) }
            }
        }
        impl std::fmt::Debug for $stored_name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                // Debug-print through the borrowed view so both forms render identically.
                self.as_ref().fmt(f)
            }
        }
        impl $name<'_> {
            /// Converts the borrowed interned value into its owned, query-storable form.
            #[inline]
            pub fn store(self) -> $stored_name {
                $stored_name::new(self)
            }
        }
    };
}
pub(crate) use impl_stored_interned;
/// A visitor trait that treats every interned value specially. Visitable types are
/// expected to call these methods whenever they encounter an interned value, before
/// descending into it. This is what the garbage collector uses to mark live interneds.
pub trait WorldExposer {
    /// Called for a single interned value. Returning `ControlFlow::Continue` asks the
    /// caller to keep visiting the interned value's contents; `Break` skips them.
    fn on_interned<T: intern::Internable>(
        &mut self,
        interned: InternedRef<'_, T>,
    ) -> ControlFlow<()>;
    /// Like [`WorldExposer::on_interned`], but for interned slices.
    fn on_interned_slice<T: intern::SliceInternable>(
        &mut self,
        interned: InternedSliceRef<'_, T>,
    ) -> ControlFlow<()>;
}
#[derive(Debug, Copy, Clone)]
pub struct DbInterner<'db> {
@ -333,7 +374,8 @@ impl<'db> inherent::Span<DbInterner<'db>> for Span {
}
}
interned_vec_nolifetime_salsa!(BoundVarKinds, BoundVarKind, nofold);
interned_slice!(BoundVarKindsStorage, BoundVarKinds, BoundVarKind, BoundVarKind);
impl_stored_interned_slice!(BoundVarKindsStorage, BoundVarKinds, StoredBoundVarKinds);
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum BoundVarKind {
@ -365,7 +407,12 @@ impl BoundVarKind {
}
}
interned_vec_db!(CanonicalVars, CanonicalVarKind, nofold);
interned_slice!(
CanonicalVarsStorage,
CanonicalVars,
CanonicalVarKind<'db>,
CanonicalVarKind<'static>
);
pub struct DepNodeIndex;
@ -391,7 +438,8 @@ impl<T: std::fmt::Debug> std::fmt::Debug for Placeholder<T> {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct AllocId;
interned_vec_nolifetime_salsa!(VariancesOf, Variance, nofold);
interned_slice!(VariancesOfStorage, VariancesOf, Variance, Variance);
impl_stored_interned_slice!(VariancesOfStorage, VariancesOf, StoredVariancesOf);
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct VariantIdx(usize);
@ -658,7 +706,7 @@ impl<'db> inherent::AdtDef<DbInterner<'db>> for AdtDef {
let id: VariantId = struct_id.into();
let field_types = interner.db().field_types(id);
field_types.iter().last().map(|f| *f.1)
field_types.iter().last().map(|f| f.1.get())
}
fn all_field_tys(
@ -668,7 +716,7 @@ impl<'db> inherent::AdtDef<DbInterner<'db>> for AdtDef {
let db = interner.db();
// FIXME: this is disabled just to match the behavior with chalk right now
let _field_tys = |id: VariantId| {
db.field_types(id).iter().map(|(_, ty)| ty.skip_binder()).collect::<Vec<_>>()
db.field_types(id).iter().map(|(_, ty)| ty.get().skip_binder()).collect::<Vec<_>>()
};
let field_tys = |_id: VariantId| vec![];
let tys: Vec<_> = match self.inner().id {
@ -762,30 +810,36 @@ impl std::ops::Deref for UnsizingParams {
pub type PatternKind<'db> = rustc_type_ir::PatternKind<DbInterner<'db>>;
#[salsa::interned(constructor = new_, debug, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Pattern<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<PatternKind<'db>>,
interned: InternedRef<'db, PatternInterned>,
}
impl<'db> std::fmt::Debug for InternedWrapperNoDebug<PatternKind<'db>> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
// Storage for interned `Pattern`s. The `'static` here erases the `'db` lifetime;
// `Pattern::new`/`Pattern::inner` transmute it back and forth.
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
struct PatternInterned(PatternKind<'static>);
impl_internable!(gc; PatternInterned);

// Compile-time assertion that `Pattern` stays `Copy` (it is just an interned pointer).
const _: () = {
    const fn is_copy<T: Copy>() {}
    is_copy::<Pattern<'static>>();
};
impl<'db> Pattern<'db> {
pub fn new(interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self {
Pattern::new_(interner.db(), InternedWrapperNoDebug(kind))
pub fn new(_interner: DbInterner<'db>, kind: PatternKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<PatternKind<'db>, PatternKind<'static>>(kind) };
Self { interned: Interned::new_gc(PatternInterned(kind)) }
}
pub fn inner(&self) -> &PatternKind<'db> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe { std::mem::transmute::<&PatternKind<'static>, &PatternKind<'db>>(inner) }
}
}
impl<'db> std::fmt::Debug for Pattern<'db> {
    /// Renders the pattern as its underlying `PatternKind`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(&self.kind(), f)
    }
}
@ -831,6 +885,36 @@ impl<'db> rustc_type_ir::inherent::IntoKind for Pattern<'db> {
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for Pattern<'db> {
    /// Visits the pattern by delegating to its underlying `PatternKind`.
    fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
        &self,
        visitor: &mut V,
    ) -> V::Result {
        self.kind().visit_with(visitor)
    }
}
impl<'db, V: WorldExposer> rustc_type_ir::GenericTypeVisitable<V> for Pattern<'db> {
    /// GC-marking walk: reports the interned allocation to the visitor first, and only
    /// descends into the pattern's contents if the visitor asks to continue (a `Break`
    /// typically means this interned value was already handled).
    fn generic_visit_with(&self, visitor: &mut V) {
        if visitor.on_interned(self.interned).is_continue() {
            self.kind().generic_visit_with(visitor);
        }
    }
}
impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for Pattern<'db> {
    /// Folds the underlying `PatternKind` and re-interns the result.
    fn try_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
        self,
        folder: &mut F,
    ) -> Result<Self, F::Error> {
        let folded = self.kind().try_fold_with(folder)?;
        Ok(Pattern::new(folder.cx(), folded))
    }

    fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
        let folded = self.kind().fold_with(folder);
        Pattern::new(folder.cx(), folded)
    }
}
impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for Pattern<'db> {
fn relate<R: rustc_type_ir::relate::TypeRelation<DbInterner<'db>>>(
relation: &mut R,
@ -851,9 +935,9 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for Pattern<'db> {
if a.len() != b.len() {
return Err(TypeError::Mismatch);
}
let pats = CollectAndApply::collect_and_apply(
let pats = PatList::new_from_iter(
relation.cx(),
std::iter::zip(a.iter(), b.iter()).map(|(a, b)| relation.relate(a, b)),
|g| PatList::new_from_iter(tcx, g.iter().cloned()),
)?;
Ok(Pattern::new(tcx, PatternKind::Or(pats)))
}
@ -865,7 +949,8 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for Pattern<'db> {
}
}
interned_vec_db!(PatList, Pattern);
interned_slice!(PatListStorage, PatList, Pattern<'db>, Pattern<'static>);
impl_foldable_for_interned_slice!(PatList);
macro_rules! as_lang_item {
(
@ -941,7 +1026,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
data: &[(OpaqueTypeKey<'db>, Self::Ty)],
) -> Self::PredefinedOpaques {
PredefinedOpaques::new_from_iter(self, data.iter().cloned())
PredefinedOpaques::new_from_slice(data)
}
type CanonicalVarKinds = CanonicalVars<'db>;
@ -950,7 +1035,7 @@ impl<'db> Interner for DbInterner<'db> {
self,
kinds: &[rustc_type_ir::CanonicalVarKind<Self>],
) -> Self::CanonicalVarKinds {
CanonicalVars::new_from_iter(self, kinds.iter().cloned())
CanonicalVars::new_from_slice(kinds)
}
type ExternalConstraints = ExternalConstraints<'db>;
@ -1012,7 +1097,7 @@ impl<'db> Interner for DbInterner<'db> {
type Features = Features;
fn mk_args(self, args: &[Self::GenericArg]) -> Self::GenericArgs {
GenericArgs::new_from_iter(self, args.iter().cloned())
GenericArgs::new_from_slice(args)
}
fn mk_args_from_iter<I, T>(self, args: I) -> T::Output
@ -1020,9 +1105,7 @@ impl<'db> Interner for DbInterner<'db> {
I: Iterator<Item = T>,
T: rustc_type_ir::CollectAndApply<Self::GenericArg, Self::GenericArgs>,
{
CollectAndApply::collect_and_apply(args, |g| {
GenericArgs::new_from_iter(self, g.iter().cloned())
})
GenericArgs::new_from_iter(self, args)
}
type UnsizingParams = UnsizingParams;
@ -1096,7 +1179,7 @@ impl<'db> Interner for DbInterner<'db> {
| SolverDefId::ImplId(_)
| SolverDefId::InternedClosureId(_)
| SolverDefId::InternedCoroutineId(_) => {
return VariancesOf::new_from_iter(self, []);
return VariancesOf::empty(self);
}
};
self.db.variances_of(generic_def)
@ -1202,7 +1285,7 @@ impl<'db> Interner for DbInterner<'db> {
I: Iterator<Item = T>,
T: rustc_type_ir::CollectAndApply<Self::Ty, Self::Tys>,
{
CollectAndApply::collect_and_apply(args, |g| Tys::new_from_iter(self, g.iter().cloned()))
Tys::new_from_iter(self, args)
}
fn parent(self, def_id: Self::DefId) -> Self::DefId {
@ -1338,7 +1421,7 @@ impl<'db> Interner for DbInterner<'db> {
let own_bounds: FxHashSet<_> =
self.item_self_bounds(def_id).skip_binder().into_iter().collect();
if all_bounds.len() == own_bounds.len() {
EarlyBinder::bind(Clauses::new_from_iter(self, []))
EarlyBinder::bind(Clauses::empty(self))
} else {
EarlyBinder::bind(Clauses::new_from_iter(
self,
@ -1952,7 +2035,7 @@ impl<'db> Interner for DbInterner<'db> {
let field_types = self.db().field_types(variant.id());
let mut unsizing_params = DenseBitSet::new_empty(num_params);
let ty = field_types[tail_field.0];
let ty = field_types[tail_field.0].get();
for arg in ty.instantiate_identity().walk() {
if let Some(i) = maybe_unsizing_param_idx(arg) {
unsizing_params.insert(i);
@ -1962,7 +2045,7 @@ impl<'db> Interner for DbInterner<'db> {
// Ensure none of the other fields mention the parameters used
// in unsizing.
for field in prefix_fields {
for arg in field_types[field.0].instantiate_identity().walk() {
for arg in field_types[field.0].get().instantiate_identity().walk() {
if let Some(i) = maybe_unsizing_param_idx(arg) {
unsizing_params.remove(i);
}
@ -2022,7 +2105,7 @@ impl<'db> Interner for DbInterner<'db> {
};
let mut result = Vec::new();
crate::opaques::opaque_types_defined_by(self.db, def_id, &mut result);
SolverDefIds::new_from_iter(self, result)
SolverDefIds::new_from_slice(&result)
}
fn opaque_types_and_coroutines_defined_by(self, def_id: Self::LocalDefId) -> Self::LocalDefIds {
@ -2051,7 +2134,7 @@ impl<'db> Interner for DbInterner<'db> {
}
});
SolverDefIds::new_from_iter(self, result)
SolverDefIds::new_from_slice(&result)
}
fn alias_has_const_conditions(self, _def_id: Self::DefId) -> bool {
@ -2096,10 +2179,10 @@ impl<'db> Interner for DbInterner<'db> {
let impl_trait_id = self.db().lookup_intern_impl_trait_id(opaque);
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
crate::opaques::rpit_hidden_types(self.db, func)[idx]
crate::opaques::rpit_hidden_types(self.db, func)[idx].get()
}
crate::ImplTraitId::TypeAliasImplTrait(type_alias, idx) => {
crate::opaques::tait_hidden_types(self.db, type_alias)[idx]
crate::opaques::tait_hidden_types(self.db, type_alias)[idx].get()
}
}
}
@ -2288,6 +2371,11 @@ macro_rules! TrivialTypeTraversalImpls {
<F::Result as rustc_ast_ir::visit::VisitorResult>::output()
}
}
impl<V> rustc_type_ir::GenericTypeVisitable<V> for $ty {
#[inline]
fn generic_visit_with(&self, _visitor: &mut V) {}
}
)+
};
}
@ -2302,17 +2390,22 @@ TrivialTypeTraversalImpls! {
AdtIdWrapper,
ImplIdWrapper,
GeneralConstIdWrapper,
Pattern<'db>,
Safety,
FnAbi,
Span,
ParamConst,
ParamTy,
BoundRegion,
BoundVar,
Placeholder<BoundRegion>,
Placeholder<BoundTy>,
Placeholder<BoundVar>,
Placeholder<BoundConst>,
BoundVarKind,
EarlyParamRegion,
LateParamRegion,
AdtDef,
BoundTy,
BoundConst,
}
mod tls_db {
@ -2479,3 +2572,110 @@ mod tls_cache {
GLOBAL_CACHE.with_borrow_mut(|handle| *handle = None);
}
}
// The garbage collector is itself a `WorldExposer`: every interned value reported to it
// gets marked alive. The returned `ControlFlow` is the collector's signal for whether
// the caller still needs to walk the value's contents.
impl WorldExposer for intern::GarbageCollector {
    fn on_interned<T: intern::Internable>(
        &mut self,
        interned: InternedRef<'_, T>,
    ) -> ControlFlow<()> {
        self.mark_interned_alive(interned)
    }
    fn on_interned_slice<T: intern::SliceInternable>(
        &mut self,
        interned: InternedSliceRef<'_, T>,
    ) -> ControlFlow<()> {
        self.mark_interned_slice_alive(interned)
    }
}
/// Runs a mark-and-sweep collection over all solver interned-type storages, freeing
/// interned values that are no longer reachable.
///
/// # Safety
///
/// This must not be called while there exist not-yet-recorded type values. In general,
/// if you hold a mutable reference to the database (and there is no other database),
/// calling this is safe — but you must keep holding that mutable reference for the
/// entire duration of the collection.
pub unsafe fn collect_ty_garbage() {
    let mut gc = intern::GarbageCollector::default();
    // Scalar interned storages.
    gc.add_storage::<super::consts::ConstInterned>();
    gc.add_storage::<super::consts::ValtreeInterned>();
    gc.add_storage::<PatternInterned>();
    gc.add_storage::<super::opaques::ExternalConstraintsInterned>();
    gc.add_storage::<super::predicate::PredicateInterned>();
    gc.add_storage::<super::region::RegionInterned>();
    gc.add_storage::<super::ty::TyInterned>();
    // Slice interned storages.
    gc.add_slice_storage::<super::predicate::ClausesStorage>();
    gc.add_slice_storage::<super::generic_arg::GenericArgsStorage>();
    gc.add_slice_storage::<BoundVarKindsStorage>();
    gc.add_slice_storage::<VariancesOfStorage>();
    gc.add_slice_storage::<CanonicalVarsStorage>();
    gc.add_slice_storage::<PatListStorage>();
    gc.add_slice_storage::<super::opaques::PredefinedOpaquesStorage>();
    gc.add_slice_storage::<super::opaques::SolverDefIdsStorage>();
    gc.add_slice_storage::<super::predicate::BoundExistentialPredicatesStorage>();
    gc.add_slice_storage::<super::region::RegionAssumptionsStorage>();
    gc.add_slice_storage::<super::ty::TysStorage>();
    // SAFETY:
    // - By our precondition, there are no unrecorded types.
    // - We implement `GcInternedVisit` and `GcInternedSliceVisit` correctly for all types.
    // - We added all storages (FIXME: it's too easy to forget to add a new storage here).
    unsafe { gc.collect() };
}
/// Implements `::intern::GcInternedVisit` for each listed storage type by forwarding to
/// its `GenericTypeVisitable` implementation (the collector acts as the visitor).
macro_rules! impl_gc_visit {
    ( $($t:ty),* $(,)? ) => {
        $(
            impl ::intern::GcInternedVisit for $t {
                #[inline]
                fn visit_with(&self, collector: &mut ::intern::GarbageCollector) {
                    self.generic_visit_with(collector);
                }
            }
        )*
    };
}
// Wire up GC marking for every scalar interned storage registered in
// `collect_ty_garbage` (plus the clauses header type).
impl_gc_visit!(
    super::consts::ConstInterned,
    super::consts::ValtreeInterned,
    PatternInterned,
    super::opaques::ExternalConstraintsInterned,
    super::predicate::PredicateInterned,
    super::region::RegionInterned,
    super::ty::TyInterned,
    super::predicate::ClausesCachedTypeInfo,
);
/// Implements `::intern::GcInternedSliceVisit` for each listed slice-storage type:
/// both the interned slice's header and its elements are walked with the collector
/// as the visitor, so everything they reference gets marked alive.
macro_rules! impl_gc_visit_slice {
    ( $($ty:ty),* $(,)? ) => {
        $(
            impl ::intern::GcInternedSliceVisit for $ty {
                #[inline]
                fn visit_header(header: &<Self as ::intern::SliceInternable>::Header, gc: &mut ::intern::GarbageCollector) {
                    header.generic_visit_with(gc);
                }
                #[inline]
                // Fix: the parameter was misleadingly named `header` (copy-paste from
                // `visit_header`); it is the element slice.
                fn visit_slice(slice: &[<Self as ::intern::SliceInternable>::SliceType], gc: &mut ::intern::GarbageCollector) {
                    slice.generic_visit_with(gc);
                }
            }
        )*
    };
}
// Wire up GC marking for every slice interned storage registered in
// `collect_ty_garbage` — this list must stay in sync with it.
impl_gc_visit_slice!(
    super::predicate::ClausesStorage,
    super::generic_arg::GenericArgsStorage,
    BoundVarKindsStorage,
    VariancesOfStorage,
    CanonicalVarsStorage,
    PatListStorage,
    super::opaques::PredefinedOpaquesStorage,
    super::opaques::SolverDefIdsStorage,
    super::predicate::BoundExistentialPredicatesStorage,
    super::region::RegionAssumptionsStorage,
    super::ty::TysStorage,
);

View file

@ -2,7 +2,6 @@
use std::any::type_name_of_val;
use rustc_type_ir::inherent::SliceLike;
use rustc_type_ir::{self as ty, ir_print::IrPrint};
use super::SolverDefId;

View file

@ -1,37 +1,65 @@
//! Things related to opaques in the next-trait-solver.
use intern::{Interned, InternedRef, impl_internable};
use macros::GenericTypeVisitable;
use rustc_ast_ir::try_visit;
use rustc_type_ir::inherent::SliceLike;
use super::{DbInterner, SolverDefId, Ty, interned_vec_db, interned_vec_nolifetime_salsa};
use crate::next_solver::{impl_foldable_for_interned_slice, interned_slice};
use super::{DbInterner, SolverDefId, Ty};
pub type OpaqueTypeKey<'db> = rustc_type_ir::OpaqueTypeKey<DbInterner<'db>>;
type PredefinedOpaque<'db> = (OpaqueTypeKey<'db>, Ty<'db>);
interned_vec_db!(PredefinedOpaques, PredefinedOpaque);
interned_slice!(
PredefinedOpaquesStorage,
PredefinedOpaques,
PredefinedOpaque<'db>,
PredefinedOpaque<'static>,
);
impl_foldable_for_interned_slice!(PredefinedOpaques);
pub type ExternalConstraintsData<'db> =
rustc_type_ir::solve::ExternalConstraintsData<DbInterner<'db>>;
interned_vec_nolifetime_salsa!(SolverDefIds, SolverDefId);
interned_slice!(SolverDefIdsStorage, SolverDefIds, SolverDefId, SolverDefId);
impl_foldable_for_interned_slice!(SolverDefIds);
#[salsa::interned(constructor = new_, debug, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct ExternalConstraints<'db> {
#[returns(ref)]
kind_: rustc_type_ir::solve::ExternalConstraintsData<DbInterner<'db>>,
interned: InternedRef<'db, ExternalConstraintsInterned>,
}
// Storage for interned external-constraints data. The `'static` erases the `'db`
// lifetime; `ExternalConstraints::new`/`inner` transmute it back and forth.
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
pub(super) struct ExternalConstraintsInterned(ExternalConstraintsData<'static>);
impl_internable!(gc; ExternalConstraintsInterned);

// Compile-time assertion that `ExternalConstraints` stays `Copy` (an interned pointer).
const _: () = {
    const fn is_copy<T: Copy>() {}
    is_copy::<ExternalConstraints<'static>>();
};
impl<'db> ExternalConstraints<'db> {
pub fn new(interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self {
ExternalConstraints::new_(interner.db(), data)
#[inline]
pub fn new(_interner: DbInterner<'db>, data: ExternalConstraintsData<'db>) -> Self {
let data = unsafe {
std::mem::transmute::<ExternalConstraintsData<'db>, ExternalConstraintsData<'static>>(
data,
)
};
Self { interned: Interned::new_gc(ExternalConstraintsInterned(data)) }
}
#[inline]
pub fn inner(&self) -> &ExternalConstraintsData<'db> {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<&ExternalConstraintsData<'static>, &ExternalConstraintsData<'db>>(
inner,
)
}
}
}
@ -43,6 +71,12 @@ impl<'db> std::ops::Deref for ExternalConstraints<'db> {
}
}
impl std::fmt::Debug for ExternalConstraints<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().fmt(f)
}
}
impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for ExternalConstraints<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,

View file

@ -2,20 +2,25 @@
use std::cmp::Ordering;
use macros::{TypeFoldable, TypeVisitable};
use intern::{
Interned, InternedRef, InternedSlice, InternedSliceRef, impl_internable, impl_slice_internable,
};
use macros::{GenericTypeVisitable, TypeFoldable, TypeVisitable};
use rustc_type_ir::{
self as ty, CollectAndApply, DebruijnIndex, EarlyBinder, FlagComputation, Flags,
PredicatePolarity, TypeFlags, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable,
TypeVisitable, Upcast, UpcastFrom, WithCachedTypeInfo,
self as ty, CollectAndApply, EarlyBinder, FlagComputation, Flags, GenericTypeVisitable,
PredicatePolarity, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, Upcast,
UpcastFrom, WithCachedTypeInfo,
elaborate::Elaboratable,
error::{ExpectedFound, TypeError},
inherent::{IntoKind, SliceLike},
};
use smallvec::SmallVec;
use crate::next_solver::{GenericArg, InternedWrapperNoDebug, TraitIdWrapper};
use crate::next_solver::{
GenericArg, TraitIdWrapper, impl_foldable_for_interned_slice, impl_stored_interned_slice,
interned_slice,
};
use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db};
use super::{Binder, BoundVarKinds, DbInterner, Region, Ty};
pub type BoundExistentialPredicate<'db> = Binder<'db, ExistentialPredicate<'db>>;
@ -68,7 +73,13 @@ fn stable_cmp_existential_predicate<'db>(
(ExistentialPredicate::AutoTrait(_), _) => Ordering::Greater,
}
}
interned_vec_db!(BoundExistentialPredicates, BoundExistentialPredicate);
interned_slice!(
BoundExistentialPredicatesStorage,
BoundExistentialPredicates,
BoundExistentialPredicate<'db>,
BoundExistentialPredicate<'static>,
);
impl_foldable_for_interned_slice!(BoundExistentialPredicates);
impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates<DbInterner<'db>>
for BoundExistentialPredicates<'db>
@ -82,7 +93,7 @@ impl<'db> rustc_type_ir::inherent::BoundExistentialPredicates<DbInterner<'db>>
) -> Option<
rustc_type_ir::Binder<DbInterner<'db>, rustc_type_ir::ExistentialTraitRef<DbInterner<'db>>>,
> {
self.inner()[0]
self[0]
.map_bound(|this| match this {
ExistentialPredicate::Trait(tr) => Some(tr),
_ => None,
@ -166,74 +177,50 @@ impl<'db> rustc_type_ir::relate::Relate<DbInterner<'db>> for BoundExistentialPre
},
);
CollectAndApply::collect_and_apply(v, |g| {
BoundExistentialPredicates::new_from_iter(interner, g.iter().cloned())
})
BoundExistentialPredicates::new_from_iter(interner, v)
}
}
#[salsa::interned(constructor = new_, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Predicate<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>>>,
interned: InternedRef<'db, PredicateInterned>,
}
impl<'db> std::fmt::Debug for Predicate<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().internee.fmt(f)
}
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
pub(super) struct PredicateInterned(WithCachedTypeInfo<Binder<'static, PredicateKind<'static>>>);
impl<'db> std::fmt::Debug
for InternedWrapperNoDebug<WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>>>
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "Binder<")?;
match self.0.internee.skip_binder() {
rustc_type_ir::PredicateKind::Clause(clause_kind) => {
write!(f, "{clause_kind:?}")
}
rustc_type_ir::PredicateKind::DynCompatible(trait_def_id) => {
write!(f, "the trait `{trait_def_id:?}` is dyn-compatible")
}
rustc_type_ir::PredicateKind::Subtype(subtype_predicate) => {
write!(f, "{subtype_predicate:?}")
}
rustc_type_ir::PredicateKind::Coerce(coerce_predicate) => {
write!(f, "{coerce_predicate:?}")
}
rustc_type_ir::PredicateKind::ConstEquate(c1, c2) => {
write!(f, "the constant `{c1:?}` equals `{c2:?}`")
}
rustc_type_ir::PredicateKind::Ambiguous => write!(f, "ambiguous"),
rustc_type_ir::PredicateKind::NormalizesTo(data) => write!(f, "{data:?}"),
rustc_type_ir::PredicateKind::AliasRelate(t1, t2, dir) => {
write!(f, "{t1:?} {dir:?} {t2:?}")
}
}?;
write!(f, ", [{:?}]>", self.0.internee.bound_vars())?;
Ok(())
}
}
impl_internable!(gc; PredicateInterned);
const _: () = {
const fn is_copy<T: Copy>() {}
is_copy::<Predicate<'static>>();
};
impl<'db> Predicate<'db> {
pub fn new(interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self {
pub fn new(_interner: DbInterner<'db>, kind: Binder<'db, PredicateKind<'db>>) -> Self {
let kind = unsafe {
std::mem::transmute::<
Binder<'db, PredicateKind<'db>>,
Binder<'static, PredicateKind<'static>>,
>(kind)
};
let flags = FlagComputation::for_predicate(kind);
let cached = WithCachedTypeInfo {
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
Predicate::new_(interner.db(), InternedWrapperNoDebug(cached))
Self { interned: Interned::new_gc(PredicateInterned(cached)) }
}
pub fn inner(&self) -> &WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Predicate<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<
&WithCachedTypeInfo<Binder<'static, PredicateKind<'static>>>,
&WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>>,
>(inner)
}
}
/// Flips the polarity of a Predicate.
@ -259,110 +246,135 @@ impl<'db> Predicate<'db> {
}
}
// FIXME: should make a "header" in interned_vec
#[derive(Debug, Clone)]
pub struct InternedClausesWrapper<'db>(SmallVec<[Clause<'db>; 2]>, TypeFlags, DebruijnIndex);
impl<'db> PartialEq for InternedClausesWrapper<'db> {
fn eq(&self, other: &Self) -> bool {
self.0.eq(&other.0)
impl<'db> std::fmt::Debug for Predicate<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.kind().fmt(f)
}
}
impl<'db> Eq for InternedClausesWrapper<'db> {}
#[derive(Clone, Copy, PartialEq, Eq, Hash, GenericTypeVisitable)]
pub struct ClausesCachedTypeInfo(WithCachedTypeInfo<()>);
impl<'db> std::hash::Hash for InternedClausesWrapper<'db> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state)
}
}
impl_slice_internable!(gc; ClausesStorage, ClausesCachedTypeInfo, Clause<'static>);
impl_stored_interned_slice!(ClausesStorage, Clauses, StoredClauses);
#[salsa::interned(constructor = new_, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Clauses<'db> {
#[returns(ref)]
inner_: InternedClausesWrapper<'db>,
}
impl<'db> Clauses<'db> {
pub fn new_from_iter(
interner: DbInterner<'db>,
data: impl IntoIterator<Item = Clause<'db>>,
) -> Self {
let clauses: SmallVec<_> = data.into_iter().collect();
let flags = FlagComputation::<DbInterner<'db>>::for_clauses(&clauses);
let wrapper = InternedClausesWrapper(clauses, flags.flags, flags.outer_exclusive_binder);
Clauses::new_(interner.db(), wrapper)
}
pub fn inner(&self) -> &InternedClausesWrapper<'db> {
crate::with_attached_db(|db| {
let inner = self.inner_(db);
// SAFETY: The caller already has access to a `Clauses<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
}
interned: InternedSliceRef<'db, ClausesStorage>,
}
impl<'db> std::fmt::Debug for Clauses<'db> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.inner().0.fmt(f)
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.as_slice().fmt(fmt)
}
}
impl<'db> rustc_type_ir::inherent::Clauses<DbInterner<'db>> for Clauses<'db> {}
impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> {
type Item = Clause<'db>;
type IntoIter = <smallvec::SmallVec<[Clause<'db>; 2]> as IntoIterator>::IntoIter;
fn iter(self) -> Self::IntoIter {
self.inner().0.clone().into_iter()
impl<'db> Clauses<'db> {
#[inline]
pub fn empty(_interner: DbInterner<'db>) -> Self {
// FIXME: Get from a static.
Self::new_from_slice(&[])
}
fn as_slice(&self) -> &[Self::Item] {
self.inner().0.as_slice()
#[inline]
pub fn new_from_slice(slice: &[Clause<'db>]) -> Self {
let slice = unsafe { ::std::mem::transmute::<&[Clause<'db>], &[Clause<'static>]>(slice) };
let flags = FlagComputation::<DbInterner<'db>>::for_clauses(slice);
let flags = ClausesCachedTypeInfo(WithCachedTypeInfo {
internee: (),
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
});
Self { interned: InternedSlice::from_header_and_slice(flags, slice) }
}
#[inline]
pub fn new_from_iter<I, T>(_interner: DbInterner<'db>, args: I) -> T::Output
where
I: IntoIterator<Item = T>,
T: CollectAndApply<Clause<'db>, Self>,
{
CollectAndApply::collect_and_apply(args.into_iter(), Self::new_from_slice)
}
#[inline]
pub fn as_slice(self) -> &'db [Clause<'db>] {
let slice = &self.interned.get().slice;
unsafe { ::std::mem::transmute::<&[Clause<'static>], &[Clause<'db>]>(slice) }
}
#[inline]
pub fn iter(self) -> ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>> {
self.as_slice().iter().copied()
}
#[inline]
pub fn len(self) -> usize {
self.as_slice().len()
}
#[inline]
pub fn is_empty(self) -> bool {
self.as_slice().is_empty()
}
}
impl<'db> IntoIterator for Clauses<'db> {
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>;
type Item = Clause<'db>;
type IntoIter = <Self as rustc_type_ir::inherent::SliceLike>::IntoIter;
#[inline]
fn into_iter(self) -> Self::IntoIter {
rustc_type_ir::inherent::SliceLike::iter(self)
self.iter()
}
}
impl<'db> std::ops::Deref for Clauses<'db> {
type Target = [Clause<'db>];
#[inline]
fn deref(&self) -> &Self::Target {
(*self).as_slice()
}
}
impl<'db> rustc_type_ir::inherent::SliceLike for Clauses<'db> {
type Item = Clause<'db>;
type IntoIter = ::std::iter::Copied<::std::slice::Iter<'db, Clause<'db>>>;
#[inline]
fn iter(self) -> Self::IntoIter {
self.iter()
}
#[inline]
fn as_slice(&self) -> &[Self::Item] {
(*self).as_slice()
}
}
impl<'db> Default for Clauses<'db> {
#[inline]
fn default() -> Self {
Clauses::new_from_iter(DbInterner::conjure(), [])
Clauses::empty(DbInterner::conjure())
}
}
impl<'db> rustc_type_ir::inherent::Clauses<DbInterner<'db>> for Clauses<'db> {}
impl<'db> rustc_type_ir::TypeSuperFoldable<DbInterner<'db>> for Clauses<'db> {
fn try_super_fold_with<F: rustc_type_ir::FallibleTypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Result<Self, F::Error> {
let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len());
for c in self {
clauses.push(c.try_fold_with(folder)?);
}
Ok(Clauses::new_from_iter(folder.cx(), clauses))
Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.try_fold_with(folder)))
}
fn super_fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(
self,
folder: &mut F,
) -> Self {
let mut clauses: SmallVec<[_; 2]> = SmallVec::with_capacity(self.inner().0.len());
for c in self {
clauses.push(c.fold_with(folder));
}
Clauses::new_from_iter(folder.cx(), clauses)
Clauses::new_from_iter(folder.cx(), self.iter().map(|clause| clause.fold_with(folder)))
}
}
@ -371,15 +383,10 @@ impl<'db> rustc_type_ir::TypeFoldable<DbInterner<'db>> for Clauses<'db> {
self,
folder: &mut F,
) -> Result<Self, F::Error> {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> =
self.iter().map(|v| v.try_fold_with(folder)).collect::<Result<_, _>>()?;
Ok(Clauses::new_from_iter(folder.cx(), inner))
self.try_super_fold_with(folder)
}
fn fold_with<F: rustc_type_ir::TypeFolder<DbInterner<'db>>>(self, folder: &mut F) -> Self {
use rustc_type_ir::inherent::SliceLike as _;
let inner: smallvec::SmallVec<[_; 2]> = self.iter().map(|v| v.fold_with(folder)).collect();
Clauses::new_from_iter(folder.cx(), inner)
self.super_fold_with(folder)
}
}
@ -389,19 +396,28 @@ impl<'db> rustc_type_ir::TypeVisitable<DbInterner<'db>> for Clauses<'db> {
visitor: &mut V,
) -> V::Result {
use rustc_ast_ir::visit::VisitorResult;
use rustc_type_ir::inherent::SliceLike as _;
rustc_ast_ir::walk_visitable_list!(visitor, self.as_slice().iter());
rustc_ast_ir::walk_visitable_list!(visitor, self.iter());
V::Result::output()
}
}
impl<'db, V: super::WorldExposer> rustc_type_ir::GenericTypeVisitable<V> for Clauses<'db> {
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned_slice(self.interned).is_continue() {
self.as_slice().iter().for_each(|it| it.generic_visit_with(visitor));
}
}
}
impl<'db> rustc_type_ir::Flags for Clauses<'db> {
#[inline]
fn flags(&self) -> rustc_type_ir::TypeFlags {
self.inner().1
self.interned.header.header.0.flags
}
#[inline]
fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex {
self.inner().2
self.interned.header.header.0.outer_exclusive_binder
}
}
@ -414,18 +430,20 @@ impl<'db> rustc_type_ir::TypeSuperVisitable<DbInterner<'db>> for Clauses<'db> {
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] // TODO implement Debug by hand
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, GenericTypeVisitable)] // TODO implement Debug by hand
pub struct Clause<'db>(pub(crate) Predicate<'db>);
// We could cram the reveal into the clauses like rustc does, probably
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable)]
#[derive(
Copy, Clone, Debug, Hash, PartialEq, Eq, TypeVisitable, TypeFoldable, GenericTypeVisitable,
)]
pub struct ParamEnv<'db> {
pub(crate) clauses: Clauses<'db>,
}
impl<'db> ParamEnv<'db> {
pub fn empty() -> Self {
ParamEnv { clauses: Clauses::new_from_iter(DbInterner::conjure(), []) }
ParamEnv { clauses: Clauses::empty(DbInterner::conjure()) }
}
pub fn clauses(self) -> Clauses<'db> {
@ -460,6 +478,14 @@ impl<'db> TypeVisitable<DbInterner<'db>> for Predicate<'db> {
}
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Predicate<'db> {
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned(self.interned).is_continue() {
self.kind().generic_visit_with(visitor);
}
}
}
impl<'db> TypeSuperVisitable<DbInterner<'db>> for Predicate<'db> {
fn super_visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,

View file

@ -1,47 +1,51 @@
//! Things related to regions.
use hir_def::LifetimeParamId;
use intern::Symbol;
use intern::{Interned, InternedRef, Symbol, impl_internable};
use macros::GenericTypeVisitable;
use rustc_type_ir::{
BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, INNERMOST, RegionVid, TypeFlags,
TypeFoldable, TypeVisitable,
BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, GenericTypeVisitable, INNERMOST, RegionVid,
TypeFlags, TypeFoldable, TypeVisitable,
inherent::{IntoKind, PlaceholderLike, SliceLike},
relate::Relate,
};
use crate::next_solver::{GenericArg, OutlivesPredicate};
use crate::next_solver::{
GenericArg, OutlivesPredicate, impl_foldable_for_interned_slice, interned_slice,
};
use super::{
ErrorGuaranteed, SolverDefId, interned_vec_db,
ErrorGuaranteed, SolverDefId,
interner::{BoundVarKind, DbInterner, Placeholder},
};
pub type RegionKind<'db> = rustc_type_ir::RegionKind<DbInterner<'db>>;
#[salsa::interned(constructor = new_, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Region<'db> {
#[returns(ref)]
kind_: RegionKind<'db>,
pub(super) interned: InternedRef<'db, RegionInterned>,
}
impl std::fmt::Debug for Region<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.kind().fmt(f)
}
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
#[repr(align(4))] // Required for `GenericArg` bit-tagging.
pub(super) struct RegionInterned(RegionKind<'static>);
impl_internable!(gc; RegionInterned);
const _: () = {
const fn is_copy<T: Copy>() {}
is_copy::<Region<'static>>();
};
impl<'db> Region<'db> {
pub fn new(interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self {
Region::new_(interner.db(), kind)
pub fn new(_interner: DbInterner<'db>, kind: RegionKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<RegionKind<'db>, RegionKind<'static>>(kind) };
Self { interned: Interned::new_gc(RegionInterned(kind)) }
}
pub fn inner(&self) -> &RegionKind<'db> {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: The caller already has access to a `Region<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute::<&RegionKind<'_>, &RegionKind<'db>>(inner) }
})
let inner = &self.interned.0;
unsafe { std::mem::transmute::<&RegionKind<'static>, &RegionKind<'db>>(inner) }
}
pub fn new_early_param(
@ -256,6 +260,12 @@ impl BoundRegionKind {
}
}
impl std::fmt::Debug for Region<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.kind().fmt(f)
}
}
impl<'db> IntoKind for Region<'db> {
type Kind = RegionKind<'db>;
@ -377,6 +387,20 @@ impl<'db> PlaceholderLike<DbInterner<'db>> for PlaceholderRegion {
}
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Region<'db> {
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned(self.interned).is_continue() {
self.kind().generic_visit_with(visitor);
}
}
}
type GenericArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>;
interned_vec_db!(RegionAssumptions, GenericArgOutlivesPredicate);
interned_slice!(
RegionAssumptionsStorage,
RegionAssumptions,
GenericArgOutlivesPredicate<'db>,
GenericArgOutlivesPredicate<'static>,
);
impl_foldable_for_interned_slice!(RegionAssumptions);

View file

@ -5,7 +5,7 @@ use rustc_next_trait_solver::delegate::SolverDelegate;
use rustc_type_ir::{
AliasTyKind, GenericArgKind, InferCtxtLike, Interner, PredicatePolarity, TypeFlags,
TypeVisitableExt,
inherent::{IntoKind, SliceLike, Term as _, Ty as _},
inherent::{IntoKind, Term as _, Ty as _},
lang_items::SolverTraitLangItem,
solve::{Certainty, NoSolution},
};

View file

@ -7,13 +7,15 @@ use hir_def::{
hir::generics::{TypeOrConstParamData, TypeParamProvenance},
};
use hir_def::{TraitId, type_ref::Rawness};
use intern::{Interned, InternedRef, impl_internable};
use macros::GenericTypeVisitable;
use rustc_abi::{Float, Integer, Size};
use rustc_ast_ir::{Mutability, try_visit, visit::VisitorResult};
use rustc_type_ir::{
AliasTyKind, BoundVar, BoundVarIndexKind, ClosureKind, CoroutineArgs, CoroutineArgsParts,
DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, InferTy, IntTy, IntVid, Interner,
TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt,
TypeVisitor, UintTy, Upcast, WithCachedTypeInfo,
DebruijnIndex, FlagComputation, Flags, FloatTy, FloatVid, GenericTypeVisitable, InferTy, IntTy,
IntVid, Interner, TyVid, TypeFoldable, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable,
TypeVisitableExt, TypeVisitor, UintTy, Upcast, WithCachedTypeInfo,
inherent::{
AdtDef as _, BoundExistentialPredicates, BoundVarLike, Const as _, GenericArgs as _,
IntoKind, ParamLike, PlaceholderLike, Safety as _, SliceLike, Ty as _,
@ -28,15 +30,16 @@ use crate::{
lower::GenericPredicates,
next_solver::{
AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
CoroutineIdWrapper, FnSig, GenericArgKind, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
abi::Safety,
interner::InternedWrapperNoDebug,
impl_foldable_for_interned_slice, impl_stored_interned, impl_stored_interned_slice,
interned_slice,
util::{CoroutineArgsExt, IntegerTypeExt},
},
};
use super::{
BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId, interned_vec_db,
BoundVarKind, DbInterner, GenericArgs, Placeholder, SolverDefId,
util::{FloatExt, IntegerExt},
};
@ -44,35 +47,45 @@ pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType<SolverDefId
pub type TyKind<'db> = rustc_type_ir::TyKind<DbInterner<'db>>;
pub type FnHeader<'db> = rustc_type_ir::FnHeader<DbInterner<'db>>;
#[salsa::interned(constructor = new_, unsafe(non_update_types))]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Ty<'db> {
#[returns(ref)]
kind_: InternedWrapperNoDebug<WithCachedTypeInfo<TyKind<'db>>>,
pub(super) interned: InternedRef<'db, TyInterned>,
}
#[derive(PartialEq, Eq, Hash, GenericTypeVisitable)]
#[repr(align(4))] // Required for `GenericArg` bit-tagging.
pub(super) struct TyInterned(WithCachedTypeInfo<TyKind<'static>>);
impl_internable!(gc; TyInterned);
impl_stored_interned!(TyInterned, Ty, StoredTy);
const _: () = {
const fn is_copy<T: Copy>() {}
is_copy::<Ty<'static>>();
};
impl<'db> Ty<'db> {
pub fn new(interner: DbInterner<'db>, kind: TyKind<'db>) -> Self {
#[inline]
pub fn new(_interner: DbInterner<'db>, kind: TyKind<'db>) -> Self {
let kind = unsafe { std::mem::transmute::<TyKind<'db>, TyKind<'static>>(kind) };
let flags = FlagComputation::for_kind(&kind);
let cached = WithCachedTypeInfo {
internee: kind,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
Ty::new_(interner.db(), InternedWrapperNoDebug(cached))
Self { interned: Interned::new_gc(TyInterned(cached)) }
}
#[inline]
pub fn inner(&self) -> &WithCachedTypeInfo<TyKind<'db>> {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
let inner = &self.interned.0;
unsafe {
std::mem::transmute::<
&WithCachedTypeInfo<TyKind<'static>>,
&WithCachedTypeInfo<TyKind<'db>>,
>(inner)
}
}
pub fn new_adt(interner: DbInterner<'db>, adt_id: AdtId, args: GenericArgs<'db>) -> Self {
@ -383,7 +396,7 @@ impl<'db> Ty<'db> {
#[inline]
pub fn is_unit(self) -> bool {
matches!(self.kind(), TyKind::Tuple(tys) if tys.inner().is_empty())
matches!(self.kind(), TyKind::Tuple(tys) if tys.is_empty())
}
#[inline]
@ -661,12 +674,9 @@ impl<'db> Ty<'db> {
// This is only used by type walking.
// Parameters will be walked outside, and projection predicate is not used.
// So just provide the Future trait.
let impl_bound = TraitRef::new(
interner,
future_trait.into(),
GenericArgs::new_from_iter(interner, []),
)
.upcast(interner);
let impl_bound =
TraitRef::new(interner, future_trait.into(), GenericArgs::empty(interner))
.upcast(interner);
Some(vec![impl_bound])
} else {
None
@ -730,20 +740,23 @@ impl<'db> std::fmt::Debug for Ty<'db> {
}
}
impl<'db> std::fmt::Debug for InternedWrapperNoDebug<WithCachedTypeInfo<TyKind<'db>>> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.internee.fmt(f)
}
}
impl<'db> IntoKind for Ty<'db> {
type Kind = TyKind<'db>;
#[inline]
fn kind(self) -> Self::Kind {
self.inner().internee
}
}
impl<'db, V: super::WorldExposer> GenericTypeVisitable<V> for Ty<'db> {
fn generic_visit_with(&self, visitor: &mut V) {
if visitor.on_interned(self.interned).is_continue() {
self.kind().generic_visit_with(visitor);
}
}
}
impl<'db> TypeVisitable<DbInterner<'db>> for Ty<'db> {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,
@ -1068,9 +1081,9 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
// to unnecessary overflows in async code. See the issue:
// <https://github.com/rust-lang/rust/issues/145151>.
let coroutine_args = interner.mk_args_from_iter(coroutine_args.iter().map(|arg| {
match arg {
GenericArg::Ty(_) | GenericArg::Const(_) => arg,
GenericArg::Lifetime(_) => {
match arg.kind() {
GenericArgKind::Type(_) | GenericArgKind::Const(_) => arg,
GenericArgKind::Lifetime(_) => {
crate::next_solver::Region::new(interner, rustc_type_ir::RegionKind::ReErased)
.into()
}
@ -1254,10 +1267,13 @@ impl<'db> rustc_type_ir::inherent::Ty<DbInterner<'db>> for Ty<'db> {
}
}
interned_vec_db!(Tys, Ty);
interned_slice!(TysStorage, Tys, Ty<'db>, Ty<'static>);
impl_foldable_for_interned_slice!(Tys);
impl_stored_interned_slice!(TysStorage, Tys, StoredTys);
impl<'db> Tys<'db> {
pub fn inputs(&self) -> &[Ty<'db>] {
#[inline]
pub fn inputs(self) -> &'db [Ty<'db>] {
self.as_slice().split_last().unwrap().1
}
}
@ -1323,6 +1339,10 @@ pub enum BoundTyKind {
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct ErrorGuaranteed;
impl<V> GenericTypeVisitable<V> for ErrorGuaranteed {
fn generic_visit_with(&self, _visitor: &mut V) {}
}
impl<'db> TypeVisitable<DbInterner<'db>> for ErrorGuaranteed {
fn visit_with<V: rustc_type_ir::TypeVisitor<DbInterner<'db>>>(
&self,

View file

@ -13,7 +13,7 @@ use crate::{
db::{HirDatabase, InternedOpaqueTyId},
lower::{ImplTraitIdx, ImplTraits},
next_solver::{
DbInterner, EarlyBinder, ErrorGuaranteed, SolverDefId, Ty, TypingMode,
DbInterner, ErrorGuaranteed, SolverDefId, StoredEarlyBinder, StoredTy, Ty, TypingMode,
infer::{DbInternerInferExt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -72,10 +72,10 @@ pub(crate) fn opaque_types_defined_by(
// FIXME: Collect opaques from `#[define_opaque]`.
fn extend_with_opaques<'db>(
db: &'db dyn HirDatabase,
opaques: &Option<Box<EarlyBinder<'db, ImplTraits<'db>>>>,
mut make_impl_trait: impl FnMut(ImplTraitIdx<'db>) -> ImplTraitId<'db>,
fn extend_with_opaques(
db: &dyn HirDatabase,
opaques: &Option<Box<StoredEarlyBinder<ImplTraits>>>,
mut make_impl_trait: impl FnMut(ImplTraitIdx) -> ImplTraitId,
result: &mut Vec<SolverDefId>,
) {
if let Some(opaques) = opaques {
@ -89,25 +89,25 @@ pub(crate) fn opaque_types_defined_by(
// These are firewall queries to prevent drawing dependencies between infers:
#[salsa::tracked(returns(ref), unsafe(non_update_types))]
#[salsa::tracked(returns(ref))]
pub(crate) fn rpit_hidden_types<'db>(
db: &'db dyn HirDatabase,
function: FunctionId,
) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
) -> ArenaMap<ImplTraitIdx, StoredEarlyBinder<StoredTy>> {
let infer = InferenceResult::for_body(db, function.into());
let mut result = ArenaMap::new();
for (opaque, hidden_type) in infer.return_position_impl_trait_types(db) {
result.insert(opaque, EarlyBinder::bind(hidden_type));
result.insert(opaque, StoredEarlyBinder::bind(hidden_type.store()));
}
result.shrink_to_fit();
result
}
#[salsa::tracked(returns(ref), unsafe(non_update_types))]
#[salsa::tracked(returns(ref))]
pub(crate) fn tait_hidden_types<'db>(
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
) -> ArenaMap<ImplTraitIdx, StoredEarlyBinder<StoredTy>> {
// Call this first, to not perform redundant work if there are no TAITs.
let Some(taits_count) = ImplTraits::type_alias_impl_traits(db, type_alias)
.as_deref()
@ -129,7 +129,7 @@ pub(crate) fn tait_hidden_types<'db>(
let mut result = ArenaMap::with_capacity(taits_count);
for defining_body in defining_bodies {
let infer = InferenceResult::for_body(db, defining_body);
for (&opaque, &hidden_type) in &infer.type_of_opaque {
for (&opaque, hidden_type) in &infer.type_of_opaque {
let ImplTraitId::TypeAliasImplTrait(opaque_owner, opaque_idx) = opaque.loc(db) else {
continue;
};
@ -138,13 +138,18 @@ pub(crate) fn tait_hidden_types<'db>(
}
// In the presence of errors, we attempt to create a unified type from all
// types. rustc doesn't do that, but this should improve the experience.
let hidden_type = infcx.insert_type_vars(hidden_type);
let hidden_type = infcx.insert_type_vars(hidden_type.as_ref());
match result.entry(opaque_idx) {
la_arena::Entry::Vacant(entry) => {
entry.insert(EarlyBinder::bind(hidden_type));
entry.insert(StoredEarlyBinder::bind(hidden_type.store()));
}
la_arena::Entry::Occupied(entry) => {
_ = ocx.eq(&cause, param_env, entry.get().instantiate_identity(), hidden_type);
_ = ocx.eq(
&cause,
param_env,
entry.get().get().instantiate_identity(),
hidden_type,
);
}
}
}
@ -157,12 +162,15 @@ pub(crate) fn tait_hidden_types<'db>(
let idx = la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(idx as u32));
match result.entry(idx) {
la_arena::Entry::Vacant(entry) => {
entry.insert(EarlyBinder::bind(Ty::new_error(interner, ErrorGuaranteed)));
entry.insert(StoredEarlyBinder::bind(
Ty::new_error(interner, ErrorGuaranteed).store(),
));
}
la_arena::Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().map_bound(|hidden_type| {
infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner)
});
let hidden_type = entry.get().get().skip_binder();
let hidden_type =
infcx.resolve_vars_if_possible(hidden_type).replace_infer_with_error(interner);
*entry.get_mut() = StoredEarlyBinder::bind(hidden_type.store());
}
}
}

View file

@ -2,7 +2,6 @@
use hir_def::{HasModule, ImplId, nameres::crate_def_map};
use intern::sym;
use rustc_type_ir::inherent::SliceLike;
use tracing::debug;
use crate::{

View file

@ -149,9 +149,10 @@ fn check_impl(
let (body, body_source_map) = db.body_with_source_map(def);
let inference_result = InferenceResult::for_body(&db, def);
for (pat, mut ty) in inference_result.type_of_pat.iter() {
for (pat, ty) in inference_result.type_of_pat.iter() {
let mut ty = ty.as_ref();
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
ty = inference_result.type_of_binding[id].as_ref();
}
let node = match pat_node(&body_source_map, pat, &db) {
Some(value) => value,
@ -169,6 +170,7 @@ fn check_impl(
}
for (expr, ty) in inference_result.type_of_expr.iter() {
let ty = ty.as_ref();
let node = match expr_node(&body_source_map, expr, &db) {
Some(value) => value,
None => continue,
@ -209,8 +211,8 @@ fn check_impl(
let range = node.as_ref().original_file_range_rooted(&db);
let actual = format!(
"expected {}, got {}",
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target)
mismatch.expected.as_ref().display_test(&db, display_target),
mismatch.actual.as_ref().display_test(&db, display_target)
);
match mismatches.remove(&range) {
Some(annotation) => assert_eq!(actual, annotation),
@ -318,20 +320,20 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
crate::attach_db(&db, || {
let mut buf = String::new();
let mut infer_def = |inference_result: &InferenceResult<'_>,
let mut infer_def = |inference_result: &InferenceResult,
body: Arc<Body>,
body_source_map: Arc<BodySourceMap>,
krate: Crate| {
let display_target = DisplayTarget::from_crate(&db, krate);
let mut types: Vec<(InFile<SyntaxNode>, &Ty<'_>)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch<'_>)> = Vec::new();
let mut types: Vec<(InFile<SyntaxNode>, Ty<'_>)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
if let Some(self_param) = body.self_param {
let ty = &inference_result.type_of_binding[self_param];
if let Some(syntax_ptr) = body_source_map.self_param_syntax() {
let root = db.parse_or_expand(syntax_ptr.file_id);
let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone());
types.push((node, ty));
types.push((node, ty.as_ref()));
}
}
@ -346,7 +348,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
types.push((node.clone(), ty.as_ref()));
if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
mismatches.push((node, mismatch));
}
@ -360,7 +362,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
types.push((node.clone(), ty.as_ref()));
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
mismatches.push((node, mismatch));
}
@ -401,8 +403,8 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
"{}{:?}: expected {}, got {}\n",
macro_prefix,
range,
mismatch.expected.display_test(&db, display_target),
mismatch.actual.display_test(&db, display_target),
mismatch.expected.as_ref().display_test(&db, display_target),
mismatch.actual.as_ref().display_test(&db, display_target),
);
}
}

View file

@ -74,6 +74,7 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
let place = capture.display_place(closure.0, db);
let capture_ty = capture
.ty
.get()
.skip_binder()
.display_test(db, DisplayTarget::from_crate(db, module.krate(db)))
.to_string();

View file

@ -28,7 +28,7 @@ fn foo() -> i32 {
}
});
},
&[("InferenceResult < 'db >::for_body_", 1)],
&[("InferenceResult::for_body_", 1)],
expect_test::expect![[r#"
[
"crate_local_def_map",
@ -36,17 +36,17 @@ fn foo() -> i32 {
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"InferenceResult < 'db >::for_body_",
"InferenceResult::for_body_",
"function_signature_shim",
"function_signature_with_source_map_shim",
"AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"trait_environment_query",
"lang_items",
"crate_lang_items",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
]
"#]],
@ -72,7 +72,7 @@ fn foo() -> i32 {
}
});
},
&[("InferenceResult < 'db >::for_body_", 0)],
&[("InferenceResult::for_body_", 0)],
expect_test::expect![[r#"
[
"parse_shim",
@ -115,7 +115,7 @@ fn baz() -> i32 {
}
});
},
&[("InferenceResult < 'db >::for_body_", 3)],
&[("InferenceResult::for_body_", 3)],
expect_test::expect![[r#"
[
"crate_local_def_map",
@ -123,37 +123,37 @@ fn baz() -> i32 {
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
"InferenceResult < 'db >::for_body_",
"InferenceResult::for_body_",
"function_signature_shim",
"function_signature_with_source_map_shim",
"AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"trait_environment_query",
"lang_items",
"crate_lang_items",
"AttrFlags::query_",
"AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
"InferenceResult < 'db >::for_body_",
"InferenceResult::for_body_",
"function_signature_shim",
"function_signature_with_source_map_shim",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"trait_environment_query",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
"InferenceResult < 'db >::for_body_",
"InferenceResult::for_body_",
"function_signature_shim",
"function_signature_with_source_map_shim",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"trait_environment_query",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
]
"#]],
@ -184,7 +184,7 @@ fn baz() -> i32 {
}
});
},
&[("InferenceResult < 'db >::for_body_", 1)],
&[("InferenceResult::for_body_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@ -202,7 +202,7 @@ fn baz() -> i32 {
"function_signature_shim",
"body_with_source_map_shim",
"body_shim",
"InferenceResult < 'db >::for_body_",
"InferenceResult::for_body_",
"expr_scopes_shim",
"function_signature_with_source_map_shim",
"function_signature_shim",
@ -502,7 +502,7 @@ impl SomeStruct {
"crate_local_def_map",
"TraitImpls::for_crate_",
"AttrFlags::query_",
"impl_trait_with_diagnostics_shim",
"impl_trait_with_diagnostics_query",
"impl_signature_shim",
"impl_signature_with_source_map_shim",
"lang_items",
@ -512,7 +512,7 @@ impl SomeStruct {
"AttrFlags::query_",
"AttrFlags::query_",
"AttrFlags::query_",
"impl_self_ty_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_query",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
]
@ -574,7 +574,7 @@ fn main() {
"body_with_source_map_shim",
"AttrFlags::query_",
"ImplItems::of_",
"InferenceResult < 'db >::for_body_",
"InferenceResult::for_body_",
"trait_signature_shim",
"trait_signature_with_source_map_shim",
"AttrFlags::query_",
@ -583,36 +583,36 @@ fn main() {
"AttrFlags::query_",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
"trait_environment_query",
"lang_items",
"crate_lang_items",
"AttrFlags::query_",
"AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"InferenceResult < 'db >::for_body_",
"GenericPredicates::query_with_diagnostics_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"InferenceResult::for_body_",
"function_signature_shim",
"function_signature_with_source_map_shim",
"trait_environment_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"trait_environment_query",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"value_ty_shim",
"GenericPredicates::query_with_diagnostics_",
"value_ty_query",
"VariantFields::firewall_",
"VariantFields::query_",
"InherentImpls::for_crate_",
"impl_signature_shim",
"impl_signature_with_source_map_shim",
"callable_item_signature_shim",
"callable_item_signature_query",
"TraitImpls::for_crate_and_deps_",
"TraitImpls::for_crate_",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"impl_trait_with_diagnostics_query",
"impl_self_ty_with_diagnostics_query",
"GenericPredicates::query_with_diagnostics_",
]
"#]],
);
@ -671,7 +671,7 @@ fn main() {
"AttrFlags::query_",
"body_shim",
"ImplItems::of_",
"InferenceResult < 'db >::for_body_",
"InferenceResult::for_body_",
"AttrFlags::query_",
"trait_signature_with_source_map_shim",
"AttrFlags::query_",
@ -683,25 +683,25 @@ fn main() {
"AttrFlags::query_",
"AttrFlags::query_",
"AttrFlags::query_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"InferenceResult < 'db >::for_body_",
"GenericPredicates::query_with_diagnostics_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"InferenceResult::for_body_",
"function_signature_with_source_map_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"ImplTraits < 'db >::return_type_impl_traits_",
"GenericPredicates::query_with_diagnostics_",
"ImplTraits::return_type_impl_traits_",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"GenericPredicates::query_with_diagnostics_",
"VariantFields::query_",
"InherentImpls::for_crate_",
"impl_signature_with_source_map_shim",
"impl_signature_shim",
"callable_item_signature_shim",
"callable_item_signature_query",
"TraitImpls::for_crate_",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
"GenericPredicates < 'db >::query_with_diagnostics_",
"impl_trait_with_diagnostics_query",
"impl_self_ty_with_diagnostics_query",
"GenericPredicates::query_with_diagnostics_",
]
"#]],
);

View file

@ -21,8 +21,8 @@ use rustc_type_ir::{
use crate::{
db::HirDatabase,
next_solver::{
Canonical, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, SolverContext, Span, Ty,
TyKind,
Canonical, DbInterner, GenericArgs, Goal, ParamEnv, Predicate, SolverContext, Span,
StoredClauses, Ty, TyKind,
infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@ -35,6 +35,31 @@ pub struct ParamEnvAndCrate<'db> {
pub krate: Crate,
}
impl<'db> ParamEnvAndCrate<'db> {
#[inline]
pub fn store(self) -> StoredParamEnvAndCrate {
StoredParamEnvAndCrate { param_env: self.param_env.clauses.store(), krate: self.krate }
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StoredParamEnvAndCrate {
param_env: StoredClauses,
pub krate: Crate,
}
impl StoredParamEnvAndCrate {
#[inline]
pub fn param_env(&self) -> ParamEnv<'_> {
ParamEnv { clauses: self.param_env.as_ref() }
}
#[inline]
pub fn as_ref(&self) -> ParamEnvAndCrate<'_> {
ParamEnvAndCrate { param_env: self.param_env(), krate: self.krate }
}
}
/// This should be used in `hir` only.
pub fn structurally_normalize_ty<'db>(
infcx: &InferCtxt<'db>,

View file

@ -17,7 +17,7 @@ use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId, signatures::Struct
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
Variance,
inherent::{AdtDef, IntoKind, SliceLike},
inherent::{AdtDef, IntoKind},
};
use stdx::never;
@ -25,12 +25,22 @@ use crate::{
db::HirDatabase,
generics::{Generics, generics},
next_solver::{
Const, ConstKind, DbInterner, ExistentialPredicate, GenericArg, GenericArgs, Region,
RegionKind, Term, Ty, TyKind, VariancesOf,
Const, ConstKind, DbInterner, ExistentialPredicate, GenericArgKind, GenericArgs, Region,
RegionKind, StoredVariancesOf, TermKind, Ty, TyKind, VariancesOf,
},
};
pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> VariancesOf<'_> {
variances_of_query(db, def).as_ref()
}
#[salsa::tracked(
returns(ref),
// cycle_fn = crate::variance::variances_of_cycle_fn,
// cycle_initial = crate::variance::variances_of_cycle_initial,
cycle_result = crate::variance::variances_of_cycle_initial,
)]
fn variances_of_query(db: &dyn HirDatabase, def: GenericDefId) -> StoredVariancesOf {
tracing::debug!("variances_of(def={:?})", def);
let interner = DbInterner::new_no_crate(db);
match def {
@ -39,19 +49,19 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Variances
if let AdtId::StructId(id) = adt {
let flags = &db.struct_signature(id).flags;
if flags.contains(StructFlags::IS_UNSAFE_CELL) {
return VariancesOf::new_from_iter(interner, [Variance::Invariant]);
return VariancesOf::new_from_iter(interner, [Variance::Invariant]).store();
} else if flags.contains(StructFlags::IS_PHANTOM_DATA) {
return VariancesOf::new_from_iter(interner, [Variance::Covariant]);
return VariancesOf::new_from_iter(interner, [Variance::Covariant]).store();
}
}
}
_ => return VariancesOf::new_from_iter(interner, []),
_ => return VariancesOf::empty(interner).store(),
}
let generics = generics(db, def);
let count = generics.len();
if count == 0 {
return VariancesOf::new_from_iter(interner, []);
return VariancesOf::empty(interner).store();
}
let mut variances =
Context { generics, variances: vec![Variance::Bivariant; count], db }.solve();
@ -71,7 +81,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Variances
}
}
VariancesOf::new_from_iter(interner, variances)
VariancesOf::new_from_iter(interner, variances).store()
}
// pub(crate) fn variances_of_cycle_fn(
@ -107,13 +117,13 @@ pub(crate) fn variances_of_cycle_initial(
db: &dyn HirDatabase,
_: salsa::Id,
def: GenericDefId,
) -> VariancesOf<'_> {
) -> StoredVariancesOf {
let interner = DbInterner::new_no_crate(db);
let generics = generics(db, def);
let count = generics.len();
// FIXME(next-solver): Returns `Invariance` and not `Bivariance` here, see the comment in the main query.
VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count))
VariancesOf::new_from_iter(interner, std::iter::repeat_n(Variance::Invariant, count)).store()
}
struct Context<'db> {
@ -131,7 +141,7 @@ impl<'db> Context<'db> {
let mut add_constraints_from_variant = |variant| {
for (_, field) in db.field_types(variant).iter() {
self.add_constraints_from_ty(
field.instantiate_identity(),
field.get().instantiate_identity(),
Variance::Covariant,
);
}
@ -233,11 +243,11 @@ impl<'db> Context<'db> {
}
ExistentialPredicate::Projection(projection) => {
self.add_constraints_from_invariant_args(projection.args);
match projection.term {
Term::Ty(ty) => {
match projection.term.kind() {
TermKind::Ty(ty) => {
self.add_constraints_from_ty(ty, Variance::Invariant)
}
Term::Const(konst) => self.add_constraints_from_const(konst),
TermKind::Const(konst) => self.add_constraints_from_const(konst),
}
}
ExistentialPredicate::AutoTrait(_) => {}
@ -267,12 +277,12 @@ impl<'db> Context<'db> {
fn add_constraints_from_invariant_args(&mut self, args: GenericArgs<'db>) {
for k in args.iter() {
match k {
GenericArg::Lifetime(lt) => {
match k.kind() {
GenericArgKind::Lifetime(lt) => {
self.add_constraints_from_region(lt, Variance::Invariant)
}
GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, Variance::Invariant),
GenericArg::Const(val) => self.add_constraints_from_const(val),
GenericArgKind::Type(ty) => self.add_constraints_from_ty(ty, Variance::Invariant),
GenericArgKind::Const(val) => self.add_constraints_from_const(val),
}
}
}
@ -291,10 +301,12 @@ impl<'db> Context<'db> {
let variances = self.db.variances_of(def_id);
for (k, v) in args.iter().zip(variances) {
match k {
GenericArg::Lifetime(lt) => self.add_constraints_from_region(lt, variance.xform(v)),
GenericArg::Ty(ty) => self.add_constraints_from_ty(ty, variance.xform(v)),
GenericArg::Const(val) => self.add_constraints_from_const(val),
match k.kind() {
GenericArgKind::Lifetime(lt) => {
self.add_constraints_from_region(lt, variance.xform(v))
}
GenericArgKind::Type(ty) => self.add_constraints_from_ty(ty, variance.xform(v)),
GenericArgKind::Const(val) => self.add_constraints_from_const(val),
}
}
}
@ -388,7 +400,7 @@ mod tests {
AdtId, GenericDefId, ModuleDefId, hir::generics::GenericParamDataRef, src::HasSource,
};
use itertools::Itertools;
use rustc_type_ir::{Variance, inherent::SliceLike};
use rustc_type_ir::Variance;
use stdx::format_to;
use syntax::{AstNode, ast::HasName};
use test_fixture::WithFixture;

View file

@ -617,7 +617,7 @@ impl<'db> AnyDiagnostic<'db> {
pub(crate) fn inference_diagnostic(
db: &'db dyn HirDatabase,
def: DefWithBodyId,
d: &InferenceDiagnostic<'db>,
d: &InferenceDiagnostic,
source_map: &hir_def::expr_store::BodySourceMap,
sig_map: &hir_def::expr_store::ExpressionStoreSourceMap,
) -> Option<AnyDiagnostic<'db>> {
@ -663,7 +663,8 @@ impl<'db> AnyDiagnostic<'db> {
}
InferenceDiagnostic::ExpectedFunction { call_expr, found } => {
let call_expr = expr_syntax(*call_expr)?;
ExpectedFunction { call: call_expr, found: Type::new(db, def, *found) }.into()
ExpectedFunction { call: call_expr, found: Type::new(db, def, found.as_ref()) }
.into()
}
InferenceDiagnostic::UnresolvedField {
expr,
@ -675,7 +676,7 @@ impl<'db> AnyDiagnostic<'db> {
UnresolvedField {
expr,
name: name.clone(),
receiver: Type::new(db, def, *receiver),
receiver: Type::new(db, def, receiver.as_ref()),
method_with_same_name_exists: *method_with_same_name_exists,
}
.into()
@ -691,8 +692,10 @@ impl<'db> AnyDiagnostic<'db> {
UnresolvedMethodCall {
expr,
name: name.clone(),
receiver: Type::new(db, def, *receiver),
field_with_same_name: (*field_with_same_name).map(|ty| Type::new(db, def, ty)),
receiver: Type::new(db, def, receiver.as_ref()),
field_with_same_name: field_with_same_name
.as_ref()
.map(|ty| Type::new(db, def, ty.as_ref())),
assoc_func_with_same_name: assoc_func_with_same_name.map(Into::into),
}
.into()
@ -719,7 +722,7 @@ impl<'db> AnyDiagnostic<'db> {
}
InferenceDiagnostic::TypedHole { expr, expected } => {
let expr = expr_syntax(*expr)?;
TypedHole { expr, expected: Type::new(db, def, *expected) }.into()
TypedHole { expr, expected: Type::new(db, def, expected.as_ref()) }.into()
}
&InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
let expr_or_pat = match pat {
@ -736,12 +739,12 @@ impl<'db> AnyDiagnostic<'db> {
}
InferenceDiagnostic::CastToUnsized { expr, cast_ty } => {
let expr = expr_syntax(*expr)?;
CastToUnsized { expr, cast_ty: Type::new(db, def, *cast_ty) }.into()
CastToUnsized { expr, cast_ty: Type::new(db, def, cast_ty.as_ref()) }.into()
}
InferenceDiagnostic::InvalidCast { expr, error, expr_ty, cast_ty } => {
let expr = expr_syntax(*expr)?;
let expr_ty = Type::new(db, def, *expr_ty);
let cast_ty = Type::new(db, def, *cast_ty);
let expr_ty = Type::new(db, def, expr_ty.as_ref());
let cast_ty = Type::new(db, def, cast_ty.as_ref());
InvalidCast { expr, error: *error, expr_ty, cast_ty }.into()
}
InferenceDiagnostic::TyDiagnostic { source, diag } => {

View file

@ -102,7 +102,7 @@ use rustc_type_ir::{
};
use smallvec::SmallVec;
use span::{AstIdNode, Edition, FileId};
use stdx::{format_to, impl_from, never};
use stdx::{format_to, impl_from, never, variance::PhantomCovariantLifetime};
use syntax::{
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr,
ast::{self, HasName, HasVisibility as _},
@ -175,7 +175,7 @@ pub use {
layout::LayoutError,
mir::{MirEvalError, MirLowerError},
next_solver::abi::Safety,
next_solver::clear_tls_solver_cache,
next_solver::{clear_tls_solver_cache, collect_ty_garbage},
},
// FIXME: These are needed for import assets, properly encapsulate them.
hir_ty::{method_resolution::TraitImpls, next_solver::SimplifiedType},
@ -697,7 +697,7 @@ impl Module {
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(s.id.into()).1,
db.field_types_with_diagnostics(s.id.into()).1.clone(),
source_map,
);
}
@ -709,7 +709,7 @@ impl Module {
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(u.id.into()).1,
db.field_types_with_diagnostics(u.id.into()).1.clone(),
source_map,
);
}
@ -739,7 +739,7 @@ impl Module {
push_ty_diagnostics(
db,
acc,
db.field_types_with_diagnostics(v.into()).1,
db.field_types_with_diagnostics(v.into()).1.clone(),
source_map,
);
expr_store_diagnostics(db, acc, source_map);
@ -1219,7 +1219,7 @@ impl<'db> InstantiatedField<'db> {
let interner = DbInterner::new_no_crate(db);
let var_id = self.inner.parent.into();
let field = db.field_types(var_id)[self.inner.id];
let field = db.field_types(var_id)[self.inner.id].get();
let ty = field.instantiate(interner, self.args);
TypeNs::new(db, var_id, ty)
}
@ -1297,7 +1297,7 @@ impl Field {
/// context of the field definition.
pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> TypeNs<'db> {
let var_id = self.parent.into();
let ty = db.field_types(var_id)[self.id].skip_binder();
let ty = db.field_types(var_id)[self.id].get().skip_binder();
TypeNs::new(db, var_id, ty)
}
@ -1315,13 +1315,13 @@ impl Field {
};
let interner = DbInterner::new_no_crate(db);
let args = generic_args_from_tys(interner, def_id.into(), generics.map(|ty| ty.ty));
let ty = db.field_types(var_id)[self.id].instantiate(interner, args);
let ty = db.field_types(var_id)[self.id].get().instantiate(interner, args);
Type::new(db, var_id, ty)
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(
self.ty(db).ty,
self.ty(db).ty.store(),
param_env_from_has_crate(
db,
match hir_def::VariantId::from(self.parent) {
@ -1331,7 +1331,8 @@ impl Field {
hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()),
},
),
)
.store(),
)
.map(|layout| Layout(layout, db.target_data_layout(self.krate(db).into()).unwrap()))
}
@ -1662,7 +1663,7 @@ impl Variant {
self.source(db)?.value.expr()
}
pub fn eval(self, db: &dyn HirDatabase) -> Result<i128, ConstEvalError<'_>> {
pub fn eval(self, db: &dyn HirDatabase) -> Result<i128, ConstEvalError> {
db.const_eval_discriminant(self.into())
}
@ -1753,7 +1754,7 @@ impl Adt {
let args = GenericArgs::for_item_with_defaults(interner, adt_id.into(), |_, id, _| {
GenericArg::error_from_id(interner, id)
});
db.layout_of_adt(adt_id, args, param_env_from_has_crate(db, adt_id))
db.layout_of_adt(adt_id, args.store(), param_env_from_has_crate(db, adt_id).store())
.map(|layout| Layout(layout, db.target_data_layout(self.krate(db).id).unwrap()))
}
@ -1988,8 +1989,8 @@ impl DefWithBody {
acc.push(
TypeMismatch {
expr_or_pat,
expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected),
actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual),
expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.as_ref()),
actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.as_ref()),
}
.into(),
);
@ -2059,7 +2060,10 @@ impl DefWithBody {
}
mir::MirSpan::Unknown => continue,
};
acc.push(MovedOutOfRef { ty: Type::new_for_crate(krate, moof.ty), span }.into())
acc.push(
MovedOutOfRef { ty: Type::new_for_crate(krate, moof.ty.as_ref()), span }
.into(),
)
}
let mol = &borrowck_result.mutability_of_locals;
for (binding_id, binding_data) in body.bindings() {
@ -2468,15 +2472,16 @@ impl Function {
self,
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
) -> Result<String, ConstEvalError<'_>> {
) -> Result<String, ConstEvalError> {
let interner = DbInterner::new_no_crate(db);
let body = db.monomorphized_mir_body(
self.id.into(),
GenericArgs::new_from_iter(interner, []),
GenericArgs::empty(interner).store(),
ParamEnvAndCrate {
param_env: db.trait_environment(self.id.into()),
krate: self.id.module(db).krate(db),
},
}
.store(),
)?;
let (result, output) = interpret_mir(db, body, false, None)?;
let mut text = match result {
@ -2728,11 +2733,14 @@ impl Const {
}
/// Evaluate the constant.
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst<'_>, ConstEvalError<'_>> {
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst<'_>, ConstEvalError> {
let interner = DbInterner::new_no_crate(db);
let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity();
db.const_eval(self.id, GenericArgs::new_from_iter(interner, []), None)
.map(|it| EvaluatedConst { const_: it, def: self.id.into(), ty })
db.const_eval(self.id, GenericArgs::empty(interner), None).map(|it| EvaluatedConst {
const_: it,
def: self.id.into(),
ty,
})
}
}
@ -2753,7 +2761,7 @@ impl<'db> EvaluatedConst<'db> {
format!("{}", self.const_.display(db, display_target))
}
pub fn render_debug(&self, db: &'db dyn HirDatabase) -> Result<String, MirEvalError<'db>> {
pub fn render_debug(&self, db: &'db dyn HirDatabase) -> Result<String, MirEvalError> {
let kind = self.const_.kind();
if let ConstKind::Value(c) = kind
&& let ty = c.ty.kind()
@ -2809,7 +2817,7 @@ impl Static {
}
/// Evaluate the static initializer.
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst<'_>, ConstEvalError<'_>> {
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst<'_>, ConstEvalError> {
let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity();
db.const_eval_static(self.id).map(|it| EvaluatedConst {
const_: it,
@ -3847,7 +3855,7 @@ impl Local {
pub fn ty(self, db: &dyn HirDatabase) -> Type<'_> {
let def = self.parent;
let infer = InferenceResult::for_body(db, def);
let ty = infer[self.binding_id];
let ty = infer.binding_ty(self.binding_id);
Type::new(db, def, ty)
}
@ -4152,8 +4160,8 @@ impl TypeParam {
pub fn default(self, db: &dyn HirDatabase) -> Option<Type<'_>> {
let ty = generic_arg_from_param(db, self.id.into())?;
let resolver = self.id.parent().resolver(db);
match ty {
GenericArg::Ty(it) if !it.is_ty_error() => {
match ty.kind() {
rustc_type_ir::GenericArgKind::Type(it) if !it.is_ty_error() => {
Some(Type::new_with_resolver_inner(db, &resolver, it))
}
_ => None,
@ -4545,7 +4553,12 @@ impl<'db> Closure<'db> {
info.0
.iter()
.cloned()
.map(|capture| ClosureCapture { owner, closure: id, capture })
.map(|capture| ClosureCapture {
owner,
closure: id,
capture,
_marker: PhantomCovariantLifetime::new(),
})
.collect()
}
@ -4650,7 +4663,8 @@ impl FnTrait {
pub struct ClosureCapture<'db> {
owner: DefWithBodyId,
closure: InternedClosureId,
capture: hir_ty::CapturedItem<'db>,
capture: hir_ty::CapturedItem,
_marker: PhantomCovariantLifetime<'db>,
}
impl<'db> ClosureCapture<'db> {
@ -4917,7 +4931,7 @@ impl<'db> Type<'db> {
.fields()
.iter()
.map(|(idx, _)| {
field_types[idx].instantiate(self.interner, args)
field_types[idx].get().instantiate(self.interner, args)
})
.filter(|it| !it.references_non_lt_error())
.collect()
@ -5241,7 +5255,7 @@ impl<'db> Type<'db> {
.iter()
.map(|(local_id, ty)| {
let def = Field { parent: variant_id.into(), id: local_id };
let ty = ty.instantiate(interner, substs);
let ty = ty.get().instantiate(interner, substs);
(def, self.derived(ty))
})
.collect()
@ -5399,12 +5413,14 @@ impl<'db> Type<'db> {
.as_adt()
.into_iter()
.flat_map(|(_, substs)| substs.iter())
.filter_map(move |arg| match arg {
GenericArg::Ty(ty) => Some(format_smolstr!("{}", ty.display(db, display_target))),
GenericArg::Const(const_) => {
.filter_map(move |arg| match arg.kind() {
rustc_type_ir::GenericArgKind::Type(ty) => {
Some(format_smolstr!("{}", ty.display(db, display_target)))
}
rustc_type_ir::GenericArgKind::Const(const_) => {
Some(format_smolstr!("{}", const_.display(db, display_target)))
}
GenericArg::Lifetime(_) => None,
rustc_type_ir::GenericArgKind::Lifetime(_) => None,
})
}
@ -5808,7 +5824,7 @@ impl<'db> Type<'db> {
}
pub fn layout(&self, db: &'db dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty(self.ty, self.env)
db.layout_of_ty(self.ty.store(), self.env.store())
.map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
}
@ -5840,7 +5856,7 @@ impl<'db> TypeNs<'db> {
pub fn impls_trait(&self, infcx: InferCtxt<'db>, trait_: Trait, args: &[TypeNs<'db>]) -> bool {
let args = GenericArgs::new_from_iter(
infcx.interner,
[self.ty].into_iter().chain(args.iter().map(|t| t.ty)).map(|t| t.into()),
[self.ty].into_iter().chain(args.iter().map(|t| t.ty)).map(GenericArg::from),
);
let trait_ref = hir_ty::next_solver::TraitRef::new(infcx.interner, trait_.id.into(), args);

View file

@ -1653,8 +1653,11 @@ impl<'db> SemanticsImpl<'db> {
analyzer.expr_adjustments(expr).map(|it| {
it.iter()
.map(|adjust| {
let target =
Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target);
let target = Type::new_with_resolver(
self.db,
&analyzer.resolver,
adjust.target.as_ref(),
);
let kind = match adjust.kind {
hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {

View file

@ -79,7 +79,7 @@ pub(crate) enum BodyOrSig<'db> {
def: DefWithBodyId,
body: Arc<Body>,
source_map: Arc<BodySourceMap>,
infer: Option<&'db InferenceResult<'db>>,
infer: Option<&'db InferenceResult>,
},
// To be folded into body once it is considered one
VariantFields {
@ -119,7 +119,7 @@ impl<'db> SourceAnalyzer<'db> {
def: DefWithBodyId,
node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
offset: Option<TextSize>,
infer: Option<&'db InferenceResult<'db>>,
infer: Option<&'db InferenceResult>,
) -> SourceAnalyzer<'db> {
let (body, source_map) = db.body_with_source_map(def);
let scopes = db.expr_scopes(def);
@ -185,9 +185,7 @@ impl<'db> SourceAnalyzer<'db> {
}
// FIXME: Remove this
fn body_(
&self,
) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult<'db>>)> {
fn body_(&self) -> Option<(DefWithBodyId, &Body, &BodySourceMap, Option<&InferenceResult>)> {
self.body_or_sig.as_ref().and_then(|it| match it {
BodyOrSig::Body { def, body, source_map, infer } => {
Some((*def, &**body, &**source_map, infer.as_deref()))
@ -196,7 +194,7 @@ impl<'db> SourceAnalyzer<'db> {
})
}
fn infer(&self) -> Option<&InferenceResult<'db>> {
fn infer(&self) -> Option<&InferenceResult> {
self.body_or_sig.as_ref().and_then(|it| match it {
BodyOrSig::Sig { .. } => None,
BodyOrSig::VariantFields { .. } => None,
@ -260,7 +258,7 @@ impl<'db> SourceAnalyzer<'db> {
if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] { Some(id) } else { None }
}
pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment<'db>]> {
pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> {
// It is safe to omit destructuring assignments here because they have no adjustments (neither
// expressions nor patterns).
let expr_id = self.expr_id(expr.clone())?.as_expr()?;
@ -326,8 +324,8 @@ impl<'db> SourceAnalyzer<'db> {
let coerced = expr_id
.as_expr()
.and_then(|expr_id| infer.expr_adjustment(expr_id))
.and_then(|adjusts| adjusts.last().map(|adjust| adjust.target));
let ty = infer[expr_id];
.and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.as_ref()));
let ty = infer.expr_or_pat_ty(expr_id);
let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty);
Some((mk_ty(ty), coerced.map(mk_ty)))
}
@ -342,14 +340,15 @@ impl<'db> SourceAnalyzer<'db> {
let coerced = match expr_or_pat_id {
ExprOrPatId::ExprId(idx) => infer
.expr_adjustment(idx)
.and_then(|adjusts| adjusts.last().cloned())
.map(|adjust| adjust.target),
ExprOrPatId::PatId(idx) => {
infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned())
}
.and_then(|adjusts| adjusts.last())
.map(|adjust| adjust.target.as_ref()),
ExprOrPatId::PatId(idx) => infer
.pat_adjustment(idx)
.and_then(|adjusts| adjusts.last())
.map(|adjust| adjust.as_ref()),
};
let ty = infer[expr_or_pat_id];
let ty = infer.expr_or_pat_ty(expr_or_pat_id);
let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty);
Some((mk_ty(ty), coerced.map(mk_ty)))
}
@ -361,7 +360,7 @@ impl<'db> SourceAnalyzer<'db> {
) -> Option<Type<'db>> {
let binding_id = self.binding_id_of_pat(pat)?;
let infer = self.infer()?;
let ty = infer[binding_id];
let ty = infer.binding_ty(binding_id);
let mk_ty = |ty: Ty<'db>| Type::new_with_resolver(db, &self.resolver, ty);
Some(mk_ty(ty))
}
@ -372,7 +371,7 @@ impl<'db> SourceAnalyzer<'db> {
_param: &ast::SelfParam,
) -> Option<Type<'db>> {
let binding = self.body()?.self_param?;
let ty = self.infer()?[binding];
let ty = self.infer()?.binding_ty(binding);
Some(Type::new_with_resolver(db, &self.resolver, ty))
}
@ -404,7 +403,7 @@ impl<'db> SourceAnalyzer<'db> {
infer
.pat_adjustment(pat_id.as_pat()?)?
.iter()
.map(|ty| Type::new_with_resolver(db, &self.resolver, *ty))
.map(|ty| Type::new_with_resolver(db, &self.resolver, ty.as_ref()))
.collect(),
)
}
@ -482,7 +481,7 @@ impl<'db> SourceAnalyzer<'db> {
fn field_subst(
&self,
field_expr: ExprId,
infer: &InferenceResult<'db>,
infer: &InferenceResult,
db: &'db dyn HirDatabase,
) -> Option<GenericSubstitution<'db>> {
let body = self.store()?;
@ -598,8 +597,7 @@ impl<'db> SourceAnalyzer<'db> {
let poll_fn = self.lang_items(db).FuturePoll?;
// HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
// doesn't have any generic parameters, so we skip building another subst for `poll()`.
let interner = DbInterner::new_no_crate(db);
let substs = GenericArgs::new_from_iter(interner, [ty.into()]);
let substs = GenericArgs::new_from_slice(&[ty.into()]);
Some(self.resolve_impl_method_or_trait_def(db, poll_fn, substs))
}
@ -641,10 +639,9 @@ impl<'db> SourceAnalyzer<'db> {
let ty = self.ty_of_expr(prefix_expr.expr()?)?;
let interner = DbInterner::new_no_crate(db);
// HACK: subst for all methods coincides with that for their trait because the methods
// don't have any generic parameters, so we skip building another subst for the methods.
let substs = GenericArgs::new_from_iter(interner, [ty.into()]);
let substs = GenericArgs::new_from_slice(&[ty.into()]);
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
@ -674,8 +671,7 @@ impl<'db> SourceAnalyzer<'db> {
.unwrap_or(index_fn);
// HACK: subst for all methods coincides with that for their trait because the methods
// don't have any generic parameters, so we skip building another subst for the methods.
let interner = DbInterner::new_no_crate(db);
let substs = GenericArgs::new_from_iter(interner, [base_ty.into(), index_ty.into()]);
let substs = GenericArgs::new_from_slice(&[base_ty.into(), index_ty.into()]);
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
@ -694,8 +690,7 @@ impl<'db> SourceAnalyzer<'db> {
})?;
// HACK: subst for `index()` coincides with that for `Index` because `index()` itself
// doesn't have any generic parameters, so we skip building another subst for `index()`.
let interner = DbInterner::new_no_crate(db);
let substs = GenericArgs::new_from_iter(interner, [lhs.into(), rhs.into()]);
let substs = GenericArgs::new_from_slice(&[lhs.into(), rhs.into()]);
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
@ -710,8 +705,7 @@ impl<'db> SourceAnalyzer<'db> {
let op_fn = self.lang_items(db).TryTraitBranch?;
// HACK: subst for `branch()` coincides with that for `Try` because `branch()` itself
// doesn't have any generic parameters, so we skip building another subst for `branch()`.
let interner = DbInterner::new_no_crate(db);
let substs = GenericArgs::new_from_iter(interner, [ty.into()]);
let substs = GenericArgs::new_from_slice(&[ty.into()]);
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
@ -751,7 +745,8 @@ impl<'db> SourceAnalyzer<'db> {
let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?;
let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst);
let field_ty =
(*db.field_types(variant).get(field.local_id)?).get().instantiate(interner, subst);
Some((
field.into(),
local,
@ -772,8 +767,9 @@ impl<'db> SourceAnalyzer<'db> {
let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
let variant_data = variant.fields(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?;
let field_ty = (*db.field_types(variant).get(field.local_id)?).instantiate(interner, subst);
let (adt, subst) = self.infer()?.pat_ty(pat_id.as_pat()?).as_adt()?;
let field_ty =
(*db.field_types(variant).get(field.local_id)?).get().instantiate(interner, subst);
Some((
field.into(),
Type::new_with_resolver(db, &self.resolver, field_ty),
@ -835,23 +831,24 @@ impl<'db> SourceAnalyzer<'db> {
if let Either::Right(container) = &mut container {
*container = structurally_normalize_ty(&infcx, *container, trait_env.param_env);
}
let handle_variants =
|variant: VariantId, subst: GenericArgs<'db>, container: &mut _| {
let fields = variant.fields(db);
let field = fields.field(&field_name.as_name())?;
let field_types = db.field_types(variant);
*container = Either::Right(field_types[field].instantiate(interner, subst));
let generic_def = match variant {
VariantId::EnumVariantId(it) => it.loc(db).parent.into(),
VariantId::StructId(it) => it.into(),
VariantId::UnionId(it) => it.into(),
};
Some((
Either::Right(Field { parent: variant.into(), id: field }),
generic_def,
subst,
))
let handle_variants = |variant: VariantId,
subst: GenericArgs<'db>,
container: &mut _| {
let fields = variant.fields(db);
let field = fields.field(&field_name.as_name())?;
let field_types = db.field_types(variant);
*container = Either::Right(field_types[field].get().instantiate(interner, subst));
let generic_def = match variant {
VariantId::EnumVariantId(it) => it.loc(db).parent.into(),
VariantId::StructId(it) => it.into(),
VariantId::UnionId(it) => it.into(),
};
Some((
Either::Right(Field { parent: variant.into(), id: field }),
generic_def,
subst,
))
};
let temp_ty = Ty::new_error(interner, ErrorGuaranteed);
let (field_def, generic_def, subst) =
match std::mem::replace(&mut container, Either::Right(temp_ty)) {
@ -1173,7 +1170,7 @@ impl<'db> SourceAnalyzer<'db> {
self.infer()?.type_of_expr_or_pat(expr_id)?
} else if let Some(pat) = ast::Pat::cast(parent) {
let pat_id = self.pat_id(&pat)?;
self.infer()?[pat_id]
self.infer()?.expr_or_pat_ty(pat_id)
} else {
return None;
};
@ -1245,7 +1242,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let expr_id = self.expr_id(literal.clone().into())?;
let substs = infer[expr_id].as_adt()?.1;
let substs = infer.expr_or_pat_ty(expr_id).as_adt()?.1;
let (variant, missing_fields, _exhaustive) = match expr_id {
ExprOrPatId::ExprId(expr_id) => {
@ -1268,7 +1265,7 @@ impl<'db> SourceAnalyzer<'db> {
let infer = self.infer()?;
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
let substs = infer[pat_id].as_adt()?.1;
let substs = infer.pat_ty(pat_id).as_adt()?.1;
let (variant, missing_fields, _exhaustive) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
@ -1290,7 +1287,7 @@ impl<'db> SourceAnalyzer<'db> {
.into_iter()
.map(|local_id| {
let field = FieldId { parent: variant, local_id };
let ty = field_types[local_id].instantiate(interner, substs);
let ty = field_types[local_id].get().instantiate(interner, substs);
(field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty))
})
.collect()

View file

@ -46,7 +46,7 @@ pub(super) fn hints(
if !place.projection.is_empty() {
continue; // Ignore complex cases for now
}
if mir.locals[place.local].ty.as_adt().is_none() {
if mir.locals[place.local].ty.as_ref().as_adt().is_none() {
continue; // Arguably only ADTs have significant drop impls
}
let Some(&binding_idx) = local_to_binding.get(place.local) else {

View file

@ -60,7 +60,7 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura
pub(crate) fn render_const_eval_error(
db: &RootDatabase,
e: ConstEvalError<'_>,
e: ConstEvalError,
display_target: DisplayTarget,
) -> String {
let span_formatter = |file_id, text_range: TextRange| {

View file

@ -18,6 +18,8 @@ dashmap.workspace = true
hashbrown.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
smallvec.workspace = true
rayon.workspace = true
[lints]
workspace = true

330
crates/intern/src/gc.rs Normal file
View file

@ -0,0 +1,330 @@
//! Garbage collection of interned values.
//!
//! The GC is a simple mark-and-sweep GC: you first mark all storages, then the
//! GC visits them and each live value they refer to, recursively, then removes
//! those not marked. The sweep phase is done in parallel.
use std::{hash::Hash, marker::PhantomData, ops::ControlFlow};
use dashmap::DashMap;
use hashbrown::raw::RawTable;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use rustc_hash::{FxBuildHasher, FxHashSet};
use triomphe::{Arc, ThinArc};
use crate::{Internable, InternedRef, InternedSliceRef, SliceInternable};
/// Type-erased view of one intern storage, letting the collector hold a
/// heterogeneous list of storages and drive the mark/sweep phases over each.
trait Storage {
    /// Number of values currently interned in this storage.
    fn len(&self) -> usize;
    /// Mark phase: record every externally-referenced value, and everything
    /// reachable from it, as alive in `gc`.
    fn mark(&self, gc: &mut GarbageCollector);
    /// Sweep phase: drop every interned value that was not marked alive.
    fn sweep(&self, gc: &GarbageCollector);
}
/// [`Storage`] adapter for single-value interners (see [`crate::Interned`]).
struct InternedStorage<T>(PhantomData<fn() -> T>);

impl<T: Internable + GcInternedVisit> Storage for InternedStorage<T> {
    fn len(&self) -> usize {
        T::storage().get().len()
    }

    fn mark(&self, gc: &mut GarbageCollector) {
        let storage = T::storage().get();
        for item in storage {
            let item = item.key();
            let addr = Arc::as_ptr(item).addr();
            // A strong count of exactly 1 means only the intern map itself holds
            // the value; anything above that is an outside `Interned` handle.
            if Arc::strong_count(item) > 1 {
                // The item is referenced from the outside.
                gc.alive.insert(addr);
                // Recursively mark everything this value points at.
                item.visit_with(gc);
            }
        }
    }

    fn sweep(&self, gc: &GarbageCollector) {
        let storage = T::storage().get();
        // Liveness is keyed by allocation address (same key used in `mark`).
        gc.sweep_storage(storage, |item| item.as_ptr().addr());
    }
}
/// [`Storage`] adapter for slice interners (see [`crate::InternedSlice`]).
struct InternedSliceStorage<T>(PhantomData<fn() -> T>);

impl<T: SliceInternable + GcInternedSliceVisit> Storage for InternedSliceStorage<T> {
    fn len(&self) -> usize {
        T::storage().get().len()
    }

    fn mark(&self, gc: &mut GarbageCollector) {
        let storage = T::storage().get();
        for item in storage {
            let item = item.key();
            let addr = ThinArc::as_ptr(item).addr();
            // Count of 1 == only the intern map holds it; more means live handles.
            if ThinArc::strong_count(item) > 1 {
                // The item is referenced from the outside.
                gc.alive.insert(addr);
                // Header and slice elements are visited separately.
                T::visit_header(&item.header.header, gc);
                T::visit_slice(&item.slice, gc);
            }
        }
    }

    fn sweep(&self, gc: &GarbageCollector) {
        let storage = T::storage().get();
        gc.sweep_storage(storage, |item| item.as_ptr().addr());
    }
}
/// Implemented by GC-interned values. `visit_with` must mark every interned
/// value reachable from `self`, via [`GarbageCollector::mark_interned_alive`]
/// and [`GarbageCollector::mark_interned_slice_alive`], recursing when those
/// return `Continue`.
pub trait GcInternedVisit {
    fn visit_with(&self, gc: &mut GarbageCollector);
}

/// Slice counterpart of [`GcInternedVisit`]: the header and the slice elements
/// are visited separately.
pub trait GcInternedSliceVisit: SliceInternable {
    fn visit_header(header: &Self::Header, gc: &mut GarbageCollector);
    // Fix: parameter was misleadingly named `header`; it is the slice payload.
    fn visit_slice(slice: &[Self::SliceType], gc: &mut GarbageCollector);
}
/// Mark-and-sweep collector for GC-mode interned values; see the module docs.
#[derive(Default)]
pub struct GarbageCollector {
    // Allocation addresses of values proven reachable during the mark phase.
    alive: FxHashSet<usize>,
    // Type-erased storages registered for collection.
    storages: Vec<Box<dyn Storage + Send + Sync>>,
}
impl GarbageCollector {
    /// Registers the global storage of GC-interned type `T` for collection.
    pub fn add_storage<T: Internable + GcInternedVisit>(&mut self) {
        // Collection is only meaningful for storages created in GC mode.
        const { assert!(T::USE_GC) };
        self.storages.push(Box::new(InternedStorage::<T>(PhantomData)));
    }

    /// Registers the global storage of GC-interned slice type `T` for collection.
    pub fn add_slice_storage<T: SliceInternable + GcInternedSliceVisit>(&mut self) {
        const { assert!(T::USE_GC) };
        self.storages.push(Box::new(InternedSliceStorage::<T>(PhantomData)));
    }

    /// Runs a full mark-and-sweep collection over all registered storages,
    /// consuming the collector.
    ///
    /// # Safety
    ///
    /// - This cannot be called if there are some not-yet-recorded type values.
    /// - All relevant storages must have been added; that is, within the full graph of values,
    ///   the added storages must form a DAG.
    /// - [`GcInternedVisit`] and [`GcInternedSliceVisit`] must mark all values reachable from the node.
    pub unsafe fn collect(mut self) {
        // Upper bound on `alive`: in the worst case every interned value survives.
        let total_nodes = self.storages.iter().map(|storage| storage.len()).sum();
        self.alive = FxHashSet::with_capacity_and_hasher(total_nodes, FxBuildHasher);
        // Take the storages out so `self` can be mutably borrowed while iterating.
        let storages = std::mem::take(&mut self.storages);
        // Mark phase: sequential, mutates `self.alive`.
        for storage in &storages {
            storage.mark(&mut self);
        }
        // Sweep phase: parallel, only reads `self.alive`.
        // Miri doesn't support rayon.
        if cfg!(miri) {
            storages.iter().for_each(|storage| storage.sweep(&self));
        } else {
            storages.par_iter().for_each(|storage| storage.sweep(&self));
        }
    }

    /// Marks one interned value as alive. Returns `Break` when the caller need
    /// not recurse into it (either already marked, or it has outside references
    /// and the mark phase will visit it directly); `Continue` means the caller
    /// should visit the value's children.
    pub fn mark_interned_alive<T: Internable>(
        &mut self,
        interned: InternedRef<'_, T>,
    ) -> ControlFlow<()> {
        if interned.strong_count() > 1 {
            // It will be visited anyway, so short-circuit
            return ControlFlow::Break(());
        }
        let addr = interned.as_raw().addr();
        // `insert` returning false means it was already marked.
        if !self.alive.insert(addr) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
    }

    /// Slice counterpart of [`Self::mark_interned_alive`].
    pub fn mark_interned_slice_alive<T: SliceInternable>(
        &mut self,
        interned: InternedSliceRef<'_, T>,
    ) -> ControlFlow<()> {
        if interned.strong_count() > 1 {
            // It will be visited anyway, so short-circuit
            return ControlFlow::Break(());
        }
        let addr = interned.as_raw().addr();
        if !self.alive.insert(addr) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
    }

    /// Removes from `storage` every entry whose address was not marked alive.
    fn sweep_storage<T: Hash + Eq + Send + Sync>(
        &self,
        storage: &DashMap<T, (), FxBuildHasher>,
        get_addr: impl Fn(&T) -> usize + Send + Sync,
    ) {
        // Each DashMap shard has its own lock, so shards can be swept in parallel.
        // Miri doesn't support rayon.
        if cfg!(miri) {
            storage.shards().iter().for_each(|shard| {
                self.retain_only_alive(&mut *shard.write(), |item| get_addr(&item.0))
            });
        } else {
            storage.shards().par_iter().for_each(|shard| {
                self.retain_only_alive(&mut *shard.write(), |item| get_addr(&item.0))
            });
        }
    }

    /// Erases every bucket of `map` whose address is not in `self.alive`.
    #[inline]
    fn retain_only_alive<T>(&self, map: &mut RawTable<T>, mut get_addr: impl FnMut(&T) -> usize) {
        // This code was copied from DashMap's retain() - which we can't use because we want to run in parallel.
        unsafe {
            // Here we only use `iter` as a temporary, preventing use-after-free
            for bucket in map.iter() {
                let item = bucket.as_mut();
                let addr = get_addr(item);
                if !self.alive.contains(&addr) {
                    map.erase(bucket);
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::{
        GarbageCollector, GcInternedSliceVisit, GcInternedVisit, Interned, InternedSliceRef,
    };

    crate::impl_internable!(String);

    /// Non-GC interning: equal values share one allocation, handles compare by
    /// pointer identity, and dropping handles is safe in any order.
    #[test]
    fn simple_interned() {
        let a = Interned::new("abc".to_owned());
        let b = Interned::new("abc".to_owned());
        assert_eq!(a, b);
        assert_eq!(a.as_ref(), b.as_ref());
        assert_eq!(a.as_ref(), a.as_ref());
        assert_eq!(a, a.clone());
        assert_eq!(a, a.clone().clone());
        assert_eq!(b.clone(), a.clone().clone());
        assert_eq!(*a, "abc");
        assert_eq!(*b, "abc");
        assert_eq!(b.as_ref().to_owned(), a);
        let c = Interned::new("def".to_owned());
        assert_ne!(a, c);
        assert_ne!(b, c);
        assert_ne!(b.as_ref(), c.as_ref());
        assert_eq!(*c.as_ref(), "def");
        drop(c);
        assert_eq!(*a, "abc");
        assert_eq!(*b, "abc");
        drop(a);
        assert_eq!(*b, "abc");
        drop(b);
    }

    /// GC interning: values survive a collection while an `Interned` exists,
    /// and a later collection with no live handles must not crash.
    #[test]
    fn simple_gc() {
        #[derive(Debug, PartialEq, Eq, Hash)]
        struct GcString(String);
        crate::impl_internable!(gc; GcString);
        impl GcInternedVisit for GcString {
            fn visit_with(&self, _gc: &mut GarbageCollector) {}
        }
        crate::impl_slice_internable!(gc; StringSlice, String, u32);
        type InternedSlice = crate::InternedSlice<StringSlice>;
        impl GcInternedSliceVisit for StringSlice {
            fn visit_header(_header: &Self::Header, _gc: &mut GarbageCollector) {}
            fn visit_slice(_header: &[Self::SliceType], _gc: &mut GarbageCollector) {}
        }
        // Keep only `a` and `d` as owned handles past this scope.
        let (a, d) = {
            let a = Interned::new_gc(GcString("abc".to_owned())).to_owned();
            let b = Interned::new_gc(GcString("abc".to_owned())).to_owned();
            assert_eq!(a, b);
            assert_eq!(a.as_ref(), b.as_ref());
            assert_eq!(a.as_ref(), a.as_ref());
            assert_eq!(a, a.clone());
            assert_eq!(a, a.clone().clone());
            assert_eq!(b.clone(), a.clone().clone());
            assert_eq!(a.0, "abc");
            assert_eq!(b.0, "abc");
            assert_eq!(b.as_ref().to_owned(), a);
            let c = Interned::new_gc(GcString("def".to_owned())).to_owned();
            assert_ne!(a, c);
            assert_ne!(b, c);
            assert_ne!(b.as_ref(), c.as_ref());
            assert_eq!(c.as_ref().0, "def");
            let d = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456]);
            let e = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456]);
            assert_eq!(d, e);
            assert_eq!(d.to_owned(), e.to_owned());
            assert_eq!(d.header.length, 2);
            assert_eq!(d.header.header, "abc");
            assert_eq!(d.slice, [123, 456]);
            (a, d.to_owned())
        };
        // First collection: `a` and `d` are owned, so they must survive.
        let mut gc = GarbageCollector::default();
        gc.add_slice_storage::<StringSlice>();
        gc.add_storage::<GcString>();
        unsafe { gc.collect() };
        assert_eq!(a.0, "abc");
        assert_eq!(d.header.length, 2);
        assert_eq!(d.header.header, "abc");
        assert_eq!(d.slice, [123, 456]);
        drop(a);
        drop(d);
        // Second collection: nothing is owned, everything may be freed.
        let mut gc = GarbageCollector::default();
        gc.add_slice_storage::<StringSlice>();
        gc.add_storage::<GcString>();
        unsafe { gc.collect() };
    }

    /// GC reachability: a value referenced only *through* another interned
    /// value must be kept alive by the visit callbacks.
    #[test]
    fn gc_visit() {
        #[derive(PartialEq, Eq, Hash)]
        struct GcInterned(InternedSliceRef<'static, StringSlice>);
        crate::impl_internable!(gc; GcInterned);
        impl GcInternedVisit for GcInterned {
            fn visit_with(&self, gc: &mut GarbageCollector) {
                _ = gc.mark_interned_slice_alive(self.0);
            }
        }
        crate::impl_slice_internable!(gc; StringSlice, String, i32);
        type InternedSlice = crate::InternedSlice<StringSlice>;
        impl GcInternedSliceVisit for StringSlice {
            fn visit_header(_header: &Self::Header, _gc: &mut GarbageCollector) {}
            fn visit_slice(_header: &[Self::SliceType], _gc: &mut GarbageCollector) {}
        }
        // `inner` is owned only through `outer`; only `visit_with` keeps it alive.
        let outer = {
            let inner = InternedSlice::from_header_and_slice("abc".to_owned(), &[123, 456, 789]);
            Interned::new_gc(GcInterned(inner)).to_owned()
        };
        let mut gc = GarbageCollector::default();
        gc.add_slice_storage::<StringSlice>();
        gc.add_storage::<GcInterned>();
        unsafe { gc.collect() };
        assert_eq!(outer.0.header.header, "abc");
        assert_eq!(outer.0.slice, [123, 456, 789]);
        drop(outer);
        let mut gc = GarbageCollector::default();
        gc.add_slice_storage::<StringSlice>();
        gc.add_storage::<GcInterned>();
        unsafe { gc.collect() };
    }
}

372
crates/intern/src/intern.rs Normal file
View file

@ -0,0 +1,372 @@
//! Interning of single values.
//!
//! Interning supports two modes: GC and non-GC.
//!
//! In non-GC mode, you create [`Interned`]s, and can create `Copy` handles to them
//! that can still be upgraded back to [`Interned`] ([`InternedRef`]) via [`Interned::as_ref`].
//! Generally, letting the [`InternedRef`] outlive the [`Interned`] is a soundness bug and can
//! lead to UB. When all [`Interned`]s of some value are dropped, the value is freed (newer interns
//! may re-create it, not necessarily in the same place).
//!
//! In GC mode, you generally operate on [`InternedRef`]s. They are `Copy` and comfortable. To intern
//! a value you call [`Interned::new_gc`], which returns an [`InternedRef`]. Having all [`Interned`]s
//! of some value be dropped will *not* immediately free the value. Instead, a mark-and-sweep GC can
//! be initiated, which will free all values which have no live [`Interned`]s.
//!
//! Generally, in GC mode, you operate on [`InternedRef`], but when you need to store some long-term
//! value (e.g. a Salsa query output), you convert it to an [`Interned`]. This ensures that an eventual
//! GC will not free it as long as it is alive.
//!
//! Making mistakes is hard due to GC [`InternedRef`] wrappers not implementing `salsa::Update`, meaning
//! Salsa will ensure you do not store them in queries or Salsa-interneds. However it's still *possible*
//! without unsafe code (for example, by storing them in a `static`), which is why triggering GC is unsafe.
//!
//! For more information about GC see [`crate::gc`].
use std::{
fmt::{self, Debug, Display},
hash::{BuildHasher, Hash, Hasher},
ops::Deref,
ptr,
sync::OnceLock,
};
use dashmap::{DashMap, SharedValue};
use hashbrown::raw::RawTable;
use rustc_hash::FxBuildHasher;
use triomphe::{Arc, ArcBorrow};
type InternMap<T> = DashMap<Arc<T>, (), FxBuildHasher>;
type Guard<T> = dashmap::RwLockWriteGuard<'static, RawTable<(Arc<T>, SharedValue<()>)>>;
/// A strong, refcounted handle to an interned value. See the module docs for
/// the difference between GC and non-GC interning modes.
pub struct Interned<T: Internable> {
    arc: Arc<T>,
}
impl<T: Internable> Interned<T> {
    /// Interns `obj` in non-GC mode. The value is freed when the last
    /// `Interned` handle for it is dropped (see `Drop`).
    #[inline]
    pub fn new(obj: T) -> Self {
        const { assert!(!T::USE_GC) };
        let storage = T::storage().get();
        let (mut shard, hash) = Self::select(storage, &obj);
        // Atomically,
        // - check if `obj` is already in the map
        // - if so, clone its `Arc` and return it
        // - if not, box it up, insert it, and return a clone
        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
        // insert the same object between us looking it up and inserting it.
        let bucket = match shard.find_or_find_insert_slot(
            hash,
            |(other, _)| **other == obj,
            |(x, _)| Self::hash(storage, x),
        ) {
            Ok(bucket) => bucket,
            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
            Err(insert_slot) => unsafe {
                shard.insert_in_slot(hash, insert_slot, (Arc::new(obj), SharedValue::new(())))
            },
        };
        // SAFETY: We just retrieved/inserted this bucket.
        unsafe { Self { arc: bucket.as_ref().0.clone() } }
    }

    /// Interns `obj` in GC mode, returning a borrowed (non-refcount-bumping)
    /// handle; the intern map keeps the value alive until a GC runs.
    #[inline]
    pub fn new_gc<'a>(obj: T) -> InternedRef<'a, T> {
        const { assert!(T::USE_GC) };
        let storage = T::storage().get();
        let (mut shard, hash) = Self::select(storage, &obj);
        // Atomically,
        // - check if `obj` is already in the map
        // - if so, clone its `Arc` and return it
        // - if not, box it up, insert it, and return a clone
        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
        // insert the same object between us looking it up and inserting it.
        let bucket = match shard.find_or_find_insert_slot(
            hash,
            |(other, _)| **other == obj,
            |(x, _)| Self::hash(storage, x),
        ) {
            Ok(bucket) => bucket,
            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
            Err(insert_slot) => unsafe {
                shard.insert_in_slot(hash, insert_slot, (Arc::new(obj), SharedValue::new(())))
            },
        };
        // SAFETY: We just retrieved/inserted this bucket.
        unsafe { InternedRef { arc: Arc::borrow_arc(&bucket.as_ref().0) } }
    }

    /// Locks and returns the DashMap shard responsible for `obj`, plus its hash.
    #[inline]
    fn select(storage: &'static InternMap<T>, obj: &T) -> (Guard<T>, u64) {
        let hash = Self::hash(storage, obj);
        let shard_idx = storage.determine_shard(hash as usize);
        let shard = &storage.shards()[shard_idx];
        (shard.write(), hash)
    }

    /// Hashes `obj` with the map's hasher (must match the shard's hashing).
    #[inline]
    fn hash(storage: &'static InternMap<T>, obj: &T) -> u64 {
        storage.hasher().hash_one(obj)
    }

    /// # Safety
    ///
    /// The pointer should originate from an `Interned` or an `InternedRef`.
    #[inline]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY: Our precondition.
        Self { arc: unsafe { Arc::from_raw(ptr) } }
    }

    /// Borrows this handle as a `Copy`able `InternedRef` without touching the
    /// refcount.
    #[inline]
    pub fn as_ref(&self) -> InternedRef<'_, T> {
        InternedRef { arc: self.arc.borrow_arc() }
    }
}
impl<T: Internable> Drop for Interned<T> {
    #[inline]
    fn drop(&mut self) {
        // When the last `Ref` is dropped, remove the object from the global map.
        // In GC mode entries are never removed eagerly; they wait for a collection.
        if !T::USE_GC && Arc::count(&self.arc) == 2 {
            // Only `self` and the global map point to the object.
            self.drop_slow();
        }
    }
}

impl<T: Internable> Interned<T> {
    /// Cold path of `drop`: removes the map entry once no outside handle remains.
    #[cold]
    fn drop_slow(&mut self) {
        let storage = T::storage().get();
        let (mut shard, hash) = Self::select(storage, &self.arc);
        // Re-check under the shard lock: the count may have changed between the
        // check in `drop` and acquiring the lock.
        if Arc::count(&self.arc) != 2 {
            // Another thread has interned another copy
            return;
        }
        shard.remove_entry(hash, |(other, _)| **other == **self);
        // Shrink the backing storage if the shard is less than 50% occupied.
        if shard.len() * 2 < shard.capacity() {
            let len = shard.len();
            shard.shrink_to(len, |(x, _)| Self::hash(storage, x));
        }
    }
}
/// Compares interned `Ref`s using pointer equality.
impl<T: Internable> PartialEq for Interned<T> {
// NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects.
#[inline]
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.arc, &other.arc)
}
}
impl<T: Internable> Eq for Interned<T> {}
impl<T: Internable> Hash for Interned<T> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_usize(self.arc.as_ptr().addr())
}
}
impl<T: Internable> AsRef<T> for Interned<T> {
#[inline]
fn as_ref(&self) -> &T {
self
}
}
impl<T: Internable> Deref for Interned<T> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
&self.arc
}
}
impl<T: Internable> Clone for Interned<T> {
#[inline]
fn clone(&self) -> Self {
Self { arc: self.arc.clone() }
}
}
impl<T: Debug + Internable> Debug for Interned<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
<T as Debug>::fmt(&**self, f)
}
}
impl<T: Display + Internable> Display for Interned<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
<T as Display>::fmt(&**self, f)
}
}
/// A `Copy`able, non-owning handle to an interned value; see the module docs.
#[repr(transparent)]
pub struct InternedRef<'a, T> {
    arc: ArcBorrow<'a, T>,
}

impl<'a, T: Internable> InternedRef<'a, T> {
    /// Returns the address of the interned allocation.
    #[inline]
    pub fn as_raw(self) -> *const T {
        // Not `ptr::from_ref(&*self.arc)`, because we need to keep the provenance.
        self.arc.with_arc(|arc| Arc::as_ptr(arc))
    }

    /// # Safety
    ///
    /// The pointer needs to originate from `Interned` or `InternedRef`.
    #[inline]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // SAFETY: Our precondition.
        Self { arc: unsafe { ArcBorrow::from_ptr(ptr) } }
    }

    /// Upgrades to an owning `Interned` (bumps the refcount), protecting the
    /// value from GC while the `Interned` is alive.
    #[inline]
    pub fn to_owned(self) -> Interned<T> {
        Interned { arc: self.arc.clone_arc() }
    }

    /// Borrows the value for the full lifetime of this handle.
    #[inline]
    pub fn get(self) -> &'a T {
        self.arc.get()
    }

    /// # Safety
    ///
    /// You have to make sure the data is not referenced after the refcount reaches zero; beware the interning
    /// map also keeps a reference to the value.
    #[inline]
    pub unsafe fn decrement_refcount(self) {
        // SAFETY: Our precondition.
        unsafe { drop(Arc::from_raw(self.as_raw())) }
    }

    /// Current strong count; used by the GC (1 == only the intern map holds it).
    #[inline]
    pub(crate) fn strong_count(self) -> usize {
        ArcBorrow::strong_count(&self.arc)
    }

    /// **Available only on GC mode**.
    ///
    /// Changes the attached lifetime, as in GC mode, the lifetime is more kind of a lint to prevent misuse
    /// than actual soundness check.
    #[inline]
    pub fn change_lifetime<'b>(self) -> InternedRef<'b, T> {
        const { assert!(T::USE_GC) };
        // SAFETY: The lifetime on `InternedRef` is essentially advisory only for GCed types.
        unsafe { std::mem::transmute::<InternedRef<'a, T>, InternedRef<'b, T>>(self) }
    }
}
impl<T> Clone for InternedRef<'_, T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
impl<T> Copy for InternedRef<'_, T> {}
impl<T: Hash> Hash for InternedRef<'_, T> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
let ptr = ptr::from_ref::<T>(&*self.arc);
state.write_usize(ptr.addr());
}
}
impl<T: PartialEq> PartialEq for InternedRef<'_, T> {
#[inline]
fn eq(&self, other: &Self) -> bool {
ArcBorrow::ptr_eq(&self.arc, &other.arc)
}
}
impl<T: Eq> Eq for InternedRef<'_, T> {}
impl<T> Deref for InternedRef<'_, T> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
&self.arc
}
}
impl<T: Debug> Debug for InternedRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(*self.arc).fmt(f)
}
}
impl<T: Display> Display for InternedRef<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(*self.arc).fmt(f)
}
}
/// Lazily-initialized global intern map for one type; one `static` instance is
/// created per type by `impl_internable!`.
pub struct InternStorage<T: ?Sized> {
    map: OnceLock<InternMap<T>>,
}

#[allow(
    clippy::new_without_default,
    reason = "this a const fn, so it can't be default yet. See <https://github.com/rust-lang/rust/issues/63065>"
)]
impl<T: ?Sized> InternStorage<T> {
    pub const fn new() -> Self {
        Self { map: OnceLock::new() }
    }
}

impl<T: Internable + ?Sized> InternStorage<T> {
    /// Returns the map, initializing it on first use.
    pub(crate) fn get(&self) -> &InternMap<T> {
        self.map.get_or_init(DashMap::default)
    }
}

/// Types that can be interned with [`Interned`]; implement via [`impl_internable!`].
pub trait Internable: Hash + Eq + Send + Sync + 'static {
    // Whether this type uses GC-mode interning (see module docs).
    const USE_GC: bool;
    fn storage() -> &'static InternStorage<Self>;
}
/// Implements `Internable` for a given list of types, making them usable with `Interned`.
#[macro_export]
#[doc(hidden)]
macro_rules! _impl_internable {
    // `gc;` prefix: GC mode — values live until an explicit collection.
    ( gc; $($t:ty),+ $(,)? ) => { $(
        impl $crate::Internable for $t {
            const USE_GC: bool = true;
            fn storage() -> &'static $crate::InternStorage<Self> {
                // One global storage per interned type.
                static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new();
                &STORAGE
            }
        }
    )+ };
    // Default: non-GC mode — values freed when the last `Interned` drops.
    ( $($t:ty),+ $(,)? ) => { $(
        impl $crate::Internable for $t {
            const USE_GC: bool = false;
            fn storage() -> &'static $crate::InternStorage<Self> {
                static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new();
                &STORAGE
            }
        }
    )+ };
}

pub use crate::_impl_internable as impl_internable;

View file

@ -0,0 +1,325 @@
//! Interning of slices, potentially with a header.
//!
//! See [`crate::intern`] for an explanation of interning modes. Note that slice interning is currently
//! available only in GC mode (there is no other need).
//!
//! [`InternedSlice`] and [`InternedSliceRef`] are essentially [`Interned<(Header, Box<[SliceType]>)>`][crate::Interned]
//! and [`InternedRef`][crate::InternedRef] with the same types, but more optimized. There is only one
//! allocation and the pointer is thin.
use std::{
ffi::c_void,
fmt::{self, Debug},
hash::{BuildHasher, Hash, Hasher},
marker::PhantomData,
mem::ManuallyDrop,
ops::Deref,
ptr::{self, NonNull},
sync::OnceLock,
};
use dashmap::{DashMap, SharedValue};
use hashbrown::raw::RawTable;
use rustc_hash::FxBuildHasher;
use triomphe::{HeaderSlice, HeaderWithLength, ThinArc};
// The intern map keys are the `ThinArc`s themselves; the unit value is unused.
type InternMap<T> = DashMap<
    ThinArc<<T as SliceInternable>::Header, <T as SliceInternable>::SliceType>,
    (),
    FxBuildHasher,
>;

// Write guard over one DashMap shard's raw table.
type Guard<T> = dashmap::RwLockWriteGuard<
    'static,
    RawTable<(
        ThinArc<<T as SliceInternable>::Header, <T as SliceInternable>::SliceType>,
        SharedValue<()>,
    )>,
>;

// What an interned slice dereferences to: length-prefixed header + slice.
type Pointee<T> = HeaderSlice<
    HeaderWithLength<<T as SliceInternable>::Header>,
    [<T as SliceInternable>::SliceType],
>;

/// A strong handle to an interned header+slice; the slice analogue of
/// [`crate::Interned`] (GC mode only).
pub struct InternedSlice<T: SliceInternable> {
    arc: ThinArc<T::Header, T::SliceType>,
}
impl<T: SliceInternable> InternedSlice<T> {
    /// Interns the `(header, slice)` pair, returning a borrowed handle; the
    /// intern map keeps the value alive until a GC runs.
    #[inline]
    pub fn from_header_and_slice<'a>(
        header: T::Header,
        slice: &[T::SliceType],
    ) -> InternedSliceRef<'a, T> {
        const { assert!(T::USE_GC) };
        let storage = T::storage().get();
        let (mut shard, hash) = Self::select(storage, &header, slice);
        // Atomically,
        // - check if `obj` is already in the map
        // - if so, clone its `Arc` and return it
        // - if not, box it up, insert it, and return a clone
        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
        // insert the same object between us looking it up and inserting it.
        let bucket = match shard.find_or_find_insert_slot(
            hash,
            |(other, _)| other.header.header == header && other.slice == *slice,
            |(x, _)| storage.hasher().hash_one(x),
        ) {
            Ok(bucket) => bucket,
            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
            Err(insert_slot) => unsafe {
                shard.insert_in_slot(
                    hash,
                    insert_slot,
                    (ThinArc::from_header_and_slice(header, slice), SharedValue::new(())),
                )
            },
        };
        // SAFETY: We just retrieved/inserted this bucket.
        // `NonNull::new_unchecked()` is safe because the pointer originates from a `ThinArc`.
        unsafe {
            InternedSliceRef {
                // INVARIANT: We create it from a `ThinArc`.
                ptr: NonNull::new_unchecked(ThinArc::as_ptr(&bucket.as_ref().0).cast_mut()),
                _marker: PhantomData,
            }
        }
    }

    /// Locks and returns the DashMap shard responsible for this pair, plus its hash.
    #[inline]
    fn select(
        storage: &'static InternMap<T>,
        header: &T::Header,
        slice: &[T::SliceType],
    ) -> (Guard<T>, u64) {
        let hash = Self::hash(storage, header, slice);
        let shard_idx = storage.determine_shard(hash as usize);
        let shard = &storage.shards()[shard_idx];
        (shard.write(), hash)
    }

    /// Hashes the pair exactly as the stored `ThinArc` keys hash (a
    /// `HeaderSlice` with a length-prefixed header), so lookups match.
    #[inline]
    fn hash(storage: &'static InternMap<T>, header: &T::Header, slice: &[T::SliceType]) -> u64 {
        storage.hasher().hash_one(HeaderSlice {
            header: HeaderWithLength { header, length: slice.len() },
            slice,
        })
    }

    /// The thin allocation pointer — used as identity for eq/hash.
    #[inline(always)]
    fn ptr(&self) -> *const c_void {
        self.arc.as_ptr()
    }

    /// Borrows this handle as a `Copy`able `InternedSliceRef` without touching
    /// the refcount.
    #[inline]
    pub fn as_ref(&self) -> InternedSliceRef<'_, T> {
        InternedSliceRef {
            // SAFETY: `self.ptr` comes from a valid `ThinArc`, so non null.
            // INVARIANT: We create it from a `ThinArc`.
            ptr: unsafe { NonNull::new_unchecked(self.ptr().cast_mut()) },
            _marker: PhantomData,
        }
    }
}
/// Compares interned `Ref`s using pointer equality.
impl<T: SliceInternable> PartialEq for InternedSlice<T> {
// NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects.
#[inline]
fn eq(&self, other: &Self) -> bool {
self.arc.as_ptr() == other.arc.as_ptr()
}
}
impl<T: SliceInternable> Eq for InternedSlice<T> {}
impl<T: SliceInternable> Hash for InternedSlice<T> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_usize(self.ptr().addr())
}
}
impl<T: SliceInternable> Deref for InternedSlice<T> {
type Target = Pointee<T>;
#[inline]
fn deref(&self) -> &Self::Target {
&self.arc
}
}
impl<T: SliceInternable> Clone for InternedSlice<T> {
#[inline]
fn clone(&self) -> Self {
Self { arc: self.arc.clone() }
}
}
impl<T> Debug for InternedSlice<T>
where
T: SliceInternable,
T::SliceType: Debug,
T::Header: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(*self.arc).fmt(f)
}
}
/// A `Copy`able, non-owning handle to an interned header+slice.
#[repr(transparent)]
pub struct InternedSliceRef<'a, T> {
    /// # Invariant
    ///
    /// There is no `ThinArcBorrow` unfortunately, so this is basically a `ManuallyDrop<ThinArc>`,
    /// except that can't be `Copy`, so we store a raw pointer instead.
    ptr: NonNull<c_void>,
    _marker: PhantomData<&'a T>,
}

// SAFETY: This is essentially a `ThinArc`, implemented as a raw pointer because there is no `ThinArcBorrowed`.
unsafe impl<T: Send + Sync> Send for InternedSliceRef<'_, T> {}
unsafe impl<T: Send + Sync> Sync for InternedSliceRef<'_, T> {}

impl<'a, T: SliceInternable> InternedSliceRef<'a, T> {
    /// Reconstructs a `ThinArc` view of the pointer. Wrapped in `ManuallyDrop`
    /// so the refcount is not decremented when the temporary is dropped.
    #[inline(always)]
    fn arc(self) -> ManuallyDrop<ThinArc<T::Header, T::SliceType>> {
        // SAFETY: `self.ptr`'s invariant.
        unsafe { ManuallyDrop::new(ThinArc::from_raw(self.ptr.as_ptr())) }
    }

    /// Upgrades to an owning `InternedSlice` (bumps the refcount), protecting
    /// the value from GC while it is alive.
    #[inline]
    pub fn to_owned(self) -> InternedSlice<T> {
        InternedSlice { arc: (*self.arc()).clone() }
    }

    /// Borrows the payload for the full lifetime of this handle.
    #[inline]
    pub fn get(self) -> &'a Pointee<T> {
        // SAFETY: This is a lifetime extension, valid because we live for `'a`.
        unsafe { &*ptr::from_ref::<Pointee<T>>(&*self.arc()) }
    }

    /// # Safety
    ///
    /// You have to make sure the data is not referenced after the refcount reaches zero; beware the interning
    /// map also keeps a reference to the value.
    #[inline]
    pub unsafe fn decrement_refcount(self) {
        // Dropping the un-wrapped `ThinArc` decrements the count.
        drop(ManuallyDrop::into_inner(self.arc()));
    }

    /// Current strong count; used by the GC (1 == only the intern map holds it).
    #[inline]
    pub(crate) fn strong_count(self) -> usize {
        ThinArc::strong_count(&self.arc())
    }

    /// The thin allocation pointer — the GC's identity key for this value.
    #[inline]
    pub(crate) fn as_raw(self) -> *const c_void {
        self.arc().as_ptr()
    }

    /// **Available only on GC mode**.
    ///
    /// Changes the attached lifetime, as in GC mode, the lifetime is more kind of a lint to prevent misuse
    /// than actual soundness check.
    #[inline]
    pub fn change_lifetime<'b>(self) -> InternedSliceRef<'b, T> {
        const { assert!(T::USE_GC) };
        // SAFETY: The lifetime on `InternedSliceRef` is essentially advisory only for GCed types.
        unsafe { std::mem::transmute::<InternedSliceRef<'a, T>, InternedSliceRef<'b, T>>(self) }
    }
}
impl<T> Clone for InternedSliceRef<'_, T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
impl<T> Copy for InternedSliceRef<'_, T> {}
impl<T: SliceInternable> Hash for InternedSliceRef<'_, T> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_usize(self.ptr.as_ptr().addr());
}
}
impl<T: SliceInternable> PartialEq for InternedSliceRef<'_, T> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.ptr == other.ptr
}
}
impl<T: SliceInternable> Eq for InternedSliceRef<'_, T> {}
impl<T: SliceInternable> Deref for InternedSliceRef<'_, T> {
type Target = Pointee<T>;
#[inline]
fn deref(&self) -> &Self::Target {
self.get()
}
}
impl<T> Debug for InternedSliceRef<'_, T>
where
T: SliceInternable,
T::SliceType: Debug,
T::Header: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(**self).fmt(f)
}
}
/// Lazily-initialized global intern map for one slice type; one `static`
/// instance is created per tag type by `impl_slice_internable!`.
pub struct InternSliceStorage<T: SliceInternable> {
    map: OnceLock<InternMap<T>>,
}

#[allow(
    clippy::new_without_default,
    reason = "this a const fn, so it can't be default yet. See <https://github.com/rust-lang/rust/issues/63065>"
)]
impl<T: SliceInternable> InternSliceStorage<T> {
    pub const fn new() -> Self {
        Self { map: OnceLock::new() }
    }
}

impl<T: SliceInternable> InternSliceStorage<T> {
    /// Returns the map, initializing it on first use.
    pub(crate) fn get(&self) -> &InternMap<T> {
        self.map.get_or_init(DashMap::default)
    }
}

/// Tag types describing an internable header+slice pair; implement via
/// [`impl_slice_internable!`]. The tag itself is never instantiated.
pub trait SliceInternable: Sized + 'static {
    // Whether this type uses GC-mode interning (currently the only mode for slices).
    const USE_GC: bool;
    type Header: Eq + Hash + Send + Sync;
    type SliceType: Eq + Hash + Send + Sync + Copy + 'static;
    fn storage() -> &'static InternSliceStorage<Self>;
}
/// Implements `SliceInternable` for a given list of types, making them usable with `InternedSlice`.
#[macro_export]
#[doc(hidden)]
macro_rules! _impl_slice_internable {
( gc; $tag:ident, $h:ty, $t:ty $(,)? ) => {
#[allow(unreachable_pub)]
pub struct $tag;
impl $crate::SliceInternable for $tag {
const USE_GC: bool = true;
type Header = $h;
type SliceType = $t;
fn storage() -> &'static $crate::InternSliceStorage<Self> {
static STORAGE: $crate::InternSliceStorage<$tag> =
$crate::InternSliceStorage::new();
&STORAGE
}
}
};
}
pub use crate::_impl_slice_internable as impl_slice_internable;

View file

@ -2,219 +2,14 @@
//!
//! Eventually this should probably be replaced with salsa-based interning.
use std::{
borrow::Borrow,
fmt::{self, Debug, Display},
hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
ops::Deref,
sync::OnceLock,
};
use dashmap::{DashMap, SharedValue};
use hashbrown::raw::RawTable;
use rustc_hash::FxHasher;
use triomphe::Arc;
type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
type Guard<T> = dashmap::RwLockWriteGuard<'static, RawTable<(Arc<T>, SharedValue<()>)>>;
mod gc;
mod intern;
mod intern_slice;
mod symbol;
pub use self::gc::{GarbageCollector, GcInternedSliceVisit, GcInternedVisit};
pub use self::intern::{InternStorage, Internable, Interned, InternedRef, impl_internable};
pub use self::intern_slice::{
InternSliceStorage, InternedSlice, InternedSliceRef, SliceInternable, impl_slice_internable,
};
pub use self::symbol::{Symbol, symbols as sym};
pub struct Interned<T: Internable + ?Sized> {
arc: Arc<T>,
}
impl<T: Internable> Interned<T> {
#[inline]
pub fn new(obj: T) -> Self {
Self::new_generic(obj)
}
}
impl Interned<str> {
#[inline]
pub fn new_str(s: &str) -> Self {
Self::new_generic(s)
}
}
impl<T: Internable + ?Sized> Interned<T> {
#[inline]
pub fn new_generic<U>(obj: U) -> Self
where
U: Borrow<T>,
Arc<T>: From<U>,
{
let storage = T::storage().get();
let (mut shard, hash) = Self::select(storage, obj.borrow());
// Atomically,
// - check if `obj` is already in the map
// - if so, clone its `Arc` and return it
// - if not, box it up, insert it, and return a clone
// This needs to be atomic (locking the shard) to avoid races with other thread, which could
// insert the same object between us looking it up and inserting it.
let bucket = match shard.find_or_find_insert_slot(
hash,
|(other, _)| **other == *obj.borrow(),
|(x, _)| Self::hash(storage, x),
) {
Ok(bucket) => bucket,
// SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
Err(insert_slot) => unsafe {
shard.insert_in_slot(hash, insert_slot, (Arc::from(obj), SharedValue::new(())))
},
};
// SAFETY: We just retrieved/inserted this bucket.
unsafe { Self { arc: bucket.as_ref().0.clone() } }
}
#[inline]
fn select(storage: &'static InternMap<T>, obj: &T) -> (Guard<T>, u64) {
let hash = Self::hash(storage, obj);
let shard_idx = storage.determine_shard(hash as usize);
let shard = &storage.shards()[shard_idx];
(shard.write(), hash)
}
#[inline]
fn hash(storage: &'static InternMap<T>, obj: &T) -> u64 {
storage.hasher().hash_one(obj)
}
}
impl<T: Internable + ?Sized> Drop for Interned<T> {
#[inline]
fn drop(&mut self) {
// When the last `Ref` is dropped, remove the object from the global map.
if Arc::count(&self.arc) == 2 {
// Only `self` and the global map point to the object.
self.drop_slow();
}
}
}
impl<T: Internable + ?Sized> Interned<T> {
#[cold]
fn drop_slow(&mut self) {
let storage = T::storage().get();
let (mut shard, hash) = Self::select(storage, &self.arc);
if Arc::count(&self.arc) != 2 {
// Another thread has interned another copy
return;
}
shard.remove_entry(hash, |(other, _)| **other == *self.arc);
// Shrink the backing storage if the shard is less than 50% occupied.
if shard.len() * 2 < shard.capacity() {
let len = shard.len();
shard.shrink_to(len, |(x, _)| Self::hash(storage, x));
}
}
}
/// Compares interned `Ref`s using pointer equality.
impl<T: Internable> PartialEq for Interned<T> {
// NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects.
#[inline]
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.arc, &other.arc)
}
}
impl<T: Internable> Eq for Interned<T> {}
impl PartialEq for Interned<str> {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.arc, &other.arc)
}
}
impl Eq for Interned<str> {}
impl<T: Internable + ?Sized> Hash for Interned<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
// NOTE: Cast disposes vtable pointer / slice/str length.
state.write_usize(Arc::as_ptr(&self.arc) as *const () as usize)
}
}
impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
#[inline]
fn as_ref(&self) -> &T {
&self.arc
}
}
impl<T: Internable + ?Sized> Deref for Interned<T> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
&self.arc
}
}
impl<T: Internable + ?Sized> Clone for Interned<T> {
fn clone(&self) -> Self {
Self { arc: self.arc.clone() }
}
}
impl<T: Debug + Internable + ?Sized> Debug for Interned<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(*self.arc).fmt(f)
}
}
impl<T: Display + Internable + ?Sized> Display for Interned<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
(*self.arc).fmt(f)
}
}
pub struct InternStorage<T: ?Sized> {
map: OnceLock<InternMap<T>>,
}
#[allow(
clippy::new_without_default,
reason = "this a const fn, so it can't be default yet. See <https://github.com/rust-lang/rust/issues/63065>"
)]
impl<T: ?Sized> InternStorage<T> {
pub const fn new() -> Self {
Self { map: OnceLock::new() }
}
}
impl<T: Internable + ?Sized> InternStorage<T> {
fn get(&self) -> &InternMap<T> {
self.map.get_or_init(DashMap::default)
}
}
pub trait Internable: Hash + Eq + 'static {
fn storage() -> &'static InternStorage<Self>;
}
/// Implements `Internable` for a given list of types, making them usable with `Interned`.
#[macro_export]
#[doc(hidden)]
macro_rules! _impl_internable {
( $($t:path),+ $(,)? ) => { $(
impl $crate::Internable for $t {
fn storage() -> &'static $crate::InternStorage<Self> {
static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new();
&STORAGE
}
}
)+ };
}
pub use crate::_impl_internable as impl_internable;
impl_internable!(str,);

View file

@ -25,6 +25,9 @@ decl_derive!(
/// visited (and its type is not required to implement `TypeVisitable`).
type_visitable_derive
);
decl_derive!(
[GenericTypeVisitable] => generic_type_visitable_derive
);
fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
if let syn::Data::Union(_) = s.ast().data {
@ -163,6 +166,33 @@ fn has_ignore_attr(attrs: &[syn::Attribute], name: &'static str, meta: &'static
ignored
}
fn generic_type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
if let syn::Data::Union(_) = s.ast().data {
panic!("cannot derive on union")
}
s.add_bounds(synstructure::AddBounds::Fields);
s.bind_with(|_| synstructure::BindStyle::Move);
s.add_impl_generic(parse_quote!(__V: hir_ty::next_solver::interner::WorldExposer));
let body_visit = s.each(|bind| {
quote! {
::rustc_type_ir::GenericTypeVisitable::<__V>::generic_visit_with(#bind, __visitor);
}
});
s.bound_impl(
quote!(::rustc_type_ir::GenericTypeVisitable<__V>),
quote! {
fn generic_visit_with(
&self,
__visitor: &mut __V
) {
match self { #body_visit }
}
},
)
}
decl_derive!(
[UpmapFromRaFixture] => upmap_from_ra_fixture
);

View file

@ -48,8 +48,7 @@ impl ToTokens for TrackedQuery {
quote!(#(#options),*)
})
.into_iter()
.chain(self.lru.map(|lru| quote!(lru = #lru)))
.chain(Some(quote!(unsafe(non_update_types))));
.chain(self.lru.map(|lru| quote!(lru = #lru)));
let annotation = quote!(#[salsa_macros::tracked( #(#options),* )]);
let pat_and_tys = &self.pat_and_tys;

View file

@ -355,6 +355,7 @@ impl flags::AnalysisStats {
}
hir::clear_tls_solver_cache();
unsafe { hir::collect_ty_garbage() };
let db = host.raw_database_mut();
db.trigger_lru_eviction();
@ -390,11 +391,12 @@ impl flags::AnalysisStats {
all += 1;
let Err(e) = db.layout_of_adt(
hir_def::AdtId::from(a),
GenericArgs::new_from_iter(interner, []),
GenericArgs::empty(interner).store(),
hir_ty::ParamEnvAndCrate {
param_env: db.trait_environment(a.into()),
krate: a.krate(db).into(),
},
}
.store(),
) else {
continue;
};
@ -830,7 +832,7 @@ impl flags::AnalysisStats {
let (previous_exprs, previous_unknown, previous_partially_unknown) =
(num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
for (expr_id, _) in body.exprs() {
let ty = &inference_result[expr_id];
let ty = inference_result.expr_ty(expr_id);
num_exprs += 1;
let unknown_or_partial = if ty.is_ty_error() {
num_exprs_unknown += 1;
@ -897,15 +899,15 @@ impl flags::AnalysisStats {
start.col,
end.line + 1,
end.col,
mismatch.expected.display(db, display_target),
mismatch.actual.display(db, display_target)
mismatch.expected.as_ref().display(db, display_target),
mismatch.actual.as_ref().display(db, display_target)
));
} else {
bar.println(format!(
"{}: Expected {}, got {}",
name.display(db, Edition::LATEST),
mismatch.expected.display(db, display_target),
mismatch.actual.display(db, display_target)
mismatch.expected.as_ref().display(db, display_target),
mismatch.actual.as_ref().display(db, display_target)
));
}
}
@ -913,8 +915,8 @@ impl flags::AnalysisStats {
println!(
r#"{},mismatch,"{}","{}""#,
location_csv_expr(db, vfs, &sm(), expr_id),
mismatch.expected.display(db, display_target),
mismatch.actual.display(db, display_target)
mismatch.expected.as_ref().display(db, display_target),
mismatch.actual.as_ref().display(db, display_target)
);
}
}
@ -934,7 +936,7 @@ impl flags::AnalysisStats {
let (previous_pats, previous_unknown, previous_partially_unknown) =
(num_pats, num_pats_unknown, num_pats_partially_unknown);
for (pat_id, _) in body.pats() {
let ty = &inference_result[pat_id];
let ty = inference_result.pat_ty(pat_id);
num_pats += 1;
let unknown_or_partial = if ty.is_ty_error() {
num_pats_unknown += 1;
@ -999,15 +1001,15 @@ impl flags::AnalysisStats {
start.col,
end.line + 1,
end.col,
mismatch.expected.display(db, display_target),
mismatch.actual.display(db, display_target)
mismatch.expected.as_ref().display(db, display_target),
mismatch.actual.as_ref().display(db, display_target)
));
} else {
bar.println(format!(
"{}: Expected {}, got {}",
name.display(db, Edition::LATEST),
mismatch.expected.display(db, display_target),
mismatch.actual.display(db, display_target)
mismatch.expected.as_ref().display(db, display_target),
mismatch.actual.as_ref().display(db, display_target)
));
}
}
@ -1015,8 +1017,8 @@ impl flags::AnalysisStats {
println!(
r#"{},mismatch,"{}","{}""#,
location_csv_pat(db, vfs, &sm(), pat_id),
mismatch.expected.display(db, display_target),
mismatch.actual.display(db, display_target)
mismatch.expected.as_ref().display(db, display_target),
mismatch.actual.as_ref().display(db, display_target)
);
}
}

View file

@ -98,6 +98,13 @@ config_data! {
/// Code's `files.watcherExclude`.
files_exclude | files_excludeDirs: Vec<Utf8PathBuf> = vec![],
/// This config controls the frequency in which rust-analyzer will perform its internal Garbage
/// Collection. It is specified in revisions, roughly equivalent to number of changes. The default
/// is 1000.
///
/// Setting a smaller value may help limit peak memory usage at the expense of speed.
gc_frequency: usize = 1000,
/// If this is `true`, when "Goto Implementations" and in "Implementations" lens, are triggered on a `struct` or `enum` or `union`, we filter out trait implementations that originate from `derive`s above the type.
gotoImplementations_filterAdjacentDerives: bool = false,
@ -1701,9 +1708,11 @@ impl Config {
pub fn caps(&self) -> &ClientCapabilities {
&self.caps
}
}
impl Config {
pub fn gc_freq(&self) -> usize {
*self.gc_frequency()
}
pub fn assist(&self, source_root: Option<SourceRootId>) -> AssistConfig {
AssistConfig {
snippet_cap: self.snippet_cap(),

View file

@ -193,6 +193,8 @@ pub(crate) struct GlobalState {
/// which will usually end up causing a bunch of incorrect diagnostics on startup.
pub(crate) incomplete_crate_graph: bool,
pub(crate) revisions_until_next_gc: usize,
pub(crate) minicore: MiniCoreRustAnalyzerInternalOnly,
}
@ -319,6 +321,8 @@ impl GlobalState {
incomplete_crate_graph: false,
minicore: MiniCoreRustAnalyzerInternalOnly::default(),
revisions_until_next_gc: config.gc_freq(),
};
// Apply any required database inputs from the config.
this.update_configuration(config);
@ -435,6 +439,15 @@ impl GlobalState {
});
self.analysis_host.apply_change(change);
if self.revisions_until_next_gc == 0 {
// SAFETY: Just changed some database inputs, all queries were canceled.
unsafe { hir::collect_ty_garbage() };
self.revisions_until_next_gc = self.config.gc_freq();
} else {
self.revisions_until_next_gc -= 1;
}
if !modified_ratoml_files.is_empty()
|| !self.config.same_source_root_parent_map(&self.local_roots_parent_map)
{

View file

@ -635,6 +635,17 @@ Default: `"client"`
Controls file watching implementation.
## rust-analyzer.gc.frequency {#gc.frequency}
Default: `1000`
This config controls the frequency in which rust-analyzer will perform its internal Garbage
Collection. It is specified in revisions, roughly equivalent to number of changes. The default
is 1000.
Setting a smaller value may help limit peak memory usage at the expense of speed.
## rust-analyzer.gotoImplementations.filterAdjacentDerives {#gotoImplementations.filterAdjacentDerives}
Default: `false`

View file

@ -1627,6 +1627,17 @@
}
}
},
{
"title": "Gc",
"properties": {
"rust-analyzer.gc.frequency": {
"markdownDescription": "This config controls the frequency in which rust-analyzer will perform its internal Garbage\nCollection. It is specified in revisions, roughly equivalent to number of changes. The default\nis 1000.\n\nSetting a smaller value may help limit peak memory usage at the expense of speed.",
"default": 1000,
"type": "integer",
"minimum": 0
}
}
},
{
"title": "Goto Implementations",
"properties": {