Merge pull request #20184 from Veykril/push-ywpynxnltpok

chore: Remove dead field from `InferenceContext`
Lukas Wirth 2025-07-06 09:08:36 +00:00 committed by GitHub
commit 0ac65592a8
3 changed files with 44 additions and 32 deletions
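
In addition to dropping the dead `type_of_for_iterator` field, the diff below narrows the visibility of most `InferenceResult` fields and adds read-only accessors, so downstream code stops reaching into the maps directly. A minimal before/after sketch of the call-site migration (the `inspect` helper is hypothetical; the `hir-ty` types it uses are assumed to be in scope):

// Before: fields such as `expr_adjustments` and `diagnostics` were `pub`,
// so callers indexed the maps themselves:
//     let adjusts = infer.expr_adjustments.get(&expr_id).map(|v| &**v);
//     for d in &infer.diagnostics { /* ... */ }
// After: the fields are private or pub(crate); outside readers use the new accessors.
fn inspect(infer: &InferenceResult, expr_id: ExprId) {
    let _adjusts: Option<&[Adjustment]> = infer.expr_adjustment(expr_id);
    for _d in infer.diagnostics() {
        // each item is an `InferenceDiagnostic` collected during inference
    }
}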


@@ -460,19 +460,17 @@ pub struct InferenceResult {
     /// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
     /// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
     /// that which allows us to resolve a [`TupleFieldId`]s type.
-    pub tuple_field_access_types: FxHashMap<TupleId, Substitution>,
+    tuple_field_access_types: FxHashMap<TupleId, Substitution>,
     /// During inference this field is empty and [`InferenceContext::diagnostics`] is filled instead.
-    pub diagnostics: Vec<InferenceDiagnostic>,
-    pub type_of_expr: ArenaMap<ExprId, Ty>,
+    diagnostics: Vec<InferenceDiagnostic>,
+    pub(crate) type_of_expr: ArenaMap<ExprId, Ty>,
     /// For each pattern record the type it resolves to.
     ///
     /// **Note**: When a pattern type is resolved it may still contain
     /// unresolved or missing subpatterns or subpatterns of mismatched types.
-    pub type_of_pat: ArenaMap<PatId, Ty>,
-    pub type_of_binding: ArenaMap<BindingId, Ty>,
-    pub type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
-    /// Type of the result of `.into_iter()` on the for. `ExprId` is the one of the whole for loop.
-    pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
+    pub(crate) type_of_pat: ArenaMap<PatId, Ty>,
+    pub(crate) type_of_binding: ArenaMap<BindingId, Ty>,
+    pub(crate) type_of_rpit: ArenaMap<ImplTraitIdx, Ty>,
     type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
     /// Whether there are any type-mismatching errors in the result.
     // FIXME: This isn't as useful as initially thought due to us falling back placeholders to
@@ -483,7 +481,7 @@ pub struct InferenceResult {
     // FIXME: Move this into `InferenceContext`
     standard_types: InternedStandardTypes,
     /// Stores the types which were implicitly dereferenced in pattern binding modes.
-    pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
+    pub(crate) pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
     /// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
     ///
     /// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
@@ -497,12 +495,12 @@ pub struct InferenceResult {
     /// }
     /// ```
     /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
-    pub binding_modes: ArenaMap<PatId, BindingMode>,
-    pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
+    pub(crate) binding_modes: ArenaMap<PatId, BindingMode>,
+    pub(crate) expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
     pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
     // FIXME: remove this field
     pub mutated_bindings_in_closure: FxHashSet<BindingId>,
-    pub coercion_casts: FxHashSet<ExprId>,
+    pub(crate) coercion_casts: FxHashSet<ExprId>,
 }
 
 impl InferenceResult {
@@ -566,6 +564,26 @@ impl InferenceResult {
     pub fn is_erroneous(&self) -> bool {
         self.has_errors && self.type_of_expr.iter().count() == 0
     }
+
+    pub fn diagnostics(&self) -> &[InferenceDiagnostic] {
+        &self.diagnostics
+    }
+
+    pub fn tuple_field_access_type(&self, id: TupleId) -> &Substitution {
+        &self.tuple_field_access_types[&id]
+    }
+
+    pub fn pat_adjustment(&self, id: PatId) -> Option<&[Ty]> {
+        self.pat_adjustments.get(&id).map(|it| &**it)
+    }
+
+    pub fn expr_adjustment(&self, id: ExprId) -> Option<&[Adjustment]> {
+        self.expr_adjustments.get(&id).map(|it| &**it)
+    }
+
+    pub fn binding_mode(&self, id: PatId) -> Option<BindingMode> {
+        self.binding_modes.get(id).copied()
+    }
 }
 
 impl Index<ExprId> for InferenceResult {
@@ -772,7 +790,6 @@ impl<'db> InferenceContext<'db> {
             type_of_pat,
             type_of_binding,
             type_of_rpit,
-            type_of_for_iterator,
             type_mismatches,
             has_errors,
             standard_types: _,
@@ -832,11 +849,6 @@ impl<'db> InferenceContext<'db> {
             *has_errors = *has_errors || ty.contains_unknown();
         }
         type_of_rpit.shrink_to_fit();
-        for ty in type_of_for_iterator.values_mut() {
-            *ty = table.resolve_completely(ty.clone());
-            *has_errors = *has_errors || ty.contains_unknown();
-        }
-        type_of_for_iterator.shrink_to_fit();
 
         *has_errors |= !type_mismatches.is_empty();
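
For per-expression and per-pattern types, the call sites switch from the `type_of_expr`/`type_of_pat` maps to the existing `Index` impls on `InferenceResult` (`infer[expr_id]`, `infer[pat_id]`), as the `SourceAnalyzer` hunks below show. A rough sketch of that access pattern; both helpers are hypothetical and assume the `hir-ty` types are imported:

// Read an expression's inferred type through the Index impl. Unlike the old
// `type_of_expr.get(id)`, indexing expects the entry to exist.
fn expr_ty(infer: &InferenceResult, id: ExprId) -> &Ty {
    &infer[id]
}

// Mirror of the pattern-side change: index the result, then look at the ADT substitution.
fn pat_adt_substs(infer: &InferenceResult, id: PatId) -> Option<&Substitution> {
    Some(infer[id].as_adt()?.1)
}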


@@ -1260,7 +1260,9 @@ impl TupleField {
     }
 
     pub fn ty<'db>(&self, db: &'db dyn HirDatabase) -> Type<'db> {
-        let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple]
+        let ty = db
+            .infer(self.owner)
+            .tuple_field_access_type(self.tuple)
             .as_slice(Interner)
             .get(self.index as usize)
             .and_then(|arg| arg.ty(Interner))
@@ -1927,7 +1929,7 @@ impl DefWithBody {
         expr_store_diagnostics(db, acc, &source_map);
         let infer = db.infer(self.into());
-        for d in &infer.diagnostics {
+        for d in infer.diagnostics() {
             acc.extend(AnyDiagnostic::inference_diagnostic(
                 db,
                 self.into(),


@@ -254,7 +254,7 @@ impl<'db> SourceAnalyzer<'db> {
         // expressions nor patterns).
         let expr_id = self.expr_id(expr.clone())?.as_expr()?;
         let infer = self.infer()?;
-        infer.expr_adjustments.get(&expr_id).map(|v| &**v)
+        infer.expr_adjustment(expr_id)
     }
 
     pub(crate) fn type_of_type(
@@ -286,7 +286,7 @@ impl<'db> SourceAnalyzer<'db> {
         let infer = self.infer()?;
         let coerced = expr_id
             .as_expr()
-            .and_then(|expr_id| infer.expr_adjustments.get(&expr_id))
+            .and_then(|expr_id| infer.expr_adjustment(expr_id))
             .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
         let ty = infer[expr_id].clone();
         let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
@@ -302,12 +302,11 @@ impl<'db> SourceAnalyzer<'db> {
         let infer = self.infer()?;
         let coerced = match expr_or_pat_id {
             ExprOrPatId::ExprId(idx) => infer
-                .expr_adjustments
-                .get(&idx)
+                .expr_adjustment(idx)
                 .and_then(|adjusts| adjusts.last().cloned())
                 .map(|adjust| adjust.target),
             ExprOrPatId::PatId(idx) => {
-                infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned())
+                infer.pat_adjustment(idx).and_then(|adjusts| adjusts.last().cloned())
             }
         };
@@ -345,7 +344,7 @@ impl<'db> SourceAnalyzer<'db> {
     ) -> Option<BindingMode> {
         let id = self.pat_id(&pat.clone().into())?;
         let infer = self.infer()?;
-        infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
+        infer.binding_mode(id.as_pat()?).map(|bm| match bm {
             hir_ty::BindingMode::Move => BindingMode::Move,
             hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
             hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -362,8 +361,7 @@ impl<'db> SourceAnalyzer<'db> {
         let infer = self.infer()?;
         Some(
             infer
-                .pat_adjustments
-                .get(&pat_id.as_pat()?)?
+                .pat_adjustment(pat_id.as_pat()?)?
                 .iter()
                 .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
                 .collect(),
@@ -736,7 +734,7 @@ impl<'db> SourceAnalyzer<'db> {
         let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
         let variant_data = variant.fields(db);
         let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
-        let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
+        let (adt, subst) = self.infer()?[pat_id.as_pat()?].as_adt()?;
         let field_ty =
             db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
         Some((
@@ -1250,7 +1248,7 @@ impl<'db> SourceAnalyzer<'db> {
         let infer = self.infer()?;
         let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
-        let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+        let substs = infer[pat_id].as_adt()?.1;
         let (variant, missing_fields, _exhaustive) =
             record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
@@ -1786,8 +1784,8 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
 }
 
 fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
-    match infer.expr_adjustments.get(&id).and_then(|adjustments| adjustments.last()) {
+    match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) {
         Some(adjustment) => Some(&adjustment.target),
-        None => infer.type_of_expr.get(id),
+        None => Some(&infer[id]),
     }
 }
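
The binding-mode lookup now returns the mode by value (`Option<BindingMode>`, via `.copied()`), so consumers can match on it directly, as the `SourceAnalyzer` hunk above does. A tiny sketch under the same assumptions (hypothetical helper, `hir_ty` types in scope):

// True if the pattern binds by reference (`ref` / `ref mut`), false for by-move
// bindings or when no binding mode was recorded for this pattern.
fn binds_by_ref(infer: &hir_ty::InferenceResult, pat: PatId) -> bool {
    matches!(infer.binding_mode(pat), Some(hir_ty::BindingMode::Ref(_)))
}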