Merge commit 'ac998a74b3' into sync-from-ra

Laurențiu Nicola 2024-02-18 09:41:20 +02:00
parent d33d8675d0
commit 6b17dba68c
178 changed files with 7101 additions and 1965 deletions

View file

@ -169,9 +169,9 @@ impl ExprValidator {
return;
}
let pattern_arena = Arena::new();
let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena);
let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
let pattern_arena = Arena::new();
let mut m_arms = Vec::with_capacity(arms.len());
let mut has_lowering_errors = false;
for arm in arms {
@ -196,8 +196,9 @@ impl ExprValidator {
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
// to the matrix here.
let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors);
let m_arm = pat_analysis::MatchArm {
pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors),
pat: pattern_arena.alloc(pat),
has_guard: arm.guard.is_some(),
arm_data: (),
};
@ -223,7 +224,7 @@ impl ExprValidator {
ValidityConstraint::ValidOnly,
) {
Ok(report) => report,
Err(void) => match void {},
Err(()) => return,
};
// FIXME Report unreachable arms
@ -245,10 +246,10 @@ impl ExprValidator {
db: &dyn HirDatabase,
body: &Body,
have_errors: &mut bool,
) -> &'p DeconstructedPat<'p> {
) -> DeconstructedPat<'p> {
let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
let pattern = patcx.lower_pattern(pat);
let pattern = cx.pattern_arena.alloc(cx.lower_pat(&pattern));
let pattern = cx.lower_pat(&pattern);
if !patcx.errors.is_empty() {
*have_errors = true;
}
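
Editor's note: read together, these hunks invert who owns the lowered patterns. `MatchCheckCtx::new` drops its arena parameter, `lower_pattern` returns a `DeconstructedPat` by value, the caller allocates it into a locally created `pattern_arena`, and the impossible `Void` error becomes `()` handled with an early `return`. A minimal sketch of that ownership shape, using toy stand-in types rather than the real rust-analyzer ones (not part of the diff):

```rust
// Toy model of the new ownership: lowering returns the pattern by value and the
// caller decides where it lives (a plain Vec here, instead of `typed_arena::Arena`).
struct Pat(String);

struct Ctx; // stands in for MatchCheckCtx, which no longer carries an arena

impl Ctx {
    fn lower_pattern(&self, src: &str) -> Pat {
        Pat(src.to_owned()) // returned by value, no `&'p Pat` tied to the context
    }
}

fn main() {
    let cx = Ctx;
    let mut storage: Vec<Pat> = Vec::new(); // caller-owned, like the local `pattern_arena`
    for src in ["Some(_)", "None"] {
        let pat = cx.lower_pattern(src);
        storage.push(pat); // the caller, not the context, allocates the pattern
    }
    for p in &storage {
        println!("lowered {}", p.0);
    }
}
```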

View file

@ -1,6 +1,7 @@
//! Interface with `rustc_pattern_analysis`.
use std::fmt;
use tracing::debug;
use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use rustc_hash::FxHashMap;
@ -11,7 +12,6 @@ use rustc_pattern_analysis::{
};
use smallvec::{smallvec, SmallVec};
use stdx::never;
use typed_arena::Arena;
use crate::{
db::HirDatabase,
@ -26,7 +26,7 @@ use Constructor::*;
// Re-export r-a-specific versions of all these types.
pub(crate) type DeconstructedPat<'p> =
rustc_pattern_analysis::pat::DeconstructedPat<'p, MatchCheckCtx<'p>>;
rustc_pattern_analysis::pat::DeconstructedPat<MatchCheckCtx<'p>>;
pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>;
pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat<MatchCheckCtx<'p>>;
@ -40,7 +40,6 @@ pub(crate) struct MatchCheckCtx<'p> {
module: ModuleId,
body: DefWithBodyId,
pub(crate) db: &'p dyn HirDatabase,
pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
exhaustive_patterns: bool,
min_exhaustive_patterns: bool,
}
@ -52,17 +51,12 @@ pub(crate) struct PatData<'p> {
}
impl<'p> MatchCheckCtx<'p> {
pub(crate) fn new(
module: ModuleId,
body: DefWithBodyId,
db: &'p dyn HirDatabase,
pattern_arena: &'p Arena<DeconstructedPat<'p>>,
) -> Self {
pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self {
let def_map = db.crate_def_map(module.krate());
let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
let min_exhaustive_patterns =
def_map.is_unstable_feature_enabled("min_exhaustive_patterns");
Self { module, body, db, pattern_arena, exhaustive_patterns, min_exhaustive_patterns }
Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns }
}
fn is_uninhabited(&self, ty: &Ty) -> bool {
@ -131,15 +125,15 @@ impl<'p> MatchCheckCtx<'p> {
}
pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> {
let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat));
let singleton = |pat| vec![pat];
let ctor;
let fields: &[_];
let fields: Vec<_>;
match pat.kind.as_ref() {
PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat),
PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
ctor = Wildcard;
fields = &[];
fields = Vec::new();
}
PatKind::Deref { subpattern } => {
ctor = match pat.ty.kind(Interner) {
@ -157,7 +151,7 @@ impl<'p> MatchCheckCtx<'p> {
match pat.ty.kind(Interner) {
TyKind::Tuple(_, substs) => {
ctor = Struct;
let mut wilds: SmallVec<[_; 2]> = substs
let mut wilds: Vec<_> = substs
.iter(Interner)
.map(|arg| arg.assert_ty_ref(Interner).clone())
.map(DeconstructedPat::wildcard)
@ -166,7 +160,7 @@ impl<'p> MatchCheckCtx<'p> {
let idx: u32 = pat.field.into_raw().into();
wilds[idx as usize] = self.lower_pat(&pat.pattern);
}
fields = self.pattern_arena.alloc_extend(wilds)
fields = wilds
}
TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => {
// The only legal patterns of type `Box` (outside `std`) are `_` and box
@ -216,33 +210,29 @@ impl<'p> MatchCheckCtx<'p> {
field_id_to_id[field_idx as usize] = Some(i);
ty
});
let mut wilds: SmallVec<[_; 2]> =
tys.map(DeconstructedPat::wildcard).collect();
let mut wilds: Vec<_> = tys.map(DeconstructedPat::wildcard).collect();
for pat in subpatterns {
let field_idx: u32 = pat.field.into_raw().into();
if let Some(i) = field_id_to_id[field_idx as usize] {
wilds[i] = self.lower_pat(&pat.pattern);
}
}
fields = self.pattern_arena.alloc_extend(wilds);
fields = wilds;
}
_ => {
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
ctor = Wildcard;
fields = &[];
fields = Vec::new();
}
}
}
&PatKind::LiteralBool { value } => {
ctor = Bool(value);
fields = &[];
fields = Vec::new();
}
PatKind::Or { pats } => {
ctor = Or;
// Collect here because `Arena::alloc_extend` panics on reentrancy.
let subpats: SmallVec<[_; 2]> =
pats.iter().map(|pat| self.lower_pat(pat)).collect();
fields = self.pattern_arena.alloc_extend(subpats);
fields = pats.iter().map(|pat| self.lower_pat(pat)).collect();
}
}
let data = PatData { db: self.db };
@ -307,7 +297,7 @@ impl<'p> MatchCheckCtx<'p> {
}
impl<'p> TypeCx for MatchCheckCtx<'p> {
type Error = Void;
type Error = ();
type Ty = Ty;
type VariantIdx = EnumVariantId;
type StrLit = Void;
@ -463,7 +453,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
fn write_variant_name(
f: &mut fmt::Formatter<'_>,
pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>,
pat: &rustc_pattern_analysis::pat::DeconstructedPat<Self>,
) -> fmt::Result {
let variant =
pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt));
@ -485,8 +475,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
Ok(())
}
fn bug(&self, fmt: fmt::Arguments<'_>) -> ! {
panic!("{}", fmt)
fn bug(&self, fmt: fmt::Arguments<'_>) {
debug!("{}", fmt)
}
}
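
Editor's note: this file tracks the upstream `rustc_pattern_analysis` API changes — `DeconstructedPat` is now parameterized only by the context type, subpattern fields are plain `Vec`s instead of arena-allocated slices, `Error` becomes `()`, and `bug` logs via `tracing::debug!` instead of panicking. A toy contrast of the two storage strategies (illustrative stand-in types, not the real API; not part of the diff):

```rust
// Before: lowered patterns borrowed their subpatterns from an arena owned by the
// context. After: they own a Vec, so no `typed_arena::Arena` or extra `'p`
// plumbing is needed on the rust-analyzer side.
struct BorrowedPat<'p> {
    fields: &'p [BorrowedPat<'p>], // old shape: fields live in external storage
}

struct OwnedPat {
    fields: Vec<OwnedPat>, // new shape: fields are owned by the pattern itself
}

fn main() {
    // Owned storage composes without threading a lifetime through every type.
    let leaf = OwnedPat { fields: Vec::new() };
    let parent = OwnedPat { fields: vec![leaf] };
    println!("{} owned subpattern(s)", parent.fields.len());

    // The borrowed shape needs storage that outlives the pattern referring to it.
    let storage: Vec<BorrowedPat<'_>> = Vec::new();
    let root = BorrowedPat { fields: &storage[..] };
    println!("{} borrowed subpattern(s)", root.fields.len());
}
```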

View file

@ -26,7 +26,7 @@ use std::{convert::identity, ops::Index};
use chalk_ir::{
cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety,
Scalar, TyKind, TypeFlags,
Scalar, TyKind, TypeFlags, Variance,
};
use either::Either;
use hir_def::{
@ -58,8 +58,9 @@ use crate::{
static_lifetime, to_assoc_type_id,
traits::FnTrait,
utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment,
Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId,
InEnvironment, Interner, Lifetime, ProjectionTy, RpitId, Substitution, TraitEnvironment,
TraitRef, Ty, TyBuilder, TyExt,
};
// This lint has a false positive here. See the link below for details.
@ -68,7 +69,7 @@ use crate::{
#[allow(unreachable_pub)]
pub use coerce::could_coerce;
#[allow(unreachable_pub)]
pub use unify::could_unify;
pub use unify::{could_unify, could_unify_deeply};
use cast::CastCheck;
pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
@ -688,10 +689,17 @@ impl<'a> InferenceContext<'a> {
for ty in type_of_for_iterator.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for mismatch in type_mismatches.values_mut() {
type_mismatches.retain(|_, mismatch| {
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
}
chalk_ir::zip::Zip::zip_with(
&mut UnknownMismatch(self.db),
Variance::Invariant,
&mismatch.expected,
&mismatch.actual,
)
.is_ok()
});
diagnostics.retain_mut(|diagnostic| {
use InferenceDiagnostic::*;
match diagnostic {
@ -1502,3 +1510,116 @@ impl std::ops::BitOrAssign for Diverges {
*self = *self | other;
}
}
/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out
/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not
/// helpful, as the cause is usually an underlying name resolution problem.
struct UnknownMismatch<'db>(&'db dyn HirDatabase);
impl chalk_ir::zip::Zipper<Interner> for UnknownMismatch<'_> {
fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> {
let zip_substs = |this: &mut Self,
variances,
sub_a: &Substitution,
sub_b: &Substitution| {
this.zip_substs(variance, variances, sub_a.as_slice(Interner), sub_b.as_slice(Interner))
};
match (a.kind(Interner), b.kind(Interner)) {
(TyKind::Adt(id_a, sub_a), TyKind::Adt(id_b, sub_b)) if id_a == id_b => zip_substs(
self,
Some(self.unification_database().adt_variance(*id_a)),
sub_a,
sub_b,
)?,
(
TyKind::AssociatedType(assoc_ty_a, sub_a),
TyKind::AssociatedType(assoc_ty_b, sub_b),
) if assoc_ty_a == assoc_ty_b => zip_substs(self, None, sub_a, sub_b)?,
(TyKind::Tuple(arity_a, sub_a), TyKind::Tuple(arity_b, sub_b))
if arity_a == arity_b =>
{
zip_substs(self, None, sub_a, sub_b)?
}
(TyKind::OpaqueType(opaque_ty_a, sub_a), TyKind::OpaqueType(opaque_ty_b, sub_b))
if opaque_ty_a == opaque_ty_b =>
{
zip_substs(self, None, sub_a, sub_b)?
}
(TyKind::Slice(ty_a), TyKind::Slice(ty_b)) => self.zip_tys(variance, ty_a, ty_b)?,
(TyKind::FnDef(fn_def_a, sub_a), TyKind::FnDef(fn_def_b, sub_b))
if fn_def_a == fn_def_b =>
{
zip_substs(
self,
Some(self.unification_database().fn_def_variance(*fn_def_a)),
sub_a,
sub_b,
)?
}
(TyKind::Ref(mutability_a, _, ty_a), TyKind::Ref(mutability_b, _, ty_b))
if mutability_a == mutability_b =>
{
self.zip_tys(variance, ty_a, ty_b)?
}
(TyKind::Raw(mutability_a, ty_a), TyKind::Raw(mutability_b, ty_b))
if mutability_a == mutability_b =>
{
self.zip_tys(variance, ty_a, ty_b)?
}
(TyKind::Array(ty_a, const_a), TyKind::Array(ty_b, const_b)) if const_a == const_b => {
self.zip_tys(variance, ty_a, ty_b)?
}
(TyKind::Closure(id_a, sub_a), TyKind::Closure(id_b, sub_b)) if id_a == id_b => {
zip_substs(self, None, sub_a, sub_b)?
}
(TyKind::Coroutine(coroutine_a, sub_a), TyKind::Coroutine(coroutine_b, sub_b))
if coroutine_a == coroutine_b =>
{
zip_substs(self, None, sub_a, sub_b)?
}
(
TyKind::CoroutineWitness(coroutine_a, sub_a),
TyKind::CoroutineWitness(coroutine_b, sub_b),
) if coroutine_a == coroutine_b => zip_substs(self, None, sub_a, sub_b)?,
(TyKind::Function(fn_ptr_a), TyKind::Function(fn_ptr_b))
if fn_ptr_a.sig == fn_ptr_b.sig && fn_ptr_a.num_binders == fn_ptr_b.num_binders =>
{
zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)?
}
(TyKind::Error, TyKind::Error) => (),
(TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution),
_ => (),
}
Ok(())
}
fn zip_lifetimes(&mut self, _: Variance, _: &Lifetime, _: &Lifetime) -> chalk_ir::Fallible<()> {
Ok(())
}
fn zip_consts(&mut self, _: Variance, _: &Const, _: &Const) -> chalk_ir::Fallible<()> {
Ok(())
}
fn zip_binders<T>(
&mut self,
variance: Variance,
a: &Binders<T>,
b: &Binders<T>,
) -> chalk_ir::Fallible<()>
where
T: Clone
+ HasInterner<Interner = Interner>
+ chalk_ir::zip::Zip<Interner>
+ TypeFoldable<Interner>,
{
chalk_ir::zip::Zip::zip_with(self, variance, a.skip_binders(), b.skip_binders())
}
fn interner(&self) -> Interner {
Interner
}
fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase<Interner> {
&self.0
}
}
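
Editor's note: the bulk of this hunk is the new `UnknownMismatch` zipper. After inference, `type_mismatches.retain(..)` keeps a mismatch only when zipping `expected` against `actual` succeeds, and the zipper fails exactly when `{unknown}` (`TyKind::Error`) appears on one side but not the other, so mismatches caused by unresolved names get filtered out. A self-contained toy model of that filter (simplified types, not chalk-ir; not part of the diff):

```rust
// Walk both types in lockstep; Err(()) means "this mismatch only differs in
// `{unknown}`, drop the diagnostic", Ok(()) means "keep it".
enum Ty {
    Unknown,
    Adt(&'static str, Vec<Ty>),
}

fn zip_unknown(a: &Ty, b: &Ty) -> Result<(), ()> {
    match (a, b) {
        (Ty::Unknown, Ty::Unknown) => Ok(()),
        (Ty::Unknown, _) | (_, Ty::Unknown) => Err(()),
        (Ty::Adt(na, sa), Ty::Adt(nb, sb)) if na == nb && sa.len() == sb.len() => {
            sa.iter().zip(sb).try_for_each(|(x, y)| zip_unknown(x, y))
        }
        _ => Ok(()), // genuinely different constructors: keep the diagnostic
    }
}

fn main() {
    let expected = Ty::Adt("Option", vec![Ty::Adt("i32", vec![])]);
    let actual = Ty::Adt("Option", vec![Ty::Unknown]);
    // In `infer`, the map entry is retained only when the zipper succeeds, so this
    // `{unknown}`-only mismatch would be filtered out of the diagnostics.
    assert!(zip_unknown(&expected, &actual).is_err());
    println!("unknown-only mismatch filtered");
}
```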

View file

@ -485,6 +485,7 @@ impl InferenceContext<'_> {
Statement::Expr { expr, has_semi: _ } => {
self.consume_expr(*expr);
}
Statement::Item => (),
}
}
if let Some(tail) = tail {
@ -531,6 +532,9 @@ impl InferenceContext<'_> {
self.consume_expr(expr);
}
}
&Expr::Become { expr } => {
self.consume_expr(expr);
}
Expr::RecordLit { fields, spread, .. } => {
if let &Some(expr) = spread {
self.consume_expr(expr);

View file

@ -502,6 +502,7 @@ impl InferenceContext<'_> {
self.result.standard_types.never.clone()
}
&Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr),
&Expr::Become { expr } => self.infer_expr_become(expr),
Expr::Yield { expr } => {
if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
if let Some(expr) = expr {
@ -1084,6 +1085,27 @@ impl InferenceContext<'_> {
self.result.standard_types.never.clone()
}
fn infer_expr_become(&mut self, expr: ExprId) -> Ty {
match &self.return_coercion {
Some(return_coercion) => {
let ret_ty = return_coercion.expected_ty();
let call_expr_ty =
self.infer_expr_inner(expr, &Expectation::HasType(ret_ty.clone()));
// NB: this should *not* coerce.
// tail calls don't support any coercions except lifetime ones (like `&'static u8 -> &'a u8`).
self.unify(&call_expr_ty, &ret_ty);
}
None => {
// FIXME: diagnose `become` outside of functions
self.infer_expr_no_expect(expr);
}
}
self.result.standard_types.never.clone()
}
fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
if let Some(box_id) = self.resolve_boxed_box() {
let table = &mut self.table;
@ -1367,6 +1389,7 @@ impl InferenceContext<'_> {
);
}
}
Statement::Item => (),
}
}
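
Editor's note: the new `infer_expr_become` mirrors `infer_expr_return`, except that the callee's type is unified with, not coerced to, the declared return type — a tail call hands its result straight back to the original caller, so there is no point at which a coercion could be applied — and the `become` expression itself gets the never type. For orientation, a surface-level sketch of the feature being typed here (requires a nightly toolchain with the unstable `explicit_tail_calls` feature; sketch only, not part of the diff):

```rust
#![feature(explicit_tail_calls)]

// Mutual recursion via guaranteed tail calls: each `become` reuses the current
// frame, and its callee must return exactly the caller's declared return type.
fn even(n: u64) -> bool {
    if n == 0 {
        return true;
    }
    become odd(n - 1) // same return type as `even`, so inference just unifies them
}

fn odd(n: u64) -> bool {
    if n == 0 {
        return false;
    }
    become even(n - 1)
}

fn main() {
    assert!(even(10));
}
```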

View file

@ -65,6 +65,7 @@ impl InferenceContext<'_> {
Statement::Expr { expr, has_semi: _ } => {
self.infer_mut_expr(*expr, Mutability::Not);
}
Statement::Item => (),
}
}
if let Some(tail) = tail {
@ -93,6 +94,9 @@ impl InferenceContext<'_> {
self.infer_mut_expr(expr, Mutability::Not);
}
}
Expr::Become { expr } => {
self.infer_mut_expr(*expr, Mutability::Not);
}
Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
}

View file

@ -74,6 +74,12 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
}
}
/// Check if types unify.
///
/// Note that we consider placeholder types to unify with everything.
/// This means that there may be some unresolved goals that actually set bounds for the placeholder
/// type for the types to unify. For example `Option<T>` and `Option<U>` unify although there is
/// an unresolved goal `T = U`.
pub fn could_unify(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
@ -82,21 +88,35 @@ pub fn could_unify(
unify(db, env, tys).is_some()
}
/// Check if types unify eagerly, making sure there are no unresolved goals.
///
/// This means that placeholder types are not considered to unify if there are any bounds set on
/// them. For example `Option<T>` and `Option<U>` do not unify as we cannot show that `T = U`.
pub fn could_unify_deeply(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
tys: &Canonical<(Ty, Ty)>,
) -> bool {
let mut table = InferenceTable::new(db, env);
let vars = make_substitutions(tys, &mut table);
let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars);
let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars);
table.resolve_obligations_as_possible();
table.propagate_diverging_flag();
let ty1_with_vars = table.resolve_completely(ty1_with_vars);
let ty2_with_vars = table.resolve_completely(ty2_with_vars);
table.unify_deeply(&ty1_with_vars, &ty2_with_vars)
}
pub(crate) fn unify(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
tys: &Canonical<(Ty, Ty)>,
) -> Option<Substitution> {
let mut table = InferenceTable::new(db, env);
let vars = Substitution::from_iter(
Interner,
tys.binders.iter(Interner).map(|it| match &it.kind {
chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner),
// FIXME: maybe wrong?
chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner),
chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
}),
);
let vars = make_substitutions(tys, &mut table);
let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
if !table.unify(&ty1_with_vars, &ty2_with_vars) {
@ -125,6 +145,21 @@ pub(crate) fn unify(
))
}
fn make_substitutions(
tys: &chalk_ir::Canonical<(chalk_ir::Ty<Interner>, chalk_ir::Ty<Interner>)>,
table: &mut InferenceTable<'_>,
) -> chalk_ir::Substitution<Interner> {
Substitution::from_iter(
Interner,
tys.binders.iter(Interner).map(|it| match &it.kind {
chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner),
// FIXME: maybe wrong?
chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner),
chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner),
}),
)
}
bitflags::bitflags! {
#[derive(Default, Clone, Copy)]
pub(crate) struct TypeVariableFlags: u8 {
@ -431,6 +466,18 @@ impl<'a> InferenceTable<'a> {
true
}
/// Unify two relatable values (e.g. `Ty`) and check whether the trait goals that arise from it could be fulfilled.
pub(crate) fn unify_deeply<T: ?Sized + Zip<Interner>>(&mut self, ty1: &T, ty2: &T) -> bool {
let result = match self.try_unify(ty1, ty2) {
Ok(r) => r,
Err(_) => return false,
};
result.goals.iter().all(|goal| {
let canonicalized = self.canonicalize(goal.clone());
self.try_resolve_obligation(&canonicalized).is_some()
})
}
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them.
pub(crate) fn try_unify<T: ?Sized + Zip<Interner>>(
@ -501,7 +548,8 @@ impl<'a> InferenceTable<'a> {
fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
let canonicalized = self.canonicalize(goal);
if !self.try_resolve_obligation(&canonicalized) {
let solution = self.try_resolve_obligation(&canonicalized);
if matches!(solution, Some(Solution::Ambig(_))) {
self.pending_obligations.push(canonicalized);
}
}
@ -627,38 +675,35 @@ impl<'a> InferenceTable<'a> {
fn try_resolve_obligation(
&mut self,
canonicalized: &Canonicalized<InEnvironment<Goal>>,
) -> bool {
) -> Option<chalk_solve::Solution<Interner>> {
let solution = self.db.trait_solve(
self.trait_env.krate,
self.trait_env.block,
canonicalized.value.clone(),
);
match solution {
match &solution {
Some(Solution::Unique(canonical_subst)) => {
canonicalized.apply_solution(
self,
Canonical {
binders: canonical_subst.binders,
binders: canonical_subst.binders.clone(),
// FIXME: handle constraints
value: canonical_subst.value.subst,
value: canonical_subst.value.subst.clone(),
},
);
true
}
Some(Solution::Ambig(Guidance::Definite(substs))) => {
canonicalized.apply_solution(self, substs);
false
canonicalized.apply_solution(self, substs.clone());
}
Some(_) => {
// FIXME use this when trying to resolve everything at the end
false
}
None => {
// FIXME obligation cannot be fulfilled => diagnostic
true
}
}
solution
}
pub(crate) fn callable_sig(
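
Editor's note: several related changes land in this file — `could_unify_deeply` is added next to `could_unify`, the variable-substitution setup is factored into `make_substitutions`, `unify_deeply` requires every goal produced by unification to be resolvable, and `try_resolve_obligation` now returns the `Solution` so `register_obligation_in_env` only queues truly ambiguous goals. A toy model of the shallow/deep distinction (names are illustrative, not hir-ty API; not part of the diff):

```rust
// Shallow unification succeeds as soon as the two types can be equated, even if
// goals like `T = U` are left pending; the deep variant also requires every
// pending goal to be discharged.
#[derive(Clone, Copy, PartialEq)]
enum Goal {
    Trivial,
    Unprovable,
}

struct UnifyOutcome {
    equated: bool,
    pending_goals: Vec<Goal>,
}

fn check(outcome: &UnifyOutcome) -> (bool, bool) {
    let shallow = outcome.equated;
    let deep = outcome.equated && outcome.pending_goals.iter().all(|g| *g == Goal::Trivial);
    (shallow, deep)
}

fn main() {
    // e.g. Option<T> vs Option<U>: the types can be equated, but the goal `T = U`
    // stays open, so only the shallow check succeeds.
    let outcome = UnifyOutcome { equated: true, pending_goals: vec![Goal::Trivial, Goal::Unprovable] };
    let (shallow, deep) = check(&outcome);
    assert!(shallow && !deep);
    println!("could_unify-style: {shallow}, could_unify_deeply-style: {deep}");
}
```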

View file

@ -11,10 +11,8 @@ pub fn target_data_layout_query(
db: &dyn HirDatabase,
krate: CrateId,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
let crate_graph = db.crate_graph();
let res = crate_graph[krate].target_layout.as_deref();
match res {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
match db.data_layout(krate) {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@ -44,6 +42,6 @@ pub fn target_data_layout_query(
}.into())
}
},
Err(e) => Err(Arc::from(&**e)),
Err(e) => Err(e),
}
}

View file

@ -1,6 +1,7 @@
use chalk_ir::{AdtId, TyKind};
use either::Either;
use hir_def::db::DefDatabase;
use project_model::target_data_layout::RustcDataLayoutConfig;
use rustc_hash::FxHashMap;
use test_fixture::WithFixture;
use triomphe::Arc;
@ -15,13 +16,18 @@ use crate::{
mod closure;
fn current_machine_data_layout() -> String {
project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap()
project_model::target_data_layout::get(
RustcDataLayoutConfig::Rustc(None),
None,
&FxHashMap::default(),
)
.unwrap()
}
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}",
"//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}",
);
let (db, file_ids) = TestDB::with_many_files(&ra_fixture);
@ -70,7 +76,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}",
"//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}",
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);

View file

@ -79,8 +79,8 @@ pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use infer::{
closure::{CaptureKind, CapturedItem},
could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
InferenceResult, OverloadedDeref, PointerCast,
could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast,
};
pub use interner::Interner;
pub use lower::{

View file

@ -7,6 +7,7 @@ use std::iter;
use hir_def::{DefWithBodyId, HasModule};
use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
use stdx::never;
use triomphe::Arc;
@ -14,7 +15,7 @@ use crate::{
db::{HirDatabase, InternedClosure},
mir::Operand,
utils::ClosureSubst,
ClosureId, Interner, Ty, TyExt, TypeFlags,
ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags,
};
use super::{
@ -36,11 +37,27 @@ pub struct MovedOutOfRef {
pub span: MirSpan,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct PartiallyMoved {
pub ty: Ty,
pub span: MirSpan,
pub local: LocalId,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowRegion {
pub local: LocalId,
pub kind: BorrowKind,
pub places: Vec<MirSpan>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BorrowckResult {
pub mir_body: Arc<MirBody>,
pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
pub moved_out_of_ref: Vec<MovedOutOfRef>,
pub partially_moved: Vec<PartiallyMoved>,
pub borrow_regions: Vec<BorrowRegion>,
}
fn all_mir_bodies(
@ -80,12 +97,26 @@ pub fn borrowck_query(
res.push(BorrowckResult {
mutability_of_locals: mutability_of_locals(db, &body),
moved_out_of_ref: moved_out_of_ref(db, &body),
partially_moved: partially_moved(db, &body),
borrow_regions: borrow_regions(db, &body),
mir_body: body,
});
})?;
Ok(res.into())
}
fn make_fetch_closure_field(
db: &dyn HirDatabase,
) -> impl FnOnce(ClosureId, &Substitution, usize) -> Ty + '_ {
|c: ClosureId, subst: &Substitution, f: usize| {
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst)
}
}
fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> {
let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op {
@ -99,18 +130,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
ty = proj.projected_ty(
ty,
db,
|c, subst, f| {
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
},
make_fetch_closure_field(db),
body.owner.module(db.upcast()).krate(),
);
}
@ -188,6 +208,132 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
result
}
fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved> {
let mut result = vec![];
let mut for_operand = |op: &Operand, span: MirSpan| match op {
Operand::Copy(p) | Operand::Move(p) => {
let mut ty: Ty = body.locals[p.local].ty.clone();
for proj in p.projection.lookup(&body.projection_store) {
ty = proj.projected_ty(
ty,
db,
make_fetch_closure_field(db),
body.owner.module(db.upcast()).krate(),
);
}
if !ty.clone().is_copy(db, body.owner)
&& !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
{
result.push(PartiallyMoved { span, ty, local: p.local });
}
}
Operand::Constant(_) | Operand::Static(_) => (),
};
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(_, r) => match r {
Rvalue::ShallowInitBoxWithAlloc(_) => (),
Rvalue::ShallowInitBox(o, _)
| Rvalue::UnaryOp(_, o)
| Rvalue::Cast(_, o, _)
| Rvalue::Repeat(o, _)
| Rvalue::Use(o) => for_operand(o, statement.span),
Rvalue::CopyForDeref(_)
| Rvalue::Discriminant(_)
| Rvalue::Len(_)
| Rvalue::Ref(_, _) => (),
Rvalue::CheckedBinaryOp(_, o1, o2) => {
for_operand(o1, statement.span);
for_operand(o2, statement.span);
}
Rvalue::Aggregate(_, ops) => {
for op in ops.iter() {
for_operand(op, statement.span);
}
}
},
StatementKind::FakeRead(_)
| StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
}
match &block.terminator {
Some(terminator) => match &terminator.kind {
TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span),
TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::UnwindResume
| TerminatorKind::CoroutineDrop
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. } => (),
TerminatorKind::DropAndReplace { value, .. } => {
for_operand(value, terminator.span);
}
TerminatorKind::Call { func, args, .. } => {
for_operand(func, terminator.span);
args.iter().for_each(|it| for_operand(it, terminator.span));
}
TerminatorKind::Assert { cond, .. } => {
for_operand(cond, terminator.span);
}
TerminatorKind::Yield { value, .. } => {
for_operand(value, terminator.span);
}
},
None => (),
}
}
result.shrink_to_fit();
result
}
fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
let mut borrows = FxHashMap::default();
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
for statement in &block.statements {
if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
borrows
.entry(p.local)
.and_modify(|it: &mut BorrowRegion| {
it.places.push(statement.span);
})
.or_insert_with(|| BorrowRegion {
local: p.local,
kind: *kind,
places: vec![statement.span],
});
}
}
match &block.terminator {
Some(terminator) => match &terminator.kind {
TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::UnwindResume
| TerminatorKind::CoroutineDrop
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. } => (),
TerminatorKind::DropAndReplace { .. } => {}
TerminatorKind::Call { .. } => {}
_ => (),
},
None => (),
}
}
borrows.into_values().collect()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProjectionCase {
/// Projection is a local
@ -217,18 +363,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
ty = proj.projected_ty(
ty,
db,
|c, subst, f| {
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
},
make_fetch_closure_field(db),
body.owner.module(db.upcast()).krate(),
);
}
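
Editor's note: this borrow-check hunk adds the `partially_moved` and `borrow_regions` passes and deduplicates the closure-field projection callback into `make_fetch_closure_field`. A minimal model of that "return the shared closure from a helper" refactor, with stand-in types rather than hir-ty ones (not part of the diff):

```rust
// Three call sites previously repeated the same closure literal; the refactor
// returns the closure from a small constructor so each site can pass it along.
struct Db {
    closure_field_types: Vec<&'static str>,
}

fn make_fetch_field(db: &Db) -> impl Fn(usize) -> &'static str + '_ {
    move |field| db.closure_field_types.get(field).copied().expect("broken closure field")
}

fn project(field: usize, fetch: impl Fn(usize) -> &'static str) -> &'static str {
    fetch(field)
}

fn main() {
    let db = Db { closure_field_types: vec!["i32", "String"] };
    // Each former call site now just asks for the helper instead of restating the closure.
    println!("{}", project(1, make_fetch_field(&db)));
    println!("{}", project(0, make_fetch_field(&db)));
}
```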

View file

@ -775,6 +775,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.set_terminator(current, TerminatorKind::Return, expr_id.into());
Ok(None)
}
Expr::Become { .. } => not_supported!("tail-calls"),
Expr::Yield { .. } => not_supported!("yield"),
Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => {
let spread_place = match spread {
@ -1246,7 +1247,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(current, place, op.into(), expr_id.into());
Ok(Some(current))
}
Expr::Underscore => not_supported!("underscore"),
Expr::Underscore => Ok(Some(current)),
}
}
@ -1780,6 +1781,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_fake_read(c, p, expr.into());
current = scope2.pop_and_drop(self, c, expr.into());
}
hir_def::hir::Statement::Item => (),
}
}
if let Some(tail) = tail {
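
Editor's note: two MIR-lowering behaviors change here — `become` is explicitly rejected as unsupported ("tail-calls"), and `Expr::Underscore` no longer aborts lowering. For reference, the only place `_` is legal as an expression in surface Rust is the left-hand side of a destructuring assignment, as in this small stable-Rust example (editor's illustration, not part of the diff):

```rust
// `_` in assignee position discards a value; bodies containing such assignments
// can now be lowered to MIR instead of hitting `not_supported!("underscore")`.
fn produce() -> (i32, i32) {
    (1, 2)
}

fn main() {
    let second;
    (_, second) = produce(); // `_` discards the first element
    _ = produce(); // a bare underscore assignment discards the whole value
    println!("{second}");
}
```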

View file

@ -1,3 +1,5 @@
use crate::tests::check_no_mismatches;
use super::check;
#[test]
@ -94,3 +96,43 @@ fn test(x: bool) {
"#,
);
}
#[test]
fn no_mismatches_on_atpit() {
check_no_mismatches(
r#"
//- minicore: option, sized
#![feature(impl_trait_in_assoc_type)]
trait WrappedAssoc {
type Assoc;
fn do_thing(&self) -> Option<Self::Assoc>;
}
struct Foo;
impl WrappedAssoc for Foo {
type Assoc = impl Sized;
fn do_thing(&self) -> Option<Self::Assoc> {
Some(())
}
}
"#,
);
check_no_mismatches(
r#"
//- minicore: option, sized
#![feature(impl_trait_in_assoc_type)]
trait Trait {
type Assoc;
const DEFINE: Option<Self::Assoc>;
}
impl Trait for () {
type Assoc = impl Sized;
const DEFINE: Option<Self::Assoc> = Option::Some(());
}
"#,
);
}

View file

@ -3376,11 +3376,8 @@ fn main() {
[x,] = &[1,];
//^^^^expected &[i32; 1], got [{unknown}; _]
// FIXME we only want the outermost error, but this matches the current
// behavior of slice patterns
let x;
[(x,),] = &[(1,),];
// ^^^^expected {unknown}, got ({unknown},)
//^^^^^^^expected &[(i32,); 1], got [{unknown}; _]
let x;