Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-30 22:01:37 +00:00

Merge commit 'ddf105b646' into sync-from-ra

This commit is contained in:
parent 0816d49d83
commit e41ab350d6

378 changed files with 14720 additions and 3111 deletions

|
@@ -88,6 +88,7 @@ impl<'a, 'db> Autoderef<'a, 'db> {
impl Iterator for Autoderef<'_, '_> {
type Item = (Ty, usize);

#[tracing::instrument(skip_all)]
fn next(&mut self) -> Option<Self::Item> {
if self.at_start {
self.at_start = false;

@@ -125,6 +125,7 @@ impl<D> TyBuilder<D> {
this
}

#[tracing::instrument(skip_all)]
pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self {
self.fill(|x| match x {
ParamKind::Type => table.new_type_var().cast(Interner),

@@ -208,6 +209,7 @@ impl TyBuilder<()> {
)
}

#[tracing::instrument(skip_all)]
pub fn subst_for_def(
db: &dyn HirDatabase,
def: impl Into<GenericDefId>,

@@ -17,7 +17,7 @@ use hir_def::{
use hir_expand::name::name;

use crate::{
db::HirDatabase,
db::{HirDatabase, InternedCoroutine},
display::HirDisplay,
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders,
make_single_type_binders,

@@ -428,7 +428,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
&self,
id: chalk_ir::CoroutineId<Interner>,
) -> Arc<chalk_solve::rust_ir::CoroutineDatum<Interner>> {
let (parent, expr) = self.db.lookup_intern_coroutine(id.into());
let InternedCoroutine(parent, expr) = self.db.lookup_intern_coroutine(id.into());

// We fill substitution with unknown type, because we only need to know whether the generic
// params are types or consts to build `Binders` and those being filled up are for

@@ -473,7 +473,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
let inner_types =
rust_ir::CoroutineWitnessExistential { types: wrap_empty_binders(vec![]) };

let (parent, _) = self.db.lookup_intern_coroutine(id.into());
let InternedCoroutine(parent, _) = self.db.lookup_intern_coroutine(id.into());
// See the comment in `coroutine_datum()` for unknown types.
let subst = TyBuilder::subst_for_coroutine(self.db, parent).fill_with_unknown().build();
let it = subst

@@ -133,7 +133,7 @@ fn bit_op() {
check_number(r#"const GOAL: i8 = 1 << 7"#, (1i8 << 7) as i128);
check_number(r#"const GOAL: i8 = -1 << 2"#, (-1i8 << 2) as i128);
check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| {
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string()))
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_owned()))
});
check_number(r#"const GOAL: i32 = 100000000i32 << 11"#, (100000000i32 << 11) as i128);
}

@@ -2756,7 +2756,7 @@ fn memory_limit() {
"#,
|e| {
e == ConstEvalError::MirEvalError(MirEvalError::Panic(
"Memory allocation of 30000000000 bytes failed".to_string(),
"Memory allocation of 30000000000 bytes failed".to_owned(),
))
},
);

@@ -3,7 +3,11 @@

use std::sync;

use base_db::{impl_intern_key, salsa, CrateId, Upcast};
use base_db::{
impl_intern_key,
salsa::{self, impl_intern_value_trivial},
CrateId, Upcast,
};
use hir_def::{
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId,
DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,

@@ -199,9 +203,9 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[salsa::interned]
fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> InternedClosureId;
fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
#[salsa::interned]
fn intern_coroutine(&self, id: (DefWithBodyId, ExprId)) -> InternedCoroutineId;
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;

#[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(

@@ -337,10 +341,18 @@ impl_intern_key!(InternedOpaqueTyId);
pub struct InternedClosureId(salsa::InternId);
impl_intern_key!(InternedClosureId);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
impl_intern_value_trivial!(InternedClosure);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutineId(salsa::InternId);
impl_intern_key!(InternedCoroutineId);

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
impl_intern_value_trivial!(InternedCoroutine);

/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
/// we have different IDs for struct and enum variant constructors.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]

@@ -16,11 +16,9 @@ mod case_conv;
use std::fmt;

use hir_def::{
data::adt::VariantData,
hir::{Pat, PatId},
src::HasSource,
AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
StaticId, StructId,
data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId,
EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId, StructId,
TraitId, TypeAliasId,
};
use hir_expand::{
name::{AsName, Name},

@@ -79,12 +77,14 @@ pub enum IdentType {
Enum,
Field,
Function,
Module,
Parameter,
StaticVariable,
Structure,
Trait,
TypeAlias,
Variable,
Variant,
Module,
}

impl fmt::Display for IdentType {

@@ -94,12 +94,14 @@ impl fmt::Display for IdentType {
IdentType::Enum => "Enum",
IdentType::Field => "Field",
IdentType::Function => "Function",
IdentType::Module => "Module",
IdentType::Parameter => "Parameter",
IdentType::StaticVariable => "Static variable",
IdentType::Structure => "Structure",
IdentType::Trait => "Trait",
IdentType::TypeAlias => "Type alias",
IdentType::Variable => "Variable",
IdentType::Variant => "Variant",
IdentType::Module => "Module",
};

repr.fmt(f)

@@ -136,10 +138,12 @@ impl<'a> DeclValidator<'a> {
pub(super) fn validate_item(&mut self, item: ModuleDefId) {
match item {
ModuleDefId::ModuleId(module_id) => self.validate_module(module_id),
ModuleDefId::TraitId(trait_id) => self.validate_trait(trait_id),
ModuleDefId::FunctionId(func) => self.validate_func(func),
ModuleDefId::AdtId(adt) => self.validate_adt(adt),
ModuleDefId::ConstId(const_id) => self.validate_const(const_id),
ModuleDefId::StaticId(static_id) => self.validate_static(static_id),
ModuleDefId::TypeAliasId(type_alias_id) => self.validate_type_alias(type_alias_id),
_ => (),
}
}

@@ -242,50 +246,46 @@ impl<'a> DeclValidator<'a> {
|
|||
|
||||
// Check the module name.
|
||||
let Some(module_name) = module_id.name(self.db.upcast()) else { return };
|
||||
let module_name_replacement =
|
||||
let Some(module_name_replacement) =
|
||||
module_name.as_str().and_then(to_lower_snake_case).map(|new_name| Replacement {
|
||||
current_name: module_name,
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::LowerSnakeCase,
|
||||
});
|
||||
})
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id];
|
||||
let Some(module_src) = module_data.declaration_source(self.db.upcast()) else {
|
||||
return;
|
||||
};
|
||||
self.create_incorrect_case_diagnostic_for_ast_node(
|
||||
module_name_replacement,
|
||||
module_src.file_id,
|
||||
&module_src.value,
|
||||
IdentType::Module,
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(module_name_replacement) = module_name_replacement {
|
||||
let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id];
|
||||
let module_src = module_data.declaration_source(self.db.upcast());
|
||||
|
||||
if let Some(module_src) = module_src {
|
||||
let ast_ptr = match module_src.value.name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
never!(
|
||||
"Replacement ({:?}) was generated for a module without a name: {:?}",
|
||||
module_name_replacement,
|
||||
module_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: module_src.file_id,
|
||||
ident_type: IdentType::Module,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
expected_case: module_name_replacement.expected_case,
|
||||
ident_text: module_name_replacement
|
||||
.current_name
|
||||
.display(self.db.upcast())
|
||||
.to_string(),
|
||||
suggested_text: module_name_replacement.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
}
|
||||
fn validate_trait(&mut self, trait_id: TraitId) {
|
||||
// Check whether non-snake case identifiers are allowed for this trait.
|
||||
if self.allowed(trait_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check the trait name.
|
||||
let data = self.db.trait_data(trait_id);
|
||||
self.create_incorrect_case_diagnostic_for_item_name(
|
||||
trait_id,
|
||||
&data.name,
|
||||
CaseType::UpperCamelCase,
|
||||
IdentType::Trait,
|
||||
);
|
||||
}
|
||||
|
||||
fn validate_func(&mut self, func: FunctionId) {
|
||||
let data = self.db.function_data(func);
|
||||
if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) {
|
||||
let container = func.lookup(self.db.upcast()).container;
|
||||
if matches!(container, ItemContainerId::ExternBlockId(_)) {
|
||||
cov_mark::hit!(extern_func_incorrect_case_ignored);
|
||||
return;
|
||||
}
|
||||
|
@@ -296,270 +296,173 @@ impl<'a> DeclValidator<'a> {
|
|||
}
|
||||
|
||||
// Check the function name.
|
||||
let function_name = data.name.display(self.db.upcast()).to_string();
|
||||
let fn_name_replacement = to_lower_snake_case(&function_name).map(|new_name| Replacement {
|
||||
current_name: data.name.clone(),
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::LowerSnakeCase,
|
||||
});
|
||||
|
||||
let body = self.db.body(func.into());
|
||||
// Skipped if function is an associated item of a trait implementation.
|
||||
if !self.is_trait_impl_container(container) {
|
||||
let data = self.db.function_data(func);
|
||||
self.create_incorrect_case_diagnostic_for_item_name(
|
||||
func,
|
||||
&data.name,
|
||||
CaseType::LowerSnakeCase,
|
||||
IdentType::Function,
|
||||
);
|
||||
} else {
|
||||
cov_mark::hit!(trait_impl_assoc_func_name_incorrect_case_ignored);
|
||||
}
|
||||
|
||||
// Check the patterns inside the function body.
|
||||
// This includes function parameters.
|
||||
let pats_replacements = body
|
||||
self.validate_func_body(func);
|
||||
}
|
||||
|
||||
/// Check incorrect names for patterns inside the function body.
|
||||
/// This includes function parameters except for trait implementation associated functions.
|
||||
fn validate_func_body(&mut self, func: FunctionId) {
|
||||
let body = self.db.body(func.into());
|
||||
let mut pats_replacements = body
|
||||
.pats
|
||||
.iter()
|
||||
.filter_map(|(pat_id, pat)| match pat {
|
||||
Pat::Bind { id, .. } => Some((pat_id, &body.bindings[*id].name)),
|
||||
Pat::Bind { id, .. } => {
|
||||
let bind_name = &body.bindings[*id].name;
|
||||
let replacement = Replacement {
|
||||
current_name: bind_name.clone(),
|
||||
suggested_text: to_lower_snake_case(&bind_name.to_smol_str())?,
|
||||
expected_case: CaseType::LowerSnakeCase,
|
||||
};
|
||||
Some((pat_id, replacement))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.filter_map(|(id, bind_name)| {
|
||||
Some((
|
||||
id,
|
||||
Replacement {
|
||||
current_name: bind_name.clone(),
|
||||
suggested_text: to_lower_snake_case(
|
||||
&bind_name.display(self.db.upcast()).to_string(),
|
||||
)?,
|
||||
expected_case: CaseType::LowerSnakeCase,
|
||||
},
|
||||
))
|
||||
})
|
||||
.collect();
|
||||
.peekable();
|
||||
|
||||
// If there is at least one element to spawn a warning on, go to the source map and generate a warning.
|
||||
if let Some(fn_name_replacement) = fn_name_replacement {
|
||||
self.create_incorrect_case_diagnostic_for_func(func, fn_name_replacement);
|
||||
}
|
||||
|
||||
self.create_incorrect_case_diagnostic_for_variables(func, pats_replacements);
|
||||
}
|
||||
|
||||
/// Given the information about incorrect names in the function declaration, looks up into the source code
|
||||
/// for exact locations and adds diagnostics into the sink.
|
||||
fn create_incorrect_case_diagnostic_for_func(
|
||||
&mut self,
|
||||
func: FunctionId,
|
||||
fn_name_replacement: Replacement,
|
||||
) {
|
||||
let fn_loc = func.lookup(self.db.upcast());
|
||||
let fn_src = fn_loc.source(self.db.upcast());
|
||||
|
||||
// Diagnostic for function name.
|
||||
let ast_ptr = match fn_src.value.name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
never!(
|
||||
"Replacement ({:?}) was generated for a function without a name: {:?}",
|
||||
fn_name_replacement,
|
||||
fn_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: fn_src.file_id,
|
||||
ident_type: IdentType::Function,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
expected_case: fn_name_replacement.expected_case,
|
||||
ident_text: fn_name_replacement.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: fn_name_replacement.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
}
|
||||
|
||||
/// Given the information about incorrect variable names, looks up into the source code
|
||||
/// for exact locations and adds diagnostics into the sink.
|
||||
fn create_incorrect_case_diagnostic_for_variables(
|
||||
&mut self,
|
||||
func: FunctionId,
|
||||
pats_replacements: Vec<(PatId, Replacement)>,
|
||||
) {
|
||||
// XXX: only look at source_map if we do have missing fields
|
||||
if pats_replacements.is_empty() {
|
||||
if pats_replacements.peek().is_none() {
|
||||
return;
|
||||
}
|
||||
|
||||
let (_, source_map) = self.db.body_with_source_map(func.into());
|
||||
|
||||
for (id, replacement) in pats_replacements {
|
||||
if let Ok(source_ptr) = source_map.pat_syntax(id) {
|
||||
if let Some(ptr) = source_ptr.value.cast::<ast::IdentPat>() {
|
||||
let root = source_ptr.file_syntax(self.db.upcast());
|
||||
let ident_pat = ptr.to_node(&root);
|
||||
let parent = match ident_pat.syntax().parent() {
|
||||
Some(parent) => parent,
|
||||
None => continue,
|
||||
};
|
||||
let name_ast = match ident_pat.name() {
|
||||
Some(name_ast) => name_ast,
|
||||
None => continue,
|
||||
};
|
||||
let Ok(source_ptr) = source_map.pat_syntax(id) else {
|
||||
continue;
|
||||
};
|
||||
let Some(ptr) = source_ptr.value.cast::<ast::IdentPat>() else {
|
||||
continue;
|
||||
};
|
||||
let root = source_ptr.file_syntax(self.db.upcast());
|
||||
let ident_pat = ptr.to_node(&root);
|
||||
let Some(parent) = ident_pat.syntax().parent() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let is_param = ast::Param::can_cast(parent.kind());
|
||||
|
||||
// We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
|
||||
// because e.g. match arms are patterns as well.
|
||||
// In other words, we check that it's a named variable binding.
|
||||
let is_binding = ast::LetStmt::can_cast(parent.kind())
|
||||
|| (ast::MatchArm::can_cast(parent.kind())
|
||||
&& ident_pat.at_token().is_some());
|
||||
if !(is_param || is_binding) {
|
||||
// This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
|
||||
continue;
|
||||
}
|
||||
|
||||
let ident_type =
|
||||
if is_param { IdentType::Parameter } else { IdentType::Variable };
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: source_ptr.file_id,
|
||||
ident_type,
|
||||
ident: AstPtr::new(&name_ast),
|
||||
expected_case: replacement.expected_case,
|
||||
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: replacement.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
}
|
||||
let is_param = ast::Param::can_cast(parent.kind());
|
||||
// We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
|
||||
// because e.g. match arms are patterns as well.
|
||||
// In other words, we check that it's a named variable binding.
|
||||
let is_binding = ast::LetStmt::can_cast(parent.kind())
|
||||
|| (ast::MatchArm::can_cast(parent.kind()) && ident_pat.at_token().is_some());
|
||||
if !(is_param || is_binding) {
|
||||
// This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
|
||||
continue;
|
||||
}
|
||||
|
||||
let ident_type = if is_param { IdentType::Parameter } else { IdentType::Variable };
|
||||
|
||||
self.create_incorrect_case_diagnostic_for_ast_node(
|
||||
replacement,
|
||||
source_ptr.file_id,
|
||||
&ident_pat,
|
||||
ident_type,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_struct(&mut self, struct_id: StructId) {
|
||||
let data = self.db.struct_data(struct_id);
|
||||
|
||||
// Check the structure name.
|
||||
let non_camel_case_allowed =
|
||||
self.allowed(struct_id.into(), allow::NON_CAMEL_CASE_TYPES, false);
|
||||
let non_snake_case_allowed = self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false);
|
||||
|
||||
// Check the structure name.
|
||||
let struct_name = data.name.display(self.db.upcast()).to_string();
|
||||
let struct_name_replacement = if !non_camel_case_allowed {
|
||||
to_camel_case(&struct_name).map(|new_name| Replacement {
|
||||
current_name: data.name.clone(),
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::UpperCamelCase,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Check the field names.
|
||||
let mut struct_fields_replacements = Vec::new();
|
||||
|
||||
if !non_snake_case_allowed {
|
||||
if let VariantData::Record(fields) = data.variant_data.as_ref() {
|
||||
for (_, field) in fields.iter() {
|
||||
let field_name = field.name.display(self.db.upcast()).to_string();
|
||||
if let Some(new_name) = to_lower_snake_case(&field_name) {
|
||||
let replacement = Replacement {
|
||||
current_name: field.name.clone(),
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::LowerSnakeCase,
|
||||
};
|
||||
struct_fields_replacements.push(replacement);
|
||||
}
|
||||
}
|
||||
}
|
||||
if !non_camel_case_allowed {
|
||||
let data = self.db.struct_data(struct_id);
|
||||
self.create_incorrect_case_diagnostic_for_item_name(
|
||||
struct_id,
|
||||
&data.name,
|
||||
CaseType::UpperCamelCase,
|
||||
IdentType::Structure,
|
||||
);
|
||||
}
|
||||
|
||||
// If there is at least one element to spawn a warning on, go to the source map and generate a warning.
|
||||
self.create_incorrect_case_diagnostic_for_struct(
|
||||
struct_id,
|
||||
struct_name_replacement,
|
||||
struct_fields_replacements,
|
||||
);
|
||||
// Check the field names.
|
||||
self.validate_struct_fields(struct_id);
|
||||
}
|
||||
|
||||
/// Given the information about incorrect names in the struct declaration, looks up into the source code
|
||||
/// for exact locations and adds diagnostics into the sink.
|
||||
fn create_incorrect_case_diagnostic_for_struct(
|
||||
&mut self,
|
||||
struct_id: StructId,
|
||||
struct_name_replacement: Option<Replacement>,
|
||||
struct_fields_replacements: Vec<Replacement>,
|
||||
) {
|
||||
/// Check incorrect names for struct fields.
|
||||
fn validate_struct_fields(&mut self, struct_id: StructId) {
|
||||
if self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false) {
|
||||
return;
|
||||
}
|
||||
|
||||
let data = self.db.struct_data(struct_id);
|
||||
let VariantData::Record(fields) = data.variant_data.as_ref() else {
|
||||
return;
|
||||
};
|
||||
let mut struct_fields_replacements = fields
|
||||
.iter()
|
||||
.filter_map(|(_, field)| {
|
||||
to_lower_snake_case(&field.name.to_smol_str()).map(|new_name| Replacement {
|
||||
current_name: field.name.clone(),
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::LowerSnakeCase,
|
||||
})
|
||||
})
|
||||
.peekable();
|
||||
|
||||
// XXX: Only look at sources if we do have incorrect names.
|
||||
if struct_name_replacement.is_none() && struct_fields_replacements.is_empty() {
|
||||
if struct_fields_replacements.peek().is_none() {
|
||||
return;
|
||||
}
|
||||
|
||||
let struct_loc = struct_id.lookup(self.db.upcast());
|
||||
let struct_src = struct_loc.source(self.db.upcast());
|
||||
|
||||
if let Some(replacement) = struct_name_replacement {
|
||||
let ast_ptr = match struct_src.value.name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
let Some(ast::FieldList::RecordFieldList(struct_fields_list)) =
|
||||
struct_src.value.field_list()
|
||||
else {
|
||||
always!(
|
||||
struct_fields_replacements.peek().is_none(),
|
||||
"Replacements ({:?}) were generated for a structure fields \
|
||||
which had no fields list: {:?}",
|
||||
struct_fields_replacements.collect::<Vec<_>>(),
|
||||
struct_src
|
||||
);
|
||||
return;
|
||||
};
|
||||
let mut struct_fields_iter = struct_fields_list.fields();
|
||||
for field_replacement in struct_fields_replacements {
|
||||
// We assume that parameters in replacement are in the same order as in the
|
||||
// actual params list, but just some of them (ones that named correctly) are skipped.
|
||||
let field = loop {
|
||||
if let Some(field) = struct_fields_iter.next() {
|
||||
let Some(field_name) = field.name() else {
|
||||
continue;
|
||||
};
|
||||
if field_name.as_name() == field_replacement.current_name {
|
||||
break field;
|
||||
}
|
||||
} else {
|
||||
never!(
|
||||
"Replacement ({:?}) was generated for a structure without a name: {:?}",
|
||||
replacement,
|
||||
"Replacement ({:?}) was generated for a structure field \
|
||||
which was not found: {:?}",
|
||||
field_replacement,
|
||||
struct_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: struct_src.file_id,
|
||||
ident_type: IdentType::Structure,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
expected_case: replacement.expected_case,
|
||||
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: replacement.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
}
|
||||
|
||||
let struct_fields_list = match struct_src.value.field_list() {
|
||||
Some(ast::FieldList::RecordFieldList(fields)) => fields,
|
||||
_ => {
|
||||
always!(
|
||||
struct_fields_replacements.is_empty(),
|
||||
"Replacements ({:?}) were generated for a structure fields which had no fields list: {:?}",
|
||||
struct_fields_replacements,
|
||||
struct_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
let mut struct_fields_iter = struct_fields_list.fields();
|
||||
for field_to_rename in struct_fields_replacements {
|
||||
// We assume that parameters in replacement are in the same order as in the
|
||||
// actual params list, but just some of them (ones that named correctly) are skipped.
|
||||
let ast_ptr = loop {
|
||||
match struct_fields_iter.next().and_then(|field| field.name()) {
|
||||
Some(field_name) => {
|
||||
if field_name.as_name() == field_to_rename.current_name {
|
||||
break field_name;
|
||||
}
|
||||
}
|
||||
None => {
|
||||
never!(
|
||||
"Replacement ({:?}) was generated for a structure field which was not found: {:?}",
|
||||
field_to_rename, struct_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: struct_src.file_id,
|
||||
ident_type: IdentType::Field,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
expected_case: field_to_rename.expected_case,
|
||||
ident_text: field_to_rename.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: field_to_rename.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
self.create_incorrect_case_diagnostic_for_ast_node(
|
||||
field_replacement,
|
||||
struct_src.file_id,
|
||||
&field,
|
||||
IdentType::Field,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -572,163 +475,103 @@ impl<'a> DeclValidator<'a> {
|
|||
}
|
||||
|
||||
// Check the enum name.
|
||||
let enum_name = data.name.display(self.db.upcast()).to_string();
|
||||
let enum_name_replacement = to_camel_case(&enum_name).map(|new_name| Replacement {
|
||||
current_name: data.name.clone(),
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::UpperCamelCase,
|
||||
});
|
||||
self.create_incorrect_case_diagnostic_for_item_name(
|
||||
enum_id,
|
||||
&data.name,
|
||||
CaseType::UpperCamelCase,
|
||||
IdentType::Enum,
|
||||
);
|
||||
|
||||
// Check the field names.
|
||||
let enum_fields_replacements = data
|
||||
// Check the variant names.
|
||||
self.validate_enum_variants(enum_id)
|
||||
}
|
||||
|
||||
/// Check incorrect names for enum variants.
|
||||
fn validate_enum_variants(&mut self, enum_id: EnumId) {
|
||||
let data = self.db.enum_data(enum_id);
|
||||
let mut enum_variants_replacements = data
|
||||
.variants
|
||||
.iter()
|
||||
.filter_map(|(_, name)| {
|
||||
Some(Replacement {
|
||||
to_camel_case(&name.to_smol_str()).map(|new_name| Replacement {
|
||||
current_name: name.clone(),
|
||||
suggested_text: to_camel_case(&name.to_smol_str())?,
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::UpperCamelCase,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
.peekable();
|
||||
|
||||
// If there is at least one element to spawn a warning on, go to the source map and generate a warning.
|
||||
self.create_incorrect_case_diagnostic_for_enum(
|
||||
enum_id,
|
||||
enum_name_replacement,
|
||||
enum_fields_replacements,
|
||||
)
|
||||
}
|
||||
|
||||
/// Given the information about incorrect names in the struct declaration, looks up into the source code
|
||||
/// for exact locations and adds diagnostics into the sink.
|
||||
fn create_incorrect_case_diagnostic_for_enum(
|
||||
&mut self,
|
||||
enum_id: EnumId,
|
||||
enum_name_replacement: Option<Replacement>,
|
||||
enum_variants_replacements: Vec<Replacement>,
|
||||
) {
|
||||
// XXX: only look at sources if we do have incorrect names
|
||||
if enum_name_replacement.is_none() && enum_variants_replacements.is_empty() {
|
||||
if enum_variants_replacements.peek().is_none() {
|
||||
return;
|
||||
}
|
||||
|
||||
let enum_loc = enum_id.lookup(self.db.upcast());
|
||||
let enum_src = enum_loc.source(self.db.upcast());
|
||||
|
||||
if let Some(replacement) = enum_name_replacement {
|
||||
let ast_ptr = match enum_src.value.name() {
|
||||
Some(name) => name,
|
||||
None => {
|
||||
let Some(enum_variants_list) = enum_src.value.variant_list() else {
|
||||
always!(
|
||||
enum_variants_replacements.peek().is_none(),
|
||||
"Replacements ({:?}) were generated for enum variants \
|
||||
which had no fields list: {:?}",
|
||||
enum_variants_replacements,
|
||||
enum_src
|
||||
);
|
||||
return;
|
||||
};
|
||||
let mut enum_variants_iter = enum_variants_list.variants();
|
||||
for variant_replacement in enum_variants_replacements {
|
||||
// We assume that parameters in replacement are in the same order as in the
|
||||
// actual params list, but just some of them (ones that named correctly) are skipped.
|
||||
let variant = loop {
|
||||
if let Some(variant) = enum_variants_iter.next() {
|
||||
let Some(variant_name) = variant.name() else {
|
||||
continue;
|
||||
};
|
||||
if variant_name.as_name() == variant_replacement.current_name {
|
||||
break variant;
|
||||
}
|
||||
} else {
|
||||
never!(
|
||||
"Replacement ({:?}) was generated for a enum without a name: {:?}",
|
||||
replacement,
|
||||
"Replacement ({:?}) was generated for an enum variant \
|
||||
which was not found: {:?}",
|
||||
variant_replacement,
|
||||
enum_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: enum_src.file_id,
|
||||
ident_type: IdentType::Enum,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
expected_case: replacement.expected_case,
|
||||
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: replacement.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
}
|
||||
|
||||
let enum_variants_list = match enum_src.value.variant_list() {
|
||||
Some(variants) => variants,
|
||||
_ => {
|
||||
always!(
|
||||
enum_variants_replacements.is_empty(),
|
||||
"Replacements ({:?}) were generated for a enum variants which had no fields list: {:?}",
|
||||
enum_variants_replacements,
|
||||
enum_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
let mut enum_variants_iter = enum_variants_list.variants();
|
||||
for variant_to_rename in enum_variants_replacements {
|
||||
// We assume that parameters in replacement are in the same order as in the
|
||||
// actual params list, but just some of them (ones that named correctly) are skipped.
|
||||
let ast_ptr = loop {
|
||||
match enum_variants_iter.next().and_then(|v| v.name()) {
|
||||
Some(variant_name) => {
|
||||
if variant_name.as_name() == variant_to_rename.current_name {
|
||||
break variant_name;
|
||||
}
|
||||
}
|
||||
None => {
|
||||
never!(
|
||||
"Replacement ({:?}) was generated for a enum variant which was not found: {:?}",
|
||||
variant_to_rename, enum_src
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: enum_src.file_id,
|
||||
ident_type: IdentType::Variant,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
expected_case: variant_to_rename.expected_case,
|
||||
ident_text: variant_to_rename.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: variant_to_rename.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
self.create_incorrect_case_diagnostic_for_ast_node(
|
||||
variant_replacement,
|
||||
enum_src.file_id,
|
||||
&variant,
|
||||
IdentType::Variant,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn validate_const(&mut self, const_id: ConstId) {
|
||||
let data = self.db.const_data(const_id);
|
||||
let container = const_id.lookup(self.db.upcast()).container;
|
||||
if self.is_trait_impl_container(container) {
|
||||
cov_mark::hit!(trait_impl_assoc_const_incorrect_case_ignored);
|
||||
return;
|
||||
}
|
||||
|
||||
if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
|
||||
return;
|
||||
}
|
||||
|
||||
let name = match &data.name {
|
||||
Some(name) => name,
|
||||
None => return,
|
||||
};
|
||||
|
||||
let const_name = name.to_smol_str();
|
||||
let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) {
|
||||
Replacement {
|
||||
current_name: name.clone(),
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::UpperSnakeCase,
|
||||
}
|
||||
} else {
|
||||
// Nothing to do here.
|
||||
let data = self.db.const_data(const_id);
|
||||
let Some(name) = &data.name else {
|
||||
return;
|
||||
};
|
||||
|
||||
let const_loc = const_id.lookup(self.db.upcast());
|
||||
let const_src = const_loc.source(self.db.upcast());
|
||||
|
||||
let ast_ptr = match const_src.value.name() {
|
||||
Some(name) => name,
|
||||
None => return,
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: const_src.file_id,
|
||||
ident_type: IdentType::Constant,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
expected_case: replacement.expected_case,
|
||||
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: replacement.suggested_text,
|
||||
};
|
||||
|
||||
self.sink.push(diagnostic);
|
||||
self.create_incorrect_case_diagnostic_for_item_name(
|
||||
const_id,
|
||||
name,
|
||||
CaseType::UpperSnakeCase,
|
||||
IdentType::Constant,
|
||||
);
|
||||
}
|
||||
|
||||
fn validate_static(&mut self, static_id: StaticId) {
|
||||
|
@@ -742,32 +585,91 @@ impl<'a> DeclValidator<'a> {
|
|||
return;
|
||||
}
|
||||
|
||||
let name = &data.name;
|
||||
self.create_incorrect_case_diagnostic_for_item_name(
|
||||
static_id,
|
||||
&data.name,
|
||||
CaseType::UpperSnakeCase,
|
||||
IdentType::StaticVariable,
|
||||
);
|
||||
}
|
||||
|
||||
let static_name = name.to_smol_str();
|
||||
let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) {
|
||||
Replacement {
|
||||
current_name: name.clone(),
|
||||
suggested_text: new_name,
|
||||
expected_case: CaseType::UpperSnakeCase,
|
||||
}
|
||||
} else {
|
||||
// Nothing to do here.
|
||||
fn validate_type_alias(&mut self, type_alias_id: TypeAliasId) {
|
||||
let container = type_alias_id.lookup(self.db.upcast()).container;
|
||||
if self.is_trait_impl_container(container) {
|
||||
cov_mark::hit!(trait_impl_assoc_type_incorrect_case_ignored);
|
||||
return;
|
||||
}
|
||||
|
||||
// Check whether non-snake case identifiers are allowed for this type alias.
|
||||
if self.allowed(type_alias_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check the type alias name.
|
||||
let data = self.db.type_alias_data(type_alias_id);
|
||||
self.create_incorrect_case_diagnostic_for_item_name(
|
||||
type_alias_id,
|
||||
&data.name,
|
||||
CaseType::UpperCamelCase,
|
||||
IdentType::TypeAlias,
|
||||
);
|
||||
}
|
||||
|
||||
fn create_incorrect_case_diagnostic_for_item_name<N, S, L>(
|
||||
&mut self,
|
||||
item_id: L,
|
||||
name: &Name,
|
||||
expected_case: CaseType,
|
||||
ident_type: IdentType,
|
||||
) where
|
||||
N: AstNode + HasName + fmt::Debug,
|
||||
S: HasSource<Value = N>,
|
||||
L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a>,
|
||||
{
|
||||
let to_expected_case_type = match expected_case {
|
||||
CaseType::LowerSnakeCase => to_lower_snake_case,
|
||||
CaseType::UpperSnakeCase => to_upper_snake_case,
|
||||
CaseType::UpperCamelCase => to_camel_case,
|
||||
};
|
||||
let Some(replacement) = to_expected_case_type(&name.to_smol_str()).map(|new_name| {
|
||||
Replacement { current_name: name.clone(), suggested_text: new_name, expected_case }
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let static_loc = static_id.lookup(self.db.upcast());
|
||||
let static_src = static_loc.source(self.db.upcast());
|
||||
let item_loc = item_id.lookup(self.db.upcast());
|
||||
let item_src = item_loc.source(self.db.upcast());
|
||||
self.create_incorrect_case_diagnostic_for_ast_node(
|
||||
replacement,
|
||||
item_src.file_id,
|
||||
&item_src.value,
|
||||
ident_type,
|
||||
);
|
||||
}
|
||||
|
||||
let ast_ptr = match static_src.value.name() {
|
||||
Some(name) => name,
|
||||
None => return,
|
||||
fn create_incorrect_case_diagnostic_for_ast_node<T>(
|
||||
&mut self,
|
||||
replacement: Replacement,
|
||||
file_id: HirFileId,
|
||||
node: &T,
|
||||
ident_type: IdentType,
|
||||
) where
|
||||
T: AstNode + HasName + fmt::Debug,
|
||||
{
|
||||
let Some(name_ast) = node.name() else {
|
||||
never!(
|
||||
"Replacement ({:?}) was generated for a {:?} without a name: {:?}",
|
||||
replacement,
|
||||
ident_type,
|
||||
node
|
||||
);
|
||||
return;
|
||||
};
|
||||
|
||||
let diagnostic = IncorrectCase {
|
||||
file: static_src.file_id,
|
||||
ident_type: IdentType::StaticVariable,
|
||||
ident: AstPtr::new(&ast_ptr),
|
||||
file: file_id,
|
||||
ident_type,
|
||||
ident: AstPtr::new(&name_ast),
|
||||
expected_case: replacement.expected_case,
|
||||
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
|
||||
suggested_text: replacement.suggested_text,
|
||||
|
@@ -775,4 +677,13 @@ impl<'a> DeclValidator<'a> {
|
|||
|
||||
self.sink.push(diagnostic);
|
||||
}
|
||||
|
||||
fn is_trait_impl_container(&self, container_id: ItemContainerId) -> bool {
|
||||
if let ItemContainerId::ImplId(impl_id) = container_id {
|
||||
if self.db.impl_trait(impl_id).is_some() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -27,7 +27,7 @@ use crate::{
|
|||
|
||||
pub(crate) use hir_def::{
|
||||
body::Body,
|
||||
hir::{Expr, ExprId, MatchArm, Pat, PatId},
|
||||
hir::{Expr, ExprId, MatchArm, Pat, PatId, Statement},
|
||||
LocalFieldId, VariantId,
|
||||
};
|
||||
|
||||
|
@@ -44,6 +44,12 @@ pub enum BodyValidationDiagnostic {
|
|||
match_expr: ExprId,
|
||||
uncovered_patterns: String,
|
||||
},
|
||||
RemoveTrailingReturn {
|
||||
return_expr: ExprId,
|
||||
},
|
||||
RemoveUnnecessaryElse {
|
||||
if_expr: ExprId,
|
||||
},
|
||||
}
|
||||
|
||||
impl BodyValidationDiagnostic {
|
||||
|
@@ -72,6 +78,10 @@ impl ExprValidator {
|
|||
let body = db.body(self.owner);
|
||||
let mut filter_map_next_checker = None;
|
||||
|
||||
if matches!(self.owner, DefWithBodyId::FunctionId(_)) {
|
||||
self.check_for_trailing_return(body.body_expr, &body);
|
||||
}
|
||||
|
||||
for (id, expr) in body.exprs.iter() {
|
||||
if let Some((variant, missed_fields, true)) =
|
||||
record_literal_missing_fields(db, &self.infer, id, expr)
|
||||
|
@@ -90,9 +100,16 @@ impl ExprValidator {
|
|||
Expr::Call { .. } | Expr::MethodCall { .. } => {
|
||||
self.validate_call(db, id, expr, &mut filter_map_next_checker);
|
||||
}
|
||||
Expr::Closure { body: body_expr, .. } => {
|
||||
self.check_for_trailing_return(*body_expr, &body);
|
||||
}
|
||||
Expr::If { .. } => {
|
||||
self.check_for_unnecessary_else(id, expr, &body);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
for (id, pat) in body.pats.iter() {
|
||||
if let Some((variant, missed_fields, true)) =
|
||||
record_pattern_missing_fields(db, &self.infer, id, pat)
|
||||
|
@@ -153,14 +170,7 @@ impl ExprValidator {
|
|||
}
|
||||
|
||||
let pattern_arena = Arena::new();
|
||||
let ty_arena = Arena::new();
|
||||
let cx = MatchCheckCtx::new(
|
||||
self.owner.module(db.upcast()),
|
||||
self.owner,
|
||||
db,
|
||||
&pattern_arena,
|
||||
&ty_arena,
|
||||
);
|
||||
let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena);
|
||||
|
||||
let mut m_arms = Vec::with_capacity(arms.len());
|
||||
let mut has_lowering_errors = false;
|
||||
|
@@ -207,7 +217,7 @@ impl ExprValidator {
|
|||
}
|
||||
|
||||
let report = match compute_match_usefulness(
|
||||
rustc_pattern_analysis::MatchCtxt { tycx: &cx },
|
||||
&cx,
|
||||
m_arms.as_slice(),
|
||||
scrut_ty.clone(),
|
||||
ValidityConstraint::ValidOnly,
|
||||
|
@@ -244,6 +254,59 @@ impl ExprValidator {
|
|||
}
|
||||
pattern
|
||||
}
|
||||
|
||||
fn check_for_trailing_return(&mut self, body_expr: ExprId, body: &Body) {
|
||||
match &body.exprs[body_expr] {
|
||||
Expr::Block { statements, tail, .. } => {
|
||||
let last_stmt = tail.or_else(|| match statements.last()? {
|
||||
Statement::Expr { expr, .. } => Some(*expr),
|
||||
_ => None,
|
||||
});
|
||||
if let Some(last_stmt) = last_stmt {
|
||||
self.check_for_trailing_return(last_stmt, body);
|
||||
}
|
||||
}
|
||||
Expr::If { then_branch, else_branch, .. } => {
|
||||
self.check_for_trailing_return(*then_branch, body);
|
||||
if let Some(else_branch) = else_branch {
|
||||
self.check_for_trailing_return(*else_branch, body);
|
||||
}
|
||||
}
|
||||
Expr::Match { arms, .. } => {
|
||||
for arm in arms.iter() {
|
||||
let MatchArm { expr, .. } = arm;
|
||||
self.check_for_trailing_return(*expr, body);
|
||||
}
|
||||
}
|
||||
Expr::Return { .. } => {
|
||||
self.diagnostics.push(BodyValidationDiagnostic::RemoveTrailingReturn {
|
||||
return_expr: body_expr,
|
||||
});
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, body: &Body) {
|
||||
if let Expr::If { condition: _, then_branch, else_branch } = expr {
|
||||
if else_branch.is_none() {
|
||||
return;
|
||||
}
|
||||
if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] {
|
||||
let last_then_expr = tail.or_else(|| match statements.last()? {
|
||||
Statement::Expr { expr, .. } => Some(*expr),
|
||||
_ => None,
|
||||
});
|
||||
if let Some(last_then_expr) = last_then_expr {
|
||||
let last_then_expr_ty = &self.infer[last_then_expr];
|
||||
if last_then_expr_ty.is_never() {
|
||||
self.diagnostics
|
||||
.push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct FilterMapNextChecker {
|
||||
|
|
|
@@ -9,7 +9,7 @@ use rustc_pattern_analysis::{
|
|||
index::IdxContainer,
|
||||
Captures, TypeCx,
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use stdx::never;
|
||||
use typed_arena::Arena;
|
||||
|
||||
|
@@ -41,8 +41,14 @@ pub(crate) struct MatchCheckCtx<'p> {
|
|||
body: DefWithBodyId,
|
||||
pub(crate) db: &'p dyn HirDatabase,
|
||||
pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
|
||||
ty_arena: &'p Arena<Ty>,
|
||||
exhaustive_patterns: bool,
|
||||
min_exhaustive_patterns: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct PatData<'p> {
|
||||
/// Keep db around so that we can print variant names in `Debug`.
|
||||
pub(crate) db: &'p dyn HirDatabase,
|
||||
}
|
||||
|
||||
impl<'p> MatchCheckCtx<'p> {
|
||||
|
@@ -51,11 +57,12 @@ impl<'p> MatchCheckCtx<'p> {
|
|||
body: DefWithBodyId,
|
||||
db: &'p dyn HirDatabase,
|
||||
pattern_arena: &'p Arena<DeconstructedPat<'p>>,
|
||||
ty_arena: &'p Arena<Ty>,
|
||||
) -> Self {
|
||||
let def_map = db.crate_def_map(module.krate());
|
||||
let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns");
|
||||
Self { module, body, db, pattern_arena, exhaustive_patterns, ty_arena }
|
||||
let min_exhaustive_patterns =
|
||||
def_map.is_unstable_feature_enabled("min_exhaustive_patterns");
|
||||
Self { module, body, db, pattern_arena, exhaustive_patterns, min_exhaustive_patterns }
|
||||
}
|
||||
|
||||
fn is_uninhabited(&self, ty: &Ty) -> bool {
|
||||
|
@@ -75,18 +82,15 @@ impl<'p> MatchCheckCtx<'p> {
|
|||
}
|
||||
}
|
||||
|
||||
fn variant_id_for_adt(&self, ctor: &Constructor<Self>, adt: hir_def::AdtId) -> VariantId {
|
||||
fn variant_id_for_adt(ctor: &Constructor<Self>, adt: hir_def::AdtId) -> Option<VariantId> {
|
||||
match ctor {
|
||||
&Variant(id) => id.into(),
|
||||
Struct | UnionField => {
|
||||
assert!(!matches!(adt, hir_def::AdtId::EnumId(_)));
|
||||
match adt {
|
||||
hir_def::AdtId::EnumId(_) => unreachable!(),
|
||||
hir_def::AdtId::StructId(id) => id.into(),
|
||||
hir_def::AdtId::UnionId(id) => id.into(),
|
||||
}
|
||||
}
|
||||
_ => panic!("bad constructor {self:?} for adt {adt:?}"),
|
||||
&Variant(id) => Some(id.into()),
|
||||
Struct | UnionField => match adt {
|
||||
hir_def::AdtId::EnumId(_) => None,
|
||||
hir_def::AdtId::StructId(id) => Some(id.into()),
|
||||
hir_def::AdtId::UnionId(id) => Some(id.into()),
|
||||
},
|
||||
_ => panic!("bad constructor {ctor:?} for adt {adt:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -200,7 +204,7 @@ impl<'p> MatchCheckCtx<'p> {
|
|||
Wildcard
|
||||
}
|
||||
};
|
||||
let variant = self.variant_id_for_adt(&ctor, adt.0);
|
||||
let variant = Self::variant_id_for_adt(&ctor, adt.0).unwrap();
|
||||
let fields_len = variant.variant_data(self.db.upcast()).fields().len();
|
||||
// For each field in the variant, we store the relevant index into `self.fields` if any.
|
||||
let mut field_id_to_id: Vec<Option<usize>> = vec![None; fields_len];
|
||||
|
@@ -241,7 +245,8 @@ impl<'p> MatchCheckCtx<'p> {
|
|||
fields = self.pattern_arena.alloc_extend(subpats);
|
||||
}
|
||||
}
|
||||
DeconstructedPat::new(ctor, fields, pat.ty.clone(), ())
|
||||
let data = PatData { db: self.db };
|
||||
DeconstructedPat::new(ctor, fields, pat.ty.clone(), data)
|
||||
}
|
||||
|
||||
pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat {
|
||||
|
@@ -266,7 +271,7 @@ impl<'p> MatchCheckCtx<'p> {
|
|||
PatKind::Deref { subpattern: subpatterns.next().unwrap() }
|
||||
}
|
||||
TyKind::Adt(adt, substs) => {
|
||||
let variant = self.variant_id_for_adt(pat.ctor(), adt.0);
|
||||
let variant = Self::variant_id_for_adt(pat.ctor(), adt.0).unwrap();
|
||||
let subpatterns = self
|
||||
.list_variant_nonhidden_fields(pat.ty(), variant)
|
||||
.zip(subpatterns)
|
||||
|
@@ -307,11 +312,14 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
|||
type VariantIdx = EnumVariantId;
|
||||
type StrLit = Void;
|
||||
type ArmData = ();
|
||||
type PatData = ();
|
||||
type PatData = PatData<'p>;
|
||||
|
||||
fn is_exhaustive_patterns_feature_on(&self) -> bool {
|
||||
self.exhaustive_patterns
|
||||
}
|
||||
fn is_min_exhaustive_patterns_feature_on(&self) -> bool {
|
||||
self.min_exhaustive_patterns
|
||||
}
|
||||
|
||||
fn ctor_arity(
|
||||
&self,
|
||||
|
@@ -327,7 +335,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
|||
// patterns. If we're here we can assume this is a box pattern.
|
||||
1
|
||||
} else {
|
||||
let variant = self.variant_id_for_adt(ctor, adt);
|
||||
let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
|
||||
self.list_variant_nonhidden_fields(ty, variant).count()
|
||||
}
|
||||
}
|
||||
|
@@ -347,54 +355,51 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
|||
}
|
||||
}
|
||||
|
||||
fn ctor_sub_tys(
|
||||
&self,
|
||||
ctor: &rustc_pattern_analysis::constructor::Constructor<Self>,
|
||||
ty: &Self::Ty,
|
||||
) -> &[Self::Ty] {
|
||||
use std::iter::once;
|
||||
fn alloc<'a>(cx: &'a MatchCheckCtx<'_>, iter: impl Iterator<Item = Ty>) -> &'a [Ty] {
|
||||
cx.ty_arena.alloc_extend(iter)
|
||||
}
|
||||
match ctor {
|
||||
fn ctor_sub_tys<'a>(
|
||||
&'a self,
|
||||
ctor: &'a rustc_pattern_analysis::constructor::Constructor<Self>,
|
||||
ty: &'a Self::Ty,
|
||||
) -> impl ExactSizeIterator<Item = Self::Ty> + Captures<'a> {
|
||||
let single = |ty| smallvec![ty];
|
||||
let tys: SmallVec<[_; 2]> = match ctor {
|
||||
Struct | Variant(_) | UnionField => match ty.kind(Interner) {
|
||||
TyKind::Tuple(_, substs) => {
|
||||
let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner));
|
||||
alloc(self, tys.cloned())
|
||||
tys.cloned().collect()
|
||||
}
|
||||
TyKind::Ref(.., rty) => alloc(self, once(rty.clone())),
|
||||
TyKind::Ref(.., rty) => single(rty.clone()),
|
||||
&TyKind::Adt(AdtId(adt), ref substs) => {
|
||||
if is_box(self.db, adt) {
|
||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||
// patterns. If we're here we can assume this is a box pattern.
|
||||
let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
|
||||
alloc(self, once(subst_ty))
|
||||
single(subst_ty)
|
||||
} else {
|
||||
let variant = self.variant_id_for_adt(ctor, adt);
|
||||
let tys = self.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty);
|
||||
alloc(self, tys)
|
||||
let variant = Self::variant_id_for_adt(ctor, adt).unwrap();
|
||||
self.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty).collect()
|
||||
}
|
||||
}
|
||||
ty_kind => {
|
||||
never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
|
||||
alloc(self, once(ty.clone()))
|
||||
single(ty.clone())
|
||||
}
|
||||
},
|
||||
Ref => match ty.kind(Interner) {
|
||||
TyKind::Ref(.., rty) => alloc(self, once(rty.clone())),
|
||||
TyKind::Ref(.., rty) => single(rty.clone()),
|
||||
ty_kind => {
|
||||
never!("Unexpected type for `{:?}` constructor: {:?}", ctor, ty_kind);
|
||||
alloc(self, once(ty.clone()))
|
||||
single(ty.clone())
|
||||
}
|
||||
},
|
||||
Slice(_) => unreachable!("Found a `Slice` constructor in match checking"),
|
||||
Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
|
||||
| NonExhaustive | Hidden | Missing | Wildcard => &[],
|
||||
| NonExhaustive | Hidden | Missing | Wildcard => smallvec![],
|
||||
Or => {
|
||||
never!("called `Fields::wildcards` on an `Or` ctor");
|
||||
&[]
|
||||
smallvec![]
|
||||
}
|
||||
}
|
||||
};
|
||||
tys.into_iter()
|
||||
}
|
||||
|
||||
fn ctors_for_ty(
|
||||
|
@@ -456,11 +461,27 @@ impl<'p> TypeCx for MatchCheckCtx<'p> {
|
|||
})
|
||||
}
|
||||
|
||||
fn debug_pat(
|
||||
_f: &mut fmt::Formatter<'_>,
|
||||
_pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>,
|
||||
fn write_variant_name(
|
||||
f: &mut fmt::Formatter<'_>,
|
||||
pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>,
|
||||
) -> fmt::Result {
|
||||
// FIXME: implement this, as using `unimplemented!()` causes panics in `tracing`.
|
||||
let variant =
|
||||
pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt));
|
||||
|
||||
let db = pat.data().unwrap().db;
|
||||
if let Some(variant) = variant {
|
||||
match variant {
|
||||
VariantId::EnumVariantId(v) => {
|
||||
write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?;
|
||||
}
|
||||
VariantId::StructId(s) => {
|
||||
write!(f, "{}", db.struct_data(s).name.display(db.upcast()))?
|
||||
}
|
||||
VariantId::UnionId(u) => {
|
||||
write!(f, "{}", db.union_data(u).name.display(db.upcast()))?
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
|
@@ -32,7 +32,7 @@ use triomphe::Arc;
|
|||
|
||||
use crate::{
|
||||
consteval::try_const_usize,
|
||||
db::HirDatabase,
|
||||
db::{HirDatabase, InternedClosure},
|
||||
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
|
||||
layout::Layout,
|
||||
lt_from_placeholder_idx,
|
||||
|
@@ -814,9 +814,8 @@ impl HirDisplay for Ty {
|
|||
|
||||
// Don't count Sized but count when it absent
|
||||
// (i.e. when explicit ?Sized bound is set).
|
||||
let default_sized = SizedByDefault::Sized {
|
||||
anchor: func.lookup(db.upcast()).module(db.upcast()).krate(),
|
||||
};
|
||||
let default_sized =
|
||||
SizedByDefault::Sized { anchor: func.krate(db.upcast()) };
|
||||
let sized_bounds = bounds
|
||||
.skip_binders()
|
||||
.iter()
|
||||
|
@@ -1025,7 +1024,7 @@ impl HirDisplay for Ty {
|
|||
let data =
|
||||
(*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
|
||||
let bounds = data.substitute(Interner, ¶meters);
|
||||
let krate = func.lookup(db.upcast()).module(db.upcast()).krate();
|
||||
let krate = func.krate(db.upcast());
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"impl",
|
||||
|
@@ -1086,7 +1085,7 @@ impl HirDisplay for Ty {
|
|||
}
|
||||
let sig = ClosureSubst(substs).sig_ty().callable_sig(db);
|
||||
if let Some(sig) = sig {
|
||||
let (def, _) = db.lookup_intern_closure((*id).into());
|
||||
let InternedClosure(def, _) = db.lookup_intern_closure((*id).into());
|
||||
let infer = db.infer(def);
|
||||
let (_, kind) = infer.closure_info(id);
|
||||
match f.closure_style {
|
||||
|
@@ -1191,7 +1190,7 @@ impl HirDisplay for Ty {
|
|||
let data =
|
||||
(*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
|
||||
let bounds = data.substitute(Interner, &opaque_ty.substitution);
|
||||
let krate = func.lookup(db.upcast()).module(db.upcast()).krate();
|
||||
let krate = func.krate(db.upcast());
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"impl",
|
||||
|
|
|
@@ -21,7 +21,7 @@ use smallvec::SmallVec;
|
|||
use stdx::never;
|
||||
|
||||
use crate::{
|
||||
db::HirDatabase,
|
||||
db::{HirDatabase, InternedClosure},
|
||||
from_placeholder_idx, make_binders,
|
||||
mir::{BorrowKind, MirSpan, ProjectionElem},
|
||||
static_lifetime, to_chalk_trait_id,
|
||||
|
@@ -194,17 +194,15 @@ impl CapturedItem {
|
|||
}
|
||||
let variant_data = f.parent.variant_data(db.upcast());
|
||||
let field = match &*variant_data {
|
||||
VariantData::Record(fields) => fields[f.local_id]
|
||||
.name
|
||||
.as_str()
|
||||
.unwrap_or("[missing field]")
|
||||
.to_string(),
|
||||
VariantData::Record(fields) => {
|
||||
fields[f.local_id].name.as_str().unwrap_or("[missing field]").to_owned()
|
||||
}
|
||||
VariantData::Tuple(fields) => fields
|
||||
.iter()
|
||||
.position(|it| it.0 == f.local_id)
|
||||
.unwrap_or_default()
|
||||
.to_string(),
|
||||
VariantData::Unit => "[missing field]".to_string(),
|
||||
VariantData::Unit => "[missing field]".to_owned(),
|
||||
};
|
||||
result = format!("{result}.{field}");
|
||||
field_need_paren = false;
|
||||
|
@@ -718,7 +716,7 @@ impl InferenceContext<'_> {
|
|||
|
||||
fn is_upvar(&self, place: &HirPlace) -> bool {
|
||||
if let Some(c) = self.current_closure {
|
||||
let (_, root) = self.db.lookup_intern_closure(c.into());
|
||||
let InternedClosure(_, root) = self.db.lookup_intern_closure(c.into());
|
||||
return self.body.is_binding_upvar(place.local, root);
|
||||
}
|
||||
false
|
||||
|
@@ -940,7 +938,7 @@ impl InferenceContext<'_> {
|
|||
}
|
||||
|
||||
fn analyze_closure(&mut self, closure: ClosureId) -> FnTrait {
|
||||
let (_, root) = self.db.lookup_intern_closure(closure.into());
|
||||
let InternedClosure(_, root) = self.db.lookup_intern_closure(closure.into());
|
||||
self.current_closure = Some(closure);
|
||||
let Expr::Closure { body, capture_by, .. } = &self.body[root] else {
|
||||
unreachable!("Closure expression id is always closure");
|
||||
|
|
|
@@ -23,6 +23,7 @@ use syntax::ast::RangeOp;
|
|||
use crate::{
|
||||
autoderef::{builtin_deref, deref_by_trait, Autoderef},
|
||||
consteval,
|
||||
db::{InternedClosure, InternedCoroutine},
|
||||
infer::{
|
||||
coerce::{CoerceMany, CoercionCause},
|
||||
find_continuable,
|
||||
|
@@ -253,13 +254,17 @@ impl InferenceContext<'_> {
|
|||
.push(ret_ty.clone())
|
||||
.build();
|
||||
|
||||
let coroutine_id = self.db.intern_coroutine((self.owner, tgt_expr)).into();
|
||||
let coroutine_id = self
|
||||
.db
|
||||
.intern_coroutine(InternedCoroutine(self.owner, tgt_expr))
|
||||
.into();
|
||||
let coroutine_ty = TyKind::Coroutine(coroutine_id, subst).intern(Interner);
|
||||
|
||||
(None, coroutine_ty, Some((resume_ty, yield_ty)))
|
||||
}
|
||||
ClosureKind::Closure | ClosureKind::Async => {
|
||||
let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
|
||||
let closure_id =
|
||||
self.db.intern_closure(InternedClosure(self.owner, tgt_expr)).into();
|
||||
let closure_ty = TyKind::Closure(
|
||||
closure_id,
|
||||
TyBuilder::subst_for_closure(self.db, self.owner, sig_ty.clone()),
|
||||
|
|
|
@@ -469,12 +469,14 @@ impl<'a> InferenceTable<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) {
|
||||
self.var_unification_table.rollback_to(snapshot.var_table_snapshot);
|
||||
self.type_variable_table = snapshot.type_variable_table_snapshot;
|
||||
self.pending_obligations = snapshot.pending_obligations;
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
pub(crate) fn run_in_snapshot<T>(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T {
|
||||
let snapshot = self.snapshot();
|
||||
let result = f(self);
|
||||
|
|
|
@@ -19,8 +19,12 @@ use stdx::never;
|
|||
use triomphe::Arc;
|
||||
|
||||
use crate::{
|
||||
consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx,
|
||||
utils::ClosureSubst, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
|
||||
consteval::try_const_usize,
|
||||
db::{HirDatabase, InternedClosure},
|
||||
infer::normalize,
|
||||
layout::adt::struct_variant_idx,
|
||||
utils::ClosureSubst,
|
||||
Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
|
||||
};
|
||||
|
||||
pub use self::{
|
||||
|
@@ -391,7 +395,7 @@ pub fn layout_of_ty_query(
|
|||
}
|
||||
}
|
||||
TyKind::Closure(c, subst) => {
|
||||
let (def, _) = db.lookup_intern_closure((*c).into());
|
||||
let InternedClosure(def, _) = db.lookup_intern_closure((*c).into());
|
||||
let infer = db.infer(def);
|
||||
let (captures, _) = infer.closure_info(c);
|
||||
let fields = captures
|
||||
|
|
|
@@ -1,8 +1,7 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use chalk_ir::{AdtId, TyKind};
|
||||
use either::Either;
|
||||
use hir_def::db::DefDatabase;
|
||||
use rustc_hash::FxHashMap;
|
||||
use test_fixture::WithFixture;
|
||||
use triomphe::Arc;
|
||||
|
||||
|
@@ -16,7 +15,7 @@ use crate::{
|
|||
mod closure;
|
||||
|
||||
fn current_machine_data_layout() -> String {
|
||||
project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap()
|
||||
project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap()
|
||||
}
|
||||
|
||||
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
|
||||
|
|
|
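The layout tests above drop `std::collections::HashMap` in favour of `rustc_hash::FxHashMap`. A small comparison sketch (assuming the `rustc_hash` crate): `FxHashMap` is std's `HashMap` with the faster, non-DoS-resistant FxHasher plugged in, which is why construction switches from `new()` to `default()`:

    use rustc_hash::FxHashMap;
    use std::collections::HashMap;

    fn main() {
        // std HashMap: SipHash by default, built with new().
        let mut std_map: HashMap<&str, u32> = HashMap::new();
        std_map.insert("layout", 1);

        // FxHashMap: same API, FxHasher as the build hasher, built with default().
        let mut fx_map: FxHashMap<&str, u32> = FxHashMap::default();
        fx_map.insert("layout", 1);

        assert_eq!(std_map["layout"], fx_map["layout"]);
    }
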
@@ -51,6 +51,7 @@ use std::{
hash::{BuildHasherDefault, Hash},
};

use base_db::salsa::impl_intern_value_trivial;
use chalk_ir::{
fold::{Shift, TypeFoldable},
interner::HasInterner,
@@ -228,7 +229,7 @@ impl MemoryMap {
&self,
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
) -> Result<FxHashMap<usize, usize>, MirEvalError> {
let mut transform = |(addr, val): (&usize, &Box<[u8]>)| {
let mut transform = |(addr, val): (&usize, &[u8])| {
let addr = *addr;
let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
f(val, align).map(|it| (addr, it))
@@ -240,7 +241,9 @@ impl MemoryMap {
map.insert(addr, val);
map
}),
MemoryMap::Complex(cm) => cm.memory.iter().map(transform).collect(),
MemoryMap::Complex(cm) => {
cm.memory.iter().map(|(addr, val)| transform((addr, val))).collect()
}
}
}

@@ -584,6 +587,7 @@ pub enum ImplTraitId {
ReturnTypeImplTrait(hir_def::FunctionId, RpitId),
AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
}
impl_intern_value_trivial!(ImplTraitId);

#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct ReturnTypeImplTraits {

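In the `MemoryMap` hunks above, the helper closure now borrows `&[u8]` instead of `&Box<[u8]>` (the shape clippy's `borrowed_box` lint flags). Since the map still stores `Box<[u8]>`, the iterator item no longer matches the helper exactly, which is why the `Complex` arm wraps the call in a closure and lets deref coercion turn `&Box<[u8]>` into `&[u8]`. A reduced sketch of the same pattern, with a plain `BTreeMap` standing in for the real memory map:

    use std::collections::BTreeMap;

    // Takes a plain slice, so callers holding &Box<[u8]>, &Vec<u8>, or &[u8; N] can all use it.
    fn transform((addr, val): (&usize, &[u8])) -> (usize, usize) {
        (*addr, val.len())
    }

    fn main() {
        let mut memory: BTreeMap<usize, Box<[u8]>> = BTreeMap::new();
        memory.insert(16, vec![1, 2, 3].into_boxed_slice());

        // iter() yields (&usize, &Box<[u8]>); re-borrowing in a closure lets deref coercion
        // produce the &[u8] that `transform` expects, mirroring the new `Complex` arm.
        let sizes: Vec<(usize, usize)> =
            memory.iter().map(|(addr, val)| transform((addr, val))).collect();
        assert_eq!(sizes, vec![(16, 3)]);
    }
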
@@ -10,7 +10,10 @@ use std::{
iter,
};

use base_db::{salsa::Cycle, CrateId};
use base_db::{
salsa::{impl_intern_value_trivial, Cycle},
CrateId,
};
use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
};
@@ -1225,7 +1228,7 @@ impl<'a> TyLoweringContext<'a> {
.collect();

if !ctx.unsized_types.borrow().contains(&self_ty) {
let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate();
let krate = func.krate(ctx.db.upcast());
let sized_trait = ctx
.db
.lang_item(krate, LangItem::Sized)
@@ -1809,6 +1812,7 @@ pub enum CallableDefId {
StructId(StructId),
EnumVariantId(EnumVariantId),
}
impl_intern_value_trivial!(CallableDefId);
impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
impl From<CallableDefId> for ModuleDefId {
fn from(def: CallableDefId) -> ModuleDefId {
@@ -1824,11 +1828,10 @@ impl CallableDefId {
pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
let db = db.upcast();
match self {
CallableDefId::FunctionId(f) => f.lookup(db).module(db),
CallableDefId::StructId(s) => s.lookup(db).container,
CallableDefId::EnumVariantId(e) => e.module(db),
CallableDefId::FunctionId(f) => f.krate(db),
CallableDefId::StructId(s) => s.krate(db),
CallableDefId::EnumVariantId(e) => e.krate(db),
}
.krate()
}
}

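The `CallableDefId::krate` hunk above replaces the per-variant module walk plus a trailing `.krate()` with direct delegation to a `krate` helper on each id type. A reduced sketch of that refactor shape, using hypothetical stand-in types rather than the hir_def ones:

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct CrateId(u32);

    #[derive(Clone, Copy)]
    struct FunctionId(CrateId);
    #[derive(Clone, Copy)]
    struct StructId(CrateId);

    impl FunctionId {
        fn krate(self) -> CrateId { self.0 }
    }
    impl StructId {
        fn krate(self) -> CrateId { self.0 }
    }

    enum CallableDefId {
        FunctionId(FunctionId),
        StructId(StructId),
    }

    impl CallableDefId {
        // Each arm delegates to the variant's own helper instead of producing an
        // intermediate value that the whole match then post-processes.
        fn krate(&self) -> CrateId {
            match *self {
                CallableDefId::FunctionId(f) => f.krate(),
                CallableDefId::StructId(s) => s.krate(),
            }
        }
    }

    fn main() {
        let def = CallableDefId::StructId(StructId(CrateId(1)));
        assert_eq!(def.krate(), CrateId(1));
    }
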
@@ -931,6 +931,15 @@ pub fn iterate_method_candidates_dyn(
mode: LookupMode,
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
) -> ControlFlow<()> {
let _p = tracing::span!(
tracing::Level::INFO,
"iterate_method_candidates_dyn",
?mode,
?name,
traits_in_scope_len = traits_in_scope.len()
)
.entered();

match mode {
LookupMode::MethodCall => {
// For method calls, rust first does any number of autoderef, and
@@ -984,6 +993,7 @@ pub fn iterate_method_candidates_dyn(
}
}

#[tracing::instrument(skip_all, fields(name = ?name))]
fn iterate_method_candidates_with_autoref(
receiver_ty: &Canonical<Ty>,
first_adjustment: ReceiverAdjustments,
@@ -1041,6 +1051,7 @@ fn iterate_method_candidates_with_autoref(
)
}

#[tracing::instrument(skip_all, fields(name = ?name))]
fn iterate_method_candidates_by_receiver(
receiver_ty: &Canonical<Ty>,
receiver_adjustments: ReceiverAdjustments,
@@ -1088,6 +1099,7 @@ fn iterate_method_candidates_by_receiver(
ControlFlow::Continue(())
}

#[tracing::instrument(skip_all, fields(name = ?name))]
fn iterate_method_candidates_for_self_ty(
self_ty: &Canonical<Ty>,
db: &dyn HirDatabase,
@@ -1119,6 +1131,7 @@ fn iterate_method_candidates_for_self_ty(
)
}

#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
fn iterate_trait_method_candidates(
self_ty: &Ty,
table: &mut InferenceTable<'_>,
@@ -1175,6 +1188,7 @@ fn iterate_trait_method_candidates(
ControlFlow::Continue(())
}

#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
fn iterate_inherent_methods(
self_ty: &Ty,
table: &mut InferenceTable<'_>,
@@ -1267,6 +1281,7 @@ fn iterate_inherent_methods(
}
return ControlFlow::Continue(());

#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
fn iterate_inherent_trait_methods(
self_ty: &Ty,
table: &mut InferenceTable<'_>,
@@ -1293,6 +1308,7 @@ fn iterate_inherent_methods(
ControlFlow::Continue(())
}

#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
fn impls_for_self_ty(
impls: &InherentImpls,
self_ty: &Ty,
@@ -1356,6 +1372,7 @@ macro_rules! check_that {
};
}

#[tracing::instrument(skip_all, fields(name))]
fn is_valid_candidate(
table: &mut InferenceTable<'_>,
name: Option<&Name>,
@@ -1403,6 +1420,7 @@ enum IsValidCandidate {
NotVisible,
}

#[tracing::instrument(skip_all, fields(name))]
fn is_valid_fn_candidate(
table: &mut InferenceTable<'_>,
fn_id: FunctionId,
@@ -1439,15 +1457,15 @@ fn is_valid_fn_candidate(
_ => unreachable!(),
};

let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
.fill_with_inference_vars(table)
.build();

check_that!(table.unify(&expect_self_ty, self_ty));

if let Some(receiver_ty) = receiver_ty {
check_that!(data.has_self_param());

let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
.fill_with_inference_vars(table)
.build();

let sig = db.callable_item_signature(fn_id.into());
let expected_receiver =
sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst);
@@ -1540,6 +1558,7 @@ pub fn implements_trait_unique(

/// This creates Substs for a trait with the given Self type and type variables
/// for all other parameters, to query Chalk with it.
#[tracing::instrument(skip_all)]
fn generic_implements_goal(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,

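Besides the `#[tracing::instrument]` attributes, the first hunk in this file opens a span by hand with `tracing::span!(...).entered()`, which is useful when the span needs computed fields such as `traits_in_scope_len`. A minimal sketch, again assuming `tracing` plus a `tracing-subscriber` setup that is not part of this diff:

    fn search(mode: &str, candidates: &[&str]) -> usize {
        // The guard `_p` keeps the span entered until it is dropped at the end of the
        // function; `?mode` records the field with its Debug representation.
        let _p = tracing::span!(
            tracing::Level::INFO,
            "search",
            ?mode,
            candidates_len = candidates.len()
        )
        .entered();

        tracing::info!("scanning candidates");
        candidates.len()
    }

    fn main() {
        tracing_subscriber::fmt().with_max_level(tracing::Level::INFO).init();
        let n = search("method-call", &["clone", "len"]);
        tracing::info!(n, "finished");
    }
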
@@ -11,7 +11,10 @@ use stdx::never;
use triomphe::Arc;

use crate::{
db::HirDatabase, mir::Operand, utils::ClosureSubst, ClosureId, Interner, Ty, TyExt, TypeFlags,
db::{HirDatabase, InternedClosure},
mir::Operand,
utils::ClosureSubst,
ClosureId, Interner, Ty, TyExt, TypeFlags,
};

use super::{
@@ -97,7 +100,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
ty,
db,
|c, subst, f| {
let (def, _) = db.lookup_intern_closure(c.into());
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
@@ -215,7 +218,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
ty,
db,
|c, subst, f| {
let (def, _) = db.lookup_intern_closure(c.into());
let InternedClosure(def, _) = db.lookup_intern_closure(c.into());
let infer = db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();

@@ -25,7 +25,7 @@ use triomphe::Arc;

use crate::{
consteval::{intern_const_scalar, try_const_usize, ConstEvalError},
db::HirDatabase,
db::{HirDatabase, InternedClosure},
display::{ClosureStyle, HirDisplay},
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
@@ -647,7 +647,7 @@ impl Evaluator<'_> {
ty.clone(),
self.db,
|c, subst, f| {
let (def, _) = self.db.lookup_intern_closure(c.into());
let InternedClosure(def, _) = self.db.lookup_intern_closure(c.into());
let infer = self.db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
@@ -1763,7 +1763,7 @@ impl Evaluator<'_> {
}
};
mem.get(pos..pos + size)
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string()))
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_owned()))
}

fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<&mut [u8]> {
@@ -1777,7 +1777,7 @@ impl Evaluator<'_> {
}
};
mem.get_mut(pos..pos + size)
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_string()))
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_owned()))
}

fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
@@ -1800,7 +1800,7 @@ impl Evaluator<'_> {
return Ok(());
}

let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_string());
let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_owned());

match (addr, r.addr) {
(Stack(dst), Stack(src)) => {
@@ -2653,7 +2653,7 @@ pub fn render_const_using_debug_impl(
ptr: ArenaMap::new(),
body: db
.mir_body(owner.into())
.map_err(|_| MirEvalError::NotSupported("unreachable".to_string()))?,
.map_err(|_| MirEvalError::NotSupported("unreachable".to_owned()))?,
drop_flags: DropFlags::default(),
};
let data = evaluator.allocate_const_in_heap(locals, c)?;

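Many hunks in the evaluator only swap `.to_string()` for `.to_owned()` on string literals. Both return a `String`; `to_owned` states "copy this borrowed `str`" directly via `ToOwned`, which is the spelling clippy's `str_to_string` lint suggests, while `to_string` goes through the `ToString`/`Display` machinery. A one-line comparison:

    fn main() {
        let msg: &str = "out of bound memory read";
        let via_display: String = msg.to_string(); // ToString, backed by the Display impl
        let via_to_owned: String = msg.to_owned(); // ToOwned, a plain copy of the borrowed data
        assert_eq!(via_display, via_to_owned);
    }
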
@@ -178,7 +178,7 @@ impl Evaluator<'_> {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let (closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let InternedClosure(closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
let layout = self.layout(&self_ty)?;
@@ -304,7 +304,7 @@ impl Evaluator<'_> {
use LangItem::*;
let mut args = args.iter();
match it {
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_owned())),
PanicFmt => {
let message = (|| {
let resolver = self

@@ -25,7 +25,7 @@ use triomphe::Arc;

use crate::{
consteval::ConstEvalError,
db::HirDatabase,
db::{HirDatabase, InternedClosure},
display::HirDisplay,
infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from,
@@ -126,6 +126,10 @@ impl DropScopeToken {
}
}

impl Drop for DropScopeToken {
fn drop(&mut self) {}
}

// Uncomment this to make `DropScopeToken` a drop bomb. Unfortunately we can't do this in release, since
// in cases that mir lowering fails, we don't handle (and don't need to handle) drop scopes so it will be
// actually reached. `pop_drop_scope_assert_finished` will also detect this case, but doesn't show useful
@@ -1630,7 +1634,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.set_goto(prev_block, begin, span);
f(self, begin)?;
let my = mem::replace(&mut self.current_loop_blocks, prev).ok_or(
MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_string()),
MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_owned()),
)?;
if let Some(prev) = prev_label {
self.labeled_loop_blocks.insert(label.unwrap(), prev);
@@ -1665,7 +1669,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
.current_loop_blocks
.as_mut()
.ok_or(MirLowerError::ImplementationError(
"Current loop access out of loop".to_string(),
"Current loop access out of loop".to_owned(),
))?
.end
{
@@ -1675,7 +1679,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.current_loop_blocks
.as_mut()
.ok_or(MirLowerError::ImplementationError(
"Current loop access out of loop".to_string(),
"Current loop access out of loop".to_owned(),
))?
.end = Some(s);
s
@@ -1973,7 +1977,7 @@ pub fn mir_body_for_closure_query(
db: &dyn HirDatabase,
closure: ClosureId,
) -> Result<Arc<MirBody>> {
let (owner, expr) = db.lookup_intern_closure(closure.into());
let InternedClosure(owner, expr) = db.lookup_intern_closure(closure.into());
let body = db.body(owner);
let infer = db.infer(owner);
let Expr::Closure { args, body: root, .. } = &body[expr] else {

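The `DropScopeToken` hunk adds an intentionally empty `Drop` impl right above a comment describing an opt-in "drop bomb". As an illustration of that commented-out idea only (not of the empty impl the diff actually adds), a token can panic in debug builds when it is dropped implicitly instead of being handed back through the expected pop path:

    struct DropScopeToken;

    impl Drop for DropScopeToken {
        fn drop(&mut self) {
            // The real impl in this diff keeps this body empty; a debug-only panic here is
            // the "drop bomb" variant the neighbouring comment talks about.
            debug_assert!(false, "drop scope was not popped explicitly");
        }
    }

    fn pop_drop_scope(token: DropScopeToken) {
        // Defuse the bomb once the scope is closed properly.
        std::mem::forget(token);
    }

    fn main() {
        let token = DropScopeToken;
        pop_drop_scope(token); // letting `token` fall out of scope would trip the assertion
    }
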
@@ -225,7 +225,7 @@ impl MirLowerCtx<'_> {
{
let Some(index_fn) = self.infer.method_resolution(expr_id) else {
return Err(MirLowerError::UnresolvedMethod(
"[overloaded index]".to_string(),
"[overloaded index]".to_owned(),
));
};
let Some((base_place, current)) =

@@ -19,7 +19,7 @@ use triomphe::Arc;

use crate::{
consteval::{intern_const_scalar, unknown_const},
db::HirDatabase,
db::{HirDatabase, InternedClosure},
from_placeholder_idx,
infer::normalize,
utils::{generics, Generics},
@@ -315,7 +315,7 @@ pub fn monomorphized_mir_body_for_closure_query(
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
let (owner, _) = db.lookup_intern_closure(closure.into());
let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into());
let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
let body = db.mir_body_for_closure(closure)?;

@@ -10,7 +10,7 @@ mod regression;
mod simple;
mod traits;

use std::{collections::HashMap, env};
use std::env;

use base_db::{FileRange, SourceDatabaseExt};
use expect_test::Expect;
@@ -25,6 +25,7 @@ use hir_def::{
};
use hir_expand::{db::ExpandDatabase, InFile};
use once_cell::race::OnceBool;
use rustc_hash::FxHashMap;
use stdx::format_to;
use syntax::{
ast::{self, AstNode, HasName},
@@ -90,16 +91,16 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let (db, files) = TestDB::with_many_files(ra_fixture);

let mut had_annotations = false;
let mut mismatches = HashMap::new();
let mut types = HashMap::new();
let mut adjustments = HashMap::<_, Vec<_>>::new();
let mut mismatches = FxHashMap::default();
let mut types = FxHashMap::default();
let mut adjustments = FxHashMap::<_, Vec<_>>::default();
for (file_id, annotations) in db.extract_annotations() {
for (range, expected) in annotations {
let file_range = FileRange { file_id, range };
if only_types {
types.insert(file_range, expected);
} else if expected.starts_with("type: ") {
types.insert(file_range, expected.trim_start_matches("type: ").to_string());
types.insert(file_range, expected.trim_start_matches("type: ").to_owned());
} else if expected.starts_with("expected") {
mismatches.insert(file_range, expected);
} else if expected.starts_with("adjustments:") {
@@ -109,7 +110,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
.trim_start_matches("adjustments:")
.trim()
.split(',')
.map(|it| it.trim().to_string())
.map(|it| it.trim().to_owned())
.filter(|it| !it.is_empty())
.collect(),
);
@@ -330,7 +331,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
});
for (node, ty) in &types {
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
(self_param.name().unwrap().syntax().text_range(), "self".to_string())
(self_param.name().unwrap().syntax().text_range(), "self".to_owned())
} else {
(node.value.text_range(), node.value.text().to_string().replace('\n', " "))
};

@@ -1373,3 +1373,34 @@ pub fn attr_macro() {}
"#,
);
}

#[test]
fn clone_with_type_bound() {
check_types(
r#"
//- minicore: derive, clone, builtin_impls
#[derive(Clone)]
struct Float;

trait TensorKind: Clone {
/// The primitive type of the tensor.
type Primitive: Clone;
}

impl TensorKind for Float {
type Primitive = f64;
}

#[derive(Clone)]
struct Tensor<K = Float> where K: TensorKind
{
primitive: K::Primitive,
}

fn foo(t: Tensor) {
let x = t.clone();
//^ Tensor<Float>
}
"#,
);
}

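The new `clone_with_type_bound` test above pins down type inference for `.clone()` when the receiver uses a defaulted type parameter and the cloned field is an associated type. A plain-Rust version of the same scenario (the caret annotation in the fixture asserts that rust-analyzer infers `Tensor<Float>` for the call):

    #[derive(Clone)]
    struct Float;

    trait TensorKind: Clone {
        type Primitive: Clone;
    }

    impl TensorKind for Float {
        type Primitive = f64;
    }

    // `Tensor` written without arguments means `Tensor<Float>`; the Clone bound on the
    // `primitive` field comes from the `type Primitive: Clone` declaration in the trait.
    #[derive(Clone)]
    struct Tensor<K: TensorKind = Float> {
        primitive: K::Primitive,
    }

    fn foo(t: Tensor) {
        let x: Tensor<Float> = t.clone(); // the annotation in the fixture expects this type
        let _ = x.primitive;
    }

    fn main() {
        foo(Tensor { primitive: 0.0 });
    }
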
@@ -104,8 +104,8 @@ pub(crate) fn trait_solve_query(
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
}
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
_ => "??".to_string(),
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
_ => "??".to_owned(),
};
let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered();
tracing::info!("trait_solve_query({:?})", goal.value.goal);
@@ -187,7 +187,7 @@ struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, Ch

impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> {
fn drop(&mut self) {
eprintln!("chalk program:\n{}", self.0);
tracing::info!("chalk program:\n{}", self.0);
}
}

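The final hunk routes the chalk program dump through `tracing::info!` instead of `eprintln!`, so it respects the same subscriber and level filtering as the spans added elsewhere in this commit. A minimal sketch of the difference, with the subscriber setup assumed rather than taken from the diff:

    fn dump(program: &str) {
        eprintln!("chalk program (always written to stderr):\n{}", program);
        tracing::info!("chalk program (only when INFO is enabled):\n{}", program);
    }

    fn main() {
        // With the max level set to WARN, the tracing line above is filtered out
        // while the eprintln! line still prints unconditionally.
        tracing_subscriber::fmt().with_max_level(tracing::Level::WARN).init();
        dump("example chalk program");
    }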