Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-28 10:39:45 +00:00)
chore: Start infesting ide crates with 'db lifetime
Parent a31e10a2fd · commit 03f1003637
79 changed files with 1214 additions and 815 deletions
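The pattern applied throughout the hunks below is to name the lifetime of the `&dyn HirDatabase` borrow (`'db`) and thread it through the ide-layer wrapper types (`Type<'db>`, `TypeInfo<'db>`, `AnyDiagnostic<'db>`, and so on), so values derived from the database can no longer outlive it. A minimal standalone sketch of the idea, using made-up stand-in types rather than the real hir API:

// Minimal sketch (not rust-analyzer's real API): thread a `'db` lifetime through
// wrapper types so that values derived from the database cannot outlive the
// `&'db dyn Database` borrow they came from.
trait Database {
    fn type_name_of(&self, id: u32) -> String;
}

// Before this kind of change, a `Type` would hide the borrow; after, it carries `'db`.
struct Type<'db> {
    db: &'db dyn Database,
    id: u32,
}

impl<'db> Type<'db> {
    fn display(&self) -> String {
        self.db.type_name_of(self.id)
    }
}

// Functions producing such values now name the lifetime explicitly, mirroring
// changes like `fn resolve_assoc_item<'db>(db: &'db dyn HirDatabase, ...)`.
fn type_of_expr<'db>(db: &'db dyn Database, expr_id: u32) -> Type<'db> {
    Type { db, id: expr_id }
}

struct MockDb;
impl Database for MockDb {
    fn type_name_of(&self, id: u32) -> String {
        format!("Ty#{id}")
    }
}

fn main() {
    let db = MockDb;
    let ty = type_of_expr(&db, 0);
    println!("{}", ty.display()); // the `Type` is tied to the borrow of `db`
}

The commit itself is a signature-level refactor; the sketch only illustrates why the lifetime appears on both the parameter and the return type.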
@@ -242,9 +242,9 @@ fn resolve_assoc_or_field(
     resolve_field(db, variant_def, name, ns)
 }

-fn resolve_assoc_item(
-    db: &dyn HirDatabase,
-    ty: &Type,
+fn resolve_assoc_item<'db>(
+    db: &'db dyn HirDatabase,
+    ty: &Type<'db>,
     name: &Name,
     ns: Option<Namespace>,
 ) -> Option<DocLinkDef> {
@@ -256,10 +256,10 @@ fn resolve_assoc_item(
     })
 }

-fn resolve_impl_trait_item(
-    db: &dyn HirDatabase,
+fn resolve_impl_trait_item<'db>(
+    db: &'db dyn HirDatabase,
     resolver: Resolver<'_>,
-    ty: &Type,
+    ty: &Type<'db>,
     name: &Name,
     ns: Option<Namespace>,
 ) -> Option<DocLinkDef> {
@@ -36,15 +36,15 @@ pub use hir_ty::{
 };

 macro_rules! diagnostics {
-    ($($diag:ident,)*) => {
+    ($($diag:ident $(<$lt:lifetime>)?,)*) => {
         #[derive(Debug)]
-        pub enum AnyDiagnostic {$(
-            $diag(Box<$diag>),
+        pub enum AnyDiagnostic<'db> {$(
+            $diag(Box<$diag $(<$lt>)?>),
         )*}

         $(
-            impl From<$diag> for AnyDiagnostic {
-                fn from(d: $diag) -> AnyDiagnostic {
+            impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
+                fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
                     AnyDiagnostic::$diag(Box::new(d))
                 }
             }
@@ -69,12 +69,12 @@ macro_rules! diagnostics {
 diagnostics![
     AwaitOutsideOfAsync,
     BreakOutsideOfLoop,
-    CastToUnsized,
-    ExpectedFunction,
+    CastToUnsized<'db>,
+    ExpectedFunction<'db>,
     InactiveCode,
     IncoherentImpl,
     IncorrectCase,
-    InvalidCast,
+    InvalidCast<'db>,
     InvalidDeriveTarget,
     MacroDefError,
     MacroError,
@@ -85,7 +85,7 @@ diagnostics![
     MissingFields,
     MissingMatchArms,
     MissingUnsafe,
-    MovedOutOfRef,
+    MovedOutOfRef<'db>,
     NeedMut,
     NonExhaustiveLet,
     NoSuchField,
@@ -98,17 +98,17 @@ diagnostics![
     TraitImplMissingAssocItems,
     TraitImplOrphan,
     TraitImplRedundantAssocItems,
-    TypedHole,
-    TypeMismatch,
+    TypedHole<'db>,
+    TypeMismatch<'db>,
     UndeclaredLabel,
     UnimplementedBuiltinMacro,
     UnreachableLabel,
     UnresolvedAssocItem,
     UnresolvedExternCrate,
-    UnresolvedField,
+    UnresolvedField<'db>,
     UnresolvedImport,
     UnresolvedMacroCall,
-    UnresolvedMethodCall,
+    UnresolvedMethodCall<'db>,
     UnresolvedModule,
     UnresolvedIdent,
     UnusedMut,
@@ -130,9 +130,9 @@ pub struct BreakOutsideOfLoop {
 }

 #[derive(Debug)]
-pub struct TypedHole {
+pub struct TypedHole<'db> {
     pub expr: InFile<ExprOrPatPtr>,
-    pub expected: Type,
+    pub expected: Type<'db>,
 }

 #[derive(Debug)]
@@ -242,25 +242,25 @@ pub struct MismatchedTupleStructPatArgCount {
 }

 #[derive(Debug)]
-pub struct ExpectedFunction {
+pub struct ExpectedFunction<'db> {
     pub call: InFile<ExprOrPatPtr>,
-    pub found: Type,
+    pub found: Type<'db>,
 }

 #[derive(Debug)]
-pub struct UnresolvedField {
+pub struct UnresolvedField<'db> {
     pub expr: InFile<ExprOrPatPtr>,
-    pub receiver: Type,
+    pub receiver: Type<'db>,
     pub name: Name,
     pub method_with_same_name_exists: bool,
 }

 #[derive(Debug)]
-pub struct UnresolvedMethodCall {
+pub struct UnresolvedMethodCall<'db> {
     pub expr: InFile<ExprOrPatPtr>,
-    pub receiver: Type,
+    pub receiver: Type<'db>,
     pub name: Name,
-    pub field_with_same_name: Option<Type>,
+    pub field_with_same_name: Option<Type<'db>>,
     pub assoc_func_with_same_name: Option<Function>,
 }

@@ -329,10 +329,10 @@ pub struct NonExhaustiveLet {
 }

 #[derive(Debug)]
-pub struct TypeMismatch {
+pub struct TypeMismatch<'db> {
     pub expr_or_pat: InFile<ExprOrPatPtr>,
-    pub expected: Type,
-    pub actual: Type,
+    pub expected: Type<'db>,
+    pub actual: Type<'db>,
 }

 #[derive(Debug)]
@@ -352,8 +352,8 @@ pub struct UnusedVariable {
 }

 #[derive(Debug)]
-pub struct MovedOutOfRef {
-    pub ty: Type,
+pub struct MovedOutOfRef<'db> {
+    pub ty: Type<'db>,
     pub span: InFile<SyntaxNodePtr>,
 }

@@ -403,17 +403,17 @@ pub struct RemoveUnnecessaryElse {
 }

 #[derive(Debug)]
-pub struct CastToUnsized {
+pub struct CastToUnsized<'db> {
     pub expr: InFile<ExprOrPatPtr>,
-    pub cast_ty: Type,
+    pub cast_ty: Type<'db>,
 }

 #[derive(Debug)]
-pub struct InvalidCast {
+pub struct InvalidCast<'db> {
     pub expr: InFile<ExprOrPatPtr>,
     pub error: CastError,
-    pub expr_ty: Type,
-    pub cast_ty: Type,
+    pub expr_ty: Type<'db>,
+    pub cast_ty: Type<'db>,
 }

 #[derive(Debug)]
@@ -482,12 +482,12 @@ pub struct IncorrectGenericsOrder {
     pub expected_kind: GenericArgKind,
 }

-impl AnyDiagnostic {
+impl<'db> AnyDiagnostic<'db> {
     pub(crate) fn body_validation_diagnostic(
-        db: &dyn HirDatabase,
+        db: &'db dyn HirDatabase,
         diagnostic: BodyValidationDiagnostic,
         source_map: &hir_def::expr_store::BodySourceMap,
-    ) -> Option<AnyDiagnostic> {
+    ) -> Option<AnyDiagnostic<'db>> {
         match diagnostic {
             BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
                 let variant_data = variant.variant_data(db);
@@ -618,12 +618,12 @@ impl AnyDiagnostic {
     }

     pub(crate) fn inference_diagnostic(
-        db: &dyn HirDatabase,
+        db: &'db dyn HirDatabase,
         def: DefWithBodyId,
         d: &InferenceDiagnostic,
         source_map: &hir_def::expr_store::BodySourceMap,
         sig_map: &hir_def::expr_store::ExpressionStoreSourceMap,
-    ) -> Option<AnyDiagnostic> {
+    ) -> Option<AnyDiagnostic<'db>> {
         let expr_syntax = |expr| {
             source_map
                 .expr_syntax(expr)
@@ -819,7 +819,7 @@ impl AnyDiagnostic {
     fn path_diagnostic(
         diag: &PathLoweringDiagnostic,
         path: InFile<ast::Path>,
-    ) -> Option<AnyDiagnostic> {
+    ) -> Option<AnyDiagnostic<'db>> {
         Some(match *diag {
             PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
                 let segment = hir_segment_to_ast_segment(&path.value, segment)?;
@@ -912,8 +912,8 @@ impl AnyDiagnostic {
     pub(crate) fn ty_diagnostic(
         diag: &TyLoweringDiagnostic,
         source_map: &ExpressionStoreSourceMap,
-        db: &dyn HirDatabase,
-    ) -> Option<AnyDiagnostic> {
+        db: &'db dyn HirDatabase,
+    ) -> Option<AnyDiagnostic<'db>> {
         let Ok(source) = source_map.type_syntax(diag.source) else {
             stdx::never!("error on synthetic type syntax");
             return None;
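The `diagnostics!` hunks above extend the macro so each listed diagnostic may optionally carry a lifetime (`$($diag:ident $(<$lt:lifetime>)?,)*`), letting lifetime-parameterized diagnostics such as `TypedHole<'db>` sit in the same list as plain ones like `InactiveCode`. A self-contained sketch of the same optional-fragment technique, with illustrative names rather than the hir types:

// Standalone sketch of the macro technique used by `diagnostics!` above:
// an optional `<$lt:lifetime>` fragment lets one list mix items with and
// without a lifetime parameter.
#[derive(Debug)]
struct PlainDiag;
#[derive(Debug)]
struct TypedDiag<'db>(&'db str);

macro_rules! any_diagnostic {
    ($($diag:ident $(<$lt:lifetime>)?,)*) => {
        #[derive(Debug)]
        enum AnyDiagnostic<'db> {
            $($diag(Box<$diag $(<$lt>)?>),)*
        }

        $(
            impl<'db> From<$diag $(<$lt>)?> for AnyDiagnostic<'db> {
                fn from(d: $diag $(<$lt>)?) -> AnyDiagnostic<'db> {
                    AnyDiagnostic::$diag(Box::new(d))
                }
            }
        )*
    };
}

any_diagnostic![
    PlainDiag,
    TypedDiag<'db>,
];

fn main() {
    let s = String::from("expected `i32`");
    let d: AnyDiagnostic<'_> = TypedDiag(s.as_str()).into();
    println!("{d:?}");
    println!("{:?}", AnyDiagnostic::from(PlainDiag));
}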
@@ -431,7 +431,7 @@ impl HirDisplay for Variant {
     }
 }

-impl HirDisplay for Type {
+impl HirDisplay for Type<'_> {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         self.ty.hir_fmt(f)
     }
@@ -743,7 +743,7 @@ impl HirDisplay for Static {
     }
 }

-impl HirDisplay for TraitRef {
+impl HirDisplay for TraitRef<'_> {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         self.trait_ref.hir_fmt(f)
     }
@@ -225,7 +225,7 @@ impl HasSource for LocalSource {
     }
 }

-impl HasSource for Param {
+impl HasSource for Param<'_> {
     type Ast = Either<ast::SelfParam, ast::Param>;

     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
(File diff suppressed because it is too large.)
@@ -123,15 +123,15 @@ impl PathResolutionPerNs {
 }

 #[derive(Debug)]
-pub struct TypeInfo {
+pub struct TypeInfo<'db> {
     /// The original type of the expression or pattern.
-    pub original: Type,
+    pub original: Type<'db>,
     /// The adjusted type, if an adjustment happened.
-    pub adjusted: Option<Type>,
+    pub adjusted: Option<Type<'db>>,
 }

-impl TypeInfo {
-    pub fn original(self) -> Type {
+impl<'db> TypeInfo<'db> {
+    pub fn original(self) -> Type<'db> {
         self.original
     }

@@ -140,7 +140,7 @@ impl TypeInfo {
     }

     /// The adjusted type, or the original in case no adjustments occurred.
-    pub fn adjusted(self) -> Type {
+    pub fn adjusted(self) -> Type<'db> {
         self.adjusted.unwrap_or(self.original)
     }
 }
@@ -1534,7 +1534,7 @@ impl<'db> SemanticsImpl<'db> {
         Some(Label { parent, label_id })
     }

-    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type<'db>> {
         let analyze = self.analyze(ty.syntax())?;
         analyze.type_of_type(self.db, ty)
     }
@@ -1553,7 +1553,7 @@ impl<'db> SemanticsImpl<'db> {
         }
     }

-    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
+    pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment<'db>>> {
         let mutability = |m| match m {
             hir_ty::Mutability::Not => Mutability::Shared,
             hir_ty::Mutability::Mut => Mutability::Mut,
@@ -1596,13 +1596,13 @@ impl<'db> SemanticsImpl<'db> {
         })
     }

-    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo<'db>> {
         self.analyze(expr.syntax())?
             .type_of_expr(self.db, expr)
             .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
     }

-    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo<'db>> {
         self.analyze(pat.syntax())?
             .type_of_pat(self.db, pat)
             .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
@@ -1611,15 +1611,15 @@ impl<'db> SemanticsImpl<'db> {
     /// It also includes the changes that binding mode makes in the type. For example in
     /// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
     /// of this function is `&mut Option<T>`
-    pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
+    pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type<'db>> {
         self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
     }

-    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type<'db>> {
         self.analyze(param.syntax())?.type_of_self(self.db, param)
     }

-    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type<'db>; 1]> {
         self.analyze(pat.syntax())
             .and_then(|it| it.pattern_adjustments(self.db, pat))
             .unwrap_or_default()
@@ -1629,7 +1629,7 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
     }

-    pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable> {
+    pub fn resolve_expr_as_callable(&self, call: &ast::Expr) -> Option<Callable<'db>> {
         self.analyze(call.syntax())?.resolve_expr_as_callable(self.db, call)
     }

@@ -1641,7 +1641,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_method_call_fallback(
         &self,
         call: &ast::MethodCallExpr,
-    ) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
+    ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
         self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
     }

@@ -1649,10 +1649,10 @@ impl<'db> SemanticsImpl<'db> {
     // FIXME: better api for the trait environment
     pub fn resolve_trait_impl_method(
         &self,
-        env: Type,
+        env: Type<'db>,
         trait_: Trait,
         func: Function,
-        subst: impl IntoIterator<Item = Type>,
+        subst: impl IntoIterator<Item = Type<'db>>,
     ) -> Option<Function> {
         let mut substs = hir_ty::TyBuilder::subst_for_def(self.db, TraitId::from(trait_), None);
         for s in subst {
@@ -1691,7 +1691,10 @@ impl<'db> SemanticsImpl<'db> {

     // This does not resolve the method call to the correct trait impl!
     // We should probably fix that.
-    pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+    pub fn resolve_method_call_as_callable(
+        &self,
+        call: &ast::MethodCallExpr,
+    ) -> Option<Callable<'db>> {
         self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
     }

@@ -1702,14 +1705,15 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_field_fallback(
         &self,
         field: &ast::FieldExpr,
-    ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
+    ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
+    {
         self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
     }

     pub fn resolve_record_field(
         &self,
         field: &ast::RecordExprField,
-    ) -> Option<(Field, Option<Local>, Type)> {
+    ) -> Option<(Field, Option<Local>, Type<'db>)> {
         self.resolve_record_field_with_substitution(field)
             .map(|(field, local, ty, _)| (field, local, ty))
     }
@@ -1717,18 +1721,21 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_record_field_with_substitution(
         &self,
         field: &ast::RecordExprField,
-    ) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
+    ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
         self.analyze(field.syntax())?.resolve_record_field(self.db, field)
     }

-    pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
+    pub fn resolve_record_pat_field(
+        &self,
+        field: &ast::RecordPatField,
+    ) -> Option<(Field, Type<'db>)> {
         self.resolve_record_pat_field_with_subst(field).map(|(field, ty, _)| (field, ty))
     }

     pub fn resolve_record_pat_field_with_subst(
         &self,
         field: &ast::RecordPatField,
-    ) -> Option<(Field, Type, GenericSubstitution)> {
+    ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
         self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
     }

@@ -1801,7 +1808,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_path_with_subst(
         &self,
         path: &ast::Path,
-    ) -> Option<(PathResolution, Option<GenericSubstitution>)> {
+    ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
         self.analyze(path.syntax())?.resolve_path(self.db, path)
     }

@@ -1812,7 +1819,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_offset_of_field(
         &self,
         name_ref: &ast::NameRef,
-    ) -> Option<(Either<Variant, Field>, GenericSubstitution)> {
+    ) -> Option<(Either<Variant, Field>, GenericSubstitution<'db>)> {
         self.analyze_no_infer(name_ref.syntax())?.resolve_offset_of_field(self.db, name_ref)
     }

@@ -1834,13 +1841,19 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
     }

-    pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+    pub fn record_literal_missing_fields(
+        &self,
+        literal: &ast::RecordExpr,
+    ) -> Vec<(Field, Type<'db>)> {
         self.analyze(literal.syntax())
             .and_then(|it| it.record_literal_missing_fields(self.db, literal))
             .unwrap_or_default()
     }

-    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+    pub fn record_pattern_missing_fields(
+        &self,
+        pattern: &ast::RecordPat,
+    ) -> Vec<(Field, Type<'db>)> {
         self.analyze(pattern.syntax())
             .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
             .unwrap_or_default()
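For the `Semantics`/`SemanticsImpl` changes above, the practical effect is that results such as `TypeInfo<'db>` now borrow from the database rather than being lifetime-free. A hedged, standalone sketch (stand-in types, not the real rust-analyzer API) of what tying the result to `'db` buys:

// Sketch: the point of `'db` on return types like `Option<TypeInfo<'db>>` is
// that the result borrows from the database, not from the `&self` call or the
// AST node, so it can outlive the `Semantics` wrapper itself.
struct Db {
    names: Vec<String>,
}

struct Semantics<'db> {
    db: &'db Db,
}

struct TypeInfo<'db> {
    original: &'db str,
}

impl<'db> Semantics<'db> {
    // Returns `TypeInfo<'db>` (tied to the db), not a value tied to `&self`.
    fn type_of_expr(&self, idx: usize) -> Option<TypeInfo<'db>> {
        self.db.names.get(idx).map(|s| TypeInfo { original: s.as_str() })
    }
}

fn main() {
    let db = Db { names: vec!["i32".to_owned()] };
    let info = {
        let sema = Semantics { db: &db };
        sema.type_of_expr(0) // ok: the result borrows `db`, not `sema`
    };
    println!("{}", info.unwrap().original);
}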
@@ -257,7 +257,11 @@ impl<'db> SourceAnalyzer<'db> {
         infer.expr_adjustments.get(&expr_id).map(|v| &**v)
     }

-    pub(crate) fn type_of_type(&self, db: &'db dyn HirDatabase, ty: &ast::Type) -> Option<Type> {
+    pub(crate) fn type_of_type(
+        &self,
+        db: &'db dyn HirDatabase,
+        ty: &ast::Type,
+    ) -> Option<Type<'db>> {
         let type_ref = self.type_id(ty)?;
         let ty = TyLoweringContext::new(
             db,
@@ -277,7 +281,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         expr: &ast::Expr,
-    ) -> Option<(Type, Option<Type>)> {
+    ) -> Option<(Type<'db>, Option<Type<'db>>)> {
         let expr_id = self.expr_id(expr.clone())?;
         let infer = self.infer()?;
         let coerced = expr_id
@@ -293,7 +297,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         pat: &ast::Pat,
-    ) -> Option<(Type, Option<Type>)> {
+    ) -> Option<(Type<'db>, Option<Type<'db>>)> {
         let expr_or_pat_id = self.pat_id(pat)?;
         let infer = self.infer()?;
         let coerced = match expr_or_pat_id {
@@ -316,7 +320,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         pat: &ast::IdentPat,
-    ) -> Option<Type> {
+    ) -> Option<Type<'db>> {
         let binding_id = self.binding_id_of_pat(pat)?;
         let infer = self.infer()?;
         let ty = infer[binding_id].clone();
@@ -328,7 +332,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         _param: &ast::SelfParam,
-    ) -> Option<Type> {
+    ) -> Option<Type<'db>> {
         let binding = self.body()?.self_param?;
         let ty = self.infer()?[binding].clone();
         Some(Type::new_with_resolver(db, &self.resolver, ty))
@@ -353,7 +357,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         pat: &ast::Pat,
-    ) -> Option<SmallVec<[Type; 1]>> {
+    ) -> Option<SmallVec<[Type<'db>; 1]>> {
         let pat_id = self.pat_id(pat)?;
         let infer = self.infer()?;
         Some(
@@ -370,7 +374,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         call: &ast::MethodCallExpr,
-    ) -> Option<Callable> {
+    ) -> Option<Callable<'db>> {
         let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
         let (func, substs) = self.infer()?.method_resolution(expr_id)?;
         let ty = db.value_ty(func.into())?.substitute(Interner, &substs);
@@ -395,7 +399,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         call: &ast::MethodCallExpr,
-    ) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
+    ) -> Option<(Either<Function, Field>, Option<GenericSubstitution<'db>>)> {
         let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
         let inference_result = self.infer()?;
         match inference_result.method_resolution(expr_id) {
@@ -419,7 +423,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         call: &ast::Expr,
-    ) -> Option<Callable> {
+    ) -> Option<Callable<'db>> {
         let (orig, adjusted) = self.type_of_expr(db, &call.clone())?;
         adjusted.unwrap_or(orig).as_callable(db)
     }
@@ -440,7 +444,7 @@ impl<'db> SourceAnalyzer<'db> {
         field_expr: ExprId,
         infer: &InferenceResult,
         db: &'db dyn HirDatabase,
-    ) -> Option<GenericSubstitution> {
+    ) -> Option<GenericSubstitution<'db>> {
         let body = self.store()?;
         if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] {
             let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?;
@@ -457,7 +461,8 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         field: &ast::FieldExpr,
-    ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
+    ) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution<'db>>)>
+    {
         let (def, ..) = self.body_()?;
         let expr_id = self.expr_id(field.clone().into())?.as_expr()?;
         let inference_result = self.infer()?;
@@ -680,7 +685,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         field: &ast::RecordExprField,
-    ) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
+    ) -> Option<(Field, Option<Local>, Type<'db>, GenericSubstitution<'db>)> {
         let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
         let expr = ast::Expr::from(record_expr);
         let expr_id = self.store_sm()?.node_expr(InFile::new(self.file_id, &expr))?;
@@ -724,7 +729,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         field: &ast::RecordPatField,
-    ) -> Option<(Field, Type, GenericSubstitution)> {
+    ) -> Option<(Field, Type<'db>, GenericSubstitution<'db>)> {
         let field_name = field.field_name()?.as_name();
         let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
         let pat_id = self.pat_id(&record_pat.into())?;
@@ -779,7 +784,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         name_ref: &ast::NameRef,
-    ) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution)> {
+    ) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution<'db>)> {
         let offset_of_expr = ast::OffsetOfExpr::cast(name_ref.syntax().parent()?)?;
         let container = offset_of_expr.ty()?;
         let container = self.type_of_type(db, &container)?;
@@ -851,7 +856,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         path: &ast::Path,
-    ) -> Option<(PathResolution, Option<GenericSubstitution>)> {
+    ) -> Option<(PathResolution, Option<GenericSubstitution<'db>>)> {
         let parent = path.syntax().parent();
         let parent = || parent.clone();

@@ -1216,7 +1221,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         literal: &ast::RecordExpr,
-    ) -> Option<Vec<(Field, Type)>> {
+    ) -> Option<Vec<(Field, Type<'db>)>> {
         let body = self.store()?;
         let infer = self.infer()?;

@@ -1239,7 +1244,7 @@ impl<'db> SourceAnalyzer<'db> {
         &self,
         db: &'db dyn HirDatabase,
         pattern: &ast::RecordPat,
-    ) -> Option<Vec<(Field, Type)>> {
+    ) -> Option<Vec<(Field, Type<'db>)>> {
         let body = self.store()?;
         let infer = self.infer()?;

@@ -1258,7 +1263,7 @@ impl<'db> SourceAnalyzer<'db> {
         substs: &Substitution,
         variant: VariantId,
         missing_fields: Vec<LocalFieldId>,
-    ) -> Vec<(Field, Type)> {
+    ) -> Vec<(Field, Type<'db>)> {
         let field_types = db.field_types(variant);

         missing_fields
@@ -22,20 +22,20 @@ enum NewTypesKey {
 /// Helper enum to squash big number of alternative trees into `Many` variant as there is too many
 /// to take into account.
 #[derive(Debug)]
-enum AlternativeExprs {
+enum AlternativeExprs<'db> {
     /// There are few trees, so we keep track of them all
-    Few(FxHashSet<Expr>),
+    Few(FxHashSet<Expr<'db>>),
     /// There are too many trees to keep track of
     Many,
 }

-impl AlternativeExprs {
+impl<'db> AlternativeExprs<'db> {
     /// Construct alternative trees
     ///
     /// # Arguments
     /// `threshold` - threshold value for many trees (more than that is many)
     /// `exprs` - expressions iterator
-    fn new(threshold: usize, exprs: impl Iterator<Item = Expr>) -> AlternativeExprs {
+    fn new(threshold: usize, exprs: impl Iterator<Item = Expr<'db>>) -> AlternativeExprs<'db> {
         let mut it = AlternativeExprs::Few(Default::default());
         it.extend_with_threshold(threshold, exprs);
         it
@@ -45,7 +45,7 @@ impl AlternativeExprs {
     ///
     /// # Arguments
     /// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`)
-    fn exprs(&self, ty: &Type) -> Vec<Expr> {
+    fn exprs(&self, ty: &Type<'db>) -> Vec<Expr<'db>> {
         match self {
             AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(),
             AlternativeExprs::Many => vec![Expr::Many(ty.clone())],
@@ -57,7 +57,7 @@ impl AlternativeExprs {
     /// # Arguments
     /// `threshold` - threshold value for many trees (more than that is many)
     /// `exprs` - expressions iterator
-    fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr>) {
+    fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = Expr<'db>>) {
         match self {
             AlternativeExprs::Few(tts) => {
                 for it in exprs {
@@ -88,20 +88,20 @@ impl AlternativeExprs {
 /// Both of them are to speed up the term search by leaving out types / ScopeDefs that likely do
 /// not produce any new results.
 #[derive(Default, Debug)]
-struct LookupTable {
+struct LookupTable<'db> {
     /// All the `Expr`s in "value" produce the type of "key"
-    data: FxHashMap<Type, AlternativeExprs>,
+    data: FxHashMap<Type<'db>, AlternativeExprs<'db>>,
     /// New types reached since last query by the `NewTypesKey`
-    new_types: FxHashMap<NewTypesKey, Vec<Type>>,
+    new_types: FxHashMap<NewTypesKey, Vec<Type<'db>>>,
     /// Types queried but not present
-    types_wishlist: FxHashSet<Type>,
+    types_wishlist: FxHashSet<Type<'db>>,
     /// Threshold to squash trees to `Many`
     many_threshold: usize,
 }

-impl LookupTable {
+impl<'db> LookupTable<'db> {
     /// Initialize lookup table
-    fn new(many_threshold: usize, goal: Type) -> Self {
+    fn new(many_threshold: usize, goal: Type<'db>) -> Self {
         let mut res = Self { many_threshold, ..Default::default() };
         res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
         res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
@@ -110,7 +110,7 @@ impl LookupTable {
     }

     /// Find all `Expr`s that unify with the `ty`
-    fn find(&mut self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+    fn find(&mut self, db: &'db dyn HirDatabase, ty: &Type<'db>) -> Option<Vec<Expr<'db>>> {
         let res = self
             .data
             .iter()
@@ -135,7 +135,7 @@ impl LookupTable {
     ///
     /// For example if we have type `i32` in data and we query for `&i32` it map all the type
     /// trees we have for `i32` with `Expr::Reference` and returns them.
-    fn find_autoref(&mut self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
+    fn find_autoref(&mut self, db: &'db dyn HirDatabase, ty: &Type<'db>) -> Option<Vec<Expr<'db>>> {
         let res = self
             .data
             .iter()
@@ -174,7 +174,7 @@ impl LookupTable {
     /// Note that the types have to be the same, unification is not enough as unification is not
     /// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
     /// but they clearly do not unify themselves.
-    fn insert(&mut self, ty: Type, exprs: impl Iterator<Item = Expr>) {
+    fn insert(&mut self, ty: Type<'db>, exprs: impl Iterator<Item = Expr<'db>>) {
         match self.data.get_mut(&ty) {
             Some(it) => {
                 it.extend_with_threshold(self.many_threshold, exprs);
@@ -192,14 +192,14 @@ impl LookupTable {
     }

     /// Iterate all the reachable types
-    fn iter_types(&self) -> impl Iterator<Item = Type> + '_ {
+    fn iter_types(&self) -> impl Iterator<Item = Type<'db>> + '_ {
         self.data.keys().cloned()
     }

     /// Query new types reached since last query by key
     ///
     /// Create new key if you wish to query it to avoid conflicting with existing queries.
-    fn new_types(&mut self, key: NewTypesKey) -> Vec<Type> {
+    fn new_types(&mut self, key: NewTypesKey) -> Vec<Type<'db>> {
         match self.new_types.get_mut(&key) {
             Some(it) => std::mem::take(it),
             None => Vec::new(),
@@ -207,20 +207,20 @@ impl LookupTable {
     }

     /// Types queried but not found
-    fn types_wishlist(&mut self) -> &FxHashSet<Type> {
+    fn types_wishlist(&mut self) -> &FxHashSet<Type<'db>> {
         &self.types_wishlist
     }
 }

 /// Context for the `term_search` function
 #[derive(Debug)]
-pub struct TermSearchCtx<'a, DB: HirDatabase> {
+pub struct TermSearchCtx<'db, DB: HirDatabase> {
     /// Semantics for the program
-    pub sema: &'a Semantics<'a, DB>,
+    pub sema: &'db Semantics<'db, DB>,
     /// Semantic scope, captures context for the term search
-    pub scope: &'a SemanticsScope<'a>,
+    pub scope: &'db SemanticsScope<'db>,
     /// Target / expected output type
-    pub goal: Type,
+    pub goal: Type<'db>,
     /// Configuration for term search
     pub config: TermSearchConfig,
 }
@@ -263,7 +263,7 @@ impl Default for TermSearchConfig {
 /// Note that there are usually more ways we can get to the `goal` type but some are discarded to
 /// reduce the memory consumption. It is also unlikely anyone is willing ti browse through
 /// thousands of possible responses so we currently take first 10 from every tactic.
-pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
+pub fn term_search<'db, DB: HirDatabase>(ctx: &'db TermSearchCtx<'db, DB>) -> Vec<Expr<'db>> {
     let module = ctx.scope.module();
     let mut defs = FxHashSet::default();
     defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module)));
@@ -285,7 +285,7 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
     };

     // Try trivial tactic first, also populates lookup table
-    let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
+    let mut solutions: Vec<Expr<'db>> = tactics::trivial(ctx, &defs, &mut lookup).collect();
     // Use well known types tactic before iterations as it does not depend on other tactics
     solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
     solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));
@@ -59,7 +59,7 @@ fn mod_item_path_str(
 /// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in
 /// scope.
 #[derive(Debug, Clone, Eq, Hash, PartialEq)]
-pub enum Expr {
+pub enum Expr<'db> {
     /// Constant
     Const(Const),
     /// Static variable
@@ -69,26 +69,31 @@ pub enum Expr {
     /// Constant generic parameter
     ConstParam(ConstParam),
     /// Well known type (such as `true` for bool)
-    FamousType { ty: Type, value: &'static str },
+    FamousType { ty: Type<'db>, value: &'static str },
     /// Function call (does not take self param)
-    Function { func: Function, generics: Vec<Type>, params: Vec<Expr> },
+    Function { func: Function, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
     /// Method call (has self param)
-    Method { func: Function, generics: Vec<Type>, target: Box<Expr>, params: Vec<Expr> },
+    Method {
+        func: Function,
+        generics: Vec<Type<'db>>,
+        target: Box<Expr<'db>>,
+        params: Vec<Expr<'db>>,
+    },
     /// Enum variant construction
-    Variant { variant: Variant, generics: Vec<Type>, params: Vec<Expr> },
+    Variant { variant: Variant, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
     /// Struct construction
-    Struct { strukt: Struct, generics: Vec<Type>, params: Vec<Expr> },
+    Struct { strukt: Struct, generics: Vec<Type<'db>>, params: Vec<Expr<'db>> },
     /// Tuple construction
-    Tuple { ty: Type, params: Vec<Expr> },
+    Tuple { ty: Type<'db>, params: Vec<Expr<'db>> },
     /// Struct field access
-    Field { expr: Box<Expr>, field: Field },
+    Field { expr: Box<Expr<'db>>, field: Field },
     /// Passing type as reference (with `&`)
-    Reference(Box<Expr>),
+    Reference(Box<Expr<'db>>),
     /// Indicates possibility of many different options that all evaluate to `ty`
-    Many(Type),
+    Many(Type<'db>),
 }

-impl Expr {
+impl<'db> Expr<'db> {
     /// Generate source code for type tree.
     ///
     /// Note that trait imports are not added to generated code.
@@ -96,8 +101,8 @@ impl Expr {
     /// by `traits_used` method are also imported.
     pub fn gen_source_code(
         &self,
-        sema_scope: &SemanticsScope<'_>,
-        many_formatter: &mut dyn FnMut(&Type) -> String,
+        sema_scope: &SemanticsScope<'db>,
+        many_formatter: &mut dyn FnMut(&Type<'db>) -> String,
         cfg: ImportPathConfig,
         display_target: DisplayTarget,
     ) -> Result<String, DisplaySourceCodeError> {
@@ -298,7 +303,7 @@ impl Expr {
     /// Get type of the type tree.
     ///
     /// Same as getting the type of root node
-    pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+    pub fn ty(&self, db: &'db dyn HirDatabase) -> Type<'db> {
         match self {
             Expr::Const(it) => it.ty(db),
             Expr::Static(it) => it.ty(db),
@@ -40,11 +40,11 @@ use super::{LookupTable, NewTypesKey, TermSearchCtx};
 ///
 /// _Note that there is no use of calling this tactic in every iteration as the output does not
 /// depend on the current state of `lookup`_
-pub(super) fn trivial<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn trivial<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+    lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     defs.iter().filter_map(|def| {
         let expr = match def {
@@ -104,11 +104,11 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
 ///
 /// _Note that there is no use of calling this tactic in every iteration as the output does not
 /// depend on the current state of `lookup`_
-pub(super) fn assoc_const<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn assoc_const<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+    lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();

@@ -152,12 +152,12 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>(
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
 /// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn data_constructor<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn data_constructor<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
+    lookup: &'lt mut LookupTable<'db>,
     should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
     lookup
@@ -199,14 +199,14 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
             let generics: Vec<_> = ty.type_arguments().collect();

             // Early exit if some param cannot be filled from lookup
-            let param_exprs: Vec<Vec<Expr>> = fields
+            let param_exprs: Vec<Vec<Expr<'_>>> = fields
                 .into_iter()
                 .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
                 .collect::<Option<_>>()?;

             // Note that we need special case for 0 param constructors because of multi cartesian
             // product
-            let exprs: Vec<Expr> = if param_exprs.is_empty() {
+            let exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
                 vec![Expr::Struct { strukt, generics, params: Vec::new() }]
             } else {
                 param_exprs
@@ -247,7 +247,7 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
                 .into_iter()
                 .filter_map(|variant| {
                     // Early exit if some param cannot be filled from lookup
-                    let param_exprs: Vec<Vec<Expr>> = variant
+                    let param_exprs: Vec<Vec<Expr<'_>>> = variant
                         .fields(db)
                         .into_iter()
                         .map(|field| {
@@ -257,7 +257,7 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(

                     // Note that we need special case for 0 param constructors because of multi cartesian
                     // product
-                    let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
+                    let variant_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
                         vec![Expr::Variant {
                             variant,
                             generics: generics.clone(),
@@ -301,12 +301,12 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
 /// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn free_function<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn free_function<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
+    lookup: &'lt mut LookupTable<'db>,
     should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
     defs.iter()
@@ -375,7 +375,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
                 }

                 // Early exit if some param cannot be filled from lookup
-                let param_exprs: Vec<Vec<Expr>> = it
+                let param_exprs: Vec<Vec<Expr<'_>>> = it
                     .params_without_self_with_args(db, generics.iter().cloned())
                     .into_iter()
                     .map(|field| {
@@ -389,7 +389,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(

                 // Note that we need special case for 0 param constructors because of multi cartesian
                 // product
-                let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+                let fn_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
                     vec![Expr::Function { func: *it, generics, params: Vec::new() }]
                 } else {
                     param_exprs
@@ -432,12 +432,12 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
 /// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn impl_method<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn impl_method<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
+    lookup: &'lt mut LookupTable<'db>,
     should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
     lookup
@@ -507,14 +507,14 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
             let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");

             // Early exit if some param cannot be filled from lookup
-            let param_exprs: Vec<Vec<Expr>> = it
+            let param_exprs: Vec<Vec<Expr<'_>>> = it
                 .params_without_self_with_args(db, ty.type_arguments())
                 .into_iter()
                 .map(|field| lookup.find_autoref(db, field.ty()))
                 .collect::<Option<_>>()?;

             let generics: Vec<_> = ty.type_arguments().collect();
-            let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
+            let fn_exprs: Vec<Expr<'_>> = std::iter::once(target_type_exprs)
                 .chain(param_exprs)
                 .multi_cartesian_product()
                 .map(|params| {
@@ -547,12 +547,12 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
 /// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn struct_projection<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn struct_projection<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
+    lookup: &'lt mut LookupTable<'db>,
     should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
     lookup
@@ -589,11 +589,11 @@ pub(super) fn struct_projection<'a, DB: HirDatabase>(
 /// * `ctx` - Context for the term search
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
-pub(super) fn famous_types<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn famous_types<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
-) -> impl Iterator<Item = Expr> + 'a {
+    lookup: &'lt mut LookupTable<'db>,
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
     [
@@ -620,12 +620,12 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
 /// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn impl_static_method<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn impl_static_method<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
+    lookup: &'lt mut LookupTable<'db>,
     should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();
     lookup
@@ -683,7 +683,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
             }

             // Early exit if some param cannot be filled from lookup
-            let param_exprs: Vec<Vec<Expr>> = it
+            let param_exprs: Vec<Vec<Expr<'_>>> = it
                 .params_without_self_with_args(db, ty.type_arguments())
                 .into_iter()
                 .map(|field| lookup.find_autoref(db, field.ty()))
@@ -692,7 +692,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
             // Note that we need special case for 0 param constructors because of multi cartesian
             // product
             let generics = ty.type_arguments().collect();
-            let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+            let fn_exprs: Vec<Expr<'_>> = if param_exprs.is_empty() {
                 vec![Expr::Function { func: it, generics, params: Vec::new() }]
             } else {
                 param_exprs
@@ -722,12 +722,12 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
 /// * `defs` - Set of items in scope at term search target location
 /// * `lookup` - Lookup table for types
 /// * `should_continue` - Function that indicates when to stop iterating
-pub(super) fn make_tuple<'a, DB: HirDatabase>(
-    ctx: &'a TermSearchCtx<'a, DB>,
+pub(super) fn make_tuple<'a, 'lt, 'db, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'db, DB>,
     _defs: &'a FxHashSet<ScopeDef>,
-    lookup: &'a mut LookupTable,
+    lookup: &'lt mut LookupTable<'db>,
     should_continue: &'a dyn std::ops::Fn() -> bool,
-) -> impl Iterator<Item = Expr> + 'a {
+) -> impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB> {
     let db = ctx.sema.db;
     let module = ctx.scope.module();

@@ -749,15 +749,15 @@ pub(super) fn make_tuple<'a, DB: HirDatabase>(
             }

             // Early exit if some param cannot be filled from lookup
-            let param_exprs: Vec<Vec<Expr>> =
+            let param_exprs: Vec<Vec<Expr<'db>>> =
                 ty.type_arguments().map(|field| lookup.find(db, &field)).collect::<Option<_>>()?;

-            let exprs: Vec<Expr> = param_exprs
+            let exprs: Vec<Expr<'db>> = param_exprs
                 .into_iter()
                 .multi_cartesian_product()
                 .filter(|_| should_continue())
                 .map(|params| {
-                    let tys: Vec<Type> = params.iter().map(|it| it.ty(db)).collect();
+                    let tys: Vec<Type<'_>> = params.iter().map(|it| it.ty(db)).collect();
                     let tuple_ty = Type::new_tuple(module.krate().into(), &tys);

                     let expr = Expr::Tuple { ty: tuple_ty.clone(), params };
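The tactics above switch their return type from `impl Iterator<Item = Expr> + 'a` to `impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB>`, i.e. they now use a precise-capturing `use<..>` list so the opaque iterator explicitly captures the named lifetimes and the `DB` type parameter. A small sketch of the `use<..>` syntax on an unrelated toy function (assumes a toolchain with precise capturing, Rust 1.82+; names are illustrative):

// Sketch of return-position `impl Trait` with an explicit capture list.
// The `use<'a, 'b>` spells out which generics the opaque type may capture,
// the way `impl Iterator<Item = Expr<'db>> + use<'a, 'db, 'lt, DB>` does above.
fn filter_even<'a, 'b>(
    xs: &'a [i32],
    threshold: &'b i32,
) -> impl Iterator<Item = i32> + use<'a, 'b> {
    let t = *threshold; // copy so the iterator only needs to borrow `xs`
    xs.iter().copied().filter(move |&x| x % 2 == 0 && x > t)
}

fn main() {
    let xs = [1, 2, 3, 4, 5, 6];
    let evens: Vec<i32> = filter_even(&xs, &3).collect();
    println!("{evens:?}"); // [4, 6]
}

In the tactics, the explicit list documents that the returned iterator may borrow for `'a`, `'lt`, and `'db`, which is what lets it yield `Expr<'db>` values derived from the `LookupTable<'db>` borrow.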
@@ -95,7 +95,7 @@ impl<'a> AssistContext<'a> {
         }
     }

-    pub(crate) fn db(&self) -> &RootDatabase {
+    pub(crate) fn db(&self) -> &'a RootDatabase {
         self.sema.db
     }

@@ -192,7 +192,7 @@ fn add_missing_impl_members_inner(
 fn try_gen_trait_body(
     ctx: &AssistContext<'_>,
     func: &ast::Fn,
-    trait_ref: hir::TraitRef,
+    trait_ref: hir::TraitRef<'_>,
     impl_def: &ast::Impl,
     edition: Edition,
 ) -> Option<()> {
@@ -413,8 +413,8 @@ impl ExtendedEnum {
     fn enum_(
         db: &RootDatabase,
         enum_: hir::Enum,
-        enum_ty: &hir::Type,
-        self_ty: Option<&hir::Type>,
+        enum_ty: &hir::Type<'_>,
+        self_ty: Option<&hir::Type<'_>>,
     ) -> Self {
         ExtendedEnum::Enum {
             enum_,
@@ -448,7 +448,7 @@ impl ExtendedEnum {
 fn resolve_enum_def(
     sema: &Semantics<'_, RootDatabase>,
     expr: &ast::Expr,
-    self_ty: Option<&hir::Type>,
+    self_ty: Option<&hir::Type<'_>>,
 ) -> Option<ExtendedEnum> {
     sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
         Some(Adt::Enum(e)) => Some(ExtendedEnum::enum_(sema.db, e, &ty, self_ty)),
@@ -459,7 +459,7 @@ fn resolve_enum_def(
 fn resolve_tuple_of_enum_def(
     sema: &Semantics<'_, RootDatabase>,
     expr: &ast::Expr,
-    self_ty: Option<&hir::Type>,
+    self_ty: Option<&hir::Type<'_>>,
 ) -> Option<Vec<ExtendedEnum>> {
     sema.type_of_expr(expr)?
         .adjusted()
@@ -483,7 +483,7 @@ fn resolve_tuple_of_enum_def(
 fn resolve_array_of_enum_def(
     sema: &Semantics<'_, RootDatabase>,
     expr: &ast::Expr,
-    self_ty: Option<&hir::Type>,
+    self_ty: Option<&hir::Type<'_>>,
 ) -> Option<(ExtendedEnum, usize)> {
     sema.type_of_expr(expr)?.adjusted().as_array(sema.db).and_then(|(ty, len)| {
         ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
@@ -164,9 +164,9 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     Some(())
 }

-pub(super) fn find_importable_node(
-    ctx: &AssistContext<'_>,
-) -> Option<(ImportAssets, SyntaxNode, Option<Type>)> {
+pub(super) fn find_importable_node<'a: 'db, 'db>(
+    ctx: &'a AssistContext<'db>,
+) -> Option<(ImportAssets<'db>, SyntaxNode, Option<Type<'db>>)> {
     // Deduplicate this with the `expected_type_and_name` logic for completions
     let expected = |expr_or_pat: Either<ast::Expr, ast::Pat>| match expr_or_pat {
         Either::Left(expr) => {
@@ -226,7 +226,7 @@ pub(super) fn find_importable_node(
     }
 }

-fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
+fn group_label(import_candidate: &ImportCandidate<'_>) -> GroupLabel {
     let name = match import_candidate {
         ImportCandidate::Path(candidate) => format!("Import {}", candidate.name.text()),
         ImportCandidate::TraitAssocItem(candidate) => {
@@ -244,7 +244,7 @@ fn group_label(import_candidate: &ImportCandidate) -> GroupLabel {
 pub(crate) fn relevance_score(
     ctx: &AssistContext<'_>,
     import: &LocatedImport,
-    expected: Option<&Type>,
+    expected: Option<&Type<'_>>,
     current_module: Option<&Module>,
 ) -> i32 {
     let mut score = 0;
@@ -309,23 +309,23 @@ fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option<Fu
 }

 #[derive(Debug)]
-struct Function {
+struct Function<'db> {
     name: ast::NameRef,
     self_param: Option<ast::SelfParam>,
-    params: Vec<Param>,
-    control_flow: ControlFlow,
-    ret_ty: RetType,
+    params: Vec<Param<'db>>,
+    control_flow: ControlFlow<'db>,
+    ret_ty: RetType<'db>,
     body: FunctionBody,
     outliving_locals: Vec<OutlivedLocal>,
     /// Whether at least one of the container's tail expr is contained in the range we're extracting.
     contains_tail_expr: bool,
-    mods: ContainerInfo,
+    mods: ContainerInfo<'db>,
 }

 #[derive(Debug)]
-struct Param {
+struct Param<'db> {
     var: Local,
-    ty: hir::Type,
+    ty: hir::Type<'db>,
     move_local: bool,
     requires_mut: bool,
     is_copy: bool,
@@ -340,10 +340,10 @@ enum ParamKind {
 }

 #[derive(Debug)]
-enum FunType {
+enum FunType<'db> {
     Unit,
-    Single(hir::Type),
-    Tuple(Vec<hir::Type>),
+    Single(hir::Type<'db>),
+    Tuple(Vec<hir::Type<'db>>),
 }

 /// Where to put extracted function definition
@@ -358,19 +358,19 @@ enum Anchor {
 // FIXME: ControlFlow and ContainerInfo both track some function modifiers, feels like these two should
 // probably be merged somehow.
 #[derive(Debug)]
-struct ControlFlow {
-    kind: Option<FlowKind>,
+struct ControlFlow<'db> {
+    kind: Option<FlowKind<'db>>,
     is_async: bool,
     is_unsafe: bool,
 }

 /// The thing whose expression we are extracting from. Can be a function, const, static, const arg, ...
 #[derive(Clone, Debug)]
-struct ContainerInfo {
+struct ContainerInfo<'db> {
     is_const: bool,
     parent_loop: Option<SyntaxNode>,
     /// The function's return type, const's type etc.
-    ret_type: Option<hir::Type>,
+    ret_type: Option<hir::Type<'db>>,
     generic_param_lists: Vec<ast::GenericParamList>,
     where_clauses: Vec<ast::WhereClause>,
     edition: Edition,
@@ -389,11 +389,11 @@ struct ContainerInfo {
 /// }
 /// ```
 #[derive(Debug, Clone)]
-enum FlowKind {
+enum FlowKind<'db> {
     /// Return with value (`return $expr;`)
     Return(Option<ast::Expr>),
     Try {
-        kind: TryKind,
+        kind: TryKind<'db>,
     },
     /// Break with label and value (`break 'label $expr;`)
     Break(Option<ast::Lifetime>, Option<ast::Expr>),
@@ -402,18 +402,18 @@ enum FlowKind {
 }

 #[derive(Debug, Clone)]
-enum TryKind {
+enum TryKind<'db> {
     Option,
-    Result { ty: hir::Type },
+    Result { ty: hir::Type<'db> },
 }

 #[derive(Debug)]
-enum RetType {
-    Expr(hir::Type),
+enum RetType<'db> {
+    Expr(hir::Type<'db>),
     Stmt,
 }

-impl RetType {
+impl RetType<'_> {
     fn is_unit(&self) -> bool {
         match self {
             RetType::Expr(ty) => ty.is_unit(),
@@ -456,8 +456,8 @@ impl LocalUsages {
     }
 }

-impl Function {
-    fn return_type(&self, ctx: &AssistContext<'_>) -> FunType {
+impl<'db> Function<'db> {
+    fn return_type(&self, ctx: &AssistContext<'db>) -> FunType<'db> {
         match &self.ret_ty {
             RetType::Expr(ty) if ty.is_unit() => FunType::Unit,
             RetType::Expr(ty) => FunType::Single(ty.clone()),
@@ -487,7 +487,7 @@ impl ParamKind {
     }
 }

-impl Param {
+impl<'db> Param<'db> {
     fn kind(&self) -> ParamKind {
         match (self.move_local, self.requires_mut, self.is_copy) {
             (false, true, _) => ParamKind::MutRef,
@@ -497,7 +497,7 @@ impl Param {
         }
     }

-    fn to_arg(&self, ctx: &AssistContext<'_>, edition: Edition) -> ast::Expr {
+    fn to_arg(&self, ctx: &AssistContext<'db>, edition: Edition) -> ast::Expr {
         let var = path_expr_from_local(ctx, self.var, edition);
         match self.kind() {
             ParamKind::Value | ParamKind::MutValue => var,
@@ -532,8 +532,12 @@ impl Param {
     }
 }

-impl TryKind {
-    fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>, edition: Edition) -> Option<TryKind> {
+impl<'db> TryKind<'db> {
+    fn of_ty(
+        ty: hir::Type<'db>,
+        ctx: &AssistContext<'db>,
+        edition: Edition,
+    ) -> Option<TryKind<'db>> {
         if ty.is_unknown() {
             // We favour Result for `expr?`
             return Some(TryKind::Result { ty });
@@ -551,7 +555,7 @@ impl TryKind {
     }
 }

-impl FlowKind {
+impl<'db> FlowKind<'db> {
     fn make_result_handler(&self, expr: Option<ast::Expr>) -> ast::Expr {
         match self {
             FlowKind::Return(_) => make::expr_return(expr),
@@ -567,7 +571,7 @@ impl FlowKind {
         }
     }

-    fn expr_ty(&self, ctx: &AssistContext<'_>) -> Option<hir::Type> {
+    fn expr_ty(&self, ctx: &AssistContext<'db>) -> Option<hir::Type<'db>> {
         match self {
             FlowKind::Return(Some(expr)) | FlowKind::Break(_, Some(expr)) => {
                 ctx.sema.type_of_expr(expr).map(TypeInfo::adjusted)
@@ -876,11 +880,11 @@ impl FunctionBody {
         (res, self_param)
     }

-    fn analyze_container(
+    fn analyze_container<'db>(
         &self,
-        sema: &Semantics<'_, RootDatabase>,
+        sema: &Semantics<'db, RootDatabase>,
         edition: Edition,
-    ) -> Option<(ContainerInfo, bool)> {
+    ) -> Option<(ContainerInfo<'db>, bool)> {
         let mut ancestors = self.parent()?.ancestors();
         let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
         let mut parent_loop = None;
@@ -985,7 +989,7 @@ impl FunctionBody {
         ))
     }

-    fn return_ty(&self, ctx: &AssistContext<'_>) -> Option<RetType> {
+    fn return_ty<'db>(&self, ctx: &AssistContext<'db>) -> Option<RetType<'db>> {
         match self.tail_expr() {
             Some(expr) => ctx.sema.type_of_expr(&expr).map(TypeInfo::original).map(RetType::Expr),
             None => Some(RetType::Stmt),
@@ -1006,11 +1010,11 @@ impl FunctionBody {
     }

     /// Analyses the function body for external control flow.
-    fn external_control_flow(
+    fn external_control_flow<'db>(
         &self,
-        ctx: &AssistContext<'_>,
-        container_info: &ContainerInfo,
-    ) -> Option<ControlFlow> {
+        ctx: &AssistContext<'db>,
+        container_info: &ContainerInfo<'db>,
+    ) -> Option<ControlFlow<'db>> {
         let mut ret_expr = None;
         let mut try_expr = None;
         let mut break_expr = None;
@@ -1096,12 +1100,12 @@ impl FunctionBody {
     /// find variables that should be extracted as params
     ///
     /// Computes additional info that affects param type and mutability
-    fn extracted_function_params(
+    fn extracted_function_params<'db>(
         &self,
-        ctx: &AssistContext<'_>,
-        container_info: &ContainerInfo,
+        ctx: &AssistContext<'db>,
+        container_info: &ContainerInfo<'db>,
         locals: FxIndexSet<Local>,
-    ) -> Vec<Param> {
+    ) -> Vec<Param<'db>> {
         locals
             .into_iter()
             .sorted()
@@ -1449,7 +1453,7 @@ fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) {
     }
 }

-fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> SyntaxNode {
+fn make_call(ctx: &AssistContext<'_>, fun: &Function<'_>, indent: IndentLevel) -> SyntaxNode {
     let ret_ty = fun.return_type(ctx);

     let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition)));
@@ -1508,17 +1512,17 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> Sy
     }
 }

-enum FlowHandler {
+enum FlowHandler<'db> {
     None,
-    Try { kind: TryKind },
-    If { action: FlowKind },
-    IfOption { action: FlowKind },
-    MatchOption { none: FlowKind },
-    MatchResult { err: FlowKind },
+    Try { kind: TryKind<'db> },
+    If { action: FlowKind<'db> },
+    IfOption { action: FlowKind<'db> },
+    MatchOption { none: FlowKind<'db> },
+    MatchResult { err: FlowKind<'db> },
 }

-impl FlowHandler {
-    fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler {
+impl<'db> FlowHandler<'db> {
+    fn from_ret_ty(fun: &Function<'db>, ret_ty: &FunType<'db>) -> FlowHandler<'db> {
         if fun.contains_tail_expr {
             return FlowHandler::None;
         }
@@ -1628,7 +1632,7 @@ fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local, edition: Edition) -
 fn format_function(
     ctx: &AssistContext<'_>,
     module: hir::Module,
-    fun: &Function,
+    fun: &Function<'_>,
     old_indent: IndentLevel,
 ) -> ast::Fn {
     let fun_name = make::name(&fun.name.text());
@@ -1654,7 +1658,7 @@ fn format_function(

 fn make_generic_params_and_where_clause(
     ctx: &AssistContext<'_>,
-    fun: &Function,
+    fun: &Function<'_>,
 ) -> (Option<ast::GenericParamList>, Option<ast::WhereClause>) {
     let used_type_params = fun.type_params(ctx);

@@ -1666,7 +1670,7 @@ fn make_generic_params_and_where_clause(

 fn make_generic_param_list(
     ctx: &AssistContext<'_>,
-    fun: &Function,
+    fun: &Function<'_>,
     used_type_params: &[TypeParam],
|
||||
) -> Option<ast::GenericParamList> {
|
||||
let mut generic_params = fun
|
||||
|
|
@ -1703,7 +1707,7 @@ fn param_is_required(
|
|||
|
||||
fn make_where_clause(
|
||||
ctx: &AssistContext<'_>,
|
||||
fun: &Function,
|
||||
fun: &Function<'_>,
|
||||
used_type_params: &[TypeParam],
|
||||
) -> Option<ast::WhereClause> {
|
||||
let mut predicates = fun
|
||||
|
|
@ -1743,9 +1747,9 @@ fn resolved_type_param(ctx: &AssistContext<'_>, pred: &ast::WherePred) -> Option
|
|||
}
|
||||
}
|
||||
|
||||
impl Function {
|
||||
impl<'db> Function<'db> {
|
||||
/// Collect all the `TypeParam`s used in the `body` and `params`.
|
||||
fn type_params(&self, ctx: &AssistContext<'_>) -> Vec<TypeParam> {
|
||||
fn type_params(&self, ctx: &AssistContext<'db>) -> Vec<TypeParam> {
|
||||
let type_params_in_descendant_paths =
|
||||
self.body.descendant_paths().filter_map(|it| match ctx.sema.resolve_path(&it) {
|
||||
Some(PathResolution::TypeParam(type_param)) => Some(type_param),
|
||||
|
|
@ -1808,8 +1812,8 @@ impl Function {
|
|||
}
|
||||
}
|
||||
|
||||
impl FunType {
|
||||
fn make_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
|
||||
impl<'db> FunType<'db> {
|
||||
fn make_ty(&self, ctx: &AssistContext<'db>, module: hir::Module) -> ast::Type {
|
||||
match self {
|
||||
FunType::Unit => make::ty_unit(),
|
||||
FunType::Single(ty) => make_ty(ty, ctx, module),
|
||||
|
|
@ -1831,7 +1835,11 @@ impl FunType {
|
|||
}
|
||||
}
|
||||
|
||||
fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -> ast::BlockExpr {
|
||||
fn make_body(
|
||||
ctx: &AssistContext<'_>,
|
||||
old_indent: IndentLevel,
|
||||
fun: &Function<'_>,
|
||||
) -> ast::BlockExpr {
|
||||
let ret_ty = fun.return_type(ctx);
|
||||
let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
|
||||
|
||||
|
|
@ -2009,19 +2017,19 @@ fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr
|
|||
make::hacky_block_expr(elements, Some(tail_expr))
|
||||
}
|
||||
|
||||
fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String {
|
||||
fn format_type(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> String {
|
||||
ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned())
|
||||
}
|
||||
|
||||
fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
|
||||
fn make_ty(ty: &hir::Type<'_>, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
|
||||
let ty_str = format_type(ty, ctx, module);
|
||||
make::ty(&ty_str)
|
||||
}
|
||||
|
||||
fn rewrite_body_segment(
|
||||
ctx: &AssistContext<'_>,
|
||||
params: &[Param],
|
||||
handler: &FlowHandler,
|
||||
params: &[Param<'_>],
|
||||
handler: &FlowHandler<'_>,
|
||||
syntax: &SyntaxNode,
|
||||
) -> SyntaxNode {
|
||||
let syntax = fix_param_usages(ctx, params, syntax);
|
||||
|
|
@ -2030,8 +2038,12 @@ fn rewrite_body_segment(
|
|||
}
|
||||
|
||||
/// change all usages to account for added `&`/`&mut` for some params
|
||||
fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode {
|
||||
let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new();
|
||||
fn fix_param_usages(
|
||||
ctx: &AssistContext<'_>,
|
||||
params: &[Param<'_>],
|
||||
syntax: &SyntaxNode,
|
||||
) -> SyntaxNode {
|
||||
let mut usages_for_param: Vec<(&Param<'_>, Vec<ast::Expr>)> = Vec::new();
|
||||
|
||||
let tm = TreeMutator::new(syntax);
|
||||
|
||||
|
|
@ -2085,7 +2097,7 @@ fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNo
|
|||
res
|
||||
}
|
||||
|
||||
fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
|
||||
fn update_external_control_flow(handler: &FlowHandler<'_>, syntax: &SyntaxNode) {
|
||||
let mut nested_loop = None;
|
||||
let mut nested_scope = None;
|
||||
for event in syntax.preorder() {
|
||||
|
|
@ -2146,7 +2158,10 @@ fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
|
|||
}
|
||||
}
|
||||
|
||||
fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
|
||||
fn make_rewritten_flow(
|
||||
handler: &FlowHandler<'_>,
|
||||
arg_expr: Option<ast::Expr>,
|
||||
) -> Option<ast::Expr> {
|
||||
let value = match handler {
|
||||
FlowHandler::None | FlowHandler::Try { .. } => return None,
|
||||
FlowHandler::If { .. } => make::expr_call(
|
||||
@ -307,7 +307,7 @@ impl FunctionBuilder {
|
|||
ctx: &AssistContext<'_>,
|
||||
call: &ast::MethodCallExpr,
|
||||
name: &ast::NameRef,
|
||||
receiver_ty: Type,
|
||||
receiver_ty: Type<'_>,
|
||||
target_module: Module,
|
||||
target: GeneratedFunctionTarget,
|
||||
) -> Option<Self> {
|
||||
@ -283,11 +283,11 @@ impl CallInfo {
|
|||
}
|
||||
}
|
||||
|
||||
fn get_fn_params(
|
||||
db: &dyn HirDatabase,
|
||||
fn get_fn_params<'db>(
|
||||
db: &'db dyn HirDatabase,
|
||||
function: hir::Function,
|
||||
param_list: &ast::ParamList,
|
||||
) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param)>> {
|
||||
) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param<'db>)>> {
|
||||
let mut assoc_fn_params = function.assoc_fn_params(db).into_iter();
|
||||
|
||||
let mut params = Vec::new();
|
||||
|
|
@ -316,7 +316,7 @@ fn inline(
|
|||
function_def_file_id: EditionedFileId,
|
||||
function: hir::Function,
|
||||
fn_body: &ast::BlockExpr,
|
||||
params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
|
||||
params: &[(ast::Pat, Option<ast::Type>, hir::Param<'_>)],
|
||||
CallInfo { node, arguments, generic_arg_list, krate }: &CallInfo,
|
||||
) -> ast::Expr {
|
||||
let file_id = sema.hir_file_for(fn_body.syntax());
|
||||
@ -58,7 +58,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
|
|||
|
||||
fn validate_type_recursively(
|
||||
ctx: &AssistContext<'_>,
|
||||
ty_hir: Option<&hir::Type>,
|
||||
ty_hir: Option<&hir::Type<'_>>,
|
||||
refed: bool,
|
||||
fuel: i32,
|
||||
) -> Option<()> {
|
||||
@ -105,7 +105,7 @@ fn contains_placeholder(a: &ast::MatchArm) -> bool {
|
|||
}
|
||||
|
||||
fn are_same_types(
|
||||
current_arm_types: &FxHashMap<String, Option<Type>>,
|
||||
current_arm_types: &FxHashMap<String, Option<Type<'_>>>,
|
||||
arm: &ast::MatchArm,
|
||||
ctx: &AssistContext<'_>,
|
||||
) -> bool {
|
||||
|
|
@ -121,15 +121,15 @@ fn are_same_types(
|
|||
true
|
||||
}
|
||||
|
||||
fn get_arm_types(
|
||||
context: &AssistContext<'_>,
|
||||
fn get_arm_types<'db>(
|
||||
context: &AssistContext<'db>,
|
||||
arm: &ast::MatchArm,
|
||||
) -> FxHashMap<String, Option<Type>> {
|
||||
let mut mapping: FxHashMap<String, Option<Type>> = FxHashMap::default();
|
||||
) -> FxHashMap<String, Option<Type<'db>>> {
|
||||
let mut mapping: FxHashMap<String, Option<Type<'db>>> = FxHashMap::default();
|
||||
|
||||
fn recurse(
|
||||
map: &mut FxHashMap<String, Option<Type>>,
|
||||
ctx: &AssistContext<'_>,
|
||||
fn recurse<'db>(
|
||||
map: &mut FxHashMap<String, Option<Type<'db>>>,
|
||||
ctx: &AssistContext<'db>,
|
||||
pat: &Option<ast::Pat>,
|
||||
) {
|
||||
if let Some(local_pat) = pat {
|
||||
@ -217,7 +217,7 @@ fn item_as_trait(db: &RootDatabase, item: hir::ItemInNs) -> Option<hir::Trait> {
|
|||
}
|
||||
}
|
||||
|
||||
fn group_label(candidate: &ImportCandidate) -> GroupLabel {
|
||||
fn group_label(candidate: &ImportCandidate<'_>) -> GroupLabel {
|
||||
let name = match candidate {
|
||||
ImportCandidate::Path(it) => &it.name,
|
||||
ImportCandidate::TraitAssocItem(it) | ImportCandidate::TraitMethod(it) => {
|
||||
|
|
@ -230,7 +230,7 @@ fn group_label(candidate: &ImportCandidate) -> GroupLabel {
|
|||
|
||||
fn label(
|
||||
db: &RootDatabase,
|
||||
candidate: &ImportCandidate,
|
||||
candidate: &ImportCandidate<'_>,
|
||||
import: &LocatedImport,
|
||||
edition: Edition,
|
||||
) -> String {
|
||||
@ -46,7 +46,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
|
|||
return None;
|
||||
}
|
||||
|
||||
let mut formatter = |_: &hir::Type| String::from("todo!()");
|
||||
let mut formatter = |_: &hir::Type<'_>| String::from("todo!()");
|
||||
|
||||
let edition = scope.krate().edition(ctx.db());
|
||||
let paths = paths
|
||||
@ -405,7 +405,7 @@ pub(crate) fn does_pat_variant_nested_or_literal(ctx: &AssistContext<'_>, pat: &
|
|||
}
|
||||
|
||||
fn check_pat_variant_from_enum(ctx: &AssistContext<'_>, pat: &ast::Pat) -> bool {
|
||||
ctx.sema.type_of_pat(pat).is_none_or(|ty: hir::TypeInfo| {
|
||||
ctx.sema.type_of_pat(pat).is_none_or(|ty: hir::TypeInfo<'_>| {
|
||||
ty.adjusted().as_adt().is_some_and(|adt| matches!(adt, hir::Adt::Enum(_)))
|
||||
})
|
||||
}
|
||||
|
|
@ -780,9 +780,9 @@ pub(crate) fn add_method_to_adt(
|
|||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct ReferenceConversion {
|
||||
pub(crate) struct ReferenceConversion<'db> {
|
||||
conversion: ReferenceConversionType,
|
||||
ty: hir::Type,
|
||||
ty: hir::Type<'db>,
|
||||
impls_deref: bool,
|
||||
}
|
||||
|
||||
|
|
@ -802,10 +802,10 @@ enum ReferenceConversionType {
|
|||
Result,
|
||||
}
|
||||
|
||||
impl ReferenceConversion {
|
||||
impl<'db> ReferenceConversion<'db> {
|
||||
pub(crate) fn convert_type(
|
||||
&self,
|
||||
db: &dyn HirDatabase,
|
||||
db: &'db dyn HirDatabase,
|
||||
display_target: DisplayTarget,
|
||||
) -> ast::Type {
|
||||
let ty = match self.conversion {
|
||||
|
|
@ -878,11 +878,11 @@ impl ReferenceConversion {
|
|||
// FIXME: It should return a new hir::Type, but currently constructing new types is too cumbersome
|
||||
// and all users of this function operate on string type names, so they can do the conversion
|
||||
// itself themselves.
|
||||
pub(crate) fn convert_reference_type(
|
||||
ty: hir::Type,
|
||||
db: &RootDatabase,
|
||||
famous_defs: &FamousDefs<'_, '_>,
|
||||
) -> Option<ReferenceConversion> {
|
||||
pub(crate) fn convert_reference_type<'db>(
|
||||
ty: hir::Type<'db>,
|
||||
db: &'db RootDatabase,
|
||||
famous_defs: &FamousDefs<'_, 'db>,
|
||||
) -> Option<ReferenceConversion<'db>> {
|
||||
handle_copy(&ty, db)
|
||||
.or_else(|| handle_as_ref_str(&ty, db, famous_defs))
|
||||
.or_else(|| handle_as_ref_slice(&ty, db, famous_defs))
|
||||
|
|
@ -892,18 +892,21 @@ pub(crate) fn convert_reference_type(
|
|||
.map(|(conversion, impls_deref)| ReferenceConversion { ty, conversion, impls_deref })
|
||||
}
|
||||
|
||||
fn could_deref_to_target(ty: &hir::Type, target: &hir::Type, db: &dyn HirDatabase) -> bool {
|
||||
fn could_deref_to_target(ty: &hir::Type<'_>, target: &hir::Type<'_>, db: &dyn HirDatabase) -> bool {
|
||||
let ty_ref = ty.add_reference(hir::Mutability::Shared);
|
||||
let target_ref = target.add_reference(hir::Mutability::Shared);
|
||||
ty_ref.could_coerce_to(db, &target_ref)
|
||||
}
|
||||
|
||||
fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<(ReferenceConversionType, bool)> {
|
||||
fn handle_copy(
|
||||
ty: &hir::Type<'_>,
|
||||
db: &dyn HirDatabase,
|
||||
) -> Option<(ReferenceConversionType, bool)> {
|
||||
ty.is_copy(db).then_some((ReferenceConversionType::Copy, true))
|
||||
}
|
||||
|
||||
fn handle_as_ref_str(
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
db: &dyn HirDatabase,
|
||||
famous_defs: &FamousDefs<'_, '_>,
|
||||
) -> Option<(ReferenceConversionType, bool)> {
|
||||
|
|
@ -914,7 +917,7 @@ fn handle_as_ref_str(
|
|||
}
|
||||
|
||||
fn handle_as_ref_slice(
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
db: &dyn HirDatabase,
|
||||
famous_defs: &FamousDefs<'_, '_>,
|
||||
) -> Option<(ReferenceConversionType, bool)> {
|
||||
|
|
@ -928,7 +931,7 @@ fn handle_as_ref_slice(
|
|||
}
|
||||
|
||||
fn handle_dereferenced(
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
db: &dyn HirDatabase,
|
||||
famous_defs: &FamousDefs<'_, '_>,
|
||||
) -> Option<(ReferenceConversionType, bool)> {
|
||||
|
|
@ -941,7 +944,7 @@ fn handle_dereferenced(
|
|||
}
|
||||
|
||||
fn handle_option_as_ref(
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
db: &dyn HirDatabase,
|
||||
famous_defs: &FamousDefs<'_, '_>,
|
||||
) -> Option<(ReferenceConversionType, bool)> {
|
||||
|
|
@ -953,7 +956,7 @@ fn handle_option_as_ref(
|
|||
}
|
||||
|
||||
fn handle_result_as_ref(
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
db: &dyn HirDatabase,
|
||||
famous_defs: &FamousDefs<'_, '_>,
|
||||
) -> Option<(ReferenceConversionType, bool)> {
|
||||
@ -17,7 +17,7 @@ pub(crate) fn gen_trait_fn_body(
|
|||
func: &ast::Fn,
|
||||
trait_path: &ast::Path,
|
||||
adt: &ast::Adt,
|
||||
trait_ref: Option<TraitRef>,
|
||||
trait_ref: Option<TraitRef<'_>>,
|
||||
) -> Option<()> {
|
||||
match trait_path.segment()?.name_ref()?.text().as_str() {
|
||||
"Clone" => gen_clone_impl(adt, func),
|
||||
|
|
@ -405,7 +405,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
|||
}
|
||||
|
||||
/// Generate a `PartialEq` impl based on the fields and members of the target type.
|
||||
fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
|
||||
fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
|
||||
stdx::always!(func.name().is_some_and(|name| name.text() == "eq"));
|
||||
fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
|
||||
match expr {
|
||||
|
|
@ -599,7 +599,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -
|
|||
Some(())
|
||||
}
|
||||
|
||||
fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
|
||||
fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> {
|
||||
stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp"));
|
||||
fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
|
||||
let mut arms = vec![];
|
||||
@ -161,7 +161,11 @@ impl Completions {
|
|||
item.add_to(self, ctx.db);
|
||||
}
|
||||
|
||||
pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) {
|
||||
pub(crate) fn add_expr(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
expr: &hir::term_search::Expr<'_>,
|
||||
) {
|
||||
if let Some(item) = render_expr(ctx, expr) {
|
||||
item.add_to(self, ctx.db)
|
||||
}
|
||||
|
|
@ -170,7 +174,7 @@ impl Completions {
|
|||
pub(crate) fn add_crate_roots(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
) {
|
||||
ctx.process_all_names(&mut |name, res, doc_aliases| match res {
|
||||
ScopeDef::ModuleDef(hir::ModuleDef::Module(m)) if m.is_crate_root() => {
|
||||
|
|
@ -183,7 +187,7 @@ impl Completions {
|
|||
pub(crate) fn add_path_resolution(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
local_name: hir::Name,
|
||||
resolution: hir::ScopeDef,
|
||||
doc_aliases: Vec<syntax::SmolStr>,
|
||||
|
|
@ -232,7 +236,7 @@ impl Completions {
|
|||
pub(crate) fn add_enum_variants(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
e: hir::Enum,
|
||||
) {
|
||||
if !ctx.check_stability_and_hidden(e) {
|
||||
|
|
@ -246,7 +250,7 @@ impl Completions {
|
|||
pub(crate) fn add_module(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
module: hir::Module,
|
||||
local_name: hir::Name,
|
||||
doc_aliases: Vec<syntax::SmolStr>,
|
||||
|
|
@ -263,7 +267,7 @@ impl Completions {
|
|||
pub(crate) fn add_macro(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
mac: hir::Macro,
|
||||
local_name: hir::Name,
|
||||
) {
|
||||
|
|
@ -286,7 +290,7 @@ impl Completions {
|
|||
pub(crate) fn add_function(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
func: hir::Function,
|
||||
local_name: Option<hir::Name>,
|
||||
) {
|
||||
|
|
@ -312,7 +316,7 @@ impl Completions {
|
|||
pub(crate) fn add_method(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
func: hir::Function,
|
||||
receiver: Option<SmolStr>,
|
||||
local_name: Option<hir::Name>,
|
||||
|
|
@ -340,7 +344,7 @@ impl Completions {
|
|||
pub(crate) fn add_method_with_import(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
func: hir::Function,
|
||||
import: LocatedImport,
|
||||
) {
|
||||
|
|
@ -407,7 +411,7 @@ impl Completions {
|
|||
pub(crate) fn add_qualified_enum_variant(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
variant: hir::Variant,
|
||||
path: hir::ModPath,
|
||||
) {
|
||||
|
|
@ -424,7 +428,7 @@ impl Completions {
|
|||
pub(crate) fn add_enum_variant(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
variant: hir::Variant,
|
||||
local_name: Option<hir::Name>,
|
||||
) {
|
||||
|
|
@ -447,10 +451,10 @@ impl Completions {
|
|||
pub(crate) fn add_field(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
receiver: Option<SmolStr>,
|
||||
field: hir::Field,
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
) {
|
||||
let is_private_editable = match ctx.is_visible(&field) {
|
||||
Visible::Yes => false,
|
||||
|
|
@ -471,7 +475,7 @@ impl Completions {
|
|||
pub(crate) fn add_struct_literal(
|
||||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
strukt: hir::Struct,
|
||||
path: Option<hir::ModPath>,
|
||||
local_name: Option<hir::Name>,
|
||||
|
|
@ -518,7 +522,7 @@ impl Completions {
|
|||
ctx: &CompletionContext<'_>,
|
||||
receiver: Option<SmolStr>,
|
||||
field: usize,
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
) {
|
||||
// Only used for (unnamed) tuples, whose all fields *are* stable. No need to check
|
||||
// stability here.
|
||||
|
|
@ -550,7 +554,7 @@ impl Completions {
|
|||
&mut self,
|
||||
ctx: &CompletionContext<'_>,
|
||||
pattern_ctx: &PatternContext,
|
||||
path_ctx: Option<&PathCompletionCtx>,
|
||||
path_ctx: Option<&PathCompletionCtx<'_>>,
|
||||
variant: hir::Variant,
|
||||
local_name: Option<hir::Name>,
|
||||
) {
|
||||
|
|
@ -704,7 +708,7 @@ pub(super) fn complete_name(
|
|||
pub(super) fn complete_name_ref(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
NameRefContext { nameref, kind }: &NameRefContext,
|
||||
NameRefContext { nameref, kind }: &NameRefContext<'_>,
|
||||
) {
|
||||
match kind {
|
||||
NameRefKind::Path(path_ctx) => {
|
||||
@ -86,7 +86,7 @@ pub(crate) fn complete_known_attribute_input(
|
|||
pub(crate) fn complete_attribute_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
|
||||
&AttrCtx { kind, annotated_item_kind, ref derive_helpers }: &AttrCtx,
|
||||
) {
|
||||
let is_inner = kind == AttrKind::Inner;
|
||||
@ -13,7 +13,7 @@ use crate::{
|
|||
pub(crate) fn complete_derive_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
|
||||
existing_derives: &ExistingDerives,
|
||||
) {
|
||||
let core = ctx.famous_defs().core();
|
||||
@ -18,7 +18,7 @@ use crate::{
|
|||
pub(crate) fn complete_dot(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
) {
|
||||
let receiver_ty = match dot_access {
|
||||
DotAccess { receiver_ty: Some(receiver_ty), .. } => &receiver_ty.original,
|
||||
|
|
@ -130,8 +130,8 @@ pub(crate) fn complete_dot(
|
|||
pub(crate) fn complete_undotted_self(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
expr_ctx: &PathExprCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
expr_ctx: &PathExprCtx<'_>,
|
||||
) {
|
||||
if !ctx.config.enable_self_on_the_fly {
|
||||
return;
|
||||
|
|
@ -198,9 +198,9 @@ pub(crate) fn complete_undotted_self(
|
|||
fn complete_fields(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
receiver: &hir::Type,
|
||||
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
|
||||
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
|
||||
receiver: &hir::Type<'_>,
|
||||
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type<'_>),
|
||||
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type<'_>),
|
||||
is_field_access: bool,
|
||||
is_method_access_with_parens: bool,
|
||||
) {
|
||||
|
|
@ -230,7 +230,7 @@ fn complete_fields(
|
|||
|
||||
fn complete_methods(
|
||||
ctx: &CompletionContext<'_>,
|
||||
receiver: &hir::Type,
|
||||
receiver: &hir::Type<'_>,
|
||||
traits_in_scope: &FxHashSet<hir::TraitId>,
|
||||
f: impl FnMut(hir::Function),
|
||||
) {
|
||||
@ -47,8 +47,8 @@ where
|
|||
pub(crate) fn complete_expr_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
|
||||
expr_ctx: &PathExprCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
|
||||
expr_ctx: &PathExprCtx<'_>,
|
||||
) {
|
||||
let _p = tracing::info_span!("complete_expr_path").entered();
|
||||
if !ctx.qualifier_ctx.none() {
|
||||
@ -8,7 +8,7 @@ use crate::{
|
|||
pub(crate) fn complete_field_list_tuple_variant(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
) {
|
||||
if ctx.qualifier_ctx.vis_node.is_some() {
|
||||
} else if let PathCompletionCtx {
|
||||
@ -111,7 +111,7 @@ use crate::{
|
|||
pub(crate) fn import_on_the_fly_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
) -> Option<()> {
|
||||
if !ctx.config.enable_imports_on_the_fly {
|
||||
return None;
|
||||
|
|
@ -175,7 +175,7 @@ pub(crate) fn import_on_the_fly_pat(
|
|||
pub(crate) fn import_on_the_fly_dot(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
) -> Option<()> {
|
||||
if !ctx.config.enable_imports_on_the_fly {
|
||||
return None;
|
||||
|
|
@ -203,8 +203,8 @@ pub(crate) fn import_on_the_fly_dot(
|
|||
fn import_on_the_fly(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx,
|
||||
import_assets: ImportAssets,
|
||||
path_ctx @ PathCompletionCtx { kind, .. }: &PathCompletionCtx<'_>,
|
||||
import_assets: ImportAssets<'_>,
|
||||
position: SyntaxNode,
|
||||
potential_import_name: String,
|
||||
) -> Option<()> {
|
||||
|
|
@ -290,7 +290,7 @@ fn import_on_the_fly_pat_(
|
|||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
pattern_ctx: &PatternContext,
|
||||
import_assets: ImportAssets,
|
||||
import_assets: ImportAssets<'_>,
|
||||
position: SyntaxNode,
|
||||
potential_import_name: String,
|
||||
) -> Option<()> {
|
||||
|
|
@ -335,8 +335,8 @@ fn import_on_the_fly_pat_(
|
|||
fn import_on_the_fly_method(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
import_assets: ImportAssets,
|
||||
dot_access: &DotAccess<'_>,
|
||||
import_assets: ImportAssets<'_>,
|
||||
position: SyntaxNode,
|
||||
potential_import_name: String,
|
||||
) -> Option<()> {
|
||||
|
|
@ -400,11 +400,11 @@ fn import_name(ctx: &CompletionContext<'_>) -> String {
|
|||
if token_kind.is_any_identifier() { ctx.token.to_string() } else { String::new() }
|
||||
}
|
||||
|
||||
fn import_assets_for_path(
|
||||
ctx: &CompletionContext<'_>,
|
||||
fn import_assets_for_path<'db>(
|
||||
ctx: &CompletionContext<'db>,
|
||||
potential_import_name: &str,
|
||||
qualifier: Option<ast::Path>,
|
||||
) -> Option<ImportAssets> {
|
||||
) -> Option<ImportAssets<'db>> {
|
||||
let _p =
|
||||
tracing::info_span!("import_assets_for_path", ?potential_import_name, ?qualifier).entered();
|
||||
|
||||
@ -10,8 +10,8 @@ pub(crate) mod trait_impl;
|
|||
pub(crate) fn complete_item_list_in_expr(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
expr_ctx: &PathExprCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
expr_ctx: &PathExprCtx<'_>,
|
||||
) {
|
||||
if !expr_ctx.in_block_expr {
|
||||
return;
|
||||
|
|
@ -25,7 +25,7 @@ pub(crate) fn complete_item_list_in_expr(
|
|||
pub(crate) fn complete_item_list(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
|
||||
kind: &ItemListKind,
|
||||
) {
|
||||
let _p = tracing::info_span!("complete_item_list").entered();
|
||||
@ -122,7 +122,7 @@ fn complete_trait_impl_name(
|
|||
pub(crate) fn complete_trait_impl_item_by_name(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
name_ref: &Option<ast::NameRef>,
|
||||
impl_: &Option<ast::Impl>,
|
||||
) {
|
||||
@ -124,7 +124,7 @@ pub(crate) fn complete_pattern(
|
|||
pub(crate) fn complete_pattern_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
|
||||
) {
|
||||
match qualified {
|
||||
Qualified::With { resolution: Some(resolution), super_chain_len, .. } => {
|
||||
@ -28,7 +28,7 @@ use crate::{
|
|||
pub(crate) fn complete_postfix(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
) {
|
||||
if !ctx.config.enable_postfix_completions {
|
||||
return;
|
||||
@ -88,7 +88,7 @@ pub(crate) fn complete_record_expr_fields(
|
|||
pub(crate) fn add_default_update(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
ty: Option<hir::TypeInfo>,
|
||||
ty: Option<hir::TypeInfo<'_>>,
|
||||
) {
|
||||
let default_trait = ctx.famous_defs().core_default_Default();
|
||||
let impls_default_trait = default_trait
|
||||
|
|
@ -117,7 +117,7 @@ pub(crate) fn add_default_update(
|
|||
fn complete_fields(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
missing_fields: Vec<(hir::Field, hir::Type)>,
|
||||
missing_fields: Vec<(hir::Field, hir::Type<'_>)>,
|
||||
) {
|
||||
for (field, ty) in missing_fields {
|
||||
// This should call something else, we shouldn't be synthesizing a DotAccess here
|
||||
@ -11,8 +11,8 @@ use crate::{
|
|||
pub(crate) fn complete_expr_snippet(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
&PathExprCtx { in_block_expr, .. }: &PathExprCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
&PathExprCtx { in_block_expr, .. }: &PathExprCtx<'_>,
|
||||
) {
|
||||
if !matches!(path_ctx.qualified, Qualified::No) {
|
||||
return;
|
||||
|
|
@ -51,7 +51,7 @@ macro_rules! $1 {
|
|||
pub(crate) fn complete_item_snippet(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
kind: &ItemListKind,
|
||||
) {
|
||||
if !matches!(path_ctx.qualified, Qualified::No) {
|
||||
@ -12,7 +12,7 @@ use crate::{
|
|||
pub(crate) fn complete_type_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
|
||||
location: &TypeLocation,
|
||||
) {
|
||||
let _p = tracing::info_span!("complete_type_path").entered();
|
||||
|
|
@ -220,7 +220,7 @@ pub(crate) fn complete_type_path(
|
|||
pub(crate) fn complete_ascribed_type(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
ascription: &TypeAscriptionTarget,
|
||||
) -> Option<()> {
|
||||
if !path_ctx.is_trivial_path() {
|
||||
@ -13,7 +13,7 @@ use crate::{
|
|||
pub(crate) fn complete_use_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, use_tree_parent, .. }: &PathCompletionCtx<'_>,
|
||||
name_ref: &Option<ast::NameRef>,
|
||||
) {
|
||||
match qualified {
|
||||
@ -8,7 +8,7 @@ use crate::{
|
|||
pub(crate) fn complete_vis_path(
|
||||
acc: &mut Completions,
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx,
|
||||
path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx<'_>,
|
||||
&has_in_token: &bool,
|
||||
) {
|
||||
match qualified {
|
||||
@ -65,13 +65,13 @@ impl QualifierCtx {
|
|||
|
||||
/// The state of the path we are currently completing.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct PathCompletionCtx {
|
||||
pub(crate) struct PathCompletionCtx<'db> {
|
||||
/// If this is a call with () already there (or {} in case of record patterns)
|
||||
pub(crate) has_call_parens: bool,
|
||||
/// If this has a macro call bang !
|
||||
pub(crate) has_macro_bang: bool,
|
||||
/// The qualifier of the current path.
|
||||
pub(crate) qualified: Qualified,
|
||||
pub(crate) qualified: Qualified<'db>,
|
||||
/// The parent of the path we are completing.
|
||||
pub(crate) parent: Option<ast::Path>,
|
||||
#[allow(dead_code)]
|
||||
|
|
@ -79,14 +79,14 @@ pub(crate) struct PathCompletionCtx {
|
|||
pub(crate) path: ast::Path,
|
||||
/// The path of which we are completing the segment in the original file
|
||||
pub(crate) original_path: Option<ast::Path>,
|
||||
pub(crate) kind: PathKind,
|
||||
pub(crate) kind: PathKind<'db>,
|
||||
/// Whether the path segment has type args or not.
|
||||
pub(crate) has_type_args: bool,
|
||||
/// Whether the qualifier comes from a use tree parent or not
|
||||
pub(crate) use_tree_parent: bool,
|
||||
}
|
||||
|
||||
impl PathCompletionCtx {
|
||||
impl PathCompletionCtx<'_> {
|
||||
pub(crate) fn is_trivial_path(&self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
|
|
@ -104,9 +104,9 @@ impl PathCompletionCtx {
|
|||
|
||||
/// The kind of path we are completing right now.
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) enum PathKind {
|
||||
pub(crate) enum PathKind<'db> {
|
||||
Expr {
|
||||
expr_ctx: PathExprCtx,
|
||||
expr_ctx: PathExprCtx<'db>,
|
||||
},
|
||||
Type {
|
||||
location: TypeLocation,
|
||||
|
|
@ -140,7 +140,7 @@ pub(crate) struct AttrCtx {
|
|||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) struct PathExprCtx {
|
||||
pub(crate) struct PathExprCtx<'db> {
|
||||
pub(crate) in_block_expr: bool,
|
||||
pub(crate) in_breakable: BreakableKind,
|
||||
pub(crate) after_if_expr: bool,
|
||||
|
|
@ -152,7 +152,7 @@ pub(crate) struct PathExprCtx {
|
|||
/// The surrounding RecordExpression we are completing a functional update
|
||||
pub(crate) is_func_update: Option<ast::RecordExpr>,
|
||||
pub(crate) self_param: Option<hir::SelfParam>,
|
||||
pub(crate) innermost_ret_ty: Option<hir::Type>,
|
||||
pub(crate) innermost_ret_ty: Option<hir::Type<'db>>,
|
||||
pub(crate) impl_: Option<ast::Impl>,
|
||||
/// Whether this expression occurs in match arm guard position: before the
|
||||
/// fat arrow token
|
||||
|
|
@ -241,7 +241,7 @@ pub(crate) enum ItemListKind {
|
|||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum Qualified {
|
||||
pub(crate) enum Qualified<'db> {
|
||||
No,
|
||||
With {
|
||||
path: ast::Path,
|
||||
|
|
@ -260,7 +260,7 @@ pub(crate) enum Qualified {
|
|||
},
|
||||
/// <_>::
|
||||
TypeAnchor {
|
||||
ty: Option<hir::Type>,
|
||||
ty: Option<hir::Type<'db>>,
|
||||
trait_: Option<hir::Trait>,
|
||||
},
|
||||
/// Whether the path is an absolute path
|
||||
|
|
@ -341,17 +341,17 @@ pub(crate) enum NameKind {
|
|||
|
||||
/// The state of the NameRef we are completing.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct NameRefContext {
|
||||
pub(crate) struct NameRefContext<'db> {
|
||||
/// NameRef syntax in the original file
|
||||
pub(crate) nameref: Option<ast::NameRef>,
|
||||
pub(crate) kind: NameRefKind,
|
||||
pub(crate) kind: NameRefKind<'db>,
|
||||
}
|
||||
|
||||
/// The kind of the NameRef we are completing.
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum NameRefKind {
|
||||
Path(PathCompletionCtx),
|
||||
DotAccess(DotAccess),
|
||||
pub(crate) enum NameRefKind<'db> {
|
||||
Path(PathCompletionCtx<'db>),
|
||||
DotAccess(DotAccess<'db>),
|
||||
/// Position where we are only interested in keyword completions
|
||||
Keyword(ast::Item),
|
||||
/// The record expression this nameref is a field of and whether a dot precedes the completion identifier.
|
||||
|
|
@ -365,9 +365,9 @@ pub(crate) enum NameRefKind {
|
|||
|
||||
/// The identifier we are currently completing.
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum CompletionAnalysis {
|
||||
pub(crate) enum CompletionAnalysis<'db> {
|
||||
Name(NameContext),
|
||||
NameRef(NameRefContext),
|
||||
NameRef(NameRefContext<'db>),
|
||||
Lifetime(LifetimeContext),
|
||||
/// The string the cursor is currently inside
|
||||
String {
|
||||
|
|
@ -386,9 +386,9 @@ pub(crate) enum CompletionAnalysis {
|
|||
|
||||
/// Information about the field or method access we are completing.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct DotAccess {
|
||||
pub(crate) struct DotAccess<'db> {
|
||||
pub(crate) receiver: Option<ast::Expr>,
|
||||
pub(crate) receiver_ty: Option<TypeInfo>,
|
||||
pub(crate) receiver_ty: Option<TypeInfo<'db>>,
|
||||
pub(crate) kind: DotAccessKind,
|
||||
pub(crate) ctx: DotAccessExprCtx,
|
||||
}
|
||||
|
|
@ -457,7 +457,7 @@ pub(crate) struct CompletionContext<'a> {
|
|||
/// This is usually the parameter name of the function argument we are completing.
|
||||
pub(crate) expected_name: Option<NameOrNameRef>,
|
||||
/// The expected type of what we are completing.
|
||||
pub(crate) expected_type: Option<Type>,
|
||||
pub(crate) expected_type: Option<Type<'a>>,
|
||||
|
||||
pub(crate) qualifier_ctx: QualifierCtx,
|
||||
|
||||
|
|
@ -608,7 +608,7 @@ impl CompletionContext<'_> {
|
|||
|
||||
pub(crate) fn iterate_path_candidates(
|
||||
&self,
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
mut cb: impl FnMut(hir::AssocItem),
|
||||
) {
|
||||
let mut seen = FxHashSet::default();
|
||||
|
|
@ -695,12 +695,12 @@ impl CompletionContext<'_> {
|
|||
}
|
||||
|
||||
// CompletionContext construction
|
||||
impl<'a> CompletionContext<'a> {
|
||||
impl<'db> CompletionContext<'db> {
|
||||
pub(crate) fn new(
|
||||
db: &'a RootDatabase,
|
||||
db: &'db RootDatabase,
|
||||
position @ FilePosition { file_id, offset }: FilePosition,
|
||||
config: &'a CompletionConfig<'a>,
|
||||
) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
|
||||
config: &'db CompletionConfig<'db>,
|
||||
) -> Option<(CompletionContext<'db>, CompletionAnalysis<'db>)> {
|
||||
let _p = tracing::info_span!("CompletionContext::new").entered();
|
||||
let sema = Semantics::new(db);
|
||||
|
||||
@ -38,9 +38,9 @@ struct ExpansionResult {
|
|||
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
|
||||
}
|
||||
|
||||
pub(super) struct AnalysisResult {
|
||||
pub(super) analysis: CompletionAnalysis,
|
||||
pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
|
||||
pub(super) struct AnalysisResult<'db> {
|
||||
pub(super) analysis: CompletionAnalysis<'db>,
|
||||
pub(super) expected: (Option<Type<'db>>, Option<ast::NameOrNameRef>),
|
||||
pub(super) qualifier_ctx: QualifierCtx,
|
||||
/// the original token of the expanded file
|
||||
pub(super) token: SyntaxToken,
|
||||
|
|
@ -48,13 +48,13 @@ pub(super) struct AnalysisResult {
|
|||
pub(super) original_offset: TextSize,
|
||||
}
|
||||
|
||||
pub(super) fn expand_and_analyze(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
pub(super) fn expand_and_analyze<'db>(
|
||||
sema: &Semantics<'db, RootDatabase>,
|
||||
original_file: InFile<SyntaxNode>,
|
||||
speculative_file: SyntaxNode,
|
||||
offset: TextSize,
|
||||
original_token: &SyntaxToken,
|
||||
) -> Option<AnalysisResult> {
|
||||
) -> Option<AnalysisResult<'db>> {
|
||||
// as we insert after the offset, right biased will *always* pick the identifier no matter
|
||||
// if there is an ident already typed or not
|
||||
let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
|
||||
|
|
@ -432,12 +432,13 @@ fn expand(
|
|||
|
||||
/// Fill the completion context, this is what does semantic reasoning about the surrounding context
|
||||
/// of the completion location.
|
||||
fn analyze(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
fn analyze<'db>(
|
||||
sema: &Semantics<'db, RootDatabase>,
|
||||
expansion_result: ExpansionResult,
|
||||
original_token: &SyntaxToken,
|
||||
self_token: &SyntaxToken,
|
||||
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
|
||||
) -> Option<(CompletionAnalysis<'db>, (Option<Type<'db>>, Option<ast::NameOrNameRef>), QualifierCtx)>
|
||||
{
|
||||
let _p = tracing::info_span!("CompletionContext::analyze").entered();
|
||||
let ExpansionResult {
|
||||
original_file,
|
||||
|
|
@ -555,17 +556,17 @@ fn analyze(
|
|||
}
|
||||
|
||||
/// Calculate the expected type and name of the cursor position.
|
||||
fn expected_type_and_name(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
fn expected_type_and_name<'db>(
|
||||
sema: &Semantics<'db, RootDatabase>,
|
||||
token: &SyntaxToken,
|
||||
name_like: &ast::NameLike,
|
||||
) -> (Option<Type>, Option<NameOrNameRef>) {
|
||||
) -> (Option<Type<'db>>, Option<NameOrNameRef>) {
|
||||
let mut node = match token.parent() {
|
||||
Some(it) => it,
|
||||
None => return (None, None),
|
||||
};
|
||||
|
||||
let strip_refs = |mut ty: Type| match name_like {
|
||||
let strip_refs = |mut ty: Type<'db>| match name_like {
|
||||
ast::NameLike::NameRef(n) => {
|
||||
let p = match n.syntax().parent() {
|
||||
Some(it) => it,
|
||||
|
|
@ -805,13 +806,13 @@ fn classify_name(
|
|||
Some(NameContext { name, kind })
|
||||
}
|
||||
|
||||
fn classify_name_ref(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
fn classify_name_ref<'db>(
|
||||
sema: &Semantics<'db, RootDatabase>,
|
||||
original_file: &SyntaxNode,
|
||||
name_ref: ast::NameRef,
|
||||
original_offset: TextSize,
|
||||
parent: SyntaxNode,
|
||||
) -> Option<(NameRefContext, QualifierCtx)> {
|
||||
) -> Option<(NameRefContext<'db>, QualifierCtx)> {
|
||||
let nameref = find_node_at_offset(original_file, original_offset);
|
||||
|
||||
let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
|
||||
@ -502,7 +502,7 @@ pub(crate) struct Builder {
|
|||
impl Builder {
|
||||
pub(crate) fn from_resolution(
|
||||
ctx: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
local_name: hir::Name,
|
||||
resolution: hir::ScopeDef,
|
||||
) -> Self {
|
||||
@ -122,10 +122,10 @@ impl<'a> RenderContext<'a> {
|
|||
|
||||
pub(crate) fn render_field(
|
||||
ctx: RenderContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
receiver: Option<SmolStr>,
|
||||
field: hir::Field,
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
) -> CompletionItem {
|
||||
let db = ctx.db();
|
||||
let is_deprecated = ctx.is_deprecated(field);
|
||||
|
|
@ -204,7 +204,7 @@ pub(crate) fn render_tuple_field(
|
|||
ctx: RenderContext<'_>,
|
||||
receiver: Option<SmolStr>,
|
||||
field: usize,
|
||||
ty: &hir::Type,
|
||||
ty: &hir::Type<'_>,
|
||||
) -> CompletionItem {
|
||||
let mut item = CompletionItem::new(
|
||||
SymbolKind::Field,
|
||||
|
|
@ -241,7 +241,7 @@ pub(crate) fn render_type_inference(
|
|||
|
||||
pub(crate) fn render_path_resolution(
|
||||
ctx: RenderContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
local_name: hir::Name,
|
||||
resolution: ScopeDef,
|
||||
) -> Builder {
|
||||
|
|
@ -259,7 +259,7 @@ pub(crate) fn render_pattern_resolution(
|
|||
|
||||
pub(crate) fn render_resolution_with_import(
|
||||
ctx: RenderContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
import_edit: LocatedImport,
|
||||
) -> Option<Builder> {
|
||||
let resolution = ScopeDef::from(import_edit.original_item);
|
||||
|
|
@ -282,10 +282,10 @@ pub(crate) fn render_resolution_with_import_pat(
|
|||
|
||||
pub(crate) fn render_expr(
|
||||
ctx: &CompletionContext<'_>,
|
||||
expr: &hir::term_search::Expr,
|
||||
expr: &hir::term_search::Expr<'_>,
|
||||
) -> Option<Builder> {
|
||||
let mut i = 1;
|
||||
let mut snippet_formatter = |ty: &hir::Type| {
|
||||
let mut snippet_formatter = |ty: &hir::Type<'_>| {
|
||||
let arg_name = ty
|
||||
.as_adt()
|
||||
.map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str()))
|
||||
|
|
@ -295,7 +295,7 @@ pub(crate) fn render_expr(
|
|||
res
|
||||
};
|
||||
|
||||
let mut label_formatter = |ty: &hir::Type| {
|
||||
let mut label_formatter = |ty: &hir::Type<'_>| {
|
||||
ty.as_adt()
|
||||
.map(|adt| stdx::to_lower_snake_case(adt.name(ctx.db).as_str()))
|
||||
.unwrap_or_else(|| String::from("..."))
|
||||
|
|
@ -391,7 +391,7 @@ fn render_resolution_pat(
|
|||
|
||||
fn render_resolution_path(
|
||||
ctx: RenderContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
local_name: hir::Name,
|
||||
import_to_add: Option<LocatedImport>,
|
||||
resolution: ScopeDef,
|
||||
|
|
@ -460,7 +460,7 @@ fn render_resolution_path(
|
|||
}
|
||||
}
|
||||
|
||||
let mut set_item_relevance = |ty: Type| {
|
||||
let mut set_item_relevance = |ty: Type<'_>| {
|
||||
if !ty.is_unknown() {
|
||||
item.detail(ty.display(db, krate).to_string());
|
||||
}
|
||||
|
|
@ -593,8 +593,8 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo
|
|||
// FIXME: This checks types without possible coercions which some completions might want to do
|
||||
fn match_types(
|
||||
ctx: &CompletionContext<'_>,
|
||||
ty1: &hir::Type,
|
||||
ty2: &hir::Type,
|
||||
ty1: &hir::Type<'_>,
|
||||
ty2: &hir::Type<'_>,
|
||||
) -> Option<CompletionRelevanceTypeMatch> {
|
||||
if ty1 == ty2 {
|
||||
Some(CompletionRelevanceTypeMatch::Exact)
|
||||
|
|
@ -607,7 +607,7 @@ fn match_types(
|
|||
|
||||
fn compute_type_match(
|
||||
ctx: &CompletionContext<'_>,
|
||||
completion_ty: &hir::Type,
|
||||
completion_ty: &hir::Type<'_>,
|
||||
) -> Option<CompletionRelevanceTypeMatch> {
|
||||
let expected_type = ctx.expected_type.as_ref()?;
|
||||
|
||||
|
|
@ -626,7 +626,7 @@ fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str)
|
|||
|
||||
fn compute_ref_match(
|
||||
ctx: &CompletionContext<'_>,
|
||||
completion_ty: &hir::Type,
|
||||
completion_ty: &hir::Type<'_>,
|
||||
) -> Option<CompletionItemRefMode> {
|
||||
let expected_type = ctx.expected_type.as_ref()?;
|
||||
let expected_without_ref = expected_type.remove_ref();
|
||||
|
|
@ -658,8 +658,8 @@ fn compute_ref_match(
|
|||
|
||||
fn path_ref_match(
|
||||
completion: &CompletionContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
ty: &hir::Type,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
ty: &hir::Type<'_>,
|
||||
item: &mut Builder,
|
||||
) {
|
||||
if let Some(original_path) = &path_ctx.original_path {
|
||||
@ -22,13 +22,13 @@ use crate::{
|
|||
|
||||
#[derive(Debug)]
|
||||
enum FuncKind<'ctx> {
|
||||
Function(&'ctx PathCompletionCtx),
|
||||
Method(&'ctx DotAccess, Option<SmolStr>),
|
||||
Function(&'ctx PathCompletionCtx<'ctx>),
|
||||
Method(&'ctx DotAccess<'ctx>, Option<SmolStr>),
|
||||
}
|
||||
|
||||
pub(crate) fn render_fn(
|
||||
ctx: RenderContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
local_name: Option<hir::Name>,
|
||||
func: hir::Function,
|
||||
) -> Builder {
|
||||
|
|
@ -38,7 +38,7 @@ pub(crate) fn render_fn(
|
|||
|
||||
pub(crate) fn render_method(
|
||||
ctx: RenderContext<'_>,
|
||||
dot_access: &DotAccess,
|
||||
dot_access: &DotAccess<'_>,
|
||||
receiver: Option<SmolStr>,
|
||||
local_name: Option<hir::Name>,
|
||||
func: hir::Function,
|
||||
|
|
@ -186,8 +186,8 @@ fn render(
|
|||
fn compute_return_type_match(
|
||||
db: &dyn HirDatabase,
|
||||
ctx: &RenderContext<'_>,
|
||||
self_type: hir::Type,
|
||||
ret_type: &hir::Type,
|
||||
self_type: hir::Type<'_>,
|
||||
ret_type: &hir::Type<'_>,
|
||||
) -> CompletionRelevanceReturnType {
|
||||
if match_types(ctx.completion, &self_type, ret_type).is_some() {
|
||||
// fn([..]) -> Self
|
||||
|
|
@ -217,8 +217,8 @@ pub(super) fn add_call_parens<'b>(
|
|||
name: SmolStr,
|
||||
escaped_name: SmolStr,
|
||||
self_param: Option<hir::SelfParam>,
|
||||
params: Vec<hir::Param>,
|
||||
ret_type: &hir::Type,
|
||||
params: Vec<hir::Param<'_>>,
|
||||
ret_type: &hir::Type<'_>,
|
||||
) -> &'b mut Builder {
|
||||
cov_mark::hit!(inserts_parens_for_function_calls);
|
||||
|
||||
|
|
@ -288,7 +288,7 @@ pub(super) fn add_call_parens<'b>(
|
|||
builder.label(SmolStr::from_iter([&name, label_suffix])).insert_snippet(cap, snippet)
|
||||
}
|
||||
|
||||
fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'static str {
|
||||
fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type<'_>) -> &'static str {
|
||||
if let Some(derefed_ty) = ty.remove_ref() {
|
||||
for (name, local) in ctx.locals.iter().sorted_by_key(|&(k, _)| k.clone()) {
|
||||
if name.as_str() == arg {
|
||||
|
|
@ -369,12 +369,12 @@ fn params_display(ctx: &CompletionContext<'_>, detail: &mut String, func: hir::F
|
|||
}
|
||||
}
|
||||
|
||||
fn params(
|
||||
ctx: &CompletionContext<'_>,
|
||||
fn params<'db>(
|
||||
ctx: &CompletionContext<'db>,
|
||||
func: hir::Function,
|
||||
func_kind: &FuncKind<'_>,
|
||||
has_dot_receiver: bool,
|
||||
) -> Option<(Option<hir::SelfParam>, Vec<hir::Param>)> {
|
||||
) -> Option<(Option<hir::SelfParam>, Vec<hir::Param<'db>>)> {
|
||||
ctx.config.callable.as_ref()?;
|
||||
|
||||
// Don't add parentheses if the expected type is a function reference with the same signature.
|
||||
@ -21,7 +21,7 @@ use crate::{
|
|||
|
||||
pub(crate) fn render_variant_lit(
|
||||
ctx: RenderContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
local_name: Option<hir::Name>,
|
||||
variant: hir::Variant,
|
||||
path: Option<hir::ModPath>,
|
||||
|
|
@ -35,7 +35,7 @@ pub(crate) fn render_variant_lit(
|
|||
|
||||
pub(crate) fn render_struct_literal(
|
||||
ctx: RenderContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
strukt: hir::Struct,
|
||||
path: Option<hir::ModPath>,
|
||||
local_name: Option<hir::Name>,
|
||||
|
|
@ -49,7 +49,7 @@ pub(crate) fn render_struct_literal(
|
|||
|
||||
fn render(
|
||||
ctx @ RenderContext { completion, .. }: RenderContext<'_>,
|
||||
path_ctx: &PathCompletionCtx,
|
||||
path_ctx: &PathCompletionCtx<'_>,
|
||||
thing: Variant,
|
||||
name: hir::Name,
|
||||
path: Option<hir::ModPath>,
|
||||
|
|
@ -194,7 +194,7 @@ impl Variant {
|
|||
}
|
||||
}
|
||||
|
||||
fn ty(self, db: &dyn HirDatabase) -> hir::Type {
|
||||
fn ty(self, db: &dyn HirDatabase) -> hir::Type<'_> {
|
||||
match self {
|
||||
Variant::Struct(it) => it.ty(db),
|
||||
Variant::EnumVariant(it) => it.parent_enum(db).ty(db),
|
||||
@ -12,7 +12,7 @@ use crate::{
|
|||
|
||||
pub(crate) fn render_macro(
|
||||
ctx: RenderContext<'_>,
|
||||
PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx,
|
||||
PathCompletionCtx { kind, has_macro_bang, has_call_parens, .. }: &PathCompletionCtx<'_>,
|
||||
|
||||
name: hir::Name,
|
||||
macro_: hir::Macro,
|
||||
@ -46,7 +46,7 @@ pub(crate) fn render_struct_pat(
|
|||
pub(crate) fn render_variant_pat(
|
||||
ctx: RenderContext<'_>,
|
||||
pattern_ctx: &PatternContext,
|
||||
path_ctx: Option<&PathCompletionCtx>,
|
||||
path_ctx: Option<&PathCompletionCtx<'_>>,
|
||||
variant: hir::Variant,
|
||||
local_name: Option<Name>,
|
||||
path: Option<&hir::ModPath>,
|
||||
|
|
@ -109,7 +109,7 @@ fn build_completion(
|
|||
lookup: SmolStr,
|
||||
pat: String,
|
||||
def: impl HasDocs + Copy,
|
||||
adt_ty: hir::Type,
|
||||
adt_ty: hir::Type<'_>,
|
||||
// Missing in context of match statement completions
|
||||
is_variant_missing: bool,
|
||||
) -> CompletionItem {
|
||||

@@ -13,21 +13,21 @@ use syntax::{
use crate::RootDatabase;

#[derive(Debug)]
pub struct ActiveParameter {
pub ty: Type,
pub struct ActiveParameter<'db> {
pub ty: Type<'db>,
pub src: Option<InFile<Either<ast::SelfParam, ast::Param>>>,
}

impl ActiveParameter {
impl<'db> ActiveParameter<'db> {
/// Returns information about the call argument this token is part of.
pub fn at_token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Self> {
pub fn at_token(sema: &Semantics<'db, RootDatabase>, token: SyntaxToken) -> Option<Self> {
let (signature, active_parameter) = callable_for_token(sema, token)?;
Self::from_signature_and_active_parameter(sema, signature, active_parameter)
}

/// Returns information about the call argument this token is part of.
pub fn at_arg(
sema: &Semantics<'_, RootDatabase>,
sema: &'db Semantics<'db, RootDatabase>,
list: ast::ArgList,
at: TextSize,
) -> Option<Self> {

@@ -36,8 +36,8 @@ impl ActiveParameter {
}

fn from_signature_and_active_parameter(
sema: &Semantics<'_, RootDatabase>,
signature: hir::Callable,
sema: &Semantics<'db, RootDatabase>,
signature: hir::Callable<'db>,
active_parameter: Option<usize>,
) -> Option<Self> {
let idx = active_parameter?;

@@ -63,10 +63,10 @@ impl ActiveParameter {
}

/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable.
pub fn callable_for_token(
sema: &Semantics<'_, RootDatabase>,
pub fn callable_for_token<'db>(
sema: &Semantics<'db, RootDatabase>,
token: SyntaxToken,
) -> Option<(hir::Callable, Option<usize>)> {
) -> Option<(hir::Callable<'db>, Option<usize>)> {
let offset = token.text_range().start();
// Find the calling expression and its NameRef
let parent = token.parent()?;

@@ -79,21 +79,21 @@ pub fn callable_for_token(
}

/// Returns a [`hir::Callable`] this token is a part of and its argument index of said callable.
pub fn callable_for_arg_list(
sema: &Semantics<'_, RootDatabase>,
pub fn callable_for_arg_list<'db>(
sema: &Semantics<'db, RootDatabase>,
arg_list: ast::ArgList,
at: TextSize,
) -> Option<(hir::Callable, Option<usize>)> {
) -> Option<(hir::Callable<'db>, Option<usize>)> {
debug_assert!(arg_list.syntax().text_range().contains(at));
let callable = arg_list.syntax().parent().and_then(ast::CallableExpr::cast)?;
callable_for_node(sema, &callable, at)
}

pub fn callable_for_node(
sema: &Semantics<'_, RootDatabase>,
pub fn callable_for_node<'db>(
sema: &Semantics<'db, RootDatabase>,
calling_node: &ast::CallableExpr,
offset: TextSize,
) -> Option<(hir::Callable, Option<usize>)> {
) -> Option<(hir::Callable<'db>, Option<usize>)> {
let callable = match calling_node {
ast::CallableExpr::Call(call) => sema.resolve_expr_as_callable(&call.expr()?),
ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),

@@ -385,17 +385,17 @@ fn find_std_module(

// FIXME: IdentClass as a name no longer fits
#[derive(Debug)]
pub enum IdentClass {
NameClass(NameClass),
NameRefClass(NameRefClass),
pub enum IdentClass<'db> {
NameClass(NameClass<'db>),
NameRefClass(NameRefClass<'db>),
Operator(OperatorClass),
}

impl IdentClass {
impl<'db> IdentClass<'db> {
pub fn classify_node(
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
node: &SyntaxNode,
) -> Option<IdentClass> {
) -> Option<IdentClass<'db>> {
match_ast! {
match node {
ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),

@@ -418,23 +418,23 @@ impl IdentClass {
}

pub fn classify_token(
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
token: &SyntaxToken,
) -> Option<IdentClass> {
) -> Option<IdentClass<'db>> {
let parent = token.parent()?;
Self::classify_node(sema, &parent)
}

pub fn classify_lifetime(
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
) -> Option<IdentClass> {
) -> Option<IdentClass<'db>> {
NameRefClass::classify_lifetime(sema, lifetime)
.map(IdentClass::NameRefClass)
.or_else(|| NameClass::classify_lifetime(sema, lifetime).map(IdentClass::NameClass))
}

pub fn definitions(self) -> ArrayVec<(Definition, Option<GenericSubstitution>), 2> {
pub fn definitions(self) -> ArrayVec<(Definition, Option<GenericSubstitution<'db>>), 2> {
let mut res = ArrayVec::new();
match self {
IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {

@@ -518,7 +518,7 @@ impl IdentClass {
///
/// A model special case is `None` constant in pattern.
#[derive(Debug)]
pub enum NameClass {
pub enum NameClass<'db> {
Definition(Definition),
/// `None` in `if let None = Some(82) {}`.
/// Syntactically, it is a name, but semantically it is a reference.

@@ -528,11 +528,11 @@ pub enum NameClass {
PatFieldShorthand {
local_def: Local,
field_ref: Field,
adt_subst: GenericSubstitution,
adt_subst: GenericSubstitution<'db>,
},
}

impl NameClass {
impl<'db> NameClass<'db> {
/// `Definition` defined by this name.
pub fn defined(self) -> Option<Definition> {
let res = match self {

@@ -545,7 +545,10 @@ impl NameClass {
Some(res)
}

pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
pub fn classify(
sema: &Semantics<'db, RootDatabase>,
name: &ast::Name,
) -> Option<NameClass<'db>> {
let _p = tracing::info_span!("NameClass::classify").entered();

let parent = name.syntax().parent()?;

@@ -597,10 +600,10 @@ impl NameClass {
Some(definition)
}

fn classify_ident_pat(
sema: &Semantics<'_, RootDatabase>,
fn classify_ident_pat<'db>(
sema: &Semantics<'db, RootDatabase>,
ident_pat: ast::IdentPat,
) -> Option<NameClass> {
) -> Option<NameClass<'db>> {
if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) {
return Some(NameClass::ConstReference(Definition::from(def)));
}

@@ -638,9 +641,9 @@ impl NameClass {
}

pub fn classify_lifetime(
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
) -> Option<NameClass> {
) -> Option<NameClass<'db>> {
let _p = tracing::info_span!("NameClass::classify_lifetime", ?lifetime).entered();
let parent = lifetime.syntax().parent()?;

@@ -723,12 +726,12 @@ impl OperatorClass {
/// A model special case is field shorthand syntax, which uses a single
/// reference to point to two different defs.
#[derive(Debug)]
pub enum NameRefClass {
Definition(Definition, Option<GenericSubstitution>),
pub enum NameRefClass<'db> {
Definition(Definition, Option<GenericSubstitution<'db>>),
FieldShorthand {
local_ref: Local,
field_ref: Field,
adt_subst: GenericSubstitution,
adt_subst: GenericSubstitution<'db>,
},
/// The specific situation where we have an extern crate decl without a rename
/// Here we have both a declaration and a reference.

@@ -741,13 +744,13 @@ pub enum NameRefClass {
},
}

impl NameRefClass {
impl<'db> NameRefClass<'db> {
// Note: we don't have unit-tests for this rather important function.
// It is primarily exercised via goto definition tests in `ide`.
pub fn classify(
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
name_ref: &ast::NameRef,
) -> Option<NameRefClass> {
) -> Option<NameRefClass<'db>> {
let _p = tracing::info_span!("NameRefClass::classify", ?name_ref).entered();

let parent = name_ref.syntax().parent()?;

@@ -866,9 +869,9 @@ impl NameRefClass {
}

pub fn classify_lifetime(
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
lifetime: &ast::Lifetime,
) -> Option<NameRefClass> {
) -> Option<NameRefClass<'db>> {
let _p = tracing::info_span!("NameRefClass::classify_lifetime", ?lifetime).entered();
if lifetime.text() == "'static" {
return Some(NameRefClass::Definition(

@@ -25,26 +25,26 @@ use crate::{
/// * assists
/// * etc.
#[derive(Debug)]
pub enum ImportCandidate {
pub enum ImportCandidate<'db> {
/// A path, qualified (`std::collections::HashMap`) or not (`HashMap`).
Path(PathImportCandidate),
/// A trait associated function (with no self parameter) or an associated constant.
/// For 'test_mod::TestEnum::test_function', `ty` is the `test_mod::TestEnum` expression type
/// and `name` is the `test_function`
TraitAssocItem(TraitImportCandidate),
TraitAssocItem(TraitImportCandidate<'db>),
/// A trait method with self parameter.
/// For 'test_enum.test_method()', `ty` is the `test_enum` expression type
/// and `name` is the `test_method`
TraitMethod(TraitImportCandidate),
TraitMethod(TraitImportCandidate<'db>),
}

/// A trait import needed for a given associated item access.
/// For `some::path::SomeStruct::ASSOC_`, contains the
/// type of `some::path::SomeStruct` and `ASSOC_` as the item name.
#[derive(Debug)]
pub struct TraitImportCandidate {
pub struct TraitImportCandidate<'db> {
/// A type of the item that has the associated item accessed at.
pub receiver_ty: Type,
pub receiver_ty: Type<'db>,
/// The associated item name that the trait to import should contain.
pub assoc_item_name: NameToImport,
}

@@ -100,16 +100,16 @@ impl NameToImport {

/// A struct to find imports in the project, given a certain name (or its part) and the context.
#[derive(Debug)]
pub struct ImportAssets {
import_candidate: ImportCandidate,
pub struct ImportAssets<'db> {
import_candidate: ImportCandidate<'db>,
candidate_node: SyntaxNode,
module_with_candidate: Module,
}

impl ImportAssets {
impl<'db> ImportAssets<'db> {
pub fn for_method_call(
method_call: &ast::MethodCallExpr,
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
let candidate_node = method_call.syntax().clone();
Some(Self {

@@ -121,7 +121,7 @@ impl ImportAssets {

pub fn for_exact_path(
fully_qualified_path: &ast::Path,
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
let candidate_node = fully_qualified_path.syntax().clone();
if let Some(use_tree) = candidate_node.ancestors().find_map(ast::UseTree::cast) {

@@ -139,7 +139,7 @@ impl ImportAssets {
})
}

pub fn for_ident_pat(sema: &Semantics<'_, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
pub fn for_ident_pat(sema: &Semantics<'db, RootDatabase>, pat: &ast::IdentPat) -> Option<Self> {
if !pat.is_simple_ident() {
return None;
}

@@ -156,7 +156,7 @@ impl ImportAssets {
module_with_candidate: Module,
qualifier: Option<ast::Path>,
fuzzy_name: String,
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
candidate_node: SyntaxNode,
) -> Option<Self> {
Some(Self {

@@ -168,7 +168,7 @@ impl ImportAssets {

pub fn for_fuzzy_method_call(
module_with_method_call: Module,
receiver_ty: Type,
receiver_ty: Type<'db>,
fuzzy_method_name: String,
candidate_node: SyntaxNode,
) -> Option<Self> {

@@ -229,14 +229,14 @@ impl LocatedImport {
}
}

impl ImportAssets {
pub fn import_candidate(&self) -> &ImportCandidate {
impl<'db> ImportAssets<'db> {
pub fn import_candidate(&self) -> &ImportCandidate<'db> {
&self.import_candidate
}

pub fn search_for_imports(
&self,
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
cfg: ImportPathConfig,
prefix_kind: PrefixKind,
) -> impl Iterator<Item = LocatedImport> {

@@ -247,7 +247,7 @@ impl ImportAssets {
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
pub fn search_for_relative_paths(
&self,
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {
let _p = tracing::info_span!("ImportAssets::search_for_relative_paths").entered();

@@ -286,7 +286,7 @@ impl ImportAssets {

fn search_for(
&self,
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
prefixed: Option<PrefixKind>,
cfg: ImportPathConfig,
) -> impl Iterator<Item = LocatedImport> {

@@ -533,11 +533,11 @@ fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Optio
})
}

fn trait_applicable_items(
db: &RootDatabase,
fn trait_applicable_items<'db>(
db: &'db RootDatabase,
current_crate: Crate,
scope: &SemanticsScope<'_>,
trait_candidate: &TraitImportCandidate,
scope: &SemanticsScope<'db>,
trait_candidate: &TraitImportCandidate<'db>,
trait_assoc_item: bool,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
scope_filter: impl Fn(hir::Trait) -> bool,

@@ -709,9 +709,9 @@ fn get_mod_path(
}
}

impl ImportCandidate {
impl<'db> ImportCandidate<'db> {
fn for_method_call(
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
method_call: &ast::MethodCallExpr,
) -> Option<Self> {
match sema.resolve_method_call(method_call) {

@@ -725,7 +725,7 @@ impl ImportCandidate {
}
}

fn for_regular_path(sema: &Semantics<'_, RootDatabase>, path: &ast::Path) -> Option<Self> {
fn for_regular_path(sema: &Semantics<'db, RootDatabase>, path: &ast::Path) -> Option<Self> {
if sema.resolve_path(path).is_some() {
return None;
}

@@ -736,7 +736,7 @@ impl ImportCandidate {
)
}

fn for_name(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<Self> {
fn for_name(sema: &Semantics<'db, RootDatabase>, name: &ast::Name) -> Option<Self> {
if sema
.scope(name.syntax())?
.speculative_resolve(&make::ext::ident_path(&name.text()))

@@ -753,17 +753,17 @@ impl ImportCandidate {
fn for_fuzzy_path(
qualifier: Option<ast::Path>,
fuzzy_name: String,
sema: &Semantics<'_, RootDatabase>,
sema: &Semantics<'db, RootDatabase>,
) -> Option<Self> {
path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name))
}
}

fn path_import_candidate(
sema: &Semantics<'_, RootDatabase>,
fn path_import_candidate<'db>(
sema: &Semantics<'db, RootDatabase>,
qualifier: Option<ast::Path>,
name: NameToImport,
) -> Option<ImportCandidate> {
) -> Option<ImportCandidate<'db>> {
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
Some(PathResolution::Def(ModuleDef::BuiltinType(_))) | None => {

@@ -429,7 +429,7 @@ pub struct FindUsages<'a> {
/// The container of our definition should it be an assoc item
assoc_item_container: Option<hir::AssocItemContainer>,
/// whether to search for the `Self` type of the definition
include_self_kw_refs: Option<hir::Type>,
include_self_kw_refs: Option<hir::Type<'a>>,
/// whether to search for the `self` module
search_self_mod: bool,
}

@@ -1087,12 +1087,12 @@ impl<'a> FindUsages<'a> {

fn found_self_ty_name_ref(
&self,
self_ty: &hir::Type,
self_ty: &hir::Type<'_>,
name_ref: &ast::NameRef,
sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
) -> bool {
// See https://github.com/rust-lang/rust-analyzer/pull/15864/files/e0276dc5ddc38c65240edb408522bb869f15afb4#r1389848845
let ty_eq = |ty: hir::Type| match (ty.as_adt(), self_ty.as_adt()) {
let ty_eq = |ty: hir::Type<'_>| match (ty.as_adt(), self_ty.as_adt()) {
(Some(ty), Some(self_ty)) => ty == self_ty,
(None, None) => ty == *self_ty,
_ => false,

@@ -1315,7 +1315,7 @@ impl<'a> FindUsages<'a> {
}
}

fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option<hir::Type> {
fn def_to_ty<'db>(sema: &Semantics<'db, RootDatabase>, def: &Definition) -> Option<hir::Type<'db>> {
match def {
Definition::Adt(adt) => Some(adt.ty(sema.db)),
Definition::TypeAlias(it) => Some(it.ty(sema.db)),

@@ -151,10 +151,10 @@ impl NameGenerator {
/// - If `ty` is an `impl Trait`, it will suggest the name of the first trait.
///
/// If the suggested name conflicts with reserved keywords, it will return `None`.
pub fn for_type(
pub fn for_type<'db>(
&mut self,
ty: &hir::Type,
db: &RootDatabase,
ty: &hir::Type<'db>,
db: &'db RootDatabase,
edition: Edition,
) -> Option<SmolStr> {
let name = name_of_type(ty, db, edition)?;

@@ -373,7 +373,11 @@ fn from_type(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<Smo
name_of_type(&ty, sema.db, edition)
}

fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<SmolStr> {
fn name_of_type<'db>(
ty: &hir::Type<'db>,
db: &'db RootDatabase,
edition: Edition,
) -> Option<SmolStr> {
let name = if let Some(adt) = ty.as_adt() {
let name = adt.name(db).display(db, edition).to_string();

@@ -407,7 +411,11 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option<S
normalize(&name)
}

fn sequence_name(inner_ty: Option<&hir::Type>, db: &RootDatabase, edition: Edition) -> SmolStr {
fn sequence_name<'db>(
inner_ty: Option<&hir::Type<'db>>,
db: &'db RootDatabase,
edition: Edition,
) -> SmolStr {
let items_str = SmolStr::new_static("items");
let Some(inner_ty) = inner_ty else {
return items_str;

@@ -20,7 +20,7 @@ impl TryEnum {
const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result];

/// Returns `Some(..)` if the provided type is an enum that implements `std::ops::Try`.
pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type) -> Option<TryEnum> {
pub fn from_ty(sema: &Semantics<'_, RootDatabase>, ty: &hir::Type<'_>) -> Option<TryEnum> {
let enum_ = match ty.as_adt() {
Some(hir::Adt::Enum(it)) => it,
_ => return None,

@@ -11,7 +11,7 @@ use syntax::{
pub fn use_trivial_constructor(
db: &crate::RootDatabase,
path: Path,
ty: &hir::Type,
ty: &hir::Type<'_>,
edition: Edition,
) -> Option<Expr> {
match ty.as_adt() {

@@ -7,7 +7,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// This diagnostic is triggered if a call is made on something that is not callable.
pub(crate) fn expected_function(
ctx: &DiagnosticsContext<'_>,
d: &hir::ExpectedFunction,
d: &hir::ExpectedFunction<'_>,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,

@@ -18,7 +18,7 @@ macro_rules! format_ty {
// Diagnostic: invalid-cast
//
// This diagnostic is triggered if the code contains an illegal cast
pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast) -> Diagnostic {
pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast<'_>) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
let (code, message) = match d.error {
CastError::CastToBool => (

@@ -106,7 +106,10 @@ pub(crate) fn invalid_cast(ctx: &DiagnosticsContext<'_>, d: &hir::InvalidCast) -
// Diagnostic: cast-to-unsized
//
// This diagnostic is triggered when casting to an unsized type
pub(crate) fn cast_to_unsized(ctx: &DiagnosticsContext<'_>, d: &hir::CastToUnsized) -> Diagnostic {
pub(crate) fn cast_to_unsized(
ctx: &DiagnosticsContext<'_>,
d: &hir::CastToUnsized<'_>,
) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
Diagnostic::new(
DiagnosticCode::RustcHardError("E0620"),

@@ -106,7 +106,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
}
});

let generate_fill_expr = |ty: &Type| match ctx.config.expr_fill_default {
let generate_fill_expr = |ty: &Type<'_>| match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => {

@@ -180,7 +180,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
}

fn make_ty(
ty: &hir::Type,
ty: &hir::Type<'_>,
db: &dyn HirDatabase,
module: hir::Module,
edition: Edition,

@@ -198,7 +198,7 @@ fn make_ty(
fn get_default_constructor(
ctx: &DiagnosticsContext<'_>,
d: &hir::MissingFields,
ty: &Type,
ty: &Type<'_>,
) -> Option<ast::Expr> {
if let Some(builtin_ty) = ty.as_builtin() {
if builtin_ty.is_int() || builtin_ty.is_uint() {

@@ -4,7 +4,10 @@ use hir::HirDisplay;
// Diagnostic: moved-out-of-ref
//
// This diagnostic is triggered on moving non copy things out of references.
pub(crate) fn moved_out_of_ref(ctx: &DiagnosticsContext<'_>, d: &hir::MovedOutOfRef) -> Diagnostic {
pub(crate) fn moved_out_of_ref(
ctx: &DiagnosticsContext<'_>,
d: &hir::MovedOutOfRef<'_>,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0507"),

@@ -20,7 +20,7 @@ use crate::{Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_dis
//
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Diagnostic {
let display_range = adjusted_display_range(ctx, d.expr_or_pat, &|node| {
let Either::Left(expr) = node else { return None };
let salient_token_range = match expr {

@@ -59,7 +59,7 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
diag
}

fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch<'_>) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();

if let Some(expr_ptr) = d.expr_or_pat.value.cast::<ast::Expr>() {

@@ -76,7 +76,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assi

fn add_reference(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {

@@ -98,7 +98,7 @@ fn add_reference(

fn add_missing_ok_or_some(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {

@@ -188,7 +188,7 @@ fn add_missing_ok_or_some(

fn remove_unnecessary_wrapper(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {

@@ -271,7 +271,7 @@ fn remove_unnecessary_wrapper(

fn remove_semicolon(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {

@@ -301,7 +301,7 @@ fn remove_semicolon(

fn str_ref_to_owned(
ctx: &DiagnosticsContext<'_>,
d: &hir::TypeMismatch,
d: &hir::TypeMismatch<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {

@@ -20,7 +20,7 @@ use syntax::AstNode;
// Diagnostic: typed-hole
//
// This diagnostic is triggered when an underscore expression is used in an invalid position.
pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Diagnostic {
pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole<'_>) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.expr.map(|it| it.into()));
let (message, fixes) = if d.expected.is_unknown() {
("`_` expressions may only appear on the left-hand side of an assignment".to_owned(), None)

@@ -41,7 +41,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
.with_fixes(fixes)
}

fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole<'_>) -> Option<Vec<Assist>> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.expr.file_id);
let (original_range, _) =

@@ -61,7 +61,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>
};
let paths = term_search(&term_search_ctx);

let mut formatter = |_: &hir::Type| String::from("_");
let mut formatter = |_: &hir::Type<'_>| String::from("_");

let assists: Vec<Assist> = d
.expected

@@ -25,7 +25,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_ran
// This diagnostic is triggered if a field does not exist on a given type.
pub(crate) fn unresolved_field(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedField,
d: &hir::UnresolvedField<'_>,
) -> Diagnostic {
let method_suffix = if d.method_with_same_name_exists {
", but a method with a similar name exists"

@@ -54,7 +54,7 @@ pub(crate) fn unresolved_field(
.with_fixes(fixes(ctx, d))
}

fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<Assist>> {
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField<'_>) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
if d.method_with_same_name_exists {
fixes.extend(method_fix(ctx, &d.expr));

@@ -64,7 +64,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<A
}

// FIXME: Add Snippet Support
fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Assist> {
fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField<'_>) -> Option<Assist> {
// Get the FileRange of the invalid field access
let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
let expr = d.expr.value.to_node(&root).left()?;

@@ -18,7 +18,7 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, adjusted_display_ran
// This diagnostic is triggered if a method does not exist on a given type.
pub(crate) fn unresolved_method(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedMethodCall,
d: &hir::UnresolvedMethodCall<'_>,
) -> Diagnostic {
let suffix = if d.field_with_same_name.is_some() {
", but a field with a similar name exists"

@@ -49,7 +49,7 @@ pub(crate) fn unresolved_method(
.with_fixes(fixes(ctx, d))
}

fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Vec<Assist>> {
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall<'_>) -> Option<Vec<Assist>> {
let field_fix = if let Some(ty) = &d.field_with_same_name {
field_fix(ctx, d, ty)
} else {

@@ -72,8 +72,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<

fn field_fix(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedMethodCall,
ty: &hir::Type,
d: &hir::UnresolvedMethodCall<'_>,
ty: &hir::Type<'_>,
) -> Option<Assist> {
if !ty.impls_fnonce(ctx.sema.db) {
return None;

@@ -107,7 +107,10 @@ fn field_fix(
})
}

fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> {
fn assoc_func_fix(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedMethodCall<'_>,
) -> Option<Assist> {
if let Some(f) = d.assoc_func_with_same_name {
let db = ctx.sema.db;

@@ -110,7 +110,7 @@ pub struct SsrMatches {
pub struct MatchFinder<'db> {
/// Our source of information about the user's code.
sema: Semantics<'db, ide_db::RootDatabase>,
rules: Vec<ResolvedRule>,
rules: Vec<ResolvedRule<'db>>,
resolution_scope: resolving::ResolutionScope<'db>,
restrict_ranges: Vec<ide_db::FileRange>,
}

@@ -84,12 +84,12 @@ pub(crate) struct MatchFailed {
/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
/// the match, if it does. Since we only do matching in this module and searching is done by the
/// parent module, we don't populate nested matches.
pub(crate) fn get_match(
pub(crate) fn get_match<'db>(
debug_active: bool,
rule: &ResolvedRule,
rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
sema: &Semantics<'_, ide_db::RootDatabase>,
sema: &Semantics<'db, ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
record_match_fails_reasons_scope(debug_active, || {
Matcher::try_match(rule, code, restrict_range, sema)

@@ -102,7 +102,7 @@ struct Matcher<'db, 'sema> {
/// If any placeholders come from anywhere outside of this range, then the match will be
/// rejected.
restrict_range: Option<FileRange>,
rule: &'sema ResolvedRule,
rule: &'sema ResolvedRule<'db>,
}

/// Which phase of matching we're currently performing. We do two phases because most attempted

@@ -117,7 +117,7 @@ enum Phase<'a> {

impl<'db, 'sema> Matcher<'db, 'sema> {
fn try_match(
rule: &ResolvedRule,
rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
sema: &'sema Semantics<'db, ide_db::RootDatabase>,

@@ -535,7 +535,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
fn attempt_match_ufcs_to_method_call(
&self,
phase: &mut Phase<'_>,
pattern_ufcs: &UfcsCallInfo,
pattern_ufcs: &UfcsCallInfo<'db>,
code: &ast::MethodCallExpr,
) -> Result<(), MatchFailed> {
use ast::HasArgList;

@@ -597,7 +597,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
fn attempt_match_ufcs_to_ufcs(
&self,
phase: &mut Phase<'_>,
pattern_ufcs: &UfcsCallInfo,
pattern_ufcs: &UfcsCallInfo<'db>,
code: &ast::CallExpr,
) -> Result<(), MatchFailed> {
use ast::HasArgList;

@@ -615,7 +615,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
/// times. Returns the number of times it needed to be dereferenced.
fn check_expr_type(
&self,
pattern_type: &hir::Type,
pattern_type: &hir::Type<'db>,
expr: &ast::Expr,
) -> Result<usize, MatchFailed> {
use hir::HirDisplay;

@@ -656,10 +656,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
}

impl Match {
fn render_template_paths(
fn render_template_paths<'db>(
&mut self,
template: &ResolvedPattern,
sema: &Semantics<'_, ide_db::RootDatabase>,
template: &ResolvedPattern<'db>,
sema: &Semantics<'db, ide_db::RootDatabase>,
) -> Result<(), MatchFailed> {
let module = sema
.scope(&self.matched_node)

@@ -14,21 +14,21 @@ use crate::{Match, SsrMatches, fragments, resolving::ResolvedRule};
/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
/// template. Placeholders in the template will have been substituted with whatever they matched to
/// in the original code.
pub(crate) fn matches_to_edit(
db: &dyn hir::db::ExpandDatabase,
pub(crate) fn matches_to_edit<'db>(
db: &'db dyn hir::db::ExpandDatabase,
matches: &SsrMatches,
file_src: &str,
rules: &[ResolvedRule],
rules: &[ResolvedRule<'db>],
) -> TextEdit {
matches_to_edit_at_offset(db, matches, file_src, 0.into(), rules)
}

fn matches_to_edit_at_offset(
db: &dyn hir::db::ExpandDatabase,
fn matches_to_edit_at_offset<'db>(
db: &'db dyn hir::db::ExpandDatabase,
matches: &SsrMatches,
file_src: &str,
relative_start: TextSize,
rules: &[ResolvedRule],
rules: &[ResolvedRule<'db>],
) -> TextEdit {
let mut edit_builder = TextEdit::builder();
for m in &matches.matches {

@@ -40,12 +40,12 @@ fn matches_to_edit_at_offset(
edit_builder.finish()
}

struct ReplacementRenderer<'a> {
db: &'a dyn hir::db::ExpandDatabase,
struct ReplacementRenderer<'a, 'db> {
db: &'db dyn hir::db::ExpandDatabase,
match_info: &'a Match,
file_src: &'a str,
rules: &'a [ResolvedRule],
rule: &'a ResolvedRule,
rules: &'a [ResolvedRule<'db>],
rule: &'a ResolvedRule<'db>,
out: String,
// Map from a range within `out` to a token in `template` that represents a placeholder. This is
// used to validate that the generated source code doesn't split any placeholder expansions (see

@@ -58,11 +58,11 @@ struct ReplacementRenderer<'a> {
edition: Edition,
}

fn render_replace(
db: &dyn hir::db::ExpandDatabase,
fn render_replace<'db>(
db: &'db dyn hir::db::ExpandDatabase,
match_info: &Match,
file_src: &str,
rules: &[ResolvedRule],
rules: &[ResolvedRule<'db>],
edition: Edition,
) -> String {
let rule = &rules[match_info.rule_index];

@@ -89,7 +89,7 @@ fn render_replace(
renderer.out
}

impl ReplacementRenderer<'_> {
impl<'db> ReplacementRenderer<'_, 'db> {
fn render_node_children(&mut self, node: &SyntaxNode) {
for node_or_token in node.children_with_tokens() {
self.render_node_or_token(&node_or_token);

@@ -15,18 +15,18 @@ pub(crate) struct ResolutionScope<'db> {
node: SyntaxNode,
}

pub(crate) struct ResolvedRule {
pub(crate) pattern: ResolvedPattern,
pub(crate) template: Option<ResolvedPattern>,
pub(crate) struct ResolvedRule<'db> {
pub(crate) pattern: ResolvedPattern<'db>,
pub(crate) template: Option<ResolvedPattern<'db>>,
pub(crate) index: usize,
}

pub(crate) struct ResolvedPattern {
pub(crate) struct ResolvedPattern<'db> {
pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
pub(crate) node: SyntaxNode,
// Paths in `node` that we've resolved.
pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo<'db>>,
pub(crate) contains_self: bool,
}

@@ -36,18 +36,18 @@ pub(crate) struct ResolvedPath {
pub(crate) depth: u32,
}

pub(crate) struct UfcsCallInfo {
pub(crate) struct UfcsCallInfo<'db> {
pub(crate) call_expr: ast::CallExpr,
pub(crate) function: hir::Function,
pub(crate) qualifier_type: Option<hir::Type>,
pub(crate) qualifier_type: Option<hir::Type<'db>>,
}

impl ResolvedRule {
impl<'db> ResolvedRule<'db> {
pub(crate) fn new(
rule: parsing::ParsedRule,
resolution_scope: &ResolutionScope<'_>,
resolution_scope: &ResolutionScope<'db>,
index: usize,
) -> Result<ResolvedRule, SsrError> {
) -> Result<ResolvedRule<'db>, SsrError> {
let resolver =
Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
let resolved_template = match rule.template {

@@ -74,8 +74,8 @@ struct Resolver<'a, 'db> {
placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}

impl Resolver<'_, '_> {
fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
impl<'db> Resolver<'_, 'db> {
fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern<'db>, SsrError> {
use syntax::ast::AstNode;
use syntax::{SyntaxElement, T};
let mut resolved_paths = FxHashMap::default();

@@ -250,7 +250,7 @@ impl<'db> ResolutionScope<'db> {
}
}

fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type<'db>> {
use syntax::ast::AstNode;
if let Some(path) = ast::Path::cast(path.clone()) {
if let Some(qualifier) = path.qualifier() {

@@ -21,13 +21,13 @@ pub(crate) struct UsageCache {
usages: Vec<(Definition, UsageSearchResult)>,
}

impl MatchFinder<'_> {
impl<'db> MatchFinder<'db> {
/// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
/// replacement impossible, so further processing is required in order to properly nest matches
/// and remove overlapping matches. This is done in the `nesting` module.
pub(crate) fn find_matches_for_rule(
&self,
rule: &ResolvedRule,
rule: &ResolvedRule<'db>,
usage_cache: &mut UsageCache,
matches_out: &mut Vec<Match>,
) {

@@ -49,8 +49,8 @@ impl MatchFinder<'_> {

fn find_matches_for_pattern_tree(
&self,
rule: &ResolvedRule,
pattern: &ResolvedPattern,
rule: &ResolvedRule<'db>,
pattern: &ResolvedPattern<'db>,
usage_cache: &mut UsageCache,
matches_out: &mut Vec<Match>,
) {

@@ -144,7 +144,7 @@ impl MatchFinder<'_> {
SearchScope::files(&files)
}

fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
fn slow_scan(&self, rule: &ResolvedRule<'db>, matches_out: &mut Vec<Match>) {
self.search_files_do(|file_id| {
let file = self.sema.parse_guess_edition(file_id);
let code = file.syntax();

@@ -177,7 +177,7 @@ impl MatchFinder<'_> {
fn slow_scan_node(
&self,
code: &SyntaxNode,
rule: &ResolvedRule,
rule: &ResolvedRule<'db>,
restrict_range: &Option<FileRange>,
matches_out: &mut Vec<Match>,
) {

@@ -206,7 +206,7 @@ impl MatchFinder<'_> {

fn try_add_match(
&self,
rule: &ResolvedRule,
rule: &ResolvedRule<'db>,
code: &SyntaxNode,
restrict_range: &Option<FileRange>,
matches_out: &mut Vec<Match>,

@@ -274,7 +274,7 @@ impl UsageCache {
/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
/// longest as this is hopefully more likely to be less common, making it faster to find.
fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
fn pick_path_for_usages<'a>(pattern: &'a ResolvedPattern<'_>) -> Option<&'a ResolvedPath> {
// FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
// private to the current module, then we definitely would want to pick them over say a path
// from std. Possibly we should go further than this and intersect the search scopes for all

@@ -83,7 +83,7 @@ pub(crate) fn goto_implementation(
Some(RangeInfo { range, info: navs })
}

fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type<'_>) -> Vec<NavigationTarget> {
Impl::all_for_type(sema.db, ty)
.into_iter()
.filter_map(|imp| imp.try_to_nav(sema.db))

@@ -38,7 +38,7 @@ pub(crate) fn goto_type_definition(
}
}
};
let mut process_ty = |ty: hir::Type| {
let mut process_ty = |ty: hir::Type<'_>| {
// collect from each `ty` into the `res` result vec
let ty = ty.strip_references();
ty.walk(db, |t| {

@@ -426,7 +426,7 @@ pub(crate) fn hover_for_definition(
sema: &Semantics<'_, RootDatabase>,
file_id: FileId,
def: Definition,
subst: Option<GenericSubstitution>,
subst: Option<GenericSubstitution<'_>>,
scope_node: &SyntaxNode,
macro_arm: Option<u32>,
render_extras: bool,

@@ -483,10 +483,10 @@ pub(crate) fn hover_for_definition(
}
}

fn notable_traits(
db: &RootDatabase,
ty: &hir::Type,
) -> Vec<(hir::Trait, Vec<(Option<hir::Type>, hir::Name)>)> {
fn notable_traits<'db>(
db: &'db RootDatabase,
ty: &hir::Type<'db>,
) -> Vec<(hir::Trait, Vec<(Option<hir::Type<'db>>, hir::Name)>)> {
db.notable_traits_in_deps(ty.krate(db).into())
.iter()
.flat_map(|it| &**it)

@@ -567,8 +567,8 @@ fn runnable_action(
fn goto_type_action_for_def(
db: &RootDatabase,
def: Definition,
notable_traits: &[(hir::Trait, Vec<(Option<hir::Type>, hir::Name)>)],
subst_types: Option<Vec<(hir::Symbol, hir::Type)>>,
notable_traits: &[(hir::Trait, Vec<(Option<hir::Type<'_>>, hir::Name)>)],
subst_types: Option<Vec<(hir::Symbol, hir::Type<'_>)>>,
edition: Edition,
) -> Option<HoverAction> {
let mut targets: Vec<hir::ModuleDef> = Vec::new();

@@ -622,7 +622,7 @@ fn goto_type_action_for_def(

fn walk_and_push_ty(
db: &RootDatabase,
ty: &hir::Type,
ty: &hir::Type<'_>,
push_new_def: &mut dyn FnMut(hir::ModuleDef),
) {
ty.walk(db, |t| {

@@ -476,10 +476,10 @@ pub(super) fn definition(
db: &RootDatabase,
def: Definition,
famous_defs: Option<&FamousDefs<'_, '_>>,
notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
notable_traits: &[(Trait, Vec<(Option<Type<'_>>, Name)>)],
macro_arm: Option<u32>,
render_extras: bool,
subst_types: Option<&Vec<(Symbol, Type)>>,
subst_types: Option<&Vec<(Symbol, Type<'_>)>>,
config: &HoverConfig,
edition: Edition,
display_target: DisplayTarget,

@@ -938,7 +938,7 @@ pub(super) fn literal(

fn render_notable_trait(
db: &RootDatabase,
notable_traits: &[(Trait, Vec<(Option<Type>, Name)>)],
notable_traits: &[(Trait, Vec<(Option<Type<'_>>, Name)>)],
edition: Edition,
display_target: DisplayTarget,
) -> Option<String> {

@@ -979,7 +979,7 @@ fn render_notable_trait(
fn type_info(
sema: &Semantics<'_, RootDatabase>,
config: &HoverConfig,
ty: TypeInfo,
ty: TypeInfo<'_>,
edition: Edition,
display_target: DisplayTarget,
) -> Option<HoverResult> {

@@ -1038,7 +1038,7 @@ fn type_info(
fn closure_ty(
sema: &Semantics<'_, RootDatabase>,
config: &HoverConfig,
TypeInfo { original, adjusted }: &TypeInfo,
TypeInfo { original, adjusted }: &TypeInfo<'_>,
edition: Edition,
display_target: DisplayTarget,
) -> Option<HoverResult> {

@@ -722,14 +722,14 @@ impl InlayHintLabelBuilder<'_> {
fn label_of_ty(
famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
ty: &hir::Type,
ty: &hir::Type<'_>,
display_target: DisplayTarget,
) -> Option<InlayHintLabel> {
fn rec(
sema: &Semantics<'_, RootDatabase>,
famous_defs: &FamousDefs<'_, '_>,
mut max_length: Option<usize>,
ty: &hir::Type,
ty: &hir::Type<'_>,
label_builder: &mut InlayHintLabelBuilder<'_>,
config: &InlayHintsConfig,
display_target: DisplayTarget,

@@ -788,11 +788,11 @@ fn label_of_ty(
}

/// Checks if the type is an Iterator from std::iter and returns the iterator trait and the item type of the concrete iterator.
fn hint_iterator(
sema: &Semantics<'_, RootDatabase>,
famous_defs: &FamousDefs<'_, '_>,
ty: &hir::Type,
) -> Option<(hir::Trait, hir::TypeAlias, hir::Type)> {
fn hint_iterator<'db>(
sema: &Semantics<'db, RootDatabase>,
famous_defs: &FamousDefs<'_, 'db>,
ty: &hir::Type<'db>,
) -> Option<(hir::Trait, hir::TypeAlias, hir::Type<'db>)> {
let db = sema.db;
let strukt = ty.strip_references().as_adt()?;
let krate = strukt.module(db).krate();

@@ -826,7 +826,7 @@ fn ty_to_text_edit(
sema: &Semantics<'_, RootDatabase>,
config: &InlayHintsConfig,
node_for_hint: &SyntaxNode,
ty: &hir::Type,
ty: &hir::Type<'_>,
offset_to_insert_ty: TextSize,
additional_edits: &dyn Fn(&mut TextEditBuilder),
prefix: impl Into<String>,

@@ -87,10 +87,10 @@ pub(super) fn hints(
Some(())
}

fn get_callable(
sema: &Semantics<'_, RootDatabase>,
fn get_callable<'db>(
sema: &Semantics<'db, RootDatabase>,
expr: &ast::Expr,
) -> Option<(hir::Callable, ast::ArgList)> {
) -> Option<(hir::Callable<'db>, ast::ArgList)> {
match expr {
ast::Expr::CallExpr(expr) => {
let descended = sema.descend_node_into_attributes(expr.clone()).pop();

@@ -278,7 +278,7 @@ fn signature_help_for_call(
}
res.signature.push(')');

let mut render = |ret_type: hir::Type| {
let mut render = |ret_type: hir::Type<'_>| {
if !ret_type.is_unit() {
format_to!(res.signature, " -> {}", ret_type.display(db, display_target));
}

@@ -597,11 +597,11 @@ fn signature_help_for_tuple_expr(
Some(res)
}

fn signature_help_for_record_(
sema: &Semantics<'_, RootDatabase>,
fn signature_help_for_record_<'db>(
sema: &Semantics<'db, RootDatabase>,
field_list_children: SyntaxElementChildren,
path: &ast::Path,
fields2: impl Iterator<Item = (hir::Field, hir::Type)>,
fields2: impl Iterator<Item = (hir::Field, hir::Type<'db>)>,
token: SyntaxToken,
edition: Edition,
display_target: DisplayTarget,

@@ -689,13 +689,13 @@ fn signature_help_for_record_(
Some(res)
}

fn signature_help_for_tuple_pat_ish(
db: &RootDatabase,
fn signature_help_for_tuple_pat_ish<'db>(
db: &'db RootDatabase,
mut res: SignatureHelp,
pat: &SyntaxNode,
token: SyntaxToken,
mut field_pats: AstChildren<ast::Pat>,
fields: impl ExactSizeIterator<Item = hir::Type>,
fields: impl ExactSizeIterator<Item = hir::Type<'db>>,
display_target: DisplayTarget,
) -> SignatureHelp {
let rest_pat = field_pats.find(|it| matches!(it, ast::Pat::RestPat(_)));

@@ -107,7 +107,7 @@ pub(crate) fn view_memory_layout(
fn read_layout(
nodes: &mut Vec<MemoryLayoutNode>,
db: &RootDatabase,
ty: &Type,
ty: &Type<'_>,
layout: &Layout,
parent_idx: usize,
display_target: DisplayTarget,

@@ -74,8 +74,8 @@ impl ToTokens for TrackedQuery {
quote! {
#sig {
#annotation
fn #shim(
db: &dyn #trait_name,
fn #shim<'db>(
db: &'db dyn #trait_name,
_input: #input_struct_name,
#(#pat_and_tys),*
) #ret

@@ -88,8 +88,8 @@ impl ToTokens for TrackedQuery {
quote! {
#sig {
#annotation
fn #shim(
db: &dyn #trait_name,
fn #shim<'db>(
db: &'db dyn #trait_name,
#(#pat_and_tys),*
) #ret
#invoke_block

@@ -532,7 +532,7 @@ impl flags::AnalysisStats {
}

let todo = syntax::ast::make::ext::expr_todo().to_string();
let mut formatter = |_: &hir::Type| todo.clone();
let mut formatter = |_: &hir::Type<'_>| todo.clone();
let mut syntax_hit_found = false;
for term in found_terms {
let generated = term

@@ -13,6 +13,7 @@ pub mod panic_context;
pub mod process;
pub mod rand;
pub mod thread;
pub mod variance;

pub use itertools;

crates/stdx/src/variance.rs | 270 additions (new file)

@@ -0,0 +1,270 @@
//! This is a copy of [`std::marker::variance`].

use std::any::type_name;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;

macro_rules! first_token {
($first:tt $($rest:tt)*) => {
$first
};
}
macro_rules! phantom_type {
($(
$(#[$attr:meta])*
pub struct $name:ident <$t:ident> ($($inner:tt)*);
)*) => {$(
$(#[$attr])*
pub struct $name<$t>($($inner)*) where T: ?Sized;

impl<T> $name<T>
where T: ?Sized
{
/// Constructs a new instance of the variance marker.
pub const fn new() -> Self {
Self(PhantomData)
}
}

impl<T> self::sealed::Sealed for $name<T> where T: ?Sized {
const VALUE: Self = Self::new();
}

impl<T> Variance for $name<T> where T: ?Sized {}

impl<T> Default for $name<T>
where T: ?Sized
{
fn default() -> Self {
Self(PhantomData)
}
}

impl<T> fmt::Debug for $name<T>
where T: ?Sized
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}<{}>", stringify!($name), type_name::<T>())
}
}

impl<T> Clone for $name<T>
where T: ?Sized
{
fn clone(&self) -> Self {
*self
}
}

impl<T> Copy for $name<T> where T: ?Sized {}

impl<T> PartialEq for $name<T>
where T: ?Sized
{
fn eq(&self, _: &Self) -> bool {
true
}
}

impl<T> Eq for $name<T> where T: ?Sized {}

#[allow(clippy::non_canonical_partial_ord_impl)]
impl<T> PartialOrd for $name<T>
where T: ?Sized
{
fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
Some(Ordering::Equal)
}
}

impl<T> Ord for $name<T>
where T: ?Sized
{
fn cmp(&self, _: &Self) -> Ordering {
Ordering::Equal
}
}

impl<T> Hash for $name<T>
where T: ?Sized
{
fn hash<H: Hasher>(&self, _: &mut H) {}
}
)*};
}

macro_rules! phantom_lifetime {
($(
$(#[$attr:meta])*
pub struct $name:ident <$lt:lifetime> ($($inner:tt)*);
)*) => {$(
$(#[$attr])*
#[derive(Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct $name<$lt>($($inner)*);

impl $name<'_> {
/// Constructs a new instance of the variance marker.
pub const fn new() -> Self {
Self(first_token!($($inner)*)(PhantomData))
}
}

impl self::sealed::Sealed for $name<'_> {
const VALUE: Self = Self::new();
}

impl Variance for $name<'_> {}

impl fmt::Debug for $name<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", stringify!($name))
}
}
)*};
}

phantom_lifetime! {
/// Zero-sized type used to mark a lifetime as covariant.
///
/// Covariant lifetimes must live at least as long as declared. See [the reference][1] for more
/// information.
///
/// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
///
/// ## Layout
///
/// For all `'a`, the following are guaranteed:
/// * `size_of::<PhantomCovariantLifetime<'a>>() == 0`
/// * `align_of::<PhantomCovariantLifetime<'a>>() == 1`
pub struct PhantomCovariantLifetime<'a>(PhantomCovariant<&'a ()>);
/// Zero-sized type used to mark a lifetime as contravariant.
///
/// Contravariant lifetimes must live at most as long as declared. See [the reference][1] for
/// more information.
///
/// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
///
/// ## Layout
///
/// For all `'a`, the following are guaranteed:
/// * `size_of::<PhantomContravariantLifetime<'a>>() == 0`
/// * `align_of::<PhantomContravariantLifetime<'a>>() == 1`
pub struct PhantomContravariantLifetime<'a>(PhantomContravariant<&'a ()>);
/// Zero-sized type used to mark a lifetime as invariant.
///
/// Invariant lifetimes must be live for the exact length declared, neither shorter nor longer.
/// See [the reference][1] for more information.
///
/// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
///
/// ## Layout
///
/// For all `'a`, the following are guaranteed:
/// * `size_of::<PhantomInvariantLifetime<'a>>() == 0`
/// * `align_of::<PhantomInvariantLifetime<'a>>() == 1`
pub struct PhantomInvariantLifetime<'a>(PhantomInvariant<&'a ()>);

}

phantom_type! {
/// Zero-sized type used to mark a type parameter as covariant.
///
/// Types used as part of the return value from a function are covariant. If the type is _also_
/// passed as a parameter then it is [invariant][PhantomInvariant]. See [the reference][1] for
/// more information.
///
/// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
///
/// ## Layout
///
/// For all `T`, the following are guaranteed:
/// * `size_of::<PhantomCovariant<T>>() == 0`
/// * `align_of::<PhantomCovariant<T>>() == 1`
pub struct PhantomCovariant<T>(PhantomData<fn() -> T>);
/// Zero-sized type used to mark a type parameter as contravariant.
///
/// Types passed as arguments to a function are contravariant. If the type is _also_ part of the
/// return value from a function then it is [invariant][PhantomInvariant]. See [the
/// reference][1] for more information.
///
/// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
///
/// ## Layout
///
/// For all `T`, the following are guaranteed:
/// * `size_of::<PhantomContravariant<T>>() == 0`
/// * `align_of::<PhantomContravariant<T>>() == 1`
pub struct PhantomContravariant<T>(PhantomData<fn(T)>);
/// Zero-sized type used to mark a type parameter as invariant.
///
/// Types that are both passed as an argument _and_ used as part of the return value from a
/// function are invariant. See [the reference][1] for more information.
///
/// [1]: https://doc.rust-lang.org/stable/reference/subtyping.html#variance
///
/// ## Layout
///
/// For all `T`, the following are guaranteed:
/// * `size_of::<PhantomInvariant<T>>() == 0`
/// * `align_of::<PhantomInvariant<T>>() == 1`
pub struct PhantomInvariant<T>(PhantomData<fn(T) -> T>);

}

mod sealed {

pub trait Sealed {
const VALUE: Self;
}
}
/// A marker trait for phantom variance types.
pub trait Variance: sealed::Sealed + Default {}
/// Construct a variance marker; equivalent to [`Default::default`].
///
/// This type can be any of the following. You generally should not need to explicitly name the
/// type, however.
///
/// - [`PhantomCovariant`]
/// - [`PhantomContravariant`]
/// - [`PhantomInvariant`]
/// - [`PhantomCovariantLifetime`]
/// - [`PhantomContravariantLifetime`]
/// - [`PhantomInvariantLifetime`]
///
/// # Example
///
/// ```rust
/// #![feature(phantom_variance_markers)]
///
/// use core::marker::{PhantomCovariant, variance};
///
/// struct BoundFn<F, P, R>
/// where
///     F: Fn(P) -> R,
/// {
///     function: F,
///     parameter: P,
///     return_value: PhantomCovariant<R>,
/// }
///
/// let bound_fn = BoundFn {
///     function: core::convert::identity,
///     parameter: 5u8,
///     return_value: variance(),
/// };
/// ```
pub const fn variance<T>() -> T
where
T: Variance,
{
T::VALUE
}
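
Since the new `stdx::variance` module exists to let types carry a `'db` parameter without storing an actual reference, here is a small usage sketch. The `Interned` wrapper is hypothetical and the example assumes a crate that depends on `stdx`; the markers themselves are the ones defined in the file above.

```rust
use stdx::variance::PhantomCovariantLifetime;

// A zero-sized field pins down the variance of `'db` even though no reference
// to the database is actually stored.
struct Interned<'db> {
    index: u32,
    // For variance purposes this behaves like holding a `&'db ()`.
    _marker: PhantomCovariantLifetime<'db>,
}

impl<'db> Interned<'db> {
    fn new(index: u32) -> Self {
        Interned { index, _marker: PhantomCovariantLifetime::new() }
    }
}

fn main() {
    let value: Interned<'static> = Interned::new(0);
    println!("interned #{}", value.index);
}
```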