Auto merge of #17478 - kilpkonn:master, r=Veykril

Simplify some term search tactics

While working on the paper, `@phijor` found that the "Data constructor" tactic can be simplified quite a bit by running it only in the backwards direction. With n+1 rounds it achieves the same coverage as the previous implementation did in n rounds, yet the tactic itself is simpler and potentially faster, as there is less work to do.

In a nutshell, the idea is to work only with the types in the wishlist rather than with arbitrary types.
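
For illustration, here is a minimal, self-contained sketch of that backwards direction (toy types and helper names, not the actual rust-analyzer API): each round walks only the wishlist of still-needed types and tries to build those from expressions that have already been found, instead of enumerating every constructor in scope and checking whether its result happens to be useful.

```rust
use std::collections::{HashMap, HashSet};

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Ty {
    Int,
    Pair(Box<Ty>, Box<Ty>), // stand-in for a two-field struct
}

#[derive(Clone, Debug)]
enum Expr {
    Var(&'static str),
    MakePair(Box<Expr>, Box<Expr>),
}

/// One round of the backwards tactic: for every wished-for type that has a
/// data constructor, try to fill its fields from expressions found so far.
fn data_constructor_round(wishlist: &HashSet<Ty>, found: &mut HashMap<Ty, Vec<Expr>>) {
    for ty in wishlist {
        let Ty::Pair(a, b) = ty else { continue };
        // Clone the field candidates up front so we can mutate `found` below.
        let (lhs, rhs) = match (found.get(&**a), found.get(&**b)) {
            (Some(l), Some(r)) => (l.clone(), r.clone()),
            _ => continue, // a field type is still missing; maybe next round
        };
        let exprs = found.entry(ty.clone()).or_default();
        for l in &lhs {
            for r in &rhs {
                exprs.push(Expr::MakePair(Box::new(l.clone()), Box::new(r.clone())));
            }
        }
    }
}

fn main() {
    // We already know an `Int` expression (say, a local variable `x`)...
    let mut found: HashMap<Ty, Vec<Expr>> = HashMap::new();
    found.insert(Ty::Int, vec![Expr::Var("x")]);

    // ...and the goal type put `Pair(Int, Int)` on the wishlist.
    let wishlist: HashSet<Ty> = HashSet::from([Ty::Pair(Box::new(Ty::Int), Box::new(Ty::Int))]);

    data_constructor_round(&wishlist, &mut found);
    println!("{found:?}");
}
```

Running another round after a field type lands in the lookup table is what gives the "n+1 rounds for the same coverage" behaviour mentioned above.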

Turns out it is quite a bit faster:

Before:
```
ripgrep:
Tail Expr syntactic hits: 238/1692 (14%)
Tail Exprs found: 1223/1692 (72%)
Term search avg time: 15ms

nalgebra:
Tail Expr syntactic hits: 125/3001 (4%)
Tail Exprs found: 2143/3001 (71%)
Term search avg time: 849ms
```

After:
```
ripgrep:
Tail Expr syntactic hits: 246/1692 (14%)
Tail Exprs found: 1209/1692 (71%)
Term search avg time: 8ms

nalgebra:
Tail Expr syntactic hits: 125/3001 (4%)
Tail Exprs found: 2028/3001 (67%)
Term search avg time: 305ms
```

_Also removed the niche optimization that dropped exhausted scope defs from the search space, as it wasn't helping much anyway and made the code a bit more complex._
bors 2024-06-24 07:20:50 +00:00
commit db69df3216
8 changed files with 182 additions and 475 deletions


@@ -93,12 +93,6 @@ struct LookupTable {
     data: FxHashMap<Type, AlternativeExprs>,
     /// New types reached since last query by the `NewTypesKey`
     new_types: FxHashMap<NewTypesKey, Vec<Type>>,
-    /// ScopeDefs that are not interesting any more
-    exhausted_scopedefs: FxHashSet<ScopeDef>,
-    /// ScopeDefs that were used in current round
-    round_scopedef_hits: FxHashSet<ScopeDef>,
-    /// Amount of rounds since scopedef was first used.
-    rounds_since_sopedef_hit: FxHashMap<ScopeDef, u32>,
     /// Types queried but not present
     types_wishlist: FxHashSet<Type>,
     /// Threshold to squash trees to `Many`
@@ -212,37 +206,6 @@ impl LookupTable {
         }
     }

-    /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more
-    fn mark_exhausted(&mut self, def: ScopeDef) {
-        self.exhausted_scopedefs.insert(def);
-    }
-
-    /// Mark `ScopeDef` as used meaning we managed to produce something useful from it
-    fn mark_fulfilled(&mut self, def: ScopeDef) {
-        self.round_scopedef_hits.insert(def);
-    }
-
-    /// Start new round (meant to be called at the beginning of iteration in `term_search`)
-    ///
-    /// This functions marks some `ScopeDef`s as exhausted if there have been
-    /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`.
-    fn new_round(&mut self) {
-        for def in &self.round_scopedef_hits {
-            let hits =
-                self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0);
-            const MAX_ROUNDS_AFTER_HIT: u32 = 2;
-            if *hits > MAX_ROUNDS_AFTER_HIT {
-                self.exhausted_scopedefs.insert(*def);
-            }
-        }
-        self.round_scopedef_hits.clear();
-    }
-
-    /// Get exhausted `ScopeDef`s
-    fn exhausted_scopedefs(&self) -> &FxHashSet<ScopeDef> {
-        &self.exhausted_scopedefs
-    }
-
     /// Types queried but not found
     fn types_wishlist(&mut self) -> &FxHashSet<Type> {
         &self.types_wishlist
@@ -275,7 +238,7 @@ pub struct TermSearchConfig {
 impl Default for TermSearchConfig {
     fn default() -> Self {
-        Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 400 }
+        Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 1200 }
     }
 }
@@ -328,19 +291,12 @@ pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
     solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));

     while should_continue() {
-        lookup.new_round();
-
         solutions.extend(tactics::data_constructor(ctx, &defs, &mut lookup, should_continue));
         solutions.extend(tactics::free_function(ctx, &defs, &mut lookup, should_continue));
         solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup, should_continue));
         solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup, should_continue));
         solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup, should_continue));
         solutions.extend(tactics::make_tuple(ctx, &defs, &mut lookup, should_continue));
-
-        // Discard not interesting `ScopeDef`s for speedup
-        for def in lookup.exhausted_scopedefs() {
-            defs.remove(def);
-        }
     }

     solutions.into_iter().filter(|it| !it.is_many()).unique().collect()


@@ -17,11 +17,11 @@ use itertools::Itertools;
 use rustc_hash::FxHashSet;

 use crate::{
-    Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type,
-    TypeParam, Variant,
+    Adt, AssocItem, GenericDef, GenericParam, HasAttrs, HasVisibility, Impl, ModuleDef, ScopeDef,
+    Type, TypeParam,
 };

-use crate::term_search::{Expr, TermSearchConfig};
+use crate::term_search::Expr;

 use super::{LookupTable, NewTypesKey, TermSearchCtx};
@@ -74,8 +74,6 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
                 _ => None,
             }?;

-            lookup.mark_exhausted(*def);
-
             let ty = expr.ty(db);
             lookup.insert(ty.clone(), std::iter::once(expr.clone()));
@@ -124,6 +122,10 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>(
         .filter(move |it| it.is_visible_from(db, module))
         .filter_map(AssocItem::as_const)
         .filter_map(|it| {
+            if it.attrs(db).is_unstable() {
+                return None;
+            }
+
             let expr = Expr::Const(it);
             let ty = it.ty(db);
@@ -151,163 +153,27 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>( /// * `should_continue` - Function that indicates when to stop iterating /// * `should_continue` - Function that indicates when to stop iterating
/// * `should_continue` - Function that indicates when to stop iterating /// * `should_continue` - Function that indicates when to stop iterating
pub(super) fn data_constructor<'a, DB: HirDatabase>( pub(super) fn data_constructor<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
defs: &'a FxHashSet<ScopeDef>, _defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
should_continue: &'a dyn std::ops::Fn() -> bool, should_continue: &'a dyn std::ops::Fn() -> bool,
) -> impl Iterator<Item = Expr> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.scope.module(); let module = ctx.scope.module();
fn variant_helper( lookup
db: &dyn HirDatabase, .types_wishlist()
lookup: &mut LookupTable, .clone()
should_continue: &dyn std::ops::Fn() -> bool, .into_iter()
parent_enum: Enum, .chain(iter::once(ctx.goal.clone()))
variant: Variant, .filter_map(|ty| ty.as_adt().map(|adt| (adt, ty)))
config: &TermSearchConfig, .filter(|_| should_continue())
) -> Vec<(Type, Vec<Expr>)> { .filter_map(move |(adt, ty)| match adt {
// Ignore unstable Adt::Struct(strukt) => {
if variant.is_unstable(db) { // Ignore unstable or not visible
return Vec::new(); if strukt.is_unstable(db) || !strukt.is_visible_from(db, module) {
}
let generics = GenericDef::from(variant.parent_enum(db));
let Some(type_params) = generics
.type_or_const_params(db)
.into_iter()
.map(|it| it.as_type_param(db))
.collect::<Option<Vec<TypeParam>>>()
else {
// Ignore enums with const generics
return Vec::new();
};
// We currently do not check lifetime bounds so ignore all types that have something to do
// with them
if !generics.lifetime_params(db).is_empty() {
return Vec::new();
}
// Only account for stable type parameters for now, unstable params can be default
// tho, for example in `Box<T, #[unstable] A: Allocator>`
if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
return Vec::new();
}
let non_default_type_params_len =
type_params.iter().filter(|it| it.default(db).is_none()).count();
let enum_ty_shallow = Adt::from(parent_enum).ty(db);
let generic_params = lookup
.types_wishlist()
.clone()
.into_iter()
.filter(|ty| ty.could_unify_with(db, &enum_ty_shallow))
.map(|it| it.type_arguments().collect::<Vec<Type>>())
.chain((non_default_type_params_len == 0).then_some(Vec::new()));
generic_params
.filter(|_| should_continue())
.filter_map(move |generics| {
// Insert default type params
let mut g = generics.into_iter();
let generics: Vec<_> = type_params
.iter()
.map(|it| it.default(db).or_else(|| g.next()))
.collect::<Option<_>>()?;
let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned());
// Ignore types that have something to do with lifetimes
if config.enable_borrowcheck && enum_ty.contains_reference(db) {
return None; return None;
} }
// Early exit if some param cannot be filled from lookup let generics = GenericDef::from(strukt);
let param_exprs: Vec<Vec<Expr>> = variant
.fields(db)
.into_iter()
.map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
.collect::<Option<_>>()?;
// Note that we need special case for 0 param constructors because of multi cartesian
// product
let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
vec![Expr::Variant { variant, generics, params: Vec::new() }]
} else {
param_exprs
.into_iter()
.multi_cartesian_product()
.map(|params| Expr::Variant { variant, generics: generics.clone(), params })
.collect()
};
lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned());
Some((enum_ty, variant_exprs))
})
.collect()
}
defs.iter()
.filter_map(move |def| match def {
ScopeDef::ModuleDef(ModuleDef::Variant(it)) => {
let variant_exprs = variant_helper(
db,
lookup,
should_continue,
it.parent_enum(db),
*it,
&ctx.config,
);
if variant_exprs.is_empty() {
return None;
}
if GenericDef::from(it.parent_enum(db))
.type_or_const_params(db)
.into_iter()
.filter_map(|it| it.as_type_param(db))
.all(|it| it.default(db).is_some())
{
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it)));
}
Some(variant_exprs)
}
ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => {
let exprs: Vec<(Type, Vec<Expr>)> = enum_
.variants(db)
.into_iter()
.flat_map(|it| {
variant_helper(db, lookup, should_continue, *enum_, it, &ctx.config)
})
.collect();
if exprs.is_empty() {
return None;
}
if GenericDef::from(*enum_)
.type_or_const_params(db)
.into_iter()
.filter_map(|it| it.as_type_param(db))
.all(|it| it.default(db).is_some())
{
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_))));
}
Some(exprs)
}
ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => {
// Ignore unstable and not visible
if it.is_unstable(db) || !it.is_visible_from(db, module) {
return None;
}
let generics = GenericDef::from(*it);
// Ignore const params for now
let type_params = generics
.type_or_const_params(db)
.into_iter()
.map(|it| it.as_type_param(db))
.collect::<Option<Vec<TypeParam>>>()?;
// We currently do not check lifetime bounds so ignore all types that have something to do // We currently do not check lifetime bounds so ignore all types that have something to do
// with them // with them
@@ -315,48 +181,73 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
return None; return None;
} }
// Only account for stable type parameters for now, unstable params can be default if ty.contains_unknown() {
// tho, for example in `Box<T, #[unstable] A: Allocator>`
if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) {
return None; return None;
} }
let non_default_type_params_len = // Ignore types that have something to do with lifetimes
type_params.iter().filter(|it| it.default(db).is_none()).count(); if ctx.config.enable_borrowcheck && ty.contains_reference(db) {
return None;
}
let fields = strukt.fields(db);
// Check if all fields are visible, otherwise we cannot fill them
if fields.iter().any(|it| !it.is_visible_from(db, module)) {
return None;
}
let struct_ty_shallow = Adt::from(*it).ty(db); let generics: Vec<_> = ty.type_arguments().collect();
let generic_params = lookup
.types_wishlist() // Early exit if some param cannot be filled from lookup
.clone() let param_exprs: Vec<Vec<Expr>> = fields
.into_iter() .into_iter()
.filter(|ty| ty.could_unify_with(db, &struct_ty_shallow)) .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
.map(|it| it.type_arguments().collect::<Vec<Type>>()) .collect::<Option<_>>()?;
.chain((non_default_type_params_len == 0).then_some(Vec::new()));
let exprs = generic_params // Note that we need special case for 0 param constructors because of multi cartesian
.filter(|_| should_continue()) // product
.filter_map(|generics| { let exprs: Vec<Expr> = if param_exprs.is_empty() {
// Insert default type params vec![Expr::Struct { strukt, generics, params: Vec::new() }]
let mut g = generics.into_iter(); } else {
let generics: Vec<_> = type_params param_exprs
.iter() .into_iter()
.map(|it| it.default(db).or_else(|| g.next())) .multi_cartesian_product()
.collect::<Option<_>>()?; .map(|params| Expr::Struct { strukt, generics: generics.clone(), params })
.collect()
};
let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned()); lookup.insert(ty.clone(), exprs.iter().cloned());
Some((ty, exprs))
}
Adt::Enum(enum_) => {
// Ignore unstable or not visible
if enum_.is_unstable(db) || !enum_.is_visible_from(db, module) {
return None;
}
// Ignore types that have something to do with lifetimes let generics = GenericDef::from(enum_);
if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { // We currently do not check lifetime bounds so ignore all types that have something to do
return None; // with them
} if !generics.lifetime_params(db).is_empty() {
let fields = it.fields(db); return None;
// Check if all fields are visible, otherwise we cannot fill them }
if fields.iter().any(|it| !it.is_visible_from(db, module)) {
return None;
}
if ty.contains_unknown() {
return None;
}
// Ignore types that have something to do with lifetimes
if ctx.config.enable_borrowcheck && ty.contains_reference(db) {
return None;
}
let generics: Vec<_> = ty.type_arguments().collect();
let exprs = enum_
.variants(db)
.into_iter()
.filter_map(|variant| {
// Early exit if some param cannot be filled from lookup // Early exit if some param cannot be filled from lookup
let param_exprs: Vec<Vec<Expr>> = fields let param_exprs: Vec<Vec<Expr>> = variant
.fields(db)
.into_iter() .into_iter()
.map(|field| { .map(|field| {
lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())) lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))
@@ -365,36 +256,33 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>(
// Note that we need special case for 0 param constructors because of multi cartesian // Note that we need special case for 0 param constructors because of multi cartesian
// product // product
let struct_exprs: Vec<Expr> = if param_exprs.is_empty() { let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }] vec![Expr::Variant {
variant,
generics: generics.clone(),
params: Vec::new(),
}]
} else { } else {
param_exprs param_exprs
.into_iter() .into_iter()
.multi_cartesian_product() .multi_cartesian_product()
.map(|params| Expr::Struct { .map(|params| Expr::Variant {
strukt: *it, variant,
generics: generics.clone(), generics: generics.clone(),
params, params,
}) })
.collect() .collect()
}; };
lookup.insert(ty.clone(), variant_exprs.iter().cloned());
if non_default_type_params_len == 0 { Some(variant_exprs)
// Fulfilled only if there are no generic parameters
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(
Adt::Struct(*it),
)));
}
lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned());
Some((struct_ty, struct_exprs))
}) })
.flatten()
.collect(); .collect();
Some(exprs)
Some((ty, exprs))
} }
_ => None, Adt::Union(_) => None,
}) })
.flatten()
.filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
.flatten() .flatten()
} }
@@ -515,7 +403,6 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
                     .collect()
             };

-            lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it)));
             lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
             Some((ret_ty, fn_exprs))
         })
@@ -555,6 +442,8 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
     lookup
         .new_types(NewTypesKey::ImplMethod)
         .into_iter()
+        .filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown()))
+        .filter(|_| should_continue())
         .flat_map(|ty| {
             Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
         })
@@ -563,26 +452,15 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
                 AssocItem::Function(f) => Some((imp, ty, f)),
                 _ => None,
             })
-        .filter(|_| should_continue())
         .filter_map(move |(imp, ty, it)| {
             let fn_generics = GenericDef::from(it);
             let imp_generics = GenericDef::from(imp);

-            // Ignore const params for now
-            let imp_type_params = imp_generics
-                .type_or_const_params(db)
-                .into_iter()
-                .map(|it| it.as_type_param(db))
-                .collect::<Option<Vec<TypeParam>>>()?;
-
-            // Ignore const params for now
-            let fn_type_params = fn_generics
-                .type_or_const_params(db)
-                .into_iter()
-                .map(|it| it.as_type_param(db))
-                .collect::<Option<Vec<TypeParam>>>()?;
-
             // Ignore all functions that have something to do with lifetimes as we don't check them
-            if !fn_generics.lifetime_params(db).is_empty() {
+            if !fn_generics.lifetime_params(db).is_empty()
+                || !imp_generics.lifetime_params(db).is_empty()
+            {
                 return None;
             }
@@ -596,112 +474,59 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
return None; return None;
} }
// Only account for stable type parameters for now, unstable params can be default // Ignore functions with generics for now as they kill the performance
// tho, for example in `Box<T, #[unstable] A: Allocator>` // Also checking bounds for generics is problematic
if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) if !fn_generics.type_or_const_params(db).is_empty() {
|| fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) return None;
}
let ret_ty = it.ret_type_with_args(db, ty.type_arguments());
// Filter out functions that return references
if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr()
{ {
return None; return None;
} }
// Double check that we have fully known type // Ignore functions that do not change the type
if ty.type_arguments().any(|it| it.contains_unknown()) { if ty.could_unify_with_deeply(db, &ret_ty) {
return None; return None;
} }
let non_default_fn_type_params_len = let self_ty =
fn_type_params.iter().filter(|it| it.default(db).is_none()).count(); it.self_param(db).expect("No self param").ty_with_args(db, ty.type_arguments());
// Ignore functions with generics for now as they kill the performance // Ignore functions that have different self type
// Also checking bounds for generics is problematic if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
if non_default_fn_type_params_len > 0 {
return None; return None;
} }
let generic_params = lookup let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
.iter_types()
.collect::<Vec<_>>() // Force take ownership // Early exit if some param cannot be filled from lookup
let param_exprs: Vec<Vec<Expr>> = it
.params_without_self_with_args(db, ty.type_arguments())
.into_iter() .into_iter()
.permutations(non_default_fn_type_params_len); .map(|field| lookup.find_autoref(db, field.ty()))
.collect::<Option<_>>()?;
let exprs: Vec<_> = generic_params let generics: Vec<_> = ty.type_arguments().collect();
.filter(|_| should_continue()) let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
.filter_map(|generics| { .chain(param_exprs)
// Insert default type params .multi_cartesian_product()
let mut g = generics.into_iter(); .map(|params| {
let generics: Vec<_> = ty let mut params = params.into_iter();
.type_arguments() let target = Box::new(params.next().unwrap());
.map(Some) Expr::Method {
.chain(fn_type_params.iter().map(|it| match it.default(db) { func: it,
Some(ty) => Some(ty), generics: generics.clone(),
None => { target,
let generic = g.next().expect("Missing type param"); params: params.collect(),
// Filter out generics that do not unify due to trait bounds
it.ty(db).could_unify_with(db, &generic).then_some(generic)
}
}))
.collect::<Option<_>>()?;
let ret_ty = it.ret_type_with_args(
db,
ty.type_arguments().chain(generics.iter().cloned()),
);
// Filter out functions that return references
if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
|| ret_ty.is_raw_ptr()
{
return None;
} }
// Ignore functions that do not change the type
if ty.could_unify_with_deeply(db, &ret_ty) {
return None;
}
let self_ty = it
.self_param(db)
.expect("No self param")
.ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned()));
// Ignore functions that have different self type
if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
return None;
}
let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
// Early exit if some param cannot be filled from lookup
let param_exprs: Vec<Vec<Expr>> = it
.params_without_self_with_args(
db,
ty.type_arguments().chain(generics.iter().cloned()),
)
.into_iter()
.map(|field| lookup.find_autoref(db, field.ty()))
.collect::<Option<_>>()?;
let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
.chain(param_exprs)
.multi_cartesian_product()
.map(|params| {
let mut params = params.into_iter();
let target = Box::new(params.next().unwrap());
Expr::Method {
func: it,
generics: generics.clone(),
target,
params: params.collect(),
}
})
.collect();
lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
Some((ret_ty, fn_exprs))
}) })
.collect(); .collect();
Some(exprs)
Some((ret_ty, fn_exprs))
}) })
.flatten()
.filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
.flatten() .flatten()
} }
@@ -805,6 +630,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
         .clone()
         .into_iter()
         .chain(iter::once(ctx.goal.clone()))
+        .filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown()))
         .filter(|_| should_continue())
         .flat_map(|ty| {
             Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
@@ -815,24 +641,11 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
                 AssocItem::Function(f) => Some((imp, ty, f)),
                 _ => None,
             })
-        .filter(|_| should_continue())
         .filter_map(move |(imp, ty, it)| {
             let fn_generics = GenericDef::from(it);
             let imp_generics = GenericDef::from(imp);

-            // Ignore const params for now
-            let imp_type_params = imp_generics
-                .type_or_const_params(db)
-                .into_iter()
-                .map(|it| it.as_type_param(db))
-                .collect::<Option<Vec<TypeParam>>>()?;
-
-            // Ignore const params for now
-            let fn_type_params = fn_generics
-                .type_or_const_params(db)
-                .into_iter()
-                .map(|it| it.as_type_param(db))
-                .collect::<Option<Vec<TypeParam>>>()?;
-
             // Ignore all functions that have something to do with lifetimes as we don't check them
             if !fn_generics.lifetime_params(db).is_empty()
                 || !imp_generics.lifetime_params(db).is_empty()
@@ -850,104 +663,43 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
return None; return None;
} }
// Only account for stable type parameters for now, unstable params can be default // Ignore functions with generics for now as they kill the performance
// tho, for example in `Box<T, #[unstable] A: Allocator>` // Also checking bounds for generics is problematic
if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) if !fn_generics.type_or_const_params(db).is_empty() {
|| fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) return None;
}
let ret_ty = it.ret_type_with_args(db, ty.type_arguments());
// Filter out functions that return references
if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr()
{ {
return None; return None;
} }
// Double check that we have fully known type // Early exit if some param cannot be filled from lookup
if ty.type_arguments().any(|it| it.contains_unknown()) { let param_exprs: Vec<Vec<Expr>> = it
return None; .params_without_self_with_args(db, ty.type_arguments())
}
let non_default_fn_type_params_len =
fn_type_params.iter().filter(|it| it.default(db).is_none()).count();
// Ignore functions with generics for now as they kill the performance
// Also checking bounds for generics is problematic
if non_default_fn_type_params_len > 0 {
return None;
}
let generic_params = lookup
.iter_types()
.collect::<Vec<_>>() // Force take ownership
.into_iter() .into_iter()
.permutations(non_default_fn_type_params_len); .map(|field| lookup.find_autoref(db, field.ty()))
.collect::<Option<_>>()?;
let exprs: Vec<_> = generic_params // Note that we need special case for 0 param constructors because of multi cartesian
.filter(|_| should_continue()) // product
.filter_map(|generics| { let generics = ty.type_arguments().collect();
// Insert default type params let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
let mut g = generics.into_iter(); vec![Expr::Function { func: it, generics, params: Vec::new() }]
let generics: Vec<_> = ty } else {
.type_arguments() param_exprs
.map(Some) .into_iter()
.chain(fn_type_params.iter().map(|it| match it.default(db) { .multi_cartesian_product()
Some(ty) => Some(ty), .map(|params| Expr::Function { func: it, generics: generics.clone(), params })
None => { .collect()
let generic = g.next().expect("Missing type param"); };
it.trait_bounds(db)
.into_iter()
.all(|bound| generic.impls_trait(db, bound, &[]));
// Filter out generics that do not unify due to trait bounds
it.ty(db).could_unify_with(db, &generic).then_some(generic)
}
}))
.collect::<Option<_>>()?;
let ret_ty = it.ret_type_with_args( lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
db,
ty.type_arguments().chain(generics.iter().cloned()),
);
// Filter out functions that return references
if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
|| ret_ty.is_raw_ptr()
{
return None;
}
// Ignore functions that do not change the type Some((ret_ty, fn_exprs))
// if ty.could_unify_with_deeply(db, &ret_ty) {
// return None;
// }
// Early exit if some param cannot be filled from lookup
let param_exprs: Vec<Vec<Expr>> = it
.params_without_self_with_args(
db,
ty.type_arguments().chain(generics.iter().cloned()),
)
.into_iter()
.map(|field| lookup.find_autoref(db, field.ty()))
.collect::<Option<_>>()?;
// Note that we need special case for 0 param constructors because of multi cartesian
// product
let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
vec![Expr::Function { func: it, generics, params: Vec::new() }]
} else {
param_exprs
.into_iter()
.multi_cartesian_product()
.map(|params| Expr::Function {
func: it,
generics: generics.clone(),
params,
})
.collect()
};
lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
Some((ret_ty, fn_exprs))
})
.collect();
Some(exprs)
}) })
.flatten()
.filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
.flatten() .flatten()
} }


@@ -144,7 +144,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#,
         term_search,
         r#"//- minicore: todo, unimplemented, option
 fn f() { let a: i32 = 1; let b: Option<i32> = todo$0!(); }"#,
-        r#"fn f() { let a: i32 = 1; let b: Option<i32> = None; }"#,
+        r#"fn f() { let a: i32 = 1; let b: Option<i32> = Some(a); }"#,
     )
 }


@@ -764,6 +764,7 @@ fn main() {
         "#,
         expect![[r#"
             st dep::test_mod_b::Struct {} [type_could_unify]
+            ex dep::test_mod_b::Struct { } [type_could_unify]
             st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import]
             fn main() []
             fn test() []
@@ -839,6 +840,7 @@ fn main() {
         "#,
         expect![[r#"
             ev dep::test_mod_b::Enum::variant [type_could_unify]
+            ex dep::test_mod_b::Enum::variant [type_could_unify]
             en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
             fn main() []
             fn test() []
@@ -876,6 +878,7 @@ fn main() {
         "#,
         expect![[r#"
             ev dep::test_mod_b::Enum::Variant [type_could_unify]
+            ex dep::test_mod_b::Enum::Variant [type_could_unify]
             fn main() []
             fn test() []
             md dep []
@@ -1839,7 +1842,6 @@ fn f() { A { bar: b$0 }; }
             fn baz() [type]
             ex baz() [type]
             ex bar() [type]
-            ex A { bar: ... }.bar [type]
             st A []
             fn f() []
         "#]],
@@ -1978,7 +1980,6 @@ fn main() {
         "#,
         expect![[r#"
             ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify]
-            ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
             lc m [local]
             lc t [local]
             lc &t [type+local]
@@ -2028,7 +2029,6 @@ fn main() {
         "#,
         expect![[r#"
             ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify]
-            ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify]
             lc m [local]
             lc t [local]
             lc &mut t [type+local]
@@ -2132,7 +2132,6 @@ fn main() {
 }
         "#,
         expect![[r#"
-            ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify]
             ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify]
             st S []
             st &S [type]


@@ -276,7 +276,7 @@ impl Foo for Baz {
 }
 fn asd() -> Bar {
     let a = Baz;
-    Foo::foo(_)
+    Foo::foo(a)
 }
 ",
     );
@@ -365,7 +365,7 @@ impl Foo for A {
 }
 fn main() {
     let a = A;
-    let c: Bar = Foo::foo(_);
+    let c: Bar = Foo::foo(&a);
 }"#,
     );
 }


@@ -341,8 +341,8 @@ config_data! {
         assist_emitMustUse: bool = false,
         /// Placeholder expression to use for missing expressions in assists.
         assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo,
-        /// Term search fuel in "units of work" for assists (Defaults to 400).
-        assist_termSearch_fuel: usize = 400,
+        /// Term search fuel in "units of work" for assists (Defaults to 1800).
+        assist_termSearch_fuel: usize = 1800,

         /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
         imports_granularity_enforce: bool = false,
@@ -426,8 +426,8 @@ config_data! {
         }"#).unwrap(),
         /// Whether to enable term search based snippets like `Some(foo.bar().baz())`.
         completion_termSearch_enable: bool = false,
-        /// Term search fuel in "units of work" for autocompletion (Defaults to 200).
-        completion_termSearch_fuel: usize = 200,
+        /// Term search fuel in "units of work" for autocompletion (Defaults to 1000).
+        completion_termSearch_fuel: usize = 1000,

         /// Controls file watching implementation.
         files_watcher: FilesWatcherDef = FilesWatcherDef::Client,


@@ -9,10 +9,10 @@ for enum variants.
 --
 Placeholder expression to use for missing expressions in assists.
 --
-[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `400`)::
+[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `1800`)::
 +
 --
-Term search fuel in "units of work" for assists (Defaults to 400).
+Term search fuel in "units of work" for assists (Defaults to 1800).
 --
 [[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`)::
 +
@@ -378,10 +378,10 @@ Custom completion snippets.
 --
 Whether to enable term search based snippets like `Some(foo.bar().baz())`.
 --
-[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `200`)::
+[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `1000`)::
 +
 --
-Term search fuel in "units of work" for autocompletion (Defaults to 200).
+Term search fuel in "units of work" for autocompletion (Defaults to 1000).
 --
 [[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
 +


@@ -592,8 +592,8 @@
                 "title": "assist",
                 "properties": {
                     "rust-analyzer.assist.termSearch.fuel": {
-                        "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 400).",
-                        "default": 400,
+                        "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 1800).",
+                        "default": 1800,
                         "type": "integer",
                         "minimum": 0
                     }
@@ -1224,8 +1224,8 @@
                 "title": "completion",
                 "properties": {
                     "rust-analyzer.completion.termSearch.fuel": {
-                        "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 200).",
-                        "default": 200,
+                        "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 1000).",
+                        "default": 1000,
                         "type": "integer",
                         "minimum": 0
                     }