Implement autoderef using the Deref trait
- add support for other lang item targets, since we need the Deref lang item
parent 49489dc20c
commit 9c5e7dd849
11 changed files with 153 additions and 48 deletions
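Before this change, autoderef only followed built-in derefs (`Ty::builtin_deref`, with a `// FIXME Deref::deref` note); it now also follows user-defined `Deref` impls by looking up the `deref` lang item and asking the trait solver for `<T as Deref>::Target`. A rough sketch of the kind of user code this makes resolvable in the IDE (the `Inner`/`Wrapper` names are invented for illustration, not taken from the commit):

use std::ops::Deref;

struct Inner {
    field: u32,
}

struct Wrapper(Inner);

impl Deref for Wrapper {
    type Target = Inner;
    fn deref(&self) -> &Inner {
        &self.0
    }
}

fn main() {
    let w = Wrapper(Inner { field: 92 });
    // `w.field` only resolves through `Wrapper`'s `Deref` impl; field type
    // inference, completion and go-to-type-definition all rely on the
    // trait-based autoderef added by this commit.
    println!("{}", w.field);
}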
@@ -22,7 +22,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
     let expr = match_expr.expr()?;
     let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None);
     let match_expr_ty = analyzer.type_of(ctx.db, expr)?;
-    let enum_def = match_expr_ty.autoderef(ctx.db).find_map(|ty| match ty.as_adt() {
+    let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() {
         Some((AdtDef::Enum(e), _)) => Some(e),
         _ => None,
     })?;
@@ -4,7 +4,7 @@ use rustc_hash::FxHashMap;
 use ra_syntax::{SmolStr, ast::AttrsOwner};

 use crate::{
-    Crate, DefDatabase, Enum, Function, HirDatabase, ImplBlock, Module, Static, Struct, Trait, AstDatabase,
+    Crate, DefDatabase, Enum, Function, HirDatabase, ImplBlock, Module, Static, Struct, Trait, ModuleDef, AstDatabase, HasSource
 };

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -87,19 +87,48 @@ impl LangItems {
         let source = module.definition_source(db).ast;
         for (impl_id, _) in impl_blocks.impls.iter() {
             let impl_block = source_map.get(&source, impl_id);
-            let lang_item_name = impl_block
-                .attrs()
-                .filter_map(|a| a.as_key_value())
-                .filter(|(key, _)| key == "lang")
-                .map(|(_, val)| val)
-                .nth(0);
-            if let Some(lang_item_name) = lang_item_name {
+            if let Some(lang_item_name) = lang_item_name(&*impl_block) {
                 let imp = ImplBlock::from_id(*module, impl_id);
                 self.items.entry(lang_item_name).or_insert_with(|| LangItemTarget::ImplBlock(imp));
             }
         }

-        // FIXME we should look for the other lang item targets (traits, structs, ...)
+        // FIXME make this nicer
+        for def in module.declarations(db) {
+            match def {
+                ModuleDef::Trait(trait_) => {
+                    let node = trait_.source(db).ast;
+                    if let Some(lang_item_name) = lang_item_name(&*node) {
+                        self.items.entry(lang_item_name).or_insert(LangItemTarget::Trait(trait_));
+                    }
+                }
+                ModuleDef::Enum(e) => {
+                    let node = e.source(db).ast;
+                    if let Some(lang_item_name) = lang_item_name(&*node) {
+                        self.items.entry(lang_item_name).or_insert(LangItemTarget::Enum(e));
+                    }
+                }
+                ModuleDef::Struct(s) => {
+                    let node = s.source(db).ast;
+                    if let Some(lang_item_name) = lang_item_name(&*node) {
+                        self.items.entry(lang_item_name).or_insert(LangItemTarget::Struct(s));
+                    }
+                }
+                ModuleDef::Function(f) => {
+                    let node = f.source(db).ast;
+                    if let Some(lang_item_name) = lang_item_name(&*node) {
+                        self.items.entry(lang_item_name).or_insert(LangItemTarget::Function(f));
+                    }
+                }
+                ModuleDef::Static(s) => {
+                    let node = s.source(db).ast;
+                    if let Some(lang_item_name) = lang_item_name(&*node) {
+                        self.items.entry(lang_item_name).or_insert(LangItemTarget::Static(s));
+                    }
+                }
+                _ => {}
+            }
+        }

         // Look for lang items in the children
         for child in module.children(db) {
@@ -107,3 +136,11 @@ impl LangItems {
         }
     }
 }
+
+fn lang_item_name<T: AttrsOwner>(node: &T) -> Option<SmolStr> {
+    node.attrs()
+        .filter_map(|a| a.as_key_value())
+        .filter(|(key, _)| key == "lang")
+        .map(|(_, val)| val)
+        .nth(0)
+}
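The lang-item collection above now records `#[lang = "..."]` attributes not only on impl blocks but also on traits, enums, structs, functions and statics, which is what lets the `Deref` trait itself be found under the `"deref"` lang item. The extracted `lang_item_name` helper just filters key-value attributes for the `lang` key; below is a standalone sketch of that filtering over plain `(key, value)` pairs (hypothetical stand-in types, for illustration only, not the ra_syntax API):

// Stand-in sketch of the `lang_item_name` filtering over plain (key, value)
// pairs instead of `ra_syntax` attributes; the types here are hypothetical.
fn lang_item_name(attrs: &[(&str, &str)]) -> Option<String> {
    attrs
        .iter()
        .filter(|(key, _)| *key == "lang")
        .map(|(_, value)| value.to_string())
        .next()
}

fn main() {
    // A trait annotated `#[lang = "deref"]` carries the key-value pair
    // ("lang", "deref"), which the scan above records as a
    // LangItemTarget::Trait under the name "deref".
    let attrs = [("doc", "the Deref trait"), ("lang", "deref")];
    assert_eq!(lang_item_name(&attrs), Some("deref".to_string()));
}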
@@ -46,6 +46,11 @@ impl Name {
         Name::new(idx.to_string().into())
     }

+    // Needed for Deref
+    pub(crate) fn target() -> Name {
+        Name::new("Target".into())
+    }
+
     // There's should be no way to extract a string out of `Name`: `Name` in the
     // future, `Name` will include hygiene information, and you can't encode
     // hygiene into a String.
@@ -369,6 +369,17 @@ impl SourceAnalyzer {
         )
     }

+    pub fn autoderef<'a>(
+        &'a self,
+        db: &'a impl HirDatabase,
+        ty: Ty,
+    ) -> impl Iterator<Item = Ty> + 'a {
+        // There should be no inference vars in types passed here
+        // FIXME check that?
+        let canonical = crate::ty::Canonical { value: ty, num_vars: 0 };
+        crate::ty::autoderef(db, &self.resolver, canonical).map(|canonical| canonical.value)
+    }
+
     #[cfg(test)]
     pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
         self.body_source_map.clone().unwrap()
@@ -22,6 +22,7 @@ use display::{HirDisplay, HirFormatter};
 pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates, generic_defaults};
 pub(crate) use infer::{infer_query, InferenceResult, InferTy};
 pub use lower::CallableDef;
+pub(crate) use autoderef::autoderef;

 /// A type constructor or type name: this might be something like the primitive
 /// type `bool`, a struct like `Vec`, or things like function pointers or
@@ -225,8 +226,8 @@ impl Deref for Substs {
 #[derive(Clone, PartialEq, Eq, Debug, Hash)]
 pub struct TraitRef {
     /// FIXME name?
-    trait_: Trait,
-    substs: Substs,
+    pub trait_: Trait,
+    pub substs: Substs,
 }

 impl TraitRef {
@@ -5,17 +5,67 @@

 use std::iter::successors;

-use crate::HirDatabase;
-use super::Ty;
-
-impl Ty {
-    /// Iterates over the possible derefs of `ty`.
-    pub fn autoderef<'a>(self, db: &'a impl HirDatabase) -> impl Iterator<Item = Ty> + 'a {
-        successors(Some(self), move |ty| ty.autoderef_step(db))
-    }
-
-    fn autoderef_step(&self, _db: &impl HirDatabase) -> Option<Ty> {
-        // FIXME Deref::deref
-        self.builtin_deref()
+use log::info;
+
+use crate::{HirDatabase, Name, Resolver};
+use super::{traits::Solution, Ty, Canonical};
+
+pub(crate) fn autoderef<'a>(
+    db: &'a impl HirDatabase,
+    resolver: &'a Resolver,
+    ty: Canonical<Ty>,
+) -> impl Iterator<Item = Canonical<Ty>> + 'a {
+    successors(Some(ty), move |ty| deref(db, resolver, ty))
+}
+
+pub(crate) fn deref(
+    db: &impl HirDatabase,
+    resolver: &Resolver,
+    ty: &Canonical<Ty>,
+) -> Option<Canonical<Ty>> {
+    if let Some(derefed) = ty.value.builtin_deref() {
+        Some(Canonical { value: derefed, num_vars: ty.num_vars })
+    } else {
+        deref_by_trait(db, resolver, ty)
+    }
+}
+
+fn deref_by_trait(
+    db: &impl HirDatabase,
+    resolver: &Resolver,
+    ty: &Canonical<Ty>,
+) -> Option<Canonical<Ty>> {
+    let krate = resolver.krate()?;
+    let deref_trait = match db.lang_item(krate, "deref".into())? {
+        crate::lang_item::LangItemTarget::Trait(t) => t,
+        _ => return None,
+    };
+    let target = deref_trait.associated_type_by_name(db, Name::target())?;
+
+    // FIXME we should check that Deref has no type parameters, because we assume it below
+
+    // FIXME make the Canonical handling nicer
+    // TODO shift inference variables in ty
+
+    let projection = super::traits::ProjectionPredicate {
+        ty: Ty::Bound(0),
+        projection_ty: super::ProjectionTy {
+            associated_ty: target,
+            parameters: vec![ty.value.clone()].into(),
+        },
+    };
+
+    let canonical = super::Canonical { num_vars: 1 + ty.num_vars, value: projection };
+
+    let solution = db.normalize(krate, canonical)?;
+
+    match &solution {
+        Solution::Unique(vars) => {
+            Some(Canonical { value: vars.0.value[0].clone(), num_vars: vars.0.num_vars })
+        }
+        Solution::Ambig(_) => {
+            info!("Ambiguous solution for deref: {:?}", solution);
+            None
+        }
     }
 }
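The new `deref_by_trait` resolves one deref step through the trait solver: it looks up the `deref` lang item, takes its `Target` associated type (via `Name::target()`), builds a projection predicate of roughly the shape `<T as Deref>::Target == ?0` with one fresh canonical variable (hence `num_vars: 1 + ty.num_vars`), and only accepts a unique normalization result. In plain Rust, the projection being normalized can be written out directly; a small illustrative example (the `Wrapper` type and `shout` function are made up for this sketch):

use std::ops::Deref;

struct Wrapper(String);

impl Deref for Wrapper {
    type Target = String;
    fn deref(&self) -> &String {
        &self.0
    }
}

// `deref_by_trait` in effect asks the solver what `<Wrapper as Deref>::Target`
// normalizes to; in source code the same projection can be named explicitly.
fn shout(s: <Wrapper as Deref>::Target) -> String {
    s.to_uppercase()
}

fn main() {
    let w = Wrapper("hello".to_string());
    // One deref step: &Wrapper coerces to &String because Target = String.
    let inner: &String = &w;
    println!("{} {}", inner, shout(String::from("hey")));
}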
@@ -46,7 +46,7 @@ use crate::{
 use super::{
     Ty, TypableDef, Substs, primitive, op, ApplicationTy, TypeCtor, CallableDef, TraitRef,
     traits::{Solution, Obligation, Guidance},
-    method_resolution,
+    method_resolution, autoderef,
 };

 mod unify;
@@ -1074,25 +1074,27 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             }
             Expr::Field { expr, name } => {
                 let receiver_ty = self.infer_expr(*expr, &Expectation::none());
-                let ty = receiver_ty
-                    .autoderef(self.db)
-                    .find_map(|derefed_ty| match derefed_ty {
-                        Ty::Apply(a_ty) => match a_ty.ctor {
-                            TypeCtor::Tuple { .. } => {
-                                let i = name.to_string().parse::<usize>().ok();
-                                i.and_then(|i| a_ty.parameters.0.get(i).cloned())
-                            }
-                            TypeCtor::Adt(AdtDef::Struct(s)) => {
-                                s.field(self.db, name).map(|field| {
-                                    self.write_field_resolution(tgt_expr, field);
-                                    field.ty(self.db).subst(&a_ty.parameters)
-                                })
-                            }
-                            _ => None,
-                        },
-                        _ => None,
-                    })
-                    .unwrap_or(Ty::Unknown);
+                let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
+                let ty = autoderef::autoderef(
+                    self.db,
+                    &self.resolver.clone(),
+                    canonicalized.value.clone(),
+                )
+                .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) {
+                    Ty::Apply(a_ty) => match a_ty.ctor {
+                        TypeCtor::Tuple { .. } => {
+                            let i = name.to_string().parse::<usize>().ok();
+                            i.and_then(|i| a_ty.parameters.0.get(i).cloned())
+                        }
+                        TypeCtor::Adt(AdtDef::Struct(s)) => s.field(self.db, name).map(|field| {
+                            self.write_field_resolution(tgt_expr, field);
+                            field.ty(self.db).subst(&a_ty.parameters)
+                        }),
+                        _ => None,
+                    },
+                    _ => None,
+                })
+                .unwrap_or(Ty::Unknown);
                 self.insert_type_vars(ty)
             }
             Expr::Try { expr } => {
@@ -16,7 +16,7 @@ use crate::{
     generics::HasGenericParams,
     ty::primitive::{UncertainIntTy, UncertainFloatTy}
 };
-use super::{TraitRef, Canonical};
+use super::{TraitRef, Canonical, autoderef};

 /// This is used as a key for indexing impls.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -162,8 +162,7 @@ pub(crate) fn iterate_method_candidates<T>(
     // rustc does an autoderef and then autoref again).

     let krate = resolver.krate()?;
-    for derefed_ty in ty.value.clone().autoderef(db) {
-        let derefed_ty = Canonical { value: derefed_ty, num_vars: ty.num_vars };
+    for derefed_ty in autoderef::autoderef(db, resolver, ty.clone()) {
         if let Some(result) = iterate_inherent_methods(&derefed_ty, db, name, krate, &mut callback)
         {
             return Some(result);
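With the change above, `iterate_method_candidates` walks the chain produced by `autoderef::autoderef(db, resolver, ty.clone())`, which already yields canonicalized types, instead of wrapping each builtin deref step in a `Canonical` by hand. For intuition, this is the usual deref chain method lookup needs to walk (standard-library example, illustrative only, not code from the commit):

fn main() {
    let b: Box<Vec<i32>> = Box::new(vec![1, 2, 3]);
    // Method lookup walks the deref chain Box<Vec<i32>> -> Vec<i32> -> [i32]:
    // `capacity` is found on Vec after one step, `first` on the slice after
    // two. The `autoderef::autoderef(db, resolver, ty)` iterator above yields
    // such a chain, one canonicalized type per step.
    println!("{} {:?}", b.capacity(), b.first());
}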
@@ -80,8 +80,8 @@ pub enum Obligation {

 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
 pub struct ProjectionPredicate {
-    projection_ty: ProjectionTy,
-    ty: Ty,
+    pub projection_ty: ProjectionTy,
+    pub ty: Ty,
 }

 /// Check using Chalk whether trait is implemented for given parameters including `Self` type.
@@ -15,7 +15,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
 }

 fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) {
-    for receiver in receiver.autoderef(ctx.db) {
+    for receiver in ctx.analyzer.autoderef(ctx.db, receiver) {
         if let Ty::Apply(a_ty) = receiver {
             match a_ty.ctor {
                 TypeCtor::Adt(AdtDef::Struct(s)) => {
@@ -30,7 +30,7 @@ pub(crate) fn goto_type_definition(
         return None;
     };

-    let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt().map(|adt| adt.0))?;
+    let adt_def = analyzer.autoderef(db, ty).find_map(|ty| ty.as_adt().map(|adt| adt.0))?;

     let nav = NavigationTarget::from_adt_def(db, adt_def);
     Some(RangeInfo::new(node.range(), vec![nav]))
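Goto-type-definition (and the similar `complete_dot` change above) now goes through `analyzer.autoderef`, so the target ADT is found even when the expression's type is a user-level smart pointer. An illustrative example (names invented, not from the commit):

use std::rc::Rc;

struct Config {
    verbose: bool,
}

fn main() {
    let cfg = Rc::new(Config { verbose: true });
    // "Go to type definition" on `cfg` should land on `struct Config`: the
    // ADT only shows up after autoderefing Rc<Config> -> Config, i.e. after
    // `analyzer.autoderef(db, ty).find_map(|ty| ty.as_adt())` walks past Rc.
    println!("{}", cfg.verbose);
}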