Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-10-01 22:31:43 +00:00.
Merge #10739

10739: internal: Simplify r=Veykril a=Veykril

bors r+

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>

commit e7244e899f
3 changed files with 33 additions and 26 deletions
@@ -897,13 +897,13 @@ impl<'db> SemanticsImpl<'db> {
     }

     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let sa = self.analyze(node);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze(node);
+        SemanticsScope { db: self.db, file_id, resolver }
     }

     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let sa = self.analyze_with_offset(node, offset);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id, resolver }
     }

     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
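Both scope methods now destructure the returned SourceAnalyzer directly instead of going through an intermediate sa binding, which also lets the SemanticsScope literal use field-init shorthand. A minimal standalone sketch of the idiom, with hypothetical Analyzer fields standing in for the real SourceAnalyzer:

    struct Analyzer {
        file_id: u32,     // stand-in for the real HirFileId
        resolver: String, // stand-in for the real Resolver
        cache: Vec<u8>,   // extra field we don't need here
    }

    fn analyze() -> Analyzer {
        Analyzer { file_id: 1, resolver: "resolver".to_string(), cache: vec![] }
    }

    fn main() {
        // `..` ignores the remaining fields; no intermediate binding needed,
        // and the names feed straight into field-init shorthand afterwards.
        let Analyzer { file_id, resolver, .. } = analyze();
        println!("{file_id} {resolver}");
    }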
@@ -924,9 +924,11 @@ impl<'db> SemanticsImpl<'db> {
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
         self.analyze_impl(node, None)
     }
+
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
         self.analyze_impl(node, Some(offset))
     }
+
     fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
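For context, analyze and analyze_with_offset are thin wrappers over a single analyze_impl keyed by an Option<TextSize>. A minimal sketch of that shape, with stand-in types:

    type TextSize = u32;    // stand-in for text_size::TextSize
    struct SourceAnalyzer;  // stand-in for the real analyzer

    // Two thin entry points share one implementation; the Option encodes
    // "analyze the whole node" vs. "analyze at a specific offset".
    fn analyze(node: &str) -> SourceAnalyzer {
        analyze_impl(node, None)
    }

    fn analyze_with_offset(node: &str, offset: TextSize) -> SourceAnalyzer {
        analyze_impl(node, Some(offset))
    }

    fn analyze_impl(_node: &str, _offset: Option<TextSize>) -> SourceAnalyzer {
        SourceAnalyzer
    }

    fn main() {
        analyze("fn main() {}");
        analyze_with_offset("fn main() {}", 3);
    }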
@@ -13,6 +13,7 @@ use hir_def::{
 };
 use hir_expand::name::Name;
 use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;

 use crate::{
     autoderef,
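The new import is used further down when the deref chain is unexpectedly empty. A rough stand-in for stdx::never! (stdx is rust-analyzer's in-tree utility crate; the real macro is assumed to log and debug-assert, while this sketch only logs so it runs under any build profile):

    macro_rules! never {
        ($msg:expr) => {
            eprintln!("should never happen: {}", $msg)
        };
    }

    fn main() {
        let deref_chain: Vec<u32> = Vec::new();
        if deref_chain.first().is_none() {
            never!("received empty deref-chain");
        }
    }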
@@ -334,7 +335,7 @@ pub fn def_crates(
         }};
     }

-    let mod_to_crate_ids = |module: ModuleId| Some(std::iter::once(module.krate()).collect());
+    let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());

     let lang_item_targets = match ty.kind(&Interner) {
         TyKind::Adt(AdtId(def_id), _) => {
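Shortening std::iter::once to iter::once presumably relies on a use std::iter; import elsewhere in the module. For reference, iter::once builds a one-element iterator that collects or chains like any other:

    use std::iter;

    fn main() {
        // iter::once builds a one-element iterator, as in mod_to_crate_ids above.
        let ids: Vec<u32> = iter::once(7).collect();
        assert_eq!(ids, vec![7]);

        // It also chains with the rest of a slice, as the method-resolution
        // loops further down do with the deref chain.
        let rest = [8, 9];
        let all: Vec<u32> = iter::once(7).chain(rest.iter().copied()).collect();
        assert_eq!(all, vec![7, 8, 9]);
    }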
@@ -533,9 +534,16 @@ fn iterate_method_candidates_with_autoref(
     name: Option<&Name>,
     mut callback: &mut dyn FnMut(&Canonical<Ty>, AssocItemId) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
+    let (receiver_ty, rest) = match deref_chain.split_first() {
+        Some((rec, rest)) => (rec.clone(), rest),
+        None => {
+            never!("received empty deref-chain");
+            return ControlFlow::Break(());
+        }
+    };
     iterate_method_candidates_by_receiver(
-        &deref_chain[0],
-        &deref_chain[1..],
+        &receiver_ty,
+        &rest,
         db,
         env.clone(),
         krate,
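Replacing deref_chain[0] and deref_chain[1..] with split_first makes the empty-slice case an explicit branch instead of a potential index panic. A minimal sketch of the pattern:

    fn first_and_rest(chain: &[u32]) -> Option<(u32, &[u32])> {
        match chain.split_first() {
            // `first` borrows the head; copy it out, keep `rest` as a slice.
            Some((first, rest)) => Some((*first, rest)),
            // Indexing with chain[0] would panic here instead.
            None => None,
        }
    }

    fn main() {
        assert_eq!(first_and_rest(&[1, 2, 3]), Some((1, &[2, 3][..])));
        assert_eq!(first_and_rest(&[]), None);
    }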
@@ -546,8 +554,8 @@ fn iterate_method_candidates_with_autoref(
     )?;

     let refed = Canonical {
-        binders: deref_chain[0].binders.clone(),
-        value: TyKind::Ref(Mutability::Not, static_lifetime(), deref_chain[0].value.clone())
+        binders: receiver_ty.binders.clone(),
+        value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
             .intern(&Interner),
     };

@@ -564,9 +572,8 @@ fn iterate_method_candidates_with_autoref(
     )?;

     let ref_muted = Canonical {
-        binders: deref_chain[0].binders.clone(),
-        value: TyKind::Ref(Mutability::Mut, static_lifetime(), deref_chain[0].value.clone())
-            .intern(&Interner),
+        binders: receiver_ty.binders,
+        value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value).intern(&Interner),
     };

     iterate_method_candidates_by_receiver(
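Note that this last use can move receiver_ty.binders and receiver_ty.value rather than clone them. The refed and ref_muted values wrap the receiver in &T and &mut T so candidates with those receiver types are tried too, mirroring the language's auto-ref step during method resolution. A plain-Rust illustration of the behavior being modeled:

    struct S;

    impl S {
        fn by_ref(&self) -> &'static str { "&self" }
        fn by_mut(&mut self) -> &'static str { "&mut self" }
    }

    fn main() {
        let mut s = S;
        // Method calls auto-ref the receiver: `s.by_ref()` is `(&s).by_ref()`,
        // `s.by_mut()` is `(&mut s).by_mut()`. The resolver above builds the
        // &T and &mut T receiver types explicitly to model exactly this.
        assert_eq!(s.by_ref(), "&self");
        assert_eq!(s.by_mut(), "&mut self");
    }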
@@ -596,7 +603,7 @@ fn iterate_method_candidates_by_receiver(
     // We're looking for methods with *receiver* type receiver_ty. These could
     // be found in any of the derefs of receiver_ty, so we have to go through
     // that.
-    for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+    for self_ty in iter::once(receiver_ty).chain(rest_of_deref_chain) {
         iterate_inherent_methods(
             self_ty,
             db,
@@ -609,7 +616,7 @@ fn iterate_method_candidates_by_receiver(
         )?
     }

-    for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+    for self_ty in iter::once(receiver_ty).chain(rest_of_deref_chain) {
         iterate_trait_method_candidates(
             self_ty,
             db,
@@ -671,8 +678,7 @@ fn iterate_trait_method_candidates(
         }
         _ => Vec::new(),
     };
-    let traits =
-        inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied());
+    let traits = inherent_trait.chain(env_traits).chain(traits_in_scope.iter().copied());

     'traits: for t in traits {
         let data = db.trait_data(t);
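Dropping the .into_iter() works because Iterator::chain accepts any IntoIterator, so the collection can be passed directly. For instance:

    fn main() {
        let inherent = vec![1];
        let env = vec![2, 3];
        let in_scope = [4, 5];

        // chain() accepts any IntoIterator, so `env` needs no explicit
        // `.into_iter()`, and a slice iterator can be copied afterwards.
        let traits: Vec<u32> = inherent
            .into_iter()
            .chain(env)
            .chain(in_scope.iter().copied())
            .collect();
        assert_eq!(traits, vec![1, 2, 3, 4, 5]);
    }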
@@ -800,7 +806,7 @@ fn iterate_inherent_methods(
 ) -> ControlFlow<()> {
     let impls_for_self_ty = filter_inherent_impls_for_self_ty(impls, &self_ty.value);
     for &impl_def in impls_for_self_ty {
-        for &item in db.impl_data(impl_def).items.iter() {
+        for &item in &db.impl_data(impl_def).items {
             if !is_valid_candidate(
                 db,
                 env.clone(),
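Looping over &collection is equivalent to calling .iter(): a shared reference to a Vec or slice implements IntoIterator yielding references, and the &item pattern then copies each Copy element out. For example:

    fn main() {
        let items: Vec<u32> = vec![10, 20, 30];
        let mut sum = 0;
        // `&items` iterates by reference, exactly like `items.iter()`;
        // the `&item` pattern dereferences each `&u32` to a `u32`.
        for &item in &items {
            sum += item;
        }
        assert_eq!(sum, 60);
    }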
@@ -350,29 +350,28 @@ impl<'a> CompletionContext<'a> {
 impl<'a> CompletionContext<'a> {
     pub(super) fn new(
         db: &'a RootDatabase,
-        position: FilePosition,
+        position @ FilePosition { file_id, offset }: FilePosition,
         config: &'a CompletionConfig,
     ) -> Option<CompletionContext<'a>> {
         let _p = profile::span("CompletionContext::new");
         let sema = Semantics::new(db);

-        let original_file = sema.parse(position.file_id);
+        let original_file = sema.parse(file_id);

         // Insert a fake ident to get a valid parse tree. We will use this file
         // to determine context, though the original_file will be used for
         // actual completion.
         let file_with_fake_ident = {
-            let parse = db.parse(position.file_id);
-            let edit = Indel::insert(position.offset, "intellijRulezz".to_string());
+            let parse = db.parse(file_id);
+            let edit = Indel::insert(offset, "intellijRulezz".to_string());
             parse.reparse(&edit).tree()
         };
         let fake_ident_token =
-            file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
+            file_with_fake_ident.syntax().token_at_offset(offset).right_biased().unwrap();

-        let original_token =
-            original_file.syntax().token_at_offset(position.offset).left_biased()?;
+        let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
         let token = sema.descend_into_macros_single(original_token.clone());
-        let scope = sema.scope_at_offset(&token, position.offset);
+        let scope = sema.scope_at_offset(&token, offset);
         let krate = scope.krate();
         let mut locals = vec![];
         scope.process_all_names(&mut |name, scope| {
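The position @ FilePosition { file_id, offset } parameter keeps the whole value bound as position while also binding its fields, so the body can refer to plain file_id and offset. A standalone sketch with a hypothetical FilePosition:

    #[derive(Clone, Copy, Debug)]
    struct FilePosition {
        file_id: u32,
        offset: u32,
    }

    // `@` binds the whole argument while the struct pattern binds its fields
    // (this works cleanly here because the fields are Copy).
    fn describe(position @ FilePosition { file_id, offset }: FilePosition) -> String {
        format!("{position:?} -> file {file_id} at {offset}")
    }

    fn main() {
        println!("{}", describe(FilePosition { file_id: 1, offset: 42 }));
    }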
@@ -408,7 +407,7 @@ impl<'a> CompletionContext<'a> {
         ctx.expand_and_fill(
             original_file.syntax().clone(),
             file_with_fake_ident.syntax().clone(),
-            position.offset,
+            offset,
             fake_ident_token,
         );
         Some(ctx)