Refactor completions expansion

Lukas Wirth 2022-10-10 15:45:24 +02:00
parent 476d043874
commit f3ae5e56fb
2 changed files with 1030 additions and 990 deletions


@@ -23,7 +23,10 @@ use syntax::{
};
use text_edit::Indel;
use crate::CompletionConfig;
use crate::{
context::analysis::{expand_and_analyze, AnalysisResult},
CompletionConfig,
};
const COMPLETION_MARKER: &str = "intellijRulezz";
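The marker exists so the speculative file always parses: splicing a fake identifier in at the cursor turns otherwise-incomplete code into a well-formed tree. A minimal sketch of the idea, using plain string insertion in place of the Indel/reparse machinery used below:

fn with_marker(src: &str, offset: usize) -> String {
    // sketch only: plain string insertion standing in for Indel + reparse
    const COMPLETION_MARKER: &str = "intellijRulezz";
    let mut text = src.to_string();
    text.insert_str(offset, COMPLETION_MARKER);
    text
}

// "foo.$0" parses with an error node at the cursor, but after splicing
// it becomes "foo.intellijRulezz", a well-formed field access that the
// analysis below can classify.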
@@ -561,15 +564,27 @@ impl<'a> CompletionContext<'a> {
let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
parse.reparse(&edit).tree()
};
let fake_ident_token =
file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;
// always pick the token to the immediate left of the cursor, as that is what we are actually
// completing on
let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
let token = sema.descend_into_macros_single(original_token.clone());
let AnalysisResult {
analysis,
expected: (expected_type, expected_name),
qualifier_ctx,
token,
offset,
} = expand_and_analyze(
&sema,
original_file.syntax().clone(),
file_with_fake_ident.syntax().clone(),
offset,
&original_token,
)?;
// adjust for macro input; this still fails if there is no token written yet
let scope_offset = if original_token == token { offset } else { token.text_range().end() };
let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?;
let scope = sema.scope_at_offset(&token.parent()?, offset)?;
let krate = scope.krate();
let module = scope.module();
@@ -583,7 +598,7 @@ impl<'a> CompletionContext<'a> {
let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
let mut ctx = CompletionContext {
let ctx = CompletionContext {
sema,
scope,
db,
@@ -593,19 +608,13 @@ impl<'a> CompletionContext<'a> {
token,
krate,
module,
expected_name: None,
expected_type: None,
qualifier_ctx: Default::default(),
expected_name,
expected_type,
qualifier_ctx,
locals,
depth_from_crate_root,
};
let ident_ctx = ctx.expand_and_analyze(
original_file.syntax().clone(),
file_with_fake_ident.syntax().clone(),
offset,
fake_ident_token,
)?;
Some((ctx, ident_ctx))
Some((ctx, analysis))
}
}


@@ -11,25 +11,64 @@ use syntax::{
};
use crate::context::{
AttrCtx, CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx,
ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext,
PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
COMPLETION_MARKER,
AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext,
LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind,
PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER,
};
impl<'a> CompletionContext<'a> {
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fails, we stop, so the original
/// and speculative states stay in sync.
pub(super) fn expand_and_analyze(
&mut self,
struct ExpansionResult {
original_file: SyntaxNode,
speculative_file: SyntaxNode,
offset: TextSize,
fake_ident_token: SyntaxToken,
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
pub(super) struct AnalysisResult {
pub(super) analysis: CompletionAnalysis,
pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
pub(super) qualifier_ctx: QualifierCtx,
pub(super) token: SyntaxToken,
pub(super) offset: TextSize,
}
pub(super) fn expand_and_analyze(
sema: &Semantics<'_, RootDatabase>,
original_file: SyntaxNode,
speculative_file: SyntaxNode,
offset: TextSize,
original_token: &SyntaxToken,
) -> Option<AnalysisResult> {
// as we insert after the offset, right-biased will *always* pick the identifier, no matter
// whether there is an ident already typed or not
let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
// the relative offset between the cursor and the *identifier* token we are completing on
let relative_offset = offset - fake_ident_token.text_range().start();
// make the offset point to the start of the original token, as that is what the
// intermediate offsets calculated in expansion always point to
let offset = offset - relative_offset;
let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
// add the relative offset back, so that left_biased finds the proper token
let offset = expansion.offset + relative_offset;
let token = expansion.original_file.token_at_offset(offset).left_biased()?;
analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
})
}
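The offset bookkeeping above is the subtle part, so here is a worked example under assumed values; Range<usize> is a hypothetical stand-in for the real SyntaxToken/TextSize types:

use std::ops::Range;

// A token is modelled as just its byte range in the file (sketch).
fn split_offset(cursor: usize, fake_ident: Range<usize>) -> (usize, usize) {
    // how far the cursor sits inside the identifier being completed
    let relative_offset = cursor - fake_ident.start;
    // expansion works with the token start; the caller re-adds the
    // relative offset once expansion has remapped the start
    (cursor - relative_offset, relative_offset)
}

fn main() {
    // cursor two bytes into an identifier spanning bytes 8..22
    let (expansion_offset, rel) = split_offset(10, 8..22);
    assert_eq!((expansion_offset, rel), (8, 2));
    // if expansion maps offset 8 to, say, offset 40 inside the
    // expansion, the left-biased token lookup then happens at 40 + 2
}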
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fails, we stop, so the original
/// and speculative states stay in sync.
fn expand(
sema: &Semantics<'_, RootDatabase>,
mut original_file: SyntaxNode,
mut speculative_file: SyntaxNode,
mut offset: TextSize,
mut fake_ident_token: SyntaxToken,
) -> Option<CompletionAnalysis> {
let _p = profile::span("CompletionContext::expand_and_fill");
) -> ExpansionResult {
let _p = profile::span("CompletionContext::expand");
let mut derive_ctx = None;
'expansion: loop {
@@ -46,8 +85,8 @@ impl<'a> CompletionContext<'a> {
// first try to expand attributes as these are always the outermost macro calls
'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
match (
self.sema.expand_attr_macro(&actual_item),
self.sema.speculative_expand_attr_macro(
sema.expand_attr_macro(&actual_item),
sema.speculative_expand_attr_macro(
&actual_item,
&item_with_fake_ident,
fake_ident_token.clone(),
@@ -90,8 +129,8 @@ impl<'a> CompletionContext<'a> {
spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
) {
if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
self.sema.speculative_expand_derive_as_pseudo_attr_macro(
sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
sema.speculative_expand_derive_as_pseudo_attr_macro(
&orig_attr,
&spec_attr,
fake_ident_token.clone(),
@@ -127,8 +166,8 @@ impl<'a> CompletionContext<'a> {
};
match (
self.sema.expand(&actual_macro_call),
self.sema.speculative_expand(
sema.expand(&actual_macro_call),
sema.speculative_expand(
&actual_macro_call,
&speculative_args,
fake_ident_token.clone(),
@@ -157,16 +196,115 @@ impl<'a> CompletionContext<'a> {
// none of our states have changed so stop the loop
break 'expansion;
}
ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
}
self.analyze(&original_file, speculative_file, offset, derive_ctx)
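The loop above drives both trees through expansion in lockstep; reduced to its overall shape, it looks roughly like the following sketch, where Tree and both closures are hypothetical stand-ins for the SyntaxNode/Semantics machinery:

fn expand_lockstep<Tree>(
    mut original: Tree,
    mut speculative: Tree,
    expand_real: impl Fn(&Tree) -> Option<Tree>,
    expand_speculative: impl Fn(&Tree) -> Option<Tree>,
) -> (Tree, Tree) {
    loop {
        match (expand_real(&original), expand_speculative(&speculative)) {
            // both sides expanded: descend into the expansions and retry
            (Some(o), Some(s)) => {
                original = o;
                speculative = s;
            }
            // as soon as either side fails, stop, so the real and the
            // speculative state never drift apart
            _ => return (original, speculative),
        }
    }
}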
/// Fill the completion context; this is what does the semantic reasoning about the surrounding
/// context of the completion location.
fn analyze(
sema: &Semantics<'_, RootDatabase>,
expansion_result: ExpansionResult,
original_token: &SyntaxToken,
self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
let _p = profile::span("CompletionContext::analyze");
let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
expansion_result;
let syntax_element = NodeOrToken::Token(fake_ident_token);
if is_in_token_of_for_loop(syntax_element.clone()) {
// for pat $0
// there is nothing to complete here except the `in` keyword
// don't bother populating the context
// FIXME: the completion calculations should end up good enough
// such that this special case becomes unnecessary
return None;
}
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
if let Some(ast::NameLike::NameRef(name_ref)) =
find_node_at_offset(&file_with_fake_ident, offset)
{
let parent = name_ref.syntax().parent()?;
let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
path_ctx.kind = PathKind::Derive {
existing_derives: sema
.resolve_derive_macro(&origin_attr)
.into_iter()
.flatten()
.flatten()
.collect(),
};
}
return Some((
CompletionAnalysis::NameRef(nameref_ctx),
(None, None),
QualifierCtx::default(),
));
}
return None;
}
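// Illustrative sketch of the derive special case above:
//
//   #[derive(Clone, Deb$0)]
//   struct S;
//
// The name ref under the cursor is reclassified as PathKind::Derive,
// and the derives already applied (here `Clone`) are collected into
// `existing_derives` so they are not suggested a second time.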
let name_like = match find_node_at_offset(&speculative_file, offset) {
Some(it) => it,
None => {
let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
CompletionAnalysis::String {
original,
expanded: ast::String::cast(self_token.clone()),
}
} else {
// Fix up trailing whitespace problem
// #[attr(foo = $0
let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
let p = token.parent()?;
if p.kind() == SyntaxKind::TOKEN_TREE
&& p.ancestors().any(|it| it.kind() == SyntaxKind::META)
{
let colon_prefix = previous_non_trivia_token(self_token.clone())
.map_or(false, |it| T![:] == it.kind());
CompletionAnalysis::UnexpandedAttrTT {
fake_attribute_under_caret: syntax_element
.ancestors()
.find_map(ast::Attr::cast),
colon_prefix,
}
} else {
return None;
}
};
return Some((analysis, (None, None), QualifierCtx::default()));
}
};
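// Illustrative sketch of cursor positions that take the fallback above:
//
//   "some str$0"        -> CompletionAnalysis::String
//   #[attr(foo = $0)]   -> CompletionAnalysis::UnexpandedAttrTT
//
// In both cases no name-like node exists at the offset, so the analysis
// is derived from the raw token instead.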
let expected = expected_type_and_name(sema, &self_token, &name_like);
let mut qual_ctx = QualifierCtx::default();
let analysis = match name_like {
ast::NameLike::Lifetime(lifetime) => {
CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
}
ast::NameLike::NameRef(name_ref) => {
let parent = name_ref.syntax().parent()?;
let (nameref_ctx, qualifier_ctx) =
classify_name_ref(sema, &original_file, name_ref, parent.clone())?;
qual_ctx = qualifier_ctx;
CompletionAnalysis::NameRef(nameref_ctx)
}
ast::NameLike::Name(name) => {
let name_ctx = classify_name(sema, &original_file, name)?;
CompletionAnalysis::Name(name_ctx)
}
};
Some((analysis, expected, qual_ctx))
}
/// Calculate the expected type and name of the cursor position.
fn expected_type_and_name(
sema: &Semantics<'_, RootDatabase>,
token: &SyntaxToken,
name_like: &ast::NameLike,
) -> (Option<Type>, Option<NameOrNameRef>) {
let mut node = match self.token.parent() {
let mut node = match token.parent() {
Some(it) => it,
None => return (None, None),
};
@@ -215,8 +353,8 @@ impl<'a> CompletionContext<'a> {
cov_mark::hit!(expected_type_let_with_leading_char);
cov_mark::hit!(expected_type_let_without_leading_char);
let ty = it.pat()
.and_then(|pat| self.sema.type_of_pat(&pat))
.or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
.and_then(|pat| sema.type_of_pat(&pat))
.or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
.map(TypeInfo::original);
let name = match it.pat() {
Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
@@ -228,16 +366,16 @@ impl<'a> CompletionContext<'a> {
ast::LetExpr(it) => {
cov_mark::hit!(expected_type_if_let_without_leading_char);
let ty = it.pat()
.and_then(|pat| self.sema.type_of_pat(&pat))
.or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
.and_then(|pat| sema.type_of_pat(&pat))
.or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
.map(TypeInfo::original);
(ty, None)
},
ast::ArgList(_) => {
cov_mark::hit!(expected_type_fn_param);
ActiveParameter::at_token(
&self.sema,
self.token.clone(),
&sema,
token.clone(),
).map(|ap| {
let name = ap.ident().map(NameOrNameRef::Name);
@@ -249,22 +387,22 @@ impl<'a> CompletionContext<'a> {
ast::RecordExprFieldList(it) => {
// wouldn't try {} be nice...
(|| {
if self.token.kind() == T![..]
|| self.token.prev_token().map(|t| t.kind()) == Some(T![..])
if token.kind() == T![..]
|| token.prev_token().map(|t| t.kind()) == Some(T![..])
{
cov_mark::hit!(expected_type_struct_func_update);
let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
let ty = self.sema.type_of_expr(&record_expr.into())?;
let ty = sema.type_of_expr(&record_expr.into())?;
Some((
Some(ty.original),
None
))
} else {
cov_mark::hit!(expected_type_struct_field_without_leading_char);
let expr_field = self.token.prev_sibling_or_token()?
let expr_field = token.prev_sibling_or_token()?
.into_node()
.and_then(ast::RecordExprField::cast)?;
let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
Some((
Some(ty),
expr_field.field_name().map(NameOrNameRef::NameRef),
@@ -276,12 +414,12 @@ impl<'a> CompletionContext<'a> {
if let Some(expr) = it.expr() {
cov_mark::hit!(expected_type_struct_field_with_leading_char);
(
self.sema.type_of_expr(&expr).map(TypeInfo::original),
sema.type_of_expr(&expr).map(TypeInfo::original),
it.field_name().map(NameOrNameRef::NameRef),
)
} else {
cov_mark::hit!(expected_type_struct_field_followed_by_comma);
let ty = self.sema.resolve_record_field(&it)
let ty = sema.resolve_record_field(&it)
.map(|(_, _, ty)| ty);
(
ty,
@@ -292,41 +430,41 @@ impl<'a> CompletionContext<'a> {
// match foo { $0 }
// match foo { ..., pat => $0 }
ast::MatchExpr(it) => {
let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind());
let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind());
let ty = if on_arrow {
// match foo { ..., pat => $0 }
cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
self.sema.type_of_expr(&it.into())
sema.type_of_expr(&it.into())
} else {
// match foo { $0 }
cov_mark::hit!(expected_type_match_arm_without_leading_char);
it.expr().and_then(|e| self.sema.type_of_expr(&e))
it.expr().and_then(|e| sema.type_of_expr(&e))
}.map(TypeInfo::original);
(ty, None)
},
ast::IfExpr(it) => {
let ty = it.condition()
.and_then(|e| self.sema.type_of_expr(&e))
.and_then(|e| sema.type_of_expr(&e))
.map(TypeInfo::original);
(ty, None)
},
ast::IdentPat(it) => {
cov_mark::hit!(expected_type_if_let_with_leading_char);
cov_mark::hit!(expected_type_match_arm_with_leading_char);
let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
(ty, None)
},
ast::Fn(it) => {
cov_mark::hit!(expected_type_fn_ret_with_leading_char);
cov_mark::hit!(expected_type_fn_ret_without_leading_char);
let def = self.sema.to_def(&it);
(def.map(|def| def.ret_type(self.db)), None)
let def = sema.to_def(&it);
(def.map(|def| def.ret_type(sema.db)), None)
},
ast::ClosureExpr(it) => {
let ty = self.sema.type_of_expr(&it.into());
ty.and_then(|ty| ty.original.as_callable(self.db))
let ty = sema.type_of_expr(&it.into());
ty.and_then(|ty| ty.original.as_callable(sema.db))
.map(|c| (Some(c.return_type()), None))
.unwrap_or((None, None))
},
@@ -345,111 +483,13 @@ impl<'a> CompletionContext<'a> {
}
};
}
}
}
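Concretely, the walk above answers "what type and name does this position expect?". A few cursor positions and the pair they yield, as an illustrative sketch ($0 marks the cursor, as in the comments above):

// (expected type, expected name) by example (sketch):
//   let foo: u32 = $0;      // (Some(u32), Some(foo))       via ast::LetStmt
//   S { field: $0 }         // (field's type, Some(field))  via ast::RecordExprField
//   match f() { _ => $0 }   // (the match's type, None)     via ast::MatchExpr
//   |x: u32| $0             // (closure return type, None)  via ast::ClosureExpr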
/// Fill the completion context; this is what does the semantic reasoning about the surrounding
/// context of the completion location.
fn analyze(
&mut self,
original_file: &SyntaxNode,
file_with_fake_ident: SyntaxNode,
offset: TextSize,
derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
) -> Option<CompletionAnalysis> {
let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?;
let syntax_element = NodeOrToken::Token(fake_ident_token);
if is_in_token_of_for_loop(syntax_element.clone()) {
// for pat $0
// there is nothing to complete here except the `in` keyword
// don't bother populating the context
// FIXME: the completion calculations should end up good enough
// such that this special case becomes unnecessary
return None;
}
// Overwrite the path kind for derives
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
if let Some(ast::NameLike::NameRef(name_ref)) =
find_node_at_offset(&file_with_fake_ident, offset)
{
let parent = name_ref.syntax().parent()?;
let (mut nameref_ctx, _) =
Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?;
if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
path_ctx.kind = PathKind::Derive {
existing_derives: self
.sema
.resolve_derive_macro(&origin_attr)
.into_iter()
.flatten()
.flatten()
.collect(),
};
}
return Some(CompletionAnalysis::NameRef(nameref_ctx));
}
return None;
}
let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
Some(it) => it,
None => {
let analysis =
if let Some(original) = ast::String::cast(self.original_token.clone()) {
CompletionAnalysis::String {
original,
expanded: ast::String::cast(self.token.clone()),
}
} else {
// Fix up trailing whitespace problem
// #[attr(foo = $0
let token =
syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?;
let p = token.parent()?;
if p.kind() == SyntaxKind::TOKEN_TREE
&& p.ancestors().any(|it| it.kind() == SyntaxKind::META)
{
let colon_prefix = previous_non_trivia_token(self.token.clone())
.map_or(false, |it| T![:] == it.kind());
CompletionAnalysis::UnexpandedAttrTT {
fake_attribute_under_caret: syntax_element
.ancestors()
.find_map(ast::Attr::cast),
colon_prefix,
}
} else {
return None;
}
};
return Some(analysis);
}
};
(self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
let analysis = match name_like {
ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
Self::classify_lifetime(&self.sema, original_file, lifetime)?,
),
ast::NameLike::NameRef(name_ref) => {
let parent = name_ref.syntax().parent()?;
let (nameref_ctx, qualifier_ctx) =
Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?;
self.qualifier_ctx = qualifier_ctx;
CompletionAnalysis::NameRef(nameref_ctx)
}
ast::NameLike::Name(name) => {
let name_ctx = Self::classify_name(&self.sema, original_file, name)?;
CompletionAnalysis::Name(name_ctx)
}
};
Some(analysis)
}
fn classify_lifetime(
_sema: &Semantics<'_, RootDatabase>,
original_file: &SyntaxNode,
lifetime: ast::Lifetime,
) -> Option<LifetimeContext> {
let parent = lifetime.syntax().parent()?;
if parent.kind() == SyntaxKind::ERROR {
return None;
@@ -470,13 +510,13 @@ impl<'a> CompletionContext<'a> {
let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
Some(LifetimeContext { lifetime, kind })
}
}
fn classify_name(
sema: &Semantics<'_, RootDatabase>,
original_file: &SyntaxNode,
name: ast::Name,
) -> Option<NameContext> {
let parent = name.syntax().parent()?;
let kind = match_ast! {
match parent {
@@ -510,18 +550,17 @@ impl<'a> CompletionContext<'a> {
};
let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
Some(NameContext { name, kind })
}
}
fn classify_name_ref(
sema: &Semantics<'_, RootDatabase>,
original_file: &SyntaxNode,
name_ref: ast::NameRef,
parent: SyntaxNode,
) -> Option<(NameRefContext, QualifierCtx)> {
let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
let make_res =
|kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
@@ -748,9 +787,7 @@ impl<'a> CompletionContext<'a> {
let find_ret_ty = |it: SyntaxNode| {
if let Some(item) = ast::Item::cast(it.clone()) {
match item {
ast::Item::Fn(f) => {
Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
}
ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
ast::Item::MacroCall(_) => None,
_ => Some(None),
}
@@ -770,9 +807,7 @@ impl<'a> CompletionContext<'a> {
}
};
let find_fn_self_param = |it| match it {
ast::Item::Fn(fn_) => {
Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
}
ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
ast::Item::MacroCall(_) => None,
_ => Some(None),
};
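// Illustrative sketch of what the two ancestor walks resolve:
//
//   fn f() -> u32 { return $0 }      // find_ret_ty yields u32
//   impl S { fn m(&self) { $0 } }    // find_fn_self_param yields &self
//
// A MacroCall ancestor maps to None, so find_map keeps walking outward
// through macro calls, while any other item stops the search.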
@@ -866,10 +901,8 @@ impl<'a> CompletionContext<'a> {
let kind = attr.kind();
let attached = attr.syntax().parent()?;
let is_trailing_outer_attr = kind != AttrKind::Inner
&& non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
.is_none();
let annotated_item_kind =
if is_trailing_outer_attr { None } else { Some(attached.kind()) };
&& non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
};
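// Illustrative sketch: annotated_item_kind is only set when an item
// actually follows the attribute.
//
//   #[$0]          // followed by an item below, so
//   struct S;      //   annotated_item_kind = Some(STRUCT)
//
//   struct S;
//   #[$0]          // trailing outer attr, nothing follows -> None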
@@ -967,10 +1000,11 @@ impl<'a> CompletionContext<'a> {
.map(|it| it.parent_path());
if let Some(qualifier) = qualifier {
let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
Some(ast::PathSegmentKind::Type {
type_ref: Some(type_ref),
trait_ref,
}) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
if qualifier.qualifier().is_none() =>
{
Some((type_ref, trait_ref))
}
_ => None,
};
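// Illustrative sketch: a "type anchor" is a qualifier with no further
// qualifier of its own.
//
//   <Vec<u8> as IntoIterator>::$0   // type_ref = Vec<u8>, trait_ref = Some(IntoIterator)
//   <Vec<u8>>::$0                   // type_ref = Vec<u8>, trait_ref = None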
@@ -987,8 +1021,7 @@ impl<'a> CompletionContext<'a> {
// For understanding how and why super_chain_len is calculated the way it
// is, check the documentation at its definition
let mut segment_count = 0;
let super_count =
iter::successors(Some(qualifier.clone()), |p| p.qualifier())
let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
.take_while(|p| {
p.segment()
.and_then(|s| {
@@ -1042,8 +1075,7 @@ impl<'a> CompletionContext<'a> {
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.find(|it| it.kind() == T![unsafe]);
qualifier_ctx.vis_node =
error_node.children().find_map(ast::Visibility::cast);
qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
}
}
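// Illustrative sketch: error-node recovery for partially written items
// where only a qualifier has been typed so far.
//
//   unsafe $0      // the `unsafe` token is recorded in the QualifierCtx
//   pub(crate) $0  // the visibility node is recorded via vis_node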
@@ -1067,7 +1099,6 @@ impl<'a> CompletionContext<'a> {
}
}
Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
}
}
fn pattern_context_for(