Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 20:42:04 +00:00
Refactor completions expansion
parent 476d043874
commit f3ae5e56fb
2 changed files with 1030 additions and 990 deletions
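The diff below moves completion expansion and analysis off `CompletionContext`'s `&mut self` methods into free functions (`expand`, `analyze`) composed by `expand_and_analyze`, which hand their results back in plain structs (`ExpansionResult`, `AnalysisResult`). As a rough, self-contained illustration of that shape only — all types and bodies here are simplified stand-ins, not rust-analyzer's real API; only the function and struct names mirror the diff:

// Stand-in result structs, mirroring the diff's ExpansionResult / AnalysisResult.
struct ExpansionResult {
    expanded: String,
    offset: usize,
}

struct AnalysisResult {
    analysis: String,
    offset: usize,
}

// Free function instead of a `&mut self` method: every input is explicit.
fn expand(input: &str, offset: usize) -> ExpansionResult {
    // stand-in for the repeated attribute/macro expansion loop
    ExpansionResult { expanded: input.to_uppercase(), offset }
}

// Pure analysis over the expansion result; no context object is mutated.
fn analyze(expansion: ExpansionResult) -> Option<AnalysisResult> {
    let analysis = expansion.expanded.get(..expansion.offset)?.to_string();
    Some(AnalysisResult { analysis, offset: expansion.offset })
}

// Composed entry point, as in the new expand_and_analyze.
fn expand_and_analyze(input: &str, offset: usize) -> Option<AnalysisResult> {
    analyze(expand(input, offset))
}

fn main() {
    let result = expand_and_analyze("completion", 4).unwrap();
    assert_eq!(result.analysis, "COMP");
    println!("analysis = {:?} at offset {}", result.analysis, result.offset);
}

The gain this mirrors: analysis results are computed before the context exists, so `CompletionContext` can be built once as an immutable value instead of being constructed with `None` placeholders and patched up by `&mut self` methods afterwards.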
@@ -23,7 +23,10 @@ use syntax::{
 };
 use text_edit::Indel;
 
-use crate::CompletionConfig;
+use crate::{
+    context::analysis::{expand_and_analyze, AnalysisResult},
+    CompletionConfig,
+};
 
 const COMPLETION_MARKER: &str = "intellijRulezz";
 
@@ -561,15 +564,27 @@ impl<'a> CompletionContext<'a> {
             let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
             parse.reparse(&edit).tree()
         };
-        let fake_ident_token =
-            file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;
 
         // always pick the token to the immediate left of the cursor, as that is what we are actually
         // completing on
         let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
-        let token = sema.descend_into_macros_single(original_token.clone());
+
+        let AnalysisResult {
+            analysis,
+            expected: (expected_type, expected_name),
+            qualifier_ctx,
+            token,
+            offset,
+        } = expand_and_analyze(
+            &sema,
+            original_file.syntax().clone(),
+            file_with_fake_ident.syntax().clone(),
+            offset,
+            &original_token,
+        )?;
+
         // adjust for macro input, this still fails if there is no token written yet
-        let scope = sema.scope_at_offset(&token.parent()?, offset)?;
+        let scope_offset = if original_token == token { offset } else { token.text_range().end() };
+        let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?;
         let krate = scope.krate();
         let module = scope.module();
@@ -583,7 +598,7 @@ impl<'a> CompletionContext<'a> {
 
         let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
 
-        let mut ctx = CompletionContext {
+        let ctx = CompletionContext {
             sema,
             scope,
             db,
@@ -593,19 +608,13 @@ impl<'a> CompletionContext<'a> {
             token,
             krate,
             module,
-            expected_name: None,
-            expected_type: None,
-            qualifier_ctx: Default::default(),
+            expected_name,
+            expected_type,
+            qualifier_ctx,
             locals,
             depth_from_crate_root,
         };
-        let ident_ctx = ctx.expand_and_analyze(
-            original_file.syntax().clone(),
-            file_with_fake_ident.syntax().clone(),
-            offset,
-            fake_ident_token,
-        )?;
-        Some((ctx, ident_ctx))
+        Some((ctx, analysis))
     }
 }
@@ -11,25 +11,64 @@ use syntax::{
 };
 
 use crate::context::{
-    AttrCtx, CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx,
-    ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
-    NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext,
-    PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
-    COMPLETION_MARKER,
+    AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext,
+    LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind,
+    PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
+    TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER,
 };
 
-impl<'a> CompletionContext<'a> {
+struct ExpansionResult {
+    original_file: SyntaxNode,
+    speculative_file: SyntaxNode,
+    offset: TextSize,
+    fake_ident_token: SyntaxToken,
+    derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
+}
+
+pub(super) struct AnalysisResult {
+    pub(super) analysis: CompletionAnalysis,
+    pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
+    pub(super) qualifier_ctx: QualifierCtx,
+    pub(super) token: SyntaxToken,
+    pub(super) offset: TextSize,
+}
+
+pub(super) fn expand_and_analyze(
+    sema: &Semantics<'_, RootDatabase>,
+    original_file: SyntaxNode,
+    speculative_file: SyntaxNode,
+    offset: TextSize,
+    original_token: &SyntaxToken,
+) -> Option<AnalysisResult> {
+    // as we insert after the offset, right biased will *always* pick the identifier no matter
+    // if there is an ident already typed or not
+    let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
+    // the relative offset between the cursor and the *identifier* token we are completing on
+    let relative_offset = offset - fake_ident_token.text_range().start();
+    // make the offset point to the start of the original token, as that is what the
+    // intermediate offsets calculated in expansion always points to
+    let offset = offset - relative_offset;
+    let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
+    // add the relative offset back, so that left_biased finds the proper token
+    let offset = expansion.offset + relative_offset;
+    let token = expansion.original_file.token_at_offset(offset).left_biased()?;
+
+    analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
+        AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
+    })
+}
+
 /// Expand attributes and macro calls at the current cursor position for both the original file
 /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
 /// and speculative states stay in sync.
-pub(super) fn expand_and_analyze(
-    &mut self,
+fn expand(
+    sema: &Semantics<'_, RootDatabase>,
     mut original_file: SyntaxNode,
     mut speculative_file: SyntaxNode,
     mut offset: TextSize,
     mut fake_ident_token: SyntaxToken,
-) -> Option<CompletionAnalysis> {
-    let _p = profile::span("CompletionContext::expand_and_fill");
+) -> ExpansionResult {
+    let _p = profile::span("CompletionContext::expand");
     let mut derive_ctx = None;
 
     'expansion: loop {
@@ -46,8 +85,8 @@ impl<'a> CompletionContext<'a> {
         // first try to expand attributes as these are always the outermost macro calls
         'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
             match (
-                self.sema.expand_attr_macro(&actual_item),
-                self.sema.speculative_expand_attr_macro(
+                sema.expand_attr_macro(&actual_item),
+                sema.speculative_expand_attr_macro(
                     &actual_item,
                     &item_with_fake_ident,
                     fake_ident_token.clone(),
@@ -90,8 +129,8 @@ impl<'a> CompletionContext<'a> {
             spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
         ) {
             if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
-                self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
-                self.sema.speculative_expand_derive_as_pseudo_attr_macro(
+                sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
+                sema.speculative_expand_derive_as_pseudo_attr_macro(
                     &orig_attr,
                     &spec_attr,
                     fake_ident_token.clone(),
@@ -127,8 +166,8 @@ impl<'a> CompletionContext<'a> {
         };
 
         match (
-            self.sema.expand(&actual_macro_call),
-            self.sema.speculative_expand(
+            sema.expand(&actual_macro_call),
+            sema.speculative_expand(
                 &actual_macro_call,
                 &speculative_args,
                 fake_ident_token.clone(),
@@ -157,16 +196,115 @@ impl<'a> CompletionContext<'a> {
             // none of our states have changed so stop the loop
             break 'expansion;
         }
+
+    ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
+}
 
-        self.analyze(&original_file, speculative_file, offset, derive_ctx)
+/// Fill the completion context, this is what does semantic reasoning about the surrounding context
+/// of the completion location.
+fn analyze(
+    sema: &Semantics<'_, RootDatabase>,
+    expansion_result: ExpansionResult,
+    original_token: &SyntaxToken,
+    self_token: &SyntaxToken,
+) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
+    let _p = profile::span("CompletionContext::analyze");
+    let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
+        expansion_result;
+    let syntax_element = NodeOrToken::Token(fake_ident_token);
+    if is_in_token_of_for_loop(syntax_element.clone()) {
+        // for pat $0
+        // there is nothing to complete here except `in` keyword
+        // don't bother populating the context
+        // FIXME: the completion calculations should end up good enough
+        // such that this special case becomes unnecessary
+        return None;
+    }
+
+    // Overwrite the path kind for derives
+    if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
+        if let Some(ast::NameLike::NameRef(name_ref)) =
+            find_node_at_offset(&file_with_fake_ident, offset)
+        {
+            let parent = name_ref.syntax().parent()?;
+            let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
+            if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
+                path_ctx.kind = PathKind::Derive {
+                    existing_derives: sema
+                        .resolve_derive_macro(&origin_attr)
+                        .into_iter()
+                        .flatten()
+                        .flatten()
+                        .collect(),
+                };
+            }
+            return Some((
+                CompletionAnalysis::NameRef(nameref_ctx),
+                (None, None),
+                QualifierCtx::default(),
+            ));
+        }
+        return None;
+    }
+
+    let name_like = match find_node_at_offset(&speculative_file, offset) {
+        Some(it) => it,
+        None => {
+            let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
+                CompletionAnalysis::String {
+                    original,
+                    expanded: ast::String::cast(self_token.clone()),
+                }
+            } else {
+                // Fix up trailing whitespace problem
+                // #[attr(foo = $0
+                let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
+                let p = token.parent()?;
+                if p.kind() == SyntaxKind::TOKEN_TREE
+                    && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+                {
+                    let colon_prefix = previous_non_trivia_token(self_token.clone())
+                        .map_or(false, |it| T![:] == it.kind());
+                    CompletionAnalysis::UnexpandedAttrTT {
+                        fake_attribute_under_caret: syntax_element
+                            .ancestors()
+                            .find_map(ast::Attr::cast),
+                        colon_prefix,
+                    }
+                } else {
+                    return None;
+                }
+            };
+            return Some((analysis, (None, None), QualifierCtx::default()));
+        }
+    };
+    let expected = expected_type_and_name(sema, &self_token, &name_like);
+    let mut qual_ctx = QualifierCtx::default();
+    let analysis = match name_like {
+        ast::NameLike::Lifetime(lifetime) => {
+            CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
+        }
+        ast::NameLike::NameRef(name_ref) => {
+            let parent = name_ref.syntax().parent()?;
+            let (nameref_ctx, qualifier_ctx) =
+                classify_name_ref(sema, &original_file, name_ref, parent.clone())?;
+            qual_ctx = qualifier_ctx;
+            CompletionAnalysis::NameRef(nameref_ctx)
+        }
+        ast::NameLike::Name(name) => {
+            let name_ctx = classify_name(sema, &original_file, name)?;
+            CompletionAnalysis::Name(name_ctx)
+        }
+    };
+    Some((analysis, expected, qual_ctx))
 }
 
 /// Calculate the expected type and name of the cursor position.
 fn expected_type_and_name(
-    &self,
+    sema: &Semantics<'_, RootDatabase>,
+    token: &SyntaxToken,
     name_like: &ast::NameLike,
 ) -> (Option<Type>, Option<NameOrNameRef>) {
-    let mut node = match self.token.parent() {
+    let mut node = match token.parent() {
         Some(it) => it,
         None => return (None, None),
     };
@@ -215,8 +353,8 @@ impl<'a> CompletionContext<'a> {
                 cov_mark::hit!(expected_type_let_with_leading_char);
                 cov_mark::hit!(expected_type_let_without_leading_char);
                 let ty = it.pat()
-                    .and_then(|pat| self.sema.type_of_pat(&pat))
-                    .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
+                    .and_then(|pat| sema.type_of_pat(&pat))
+                    .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
                     .map(TypeInfo::original);
                 let name = match it.pat() {
                     Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
@@ -228,16 +366,16 @@ impl<'a> CompletionContext<'a> {
             ast::LetExpr(it) => {
                 cov_mark::hit!(expected_type_if_let_without_leading_char);
                 let ty = it.pat()
-                    .and_then(|pat| self.sema.type_of_pat(&pat))
-                    .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
+                    .and_then(|pat| sema.type_of_pat(&pat))
+                    .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
                     .map(TypeInfo::original);
                 (ty, None)
             },
             ast::ArgList(_) => {
                 cov_mark::hit!(expected_type_fn_param);
                 ActiveParameter::at_token(
-                    &self.sema,
-                    self.token.clone(),
+                    &sema,
+                    token.clone(),
                 ).map(|ap| {
                     let name = ap.ident().map(NameOrNameRef::Name);
 
@@ -249,22 +387,22 @@ impl<'a> CompletionContext<'a> {
             ast::RecordExprFieldList(it) => {
                 // wouldn't try {} be nice...
                 (|| {
-                    if self.token.kind() == T![..]
-                        || self.token.prev_token().map(|t| t.kind()) == Some(T![..])
+                    if token.kind() == T![..]
+                        || token.prev_token().map(|t| t.kind()) == Some(T![..])
                     {
                         cov_mark::hit!(expected_type_struct_func_update);
                         let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
-                        let ty = self.sema.type_of_expr(&record_expr.into())?;
+                        let ty = sema.type_of_expr(&record_expr.into())?;
                         Some((
                             Some(ty.original),
                             None
                         ))
                     } else {
                         cov_mark::hit!(expected_type_struct_field_without_leading_char);
-                        let expr_field = self.token.prev_sibling_or_token()?
+                        let expr_field = token.prev_sibling_or_token()?
                             .into_node()
                             .and_then(ast::RecordExprField::cast)?;
-                        let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
+                        let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
                         Some((
                             Some(ty),
                             expr_field.field_name().map(NameOrNameRef::NameRef),
@@ -276,12 +414,12 @@ impl<'a> CompletionContext<'a> {
                 if let Some(expr) = it.expr() {
                     cov_mark::hit!(expected_type_struct_field_with_leading_char);
                     (
-                        self.sema.type_of_expr(&expr).map(TypeInfo::original),
+                        sema.type_of_expr(&expr).map(TypeInfo::original),
                         it.field_name().map(NameOrNameRef::NameRef),
                     )
                 } else {
                     cov_mark::hit!(expected_type_struct_field_followed_by_comma);
-                    let ty = self.sema.resolve_record_field(&it)
+                    let ty = sema.resolve_record_field(&it)
                         .map(|(_, _, ty)| ty);
                     (
                         ty,
@@ -292,41 +430,41 @@ impl<'a> CompletionContext<'a> {
             // match foo { $0 }
             // match foo { ..., pat => $0 }
             ast::MatchExpr(it) => {
-                let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind());
+                let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind());
 
                 let ty = if on_arrow {
                     // match foo { ..., pat => $0 }
                     cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                     cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
-                    self.sema.type_of_expr(&it.into())
+                    sema.type_of_expr(&it.into())
                 } else {
                     // match foo { $0 }
                     cov_mark::hit!(expected_type_match_arm_without_leading_char);
-                    it.expr().and_then(|e| self.sema.type_of_expr(&e))
+                    it.expr().and_then(|e| sema.type_of_expr(&e))
                 }.map(TypeInfo::original);
                 (ty, None)
             },
             ast::IfExpr(it) => {
                 let ty = it.condition()
-                    .and_then(|e| self.sema.type_of_expr(&e))
+                    .and_then(|e| sema.type_of_expr(&e))
                     .map(TypeInfo::original);
                 (ty, None)
             },
             ast::IdentPat(it) => {
                 cov_mark::hit!(expected_type_if_let_with_leading_char);
                 cov_mark::hit!(expected_type_match_arm_with_leading_char);
-                let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
+                let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                 (ty, None)
             },
             ast::Fn(it) => {
                 cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                 cov_mark::hit!(expected_type_fn_ret_without_leading_char);
-                let def = self.sema.to_def(&it);
-                (def.map(|def| def.ret_type(self.db)), None)
+                let def = sema.to_def(&it);
+                (def.map(|def| def.ret_type(sema.db)), None)
             },
             ast::ClosureExpr(it) => {
-                let ty = self.sema.type_of_expr(&it.into());
-                ty.and_then(|ty| ty.original.as_callable(self.db))
+                let ty = sema.type_of_expr(&it.into());
+                ty.and_then(|ty| ty.original.as_callable(sema.db))
                     .map(|c| (Some(c.return_type()), None))
                     .unwrap_or((None, None))
             },
@@ -347,104 +485,6 @@ impl<'a> CompletionContext<'a> {
         }
     }
 
-    /// Fill the completion context, this is what does semantic reasoning about the surrounding context
-    /// of the completion location.
-    fn analyze(
-        &mut self,
-        original_file: &SyntaxNode,
-        file_with_fake_ident: SyntaxNode,
-        offset: TextSize,
-        derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
-    ) -> Option<CompletionAnalysis> {
-        let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?;
-        let syntax_element = NodeOrToken::Token(fake_ident_token);
-        if is_in_token_of_for_loop(syntax_element.clone()) {
-            // for pat $0
-            // there is nothing to complete here except `in` keyword
-            // don't bother populating the context
-            // FIXME: the completion calculations should end up good enough
-            // such that this special case becomes unnecessary
-            return None;
-        }
-
-        // Overwrite the path kind for derives
-        if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
-            if let Some(ast::NameLike::NameRef(name_ref)) =
-                find_node_at_offset(&file_with_fake_ident, offset)
-            {
-                let parent = name_ref.syntax().parent()?;
-                let (mut nameref_ctx, _) =
-                    Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?;
-                if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
-                    path_ctx.kind = PathKind::Derive {
-                        existing_derives: self
-                            .sema
-                            .resolve_derive_macro(&origin_attr)
-                            .into_iter()
-                            .flatten()
-                            .flatten()
-                            .collect(),
-                    };
-                }
-                return Some(CompletionAnalysis::NameRef(nameref_ctx));
-            }
-            return None;
-        }
-
-        let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
-            Some(it) => it,
-            None => {
-                let analysis =
-                    if let Some(original) = ast::String::cast(self.original_token.clone()) {
-                        CompletionAnalysis::String {
-                            original,
-                            expanded: ast::String::cast(self.token.clone()),
-                        }
-                    } else {
-                        // Fix up trailing whitespace problem
-                        // #[attr(foo = $0
-                        let token =
-                            syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?;
-                        let p = token.parent()?;
-                        if p.kind() == SyntaxKind::TOKEN_TREE
-                            && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
-                        {
-                            let colon_prefix = previous_non_trivia_token(self.token.clone())
-                                .map_or(false, |it| T![:] == it.kind());
-                            CompletionAnalysis::UnexpandedAttrTT {
-                                fake_attribute_under_caret: syntax_element
-                                    .ancestors()
-                                    .find_map(ast::Attr::cast),
-                                colon_prefix,
-                            }
-                        } else {
-                            return None;
-                        }
-                    };
-                return Some(analysis);
-            }
-        };
-        (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
-        let analysis = match name_like {
-            ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
-                Self::classify_lifetime(&self.sema, original_file, lifetime)?,
-            ),
-            ast::NameLike::NameRef(name_ref) => {
-                let parent = name_ref.syntax().parent()?;
-                let (nameref_ctx, qualifier_ctx) =
-                    Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?;
-
-                self.qualifier_ctx = qualifier_ctx;
-                CompletionAnalysis::NameRef(nameref_ctx)
-            }
-            ast::NameLike::Name(name) => {
-                let name_ctx = Self::classify_name(&self.sema, original_file, name)?;
-                CompletionAnalysis::Name(name_ctx)
-            }
-        };
-        Some(analysis)
-    }
-
 fn classify_lifetime(
     _sema: &Semantics<'_, RootDatabase>,
     original_file: &SyntaxNode,
@@ -520,8 +560,7 @@ impl<'a> CompletionContext<'a> {
 ) -> Option<(NameRefContext, QualifierCtx)> {
     let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
 
-    let make_res =
-        |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
+    let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
 
     if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
         let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
@@ -748,9 +787,7 @@ impl<'a> CompletionContext<'a> {
     let find_ret_ty = |it: SyntaxNode| {
         if let Some(item) = ast::Item::cast(it.clone()) {
             match item {
-                ast::Item::Fn(f) => {
-                    Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
-                }
+                ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
                 ast::Item::MacroCall(_) => None,
                 _ => Some(None),
             }
@@ -770,9 +807,7 @@ impl<'a> CompletionContext<'a> {
         }
     };
     let find_fn_self_param = |it| match it {
-        ast::Item::Fn(fn_) => {
-            Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
-        }
+        ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
         ast::Item::MacroCall(_) => None,
         _ => Some(None),
     };
@@ -866,10 +901,8 @@ impl<'a> CompletionContext<'a> {
         let kind = attr.kind();
         let attached = attr.syntax().parent()?;
         let is_trailing_outer_attr = kind != AttrKind::Inner
-            && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
-                .is_none();
-        let annotated_item_kind =
-            if is_trailing_outer_attr { None } else { Some(attached.kind()) };
+            && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
+        let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
         Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
     };
 
@@ -967,10 +1000,11 @@ impl<'a> CompletionContext<'a> {
         .map(|it| it.parent_path());
     if let Some(qualifier) = qualifier {
         let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
-            Some(ast::PathSegmentKind::Type {
-                type_ref: Some(type_ref),
-                trait_ref,
-            }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
+            Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
+                if qualifier.qualifier().is_none() =>
+            {
+                Some((type_ref, trait_ref))
+            }
             _ => None,
         };
 
@@ -987,8 +1021,7 @@ impl<'a> CompletionContext<'a> {
     // For understanding how and why super_chain_len is calculated the way it
    // is check the documentation at it's definition
     let mut segment_count = 0;
-    let super_count =
-        iter::successors(Some(qualifier.clone()), |p| p.qualifier())
+    let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
         .take_while(|p| {
             p.segment()
                 .and_then(|s| {
@@ -1042,8 +1075,7 @@ impl<'a> CompletionContext<'a> {
             .children_with_tokens()
             .filter_map(NodeOrToken::into_token)
             .find(|it| it.kind() == T![unsafe]);
-        qualifier_ctx.vis_node =
-            error_node.children().find_map(ast::Visibility::cast);
+        qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
     }
 }
 
@@ -1068,7 +1100,6 @@ impl<'a> CompletionContext<'a> {
         }
         Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
     }
-}
 
 fn pattern_context_for(
     sema: &Semantics<'_, RootDatabase>,