Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 06:11:35 +00:00)
Start splitting completion into components
This commit is contained in:
parent 12810b93c5
commit c2bf174e9c
4 changed files with 425 additions and 340 deletions
@@ -1,18 +1,23 @@
 mod completion_item;
 mod reference_completion;
 
+mod complete_fn_param;
+mod complete_keywords;
+
 use ra_editor::find_node_at_offset;
 use ra_text_edit::AtomTextEdit;
 use ra_syntax::{
-    algo::visit::{visitor_ctx, VisitorCtx},
+    algo::{
+        find_leaf_at_offset,
+    },
     ast,
     AstNode,
     SyntaxNodeRef,
     SourceFileNode,
     TextUnit,
+    SyntaxKind::*,
 };
 use ra_db::SyntaxDatabase;
-use rustc_hash::{FxHashMap};
 use hir::source_binder;
 
 use crate::{
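The import changes above swap the `algo::visit` machinery for `find_leaf_at_offset`, which the new `SyntaxContext` uses via `.left_biased()` to pick the token under the cursor. As a rough, self-contained illustration of what a left-biased lookup means, with simplified stand-in types rather than the `ra_syntax` API:

```rust
// Simplified sketch (assumed types, not ra_syntax): when the offset falls on a
// token boundary, a left-biased lookup prefers the token that ends at the
// offset over the one that starts there.
struct Token {
    text: &'static str,
    start: u32,
    end: u32,
}

fn find_leaf_left_biased(tokens: &[Token], offset: u32) -> Option<&Token> {
    tokens
        .iter()
        .filter(|t| t.start <= offset && offset <= t.end)
        // On a boundary two tokens match; taking the leftmost start is the bias.
        .min_by_key(|t| t.start)
}

fn main() {
    // Cursor right after `foo` in `foo.`: offset 3 touches both tokens.
    let tokens = [
        Token { text: "foo", start: 0, end: 3 },
        Token { text: ".", start: 3, end: 4 },
    ];
    assert_eq!(find_leaf_left_biased(&tokens, 3).unwrap().text, "foo");
}
```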
@@ -29,99 +34,133 @@ pub(crate) fn completions(
 ) -> Cancelable<Option<Completions>> {
     let original_file = db.source_file(position.file_id);
     // Insert a fake ident to get a valid parse tree
+    let file = {
+        let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
+        original_file.reparse(&edit)
+    };
     let module = ctry!(source_binder::module_from_position(db, position)?);
 
     let mut acc = Completions::default();
-    let mut has_completions = false;
     // First, let's try to complete a reference to some declaration.
     if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) {
-        has_completions = true;
         reference_completion::completions(&mut acc, db, &module, &file, name_ref)?;
-        // special case, `trait T { fn foo(i_am_a_name_ref) {} }`
-        if is_node::<ast::Param>(name_ref.syntax()) {
-            param_completions(&mut acc, name_ref.syntax());
-        }
     }
 
-    // Otherwise, if this is a declaration, use heuristics to suggest a name.
-    if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
-        if is_node::<ast::Param>(name.syntax()) {
-            has_completions = true;
-            param_completions(&mut acc, name.syntax());
-        }
-    }
-    if !has_completions {
-        return Ok(None);
-    }
+    let ctx = ctry!(SyntaxContext::new(&original_file, position.offset));
+    complete_fn_param::complete_fn_param(&mut acc, &ctx);
+    complete_keywords::complete_expr_keyword(&mut acc, &ctx);
     Ok(Some(acc))
 }
 
 /// `SyntaxContext` is created early during completion to figure out, where
 /// exactly is the cursor, syntax-wise.
 #[derive(Debug)]
-pub(super) enum SyntaxContext<'a> {
-    ParameterName(SyntaxNodeRef<'a>),
-    Other,
+pub(super) struct SyntaxContext<'a> {
+    leaf: SyntaxNodeRef<'a>,
+    enclosing_fn: Option<ast::FnDef<'a>>,
+    is_param: bool,
+    /// a single-indent path, like `foo`.
+    is_trivial_path: bool,
+    after_if: bool,
+    is_stmt: bool,
 }
 
-impl SyntaxContext {
-    pub(super) fn new(original_file: &SourceFileNode, offset: TextUnit) -> SyntaxContext {
+impl SyntaxContext<'_> {
+    pub(super) fn new(original_file: &SourceFileNode, offset: TextUnit) -> Option<SyntaxContext> {
+        let leaf = find_leaf_at_offset(original_file.syntax(), offset).left_biased()?;
+        let mut ctx = SyntaxContext {
+            leaf,
+            enclosing_fn: None,
+            is_param: false,
+            is_trivial_path: false,
+            after_if: false,
+            is_stmt: false,
+        };
+        ctx.fill(original_file, offset);
+        Some(ctx)
+    }
+
+    fn fill(&mut self, original_file: &SourceFileNode, offset: TextUnit) {
+        // Insert a fake ident to get a valid parse tree. We will use this file
+        // to determine context, though the original_file will be used for
+        // actual completion.
         let file = {
             let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
             original_file.reparse(&edit)
         };
+
+        // First, let's try to complete a reference to some declaration.
+        if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
+            // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
+            // See RFC#1685.
+            if is_node::<ast::Param>(name_ref.syntax()) {
+                self.is_param = true;
+                return;
+            }
+            self.classify_name_ref(&file, name_ref);
+        }
+
+        // Otherwise, see if this is a declaration. We can use heuristics to
+        // suggest declaration names, see `CompletionKind::Magic`.
         if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
             if is_node::<ast::Param>(name.syntax()) {
-                if let Some(node) = find_leaf_at_offset(original_file, offset).left_biased() {
-                    return SyntaxContext::ParameterName(node);
+                self.is_param = true;
+                return;
+            }
+        }
+    }
+
+    fn classify_name_ref(&mut self, file: &SourceFileNode, name_ref: ast::NameRef) {
+        // let name_range = name_ref.syntax().range();
+        // let top_node = name_ref
+        //     .syntax()
+        //     .ancestors()
+        //     .take_while(|it| it.range() == name_range)
+        //     .last()
+        //     .unwrap();
+        // match top_node.parent().map(|it| it.kind()) {
+        //     Some(SOURCE_FILE) | Some(ITEM_LIST) => return Some(NameRefKind::BareIdentInMod),
+        //     _ => (),
+        // }
+        let parent = match name_ref.syntax().parent() {
+            Some(it) => it,
+            None => return,
+        };
+        if let Some(segment) = ast::PathSegment::cast(parent) {
+            let path = segment.parent_path();
+            // if let Some(path) = Path::from_ast(path) {
+            //     if !path.is_ident() {
+            //         return Some(NameRefKind::Path(path));
+            //     }
+            // }
+            if path.qualifier().is_none() {
+                self.is_trivial_path = true;
+                self.enclosing_fn = self
+                    .leaf
+                    .ancestors()
+                    .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
+                    .find_map(ast::FnDef::cast);
+
+                self.is_stmt = match name_ref
+                    .syntax()
+                    .ancestors()
+                    .filter_map(ast::ExprStmt::cast)
+                    .next()
+                {
+                    None => false,
+                    Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
+                };
+
+                if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
+                    if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) {
+                        if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
+                            self.after_if = true;
+                        }
+                    }
                 }
             }
         }
-
-        SyntaxContext::Other
-    }
-}
-
-/// Complete repeated parametes, both name and type. For example, if all
-/// functions in a file have a `spam: &mut Spam` parameter, a completion with
-/// `spam: &mut Spam` insert text/label and `spam` lookup string will be
-/// suggested.
-fn param_completions(acc: &mut Completions, ctx: SyntaxNodeRef) {
-    let mut params = FxHashMap::default();
-    for node in ctx.ancestors() {
-        let _ = visitor_ctx(&mut params)
-            .visit::<ast::SourceFile, _>(process)
-            .visit::<ast::ItemList, _>(process)
-            .accept(node);
-    }
-    params
-        .into_iter()
-        .filter_map(|(label, (count, param))| {
-            let lookup = param.pat()?.syntax().text().to_string();
-            if count < 2 {
-                None
-            } else {
-                Some((label, lookup))
-            }
-        })
-        .for_each(|(label, lookup)| {
-            CompletionItem::new(label)
-                .lookup_by(lookup)
-                .kind(CompletionKind::Magic)
-                .add_to(acc)
-        });
-
-    fn process<'a, N: ast::FnDefOwner<'a>>(
-        node: N,
-        params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
-    ) {
-        node.functions()
-            .filter_map(|it| it.param_list())
-            .flat_map(|it| it.params())
-            .for_each(|param| {
-                let text = param.syntax().text().to_string();
-                params.entry(text).or_insert((0, param)).0 += 1;
-            })
-    }
-}
     }
 }
 
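This hunk is the core of the split: `completions` now builds a single `SyntaxContext` up front and hands it to each `complete_*` component, and every component appends into the shared `Completions` accumulator behind a cheap early return on the flag it cares about. A minimal, self-contained sketch of that shape (all names here are illustrative stand-ins, not the real rust-analyzer types):

```rust
// Sketch of the "one context, many components" pattern introduced here.
struct Ctx {
    is_param: bool,
    is_trivial_path: bool,
    after_if: bool,
}

#[derive(Default)]
struct Completions(Vec<String>);

impl Completions {
    fn add(&mut self, item: &str) {
        self.0.push(item.to_string());
    }
}

// Each component inspects only the context flags it needs and bails out early.
fn complete_fn_param(acc: &mut Completions, ctx: &Ctx) {
    if !ctx.is_param {
        return;
    }
    acc.add("spam: &mut Spam");
}

fn complete_keywords(acc: &mut Completions, ctx: &Ctx) {
    if !ctx.is_trivial_path {
        return;
    }
    acc.add("if");
    if ctx.after_if {
        acc.add("else");
    }
}

fn main() {
    // The dispatcher stays a flat list of calls; adding a completion source
    // means adding one call here instead of growing a single large function.
    let ctx = Ctx { is_param: false, is_trivial_path: true, after_if: true };
    let mut acc = Completions::default();
    complete_fn_param(&mut acc, &ctx);
    complete_keywords(&mut acc, &ctx);
    assert_eq!(acc.0, ["if", "else"]);
}
```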
@@ -143,51 +182,3 @@ fn check_completion(code: &str, expected_completions: &str, kind: CompletionKind
     let completions = completions(&analysis.imp.db, position).unwrap().unwrap();
     completions.assert_match(expected_completions, kind);
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    fn check_magic_completion(code: &str, expected_completions: &str) {
-        check_completion(code, expected_completions, CompletionKind::Magic);
-    }
-
-    #[test]
-    fn test_param_completion_last_param() {
-        check_magic_completion(
-            r"
-            fn foo(file_id: FileId) {}
-            fn bar(file_id: FileId) {}
-            fn baz(file<|>) {}
-            ",
-            r#"file_id "file_id: FileId""#,
-        );
-    }
-
-    #[test]
-    fn test_param_completion_nth_param() {
-        check_magic_completion(
-            r"
-            fn foo(file_id: FileId) {}
-            fn bar(file_id: FileId) {}
-            fn baz(file<|>, x: i32) {}
-            ",
-            r#"file_id "file_id: FileId""#,
-        );
-    }
-
-    #[test]
-    fn test_param_completion_trait_param() {
-        check_magic_completion(
-            r"
-            pub(crate) trait SourceRoot {
-                pub fn contains(&self, file_id: FileId) -> bool;
-                pub fn module_map(&self) -> &ModuleMap;
-                pub fn lines(&self, file_id: FileId) -> &LineIndex;
-                pub fn syntax(&self, file<|>)
-            }
-            ",
-            r#"file_id "file_id: FileId""#,
-        );
-    }
-}
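The tests removed here (they reappear in the new complete_fn_param.rs below) rely on the `<|>` marker to stand for the cursor position in a fixture. A hedged sketch of how such a fixture can be split into plain source text plus an offset; the helper name is made up for illustration and is not the actual test_utils API:

```rust
// Assumed helper for illustration: strip the `<|>` cursor marker from a
// fixture and remember the byte offset where it stood.
fn extract_offset(fixture: &str) -> (usize, String) {
    let offset = fixture
        .find("<|>")
        .expect("fixture must contain a `<|>` cursor marker");
    let mut code = String::with_capacity(fixture.len() - 3);
    code.push_str(&fixture[..offset]);
    code.push_str(&fixture[offset + 3..]);
    (offset, code)
}

fn main() {
    let (offset, code) = extract_offset("fn baz(file<|>) {}");
    assert_eq!(offset, 11); // completion is requested at this position
    assert_eq!(code, "fn baz(file) {}");
}
```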
crates/ra_analysis/src/completion/complete_fn_param.rs (new file, 107 lines)
@@ -0,0 +1,107 @@
+use ra_syntax::{
+    algo::{
+        visit::{visitor_ctx, VisitorCtx}
+    },
+    ast,
+    AstNode,
+};
+use rustc_hash::{FxHashMap};
+
+use crate::{
+    completion::{SyntaxContext, Completions, CompletionKind, CompletionItem},
+};
+
+/// Complete repeated parametes, both name and type. For example, if all
+/// functions in a file have a `spam: &mut Spam` parameter, a completion with
+/// `spam: &mut Spam` insert text/label and `spam` lookup string will be
+/// suggested.
+pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &SyntaxContext) {
+    if !ctx.is_param {
+        return;
+    }
+
+    let mut params = FxHashMap::default();
+    for node in ctx.leaf.ancestors() {
+        let _ = visitor_ctx(&mut params)
+            .visit::<ast::SourceFile, _>(process)
+            .visit::<ast::ItemList, _>(process)
+            .accept(node);
+    }
+    params
+        .into_iter()
+        .filter_map(|(label, (count, param))| {
+            let lookup = param.pat()?.syntax().text().to_string();
+            if count < 2 {
+                None
+            } else {
+                Some((label, lookup))
+            }
+        })
+        .for_each(|(label, lookup)| {
+            CompletionItem::new(label)
+                .lookup_by(lookup)
+                .kind(CompletionKind::Magic)
+                .add_to(acc)
+        });
+
+    fn process<'a, N: ast::FnDefOwner<'a>>(
+        node: N,
+        params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
+    ) {
+        node.functions()
+            .filter_map(|it| it.param_list())
+            .flat_map(|it| it.params())
+            .for_each(|param| {
+                let text = param.syntax().text().to_string();
+                params.entry(text).or_insert((0, param)).0 += 1;
+            })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::completion::*;
+
+    fn check_magic_completion(code: &str, expected_completions: &str) {
+        check_completion(code, expected_completions, CompletionKind::Magic);
+    }
+
+    #[test]
+    fn test_param_completion_last_param() {
+        check_magic_completion(
+            r"
+            fn foo(file_id: FileId) {}
+            fn bar(file_id: FileId) {}
+            fn baz(file<|>) {}
+            ",
+            r#"file_id "file_id: FileId""#,
+        );
+    }
+
+    #[test]
+    fn test_param_completion_nth_param() {
+        check_magic_completion(
+            r"
+            fn foo(file_id: FileId) {}
+            fn bar(file_id: FileId) {}
+            fn baz(file<|>, x: i32) {}
+            ",
+            r#"file_id "file_id: FileId""#,
+        );
+    }
+
+    #[test]
+    fn test_param_completion_trait_param() {
+        check_magic_completion(
+            r"
+            pub(crate) trait SourceRoot {
+                pub fn contains(&self, file_id: FileId) -> bool;
+                pub fn module_map(&self) -> &ModuleMap;
+                pub fn lines(&self, file_id: FileId) -> &LineIndex;
+                pub fn syntax(&self, file<|>)
+            }
+            ",
+            r#"file_id "file_id: FileId""#,
+        );
+    }
+}
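`complete_fn_param` keys a map by the full textual form of each parameter and only suggests entries that appear in at least two signatures. The standalone sketch below reproduces just that counting rule with plain std types and no syntax trees, to make the `count < 2` cutoff easy to see:

```rust
use std::collections::HashMap;

// Count identical parameter texts across functions and keep the repeated ones;
// this mirrors the `or_insert((0, param)).0 += 1` / `count < 2` logic above.
fn repeated_params(param_texts: &[&str]) -> Vec<String> {
    let mut counts: HashMap<&str, u32> = HashMap::new();
    for &text in param_texts {
        *counts.entry(text).or_insert(0) += 1;
    }
    let mut result: Vec<String> = counts
        .into_iter()
        .filter(|&(_, count)| count >= 2)
        .map(|(text, _)| text.to_string())
        .collect();
    result.sort(); // deterministic order for the example
    result
}

fn main() {
    // Matches the `test_param_completion_nth_param` fixture: two functions
    // already take `file_id: FileId`, so it is worth suggesting; `x: i32` is not.
    let params = ["file_id: FileId", "file_id: FileId", "x: i32"];
    assert_eq!(repeated_params(&params), ["file_id: FileId"]);
}
```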
crates/ra_analysis/src/completion/complete_keywords.rs (new file, 206 lines)
@@ -0,0 +1,206 @@
+use ra_syntax::{
+    algo::visit::{visitor, Visitor},
+    AstNode,
+    ast::{self, LoopBodyOwner},
+    SyntaxKind::*, SyntaxNodeRef,
+};
+
+use crate::{
+    completion::{SyntaxContext, CompletionItem, Completions, CompletionKind::*},
+};
+
+pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &SyntaxContext) {
+    if !ctx.is_trivial_path {
+        return;
+    }
+    let fn_def = match ctx.enclosing_fn {
+        Some(it) => it,
+        None => return,
+    };
+    acc.add(keyword("if", "if $0 {}"));
+    acc.add(keyword("match", "match $0 {}"));
+    acc.add(keyword("while", "while $0 {}"));
+    acc.add(keyword("loop", "loop {$0}"));
+
+    if ctx.after_if {
+        acc.add(keyword("else", "else {$0}"));
+        acc.add(keyword("else if", "else if $0 {}"));
+    }
+    if is_in_loop_body(ctx.leaf) {
+        acc.add(keyword("continue", "continue"));
+        acc.add(keyword("break", "break"));
+    }
+    acc.add_all(complete_return(fn_def, ctx.is_stmt));
+}
+
+fn is_in_loop_body(leaf: SyntaxNodeRef) -> bool {
+    for node in leaf.ancestors() {
+        if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
+            break;
+        }
+        let loop_body = visitor()
+            .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
+            .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
+            .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
+            .accept(node);
+        if let Some(Some(body)) = loop_body {
+            if leaf.range().is_subrange(&body.syntax().range()) {
+                return true;
+            }
+        }
+    }
+    false
+}
+
+fn complete_return(fn_def: ast::FnDef, is_stmt: bool) -> Option<CompletionItem> {
+    let snip = match (is_stmt, fn_def.ret_type().is_some()) {
+        (true, true) => "return $0;",
+        (true, false) => "return;",
+        (false, true) => "return $0",
+        (false, false) => "return",
+    };
+    Some(keyword("return", snip))
+}
+
+fn keyword(kw: &str, snippet: &str) -> CompletionItem {
+    CompletionItem::new(kw)
+        .kind(Keyword)
+        .snippet(snippet)
+        .build()
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::completion::{CompletionKind, check_completion};
+    fn check_keyword_completion(code: &str, expected_completions: &str) {
+        check_completion(code, expected_completions, CompletionKind::Keyword);
+    }
+
+    #[test]
+    fn test_completion_kewords() {
+        check_keyword_completion(
+            r"
+            fn quux() {
+                <|>
+            }
+            ",
+            r#"
+            if "if $0 {}"
+            match "match $0 {}"
+            while "while $0 {}"
+            loop "loop {$0}"
+            return "return"
+            "#,
+        );
+    }
+
+    #[test]
+    fn test_completion_else() {
+        check_keyword_completion(
+            r"
+            fn quux() {
+                if true {
+                    ()
+                } <|>
+            }
+            ",
+            r#"
+            if "if $0 {}"
+            match "match $0 {}"
+            while "while $0 {}"
+            loop "loop {$0}"
+            else "else {$0}"
+            else if "else if $0 {}"
+            return "return"
+            "#,
+        );
+    }
+
+    #[test]
+    fn test_completion_return_value() {
+        check_keyword_completion(
+            r"
+            fn quux() -> i32 {
+                <|>
+                92
+            }
+            ",
+            r#"
+            if "if $0 {}"
+            match "match $0 {}"
+            while "while $0 {}"
+            loop "loop {$0}"
+            return "return $0;"
+            "#,
+        );
+        check_keyword_completion(
+            r"
+            fn quux() {
+                <|>
+                92
+            }
+            ",
+            r#"
+            if "if $0 {}"
+            match "match $0 {}"
+            while "while $0 {}"
+            loop "loop {$0}"
+            return "return;"
+            "#,
+        );
+    }
+
+    #[test]
+    fn test_completion_return_no_stmt() {
+        check_keyword_completion(
+            r"
+            fn quux() -> i32 {
+                match () {
+                    () => <|>
+                }
+            }
+            ",
+            r#"
+            if "if $0 {}"
+            match "match $0 {}"
+            while "while $0 {}"
+            loop "loop {$0}"
+            return "return $0"
+            "#,
+        );
+    }
+
+    #[test]
+    fn test_continue_break_completion() {
+        check_keyword_completion(
+            r"
+            fn quux() -> i32 {
+                loop { <|> }
+            }
+            ",
+            r#"
+            if "if $0 {}"
+            match "match $0 {}"
+            while "while $0 {}"
+            loop "loop {$0}"
+            continue "continue"
+            break "break"
+            return "return $0"
+            "#,
+        );
+        check_keyword_completion(
+            r"
+            fn quux() -> i32 {
+                loop { || { <|> } }
+            }
+            ",
+            r#"
+            if "if $0 {}"
+            match "match $0 {}"
+            while "while $0 {}"
+            loop "loop {$0}"
+            return "return $0"
+            "#,
+        );
+    }
+}
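`complete_return` above reduces to a two-flag decision table: statement position decides the trailing `;`, and the presence of a declared return type decides the `$0` placeholder. A minimal reproduction of just that table, which the test fixtures (for example `fn quux() -> i32 { <|> 92 }` versus `() => <|>`) exercise combination by combination:

```rust
// Stand-alone version of the (is_stmt, has_ret_type) table from complete_return.
fn return_snippet(is_stmt: bool, has_ret_type: bool) -> &'static str {
    match (is_stmt, has_ret_type) {
        (true, true) => "return $0;",  // statement in a function with a return type
        (true, false) => "return;",    // statement, nothing to return
        (false, true) => "return $0",  // expression position, value expected
        (false, false) => "return",    // expression position, unit function
    }
}

fn main() {
    assert_eq!(return_snippet(true, true), "return $0;");
    assert_eq!(return_snippet(false, true), "return $0");
}
```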
@@ -1,9 +1,7 @@
 use rustc_hash::{FxHashSet};
-use ra_editor::find_node_at_offset;
 use ra_syntax::{
-    algo::visit::{visitor, Visitor},
     SourceFileNode, AstNode,
-    ast::{self, LoopBodyOwner},
+    ast,
     SyntaxKind::*,
 };
 use hir::{
@@ -21,7 +19,7 @@ pub(super) fn completions(
     acc: &mut Completions,
     db: &RootDatabase,
     module: &hir::Module,
-    file: &SourceFileNode,
+    _file: &SourceFileNode,
     name_ref: ast::NameRef,
 ) -> Cancelable<()> {
     let kind = match classify_name_ref(name_ref) {
@@ -34,7 +32,7 @@ pub(super) fn completions(
     if let Some(fn_def) = enclosing_fn {
         let scopes = FnScopes::new(fn_def);
         complete_fn(name_ref, &scopes, acc);
-        complete_expr_keywords(&file, fn_def, name_ref, acc);
+        // complete_expr_keywords(&file, fn_def, name_ref, acc);
         complete_expr_snippets(acc);
     }
 
@@ -182,91 +180,6 @@ fn ${1:feature}() {
         .add_to(acc);
 }
 
-fn complete_expr_keywords(
-    file: &SourceFileNode,
-    fn_def: ast::FnDef,
-    name_ref: ast::NameRef,
-    acc: &mut Completions,
-) {
-    acc.add(keyword("if", "if $0 {}"));
-    acc.add(keyword("match", "match $0 {}"));
-    acc.add(keyword("while", "while $0 {}"));
-    acc.add(keyword("loop", "loop {$0}"));
-
-    if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
-        if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) {
-            if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
-                acc.add(keyword("else", "else {$0}"));
-                acc.add(keyword("else if", "else if $0 {}"));
-            }
-        }
-    }
-    if is_in_loop_body(name_ref) {
-        acc.add(keyword("continue", "continue"));
-        acc.add(keyword("break", "break"));
-    }
-    acc.add_all(complete_return(fn_def, name_ref));
-}
-
-fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
-    for node in name_ref.syntax().ancestors() {
-        if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
-            break;
-        }
-        let loop_body = visitor()
-            .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
-            .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
-            .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
-            .accept(node);
-        if let Some(Some(body)) = loop_body {
-            if name_ref
-                .syntax()
-                .range()
-                .is_subrange(&body.syntax().range())
-            {
-                return true;
-            }
-        }
-    }
-    false
-}
-
-fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<CompletionItem> {
-    // let is_last_in_block = name_ref.syntax().ancestors().filter_map(ast::Expr::cast)
-    //     .next()
-    //     .and_then(|it| it.syntax().parent())
-    //     .and_then(ast::Block::cast)
-    //     .is_some();
-
-    // if is_last_in_block {
-    //     return None;
-    // }
-
-    let is_stmt = match name_ref
-        .syntax()
-        .ancestors()
-        .filter_map(ast::ExprStmt::cast)
-        .next()
-    {
-        None => false,
-        Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
-    };
-    let snip = match (is_stmt, fn_def.ret_type().is_some()) {
-        (true, true) => "return $0;",
-        (true, false) => "return;",
-        (false, true) => "return $0",
-        (false, false) => "return",
-    };
-    Some(keyword("return", snip))
-}
-
-fn keyword(kw: &str, snippet: &str) -> CompletionItem {
-    CompletionItem::new(kw)
-        .kind(Keyword)
-        .snippet(snippet)
-        .build()
-}
-
 fn complete_expr_snippets(acc: &mut Completions) {
     CompletionItem::new("pd")
         .snippet("eprintln!(\"$0 = {:?}\", $0);")
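The `is_in_loop_body` helper removed here (its replacement in complete_keywords.rs walks up from the context leaf instead of the name_ref) combines two ideas: stop the ancestor walk at the nearest `FN_DEF` or `LAMBDA_EXPR`, and treat the cursor as inside a loop only when its range is a subrange of the loop body's range. A simplified sketch of the containment test with a stand-in `TextRange`:

```rust
// Stand-in TextRange for illustration; the real type lives in ra_syntax.
#[derive(Clone, Copy)]
struct TextRange {
    start: u32,
    end: u32,
}

impl TextRange {
    // `self` is a subrange of `other` if it is fully contained in it.
    fn is_subrange(self, other: TextRange) -> bool {
        other.start <= self.start && self.end <= other.end
    }
}

fn main() {
    let loop_body = TextRange { start: 10, end: 40 };
    let cursor_inside = TextRange { start: 20, end: 20 };
    let cursor_outside = TextRange { start: 50, end: 50 };
    // Only a cursor inside the body should unlock `break` and `continue`.
    assert!(cursor_inside.is_subrange(loop_body));
    assert!(!cursor_outside.is_subrange(loop_body));
}
```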
@@ -286,10 +199,6 @@ mod tests {
         check_completion(code, expected_completions, CompletionKind::Reference);
     }
 
-    fn check_keyword_completion(code: &str, expected_completions: &str) {
-        check_completion(code, expected_completions, CompletionKind::Keyword);
-    }
-
     fn check_snippet_completion(code: &str, expected_completions: &str) {
         check_completion(code, expected_completions, CompletionKind::Snippet);
     }
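All of these `check_*_completion` helpers funnel into `check_completion(code, expected, kind)`, which filters the produced items by `CompletionKind` and compares a textual label/snippet dump against the expected block. A rough sketch of that assertion pattern with stand-in types, not the real Completions API:

```rust
// Stand-in types for illustrating the assert_match-by-kind pattern.
#[derive(PartialEq, Clone, Copy)]
enum Kind {
    Keyword,
    Snippet,
}

struct Item {
    label: &'static str,
    snippet: &'static str,
    kind: Kind,
}

fn assert_match(items: &[Item], expected: &str, kind: Kind) {
    // Keep only items of the requested kind and render them one per line.
    let actual: Vec<String> = items
        .iter()
        .filter(|it| it.kind == kind)
        .map(|it| format!("{} \"{}\"", it.label, it.snippet))
        .collect();
    let expected: Vec<&str> = expected
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .collect();
    assert_eq!(actual, expected);
}

fn main() {
    let items = [
        Item { label: "if", snippet: "if $0 {}", kind: Kind::Keyword },
        Item { label: "pd", snippet: "eprintln!(...)", kind: Kind::Snippet },
    ];
    assert_match(&items, r#" if "if $0 {}" "#, Kind::Keyword);
}
```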
@@ -470,134 +379,6 @@ mod tests {
         );
     }
 
-    #[test]
-    fn test_completion_kewords() {
-        check_keyword_completion(
-            r"
-            fn quux() {
-                <|>
-            }
-            ",
-            r#"
-            if "if $0 {}"
-            match "match $0 {}"
-            while "while $0 {}"
-            loop "loop {$0}"
-            return "return"
-            "#,
-        );
-    }
-
-    #[test]
-    fn test_completion_else() {
-        check_keyword_completion(
-            r"
-            fn quux() {
-                if true {
-                    ()
-                } <|>
-            }
-            ",
-            r#"
-            if "if $0 {}"
-            match "match $0 {}"
-            while "while $0 {}"
-            loop "loop {$0}"
-            else "else {$0}"
-            else if "else if $0 {}"
-            return "return"
-            "#,
-        );
-    }
-
-    #[test]
-    fn test_completion_return_value() {
-        check_keyword_completion(
-            r"
-            fn quux() -> i32 {
-                <|>
-                92
-            }
-            ",
-            r#"
-            if "if $0 {}"
-            match "match $0 {}"
-            while "while $0 {}"
-            loop "loop {$0}"
-            return "return $0;"
-            "#,
-        );
-        check_keyword_completion(
-            r"
-            fn quux() {
-                <|>
-                92
-            }
-            ",
-            r#"
-            if "if $0 {}"
-            match "match $0 {}"
-            while "while $0 {}"
-            loop "loop {$0}"
-            return "return;"
-            "#,
-        );
-    }
-
-    #[test]
-    fn test_completion_return_no_stmt() {
-        check_keyword_completion(
-            r"
-            fn quux() -> i32 {
-                match () {
-                    () => <|>
-                }
-            }
-            ",
-            r#"
-            if "if $0 {}"
-            match "match $0 {}"
-            while "while $0 {}"
-            loop "loop {$0}"
-            return "return $0"
-            "#,
-        );
-    }
-
-    #[test]
-    fn test_continue_break_completion() {
-        check_keyword_completion(
-            r"
-            fn quux() -> i32 {
-                loop { <|> }
-            }
-            ",
-            r#"
-            if "if $0 {}"
-            match "match $0 {}"
-            while "while $0 {}"
-            loop "loop {$0}"
-            continue "continue"
-            break "break"
-            return "return $0"
-            "#,
-        );
-        check_keyword_completion(
-            r"
-            fn quux() -> i32 {
-                loop { || { <|> } }
-            }
-            ",
-            r#"
-            if "if $0 {}"
-            match "match $0 {}"
-            while "while $0 {}"
-            loop "loop {$0}"
-            return "return $0"
-            "#,
-        );
-    }
-
     #[test]
     fn completes_snippets_in_expressions() {
         check_snippet_completion(