Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-29 05:15:04 +00:00)

Merge #176

176: Move completion to ra_analysis r=matklad a=matklad

While we should handle completion for an isolated file, that is better achieved by using an empty Analysis rather than by working only with &File: we need memoization for type inference even inside a single file.

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

Commit 1dc5608d0b, 20 changed files with 1066 additions and 1066 deletions
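The diff below moves code completion out of ra_editor into a new ra_analysis::completion module backed by the salsa database, and adds a MockAnalysis fixture for driving analysis over in-memory files in tests. As a rough orientation before the hunks themselves, the following sketch condenses how the relocated entry points are exercised by the new tests; the names (MockAnalysis, scope_completion, extract_offset, FileId) come from the diff below, but the snippet is an illustrative, crate-internal summary rather than code from the commit.

```rust
use test_utils::extract_offset;

use crate::completion::scope_completion;
use crate::mock_analysis::MockAnalysis;
use crate::FileId;

/// Collect completion labels at the `<|>` caret in a single-file fixture.
fn completion_labels(fixture: &str) -> Vec<String> {
    // extract_offset strips the `<|>` marker and returns its position.
    let (offset, code) = extract_offset(fixture);
    // An in-memory Analysis replaces the old `&File`-only entry point, so
    // per-file queries (e.g. future type inference) can be memoized.
    let analysis = MockAnalysis::with_files(&[("/main.rs", &code)]).analysis();
    scope_completion(&analysis.imp.db, FileId(1), offset)
        .unwrap_or_default()
        .into_iter()
        .map(|item| item.label)
        .collect()
}
```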
@@ -1,16 +1,32 @@
use ra_editor::{CompletionItem, find_node_at_offset};
use rustc_hash::{FxHashMap, FxHashSet};
use ra_editor::{find_node_at_offset};
use ra_syntax::{
    AtomEdit, File, TextUnit, AstNode,
    ast::{self, ModuleItemOwner, AstChildren},
    AtomEdit, File, TextUnit, AstNode, SyntaxNodeRef,
    algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx},
    ast::{self, AstChildren, LoopBodyOwner, ModuleItemOwner},
    SyntaxKind::*,
};

use crate::{
    FileId, Cancelable,
    input::FilesDatabase,
    db::{self, SyntaxDatabase},
    descriptors::module::{ModulesDatabase, ModuleTree, ModuleId, scope::ModuleScope},
    descriptors::DescriptorDatabase,
    descriptors::function::FnScopes,
    descriptors::module::{ModuleTree, ModuleId, ModuleScope},
};


#[derive(Debug)]
pub struct CompletionItem {
    /// What user sees in pop-up
    pub label: String,
    /// What string is used for filtering, defaults to label
    pub lookup: Option<String>,
    /// What is inserted, defaults to label
    pub snippet: Option<String>,
}

pub(crate) fn resolve_based_completion(db: &db::RootDatabase, file_id: FileId, offset: TextUnit) -> Cancelable<Option<Vec<CompletionItem>>> {
    let source_root_id = db.file_source_root(file_id);
    let file = db.file_syntax(file_id);
@ -72,3 +88,602 @@ fn crate_path(name_ref: ast::NameRef) -> Option<Vec<ast::NameRef>> {
|
|||
res.reverse();
|
||||
Some(res)
|
||||
}
|
||||
|
||||
|
||||
pub(crate) fn scope_completion(
|
||||
db: &db::RootDatabase,
|
||||
file_id: FileId,
|
||||
offset: TextUnit,
|
||||
) -> Option<Vec<CompletionItem>> {
|
||||
let original_file = db.file_syntax(file_id);
|
||||
// Insert a fake ident to get a valid parse tree
|
||||
let file = {
|
||||
let edit = AtomEdit::insert(offset, "intellijRulezz".to_string());
|
||||
original_file.reparse(&edit)
|
||||
};
|
||||
let mut has_completions = false;
|
||||
let mut res = Vec::new();
|
||||
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
|
||||
has_completions = true;
|
||||
complete_name_ref(&file, name_ref, &mut res);
|
||||
// special case, `trait T { fn foo(i_am_a_name_ref) {} }`
|
||||
if is_node::<ast::Param>(name_ref.syntax()) {
|
||||
param_completions(name_ref.syntax(), &mut res);
|
||||
}
|
||||
let name_range = name_ref.syntax().range();
|
||||
let top_node = name_ref
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.take_while(|it| it.range() == name_range)
|
||||
.last()
|
||||
.unwrap();
|
||||
match top_node.parent().map(|it| it.kind()) {
|
||||
Some(ROOT) | Some(ITEM_LIST) => complete_mod_item_snippets(&mut res),
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
|
||||
if is_node::<ast::Param>(name.syntax()) {
|
||||
has_completions = true;
|
||||
param_completions(name.syntax(), &mut res);
|
||||
}
|
||||
}
|
||||
if has_completions {
|
||||
Some(res)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn complete_module_items(
|
||||
file: &File,
|
||||
items: AstChildren<ast::ModuleItem>,
|
||||
this_item: Option<ast::NameRef>,
|
||||
acc: &mut Vec<CompletionItem>,
|
||||
) {
|
||||
let scope = ModuleScope::from_items(items);
|
||||
acc.extend(
|
||||
scope
|
||||
.entries()
|
||||
.iter()
|
||||
.filter(|entry| {
|
||||
let syntax = entry.ptr().resolve(file);
|
||||
Some(syntax.borrowed()) != this_item.map(|it| it.syntax())
|
||||
})
|
||||
.map(|entry| CompletionItem {
|
||||
label: entry.name().to_string(),
|
||||
lookup: None,
|
||||
snippet: None,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
fn complete_name_ref(
|
||||
file: &File,
|
||||
name_ref: ast::NameRef,
|
||||
acc: &mut Vec<CompletionItem>,
|
||||
) {
|
||||
if !is_node::<ast::Path>(name_ref.syntax()) {
|
||||
return;
|
||||
}
|
||||
let mut visited_fn = false;
|
||||
for node in name_ref.syntax().ancestors() {
|
||||
if let Some(items) = visitor()
|
||||
.visit::<ast::Root, _>(|it| Some(it.items()))
|
||||
.visit::<ast::Module, _>(|it| Some(it.item_list()?.items()))
|
||||
.accept(node)
|
||||
{
|
||||
if let Some(items) = items {
|
||||
complete_module_items(file, items, Some(name_ref), acc);
|
||||
}
|
||||
break;
|
||||
} else if !visited_fn {
|
||||
if let Some(fn_def) = ast::FnDef::cast(node) {
|
||||
visited_fn = true;
|
||||
complete_expr_keywords(&file, fn_def, name_ref, acc);
|
||||
complete_expr_snippets(acc);
|
||||
let scopes = FnScopes::new(fn_def);
|
||||
complete_fn(name_ref, &scopes, acc);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
|
||||
let mut params = FxHashMap::default();
|
||||
for node in ctx.ancestors() {
|
||||
let _ = visitor_ctx(&mut params)
|
||||
.visit::<ast::Root, _>(process)
|
||||
.visit::<ast::ItemList, _>(process)
|
||||
.accept(node);
|
||||
}
|
||||
params
|
||||
.into_iter()
|
||||
.filter_map(|(label, (count, param))| {
|
||||
let lookup = param.pat()?.syntax().text().to_string();
|
||||
if count < 2 {
|
||||
None
|
||||
} else {
|
||||
Some((label, lookup))
|
||||
}
|
||||
})
|
||||
.for_each(|(label, lookup)| {
|
||||
acc.push(CompletionItem {
|
||||
label,
|
||||
lookup: Some(lookup),
|
||||
snippet: None,
|
||||
})
|
||||
});
|
||||
|
||||
fn process<'a, N: ast::FnDefOwner<'a>>(
|
||||
node: N,
|
||||
params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
|
||||
) {
|
||||
node.functions()
|
||||
.filter_map(|it| it.param_list())
|
||||
.flat_map(|it| it.params())
|
||||
.for_each(|param| {
|
||||
let text = param.syntax().text().to_string();
|
||||
params.entry(text).or_insert((0, param)).0 += 1;
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
|
||||
match node.ancestors().filter_map(N::cast).next() {
|
||||
None => false,
|
||||
Some(n) => n.syntax().range() == node.range(),
|
||||
}
|
||||
}
|
||||
|
||||
fn complete_expr_keywords(
|
||||
file: &File,
|
||||
fn_def: ast::FnDef,
|
||||
name_ref: ast::NameRef,
|
||||
acc: &mut Vec<CompletionItem>,
|
||||
) {
|
||||
acc.push(keyword("if", "if $0 {}"));
|
||||
acc.push(keyword("match", "match $0 {}"));
|
||||
acc.push(keyword("while", "while $0 {}"));
|
||||
acc.push(keyword("loop", "loop {$0}"));
|
||||
|
||||
if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
|
||||
if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) {
|
||||
if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
|
||||
acc.push(keyword("else", "else {$0}"));
|
||||
acc.push(keyword("else if", "else if $0 {}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
if is_in_loop_body(name_ref) {
|
||||
acc.push(keyword("continue", "continue"));
|
||||
acc.push(keyword("break", "break"));
|
||||
}
|
||||
acc.extend(complete_return(fn_def, name_ref));
|
||||
}
|
||||
|
||||
fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
|
||||
for node in name_ref.syntax().ancestors() {
|
||||
if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
|
||||
break;
|
||||
}
|
||||
let loop_body = visitor()
|
||||
.visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
|
||||
.visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
|
||||
.visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
|
||||
.accept(node);
|
||||
if let Some(Some(body)) = loop_body {
|
||||
if name_ref.syntax().range().is_subrange(&body.syntax().range()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<CompletionItem> {
|
||||
// let is_last_in_block = name_ref.syntax().ancestors().filter_map(ast::Expr::cast)
|
||||
// .next()
|
||||
// .and_then(|it| it.syntax().parent())
|
||||
// .and_then(ast::Block::cast)
|
||||
// .is_some();
|
||||
|
||||
// if is_last_in_block {
|
||||
// return None;
|
||||
// }
|
||||
|
||||
let is_stmt = match name_ref
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.filter_map(ast::ExprStmt::cast)
|
||||
.next()
|
||||
{
|
||||
None => false,
|
||||
Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
|
||||
};
|
||||
let snip = match (is_stmt, fn_def.ret_type().is_some()) {
|
||||
(true, true) => "return $0;",
|
||||
(true, false) => "return;",
|
||||
(false, true) => "return $0",
|
||||
(false, false) => "return",
|
||||
};
|
||||
Some(keyword("return", snip))
|
||||
}
|
||||
|
||||
fn keyword(kw: &str, snip: &str) -> CompletionItem {
|
||||
CompletionItem {
|
||||
label: kw.to_string(),
|
||||
lookup: None,
|
||||
snippet: Some(snip.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
fn complete_expr_snippets(acc: &mut Vec<CompletionItem>) {
|
||||
acc.push(CompletionItem {
|
||||
label: "pd".to_string(),
|
||||
lookup: None,
|
||||
snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()),
|
||||
});
|
||||
acc.push(CompletionItem {
|
||||
label: "ppd".to_string(),
|
||||
lookup: None,
|
||||
snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()),
|
||||
});
|
||||
}
|
||||
|
||||
fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
|
||||
acc.push(CompletionItem {
|
||||
label: "tfn".to_string(),
|
||||
lookup: None,
|
||||
snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()),
|
||||
});
|
||||
acc.push(CompletionItem {
|
||||
label: "pub(crate)".to_string(),
|
||||
lookup: None,
|
||||
snippet: Some("pub(crate) $0".to_string()),
|
||||
})
|
||||
}
|
||||
|
||||
fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
|
||||
let mut shadowed = FxHashSet::default();
|
||||
acc.extend(
|
||||
scopes
|
||||
.scope_chain(name_ref.syntax())
|
||||
.flat_map(|scope| scopes.entries(scope).iter())
|
||||
.filter(|entry| shadowed.insert(entry.name()))
|
||||
.map(|entry| CompletionItem {
|
||||
label: entry.name().to_string(),
|
||||
lookup: None,
|
||||
snippet: None,
|
||||
}),
|
||||
);
|
||||
if scopes.self_param.is_some() {
|
||||
acc.push(CompletionItem {
|
||||
label: "self".to_string(),
|
||||
lookup: None,
|
||||
snippet: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use test_utils::{assert_eq_dbg, extract_offset};
|
||||
|
||||
use crate::FileId;
|
||||
use crate::mock_analysis::MockAnalysis;
|
||||
|
||||
use super::*;
|
||||
|
||||
fn check_scope_completion(code: &str, expected_completions: &str) {
|
||||
let (off, code) = extract_offset(&code);
|
||||
let analysis = MockAnalysis::with_files(&[("/main.rs", &code)]).analysis();
|
||||
let file_id = FileId(1);
|
||||
let completions = scope_completion(&analysis.imp.db, file_id, off)
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.filter(|c| c.snippet.is_none())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq_dbg(expected_completions, &completions);
|
||||
}
|
||||
|
||||
fn check_snippet_completion(code: &str, expected_completions: &str) {
|
||||
let (off, code) = extract_offset(&code);
|
||||
let analysis = MockAnalysis::with_files(&[("/main.rs", &code)]).analysis();
|
||||
let file_id = FileId(1);
|
||||
let completions = scope_completion(&analysis.imp.db, file_id, off)
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.filter(|c| c.snippet.is_some())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq_dbg(expected_completions, &completions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_let_scope() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
fn quux(x: i32) {
|
||||
let y = 92;
|
||||
1 + <|>;
|
||||
let z = ();
|
||||
}
|
||||
",
|
||||
r#"[CompletionItem { label: "y", lookup: None, snippet: None },
|
||||
CompletionItem { label: "x", lookup: None, snippet: None },
|
||||
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_if_let_scope() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
fn quux() {
|
||||
if let Some(x) = foo() {
|
||||
let y = 92;
|
||||
};
|
||||
if let Some(a) = bar() {
|
||||
let b = 62;
|
||||
1 + <|>
|
||||
}
|
||||
}
|
||||
",
|
||||
r#"[CompletionItem { label: "b", lookup: None, snippet: None },
|
||||
CompletionItem { label: "a", lookup: None, snippet: None },
|
||||
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_for_scope() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
fn quux() {
|
||||
for x in &[1, 2, 3] {
|
||||
<|>
|
||||
}
|
||||
}
|
||||
",
|
||||
r#"[CompletionItem { label: "x", lookup: None, snippet: None },
|
||||
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_mod_scope() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
struct Foo;
|
||||
enum Baz {}
|
||||
fn quux() {
|
||||
<|>
|
||||
}
|
||||
",
|
||||
r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
|
||||
CompletionItem { label: "Baz", lookup: None, snippet: None },
|
||||
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_mod_scope_no_self_use() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
use foo<|>;
|
||||
",
|
||||
r#"[]"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_mod_scope_nested() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
struct Foo;
|
||||
mod m {
|
||||
struct Bar;
|
||||
fn quux() { <|> }
|
||||
}
|
||||
",
|
||||
r#"[CompletionItem { label: "Bar", lookup: None, snippet: None },
|
||||
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_complete_type() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
struct Foo;
|
||||
fn x() -> <|>
|
||||
",
|
||||
r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
|
||||
CompletionItem { label: "x", lookup: None, snippet: None }]"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_complete_shadowing() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
fn foo() -> {
|
||||
let bar = 92;
|
||||
{
|
||||
let bar = 62;
|
||||
<|>
|
||||
}
|
||||
}
|
||||
",
|
||||
r#"[CompletionItem { label: "bar", lookup: None, snippet: None },
|
||||
CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_complete_self() {
|
||||
check_scope_completion(
|
||||
r"
|
||||
impl S { fn foo(&self) { <|> } }
|
||||
",
|
||||
r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_kewords() {
|
||||
check_snippet_completion(r"
|
||||
fn quux() {
|
||||
<|>
|
||||
}
|
||||
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
|
||||
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
|
||||
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
|
||||
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
|
||||
CompletionItem { label: "return", lookup: None, snippet: Some("return") },
|
||||
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
|
||||
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_else() {
|
||||
check_snippet_completion(r"
|
||||
fn quux() {
|
||||
if true {
|
||||
()
|
||||
} <|>
|
||||
}
|
||||
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
|
||||
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
|
||||
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
|
||||
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
|
||||
CompletionItem { label: "else", lookup: None, snippet: Some("else {$0}") },
|
||||
CompletionItem { label: "else if", lookup: None, snippet: Some("else if $0 {}") },
|
||||
CompletionItem { label: "return", lookup: None, snippet: Some("return") },
|
||||
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
|
||||
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_return_value() {
|
||||
check_snippet_completion(r"
|
||||
fn quux() -> i32 {
|
||||
<|>
|
||||
92
|
||||
}
|
||||
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
|
||||
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
|
||||
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
|
||||
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
|
||||
CompletionItem { label: "return", lookup: None, snippet: Some("return $0;") },
|
||||
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
|
||||
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
|
||||
check_snippet_completion(r"
|
||||
fn quux() {
|
||||
<|>
|
||||
92
|
||||
}
|
||||
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
|
||||
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
|
||||
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
|
||||
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
|
||||
CompletionItem { label: "return", lookup: None, snippet: Some("return;") },
|
||||
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
|
||||
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_completion_return_no_stmt() {
|
||||
check_snippet_completion(r"
|
||||
fn quux() -> i32 {
|
||||
match () {
|
||||
() => <|>
|
||||
}
|
||||
}
|
||||
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
|
||||
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
|
||||
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
|
||||
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
|
||||
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
|
||||
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
|
||||
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_continue_break_completion() {
|
||||
check_snippet_completion(r"
|
||||
fn quux() -> i32 {
|
||||
loop { <|> }
|
||||
}
|
||||
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
|
||||
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
|
||||
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
|
||||
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
|
||||
CompletionItem { label: "continue", lookup: None, snippet: Some("continue") },
|
||||
CompletionItem { label: "break", lookup: None, snippet: Some("break") },
|
||||
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
|
||||
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
|
||||
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
|
||||
check_snippet_completion(r"
|
||||
fn quux() -> i32 {
|
||||
loop { || { <|> } }
|
||||
}
|
||||
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
|
||||
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
|
||||
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
|
||||
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
|
||||
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
|
||||
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
|
||||
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_param_completion_last_param() {
|
||||
check_scope_completion(r"
|
||||
fn foo(file_id: FileId) {}
|
||||
fn bar(file_id: FileId) {}
|
||||
fn baz(file<|>) {}
|
||||
", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_param_completion_nth_param() {
|
||||
check_scope_completion(r"
|
||||
fn foo(file_id: FileId) {}
|
||||
fn bar(file_id: FileId) {}
|
||||
fn baz(file<|>, x: i32) {}
|
||||
", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_param_completion_trait_param() {
|
||||
check_scope_completion(r"
|
||||
pub(crate) trait SourceRoot {
|
||||
pub fn contains(&self, file_id: FileId) -> bool;
|
||||
pub fn module_map(&self) -> &ModuleMap;
|
||||
pub fn lines(&self, file_id: FileId) -> &LineIndex;
|
||||
pub fn syntax(&self, file<|>)
|
||||
}
|
||||
", r#"[CompletionItem { label: "self", lookup: None, snippet: None },
|
||||
CompletionItem { label: "SourceRoot", lookup: None, snippet: None },
|
||||
CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_item_snippets() {
|
||||
// check_snippet_completion(r"
|
||||
// <|>
|
||||
// ",
|
||||
// r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") }]"##,
|
||||
// );
|
||||
check_snippet_completion(r"
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
<|>
|
||||
}
|
||||
",
|
||||
r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") },
|
||||
CompletionItem { label: "pub(crate)", lookup: None, snippet: Some("pub(crate) $0") }]"##,
|
||||
);
|
||||
}
|
||||
}

@@ -9,7 +9,10 @@ use salsa;
use crate::{
    db,
    Cancelable, Canceled,
    descriptors::module::{SubmodulesQuery, ModuleTreeQuery, ModulesDatabase, ModuleScopeQuery},
    descriptors::{
        DescriptorDatabase, SubmodulesQuery, ModuleTreeQuery, ModuleScopeQuery,
        FnSyntaxQuery, FnScopesQuery
    },
    symbol_index::SymbolIndex,
    syntax_ptr::{SyntaxPtrDatabase, ResolveSyntaxPtrQuery},
    FileId,
@@ -63,10 +66,12 @@ salsa::database_storage! {
            fn file_lines() for FileLinesQuery;
            fn file_symbols() for FileSymbolsQuery;
        }
        impl ModulesDatabase {
        impl DescriptorDatabase {
            fn module_tree() for ModuleTreeQuery;
            fn module_descriptor() for SubmodulesQuery;
            fn module_scope() for ModuleScopeQuery;
            fn fn_syntax() for FnSyntaxQuery;
            fn fn_scopes() for FnScopesQuery;
        }
        impl SyntaxPtrDatabase {
            fn resolve_syntax_ptr() for ResolveSyntaxPtrQuery;
crates/ra_analysis/src/descriptors/function/imp.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
use std::sync::Arc;

use ra_syntax::{
    ast::{AstNode, FnDef, FnDefNode},
};

use crate::{
    descriptors::{
        DescriptorDatabase,
        function::{FnId, FnScopes},
    },
};

/// Resolve `FnId` to the corresponding `SyntaxNode`
/// TODO: this should return something more type-safe than `SyntaxNode`
pub(crate) fn fn_syntax(db: &impl DescriptorDatabase, fn_id: FnId) -> FnDefNode {
    let syntax = db.resolve_syntax_ptr(fn_id.0);
    let fn_def = FnDef::cast(syntax.borrowed()).unwrap();
    FnDefNode::new(fn_def)
}

pub(crate) fn fn_scopes(db: &impl DescriptorDatabase, fn_id: FnId) -> Arc<FnScopes> {
    let syntax = db.fn_syntax(fn_id);
    let res = FnScopes::new(syntax.ast());
    Arc::new(res)
}
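These two functions back the FnSyntaxQuery and FnScopesQuery salsa queries declared later in the diff (in descriptors/mod.rs). The sketch below illustrates the intended query flow from a consumer, mirroring the resolve_local_name helper added to ra_analysis::imp further down in this commit; the names (FnId, DescriptorDatabase, fn_scopes, into_global) are taken from the diff, but this block is an orientation aid, not code from the commit.

```rust
use ra_syntax::{ast, AstNode, TextRange};

use crate::{
    descriptors::function::{resolve_local_name, FnId},
    descriptors::DescriptorDatabase,
    FileId,
};

/// Find the definition range of a local name, going through the salsa queries.
fn local_definition_range(
    db: &impl DescriptorDatabase,
    file_id: FileId,
    name_ref: ast::NameRef,
) -> Option<TextRange> {
    // Identify the enclosing function by a stable syntax pointer...
    let fn_def = name_ref.syntax().ancestors().find_map(ast::FnDef::cast)?;
    let fn_id = FnId::new(file_id, fn_def);
    // ...then ask the database for its scopes; the result is memoized per FnId.
    let scopes = db.fn_scopes(fn_id);
    let entry = resolve_local_name(name_ref, &scopes)?;
    // Resolve the entry's local pointer back to a node to recover its range.
    let syntax = db.resolve_syntax_ptr(entry.ptr().into_global(file_id));
    Some(syntax.range())
}
```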
crates/ra_analysis/src/descriptors/function/mod.rs (new file, 83 lines)
@@ -0,0 +1,83 @@
pub(super) mod imp;
mod scope;

use ra_syntax::{
    ast::{self, AstNode, NameOwner}
};

use crate::{
    FileId,
    syntax_ptr::SyntaxPtr
};

pub(crate) use self::scope::{FnScopes, resolve_local_name};


#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct FnId(SyntaxPtr);

impl FnId {
    pub(crate) fn new(file_id: FileId, fn_def: ast::FnDef) -> FnId {
        let ptr = SyntaxPtr::new(file_id, fn_def.syntax());
        FnId(ptr)
    }
}


#[derive(Debug, Clone)]
pub struct FnDescriptor {
    pub name: String,
    pub label: String,
    pub ret_type: Option<String>,
    pub params: Vec<String>,
}

impl FnDescriptor {
    pub fn new(node: ast::FnDef) -> Option<Self> {
        let name = node.name()?.text().to_string();

        // Strip the body out for the label.
        let label: String = if let Some(body) = node.body() {
            let body_range = body.syntax().range();
            let label: String = node
                .syntax()
                .children()
                .filter(|child| !child.range().is_subrange(&body_range))
                .map(|node| node.text().to_string())
                .collect();
            label
        } else {
            node.syntax().text().to_string()
        };

        let params = FnDescriptor::param_list(node);
        let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());

        Some(FnDescriptor {
            name,
            ret_type,
            params,
            label,
        })
    }

    fn param_list(node: ast::FnDef) -> Vec<String> {
        let mut res = vec![];
        if let Some(param_list) = node.param_list() {
            if let Some(self_param) = param_list.self_param() {
                res.push(self_param.syntax().text().to_string())
            }

            // Maybe use param.pat here? See if we can just extract the name?
            //res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
            res.extend(
                param_list
                    .params()
                    .filter_map(|p| p.pat())
                    .map(|pat| pat.syntax().text().to_string()),
            );
        }
        res
    }
}
crates/ra_analysis/src/descriptors/function/scope.rs (new file, 433 lines)
|
@ -0,0 +1,433 @@
|
|||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
|
||||
use ra_syntax::{
|
||||
algo::generate,
|
||||
ast::{self, ArgListOwner, LoopBodyOwner, NameOwner},
|
||||
AstNode, SmolStr, SyntaxNodeRef,
|
||||
};
|
||||
|
||||
use crate::syntax_ptr::LocalSyntaxPtr;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||
pub(crate) struct ScopeId(u32);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct FnScopes {
|
||||
pub(crate) self_param: Option<LocalSyntaxPtr>,
|
||||
scopes: Vec<ScopeData>,
|
||||
scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct ScopeEntry {
|
||||
name: SmolStr,
|
||||
ptr: LocalSyntaxPtr,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct ScopeData {
|
||||
parent: Option<ScopeId>,
|
||||
entries: Vec<ScopeEntry>,
|
||||
}
|
||||
|
||||
impl FnScopes {
|
||||
pub(crate) fn new(fn_def: ast::FnDef) -> FnScopes {
|
||||
let mut scopes = FnScopes {
|
||||
self_param: fn_def
|
||||
.param_list()
|
||||
.and_then(|it| it.self_param())
|
||||
.map(|it| LocalSyntaxPtr::new(it.syntax())),
|
||||
scopes: Vec::new(),
|
||||
scope_for: FxHashMap::default(),
|
||||
};
|
||||
let root = scopes.root_scope();
|
||||
scopes.add_params_bindings(root, fn_def.param_list());
|
||||
if let Some(body) = fn_def.body() {
|
||||
compute_block_scopes(body, &mut scopes, root)
|
||||
}
|
||||
scopes
|
||||
}
|
||||
pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
|
||||
&self.get(scope).entries
|
||||
}
|
||||
pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a {
|
||||
generate(self.scope_for(node), move |&scope| {
|
||||
self.get(scope).parent
|
||||
})
|
||||
}
|
||||
fn root_scope(&mut self) -> ScopeId {
|
||||
let res = ScopeId(self.scopes.len() as u32);
|
||||
self.scopes.push(ScopeData {
|
||||
parent: None,
|
||||
entries: vec![],
|
||||
});
|
||||
res
|
||||
}
|
||||
fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
|
||||
let res = ScopeId(self.scopes.len() as u32);
|
||||
self.scopes.push(ScopeData {
|
||||
parent: Some(parent),
|
||||
entries: vec![],
|
||||
});
|
||||
res
|
||||
}
|
||||
fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) {
|
||||
let entries = pat
|
||||
.syntax()
|
||||
.descendants()
|
||||
.filter_map(ast::BindPat::cast)
|
||||
.filter_map(ScopeEntry::new);
|
||||
self.get_mut(scope).entries.extend(entries);
|
||||
}
|
||||
fn add_params_bindings(&mut self, scope: ScopeId, params: Option<ast::ParamList>) {
|
||||
params
|
||||
.into_iter()
|
||||
.flat_map(|it| it.params())
|
||||
.filter_map(|it| it.pat())
|
||||
.for_each(|it| self.add_bindings(scope, it));
|
||||
}
|
||||
fn set_scope(&mut self, node: SyntaxNodeRef, scope: ScopeId) {
|
||||
self.scope_for.insert(LocalSyntaxPtr::new(node), scope);
|
||||
}
|
||||
fn scope_for(&self, node: SyntaxNodeRef) -> Option<ScopeId> {
|
||||
node.ancestors()
|
||||
.map(LocalSyntaxPtr::new)
|
||||
.filter_map(|it| self.scope_for.get(&it).map(|&scope| scope))
|
||||
.next()
|
||||
}
|
||||
fn get(&self, scope: ScopeId) -> &ScopeData {
|
||||
&self.scopes[scope.0 as usize]
|
||||
}
|
||||
fn get_mut(&mut self, scope: ScopeId) -> &mut ScopeData {
|
||||
&mut self.scopes[scope.0 as usize]
|
||||
}
|
||||
}
|
||||
|
||||
impl ScopeEntry {
|
||||
fn new(pat: ast::BindPat) -> Option<ScopeEntry> {
|
||||
let name = pat.name()?;
|
||||
let res = ScopeEntry {
|
||||
name: name.text(),
|
||||
ptr: LocalSyntaxPtr::new(pat.syntax()),
|
||||
};
|
||||
Some(res)
|
||||
}
|
||||
pub(crate) fn name(&self) -> &SmolStr {
|
||||
&self.name
|
||||
}
|
||||
pub(crate) fn ptr(&self) -> LocalSyntaxPtr {
|
||||
self.ptr
|
||||
}
|
||||
}
|
||||
|
||||
fn compute_block_scopes(block: ast::Block, scopes: &mut FnScopes, mut scope: ScopeId) {
|
||||
for stmt in block.statements() {
|
||||
match stmt {
|
||||
ast::Stmt::LetStmt(stmt) => {
|
||||
if let Some(expr) = stmt.initializer() {
|
||||
scopes.set_scope(expr.syntax(), scope);
|
||||
compute_expr_scopes(expr, scopes, scope);
|
||||
}
|
||||
scope = scopes.new_scope(scope);
|
||||
if let Some(pat) = stmt.pat() {
|
||||
scopes.add_bindings(scope, pat);
|
||||
}
|
||||
}
|
||||
ast::Stmt::ExprStmt(expr_stmt) => {
|
||||
if let Some(expr) = expr_stmt.expr() {
|
||||
scopes.set_scope(expr.syntax(), scope);
|
||||
compute_expr_scopes(expr, scopes, scope);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(expr) = block.expr() {
|
||||
scopes.set_scope(expr.syntax(), scope);
|
||||
compute_expr_scopes(expr, scopes, scope);
|
||||
}
|
||||
}
|
||||
|
||||
fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
|
||||
match expr {
|
||||
ast::Expr::IfExpr(e) => {
|
||||
let cond_scope = e
|
||||
.condition()
|
||||
.and_then(|cond| compute_cond_scopes(cond, scopes, scope));
|
||||
if let Some(block) = e.then_branch() {
|
||||
compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope));
|
||||
}
|
||||
if let Some(block) = e.else_branch() {
|
||||
compute_block_scopes(block, scopes, scope);
|
||||
}
|
||||
}
|
||||
ast::Expr::BlockExpr(e) => {
|
||||
if let Some(block) = e.block() {
|
||||
compute_block_scopes(block, scopes, scope);
|
||||
}
|
||||
}
|
||||
ast::Expr::LoopExpr(e) => {
|
||||
if let Some(block) = e.loop_body() {
|
||||
compute_block_scopes(block, scopes, scope);
|
||||
}
|
||||
}
|
||||
ast::Expr::WhileExpr(e) => {
|
||||
let cond_scope = e
|
||||
.condition()
|
||||
.and_then(|cond| compute_cond_scopes(cond, scopes, scope));
|
||||
if let Some(block) = e.loop_body() {
|
||||
compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope));
|
||||
}
|
||||
}
|
||||
ast::Expr::ForExpr(e) => {
|
||||
if let Some(expr) = e.iterable() {
|
||||
compute_expr_scopes(expr, scopes, scope);
|
||||
}
|
||||
let mut scope = scope;
|
||||
if let Some(pat) = e.pat() {
|
||||
scope = scopes.new_scope(scope);
|
||||
scopes.add_bindings(scope, pat);
|
||||
}
|
||||
if let Some(block) = e.loop_body() {
|
||||
compute_block_scopes(block, scopes, scope);
|
||||
}
|
||||
}
|
||||
ast::Expr::LambdaExpr(e) => {
|
||||
let scope = scopes.new_scope(scope);
|
||||
scopes.add_params_bindings(scope, e.param_list());
|
||||
if let Some(body) = e.body() {
|
||||
scopes.set_scope(body.syntax(), scope);
|
||||
compute_expr_scopes(body, scopes, scope);
|
||||
}
|
||||
}
|
||||
ast::Expr::CallExpr(e) => {
|
||||
compute_call_scopes(e.expr(), e.arg_list(), scopes, scope);
|
||||
}
|
||||
ast::Expr::MethodCallExpr(e) => {
|
||||
compute_call_scopes(e.expr(), e.arg_list(), scopes, scope);
|
||||
}
|
||||
ast::Expr::MatchExpr(e) => {
|
||||
if let Some(expr) = e.expr() {
|
||||
compute_expr_scopes(expr, scopes, scope);
|
||||
}
|
||||
for arm in e.match_arm_list().into_iter().flat_map(|it| it.arms()) {
|
||||
let scope = scopes.new_scope(scope);
|
||||
for pat in arm.pats() {
|
||||
scopes.add_bindings(scope, pat);
|
||||
}
|
||||
if let Some(expr) = arm.expr() {
|
||||
compute_expr_scopes(expr, scopes, scope);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => expr
|
||||
.syntax()
|
||||
.children()
|
||||
.filter_map(ast::Expr::cast)
|
||||
.for_each(|expr| compute_expr_scopes(expr, scopes, scope)),
|
||||
};
|
||||
|
||||
fn compute_call_scopes(
|
||||
receiver: Option<ast::Expr>,
|
||||
arg_list: Option<ast::ArgList>,
|
||||
scopes: &mut FnScopes,
|
||||
scope: ScopeId,
|
||||
) {
|
||||
arg_list
|
||||
.into_iter()
|
||||
.flat_map(|it| it.args())
|
||||
.chain(receiver)
|
||||
.for_each(|expr| compute_expr_scopes(expr, scopes, scope));
|
||||
}
|
||||
|
||||
fn compute_cond_scopes(
|
||||
cond: ast::Condition,
|
||||
scopes: &mut FnScopes,
|
||||
scope: ScopeId,
|
||||
) -> Option<ScopeId> {
|
||||
if let Some(expr) = cond.expr() {
|
||||
compute_expr_scopes(expr, scopes, scope);
|
||||
}
|
||||
if let Some(pat) = cond.pat() {
|
||||
let s = scopes.new_scope(scope);
|
||||
scopes.add_bindings(s, pat);
|
||||
Some(s)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_local_name<'a>(
|
||||
name_ref: ast::NameRef,
|
||||
scopes: &'a FnScopes,
|
||||
) -> Option<&'a ScopeEntry> {
|
||||
let mut shadowed = FxHashSet::default();
|
||||
let ret = scopes
|
||||
.scope_chain(name_ref.syntax())
|
||||
.flat_map(|scope| scopes.entries(scope).iter())
|
||||
.filter(|entry| shadowed.insert(entry.name()))
|
||||
.filter(|entry| entry.name() == &name_ref.text())
|
||||
.nth(0);
|
||||
ret
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ra_syntax::File;
|
||||
use test_utils::extract_offset;
|
||||
use ra_editor::{find_node_at_offset};
|
||||
|
||||
use super::*;
|
||||
|
||||
|
||||
fn do_check(code: &str, expected: &[&str]) {
|
||||
let (off, code) = extract_offset(code);
|
||||
let code = {
|
||||
let mut buf = String::new();
|
||||
let off = u32::from(off) as usize;
|
||||
buf.push_str(&code[..off]);
|
||||
buf.push_str("marker");
|
||||
buf.push_str(&code[off..]);
|
||||
buf
|
||||
};
|
||||
let file = File::parse(&code);
|
||||
let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
|
||||
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
|
||||
let scopes = FnScopes::new(fn_def);
|
||||
let actual = scopes
|
||||
.scope_chain(marker.syntax())
|
||||
.flat_map(|scope| scopes.entries(scope))
|
||||
.map(|it| it.name())
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual.as_slice(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lambda_scope() {
|
||||
do_check(
|
||||
r"
|
||||
fn quux(foo: i32) {
|
||||
let f = |bar, baz: i32| {
|
||||
<|>
|
||||
};
|
||||
}",
|
||||
&["bar", "baz", "foo"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_call_scope() {
|
||||
do_check(
|
||||
r"
|
||||
fn quux() {
|
||||
f(|x| <|> );
|
||||
}",
|
||||
&["x"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_metod_call_scope() {
|
||||
do_check(
|
||||
r"
|
||||
fn quux() {
|
||||
z.f(|x| <|> );
|
||||
}",
|
||||
&["x"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_loop_scope() {
|
||||
do_check(
|
||||
r"
|
||||
fn quux() {
|
||||
loop {
|
||||
let x = ();
|
||||
<|>
|
||||
};
|
||||
}",
|
||||
&["x"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_match() {
|
||||
do_check(
|
||||
r"
|
||||
fn quux() {
|
||||
match () {
|
||||
Some(x) => {
|
||||
<|>
|
||||
}
|
||||
};
|
||||
}",
|
||||
&["x"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_shadow_variable() {
|
||||
do_check(
|
||||
r"
|
||||
fn foo(x: String) {
|
||||
let x : &str = &x<|>;
|
||||
}",
|
||||
&["x"],
|
||||
);
|
||||
}
|
||||
|
||||
fn do_check_local_name(code: &str, expected_offset: u32) {
|
||||
let (off, code) = extract_offset(code);
|
||||
let file = File::parse(&code);
|
||||
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
|
||||
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
|
||||
|
||||
let scopes = FnScopes::new(fn_def);
|
||||
|
||||
let local_name_entry = resolve_local_name(name_ref, &scopes).unwrap();
|
||||
let local_name = local_name_entry.ptr().resolve(&file);
|
||||
let expected_name =
|
||||
find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();
|
||||
assert_eq!(local_name.range(), expected_name.syntax().range());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_resolve_local_name() {
|
||||
do_check_local_name(
|
||||
r#"
|
||||
fn foo(x: i32, y: u32) {
|
||||
{
|
||||
let z = x * 2;
|
||||
}
|
||||
{
|
||||
let t = x<|> * 3;
|
||||
}
|
||||
}"#,
|
||||
21,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_resolve_local_name_declaration() {
|
||||
do_check_local_name(
|
||||
r#"
|
||||
fn foo(x: String) {
|
||||
let x : &str = &x<|>;
|
||||
}"#,
|
||||
21,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_resolve_local_name_shadow() {
|
||||
do_check_local_name(
|
||||
r"
|
||||
fn foo(x: String) {
|
||||
let x : &str = &x;
|
||||
x<|>
|
||||
}",
|
||||
46,
|
||||
);
|
||||
}
|
||||
}
|
|
@ -1,62 +1,46 @@
|
|||
pub(crate) mod module;
|
||||
pub(crate) mod function;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use ra_syntax::{
|
||||
ast::{self, AstNode, NameOwner},
|
||||
SmolStr,
|
||||
ast::{FnDefNode},
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FnDescriptor {
|
||||
pub name: String,
|
||||
pub label: String,
|
||||
pub ret_type: Option<String>,
|
||||
pub params: Vec<String>,
|
||||
}
|
||||
use crate::{
|
||||
FileId, Cancelable,
|
||||
db::SyntaxDatabase,
|
||||
descriptors::module::{ModuleTree, ModuleId, ModuleScope},
|
||||
descriptors::function::{FnId, FnScopes},
|
||||
input::SourceRootId,
|
||||
syntax_ptr::SyntaxPtrDatabase,
|
||||
};
|
||||
|
||||
impl FnDescriptor {
|
||||
pub fn new(node: ast::FnDef) -> Option<Self> {
|
||||
let name = node.name()?.text().to_string();
|
||||
|
||||
// Strip the body out for the label.
|
||||
let label: String = if let Some(body) = node.body() {
|
||||
let body_range = body.syntax().range();
|
||||
let label: String = node
|
||||
.syntax()
|
||||
.children()
|
||||
.filter(|child| !child.range().is_subrange(&body_range))
|
||||
.map(|node| node.text().to_string())
|
||||
.collect();
|
||||
label
|
||||
} else {
|
||||
node.syntax().text().to_string()
|
||||
};
|
||||
|
||||
let params = FnDescriptor::param_list(node);
|
||||
let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());
|
||||
|
||||
Some(FnDescriptor {
|
||||
name,
|
||||
ret_type,
|
||||
params,
|
||||
label,
|
||||
})
|
||||
}
|
||||
|
||||
fn param_list(node: ast::FnDef) -> Vec<String> {
|
||||
let mut res = vec![];
|
||||
if let Some(param_list) = node.param_list() {
|
||||
if let Some(self_param) = param_list.self_param() {
|
||||
res.push(self_param.syntax().text().to_string())
|
||||
}
|
||||
|
||||
// Maybe use param.pat here? See if we can just extract the name?
|
||||
//res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
|
||||
res.extend(
|
||||
param_list
|
||||
.params()
|
||||
.filter_map(|p| p.pat())
|
||||
.map(|pat| pat.syntax().text().to_string()),
|
||||
);
|
||||
salsa::query_group! {
|
||||
pub(crate) trait DescriptorDatabase: SyntaxDatabase + SyntaxPtrDatabase {
|
||||
fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
|
||||
type ModuleTreeQuery;
|
||||
use fn module::imp::module_tree;
|
||||
}
|
||||
fn submodules(file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
|
||||
type SubmodulesQuery;
|
||||
use fn module::imp::submodules;
|
||||
}
|
||||
fn module_scope(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<ModuleScope>> {
|
||||
type ModuleScopeQuery;
|
||||
use fn module::imp::module_scope;
|
||||
}
|
||||
fn fn_syntax(fn_id: FnId) -> FnDefNode {
|
||||
type FnSyntaxQuery;
|
||||
// Don't retain syntax trees in memory
|
||||
storage volatile;
|
||||
use fn function::imp::fn_syntax;
|
||||
}
|
||||
fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> {
|
||||
type FnScopesQuery;
|
||||
use fn function::imp::fn_scopes;
|
||||
}
|
||||
res
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,14 +10,15 @@ use ra_syntax::{
|
|||
use crate::{
|
||||
FileId, Cancelable, FileResolverImp, db,
|
||||
input::{SourceRoot, SourceRootId},
|
||||
descriptors::DescriptorDatabase,
|
||||
};
|
||||
|
||||
use super::{
|
||||
ModuleData, ModuleTree, ModuleId, LinkId, LinkData, Problem, ModulesDatabase, ModuleScope
|
||||
ModuleData, ModuleTree, ModuleId, LinkId, LinkData, Problem, ModuleScope
|
||||
};
|
||||
|
||||
|
||||
pub(super) fn submodules(db: &impl ModulesDatabase, file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
|
||||
pub(crate) fn submodules(db: &impl DescriptorDatabase, file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
|
||||
db::check_canceled(db)?;
|
||||
let file = db.file_syntax(file_id);
|
||||
let root = file.ast();
|
||||
|
@ -25,7 +26,7 @@ pub(super) fn submodules(db: &impl ModulesDatabase, file_id: FileId) -> Cancelab
|
|||
Ok(Arc::new(submodules))
|
||||
}
|
||||
|
||||
pub(super) fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast::Module<'_>)> {
|
||||
pub(crate) fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast::Module<'_>)> {
|
||||
root.modules().filter_map(|module| {
|
||||
let name = module.name()?.text();
|
||||
if !module.has_semi() {
|
||||
|
@ -35,8 +36,8 @@ pub(super) fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast
|
|||
})
|
||||
}
|
||||
|
||||
pub(super) fn module_scope(
|
||||
db: &impl ModulesDatabase,
|
||||
pub(crate) fn module_scope(
|
||||
db: &impl DescriptorDatabase,
|
||||
source_root_id: SourceRootId,
|
||||
module_id: ModuleId,
|
||||
) -> Cancelable<Arc<ModuleScope>> {
|
||||
|
@ -47,8 +48,8 @@ pub(super) fn module_scope(
|
|||
Ok(Arc::new(res))
|
||||
}
|
||||
|
||||
pub(super) fn module_tree(
|
||||
db: &impl ModulesDatabase,
|
||||
pub(crate) fn module_tree(
|
||||
db: &impl DescriptorDatabase,
|
||||
source_root: SourceRootId,
|
||||
) -> Cancelable<Arc<ModuleTree>> {
|
||||
db::check_canceled(db)?;
|
||||
|
@ -64,7 +65,7 @@ pub struct Submodule {
|
|||
|
||||
|
||||
fn create_module_tree<'a>(
|
||||
db: &impl ModulesDatabase,
|
||||
db: &impl DescriptorDatabase,
|
||||
source_root: SourceRootId,
|
||||
) -> Cancelable<ModuleTree> {
|
||||
let mut tree = ModuleTree {
|
||||
|
@ -88,7 +89,7 @@ fn create_module_tree<'a>(
|
|||
}
|
||||
|
||||
fn build_subtree(
|
||||
db: &impl ModulesDatabase,
|
||||
db: &impl DescriptorDatabase,
|
||||
source_root: &SourceRoot,
|
||||
tree: &mut ModuleTree,
|
||||
visited: &mut FxHashSet<FileId>,
|
||||
|
|
|
@ -1,37 +1,13 @@
|
|||
mod imp;
|
||||
pub(super) mod imp;
|
||||
pub(crate) mod scope;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use relative_path::RelativePathBuf;
|
||||
use ra_syntax::{ast::{self, NameOwner, AstNode}, SmolStr, SyntaxNode};
|
||||
|
||||
use crate::{
|
||||
FileId, Cancelable,
|
||||
db::SyntaxDatabase,
|
||||
input::SourceRootId,
|
||||
};
|
||||
use crate::FileId;
|
||||
|
||||
pub(crate) use self::scope::ModuleScope;
|
||||
|
||||
salsa::query_group! {
|
||||
pub(crate) trait ModulesDatabase: SyntaxDatabase {
|
||||
fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
|
||||
type ModuleTreeQuery;
|
||||
use fn imp::module_tree;
|
||||
}
|
||||
fn submodules(file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
|
||||
type SubmodulesQuery;
|
||||
use fn imp::submodules;
|
||||
}
|
||||
fn module_scope(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<ModuleScope>> {
|
||||
type ModuleScopeQuery;
|
||||
use fn imp::module_scope;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub(crate) struct ModuleTree {
|
||||
mods: Vec<ModuleData>,
|
||||
|
|
|
@ -2,8 +2,8 @@
|
|||
|
||||
|
||||
use ra_syntax::{
|
||||
ast::{self, AstChildren, ModuleItemOwner},
|
||||
File, AstNode, SmolStr, SyntaxNode, SyntaxNodeRef,
|
||||
ast::{self, ModuleItemOwner},
|
||||
File, AstNode, SmolStr,
|
||||
};
|
||||
|
||||
use crate::syntax_ptr::LocalSyntaxPtr;
|
||||
|
@ -30,8 +30,12 @@ enum EntryKind {
|
|||
|
||||
impl ModuleScope {
|
||||
pub fn new(file: &File) -> ModuleScope {
|
||||
ModuleScope::from_items(file.ast().items())
|
||||
}
|
||||
|
||||
pub fn from_items<'a>(items: impl Iterator<Item = ast::ModuleItem<'a>>) -> ModuleScope {
|
||||
let mut entries = Vec::new();
|
||||
for item in file.ast().items() {
|
||||
for item in items {
|
||||
let entry = match item {
|
||||
ast::ModuleItem::StructDef(item) => Entry::new(item),
|
||||
ast::ModuleItem::EnumDef(item) => Entry::new(item),
|
||||
|
@ -99,7 +103,7 @@ fn collect_imports(tree: ast::UseTree, acc: &mut Vec<Entry>) {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use ra_syntax::{ast::ModuleItemOwner, File};
|
||||
use ra_syntax::{File};
|
||||
|
||||
fn do_check(code: &str, expected: &[&str]) {
|
||||
let file = File::parse(&code);
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::{
|
|||
sync::Arc,
|
||||
};
|
||||
|
||||
use ra_editor::{self, find_node_at_offset, resolve_local_name, FileSymbol, LineIndex, LocalEdit, CompletionItem};
|
||||
use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
|
||||
use ra_syntax::{
|
||||
ast::{self, ArgListOwner, Expr, NameOwner},
|
||||
AstNode, File, SmolStr,
|
||||
|
@ -21,9 +21,14 @@ use crate::{
|
|||
self, SyntaxDatabase, FileSyntaxQuery,
|
||||
},
|
||||
input::{SourceRootId, FilesDatabase, SourceRoot, WORKSPACE},
|
||||
descriptors::module::{ModulesDatabase, ModuleTree, Problem},
|
||||
descriptors::{FnDescriptor},
|
||||
descriptors::{
|
||||
DescriptorDatabase,
|
||||
module::{ModuleTree, Problem},
|
||||
function::{FnDescriptor, FnId},
|
||||
},
|
||||
completion::{scope_completion, resolve_based_completion, CompletionItem},
|
||||
symbol_index::SymbolIndex,
|
||||
syntax_ptr::SyntaxPtrDatabase,
|
||||
CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, Position,
|
||||
Query, SourceChange, SourceFileEdit, Cancelable,
|
||||
};
|
||||
|
@ -175,7 +180,7 @@ impl AnalysisHostImpl {
|
|||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct AnalysisImpl {
|
||||
db: db::RootDatabase,
|
||||
pub(crate) db: db::RootDatabase,
|
||||
}
|
||||
|
||||
impl AnalysisImpl {
|
||||
|
@ -245,12 +250,11 @@ impl AnalysisImpl {
|
|||
pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Cancelable<Option<Vec<CompletionItem>>> {
|
||||
let mut res = Vec::new();
|
||||
let mut has_completions = false;
|
||||
let file = self.file_syntax(file_id);
|
||||
if let Some(scope_based) = ra_editor::scope_completion(&file, offset) {
|
||||
if let Some(scope_based) = scope_completion(&self.db, file_id, offset) {
|
||||
res.extend(scope_based);
|
||||
has_completions = true;
|
||||
}
|
||||
if let Some(scope_based) = crate::completion::resolve_based_completion(&self.db, file_id, offset)? {
|
||||
if let Some(scope_based) = resolve_based_completion(&self.db, file_id, offset)? {
|
||||
res.extend(scope_based);
|
||||
has_completions = true;
|
||||
}
|
||||
|
@ -271,7 +275,7 @@ impl AnalysisImpl {
|
|||
let syntax = file.syntax();
|
||||
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
|
||||
// First try to resolve the symbol locally
|
||||
return if let Some((name, range)) = resolve_local_name(name_ref) {
|
||||
return if let Some((name, range)) = resolve_local_name(&self.db, file_id, name_ref) {
|
||||
let mut vec = vec![];
|
||||
vec.push((
|
||||
file_id,
|
||||
|
@ -325,7 +329,7 @@ impl AnalysisImpl {
|
|||
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
|
||||
|
||||
// We are only handing local references for now
|
||||
if let Some(resolved) = resolve_local_name(name_ref) {
|
||||
if let Some(resolved) = resolve_local_name(&self.db, file_id, name_ref) {
|
||||
|
||||
ret.push((file_id, resolved.1));
|
||||
|
||||
|
@ -333,7 +337,7 @@ impl AnalysisImpl {
|
|||
|
||||
let refs : Vec<_> = fn_def.syntax().descendants()
|
||||
.filter_map(ast::NameRef::cast)
|
||||
.filter(|&n: &ast::NameRef| resolve_local_name(n) == Some(resolved.clone()))
|
||||
.filter(|&n: &ast::NameRef| resolve_local_name(&self.db, file_id, n) == Some(resolved.clone()))
|
||||
.collect();
|
||||
|
||||
for r in refs {
|
||||
|
@ -597,3 +601,16 @@ impl<'a> FnCallNode<'a> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_local_name(
|
||||
db: &db::RootDatabase,
|
||||
file_id: FileId,
|
||||
name_ref: ast::NameRef,
|
||||
) -> Option<(SmolStr, TextRange)> {
|
||||
let fn_def = name_ref.syntax().ancestors().find_map(ast::FnDef::cast)?;
|
||||
let fn_id = FnId::new(file_id, fn_def);
|
||||
let scopes = db.fn_scopes(fn_id);
|
||||
let scope_entry = crate::descriptors::function::resolve_local_name(name_ref, &scopes)?;
|
||||
let syntax = db.resolve_syntax_ptr(scope_entry.ptr().into_global(file_id));
|
||||
Some((scope_entry.name().clone(), syntax.range()))
|
||||
}
|
||||

@@ -13,6 +13,7 @@ mod imp;
mod symbol_index;
mod completion;
mod syntax_ptr;
mod mock_analysis;

use std::{
    fmt,
@@ -29,11 +30,13 @@ use crate::{
};

pub use crate::{
    descriptors::FnDescriptor,
    input::{FileId, FileResolver, CrateGraph, CrateId}
    descriptors::function::FnDescriptor,
    completion::CompletionItem,
    input::{FileId, FileResolver, CrateGraph, CrateId},
    mock_analysis::MockAnalysis,
};
pub use ra_editor::{
    CompletionItem, FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
    FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
    RunnableKind, StructureNode,
};

@@ -197,7 +200,7 @@ impl Query {

#[derive(Debug)]
pub struct Analysis {
    imp: AnalysisImpl,
    pub(crate) imp: AnalysisImpl,
}

impl Analysis {
crates/ra_analysis/src/mock_analysis.rs (new file, 71 lines)
@@ -0,0 +1,71 @@

use std::sync::Arc;

use relative_path::{RelativePath, RelativePathBuf};

use crate::{
    AnalysisChange, Analysis, AnalysisHost, FileId, FileResolver,
};

/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis
/// from a set of in-memory files.
#[derive(Debug, Default)]
pub struct MockAnalysis {
    files: Vec<(String, String)>,
}

impl MockAnalysis {
    pub fn new() -> MockAnalysis {
        MockAnalysis::default()
    }
    pub fn with_files(files: &[(&str, &str)]) -> MockAnalysis {
        let files = files.iter()
            .map(|it| (it.0.to_string(), it.1.to_string()))
            .collect();
        MockAnalysis { files }
    }
    pub fn analysis_host(self) -> AnalysisHost {
        let mut host = AnalysisHost::new();
        let mut file_map = Vec::new();
        let mut change = AnalysisChange::new();
        for (id, (path, contents)) in self.files.into_iter().enumerate() {
            let file_id = FileId((id + 1) as u32);
            assert!(path.starts_with('/'));
            let path = RelativePathBuf::from_path(&path[1..]).unwrap();
            change.add_file(file_id, contents);
            file_map.push((file_id, path));
        }
        change.set_file_resolver(Arc::new(FileMap(file_map)));
        host.apply_change(change);
        host
    }
    pub fn analysis(self) -> Analysis {
        self.analysis_host().analysis()
    }
}

#[derive(Debug)]
struct FileMap(Vec<(FileId, RelativePathBuf)>);

impl FileMap {
    fn iter<'a>(&'a self) -> impl Iterator<Item = (FileId, &'a RelativePath)> + 'a {
        self.0
            .iter()
            .map(|(id, path)| (*id, path.as_relative_path()))
    }

    fn path(&self, id: FileId) -> &RelativePath {
        self.iter().find(|&(it, _)| it == id).unwrap().1
    }
}

impl FileResolver for FileMap {
    fn file_stem(&self, id: FileId) -> String {
        self.path(id).file_stem().unwrap().to_string()
    }
    fn resolve(&self, id: FileId, rel: &RelativePath) -> Option<FileId> {
        let path = self.path(id).join(rel).normalize();
        let id = self.iter().find(|&(_, p)| path == p)?.0;
        Some(id)
    }
}

@@ -12,6 +12,7 @@ salsa::query_group! {
    pub(crate) trait SyntaxPtrDatabase: SyntaxDatabase {
        fn resolve_syntax_ptr(ptr: SyntaxPtr) -> SyntaxNode {
            type ResolveSyntaxPtrQuery;
            // Don't retain syntax trees in memory
            storage volatile;
        }
    }
@@ -83,6 +84,10 @@ impl LocalSyntaxPtr {
            .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
    }
}

    pub(crate) fn into_global(self, file_id: FileId) -> SyntaxPtr {
        SyntaxPtr { file_id, local: self }
    }
}