Merge branch 'master' into add-type-lenses

commit 31aef808d9
36 changed files with 384 additions and 810 deletions
@@ -4,6 +4,7 @@ mod presentation;
 mod complete_dot;
 mod complete_struct_literal;
+mod complete_struct_pattern;
 mod complete_pattern;
 mod complete_fn_param;
 mod complete_keyword;
@@ -65,6 +66,7 @@ pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Opti
     complete_scope::complete_scope(&mut acc, &ctx);
     complete_dot::complete_dot(&mut acc, &ctx);
     complete_struct_literal::complete_struct_literal(&mut acc, &ctx);
+    complete_struct_pattern::complete_struct_pattern(&mut acc, &ctx);
     complete_pattern::complete_pattern(&mut acc, &ctx);
     complete_postfix::complete_postfix(&mut acc, &ctx);
     Some(acc)
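These two hunks register the new provider. The design is an accumulator: every `complete_*` function takes `&mut Completions` and appends its candidates, so a new completion source costs one `mod` line and one call at this site. A minimal self-contained sketch of that pattern, with stand-in types rather than the real `CompletionContext`/`Completions`:

// Sketch of the accumulator pattern used by `completions`:
// each provider appends to a shared, growable list.
#[derive(Default, Debug)]
struct Completions {
    buf: Vec<String>,
}

impl Completions {
    fn add(&mut self, label: impl Into<String>) {
        self.buf.push(label.into());
    }
}

// Hypothetical context type standing in for `CompletionContext`.
struct Ctx;

fn complete_keywords(acc: &mut Completions, _ctx: &Ctx) {
    acc.add("fn");
    acc.add("match");
}

fn complete_fields(acc: &mut Completions, _ctx: &Ctx) {
    acc.add("foo");
}

fn completions(ctx: &Ctx) -> Option<Completions> {
    let mut acc = Completions::default();
    // A new provider slots in as one extra line here.
    complete_keywords(&mut acc, ctx);
    complete_fields(&mut acc, ctx);
    Some(acc)
}

fn main() {
    println!("{:?}", completions(&Ctx).unwrap().buf);
}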
@@ -1,23 +1,22 @@
-use hir::{Substs, Ty};
+use hir::Substs;
 
 use crate::completion::{CompletionContext, Completions};
 
 /// Complete fields in struct literals.
 pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
     let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| {
-        Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?))
+        Some((
+            ctx.analyzer.type_of(ctx.db, &it.clone().into())?,
+            ctx.analyzer.resolve_struct_literal(it)?,
+        ))
     }) {
         Some(it) => it,
         _ => return,
     };
-    let ty_substs = match ty {
-        Ty::Apply(it) => it.parameters,
-        _ => Substs::empty(),
-    };
+    let substs = &ty.substs().unwrap_or_else(Substs::empty);
 
     for field in variant.fields(ctx.db) {
-        acc.add_field(ctx, field, &ty_substs);
+        acc.add_field(ctx, field, substs);
     }
 }
crates/ra_ide_api/src/completion/complete_struct_pattern.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
+use hir::Substs;
+
+use crate::completion::{CompletionContext, Completions};
+
+pub(super) fn complete_struct_pattern(acc: &mut Completions, ctx: &CompletionContext) {
+    let (ty, variant) = match ctx.struct_lit_pat.as_ref().and_then(|it| {
+        Some((
+            ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?,
+            ctx.analyzer.resolve_struct_pattern(it)?,
+        ))
+    }) {
+        Some(it) => it,
+        _ => return,
+    };
+    let substs = &ty.substs().unwrap_or_else(Substs::empty);
+
+    for field in variant.fields(ctx.db) {
+        acc.add_field(ctx, field, substs);
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::completion::{do_completion, CompletionItem, CompletionKind};
+    use insta::assert_debug_snapshot_matches;
+
+    fn complete(code: &str) -> Vec<CompletionItem> {
+        do_completion(code, CompletionKind::Reference)
+    }
+
+    #[test]
+    fn test_struct_pattern_field() {
+        let completions = complete(
+            r"
+            struct S { foo: u32 }
+
+            fn process(f: S) {
+                match f {
+                    S { f<|>: 92 } => (),
+                }
+            }
+            ",
+        );
+        assert_debug_snapshot_matches!(completions, @r###"
+       ⋮[
+       ⋮    CompletionItem {
+       ⋮        label: "foo",
+       ⋮        source_range: [117; 118),
+       ⋮        delete: [117; 118),
+       ⋮        insert: "foo",
+       ⋮        kind: Field,
+       ⋮        detail: "u32",
+       ⋮    },
+       ⋮]
+        "###);
+    }
+
+    #[test]
+    fn test_struct_pattern_enum_variant() {
+        let completions = complete(
+            r"
+            enum E {
+                S { foo: u32, bar: () }
+            }
+
+            fn process(e: E) {
+                match e {
+                    E::S { <|> } => (),
+                }
+            }
+            ",
+        );
+        assert_debug_snapshot_matches!(completions, @r###"
+       ⋮[
+       ⋮    CompletionItem {
+       ⋮        label: "bar",
+       ⋮        source_range: [161; 161),
+       ⋮        delete: [161; 161),
+       ⋮        insert: "bar",
+       ⋮        kind: Field,
+       ⋮        detail: "()",
+       ⋮    },
+       ⋮    CompletionItem {
+       ⋮        label: "foo",
+       ⋮        source_range: [161; 161),
+       ⋮        delete: [161; 161),
+       ⋮        insert: "foo",
+       ⋮        kind: Field,
+       ⋮        detail: "u32",
+       ⋮    },
+       ⋮]
+        "###);
+    }
+}
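In these fixtures the `<|>` marker encodes the cursor position; the test harness strips it and runs completion at that offset. A sketch of how such a marker can be split out of a fixture string (`extract_cursor` is a hypothetical helper, not the actual test-utils API):

/// Hypothetical helper: find the `<|>` cursor marker in a test fixture,
/// returning the byte offset of the cursor and the text with the marker removed.
fn extract_cursor(fixture: &str) -> Option<(usize, String)> {
    const MARKER: &str = "<|>";
    let offset = fixture.find(MARKER)?;
    let mut text = String::with_capacity(fixture.len() - MARKER.len());
    text.push_str(&fixture[..offset]);
    text.push_str(&fixture[offset + MARKER.len()..]);
    Some((offset, text))
}

fn main() {
    let (offset, text) = extract_cursor("S { f<|>: 92 }").unwrap();
    assert_eq!(offset, 5);
    assert_eq!(text, "S { f: 92 }");
    println!("cursor at {}", offset);
}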
@@ -1,6 +1,6 @@
 use hir::source_binder;
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset},
+    algo::{find_covering_element, find_node_at_offset},
     ast, AstNode, Parse, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,

@@ -21,6 +21,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) function_syntax: Option<ast::FnDef>,
     pub(super) use_item_syntax: Option<ast::UseItem>,
     pub(super) struct_lit_syntax: Option<ast::StructLit>,
+    pub(super) struct_lit_pat: Option<ast::StructPat>,
     pub(super) is_param: bool,
     /// If a name-binding or reference to a const in a pattern.
     /// Irrefutable patterns (like let) are excluded.

@@ -48,7 +49,7 @@ impl<'a> CompletionContext<'a> {
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
         let token =
-            find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
+            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {

@@ -60,6 +61,7 @@ impl<'a> CompletionContext<'a> {
             function_syntax: None,
             use_item_syntax: None,
             struct_lit_syntax: None,
+            struct_lit_pat: None,
             is_param: false,
             is_pat_binding: false,
             is_trivial_path: false,

@@ -106,8 +108,7 @@ impl<'a> CompletionContext<'a> {
         // Otherwise, see if this is a declaration. We can use heuristics to
         // suggest declaration names, see `CompletionKind::Magic`.
         if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
-            if is_node::<ast::BindPat>(name.syntax()) {
-                let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap();
+            if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
                 let parent = bind_pat.syntax().parent();
                 if parent.clone().and_then(ast::MatchArm::cast).is_some()
                     || parent.and_then(ast::Condition::cast).is_some()

@@ -119,6 +120,10 @@ impl<'a> CompletionContext<'a> {
                 self.is_param = true;
                 return;
             }
+            if name.syntax().ancestors().find_map(ast::FieldPatList::cast).is_some() {
+                self.struct_lit_pat =
+                    find_node_at_offset(original_parse.tree().syntax(), self.offset);
+            }
         }
     }

@@ -235,7 +240,7 @@ fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Op
 }
 
 fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
-    match node.ancestors().filter_map(N::cast).next() {
+    match node.ancestors().find_map(N::cast) {
         None => false,
         Some(n) => n.syntax().text_range() == node.text_range(),
     }
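The last hunk swaps `filter_map(N::cast).next()` for the equivalent `find_map(N::cast)`: both yield the first ancestor for which the cast succeeds, but `find_map` says so directly. A self-contained illustration of the equivalence on a plain iterator:

fn main() {
    let words = ["alpha", "12", "beta", "7"];

    // Old style: map-and-filter everything, then take the first hit.
    let first_a: Option<i32> = words.iter().filter_map(|w| w.parse().ok()).next();

    // New style: stop at the first element where the closure returns Some.
    let first_b: Option<i32> = words.iter().find_map(|w| w.parse().ok());

    assert_eq!(first_a, first_b);
    println!("first number: {:?}", first_b); // Some(12)
}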
@@ -1,5 +1,4 @@
-use std::fmt::Write;
-
+use format_buf::format;
 use ra_syntax::ast::{self, AstNode, NameOwner, TypeAscriptionOwner, VisibilityOwner};
 
 pub(crate) trait ShortLabel {

@@ -73,7 +72,7 @@ where
     let mut buf = short_label_from_node(node, prefix)?;
 
     if let Some(type_ref) = node.ascribed_type() {
-        write!(buf, ": {}", type_ref.syntax()).unwrap();
+        format!(buf, ": {}", type_ref.syntax());
     }
 
     Some(buf)
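`format_buf::format!` appends formatted text into an existing buffer, hiding the `write!(buf, …).unwrap()` dance from `std::fmt::Write`. A minimal sketch of how such an append-into-buffer macro can be built (`format_into!` is an illustrative stand-in, not the crate's actual macro):

// Minimal sketch: a `format!`-like macro that appends into an existing
// buffer instead of allocating a new String.
macro_rules! format_into {
    ($buf:expr, $($arg:tt)*) => {{
        use std::fmt::Write;
        // Writing into a String cannot fail, so the unwrap is safe here.
        write!($buf, $($arg)*).unwrap();
    }};
}

fn main() {
    let mut buf = String::from("field");
    format_into!(buf, ": {}", "u32");
    assert_eq!(buf, "field: u32");
    println!("{}", buf);
}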
@@ -1,10 +1,10 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
+    algo::find_covering_element,
     ast::{self, AstNode, AstToken},
-    Direction, SyntaxElement,
+    Direction, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
 use crate::{db::RootDatabase, FileRange};

@@ -34,7 +34,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 
     if range.is_empty() {
         let offset = range.start();
-        let mut leaves = find_token_at_offset(root, offset);
+        let mut leaves = root.token_at_offset(offset);
         if leaves.clone().all(|it| it.kind() == WHITESPACE) {
             return Some(extend_ws(root, leaves.next()?, offset));
         }

@@ -53,7 +53,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         return Some(leaf_range);
     };
     let node = match find_covering_element(root, range) {
-        SyntaxElement::Token(token) => {
+        NodeOrToken::Token(token) => {
            if token.text_range() != range {
                return Some(token.text_range());
            }

@@ -64,7 +64,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
            }
            token.parent()
        }
-        SyntaxElement::Node(node) => node,
+        NodeOrToken::Node(node) => node,
     };
     if node.text_range() != range {
         return Some(node.text_range());

@@ -153,8 +153,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
     node.siblings_with_tokens(dir)
         .skip(1)
         .skip_while(|node| match node {
-            SyntaxElement::Node(_) => false,
-            SyntaxElement::Token(it) => is_single_line_ws(it),
+            NodeOrToken::Node(_) => false,
+            NodeOrToken::Token(it) => is_single_line_ws(it),
         })
         .next()
         .and_then(|it| it.into_token())
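From here on, most hunks are the mechanical rename of `SyntaxElement` to `NodeOrToken` in `ra_syntax`. The type is, in essence, a two-variant sum over nodes and tokens; a self-contained sketch of that shape and the `into_token` accessor used above (stand-in definitions, not rowan's actual ones):

// Stand-in for the two-variant sum type behind ra_syntax's NodeOrToken.
#[derive(Debug, PartialEq)]
enum NodeOrToken<N, T> {
    Node(N),
    Token(T),
}

impl<N, T> NodeOrToken<N, T> {
    // Mirrors the `into_token` call used in extend_list_item above.
    fn into_token(self) -> Option<T> {
        match self {
            NodeOrToken::Token(t) => Some(t),
            NodeOrToken::Node(_) => None,
        }
    }
}

fn main() {
    let elements: Vec<NodeOrToken<&str, char>> =
        vec![NodeOrToken::Node("FN_DEF"), NodeOrToken::Token('{')];
    for el in &elements {
        // The typical match that this diff rewrites from SyntaxElement::*.
        match el {
            NodeOrToken::Node(kind) => println!("node: {}", kind),
            NodeOrToken::Token(ch) => println!("token: {:?}", ch),
        }
    }
    assert_eq!(NodeOrToken::<&str, char>::Token('}').into_token(), Some('}'));
}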
@@ -2,7 +2,7 @@ use rustc_hash::FxHashSet;
 
 use ra_syntax::{
     ast::{self, AstNode, AstToken, VisibilityOwner},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, *},
     SyntaxNode, TextRange,
 };

@@ -31,8 +31,8 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         // Fold items that span multiple lines
         if let Some(kind) = fold_kind(element.kind()) {
             let is_multiline = match &element {
-                SyntaxElement::Node(node) => node.text().contains_char('\n'),
-                SyntaxElement::Token(token) => token.text().contains('\n'),
+                NodeOrToken::Node(node) => node.text().contains_char('\n'),
+                NodeOrToken::Token(token) => token.text().contains('\n'),
             };
             if is_multiline {
                 res.push(Fold { range: element.text_range(), kind });

@@ -41,7 +41,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
         }
 
         match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 // Fold groups of comments
                 if let Some(comment) = ast::Comment::cast(token) {
                     if !visited_comments.contains(&comment) {

@@ -53,7 +53,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
                     }
                 }
             }
-            SyntaxElement::Node(node) => {
+            NodeOrToken::Node(node) => {
                 // Fold groups of imports
                 if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
                     if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {

@@ -108,7 +108,7 @@ fn contiguous_range_for_group_unless(
     let mut last = first.clone();
     for element in first.siblings_with_tokens(Direction::Next) {
         let node = match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(ws) = ast::Whitespace::cast(token) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines

@@ -119,7 +119,7 @@ fn contiguous_range_for_group_unless(
                 // group ends here
                 break;
             }
-            SyntaxElement::Node(node) => node,
+            NodeOrToken::Node(node) => node,
         };
 
         // Stop if we find a node that doesn't belong to the group

@@ -154,7 +154,7 @@ fn contiguous_range_for_comment(
     let mut last = first.clone();
     for element in first.syntax().siblings_with_tokens(Direction::Next) {
         match element {
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Token(token) => {
                 if let Some(ws) = ast::Whitespace::cast(token.clone()) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines

@@ -173,7 +173,7 @@ fn contiguous_range_for_comment(
                 // * A comment of a different flavor was reached
                 break;
             }
-            SyntaxElement::Node(_) => break,
+            NodeOrToken::Node(_) => break,
         };
     }
@@ -1,5 +1,5 @@
 use ra_db::SourceDatabase;
-use ra_syntax::{algo::find_token_at_offset, ast, AstNode};
+use ra_syntax::{ast, AstNode};
 
 use crate::{db::RootDatabase, FilePosition, NavigationTarget, RangeInfo};

@@ -9,7 +9,7 @@ pub(crate) fn goto_type_definition(
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);
 
-    let node = find_token_at_offset(parse.tree().syntax(), position.offset).find_map(|token| {
+    let node = parse.tree().syntax().token_at_offset(position.offset).find_map(|token| {
         token
             .parent()
             .ancestors()
@@ -3,7 +3,7 @@ use ra_fmt::{compute_ws, extract_trivial_expression};
 use ra_syntax::{
     algo::{find_covering_element, non_trivia_sibling},
     ast::{self, AstNode, AstToken},
-    Direction, SourceFile, SyntaxElement,
+    Direction, NodeOrToken, SourceFile,
     SyntaxKind::{self, WHITESPACE},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
 };

@@ -23,8 +23,8 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
     };
 
     let node = match find_covering_element(file.syntax(), range) {
-        SyntaxElement::Node(node) => node,
-        SyntaxElement::Token(token) => token.parent(),
+        NodeOrToken::Node(node) => node,
+        NodeOrToken::Token(token) => token.parent(),
     };
     let mut edit = TextEditBuilder::default();
     for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
@@ -1,9 +1,11 @@
-use ra_syntax::{algo::find_token_at_offset, ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
+use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
 
 pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] =
         &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];
-    let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
+    let (brace_node, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
         .filter_map(|node| {
             let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
             Some((node, idx))
@@ -1,7 +1,7 @@
 use crate::db::RootDatabase;
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo, AstNode, SourceFile, SyntaxElement,
+    algo, AstNode, NodeOrToken, SourceFile,
     SyntaxKind::{RAW_STRING, STRING},
     SyntaxToken, TextRange,
 };

@@ -16,8 +16,8 @@ pub(crate) fn syntax_tree(
     let parse = db.parse(file_id);
     if let Some(text_range) = text_range {
         let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
-            SyntaxElement::Node(node) => node,
-            SyntaxElement::Token(token) => {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => {
                 if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                     return tree;
                 }
@@ -1,11 +1,11 @@
 use ra_db::{FilePosition, SourceDatabase};
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::find_node_at_offset,
     ast::{self, AstToken},
     AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
-    SyntaxToken, TextRange, TextUnit,
+    SyntaxToken, TextRange, TextUnit, TokenAtOffset,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};

@@ -14,7 +16,9 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let parse = db.parse(position.file_id);
     let file = parse.tree();
-    let comment = find_token_at_offset(file.syntax(), position.offset)
+    let comment = file
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;

@@ -45,7 +47,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
 }
 
 fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
-    let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) {
+    let ws = match file.syntax().token_at_offset(token.text_range().start()) {
         TokenAtOffset::Between(l, r) => {
             assert!(r == *token);
             l

@@ -91,7 +93,10 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
     let parse = db.parse(position.file_id);
     assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.'));
 
-    let whitespace = find_token_at_offset(parse.tree().syntax(), position.offset)
+    let whitespace = parse
+        .tree()
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Whitespace::cast)?;
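The other recurring change in this commit replaces the free function `algo::find_token_at_offset(node, offset)` with the method `node.token_at_offset(offset)`. The result models the fact that an offset can touch zero, one, or two adjacent tokens, and `left_biased()` resolves the two-token boundary case toward the left. A self-contained sketch of those semantics (stand-in types, not the actual `ra_syntax` definitions):

// Stand-in illustrating the semantics behind TokenAtOffset: an offset can
// touch no token, exactly one token, or sit on the boundary between two.
#[derive(Debug, PartialEq)]
enum TokenAtOffset<T> {
    None,
    Single(T),
    Between(T, T),
}

impl<T> TokenAtOffset<T> {
    // `left_biased` resolves the boundary case in favor of the left token.
    fn left_biased(self) -> Option<T> {
        match self {
            TokenAtOffset::None => None,
            TokenAtOffset::Single(t) => Some(t),
            TokenAtOffset::Between(l, _r) => Some(l),
        }
    }
}

fn main() {
    // Offset on the boundary between "foo" and "(": left_biased picks "foo".
    assert_eq!(TokenAtOffset::Between("foo", "(").left_biased(), Some("foo"));
    assert_eq!(TokenAtOffset::Single("fn").left_biased(), Some("fn"));
    assert_eq!(TokenAtOffset::<&str>::None.left_biased(), None);
}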