Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-28 04:44:57 +00:00)
simplify
parent d5f7b2e52a
commit a6b92a8cc0
9 changed files with 141 additions and 144 deletions
@@ -34,19 +34,13 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
 
     fn with_position(ra_fixture: &str) -> (Self, FilePosition) {
         let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
-        let offset = match range_or_offset {
-            RangeOrOffset::Range(_) => panic!("Expected a cursor position, got a range instead"),
-            RangeOrOffset::Offset(it) => it,
-        };
+        let offset = range_or_offset.expect_offset();
         (db, FilePosition { file_id, offset })
     }
 
     fn with_range(ra_fixture: &str) -> (Self, FileRange) {
         let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
-        let range = match range_or_offset {
-            RangeOrOffset::Range(it) => it,
-            RangeOrOffset::Offset(_) => panic!("Expected a cursor range, got a position instead"),
-        };
+        let range = range_or_offset.expect_range();
         (db, FileRange { file_id, range })
     }
 
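
The match-and-panic unwrapping removed above is the pattern this commit deletes from every fixture helper; its replacement lives in the `RangeOrOffset` impl added in the last hunk of this diff. A minimal, self-contained sketch of the caller-side change, using stand-in type aliases in place of `text_size::TextSize`/`TextRange`:

    // Stand-ins for text_size::TextSize / text_size::TextRange, only to keep the sketch self-contained.
    type TextSize = u32;
    type TextRange = std::ops::Range<u32>;

    pub enum RangeOrOffset {
        Range(TextRange),
        Offset(TextSize),
    }

    impl RangeOrOffset {
        // The helper introduced at the end of this commit: panic with one uniform
        // message when the fixture marker denotes a range but a single cursor
        // position was expected.
        pub fn expect_offset(self) -> TextSize {
            match self {
                RangeOrOffset::Offset(it) => it,
                RangeOrOffset::Range(_) => panic!("expected an offset but got a range instead"),
            }
        }
    }

    // Before this commit every fixture helper repeated the match above inline;
    // after it, the unwrapping is a single call.
    fn cursor_offset(range_or_offset: RangeOrOffset) -> TextSize {
        range_or_offset.expect_offset()
    }
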
@@ -1,7 +1,7 @@
 //! Utilities for creating `Analysis` instances for tests.
 use ide_db::base_db::fixture::ChangeFixture;
 use syntax::{TextRange, TextSize};
-use test_utils::{extract_annotations, RangeOrOffset};
+use test_utils::extract_annotations;
 
 use crate::{Analysis, AnalysisHost, FileId, FilePosition, FileRange};
 
@@ -27,10 +27,7 @@ pub(crate) fn position(ra_fixture: &str) -> (Analysis, FilePosition) {
     let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    let offset = match range_or_offset {
-        RangeOrOffset::Range(_) => panic!(),
-        RangeOrOffset::Offset(it) => it,
-    };
+    let offset = range_or_offset.expect_offset();
     (host.analysis(), FilePosition { file_id, offset })
 }
 
@@ -40,10 +37,7 @@ pub(crate) fn range(ra_fixture: &str) -> (Analysis, FileRange) {
     let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    let range = match range_or_offset {
-        RangeOrOffset::Range(it) => it,
-        RangeOrOffset::Offset(_) => panic!(),
-    };
+    let range = range_or_offset.expect_range();
     (host.analysis(), FileRange { file_id, range })
 }
 
@@ -53,10 +47,7 @@ pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(Fil
     let change_fixture = ChangeFixture::parse(ra_fixture);
     host.db.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    let offset = match range_or_offset {
-        RangeOrOffset::Range(_) => panic!(),
-        RangeOrOffset::Offset(it) => it,
-    };
+    let offset = range_or_offset.expect_offset();
 
     let annotations = change_fixture
         .files
@@ -39,6 +39,8 @@ pub(crate) fn complete_use_tree_keyword(acc: &mut Completions, ctx: &CompletionC
     }
 }
 
+trait Foo {}
+
 pub(crate) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
     if ctx.token.kind() == SyntaxKind::COMMENT {
         cov_mark::hit!(no_keyword_completion_in_comments);
@@ -48,91 +50,92 @@ pub(crate) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
         cov_mark::hit!(no_keyword_completion_in_record_lit);
         return;
     }
+    let mut add_keyword = |kw, snippet| add_keyword(ctx, acc, kw, snippet);
 
     let expects_assoc_item = ctx.expects_assoc_item();
     let has_block_expr_parent = ctx.has_block_expr_parent();
     let expects_item = ctx.expects_item();
 
     if ctx.has_impl_or_trait_prev_sibling() {
-        add_keyword(ctx, acc, "where", "where ");
+        // FIXME this also incorrectly shows up after a complete trait/impl
+        add_keyword("where", "where ");
         return;
     }
     if ctx.previous_token_is(T![unsafe]) {
-        if expects_item || has_block_expr_parent {
-            add_keyword(ctx, acc, "fn", "fn $1($2) {\n    $0\n}")
+        if expects_item || expects_assoc_item || has_block_expr_parent {
+            add_keyword("fn", "fn $1($2) {\n    $0\n}")
         }
 
         if expects_item || has_block_expr_parent {
-            add_keyword(ctx, acc, "trait", "trait $1 {\n    $0\n}");
-            add_keyword(ctx, acc, "impl", "impl $1 {\n    $0\n}");
+            add_keyword("trait", "trait $1 {\n    $0\n}");
+            add_keyword("impl", "impl $1 {\n    $0\n}");
         }
 
         return;
     }
-    if expects_item || expects_assoc_item || has_block_expr_parent {
-        add_keyword(ctx, acc, "fn", "fn $1($2) {\n    $0\n}");
+    if expects_item || ctx.expects_non_trait_assoc_item() || ctx.expect_record_field() {
+        add_keyword("pub(crate)", "pub(crate) ");
+        add_keyword("pub", "pub ");
     }
 
+    if expects_item || expects_assoc_item || has_block_expr_parent || ctx.is_match_arm {
+        add_keyword("unsafe", "unsafe ");
+    }
+
+    if expects_item || expects_assoc_item || has_block_expr_parent {
+        add_keyword("fn", "fn $1($2) {\n    $0\n}");
+        add_keyword("const", "const $0");
+        add_keyword("type", "type $0");
+    }
+
     if expects_item || has_block_expr_parent {
-        add_keyword(ctx, acc, "use", "use ");
-        add_keyword(ctx, acc, "impl", "impl $1 {\n    $0\n}");
-        add_keyword(ctx, acc, "trait", "trait $1 {\n    $0\n}");
+        add_keyword("use", "use $0");
+        add_keyword("impl", "impl $1 {\n    $0\n}");
+        add_keyword("trait", "trait $1 {\n    $0\n}");
+        add_keyword("static", "static $0");
+        add_keyword("extern", "extern $0");
+        add_keyword("mod", "mod $0");
     }
 
     if expects_item {
-        add_keyword(ctx, acc, "enum", "enum $1 {\n    $0\n}");
-        add_keyword(ctx, acc, "struct", "struct $0");
-        add_keyword(ctx, acc, "union", "union $1 {\n    $0\n}");
+        add_keyword("enum", "enum $1 {\n    $0\n}");
+        add_keyword("struct", "struct $0");
+        add_keyword("union", "union $1 {\n    $0\n}");
     }
 
-    if ctx.is_expr {
-        add_keyword(ctx, acc, "match", "match $1 {\n    $0\n}");
-        add_keyword(ctx, acc, "while", "while $1 {\n    $0\n}");
-        add_keyword(ctx, acc, "while let", "while let $1 = $2 {\n    $0\n}");
-        add_keyword(ctx, acc, "loop", "loop {\n    $0\n}");
-        add_keyword(ctx, acc, "if", "if $1 {\n    $0\n}");
-        add_keyword(ctx, acc, "if let", "if let $1 = $2 {\n    $0\n}");
-        add_keyword(ctx, acc, "for", "for $1 in $2 {\n    $0\n}");
+    if ctx.expects_expression() {
+        add_keyword("match", "match $1 {\n    $0\n}");
+        add_keyword("while", "while $1 {\n    $0\n}");
+        add_keyword("while let", "while let $1 = $2 {\n    $0\n}");
+        add_keyword("loop", "loop {\n    $0\n}");
+        add_keyword("if", "if $1 {\n    $0\n}");
+        add_keyword("if let", "if let $1 = $2 {\n    $0\n}");
+        add_keyword("for", "for $1 in $2 {\n    $0\n}");
     }
 
     if ctx.previous_token_is(T![if]) || ctx.previous_token_is(T![while]) || has_block_expr_parent {
-        add_keyword(ctx, acc, "let", "let ");
+        add_keyword("let", "let ");
     }
 
     if ctx.after_if {
-        add_keyword(ctx, acc, "else", "else {\n    $0\n}");
-        add_keyword(ctx, acc, "else if", "else if $1 {\n    $0\n}");
-    }
-    if expects_item || has_block_expr_parent {
-        add_keyword(ctx, acc, "mod", "mod $0");
+        add_keyword("else", "else {\n    $0\n}");
+        add_keyword("else if", "else if $1 {\n    $0\n}");
     }
 
     if ctx.expects_ident_pat_or_ref_expr() {
-        add_keyword(ctx, acc, "mut", "mut ");
-    }
-    if expects_item || expects_assoc_item || has_block_expr_parent {
-        add_keyword(ctx, acc, "const", "const ");
-        add_keyword(ctx, acc, "type", "type ");
-    }
-    if expects_item || has_block_expr_parent {
-        add_keyword(ctx, acc, "static", "static ");
-    };
-    if expects_item || has_block_expr_parent {
-        add_keyword(ctx, acc, "extern", "extern ");
-    }
-    if expects_item || expects_assoc_item || has_block_expr_parent || ctx.is_match_arm {
-        add_keyword(ctx, acc, "unsafe", "unsafe ");
+        add_keyword("mut", "mut ");
     }
 
     if ctx.in_loop_body {
         if ctx.can_be_stmt {
-            add_keyword(ctx, acc, "continue", "continue;");
-            add_keyword(ctx, acc, "break", "break;");
+            add_keyword("continue", "continue;");
+            add_keyword("break", "break;");
         } else {
-            add_keyword(ctx, acc, "continue", "continue");
-            add_keyword(ctx, acc, "break", "break");
+            add_keyword("continue", "continue");
+            add_keyword("break", "break");
         }
     }
-    if expects_item || ctx.expects_non_trait_assoc_item() || ctx.expect_record_field() {
-        add_keyword(ctx, acc, "pub(crate)", "pub(crate) ");
-        add_keyword(ctx, acc, "pub", "pub ");
-    }
 
     if !ctx.is_trivial_path {
        return;
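
The new `let mut add_keyword = |kw, snippet| add_keyword(ctx, acc, kw, snippet);` line shadows the free function with a closure that captures the arguments shared by every call site, which is what lets the dozens of calls above drop `ctx` and `acc`. A minimal sketch of the pattern with stand-in types (the real `Completions`/`CompletionContext` live in ide_completion):

    struct CompletionContext;
    struct Completions(Vec<(String, String)>);

    // Free function taking every argument explicitly, like `add_keyword` in this file.
    fn add_keyword(_ctx: &CompletionContext, acc: &mut Completions, kw: &str, snippet: &str) {
        acc.0.push((kw.to_string(), snippet.to_string()));
    }

    fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
        // Shadow the free function with a closure capturing `ctx` and `acc`;
        // inside its own initializer the name still resolves to the free function.
        let mut add_keyword = |kw, snippet| add_keyword(ctx, acc, kw, snippet);

        // Call sites now only pass what varies per keyword.
        add_keyword("match", "match $1 {\n    $0\n}");
        add_keyword("loop", "loop {\n    $0\n}");
    }
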
@@ -143,8 +146,6 @@ pub(crate) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
     };
 
     add_keyword(
-        ctx,
-        acc,
         "return",
         match (ctx.can_be_stmt, fn_def.ret_type().is_some()) {
             (true, true) => "return $0;",
@@ -161,15 +162,12 @@ fn add_keyword(ctx: &CompletionContext, acc: &mut Completions, kw: &str, snippet
 
     match ctx.config.snippet_cap {
         Some(cap) => {
-            let tmp;
-            let snippet = if snippet.ends_with('}') && ctx.incomplete_let {
+            if snippet.ends_with('}') && ctx.incomplete_let {
                 cov_mark::hit!(let_semi);
-                tmp = format!("{};", snippet);
-                &tmp
+                item.insert_snippet(cap, format!("{};", snippet));
             } else {
-                snippet
-            };
-            item.insert_snippet(cap, snippet);
+                item.insert_snippet(cap, snippet);
+            }
         }
         None => {
             item.insert_text(if snippet.contains('$') { kw } else { snippet });
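
The removed `let tmp;` was the deferred-initialization idiom: declare an uninitialized binding outside the `if` so one branch can build a `String` and hand out a borrow that lives long enough for the single `insert_snippet` call. Calling the builder in each branch, as the new code does, makes the temporary unnecessary. A minimal sketch of both shapes with a stand-in builder (the real `CompletionItem` builder also takes a `SnippetCap` argument):

    struct Builder(String);

    impl Builder {
        fn insert_snippet(&mut self, snippet: &str) {
            self.0 = snippet.to_string();
        }
    }

    fn old_shape(item: &mut Builder, snippet: &str, needs_semi: bool) {
        // Deferred initialization: `tmp` is declared before the `if`, so the borrow
        // `&tmp` produced in the first branch outlives the branch itself.
        let tmp;
        let snippet: &str = if needs_semi {
            tmp = format!("{};", snippet);
            &tmp
        } else {
            snippet
        };
        item.insert_snippet(snippet);
    }

    fn new_shape(item: &mut Builder, snippet: &str, needs_semi: bool) {
        // Simpler: call the builder in each branch and skip the temporary entirely.
        if needs_semi {
            item.insert_snippet(&format!("{};", snippet));
        } else {
            item.insert_snippet(snippet);
        }
    }
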
@@ -232,21 +230,21 @@ mod tests {
         check(
             r"m$0",
             expect![[r#"
+                kw pub(crate)
+                kw pub
+                kw unsafe
                 kw fn
+                kw const
+                kw type
                 kw use
                 kw impl
                 kw trait
+                kw static
+                kw extern
+                kw mod
                 kw enum
                 kw struct
                 kw union
-                kw mod
-                kw const
-                kw type
-                kw static
-                kw extern
-                kw unsafe
-                kw pub(crate)
-                kw pub
             "#]],
         );
     }
|
@ -256,10 +254,16 @@ mod tests {
|
||||||
check(
|
check(
|
||||||
r"fn quux() { $0 }",
|
r"fn quux() { $0 }",
|
||||||
expect![[r#"
|
expect![[r#"
|
||||||
|
kw unsafe
|
||||||
kw fn
|
kw fn
|
||||||
|
kw const
|
||||||
|
kw type
|
||||||
kw use
|
kw use
|
||||||
kw impl
|
kw impl
|
||||||
kw trait
|
kw trait
|
||||||
|
kw static
|
||||||
|
kw extern
|
||||||
|
kw mod
|
||||||
kw match
|
kw match
|
||||||
kw while
|
kw while
|
||||||
kw while let
|
kw while let
|
||||||
|
@@ -268,12 +272,6 @@ mod tests {
                 kw if let
                 kw for
                 kw let
-                kw mod
-                kw const
-                kw type
-                kw static
-                kw extern
-                kw unsafe
                 kw return
             "#]],
         );
@@ -284,10 +282,16 @@ mod tests {
         check(
             r"fn quux() { if true { $0 } }",
             expect![[r#"
+                kw unsafe
                 kw fn
+                kw const
+                kw type
                 kw use
                 kw impl
                 kw trait
+                kw static
+                kw extern
+                kw mod
                 kw match
                 kw while
                 kw while let
@@ -296,12 +300,6 @@ mod tests {
                 kw if let
                 kw for
                 kw let
-                kw mod
-                kw const
-                kw type
-                kw static
-                kw extern
-                kw unsafe
                 kw return
             "#]],
         );
@@ -312,10 +310,16 @@ mod tests {
         check(
             r#"fn quux() { if true { () } $0 }"#,
             expect![[r#"
+                kw unsafe
                 kw fn
+                kw const
+                kw type
                 kw use
                 kw impl
                 kw trait
+                kw static
+                kw extern
+                kw mod
                 kw match
                 kw while
                 kw while let
@@ -326,12 +330,6 @@ mod tests {
                 kw let
                 kw else
                 kw else if
-                kw mod
-                kw const
-                kw type
-                kw static
-                kw extern
-                kw unsafe
                 kw return
             "#]],
         );
@@ -353,6 +351,7 @@ fn quux() -> i32 {
 }
 "#,
             expect![[r#"
+                kw unsafe
                 kw match
                 kw while
                 kw while let
@@ -360,7 +359,6 @@ fn quux() -> i32 {
                 kw if
                 kw if let
                 kw for
-                kw unsafe
                 kw return
             "#]],
         );
@@ -371,10 +369,10 @@ fn quux() -> i32 {
         check(
             r"trait My { $0 }",
             expect![[r#"
+                kw unsafe
                 kw fn
                 kw const
                 kw type
-                kw unsafe
             "#]],
         );
     }
@@ -384,12 +382,12 @@ fn quux() -> i32 {
         check(
             r"impl My { $0 }",
             expect![[r#"
+                kw pub(crate)
+                kw pub
+                kw unsafe
                 kw fn
                 kw const
                 kw type
-                kw unsafe
-                kw pub(crate)
-                kw pub
             "#]],
         );
     }
@@ -399,12 +397,12 @@ fn quux() -> i32 {
         check(
             r"impl My { #[foo] $0 }",
             expect![[r#"
+                kw pub(crate)
+                kw pub
+                kw unsafe
                 kw fn
                 kw const
                 kw type
-                kw unsafe
-                kw pub(crate)
-                kw pub
             "#]],
         );
     }
@@ -414,10 +412,16 @@ fn quux() -> i32 {
         check(
             r"fn my() { loop { $0 } }",
             expect![[r#"
+                kw unsafe
                 kw fn
+                kw const
+                kw type
                 kw use
                 kw impl
                 kw trait
+                kw static
+                kw extern
+                kw mod
                 kw match
                 kw while
                 kw while let
@@ -426,12 +430,6 @@ fn quux() -> i32 {
                 kw if let
                 kw for
                 kw let
-                kw mod
-                kw const
-                kw type
-                kw static
-                kw extern
-                kw unsafe
                 kw continue
                 kw break
                 kw return
@@ -288,6 +288,10 @@ impl<'a> CompletionContext<'a> {
         matches!(self.completion_location, Some(ImmediateLocation::ItemList))
     }
 
+    pub(crate) fn expects_expression(&self) -> bool {
+        self.is_expr
+    }
+
     pub(crate) fn has_block_expr_parent(&self) -> bool {
         matches!(self.completion_location, Some(ImmediateLocation::BlockExpr))
     }
@@ -316,7 +320,7 @@ impl<'a> CompletionContext<'a> {
 
     fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) {
         let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
-        let syntax_element = NodeOrToken::Token(fake_ident_token.clone());
+        let syntax_element = NodeOrToken::Token(fake_ident_token);
         self.previous_token = previous_token(syntax_element.clone());
         self.in_loop_body = is_in_loop_body(syntax_element.clone());
         self.is_match_arm = is_match_arm(syntax_element.clone());
@@ -338,8 +342,6 @@ impl<'a> CompletionContext<'a> {
         let fn_is_prev = self.previous_token_is(T![fn]);
         let for_is_prev2 = for_is_prev2(syntax_element.clone());
         self.no_completion_required = (fn_is_prev && !inside_impl_trait_block) || for_is_prev2;
-
-        self.completion_location = determine_location(fake_ident_token);
     }
 
     fn fill_impl_def(&mut self) {
@@ -465,6 +467,7 @@ impl<'a> CompletionContext<'a> {
             Some(it) => it,
             None => return,
         };
+        self.completion_location = determine_location(&name_like);
         match name_like {
             ast::NameLike::Lifetime(lifetime) => {
                 self.classify_lifetime(original_file, lifetime, offset);
@@ -24,12 +24,12 @@ pub(crate) enum ImmediateLocation {
     ItemList,
 }
 
-pub(crate) fn determine_location(tok: SyntaxToken) -> Option<ImmediateLocation> {
+pub(crate) fn determine_location(name_like: &ast::NameLike) -> Option<ImmediateLocation> {
     // First walk the element we are completing up to its highest node that has the same text range
     // as the element so that we can check in what context it immediately lies. We only do this for
     // NameRef -> Path as that's the only thing that makes sense to being "expanded" semantically.
     // We only wanna do this if the NameRef is the last segment of the path.
-    let node = match tok.parent().and_then(ast::NameLike::cast)? {
+    let node = match name_like {
         ast::NameLike::NameRef(name_ref) => {
             if let Some(segment) = name_ref.syntax().parent().and_then(ast::PathSegment::cast) {
                 let p = segment.parent_path();
@@ -93,7 +93,8 @@ pub(crate) fn determine_location(tok: SyntaxToken) -> Option<ImmediateLocation>
 #[cfg(test)]
 fn check_location(code: &str, loc: ImmediateLocation) {
     check_pattern_is_applicable(code, |e| {
-        assert_eq!(determine_location(e.into_token().expect("Expected a token")), Some(loc));
+        let name = &e.parent().and_then(ast::NameLike::cast).expect("Expected a namelike");
+        assert_eq!(determine_location(name), Some(loc));
         true
     });
 }
@@ -199,6 +200,11 @@ fn test_has_impl_as_prev_sibling() {
     check_pattern_is_applicable(r"impl A w$0 {}", |it| has_prev_sibling(it, IMPL));
 }
 
+#[test]
+fn test_has_trait_as_prev_sibling() {
+    check_pattern_is_applicable(r"trait A w$0 {}", |it| has_prev_sibling(it, TRAIT));
+}
+
 pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool {
     element
         .ancestors()
@@ -12,7 +12,7 @@ use ide_db::{
 use itertools::Itertools;
 use stdx::{format_to, trim_indent};
 use syntax::{AstNode, NodeOrToken, SyntaxElement};
-use test_utils::{assert_eq_text, RangeOrOffset};
+use test_utils::assert_eq_text;
 
 use crate::{item::CompletionKind, CompletionConfig, CompletionItem};
 
@@ -36,10 +36,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
     let mut database = RootDatabase::default();
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    let offset = match range_or_offset {
-        RangeOrOffset::Range(_) => panic!(),
-        RangeOrOffset::Offset(it) => it,
-    };
+    let offset = range_or_offset.expect_offset();
     (database, FilePosition { file_id, offset })
 }
 
@@ -52,10 +49,11 @@ pub(crate) fn do_completion_with_config(
     code: &str,
     kind: CompletionKind,
 ) -> Vec<CompletionItem> {
-    let mut kind_completions: Vec<CompletionItem> =
-        get_all_items(config, code).into_iter().filter(|c| c.completion_kind == kind).collect();
-    kind_completions.sort_by(|l, r| l.label().cmp(r.label()));
-    kind_completions
+    get_all_items(config, code)
+        .into_iter()
+        .filter(|c| c.completion_kind == kind)
+        .sorted_by(|l, r| l.label().cmp(r.label()))
+        .collect()
 }
 
 pub(crate) fn completion_list(code: &str, kind: CompletionKind) -> String {
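
`do_completion_with_config` now stays in iterator land: `sorted_by` comes from the `itertools::Itertools` trait this file already imports (see the hunk above), so filtering, sorting, and collecting happen in one chain instead of collecting into a mutable `Vec` and sorting it in place. A small stand-alone sketch of the same shape, assuming the itertools crate is available:

    use itertools::Itertools;

    struct Item {
        label: String,
        kind: u8,
    }

    // Same shape as the rewritten do_completion_with_config: filter, sort by label,
    // collect once at the end.
    fn items_of_kind(items: Vec<Item>, kind: u8) -> Vec<Item> {
        items
            .into_iter()
            .filter(|it| it.kind == kind)
            .sorted_by(|l, r| l.label.cmp(&r.label))
            .collect()
    }
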
@@ -1,6 +1,5 @@
 use base_db::{fixture::ChangeFixture, FilePosition};
 use expect_test::{expect, Expect};
-use test_utils::RangeOrOffset;
 
 use crate::RootDatabase;
 
@@ -10,10 +9,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
     let mut database = RootDatabase::default();
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    let offset = match range_or_offset {
-        RangeOrOffset::Range(_) => panic!(),
-        RangeOrOffset::Offset(it) => it,
-    };
+    let offset = range_or_offset.expect_offset();
     (database, FilePosition { file_id, offset })
 }
 
@@ -2,7 +2,6 @@ use base_db::{fixture::ChangeFixture, FilePosition};
 use expect_test::{expect, Expect};
 use hir::Semantics;
 use syntax::ast::{self, AstNode};
-use test_utils::RangeOrOffset;
 
 use crate::RootDatabase;
 
@@ -12,10 +11,7 @@ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
     let mut database = RootDatabase::default();
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
-    let offset = match range_or_offset {
-        RangeOrOffset::Range(_) => panic!(),
-        RangeOrOffset::Offset(it) => it,
-    };
+    let offset = range_or_offset.expect_offset();
     (database, FilePosition { file_id, offset })
 }
 
@@ -96,6 +96,21 @@ pub enum RangeOrOffset {
     Offset(TextSize),
 }
 
+impl RangeOrOffset {
+    pub fn expect_offset(self) -> TextSize {
+        match self {
+            RangeOrOffset::Offset(it) => it,
+            RangeOrOffset::Range(_) => panic!("expected an offset but got a range instead"),
+        }
+    }
+    pub fn expect_range(self) -> TextRange {
+        match self {
+            RangeOrOffset::Range(it) => it,
+            RangeOrOffset::Offset(_) => panic!("expected a range but got an offset"),
+        }
+    }
+}
+
 impl From<RangeOrOffset> for TextRange {
     fn from(selection: RangeOrOffset) -> Self {
         match selection {