Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-30 22:01:37 +00:00
Merge #1031
1031: Move most things out of ra_ide_api_light r=matklad a=detrumi

This moves everything except `structure` out of `ra_ide_api_light`. So this PR and #1019 finish up #1009; whichever is merged last should probably remove the `ra_ide_api_light` crate. Also, `LocalEdit` was removed since it wasn't used any more.

Co-authored-by: Wilco Kusee <wilcokusee@gmail.com>
commit 18a8f48039
8 changed files with 222 additions and 236 deletions
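For downstream users of `ra_ide_api`, the visible effect of this move is mostly a change of entry points and import paths: the relocated features are reached through `Analysis` and the crate's own re-exports rather than through `ra_ide_api_light`. A minimal sketch of such a caller, assuming the re-exports added in the lib.rs hunks below (the function itself is hypothetical):

use ra_ide_api::{Analysis, FilePosition, Severity};

// Hypothetical caller: jump to the brace matching the one under the cursor.
fn brace_jump(analysis: &Analysis, position: FilePosition) {
    // `matching_brace` is now implemented in ra_ide_api's own matching_brace
    // module (new file below) instead of delegating to ra_ide_api_light.
    if let Some(offset) = analysis.matching_brace(position) {
        println!("matching brace at {:?}", offset);
    }
    // `Severity` likewise comes from the new local diagnostics module.
    let _weak = Severity::WeakWarning;
}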
@@ -1,6 +1,5 @@
use itertools::Itertools;
use hir::{Problem, source_binder};
use ra_ide_api_light::Severity;
use ra_db::SourceDatabase;
use ra_syntax::{
    Location, SourceFile, SyntaxKind, TextRange, SyntaxNode,
@@ -11,6 +10,12 @@ use ra_text_edit::{TextEdit, TextEditBuilder};

use crate::{Diagnostic, FileId, FileSystemEdit, SourceChange, SourceFileEdit, db::RootDatabase};

#[derive(Debug, Copy, Clone)]
pub enum Severity {
    Error,
    WeakWarning,
}

pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
    let source_file = db.parse(file_id);
    let mut res = Vec::new();
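The hunks above appear to come from crates/ra_ide_api/src/diagnostics.rs (note the `diagnostics` function and the `diagnostics::Severity` re-export further down): the `Severity` import from `ra_ide_api_light` is dropped and the enum is defined locally. Purely as an illustration of the relocated type, a consumer could map it to a display label; the mapping below is an assumption for the example, not something this PR adds:

use ra_ide_api::Severity;

// Illustrative only: render the two severity levels defined above as strings.
fn severity_label(severity: Severity) -> &'static str {
    match severity {
        Severity::Error => "error",
        Severity::WeakWarning => "weak warning",
    }
}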
@@ -37,6 +37,8 @@ mod line_index
mod folding_ranges;
mod line_index_utils;
mod join_lines;
mod typing;
mod matching_brace;

#[cfg(test)]
mod marks;
@@ -69,10 +71,10 @@ pub use crate::{
    line_index::{LineIndex, LineCol},
    line_index_utils::translate_offset_with_edit,
    folding_ranges::{Fold, FoldKind},
    syntax_highlighting::HighlightedRange,
    diagnostics::Severity,
};
pub use ra_ide_api_light::{
    HighlightedRange, Severity, StructureNode, LocalEdit,
};
pub use ra_ide_api_light::StructureNode;
pub use ra_db::{
    Canceled, CrateGraph, CrateId, FileId, FilePosition, FileRange, SourceRootId,
    Edition
@@ -266,7 +268,7 @@ impl Analysis {
    /// supported).
    pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> {
        let file = self.db.parse(position.file_id);
        ra_ide_api_light::matching_brace(&file, position.offset)
        matching_brace::matching_brace(&file, position.offset)
    }

    /// Returns a syntax tree represented as `String`, for debug purposes.
@@ -294,9 +296,7 @@ impl Analysis {
    /// Returns an edit which should be applied when opening a new line, fixing
    /// up minor stuff like continuing the comment.
    pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> {
        let file = self.db.parse(position.file_id);
        let edit = ra_ide_api_light::on_enter(&file, position.offset)?;
        Some(SourceChange::from_local_edit(position.file_id, edit))
        typing::on_enter(&self.db, position)
    }

    /// Returns an edit which should be applied after `=` was typed. Primarily,
@@ -304,15 +304,18 @@ impl Analysis {
    // FIXME: use a snippet completion instead of this hack here.
    pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> {
        let file = self.db.parse(position.file_id);
        let edit = ra_ide_api_light::on_eq_typed(&file, position.offset)?;
        Some(SourceChange::from_local_edit(position.file_id, edit))
        let edit = typing::on_eq_typed(&file, position.offset)?;
        Some(SourceChange {
            label: "add semicolon".to_string(),
            source_file_edits: vec![SourceFileEdit { edit, file_id: position.file_id }],
            file_system_edits: vec![],
            cursor_position: None,
        })
    }

    /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately.
    pub fn on_dot_typed(&self, position: FilePosition) -> Option<SourceChange> {
        let file = self.db.parse(position.file_id);
        let edit = ra_ide_api_light::on_dot_typed(&file, position.offset)?;
        Some(SourceChange::from_local_edit(position.file_id, edit))
        typing::on_dot_typed(&self.db, position)
    }

    /// Returns a tree representation of symbols in the file. Useful to draw a
@@ -434,18 +437,6 @@ impl Analysis {
    }
}

impl SourceChange {
    pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange {
        let file_edit = SourceFileEdit { file_id, edit: edit.edit };
        SourceChange {
            label: edit.label,
            source_file_edits: vec![file_edit],
            file_system_edits: vec![],
            cursor_position: edit.cursor_position.map(|offset| FilePosition { offset, file_id }),
        }
    }
}

#[test]
fn analysis_is_send() {
    fn is_send<T: Send>() {}
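These hunks appear to be from the crate root (crates/ra_ide_api/src/lib.rs): the `typing` and `matching_brace` modules are declared, the `ra_ide_api_light` re-exports shrink to just `StructureNode`, and the `Analysis` entry points now call into the local modules. With `LocalEdit` and the `SourceChange::from_local_edit` helper deleted in the last hunk, callers assemble a `SourceChange` inline, as `on_eq_typed` does above. A condensed sketch of that pattern for a single-file edit, assuming the field layout shown in the hunks and that the fields are constructible from outside the crate:

use ra_ide_api::{FileId, SourceChange, SourceFileEdit};
use ra_syntax::TextUnit;
use ra_text_edit::TextEditBuilder;

// Sketch: a SourceChange that inserts ";" at `offset` of `file_id`, mirroring
// the inline construction that replaced SourceChange::from_local_edit.
fn insert_semicolon(file_id: FileId, offset: TextUnit) -> SourceChange {
    let mut builder = TextEditBuilder::default();
    builder.insert(offset, ";".to_string());
    SourceChange {
        label: "add semicolon".to_string(),
        source_file_edits: vec![SourceFileEdit { file_id, edit: builder.finish() }],
        file_system_edits: vec![],
        cursor_position: None,
    }
}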
45 crates/ra_ide_api/src/matching_brace.rs Normal file
@@ -0,0 +1,45 @@
use ra_syntax::{
    SourceFile, TextUnit,
    algo::find_leaf_at_offset,
    SyntaxKind::{self, *},
    ast::AstNode,
};

pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
    const BRACES: &[SyntaxKind] =
        &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE];
    let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset)
        .filter_map(|node| {
            let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
            Some((node, idx))
        })
        .next()?;
    let parent = brace_node.parent()?;
    let matching_kind = BRACES[brace_idx ^ 1];
    let matching_node = parent.children().find(|node| node.kind() == matching_kind)?;
    Some(matching_node.range().start())
}

#[cfg(test)]
mod tests {
    use test_utils::{add_cursor, assert_eq_text, extract_offset};

    use super::*;

    #[test]
    fn test_matching_brace() {
        fn do_check(before: &str, after: &str) {
            let (pos, before) = extract_offset(before);
            let file = SourceFile::parse(&before);
            let new_pos = match matching_brace(&file, pos) {
                None => pos,
                Some(pos) => pos,
            };
            let actual = add_cursor(&before, new_pos);
            assert_eq_text!(after, &actual);
        }

        do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }");
    }

}
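One detail of `matching_brace` above worth spelling out: `BRACES` lists each opening bracket immediately followed by its closing partner, so the partner of the token at index `idx` is at `idx ^ 1` (xor with 1 flips the lowest bit, pairing 0↔1, 2↔3, 4↔5, 6↔7). A tiny self-contained demonstration of that indexing trick, using plain strings rather than the real `SyntaxKind` values:

// Demo of the pairing used by matching_brace: openers and closers are adjacent,
// so `idx ^ 1` maps every bracket to its partner.
fn main() {
    let braces = ["{", "}", "[", "]", "(", ")", "<", ">"];
    for (idx, brace) in braces.iter().enumerate() {
        println!("{} pairs with {}", brace, braces[idx ^ 1]);
    }
    assert_eq!(braces[4 ^ 1], ")"); // "(" sits at index 4; 4 ^ 1 == 5 is ")"
}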
34 crates/ra_ide_api/src/snapshots/tests__highlighting.snap Normal file
@@ -0,0 +1,34 @@
---
created: "2019-03-23T16:20:31.394314144Z"
creator: insta@0.7.1
source: crates/ra_ide_api/src/syntax_highlighting.rs
expression: result
---
Ok(
    [
        HighlightedRange {
            range: [1; 11),
            tag: "comment"
        },
        HighlightedRange {
            range: [12; 14),
            tag: "keyword"
        },
        HighlightedRange {
            range: [15; 19),
            tag: "function"
        },
        HighlightedRange {
            range: [29; 37),
            tag: "macro"
        },
        HighlightedRange {
            range: [38; 50),
            tag: "string"
        },
        HighlightedRange {
            range: [52; 54),
            tag: "literal"
        }
    ]
)
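This snapshot is written by insta from the `test_highlighting` test in the syntax_highlighting.rs diff that follows; each `range` is a byte range into the test's input string, which begins with a newline inside the raw string. A quick sanity check of the first two entries, with that input copied into a standalone snippet:

// [1; 11) should cover "// comment" and [12; 14) the `fn` keyword.
fn main() {
    let text = "\n// comment\nfn main() {}\n    println!(\"Hello, {}!\", 92);\n";
    assert_eq!(&text[1..11], "// comment");
    assert_eq!(&text[12..14], "fn");
}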
@@ -1,12 +1,81 @@
use ra_syntax::AstNode;
use rustc_hash::FxHashSet;

use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*};
use ra_db::SourceDatabase;

use crate::{
    FileId, HighlightedRange,
    db::RootDatabase,
};
use crate::{FileId, db::RootDatabase};

#[derive(Debug)]
pub struct HighlightedRange {
    pub range: TextRange,
    pub tag: &'static str,
}

pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
    let source_file = db.parse(file_id);
    ra_ide_api_light::highlight(source_file.syntax())

    // Visited nodes to handle highlighting priorities
    let mut highlighted = FxHashSet::default();
    let mut res = Vec::new();
    for node in source_file.syntax().descendants() {
        if highlighted.contains(&node) {
            continue;
        }
        let tag = match node.kind() {
            COMMENT => "comment",
            STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string",
            ATTR => "attribute",
            NAME_REF => "text",
            NAME => "function",
            INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal",
            LIFETIME => "parameter",
            k if k.is_keyword() => "keyword",
            _ => {
                if let Some(macro_call) = ast::MacroCall::cast(node) {
                    if let Some(path) = macro_call.path() {
                        if let Some(segment) = path.segment() {
                            if let Some(name_ref) = segment.name_ref() {
                                highlighted.insert(name_ref.syntax());
                                let range_start = name_ref.syntax().range().start();
                                let mut range_end = name_ref.syntax().range().end();
                                for sibling in path.syntax().siblings(Direction::Next) {
                                    match sibling.kind() {
                                        EXCL | IDENT => range_end = sibling.range().end(),
                                        _ => (),
                                    }
                                }
                                res.push(HighlightedRange {
                                    range: TextRange::from_to(range_start, range_end),
                                    tag: "macro",
                                })
                            }
                        }
                    }
                }
                continue;
            }
        };
        res.push(HighlightedRange { range: node.range(), tag })
    }
    res
}

#[cfg(test)]
mod tests {
    use insta::assert_debug_snapshot_matches;

    use crate::mock_analysis::single_file;

    #[test]
    fn test_highlighting() {
        let (analysis, file_id) = single_file(
            r#"
// comment
fn main() {}
    println!("Hello, {}!", 92);
"#,
        );
        let result = analysis.highlight(file_id);
        assert_debug_snapshot_matches!("highlighting", result);
    }
}
419 crates/ra_ide_api/src/typing.rs Normal file
@@ -0,0 +1,419 @@
use ra_syntax::{
    AstNode, SourceFile, SyntaxKind::*,
    SyntaxNode, TextUnit, TextRange,
    algo::{find_node_at_offset, find_leaf_at_offset, LeafAtOffset},
    ast::{self, AstToken},
};
use ra_fmt::leading_indent;
use ra_text_edit::{TextEdit, TextEditBuilder};
use ra_db::{FilePosition, SourceDatabase};
use crate::{db::RootDatabase, SourceChange, SourceFileEdit};

pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
    let file = db.parse(position.file_id);
    let comment = find_leaf_at_offset(file.syntax(), position.offset)
        .left_biased()
        .and_then(ast::Comment::cast)?;

    if let ast::CommentFlavor::Multiline = comment.flavor() {
        return None;
    }

    let prefix = comment.prefix();
    if position.offset
        < comment.syntax().range().start() + TextUnit::of_str(prefix) + TextUnit::from(1)
    {
        return None;
    }

    let indent = node_indent(&file, comment.syntax())?;
    let inserted = format!("\n{}{} ", indent, prefix);
    let cursor_position = position.offset + TextUnit::of_str(&inserted);
    let mut edit = TextEditBuilder::default();
    edit.insert(position.offset, inserted);
    Some(SourceChange {
        label: "on enter".to_string(),
        source_file_edits: vec![SourceFileEdit { edit: edit.finish(), file_id: position.file_id }],
        file_system_edits: vec![],
        cursor_position: Some(FilePosition { offset: cursor_position, file_id: position.file_id }),
    })
}

fn node_indent<'a>(file: &'a SourceFile, node: &SyntaxNode) -> Option<&'a str> {
    let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) {
        LeafAtOffset::Between(l, r) => {
            assert!(r == node);
            l
        }
        LeafAtOffset::Single(n) => {
            assert!(n == node);
            return Some("");
        }
        LeafAtOffset::None => unreachable!(),
    };
    if ws.kind() != WHITESPACE {
        return None;
    }
    let text = ws.leaf_text().unwrap();
    let pos = text.as_str().rfind('\n').map(|it| it + 1).unwrap_or(0);
    Some(&text[pos..])
}

pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
    assert_eq!(file.syntax().text().char_at(eq_offset), Some('='));
    let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
    if let_stmt.has_semi() {
        return None;
    }
    if let Some(expr) = let_stmt.initializer() {
        let expr_range = expr.syntax().range();
        if expr_range.contains(eq_offset) && eq_offset != expr_range.start() {
            return None;
        }
        if file.syntax().text().slice(eq_offset..expr_range.start()).contains('\n') {
            return None;
        }
    } else {
        return None;
    }
    let offset = let_stmt.syntax().range().end();
    let mut edit = TextEditBuilder::default();
    edit.insert(offset, ";".to_string());
    Some(edit.finish())
}

pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
    let file = db.parse(position.file_id);
    assert_eq!(file.syntax().text().char_at(position.offset), Some('.'));

    let whitespace = find_leaf_at_offset(file.syntax(), position.offset)
        .left_biased()
        .and_then(ast::Whitespace::cast)?;

    let current_indent = {
        let text = whitespace.text();
        let newline = text.rfind('\n')?;
        &text[newline + 1..]
    };
    let current_indent_len = TextUnit::of_str(current_indent);

    // Make sure dot is a part of call chain
    let field_expr = whitespace.syntax().parent().and_then(ast::FieldExpr::cast)?;
    let prev_indent = leading_indent(field_expr.syntax())?;
    let target_indent = format!("    {}", prev_indent);
    let target_indent_len = TextUnit::of_str(&target_indent);
    if current_indent_len == target_indent_len {
        return None;
    }
    let mut edit = TextEditBuilder::default();
    edit.replace(
        TextRange::from_to(position.offset - current_indent_len, position.offset),
        target_indent.into(),
    );
    let res = SourceChange {
        label: "reindent dot".to_string(),
        source_file_edits: vec![SourceFileEdit { edit: edit.finish(), file_id: position.file_id }],
        file_system_edits: vec![],
        cursor_position: Some(FilePosition {
            offset: position.offset + target_indent_len - current_indent_len
                + TextUnit::of_char('.'),
            file_id: position.file_id,
        }),
    };
    Some(res)
}

#[cfg(test)]
mod tests {
    use test_utils::{add_cursor, assert_eq_text, extract_offset};

    use crate::mock_analysis::single_file;

    use super::*;

    #[test]
    fn test_on_eq_typed() {
        fn type_eq(before: &str, after: &str) {
            let (offset, before) = extract_offset(before);
            let mut edit = TextEditBuilder::default();
            edit.insert(offset, "=".to_string());
            let before = edit.finish().apply(&before);
            let file = SourceFile::parse(&before);
            if let Some(result) = on_eq_typed(&file, offset) {
                let actual = result.apply(&before);
                assert_eq_text!(after, &actual);
            } else {
                assert_eq_text!(&before, after)
            };
        }

        // do_check(r"
        // fn foo() {
        //     let foo =<|>
        // }
        // ", r"
        // fn foo() {
        //     let foo =;
        // }
        // ");
        type_eq(
            r"
fn foo() {
    let foo <|> 1 + 1
}
",
            r"
fn foo() {
    let foo = 1 + 1;
}
",
        );
        // do_check(r"
        // fn foo() {
        //     let foo =<|>
        //     let bar = 1;
        // }
        // ", r"
        // fn foo() {
        //     let foo =;
        //     let bar = 1;
        // }
        // ");
    }

    fn type_dot(before: &str, after: &str) {
        let (offset, before) = extract_offset(before);
        let mut edit = TextEditBuilder::default();
        edit.insert(offset, ".".to_string());
        let before = edit.finish().apply(&before);
        let (analysis, file_id) = single_file(&before);
        if let Some(result) = analysis.on_dot_typed(FilePosition { offset, file_id }) {
            assert_eq!(result.source_file_edits.len(), 1);
            let actual = result.source_file_edits[0].edit.apply(&before);
            assert_eq_text!(after, &actual);
        } else {
            assert_eq_text!(&before, after)
        };
    }

    #[test]
    fn indents_new_chain_call() {
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                <|>
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .
            }
            ",
        );
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                <|>
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .
            }
            ",
        )
    }

    #[test]
    fn indents_new_chain_call_with_semi() {
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                <|>;
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .;
            }
            ",
        );
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                <|>;
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .;
            }
            ",
        )
    }

    #[test]
    fn indents_continued_chain_call() {
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .first()
                <|>
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .first()
                    .
            }
            ",
        );
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .first()
                <|>
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
                self.child_impl(db, name)
                    .first()
                    .
            }
            ",
        );
    }

    #[test]
    fn indents_middle_of_chain_call() {
        type_dot(
            r"
            fn source_impl() {
                let var = enum_defvariant_list().unwrap()
                <|>
                    .nth(92)
                    .unwrap();
            }
            ",
            r"
            fn source_impl() {
                let var = enum_defvariant_list().unwrap()
                    .
                    .nth(92)
                    .unwrap();
            }
            ",
        );
        type_dot(
            r"
            fn source_impl() {
                let var = enum_defvariant_list().unwrap()
                <|>
                    .nth(92)
                    .unwrap();
            }
            ",
            r"
            fn source_impl() {
                let var = enum_defvariant_list().unwrap()
                    .
                    .nth(92)
                    .unwrap();
            }
            ",
        );
    }

    #[test]
    fn dont_indent_freestanding_dot() {
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
            <|>
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
            .
            }
            ",
        );
        type_dot(
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
            <|>
            }
            ",
            r"
            pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
            .
            }
            ",
        );
    }

    #[test]
    fn test_on_enter() {
        fn apply_on_enter(before: &str) -> Option<String> {
            let (offset, before) = extract_offset(before);
            let (analysis, file_id) = single_file(&before);
            let result = analysis.on_enter(FilePosition { offset, file_id })?;

            assert_eq!(result.source_file_edits.len(), 1);
            let actual = result.source_file_edits[0].edit.apply(&before);
            let actual = add_cursor(&actual, result.cursor_position.unwrap().offset);
            Some(actual)
        }

        fn do_check(before: &str, after: &str) {
            let actual = apply_on_enter(before).unwrap();
            assert_eq_text!(after, &actual);
        }

        fn do_check_noop(text: &str) {
            assert!(apply_on_enter(text).is_none())
        }

        do_check(
            r"
/// Some docs<|>
fn foo() {
}
",
            r"
/// Some docs
/// <|>
fn foo() {
}
",
        );
        do_check(
            r"
impl S {
    /// Some<|> docs.
    fn foo() {}
}
",
            r"
impl S {
    /// Some
    /// <|> docs.
    fn foo() {}
}
",
        );
        do_check_noop(r"<|>//! docz");
    }
}
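One piece of `on_dot_typed` above that is easy to misread is the cursor arithmetic: the edit replaces the current leading indentation with the target indentation (the previous line's indent plus four spaces), so the cursor has to move by the difference in indent lengths, plus one for the `.` that was just typed. A small integer-only sketch of that bookkeeping, using plain usize in place of TextUnit:

// Mirror of the offset arithmetic in on_dot_typed, with made-up numbers.
fn main() {
    let dot_offset = 100;        // offset of the freshly typed '.'
    let current_indent_len = 16; // whitespace currently before the '.'
    let target_indent_len = 20;  // previous line's indent + 4 spaces
    // The edit replaces [dot_offset - current_indent_len, dot_offset) with the
    // target indent, so everything from the '.' onwards shifts by the difference.
    let new_cursor = dot_offset + target_indent_len - current_indent_len + 1; // +1 for '.'
    assert_eq!(new_cursor, 105);
}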