rust-lang/rust-analyzer (mirror of https://github.com/rust-lang/rust-analyzer.git)
Add cov_marks to insert_use tests

parent 2c8f1b5c30
commit b290cd5782

7 changed files with 77 additions and 42 deletions
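For readers unfamiliar with the mechanism: cov_mark ties a branch in the implementation to the test that is supposed to exercise it. This commit adds cov_mark::hit! calls to the insertion branches of insert_use_ and matching cov_mark::check! calls to the tests. A minimal, self-contained sketch of the pattern (illustrative names, assuming cov_mark is available as a dependency; this is not code from the commit):

// The hit! side lives in the code under test and marks the branch of interest.
fn classify(n: i32) -> &'static str {
    if n == 0 {
        cov_mark::hit!(classify_zero);
        return "zero";
    }
    "nonzero"
}

// The check! side lives in the test; the guard it creates fails the test
// if the mark was never hit while the guard was alive.
#[test]
fn zero_branch_is_covered() {
    cov_mark::check!(classify_zero);
    assert_eq!(classify(0), "zero");
}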
@@ -93,7 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
     let range = ctx.sema.original_range(&syntax_under_caret).range;
     let group_label = group_label(import_assets.import_candidate());
-    let scope = ImportScope::find_insert_use_container(&syntax_under_caret, &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(&syntax_under_caret, &ctx.sema)?;
     for import in proposed_imports {
         acc.add_group(
             &group_label,
@@ -5,7 +5,7 @@ use hir::{Module, ModuleDef, Name, Variant};
 use ide_db::{
     defs::Definition,
     helpers::{
-        insert_use::{insert_use, ImportScope},
+        insert_use::{insert_use, ImportScope, InsertUseConfig},
         mod_path_to_ast,
     },
     search::FileReference,

@@ -79,16 +79,8 @@ pub(crate) fn extract_struct_from_enum_variant(
                 &variant_hir_name,
                 references,
             );
-            processed.into_iter().for_each(|(segment, node, import)| {
-                if let Some((scope, path)) = import {
-                    insert_use(&scope, mod_path_to_ast(&path), ctx.config.insert_use);
-                }
-                ted::insert_raw(
-                    ted::Position::before(segment.syntax()),
-                    make::path_from_text(&format!("{}", segment)).clone_for_update().syntax(),
-                );
-                ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
-                ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+            processed.into_iter().for_each(|(path, node, import)| {
+                apply_references(ctx.config.insert_use, path, node, import)
             });
         }
         builder.edit_file(ctx.frange.file_id);

@@ -103,21 +95,12 @@ pub(crate) fn extract_struct_from_enum_variant(
             &variant_hir_name,
             references,
         );
-        processed.into_iter().for_each(|(segment, node, import)| {
-            if let Some((scope, path)) = import {
-                insert_use(&scope, mod_path_to_ast(&path), ctx.config.insert_use);
-            }
-            ted::insert_raw(
-                ted::Position::before(segment.syntax()),
-                make::path_from_text(&format!("{}", segment)).clone_for_update().syntax(),
-            );
-            ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
-            ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+        processed.into_iter().for_each(|(path, node, import)| {
+            apply_references(ctx.config.insert_use, path, node, import)
         });
     }

-    let def = create_struct_def(variant_name.clone(), &field_list, enum_ast.visibility())
-        .unwrap();
+    let def = create_struct_def(variant_name.clone(), &field_list, enum_ast.visibility());
     let start_offset = &variant.parent_enum().syntax().clone();
     ted::insert_raw(ted::Position::before(start_offset), def.syntax());
     ted::insert_raw(ted::Position::before(start_offset), &make::tokens::blank_line());

@@ -167,7 +150,7 @@ fn create_struct_def(
     variant_name: ast::Name,
     field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
     visibility: Option<ast::Visibility>,
-) -> Option<ast::Struct> {
+) -> ast::Struct {
     let pub_vis = Some(make::visibility_pub());
     let field_list = match field_list {
         Either::Left(field_list) => {

@@ -184,7 +167,7 @@ fn create_struct_def(
         .into(),
     };

-    Some(make::struct_(visibility, variant_name, None, field_list).clone_for_update())
+    make::struct_(visibility, variant_name, None, field_list).clone_for_update()
 }

 fn update_variant(variant: &ast::Variant) -> Option<()> {

@@ -199,6 +182,23 @@ fn update_variant(variant: &ast::Variant) -> Option<()> {
     Some(())
 }

+fn apply_references(
+    insert_use_cfg: InsertUseConfig,
+    segment: ast::PathSegment,
+    node: SyntaxNode,
+    import: Option<(ImportScope, hir::ModPath)>,
+) {
+    if let Some((scope, path)) = import {
+        insert_use(&scope, mod_path_to_ast(&path), insert_use_cfg);
+    }
+    ted::insert_raw(
+        ted::Position::before(segment.syntax()),
+        make::path_from_text(&format!("{}", segment)).clone_for_update().syntax(),
+    );
+    ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
+    ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+}
+
 fn process_references(
     ctx: &AssistContext,
     visited_modules: &mut FxHashSet<Module>,

@@ -207,6 +207,8 @@ fn process_references(
     variant_hir_name: &Name,
     refs: Vec<FileReference>,
 ) -> Vec<(ast::PathSegment, SyntaxNode, Option<(ImportScope, hir::ModPath)>)> {
+    // we have to recollect here eagerly as we are about to edit the tree we need to calculate the changes
+    // and corresponding nodes up front
     refs.into_iter()
         .flat_map(|reference| {
             let (segment, scope_node, module) =

@@ -220,8 +222,7 @@ fn process_references(
             if let Some(mut mod_path) = mod_path {
                 mod_path.pop_segment();
                 mod_path.push_segment(variant_hir_name.clone());
-                // uuuh this wont properly work, find_insert_use_container ascends macros so we might a get new syntax node???
-                let scope = ImportScope::find_insert_use_container(&scope_node, &ctx.sema)?;
+                let scope = ImportScope::find_insert_use_container(&scope_node)?;
                 visited_modules.insert(module);
                 return Some((segment, scope_node, Some((scope, mod_path))));
             }
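The comment added to process_references above ("we have to recollect here eagerly ...") describes a general pattern: when the edits you are about to make would invalidate what you are iterating over, snapshot the work items first and mutate afterwards. A small self-contained illustration of the same idea on a plain String (not rust-analyzer code):

fn main() {
    let mut text = String::from("a b a");
    // Collect every match position up front ...
    let hits: Vec<usize> = text.match_indices('a').map(|(idx, _)| idx).collect();
    // ... then mutate; editing while still iterating match_indices would not borrow-check.
    for &idx in hits.iter().rev() {
        text.replace_range(idx..idx + 1, "X");
    }
    assert_eq!(text, "X b X");
}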
@@ -31,7 +31,7 @@ pub(crate) fn replace_qualified_name_with_use(
     }

     let target = path.syntax().text_range();
-    let scope = ImportScope::find_insert_use_container(path.syntax(), &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(path.syntax(), &ctx.sema)?;
     let syntax = scope.as_syntax_node();
     acc.add(
         AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite),

@@ -42,15 +42,15 @@ pub(crate) fn replace_qualified_name_with_use(
             // affected (that is, all paths inside the node we added the `use` to).
             let syntax = builder.make_mut(syntax.clone());
             if let Some(ref import_scope) = ImportScope::from(syntax.clone()) {
-                insert_use(import_scope, path.clone(), ctx.config.insert_use);
+                shorten_paths(&syntax, &path.clone_for_update());
+                insert_use(import_scope, path, ctx.config.insert_use);
             }
-            shorten_paths(syntax.clone(), &path.clone_for_update());
         },
     )
 }

 /// Adds replacements to `re` that shorten `path` in all descendants of `node`.
-fn shorten_paths(node: SyntaxNode, path: &ast::Path) {
+fn shorten_paths(node: &SyntaxNode, path: &ast::Path) {
     for child in node.children() {
         match_ast! {
             match child {

@@ -59,14 +59,10 @@ fn shorten_paths(node: SyntaxNode, path: &ast::Path) {
                 ast::Use(_it) => continue,
                 // Don't descend into submodules, they don't have the same `use` items in scope.
                 ast::Module(_it) => continue,
-                ast::Path(p) => {
-                    match maybe_replace_path(p.clone(), path.clone()) {
-                        Some(()) => {},
-                        None => shorten_paths(p.syntax().clone(), path),
-                    }
+                ast::Path(p) => if maybe_replace_path(p.clone(), path.clone()).is_none() {
+                    shorten_paths(p.syntax(), path);
                 },
-                _ => shorten_paths(child, path),
+                _ => shorten_paths(&child, path),
             }
         }
     }
@@ -132,7 +132,7 @@ pub(crate) fn import_on_the_fly(acc: &mut Completions, ctx: &CompletionContext)

     let user_input_lowercased = potential_import_name.to_lowercase();
     let import_assets = import_assets(ctx, potential_import_name)?;
-    let import_scope = ImportScope::find_insert_use_container(
+    let import_scope = ImportScope::find_insert_use_container_with_macros(
         position_for_import(ctx, Some(import_assets.import_candidate()))?,
         &ctx.sema,
     )?;

@@ -179,7 +179,7 @@ pub fn resolve_completion_edits(
 ) -> Option<Vec<TextEdit>> {
     let ctx = CompletionContext::new(db, position, config)?;
     let position_for_import = position_for_import(&ctx, None)?;
-    let scope = ImportScope::find_insert_use_container(position_for_import, &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(position_for_import, &ctx.sema)?;

     let current_module = ctx.sema.scope(position_for_import).module()?;
     let current_crate = current_module.krate();
@@ -38,13 +38,18 @@ impl ImportScope {
     }

     /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
-    pub fn find_insert_use_container(
+    pub fn find_insert_use_container_with_macros(
         position: &SyntaxNode,
         sema: &Semantics<'_, RootDatabase>,
     ) -> Option<Self> {
         sema.ancestors_with_macros(position.clone()).find_map(Self::from)
     }

+    /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
+    pub fn find_insert_use_container(position: &SyntaxNode) -> Option<Self> {
+        std::iter::successors(Some(position.clone()), SyntaxNode::parent).find_map(Self::from)
+    }
+
     pub fn as_syntax_node(&self) -> &SyntaxNode {
         match self {
             ImportScope::File(file) => file.syntax(),
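The new find_insert_use_container keeps the lookup purely syntactic: instead of sema.ancestors_with_macros, it walks plain parent links via std::iter::successors. For reference, successors yields the seed first and then keeps applying the closure until it returns None, which is exactly the node, parent, ..., root walk used here. A tiny standalone illustration with numbers standing in for syntax nodes (not code from the commit):

fn main() {
    // Seed 40; "parent" = subtract 16 until that underflows.
    let chain: Vec<u32> = std::iter::successors(Some(40u32), |n| n.checked_sub(16)).collect();
    assert_eq!(chain, vec![40, 24, 8]);
    // In find_insert_use_container the seed is the node at `position`, the closure is
    // SyntaxNode::parent, and find_map(Self::from) picks the first ancestor that
    // converts into an ImportScope.
}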
@@ -446,8 +451,10 @@ fn insert_use_(

     if !group_imports {
         if let Some((_, _, node)) = path_node_iter.last() {
+            cov_mark::hit!(insert_no_grouping_last);
             ted::insert(ted::Position::after(node), use_item.syntax());
         } else {
+            cov_mark::hit!(insert_no_grouping_last2);
             ted::insert(ted::Position::first_child_of(scope_syntax), make::tokens::blank_line());
             ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax());
         }

@@ -471,10 +478,12 @@ fn insert_use_(
         });

     if let Some((.., node)) = post_insert {
+        cov_mark::hit!(insert_group);
         // insert our import before that element
         return ted::insert(ted::Position::before(node), use_item.syntax());
     }
     if let Some(node) = last {
+        cov_mark::hit!(insert_group_last);
         // there is no element after our new import, so append it to the end of the group
         return ted::insert(ted::Position::after(node), use_item.syntax());
     }

@@ -487,6 +496,7 @@ fn insert_use_(
         .inspect(|(.., node)| last = Some(node.clone()))
         .find(|(p, ..)| ImportGroup::new(p) > group);
     if let Some((.., node)) = post_group {
+        cov_mark::hit!(insert_group_new_group);
         ted::insert(ted::Position::before(&node), use_item.syntax());
         if let Some(node) = algo::non_trivia_sibling(node.into(), Direction::Prev) {
             ted::insert(ted::Position::after(node), make::tokens::single_newline());

@@ -495,6 +505,7 @@ fn insert_use_(
     }
     // there is no such group, so append after the last one
     if let Some(node) = last {
+        cov_mark::hit!(insert_group_no_group);
         ted::insert(ted::Position::after(&node), use_item.syntax());
         ted::insert(ted::Position::after(node), make::tokens::single_newline());
         return;

@@ -508,22 +519,26 @@ fn insert_use_(
         })
         .last()
     {
+        cov_mark::hit!(insert_group_empty_inner_attr);
         ted::insert(ted::Position::after(&last_inner_element), use_item.syntax());
         ted::insert(ted::Position::after(last_inner_element), make::tokens::single_newline());
         return;
     }
     match scope {
         ImportScope::File(_) => {
+            cov_mark::hit!(insert_group_empty_file);
             ted::insert(ted::Position::first_child_of(scope_syntax), make::tokens::blank_line());
             ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax())
         }
         // don't insert the imports before the item list's opening curly brace
         ImportScope::Module(item_list) => match item_list.l_curly_token() {
             Some(b) => {
+                cov_mark::hit!(insert_group_empty_module);
                 ted::insert(ted::Position::after(&b), make::tokens::single_newline());
                 ted::insert(ted::Position::after(&b), use_item.syntax());
             }
             None => {
+                // This should never happens, broken module syntax node
                 ted::insert(
                     ted::Position::first_child_of(scope_syntax),
                     make::tokens::blank_line(),
@@ -5,6 +5,7 @@ use test_utils::assert_eq_text;

 #[test]
 fn insert_not_group() {
+    cov_mark::check!(insert_no_grouping_last);
     check(
         "use external_crate2::bar::A",
         r"

@@ -26,6 +27,21 @@ use external_crate2::bar::A;",
     );
 }

+#[test]
+fn insert_not_group_empty() {
+    cov_mark::check!(insert_no_grouping_last2);
+    check(
+        "use external_crate2::bar::A",
+        r"",
+        r"use external_crate2::bar::A;
+
+",
+        None,
+        false,
+        false,
+    );
+}
+
 #[test]
 fn insert_existing() {
     check_full("std::fs", "use std::fs;", "use std::fs;")

@@ -65,6 +81,7 @@ fn insert_start_indent() {

 #[test]
 fn insert_middle() {
+    cov_mark::check!(insert_group);
     check_none(
         "std::bar::EE",
         r"

@@ -101,6 +118,7 @@ fn insert_middle_indent() {

 #[test]
 fn insert_end() {
+    cov_mark::check!(insert_group_last);
     check_none(
         "std::bar::ZZ",
         r"

@@ -199,6 +217,7 @@ fn insert_first_matching_group() {

 #[test]
 fn insert_missing_group_std() {
+    cov_mark::check!(insert_group_new_group);
     check_none(
         "std::fmt",
         r"

@@ -214,6 +233,7 @@ fn insert_missing_group_std() {

 #[test]
 fn insert_missing_group_self() {
+    cov_mark::check!(insert_group_no_group);
     check_none(
         "self::fmt",
         r"

@@ -240,6 +260,7 @@ fn main() {}",

 #[test]
 fn insert_empty_file() {
+    cov_mark::check!(insert_group_empty_file);
     // empty files will get two trailing newlines
     // this is due to the test case insert_no_imports above
     check_full(

@@ -253,6 +274,7 @@ fn insert_empty_file() {

 #[test]
 fn insert_empty_module() {
+    cov_mark::check!(insert_group_empty_module);
     check(
         "foo::bar",
         "mod x {}",

@@ -267,6 +289,7 @@ fn insert_empty_module() {

 #[test]
 fn insert_after_inner_attr() {
+    cov_mark::check!(insert_group_empty_inner_attr);
     check_full(
         "foo::bar",
         r"#![allow(unused_imports)]",
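The value of the check! calls added above is that they fail loudly: if a later refactor routes insertion through a different branch of insert_use_, the paired test panics even if the textual output still happens to look right. A hedged, self-contained illustration of that failure mode (illustrative mark name, not a test from this commit):

#[test]
#[should_panic]
fn unhit_mark_fails_the_test() {
    // The guard created by check! panics when it is dropped because nothing
    // in this test body ever calls cov_mark::hit!(never_hit_anywhere).
    cov_mark::check!(never_hit_anywhere);
}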