Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-30 13:51:31 +00:00
Auto merge of #15874 - DropDemBits:structured-snippet-migrate-4, r=Veykril
internal: Migrate assists to the structured snippet API, part 4

Continuing from #15260

Migrates the following assists:
- `add_turbo_fish`
- `add_type_ascription`
- `destructure_tuple_binding`
- `destructure_tuple_binding_in_subpattern`

I did this a while ago, but forgot to make a PR for the changes until now. 😅
commit 535eb0da9d

5 changed files with 433 additions and 141 deletions
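To make the migration concrete, here is a minimal sketch of the pattern these assists move to, mirroring the `add_type_ascription` hunk below. The helper function and import paths are assumptions for illustration, not code from this commit:

```rust
use ide_db::source_change::SourceChangeBuilder;
use syntax::ast::{self, make};

// Hypothetical helper: instead of splicing text like ": ${0:_}" at a computed
// offset, mutate the AST and then mark the inserted node as a snippet placeholder.
fn ascribe_placeholder_type(
    edit: &mut SourceChangeBuilder,
    cap: ide_db::SnippetCap,
    let_stmt: ast::LetStmt,
) {
    // Work on the mutable copy of the node tracked by the builder.
    let let_stmt = edit.make_mut(let_stmt);
    // Build a `_` type and attach it structurally (`let x: _ = ...`).
    let placeholder_ty = make::ty_placeholder().clone_for_update();
    let_stmt.set_ty(Some(placeholder_ty.clone()));
    // Render the freshly inserted node as a `${n:_}` placeholder in the final edit.
    edit.add_placeholder_snippet(cap, placeholder_ty);
}
```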
@@ -1,6 +1,9 @@
+use either::Either;
 use ide_db::defs::{Definition, NameRefClass};
-use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxKind, T};
+use syntax::{
+    ast::{self, make, HasArgList},
+    ted, AstNode,
+};
 
 use crate::{
     assist_context::{AssistContext, Assists},
@@ -25,21 +28,45 @@ use crate::{
 // }
 // ```
 pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
-    let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| {
-        let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
-        if arg_list.args().next().is_some() {
-            return None;
-        }
-        cov_mark::hit!(add_turbo_fish_after_call);
-        cov_mark::hit!(add_type_ascription_after_call);
-        arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT)
-    })?;
-    let next_token = ident.next_token()?;
-    if next_token.kind() == T![::] {
+    let turbofish_target =
+        ctx.find_node_at_offset::<ast::PathSegment>().map(Either::Left).or_else(|| {
+            let callable_expr = ctx.find_node_at_offset::<ast::CallableExpr>()?;
+
+            if callable_expr.arg_list()?.args().next().is_some() {
+                return None;
+            }
+
+            cov_mark::hit!(add_turbo_fish_after_call);
+            cov_mark::hit!(add_type_ascription_after_call);
+
+            match callable_expr {
+                ast::CallableExpr::Call(it) => {
+                    let ast::Expr::PathExpr(path) = it.expr()? else {
+                        return None;
+                    };
+
+                    Some(Either::Left(path.path()?.segment()?))
+                }
+                ast::CallableExpr::MethodCall(it) => Some(Either::Right(it)),
+            }
+        })?;
+
+    let already_has_turbofish = match &turbofish_target {
+        Either::Left(path_segment) => path_segment.generic_arg_list().is_some(),
+        Either::Right(method_call) => method_call.generic_arg_list().is_some(),
+    };
+
+    if already_has_turbofish {
         cov_mark::hit!(add_turbo_fish_one_fish_is_enough);
         return None;
     }
-    let name_ref = ast::NameRef::cast(ident.parent()?)?;
+
+    let name_ref = match &turbofish_target {
+        Either::Left(path_segment) => path_segment.name_ref()?,
+        Either::Right(method_call) => method_call.name_ref()?,
+    };
+    let ident = name_ref.ident_token()?;
+
     let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
         NameRefClass::Definition(def) => def,
         NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => {
@@ -58,20 +85,27 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
 
     if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
         if let_stmt.colon_token().is_none() {
-            let type_pos = let_stmt.pat()?.syntax().last_token()?.text_range().end();
-            let semi_pos = let_stmt.syntax().last_token()?.text_range().end();
+            if let_stmt.pat().is_none() {
+                return None;
+            }
 
             acc.add(
                 AssistId("add_type_ascription", AssistKind::RefactorRewrite),
                 "Add `: _` before assignment operator",
                 ident.text_range(),
-                |builder| {
+                |edit| {
+                    let let_stmt = edit.make_mut(let_stmt);
+
                     if let_stmt.semicolon_token().is_none() {
-                        builder.insert(semi_pos, ";");
+                        ted::append_child(let_stmt.syntax(), make::tokens::semicolon());
                     }
-                    match ctx.config.snippet_cap {
-                        Some(cap) => builder.insert_snippet(cap, type_pos, ": ${0:_}"),
-                        None => builder.insert(type_pos, ": _"),
+
+                    let placeholder_ty = make::ty_placeholder().clone_for_update();
+
+                    let_stmt.set_ty(Some(placeholder_ty.clone()));
+
+                    if let Some(cap) = ctx.config.snippet_cap {
+                        edit.add_placeholder_snippet(cap, placeholder_ty);
                     }
                 },
             )?
@@ -91,38 +125,46 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
         AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
         "Add `::<>`",
         ident.text_range(),
-        |builder| {
-            builder.trigger_signature_help();
-            match ctx.config.snippet_cap {
-                Some(cap) => {
-                    let fish_head = get_snippet_fish_head(number_of_arguments);
-                    let snip = format!("::<{fish_head}>");
-                    builder.insert_snippet(cap, ident.text_range().end(), snip)
+        |edit| {
+            edit.trigger_signature_help();
+
+            let new_arg_list = match turbofish_target {
+                Either::Left(path_segment) => {
+                    edit.make_mut(path_segment).get_or_create_generic_arg_list()
                 }
-                None => {
-                    let fish_head = std::iter::repeat("_").take(number_of_arguments).format(", ");
-                    let snip = format!("::<{fish_head}>");
-                    builder.insert(ident.text_range().end(), snip);
+                Either::Right(method_call) => {
+                    edit.make_mut(method_call).get_or_create_generic_arg_list()
                 }
+            };
+
+            let fish_head = get_fish_head(number_of_arguments).clone_for_update();
+
+            // Note: we need to replace the `new_arg_list` instead of being able to use something like
+            // `GenericArgList::add_generic_arg` as `PathSegment::get_or_create_generic_arg_list`
+            // always creates a non-turbofish form generic arg list.
+            ted::replace(new_arg_list.syntax(), fish_head.syntax());
+
+            if let Some(cap) = ctx.config.snippet_cap {
+                for arg in fish_head.generic_args() {
+                    edit.add_placeholder_snippet(cap, arg)
+                }
             }
         },
     )
 }
 
-/// This will create a snippet string with tabstops marked
-fn get_snippet_fish_head(number_of_arguments: usize) -> String {
-    let mut fish_head = (1..number_of_arguments)
-        .format_with("", |i, f| f(&format_args!("${{{i}:_}}, ")))
-        .to_string();
-
-    // tabstop 0 is a special case and always the last one
-    fish_head.push_str("${0:_}");
-    fish_head
-}
+/// This will create a turbofish generic arg list corresponding to the number of arguments
+fn get_fish_head(number_of_arguments: usize) -> ast::GenericArgList {
+    let args = (0..number_of_arguments).map(|_| make::type_arg(make::ty_placeholder()).into());
+    make::turbofish_generic_arg_list(args)
+}
 
 #[cfg(test)]
 mod tests {
-    use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
+    use crate::tests::{
+        check_assist, check_assist_by_label, check_assist_not_applicable,
+        check_assist_not_applicable_by_label,
+    };
 
     use super::*;
 
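A small illustration of what the new `get_fish_head` builds, assuming the rust-analyzer `syntax` crate at this commit (the `main` wrapper is hypothetical, not part of the change):

```rust
use syntax::ast::make;

fn main() {
    // Two generic parameters -> a real `ast::GenericArgList` holding two `_` args,
    // which the assist then marks as snippet placeholders one by one.
    let args = (0..2).map(|_| make::type_arg(make::ty_placeholder()).into());
    let fish_head = make::turbofish_generic_arg_list(args);
    // Renders with the leading `::`, typically as `::<_, _>`.
    println!("{fish_head}");
}
```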
@@ -363,6 +405,20 @@ fn main() {
         );
     }
 
+    #[test]
+    fn add_type_ascription_missing_pattern() {
+        check_assist_not_applicable_by_label(
+            add_turbo_fish,
+            r#"
+fn make<T>() -> T {}
+fn main() {
+    let = make$0()
+}
+"#,
+            "Add `: _` before assignment operator",
+        );
+    }
+
     #[test]
     fn add_turbo_fish_function_lifetime_parameter() {
         check_assist(
@@ -3,10 +3,12 @@ use ide_db::{
     defs::Definition,
     search::{FileReference, SearchScope, UsageSearchResult},
 };
+use itertools::Itertools;
 use syntax::{
-    ast::{self, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
-    TextRange,
+    ast::{self, make, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
+    ted, T,
 };
+use text_edit::TextRange;
 
 use crate::assist_context::{AssistContext, Assists, SourceChangeBuilder};
 
@@ -61,27 +63,36 @@ pub(crate) fn destructure_tuple_binding_impl(
         acc.add(
             AssistId("destructure_tuple_binding_in_sub_pattern", AssistKind::RefactorRewrite),
             "Destructure tuple in sub-pattern",
-            data.range,
-            |builder| {
-                edit_tuple_assignment(ctx, builder, &data, true);
-                edit_tuple_usages(&data, builder, ctx, true);
-            },
+            data.ident_pat.syntax().text_range(),
+            |edit| destructure_tuple_edit_impl(ctx, edit, &data, true),
         );
     }
 
     acc.add(
         AssistId("destructure_tuple_binding", AssistKind::RefactorRewrite),
         if with_sub_pattern { "Destructure tuple in place" } else { "Destructure tuple" },
-        data.range,
-        |builder| {
-            edit_tuple_assignment(ctx, builder, &data, false);
-            edit_tuple_usages(&data, builder, ctx, false);
-        },
+        data.ident_pat.syntax().text_range(),
+        |edit| destructure_tuple_edit_impl(ctx, edit, &data, false),
     );
 
     Some(())
 }
 
+fn destructure_tuple_edit_impl(
+    ctx: &AssistContext<'_>,
+    edit: &mut SourceChangeBuilder,
+    data: &TupleData,
+    in_sub_pattern: bool,
+) {
+    let assignment_edit = edit_tuple_assignment(ctx, edit, &data, in_sub_pattern);
+    let current_file_usages_edit = edit_tuple_usages(&data, edit, ctx, in_sub_pattern);
+
+    assignment_edit.apply();
+    if let Some(usages_edit) = current_file_usages_edit {
+        usages_edit.into_iter().for_each(|usage_edit| usage_edit.apply(edit))
+    }
+}
+
 fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleData> {
     if ident_pat.at_token().is_some() {
         // Cannot destructure pattern with sub-pattern:
@@ -109,7 +120,6 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
     }
 
     let name = ident_pat.name()?.to_string();
-    let range = ident_pat.syntax().text_range();
 
     let usages = ctx.sema.to_def(&ident_pat).map(|def| {
         Definition::Local(def)
@@ -122,7 +132,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
         .map(|i| generate_name(ctx, i, &name, &ident_pat, &usages))
         .collect::<Vec<_>>();
 
-    Some(TupleData { ident_pat, range, ref_type, field_names, usages })
+    Some(TupleData { ident_pat, ref_type, field_names, usages })
 }
 
 fn generate_name(
@@ -142,72 +152,100 @@ enum RefType {
 }
 struct TupleData {
     ident_pat: IdentPat,
-    // name: String,
-    range: TextRange,
     ref_type: Option<RefType>,
     field_names: Vec<String>,
-    // field_types: Vec<Type>,
     usages: Option<UsageSearchResult>,
 }
 fn edit_tuple_assignment(
     ctx: &AssistContext<'_>,
-    builder: &mut SourceChangeBuilder,
+    edit: &mut SourceChangeBuilder,
     data: &TupleData,
     in_sub_pattern: bool,
-) {
+) -> AssignmentEdit {
+    let ident_pat = edit.make_mut(data.ident_pat.clone());
+
     let tuple_pat = {
         let original = &data.ident_pat;
         let is_ref = original.ref_token().is_some();
         let is_mut = original.mut_token().is_some();
-        let fields = data.field_names.iter().map(|name| {
-            ast::Pat::from(ast::make::ident_pat(is_ref, is_mut, ast::make::name(name)))
-        });
-        ast::make::tuple_pat(fields)
+        let fields = data
+            .field_names
+            .iter()
+            .map(|name| ast::Pat::from(make::ident_pat(is_ref, is_mut, make::name(name))));
+        make::tuple_pat(fields).clone_for_update()
     };
 
-    let add_cursor = |text: &str| {
-        // place cursor on first tuple item
-        let first_tuple = &data.field_names[0];
-        text.replacen(first_tuple, &format!("$0{first_tuple}"), 1)
-    };
+    if let Some(cap) = ctx.config.snippet_cap {
+        // place cursor on first tuple name
+        if let Some(ast::Pat::IdentPat(first_pat)) = tuple_pat.fields().next() {
+            edit.add_tabstop_before(
+                cap,
+                first_pat.name().expect("first ident pattern should have a name"),
+            )
+        }
+    }
 
-    // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
-    if in_sub_pattern {
-        let text = format!(" @ {tuple_pat}");
-        match ctx.config.snippet_cap {
-            Some(cap) => {
-                let snip = add_cursor(&text);
-                builder.insert_snippet(cap, data.range.end(), snip);
-            }
-            None => builder.insert(data.range.end(), text),
-        };
-    } else {
-        let text = tuple_pat.to_string();
-        match ctx.config.snippet_cap {
-            Some(cap) => {
-                let snip = add_cursor(&text);
-                builder.replace_snippet(cap, data.range, snip);
-            }
-            None => builder.replace(data.range, text),
-        };
-    }
-}
+    AssignmentEdit { ident_pat, tuple_pat, in_sub_pattern }
+}
+struct AssignmentEdit {
+    ident_pat: ast::IdentPat,
+    tuple_pat: ast::TuplePat,
+    in_sub_pattern: bool,
+}
+
+impl AssignmentEdit {
+    fn apply(self) {
+        // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
+        if self.in_sub_pattern {
+            self.ident_pat.set_pat(Some(self.tuple_pat.into()))
+        } else {
+            ted::replace(self.ident_pat.syntax(), self.tuple_pat.syntax())
+        }
+    }
+}
 
 fn edit_tuple_usages(
     data: &TupleData,
-    builder: &mut SourceChangeBuilder,
+    edit: &mut SourceChangeBuilder,
     ctx: &AssistContext<'_>,
     in_sub_pattern: bool,
-) {
-    if let Some(usages) = data.usages.as_ref() {
-        for (file_id, refs) in usages.iter() {
-            builder.edit_file(*file_id);
-
-            for r in refs {
-                edit_tuple_usage(ctx, builder, r, data, in_sub_pattern);
-            }
-        }
-    }
-}
+) -> Option<Vec<EditTupleUsage>> {
+    let mut current_file_usages = None;
+
+    if let Some(usages) = data.usages.as_ref() {
+        // We need to collect edits first before actually applying them
+        // as mapping nodes to their mutable node versions requires an
+        // unmodified syntax tree.
+        //
+        // We also defer editing usages in the current file first since
+        // tree mutation in the same file breaks when `builder.edit_file`
+        // is called
+
+        if let Some((_, refs)) = usages.iter().find(|(file_id, _)| **file_id == ctx.file_id()) {
+            current_file_usages = Some(
+                refs.iter()
+                    .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+                    .collect_vec(),
+            );
+        }
+
+        for (file_id, refs) in usages.iter() {
+            if *file_id == ctx.file_id() {
+                continue;
+            }
+
+            edit.edit_file(*file_id);
+
+            let tuple_edits = refs
+                .iter()
+                .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+                .collect_vec();
+
+            tuple_edits.into_iter().for_each(|tuple_edit| tuple_edit.apply(edit))
+        }
+    }
+
+    current_file_usages
+}
 fn edit_tuple_usage(
     ctx: &AssistContext<'_>,
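The comment in the hunk above is the key constraint: nodes have to be mapped to their mutable counterparts while the tree is still unmodified, so the usage edits are collected first and applied in a second pass. A self-contained toy version of that two-phase shape (plain Rust over strings, not the rust-analyzer API):

```rust
// Illustration only: resolve everything against the unmodified input first,
// then apply the mutations.
fn rewrite(text: &str, targets: &[&str], replacement: &str) -> String {
    // Phase 1: collect edit ranges while `text` is still unmodified.
    let mut edits: Vec<(usize, usize)> = targets
        .iter()
        .filter_map(|t| text.find(t).map(|start| (start, start + t.len())))
        .collect();
    // Phase 2: apply from the back so earlier offsets stay valid.
    edits.sort();
    let mut out = text.to_string();
    for (start, end) in edits.into_iter().rev() {
        out.replace_range(start..end, replacement);
    }
    out
}

fn main() {
    assert_eq!(rewrite("(a, b).0 + (a, b).1", &["(a, b).0", "(a, b).1"], "x"), "x + x");
}
```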
@@ -215,25 +253,14 @@ fn edit_tuple_usage(
     builder: &mut SourceChangeBuilder,
     usage: &FileReference,
     data: &TupleData,
     in_sub_pattern: bool,
-) {
+) -> Option<EditTupleUsage> {
     match detect_tuple_index(usage, data) {
-        Some(index) => edit_tuple_field_usage(ctx, builder, data, index),
-        None => {
-            if in_sub_pattern {
-                cov_mark::hit!(destructure_tuple_call_with_subpattern);
-                return;
-            }
-
-            // no index access -> make invalid -> requires handling by user
-            // -> put usage in block comment
-            //
-            // Note: For macro invocations this might result in still valid code:
-            // When a macro accepts the tuple as argument, as well as no arguments at all,
-            // uncommenting the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
-            // But this is an unlikely case. Usually the resulting macro call will become erroneous.
-            builder.insert(usage.range.start(), "/*");
-            builder.insert(usage.range.end(), "*/");
+        Some(index) => Some(edit_tuple_field_usage(ctx, builder, data, index)),
+        None if in_sub_pattern => {
+            cov_mark::hit!(destructure_tuple_call_with_subpattern);
+            return None;
         }
+        None => Some(EditTupleUsage::NoIndex(usage.range)),
     }
 }
@@ -242,19 +269,47 @@ fn edit_tuple_field_usage(
     builder: &mut SourceChangeBuilder,
     data: &TupleData,
     index: TupleIndex,
-) {
+) -> EditTupleUsage {
     let field_name = &data.field_names[index.index];
+    let field_name = make::expr_path(make::ext::ident_path(field_name));
+
     if data.ref_type.is_some() {
-        let ref_data = handle_ref_field_usage(ctx, &index.field_expr);
-        builder.replace(ref_data.range, ref_data.format(field_name));
+        let (replace_expr, ref_data) = handle_ref_field_usage(ctx, &index.field_expr);
+        let replace_expr = builder.make_mut(replace_expr);
+        EditTupleUsage::ReplaceExpr(replace_expr, ref_data.wrap_expr(field_name))
     } else {
-        builder.replace(index.range, field_name);
+        let field_expr = builder.make_mut(index.field_expr);
+        EditTupleUsage::ReplaceExpr(field_expr.into(), field_name)
     }
 }
+enum EditTupleUsage {
+    /// no index access -> make invalid -> requires handling by user
+    /// -> put usage in block comment
+    ///
+    /// Note: For macro invocations this might result in still valid code:
+    /// When a macro accepts the tuple as argument, as well as no arguments at all,
+    /// uncommenting the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
+    /// But this is an unlikely case. Usually the resulting macro call will become erroneous.
+    NoIndex(TextRange),
+    ReplaceExpr(ast::Expr, ast::Expr),
+}
+
+impl EditTupleUsage {
+    fn apply(self, edit: &mut SourceChangeBuilder) {
+        match self {
+            EditTupleUsage::NoIndex(range) => {
+                edit.insert(range.start(), "/*");
+                edit.insert(range.end(), "*/");
+            }
+            EditTupleUsage::ReplaceExpr(target_expr, replace_with) => {
+                ted::replace(target_expr.syntax(), replace_with.clone_for_update().syntax())
+            }
+        }
+    }
+}
 
 struct TupleIndex {
     index: usize,
-    range: TextRange,
     field_expr: FieldExpr,
 }
 fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIndex> {
@@ -296,7 +351,7 @@ fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIn
                 return None;
             }
 
-            Some(TupleIndex { index: idx, range: field_expr.syntax().text_range(), field_expr })
+            Some(TupleIndex { index: idx, field_expr })
         } else {
             // tuple index out of range
             None
@@ -307,32 +362,34 @@ fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIn
 }
 
 struct RefData {
-    range: TextRange,
     needs_deref: bool,
     needs_parentheses: bool,
 }
 impl RefData {
-    fn format(&self, field_name: &str) -> String {
-        match (self.needs_deref, self.needs_parentheses) {
-            (true, true) => format!("(*{field_name})"),
-            (true, false) => format!("*{field_name}"),
-            (false, true) => format!("({field_name})"),
-            (false, false) => field_name.to_string(),
+    fn wrap_expr(&self, mut expr: ast::Expr) -> ast::Expr {
+        if self.needs_deref {
+            expr = make::expr_prefix(T![*], expr);
         }
+
+        if self.needs_parentheses {
+            expr = make::expr_paren(expr);
+        }
+
+        return expr;
     }
 }
-fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> RefData {
+fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> (ast::Expr, RefData) {
     let s = field_expr.syntax();
-    let mut ref_data =
-        RefData { range: s.text_range(), needs_deref: true, needs_parentheses: true };
+    let mut ref_data = RefData { needs_deref: true, needs_parentheses: true };
+    let mut target_node = field_expr.clone().into();
 
     let parent = match s.parent().map(ast::Expr::cast) {
         Some(Some(parent)) => parent,
         Some(None) => {
             ref_data.needs_parentheses = false;
-            return ref_data;
+            return (target_node, ref_data);
         }
-        None => return ref_data,
+        None => return (target_node, ref_data),
     };
 
     match parent {
@@ -342,7 +399,7 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
             // there might be a ref outside: `&(t.0)` -> can be removed
             if let Some(it) = it.syntax().parent().and_then(ast::RefExpr::cast) {
                 ref_data.needs_deref = false;
-                ref_data.range = it.syntax().text_range();
+                target_node = it.into();
             }
         }
         ast::Expr::RefExpr(it) => {
@@ -351,8 +408,8 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
             ref_data.needs_parentheses = false;
             // might be surrounded by parens -> can be removed too
             match it.syntax().parent().and_then(ast::ParenExpr::cast) {
-                Some(parent) => ref_data.range = parent.syntax().text_range(),
-                None => ref_data.range = it.syntax().text_range(),
+                Some(parent) => target_node = parent.into(),
+                None => target_node = it.into(),
             };
         }
         // higher precedence than deref `*`
@@ -414,7 +471,7 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
         }
     };
 
-    ref_data
+    (target_node, ref_data)
 }
 
 #[cfg(test)]
@@ -100,6 +100,11 @@ pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) {
     check(assist, ra_fixture, ExpectedResult::NotApplicable, None);
 }
 
+#[track_caller]
+pub(crate) fn check_assist_not_applicable_by_label(assist: Handler, ra_fixture: &str, label: &str) {
+    check(assist, ra_fixture, ExpectedResult::NotApplicable, Some(label));
+}
+
 /// Check assist in unresolved state. Useful to check assists for lazy computation.
 #[track_caller]
 pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
@@ -3,18 +3,17 @@
 use std::iter::{empty, successors};
 
 use parser::{SyntaxKind, T};
-use rowan::SyntaxElement;
 
 use crate::{
     algo::{self, neighbor},
     ast::{self, edit::IndentLevel, make, HasGenericParams},
     ted::{self, Position},
-    AstNode, AstToken, Direction,
+    AstNode, AstToken, Direction, SyntaxElement,
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken,
 };
 
-use super::HasName;
+use super::{HasArgList, HasName};
 
 pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
     fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
@@ -362,6 +361,24 @@ impl ast::PathSegment {
     }
 }
 
+impl ast::MethodCallExpr {
+    pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
+        if self.generic_arg_list().is_none() {
+            let generic_arg_list = make::turbofish_generic_arg_list(empty()).clone_for_update();
+
+            if let Some(arg_list) = self.arg_list() {
+                ted::insert_raw(
+                    ted::Position::before(arg_list.syntax()),
+                    generic_arg_list.syntax(),
+                );
+            } else {
+                ted::append_child(self.syntax(), generic_arg_list.syntax());
+            }
+        }
+        self.generic_arg_list().unwrap()
+    }
+}
+
 impl Removable for ast::UseTree {
     fn remove(&self) {
         for dir in [Direction::Next, Direction::Prev] {
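A quick usage sketch for the new `ast::MethodCallExpr::get_or_create_generic_arg_list`, assuming the rust-analyzer `syntax` crate at this commit (illustrative only, not code from the change):

```rust
use syntax::{ast, AstNode, SourceFile};

fn main() {
    // Parse a file, switch to a mutable tree, and grab the method call.
    let file = SourceFile::parse("fn f() { x.foo(); }").tree().clone_for_update();
    let call = file.syntax().descendants().find_map(ast::MethodCallExpr::cast).unwrap();
    // No generic args yet, so an empty turbofish is inserted right before the `(`.
    let list = call.get_or_create_generic_arg_list();
    assert_eq!(list.to_string(), "::<>");
    assert_eq!(call.to_string(), "x.foo::<>()");
}
```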
@@ -559,7 +576,7 @@ impl ast::AssocItemList {
                 None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
             },
         };
-        let elements: Vec<SyntaxElement<_>> = vec![
+        let elements: Vec<SyntaxElement> = vec![
             make::tokens::whitespace(&format!("{whitespace}{indent}")).into(),
             item.syntax().clone().into(),
         ];
@@ -629,6 +646,50 @@ impl ast::MatchArmList {
     }
 }
 
+impl ast::LetStmt {
+    pub fn set_ty(&self, ty: Option<ast::Type>) {
+        match ty {
+            None => {
+                if let Some(colon_token) = self.colon_token() {
+                    ted::remove(colon_token);
+                }
+
+                if let Some(existing_ty) = self.ty() {
+                    if let Some(sibling) = existing_ty.syntax().prev_sibling_or_token() {
+                        if sibling.kind() == SyntaxKind::WHITESPACE {
+                            ted::remove(sibling);
+                        }
+                    }
+
+                    ted::remove(existing_ty.syntax());
+                }
+
+                // Remove any trailing ws
+                if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+                {
+                    last.detach();
+                }
+            }
+            Some(new_ty) => {
+                if self.colon_token().is_none() {
+                    ted::insert_raw(
+                        Position::after(
+                            self.pat().expect("let stmt should have a pattern").syntax(),
+                        ),
+                        make::token(T![:]),
+                    );
+                }
+
+                if let Some(old_ty) = self.ty() {
+                    ted::replace(old_ty.syntax(), new_ty.syntax());
+                } else {
+                    ted::insert(Position::after(self.colon_token().unwrap()), new_ty.syntax());
+                }
+            }
+        }
+    }
+}
+
 impl ast::RecordExprFieldList {
     pub fn add_field(&self, field: ast::RecordExprField) {
         let is_multiline = self.syntax().text().contains_char('\n');
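A usage sketch for the new `ast::LetStmt::set_ty` (the `test_let_stmt_set_ty` test further down exercises the same API); hypothetical example, assuming the `syntax` crate:

```rust
use syntax::{ast, ast::make, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("fn f() { let a = 1; }").tree().clone_for_update();
    let let_stmt = file.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
    // Insert `: u8` after the pattern; `set_ty(None)` would remove it again.
    let_stmt.set_ty(Some(make::ty("u8").clone_for_update()));
    assert_eq!(let_stmt.to_string(), "let a: u8 = 1;");
}
```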
@@ -753,7 +814,7 @@ impl ast::VariantList {
                 None => (IndentLevel::single(), Position::last_child_of(self.syntax())),
             },
         };
-        let elements: Vec<SyntaxElement<_>> = vec![
+        let elements: Vec<SyntaxElement> = vec![
             make::tokens::whitespace(&format!("{}{indent}", "\n")).into(),
             variant.syntax().clone().into(),
             ast::make::token(T![,]).into(),
@@ -788,6 +849,53 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
     Some(())
 }
 
+impl ast::IdentPat {
+    pub fn set_pat(&self, pat: Option<ast::Pat>) {
+        match pat {
+            None => {
+                if let Some(at_token) = self.at_token() {
+                    // Remove `@ Pat`
+                    let start = at_token.clone().into();
+                    let end = self
+                        .pat()
+                        .map(|it| it.syntax().clone().into())
+                        .unwrap_or_else(|| at_token.into());
+
+                    ted::remove_all(start..=end);
+
+                    // Remove any trailing ws
+                    if let Some(last) =
+                        self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+                    {
+                        last.detach();
+                    }
+                }
+            }
+            Some(pat) => {
+                if let Some(old_pat) = self.pat() {
+                    // Replace existing pattern
+                    ted::replace(old_pat.syntax(), pat.syntax())
+                } else if let Some(at_token) = self.at_token() {
+                    // Have an `@` token but not a pattern yet
+                    ted::insert(ted::Position::after(at_token), pat.syntax());
+                } else {
+                    // Don't have an `@`, should have a name
+                    let name = self.name().unwrap();
+
+                    ted::insert_all(
+                        ted::Position::after(name.syntax()),
+                        vec![
+                            make::token(T![@]).into(),
+                            make::tokens::single_space().into(),
+                            pat.syntax().clone().into(),
+                        ],
+                    )
+                }
+            }
+        }
+    }
+}
+
 pub trait HasVisibilityEdit: ast::HasVisibility {
     fn set_visibility(&self, visbility: ast::Visibility) {
         match self.visibility() {
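Similarly for the new `ast::IdentPat::set_pat` (see `test_ident_pat_set_pat` below); hypothetical example, assuming the `syntax` crate:

```rust
use syntax::{ast, ast::make, AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("fn f() { let tup = (1, 2); }").tree().clone_for_update();
    let ident_pat = file.syntax().descendants().find_map(ast::IdentPat::cast).unwrap();
    // Attach a sub-pattern, turning `tup` into `tup @ (a, b)`, which is what the
    // destructure_tuple_binding_in_sub_pattern assist relies on.
    let fields = ["a", "b"]
        .map(|name| ast::Pat::from(make::ident_pat(false, false, make::name(name))));
    ident_pat.set_pat(Some(make::tuple_pat(fields).clone_for_update().into()));
    assert_eq!(ident_pat.to_string(), "tup @ (a, b)");
}
```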
@@ -889,6 +997,65 @@ mod tests {
         );
     }
 
+    #[test]
+    fn test_ident_pat_set_pat() {
+        #[track_caller]
+        fn check(before: &str, expected: &str, pat: Option<ast::Pat>) {
+            let pat = pat.map(|it| it.clone_for_update());
+
+            let ident_pat = ast_mut_from_text::<ast::IdentPat>(&format!("fn f() {{ {before} }}"));
+            ident_pat.set_pat(pat);
+
+            let after = ast_mut_from_text::<ast::IdentPat>(&format!("fn f() {{ {expected} }}"));
+            assert_eq!(ident_pat.to_string(), after.to_string());
+        }
+
+        // replacing
+        check("let a @ _;", "let a @ ();", Some(make::tuple_pat([]).into()));
+
+        // note: no trailing semicolon is added for the below tests since it
+        // seems to be picked up by the ident pat during error recovery?
+
+        // adding
+        check("let a ", "let a @ ()", Some(make::tuple_pat([]).into()));
+        check("let a @ ", "let a @ ()", Some(make::tuple_pat([]).into()));
+
+        // removing
+        check("let a @ ()", "let a", None);
+        check("let a @ ", "let a", None);
+    }
+
+    #[test]
+    fn test_let_stmt_set_ty() {
+        #[track_caller]
+        fn check(before: &str, expected: &str, ty: Option<ast::Type>) {
+            let ty = ty.map(|it| it.clone_for_update());
+
+            let let_stmt = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {before} }}"));
+            let_stmt.set_ty(ty);
+
+            let after = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {expected} }}"));
+            assert_eq!(let_stmt.to_string(), after.to_string(), "{let_stmt:#?}\n!=\n{after:#?}");
+        }
+
+        // adding
+        check("let a;", "let a: ();", Some(make::ty_tuple([])));
+        // no semicolon due to it being eaten during error recovery
+        check("let a:", "let a: ()", Some(make::ty_tuple([])));
+
+        // replacing
+        check("let a: u8;", "let a: ();", Some(make::ty_tuple([])));
+        check("let a: u8 = 3;", "let a: () = 3;", Some(make::ty_tuple([])));
+        check("let a: = 3;", "let a: () = 3;", Some(make::ty_tuple([])));
+
+        // removing
+        check("let a: u8;", "let a;", None);
+        check("let a:;", "let a;", None);
+
+        check("let a: u8 = 3;", "let a = 3;", None);
+        check("let a: = 3;", "let a = 3;", None);
+    }
+
     #[test]
     fn add_variant_to_empty_enum() {
         let variant = make::variant(make::name("Bar"), None).clone_for_update();
@@ -941,6 +941,13 @@ pub fn lifetime_arg(lifetime: ast::Lifetime) -> ast::LifetimeArg {
     ast_from_text(&format!("const S: T<{lifetime}> = ();"))
 }
 
+pub fn turbofish_generic_arg_list(
+    args: impl IntoIterator<Item = ast::GenericArg>,
+) -> ast::GenericArgList {
+    let args = args.into_iter().join(", ");
+    ast_from_text(&format!("const S: T::<{args}> = ();"))
+}
+
 pub(crate) fn generic_arg_list(
     args: impl IntoIterator<Item = ast::GenericArg>,
 ) -> ast::GenericArgList {
@@ -1126,7 +1133,7 @@ pub mod tokens {
 
     pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
         SourceFile::parse(
-            "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p)\n;\n\n",
+            "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\n",
         )
     });