Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 04:19:13 +00:00
Inline all format arguments where possible
This makes code more readable and concise, moving all format arguments like `format!("{}", foo)` into the more compact `format!("{foo}")` form. The change was created automatically with the following command, so there is far less chance of an accidental typo:

```
cargo clippy --fix -- -A clippy::all -W clippy::uninlined_format_args
```
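As a rough illustration (not taken from this commit), the sketch below shows the kind of rewrite `clippy::uninlined_format_args` performs: a bare identifier passed as a positional format argument is captured directly inside the braces, and any format spec such as `:?` moves inside the braces with it, while expressions like field accesses stay as separate arguments because inline captures only accept plain identifiers.

```rust
fn main() {
    let name = "rust-analyzer";
    let version = (2022, 12);

    // Before the fix: a bare identifier passed as a positional argument.
    println!("checking {}", name);
    // After the fix: the identifier is captured inline in the format string.
    println!("checking {name}");

    // Format specs move inside the braces too: `{:?}` applied to `version`
    // becomes `{version:?}`.
    println!("version = {version:?}");

    // Expressions are not inlined: inline captures only accept identifiers,
    // so a field access like `version.0` stays as a separate argument.
    println!("major component = {}", version.0);
}
```

The same transformation applies to `panic!` and `assert!` in the hunks below, since their message arguments go through the same formatting machinery.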
parent 1927c2e1d8
commit e16c76e3c3
180 changed files with 487 additions and 501 deletions
@@ -101,7 +101,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
             }
             try_cnt += 1;
             if try_cnt > 100 {
-                panic!("invocaton fixture {} cannot be generated.\n", name);
+                panic!("invocaton fixture {name} cannot be generated.\n");
             }
         }
     }

@@ -139,7 +139,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
                 }
                 None => (),
-                Some(kind) => panic!("Unhandled kind {:?}", kind),
+                Some(kind) => panic!("Unhandled kind {kind:?}"),
             },
             Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
             Op::Repeat { tokens, kind, separator } => {

@@ -237,7 +237,7 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
                 let char = match token.to_char(conv) {
                     Some(c) => c,
                     None => {
-                        panic!("Token from lexer must be single char: token = {:#?}", token);
+                        panic!("Token from lexer must be single char: token = {token:#?}");
                     }
                 };
                 tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })

@@ -19,7 +19,7 @@ fn check_punct_spacing(fixture: &str) {
             let spacing = match annotation.as_str() {
                 "Alone" => Spacing::Alone,
                 "Joint" => Spacing::Joint,
-                a => panic!("unknown annotation: {}", a),
+                a => panic!("unknown annotation: {a}"),
             };
             (token, spacing)
         })

@@ -39,7 +39,7 @@ fn check_punct_spacing(fixture: &str) {
         cursor = cursor.bump();
     }

-    assert!(annotations.is_empty(), "unchecked annotations: {:?}", annotations);
+    assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}");
 }

 #[test]

@@ -60,7 +60,7 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
             },
             tt::Leaf::Punct(punct) => {
                 let kind = SyntaxKind::from_char(punct.char)
-                    .unwrap_or_else(|| panic!("{:#?} is not a valid punct", punct));
+                    .unwrap_or_else(|| panic!("{punct:#?} is not a valid punct"));
                 res.push(kind);
                 if punct.spacing == tt::Spacing::Joint {
                     res.was_joint();