Inline all format arguments where possible

This makes the code more readable and concise,
moving all format arguments like `format!("{}", foo)`
into the more compact `format!("{foo}")` form.

The change was created automatically with the command below, so there is far
less chance of an accidental typo.

```
cargo clippy --fix -- -A clippy::all -W clippy::uninlined_format_args
```
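
As a rough illustration (the `greet` function below is hypothetical and not part of this commit), the lint only inlines arguments that are bare identifiers; expressions such as method calls cannot be inlined, which is why calls like `fmt_syntax(v)` and the `"\n"` literal remain trailing arguments in the hunks below. Inlined format arguments require Rust 1.58 or newer.

```rust
/// Hypothetical example, not from this commit: shows what the lint rewrites.
fn greet(name: &str, items: &[u32]) -> String {
    // Before: the argument is passed positionally after the format string.
    let old = format!("hello, {}!", name);
    // After `cargo clippy --fix` with `uninlined_format_args`: the identifier
    // is captured directly inside the braces.
    let new = format!("hello, {name}!");
    assert_eq!(old, new);

    // Only bare identifiers can be inlined; `items.len()` must stay positional,
    // so the fixed code mixes inlined and positional arguments.
    format!("{new} ({} items)", items.len())
}

fn main() {
    println!("{}", greet("world", &[1, 2, 3]));
}
```
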
Yuri Astrakhan 2022-12-23 13:42:58 -05:00
parent 1927c2e1d8
commit e16c76e3c3
180 changed files with 487 additions and 501 deletions

View file

@@ -616,7 +616,7 @@ fn main() {
let fmt_syntax = |syn: &SyntaxElement| match syn.kind() {
SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()),
_ => format!("{}", syn),
_ => format!("{syn}"),
};
let insertions =
@@ -637,7 +637,7 @@ fn main() {
.iter()
.sorted_by_key(|(syntax, _)| syntax.text_range().start())
.format_with("\n", |(k, v), f| {
f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v)))
f(&format!("Line {}: {k:?} -> {}", line_number(k), fmt_syntax(v)))
});
let deletions = diff

View file

@@ -87,7 +87,7 @@ impl IndentLevel {
for token in tokens {
if let Some(ws) = ast::Whitespace::cast(token) {
if ws.text().contains('\n') {
let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax()));
ted::replace(ws.syntax(), &new_ws);
}
}
@@ -103,7 +103,7 @@ impl IndentLevel {
if let Some(ws) = ast::Whitespace::cast(token) {
if ws.text().contains('\n') {
let new_ws = make::tokens::whitespace(
&ws.syntax().text().replace(&format!("\n{}", self), "\n"),
&ws.syntax().text().replace(&format!("\n{self}"), "\n"),
);
ted::replace(ws.syntax(), &new_ws);
}

View file

@@ -481,7 +481,7 @@ impl ast::AssocItemList {
},
};
let elements: Vec<SyntaxElement<_>> = vec![
make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(),
make::tokens::whitespace(&format!("{whitespace}{indent}")).into(),
item.syntax().clone().into(),
];
ted::insert_all(position, elements);
@@ -537,7 +537,7 @@ impl ast::MatchArmList {
},
};
let indent = IndentLevel::from_node(self.syntax()) + 1;
elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into());
elements.push(make::tokens::whitespace(&format!("\n{indent}")).into());
elements.push(arm.syntax().clone().into());
if needs_comma(&arm) {
ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
@@ -555,7 +555,7 @@ impl ast::RecordExprFieldList {
let is_multiline = self.syntax().text().contains_char('\n');
let whitespace = if is_multiline {
let indent = IndentLevel::from_node(self.syntax()) + 1;
make::tokens::whitespace(&format!("\n{}", indent))
make::tokens::whitespace(&format!("\n{indent}"))
} else {
make::tokens::single_space()
};
@@ -616,7 +616,7 @@ impl ast::RecordPatFieldList {
let is_multiline = self.syntax().text().contains_char('\n');
let whitespace = if is_multiline {
let indent = IndentLevel::from_node(self.syntax()) + 1;
make::tokens::whitespace(&format!("\n{}", indent))
make::tokens::whitespace(&format!("\n{indent}"))
} else {
make::tokens::single_space()
};
@@ -681,7 +681,7 @@ impl ast::VariantList {
},
};
let elements: Vec<SyntaxElement<_>> = vec![
make::tokens::whitespace(&format!("{}{}", "\n", indent)).into(),
make::tokens::whitespace(&format!("{}{indent}", "\n")).into(),
variant.syntax().clone().into(),
ast::make::token(T![,]).into(),
];
@@ -704,11 +704,11 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
match l.next_sibling_or_token() {
Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => {
if ws.next_sibling_or_token()?.into_token()? == r {
ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent)));
ted::replace(ws, make::tokens::whitespace(&format!("\n{indent}")));
}
}
Some(ws) if ws.kind() == T!['}'] => {
ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent)));
ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{indent}")));
}
_ => (),
}

View file

@@ -436,9 +436,7 @@ mod tests {
fn check_string_value<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
assert_eq!(
ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) }
.value()
.as_deref(),
ast::String { syntax: make::tokens::literal(&format!("\"{lit}\"")) }.value().as_deref(),
expected.into()
);
}
@ -461,7 +459,7 @@ bcde", "abcde",
expected: impl Into<Option<&'a [u8; N]>>,
) {
assert_eq!(
ast::ByteString { syntax: make::tokens::literal(&format!("b\"{}\"", lit)) }
ast::ByteString { syntax: make::tokens::literal(&format!("b\"{lit}\"")) }
.value()
.as_deref(),
expected.into().map(|value| &value[..])

View file

@@ -36,7 +36,7 @@ impl CheckReparse {
let delete_len = usize::from_str(lines.next()?).ok()?;
let insert = lines.next()?.to_string();
let text = lines.collect::<Vec<_>>().join("\n");
let text = format!("{}{}{}", PREFIX, text, SUFFIX);
let text = format!("{PREFIX}{text}{SUFFIX}");
text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
let delete =
TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
@@ -60,8 +60,8 @@ impl CheckReparse {
eprint!("reparsed:\n{:#?}", new_parse.tree().syntax());
eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax());
assert_eq!(
format!("{:?}", a),
format!("{:?}", b),
format!("{a:?}"),
format!("{b:?}"),
"different syntax tree produced by the full reparse"
);
}

View file

@@ -6,7 +6,7 @@ use crate::{ast, AstNode};
pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
let s = s.trim();
let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
let file = ast::SourceFile::parse(&format!("const _: () = {s};"));
let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
if expr.syntax().text() != s {
return None;

View file

@@ -157,7 +157,7 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) {
let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into());
indent.0 += 1;
return Some(make::tokens::whitespace(&format!("\n{}", indent)));
return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
}
@@ -165,7 +165,7 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) {
let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into());
indent.0 += 1;
return Some(make::tokens::whitespace(&format!("\n{}", indent)));
return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
}
@@ -200,7 +200,7 @@ fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken
if left.kind() == SyntaxKind::USE {
indent.0 = IndentLevel::from_element(right).0.max(indent.0);
}
return Some(make::tokens::whitespace(&format!("\n{}", indent)));
return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
Some(make::tokens::single_space())
}

View file

@@ -108,7 +108,7 @@ fn self_hosting_parsing() {
.into_iter()
.map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
.collect::<String>();
panic!("Parsing errors:\n{}\n", errors);
panic!("Parsing errors:\n{errors}\n");
}
}
@@ -181,6 +181,6 @@ fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
/// so this should always be correct.
fn read_text(path: &Path) -> String {
fs::read_to_string(path)
.unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
.unwrap_or_else(|_| panic!("File at {path:?} should be valid"))
.replace("\r\n", "\n")
}

View file

@@ -328,7 +328,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
fn write_doc_comment(contents: &[String], dest: &mut String) {
for line in contents {
writeln!(dest, "///{}", line).unwrap();
writeln!(dest, "///{line}").unwrap();
}
}
@@ -501,7 +501,7 @@ fn to_pascal_case(s: &str) -> String {
}
fn pluralize(s: &str) -> String {
format!("{}s", s)
format!("{s}s")
}
impl Field {
@@ -637,7 +637,7 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
let mut name = grammar[*token].name.clone();
if name != "int_number" && name != "string" {
if "[]{}()".contains(&name) {
name = format!("'{}'", name);
name = format!("'{name}'");
}
let field = Field::Token(name);
acc.push(field);
@@ -651,7 +651,7 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
acc.push(field);
return;
}
panic!("unhandled rule: {:?}", rule)
panic!("unhandled rule: {rule:?}")
}
Rule::Labeled { label: l, rule } => {
assert!(label.is_none());