Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-08-18 01:20:20 +00:00
Inline all format arguments where possible

This makes the code more readable and concise, moving format arguments like `format!("{}", foo)` into the more compact `format!("{foo}")` form. The change was created automatically with the following command, so there is far less chance of an accidental typo:

```
cargo clippy --fix -- -A clippy::all -W clippy::uninlined_format_args
```
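A quick standalone sketch (the names here are made up, not taken from the diff) of what the `uninlined_format_args` rewrite does: in-scope identifiers are captured directly inside the braces, and format specs such as `:?` carry over unchanged.

```rust
fn main() {
    let foo = 42;
    let kind = "FN_KW"; // stand-in for a SyntaxKind value

    // Before the fix: positional arguments referenced by `{}` placeholders.
    let before = format!("value {} of kind {:?}", foo, kind);

    // After the fix: the identifiers are captured inline; output is identical.
    let after = format!("value {foo} of kind {kind:?}");

    assert_eq!(before, after);
}
```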
parent 1927c2e1d8
commit e16c76e3c3

180 changed files with 487 additions and 501 deletions (selected hunks shown below)
```diff
@@ -205,7 +205,7 @@ impl<'t> Parser<'t> {
         if self.eat(kind) {
             return true;
         }
-        self.error(format!("expected {:?}", kind));
+        self.error(format!("expected {kind:?}"));
         false
     }

```
```diff
@@ -37,8 +37,8 @@ fn lex(text: &str) -> String {
         let text = lexed.text(i);
         let error = lexed.error(i);

-        let error = error.map(|err| format!(" error: {}", err)).unwrap_or_default();
-        writeln!(res, "{:?} {:?}{}", kind, text, error).unwrap();
+        let error = error.map(|err| format!(" error: {err}")).unwrap_or_default();
+        writeln!(res, "{kind:?} {text:?}{error}").unwrap();
     }
     res
 }
```
```diff
@@ -47,7 +47,7 @@ fn lex(text: &str) -> String {
 fn parse_ok() {
     for case in TestCase::list("parser/ok") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+        assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual);
     }
 }
```
```diff
@@ -56,7 +56,7 @@ fn parse_ok() {
 fn parse_inline_ok() {
     for case in TestCase::list("parser/inline/ok") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+        assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual);
     }
 }
```
```diff
@@ -65,7 +65,7 @@ fn parse_inline_ok() {
 fn parse_err() {
     for case in TestCase::list("parser/err") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+        assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual)
     }
 }
```
```diff
@@ -74,7 +74,7 @@ fn parse_err() {
 fn parse_inline_err() {
     for case in TestCase::list("parser/inline/err") {
         let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
-        assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+        assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
         expect_file![case.rast].assert_eq(&actual)
     }
 }
```
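In the four test hunks above, only `actual` moves into the format string; `case.rs.display()` stays as a positional argument because inline capture works only for plain identifiers in scope, not for arbitrary expressions. A small sketch of the same pattern, using hypothetical values:

```rust
use std::path::PathBuf;

fn main() {
    let path = PathBuf::from("parser/ok/0001_struct_item.rs"); // hypothetical test file
    let actual = "SOURCE_FILE@0..20"; // hypothetical parse output

    // `actual` is a plain identifier and can be captured inline, but
    // `path.display()` is an expression and must remain a positional argument.
    println!("errors in an OK file {}:\n{actual}", path.display());

    // This would not compile: expressions are not allowed inside the braces.
    // println!("errors in an OK file {path.display()}:\n{actual}");
}
```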
```diff
@@ -93,14 +93,14 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
         crate::StrStep::Token { kind, text } => {
             assert!(depth > 0);
             len += text.len();
-            write!(buf, "{}", indent).unwrap();
-            write!(buf, "{:?} {:?}\n", kind, text).unwrap();
+            write!(buf, "{indent}").unwrap();
+            write!(buf, "{kind:?} {text:?}\n").unwrap();
         }
         crate::StrStep::Enter { kind } => {
             assert!(depth > 0 || len == 0);
             depth += 1;
-            write!(buf, "{}", indent).unwrap();
-            write!(buf, "{:?}\n", kind).unwrap();
+            write!(buf, "{indent}").unwrap();
+            write!(buf, "{kind:?}\n").unwrap();
             indent.push_str("  ");
         }
         crate::StrStep::Exit => {
```
```diff
@@ -111,7 +111,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
         }
         crate::StrStep::Error { msg, pos } => {
             assert!(depth > 0);
-            errors.push(format!("error {}: {}\n", pos, msg))
+            errors.push(format!("error {pos}: {msg}\n"))
         }
     });
     assert_eq!(
```
```diff
@@ -124,7 +124,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {

     for (token, msg) in lexed.errors() {
         let pos = lexed.text_start(token);
-        errors.push(format!("error {}: {}\n", pos, msg));
+        errors.push(format!("error {pos}: {msg}\n"));
     }

     let has_errors = !errors.is_empty();
```
```diff
@@ -149,7 +149,7 @@ impl TestCase {

         let mut res = Vec::new();
         let read_dir = fs::read_dir(&dir)
-            .unwrap_or_else(|err| panic!("can't `read_dir` {}: {}", dir.display(), err));
+            .unwrap_or_else(|err| panic!("can't `read_dir` {}: {err}", dir.display()));
         for file in read_dir {
             let file = file.unwrap();
             let path = file.path();
```
```diff
@@ -23,7 +23,7 @@ fn sourcegen_parser_tests() {
     // ok is never actually read, but it needs to be specified to create a Test in existing_tests
     let existing = existing_tests(&tests_dir, true);
     for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
-        panic!("Test is deleted: {}", t);
+        panic!("Test is deleted: {t}");
     }

     let mut new_idx = existing.len() + 1;
```
```diff
@@ -31,7 +31,7 @@ fn sourcegen_parser_tests() {
         let path = match existing.get(name) {
             Some((path, _test)) => path.clone(),
             None => {
-                let file_name = format!("{:04}_{}.rs", new_idx, name);
+                let file_name = format!("{new_idx:04}_{name}.rs");
                 new_idx += 1;
                 tests_dir.join(file_name)
             }
```
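The `{:04}` zero-padded width spec on `new_idx` survives the rewrite unchanged; only how the argument is named differs. A hedged illustration of the same file-name formatting, outside the diff:

```rust
fn main() {
    let new_idx = 7;
    let name = "record_lit"; // hypothetical test name

    let old = format!("{:04}_{}.rs", new_idx, name);
    let new = format!("{new_idx:04}_{name}.rs");

    // Both yield "0007_record_lit.rs": the width-4, zero-fill spec applies
    // to the captured identifier exactly as it did to the positional one.
    assert_eq!(old, new);
    assert_eq!(new, "0007_record_lit.rs");
}
```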
```diff
@@ -116,7 +116,7 @@ fn existing_tests(dir: &Path, ok: bool) -> HashMap<String, (PathBuf, Test)> {
         let text = fs::read_to_string(&path).unwrap();
         let test = Test { name: name.clone(), text, ok };
         if let Some(old) = res.insert(name, (path, test)) {
-            println!("Duplicate test: {:?}", old);
+            println!("Duplicate test: {old:?}");
         }
     }
     res
```