Auto merge of #17315 - hamirmahal:style/simplify-string-interpolation, r=Veykril

style: simplify string interpolation
Commit cb6b808185 by bors, 2024-06-03 12:29:27 +00:00
43 changed files with 71 additions and 79 deletions
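The change applied across the files below is mechanical: `format!`-family macros that passed simple local variables as separate positional arguments now capture those identifiers inline in the format string (available since Rust 1.58). A minimal before/after sketch of the pattern, using hypothetical variable names rather than code taken from the diff:

```rust
fn main() {
    // Hypothetical locals standing in for the variables at the real call sites.
    let command = "cargo check --workspace";
    let error = "exit status 101";

    // Before: positional `{}` placeholders with the variables passed as extra arguments.
    let before = format!("Failed to run the following command: {} error={}", command, error);

    // After: the identifiers are captured directly inside the format string.
    let after = format!("Failed to run the following command: {command} error={error}");

    assert_eq!(before, after);

    // The same rewrite applies to other specifiers, e.g. `{:?}` becomes `{command:?}`.
    println!("{command:?}");
}
```

The same rewrite covers debug and hex specifiers (`{x:?}`, `{value:#X}`), and several multi-line calls collapse to one line, which accounts for the net reduction of eight lines.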

@@ -304,7 +304,7 @@ impl FlycheckActor {
 Some(c) => c,
 None => continue,
 };
-let formatted_command = format!("{:?}", command);
+let formatted_command = format!("{command:?}");
 tracing::debug!(?command, "will restart flycheck");
 let (sender, receiver) = unbounded();
@@ -318,8 +318,7 @@ impl FlycheckActor {
 }
 Err(error) => {
 self.report_progress(Progress::DidFailToRestart(format!(
-"Failed to run the following command: {} error={}",
-formatted_command, error
+"Failed to run the following command: {formatted_command} error={error}"
 )));
 self.status = FlycheckStatus::Finished;
 }
@@ -331,7 +330,7 @@ impl FlycheckActor {
 // Watcher finished
 let command_handle = self.command_handle.take().unwrap();
 self.command_receiver.take();
-let formatted_handle = format!("{:?}", command_handle);
+let formatted_handle = format!("{command_handle:?}");
 let res = command_handle.join();
 if let Err(error) = &res {

@@ -73,7 +73,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
 Ok(t) => t,
 Err(e) => {
 let err = pretty_print_err(e, db);
-panic!("Error in evaluating goal: {}", err);
+panic!("Error in evaluating goal: {err}");
 }
 };
 match &r.data(Interner).value {
@@ -81,7 +81,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
 ConstScalar::Bytes(b, mm) => {
 check(b, mm);
 }
-x => panic!("Expected number but found {:?}", x),
+x => panic!("Expected number but found {x:?}"),
 },
 _ => panic!("result of const eval wasn't a concrete const"),
 }
@@ -89,7 +89,7 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
 fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
 let mut err = String::new();
-let span_formatter = |file, range| format!("{:?} {:?}", file, range);
+let span_formatter = |file, range| format!("{file:?} {range:?}");
 match e {
 ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter),
 ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter),

@@ -670,7 +670,7 @@ fn render_const_scalar(
 TyKind::FnDef(..) => ty.hir_fmt(f),
 TyKind::Function(_) | TyKind::Raw(_, _) => {
 let it = u128::from_le_bytes(pad16(b, false));
-write!(f, "{:#X} as ", it)?;
+write!(f, "{it:#X} as ")?;
 ty.hir_fmt(f)
 }
 TyKind::Array(ty, len) => {

@@ -363,7 +363,7 @@ impl MirEvalError {
 )?;
 }
 Either::Right(closure) => {
-writeln!(f, "In {:?}", closure)?;
+writeln!(f, "In {closure:?}")?;
 }
 }
 let source_map = db.body_with_source_map(*def).1;
@@ -424,7 +424,7 @@ impl MirEvalError {
 | MirEvalError::StackOverflow
 | MirEvalError::CoerceUnsizedError(_)
 | MirEvalError::InternalError(_)
-| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
+| MirEvalError::InvalidVTableId(_) => writeln!(f, "{err:?}")?,
 }
 Ok(())
 }

@@ -77,7 +77,7 @@ fn check_panic(ra_fixture: &str, expected_panic: &str) {
 let (db, file_ids) = TestDB::with_many_files(ra_fixture);
 let file_id = *file_ids.last().unwrap();
 let e = eval_main(&db, file_id).unwrap_err();
-assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {:?}", e)), expected_panic);
+assert_eq!(e.is_panic().unwrap_or_else(|| panic!("unexpected error: {e:?}")), expected_panic);
 }
 #[test]

@@ -213,7 +213,7 @@ impl MirLowerError {
 | MirLowerError::LangItemNotFound(_)
 | MirLowerError::MutatingRvalue
 | MirLowerError::UnresolvedLabel
-| MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{:?}", self)?,
+| MirLowerError::UnresolvedUpvar(_) => writeln!(f, "{self:?}")?,
 }
 Ok(())
 }

@@ -2422,9 +2422,9 @@ impl Const {
 let value_signed =
 i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
 if value >= 10 {
-return Ok(format!("{} ({:#X})", value_signed, value));
+return Ok(format!("{value_signed} ({value:#X})"));
 } else {
-return Ok(format!("{}", value_signed));
+return Ok(format!("{value_signed}"));
 }
 }
 }

@@ -140,7 +140,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
 acc.add_group(
 &group_label,
 assist_id,
-format!("Import `{}`", import_name),
+format!("Import `{import_name}`"),
 range,
 |builder| {
 let scope = match scope.clone() {
@@ -165,7 +165,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
 acc.add_group(
 &group_label,
 assist_id,
-format!("Import `{} as _`", import_name),
+format!("Import `{import_name} as _`"),
 range,
 |builder| {
 let scope = match scope.clone() {

@@ -228,7 +228,7 @@ fn replace_usages(
 edit.replace(
 prefix_expr.syntax().text_range(),
-format!("{} == Bool::False", inner_expr),
+format!("{inner_expr} == Bool::False"),
 );
 } else if let Some((record_field, initializer)) = name
 .as_name_ref()
@@ -275,7 +275,7 @@ fn replace_usages(
 } else if let Some(receiver) = find_method_call_expr_usage(&name) {
 edit.replace(
 receiver.syntax().text_range(),
-format!("({} == Bool::True)", receiver),
+format!("({receiver} == Bool::True)"),
 );
 } else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
 // for any other usage in an expression, replace it with a check that it is the true variant

@@ -242,7 +242,7 @@ fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<(
 .iter()
 .enumerate()
 .map(|(index, _)| {
-let new_name = new_field_name((format!("_{}", index)).into(), &data.names_in_scope);
+let new_name = new_field_name((format!("_{index}")).into(), &data.names_in_scope);
 (index.to_string().into(), new_name)
 })
 .collect(),

@@ -758,7 +758,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<Ass
 }
 fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
-make::path_from_text(&format!("{}::{}", qual_path_ty, path_expr_seg))
+make::path_from_text(&format!("{qual_path_ty}::{path_expr_seg}"))
 }
 #[cfg(test)]

@@ -47,7 +47,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
 }
 // Prepend set_ to fn names.
-fn_names.iter_mut().for_each(|name| *name = format!("set_{}", name));
+fn_names.iter_mut().for_each(|name| *name = format!("set_{name}"));
 // Return early if we've found an existing fn
 let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?;

@@ -105,7 +105,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
 "Generate `IndexMut` impl from this `Index` trait",
 target,
 |edit| {
-edit.insert(target.start(), format!("$0{}\n\n", impl_def));
+edit.insert(target.start(), format!("$0{impl_def}\n\n"));
 },
 )
 }

@@ -67,9 +67,9 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
 edit.replace(
 method_call.syntax().text_range(),
 if sc.chars().all(|c| c.is_alphanumeric() || c == ':') {
-format!("{}::from({})", sc, receiver)
+format!("{sc}::from({receiver})")
 } else {
-format!("<{}>::from({})", sc, receiver)
+format!("<{sc}>::from({receiver})")
 },
 );
 },

@@ -86,7 +86,7 @@ pub(crate) fn merge_nested_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
 nested_if_cond.syntax().text().to_string()
 };
-let replace_cond = format!("{} && {}", cond_text, nested_if_cond_text);
+let replace_cond = format!("{cond_text} && {nested_if_cond_text}");
 edit.replace(cond_range, replace_cond);
 edit.replace(then_branch_range, nested_if_then_branch.syntax().text());

@@ -48,7 +48,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 }
 None => false,
 };
-let expr = if need_to_add_ws { format!(" {}", expr) } else { expr.to_string() };
+let expr = if need_to_add_ws { format!(" {expr}") } else { expr.to_string() };
 builder.replace(parens.syntax().text_range(), expr)
 },

@@ -65,7 +65,7 @@ pub(crate) fn add_format_like_completions(
 let exprs = with_placeholders(exprs);
 for (label, macro_name) in KINDS {
 let snippet = if exprs.is_empty() {
-format!(r#"{}({})"#, macro_name, out)
+format!(r#"{macro_name}({out})"#)
 } else {
 format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", "))
 };
@@ -108,7 +108,7 @@ mod tests {
 for (kind, input, output) in test_vector {
 let (parsed_string, _exprs) = parse_format_exprs(input).unwrap();
-let snippet = format!(r#"{}("{}")"#, kind, parsed_string);
+let snippet = format!(r#"{kind}("{parsed_string}")"#);
 assert_eq!(&snippet, output);
 }
 }

@@ -37,7 +37,7 @@ impl State {
 self.names.insert(name.clone(), 1);
 1
 };
-make::name(&format!("{}{}", name, count))
+make::name(&format!("{name}{count}"))
 }
 fn serde_derive(&self) -> String {

@@ -27,7 +27,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
 hir::AssocItem::Function(id) => {
 let function = id;
 (
-format!("`fn {}`", redundant_assoc_item_name),
+format!("`fn {redundant_assoc_item_name}`"),
 function
 .source(db)
 .map(|it| it.syntax().value.text_range())
@@ -38,7 +38,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
 hir::AssocItem::Const(id) => {
 let constant = id;
 (
-format!("`const {}`", redundant_assoc_item_name),
+format!("`const {redundant_assoc_item_name}`"),
 constant
 .source(db)
 .map(|it| it.syntax().value.text_range())
@@ -49,7 +49,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
 hir::AssocItem::TypeAlias(id) => {
 let type_alias = id;
 (
-format!("`type {}`", redundant_assoc_item_name),
+format!("`type {redundant_assoc_item_name}`"),
 type_alias
 .source(db)
 .map(|it| it.syntax().value.text_range())

@@ -161,12 +161,11 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
 // we could omit generic parameters cause compiler can deduce it automatically
 if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() {
 let generic_parameters = generic_parameters.join(", ");
-receiver_type_adt_name =
-format!("{}::<{}>", receiver_type_adt_name, generic_parameters);
+receiver_type_adt_name = format!("{receiver_type_adt_name}::<{generic_parameters}>");
 }
 let method_name = call.name_ref()?;
-let assoc_func_call = format!("{}::{}()", receiver_type_adt_name, method_name);
+let assoc_func_call = format!("{receiver_type_adt_name}::{method_name}()");
 let assoc_func_call = make::expr_path(make::path_from_text(&assoc_func_call));
@@ -184,8 +183,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
 Some(Assist {
 id: AssistId("method_call_to_assoc_func_call_fix", AssistKind::QuickFix),
 label: Label::new(format!(
-"Use associated func call instead: `{}`",
-assoc_func_call_expr_string
+"Use associated func call instead: `{assoc_func_call_expr_string}`"
 )),
 group: None,
 target: range,

@@ -43,7 +43,7 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin
 let path = path.as_deref().unwrap_or("<unknown file>");
 match db.line_index(file_id).try_line_col(text_range.start()) {
 Some(line_col) => format!("file://{path}#{}:{}", line_col.line + 1, line_col.col),
-None => format!("file://{path} range {:?}", text_range),
+None => format!("file://{path} range {text_range:?}"),
 }
 };
 Some(def.eval(db, span_formatter))

@@ -40,7 +40,7 @@ impl fmt::Display for RecursiveMemoryLayout {
 "{}: {} (size: {}, align: {}, field offset: {})\n",
 node.item_name, node.typename, node.size, node.alignment, node.offset
 );
-write!(fmt, "{}", out)?;
+write!(fmt, "{out}")?;
 if node.children_start != -1 {
 for j in nodes[idx].children_start
 ..(nodes[idx].children_start + nodes[idx].children_len as i64)

@@ -418,7 +418,7 @@ fn delimited(
 }
 if !p.eat(delim) {
 if p.at_ts(first_set) {
-p.error(format!("expected {:?}", delim));
+p.error(format!("expected {delim:?}"));
 } else {
 break;
 }

@@ -106,7 +106,7 @@ impl AbsPathBuf {
 /// Panics if `path` is not absolute.
 pub fn assert(path: Utf8PathBuf) -> AbsPathBuf {
 AbsPathBuf::try_from(path)
-.unwrap_or_else(|path| panic!("expected absolute path, got {}", path))
+.unwrap_or_else(|path| panic!("expected absolute path, got {path}"))
 }
 /// Wrap the given absolute path in `AbsPathBuf`

@@ -50,8 +50,7 @@ impl ProcMacroProcessSrv {
 Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
 io::ErrorKind::Other,
 format!(
-"proc-macro server's api version ({}) is newer than rust-analyzer's ({})",
-v, CURRENT_API_VERSION
+"proc-macro server's api version ({v}) is newer than rust-analyzer's ({CURRENT_API_VERSION})"
 ),
 )),
 Ok(v) => {

@@ -219,8 +219,7 @@ impl Sysroot {
 ", try running `rustup component add rust-src` to possibly fix this"
 };
 sysroot.error = Some(format!(
-"sysroot at `{}` is missing a `core` library{var_note}",
-src_root,
+"sysroot at `{src_root}` is missing a `core` library{var_note}",
 ));
 }
 }

@@ -126,7 +126,7 @@ fn replace_fake_sys_root(s: &mut String) {
 let fake_sysroot_path = get_test_path("fake-sysroot");
 let fake_sysroot_path = if cfg!(windows) {
 let normalized_path = fake_sysroot_path.as_str().replace('\\', r#"\\"#);
-format!(r#"{}\\"#, normalized_path)
+format!(r#"{normalized_path}\\"#)
 } else {
 format!("{}/", fake_sysroot_path.as_str())
 };

@@ -479,7 +479,7 @@ impl flags::AnalysisStats {
 .or_insert(1);
 } else {
 acc.syntax_errors += 1;
-bar.println(format!("Syntax error: \n{}", err));
+bar.println(format!("Syntax error: \n{err}"));
 }
 }
 }

@@ -49,7 +49,7 @@ impl flags::RunTests {
 let mut sw_all = StopWatch::start();
 for test in tests {
 let full_name = full_name_of_item(db, test.module(db), test.name(db));
-println!("test {}", full_name);
+println!("test {full_name}");
 if test.is_ignore(db) {
 println!("ignored");
 ignore_count += 1;
@@ -62,7 +62,7 @@ impl flags::RunTests {
 } else {
 fail_count += 1;
 }
-println!("{}", result);
+println!("{result}");
 eprintln!("{:<20} {}", format!("test {}", full_name), sw_one.elapsed());
 }
 println!("{pass_count} passed, {fail_count} failed, {ignore_count} ignored");

@@ -220,8 +220,8 @@ impl Tester {
 self.pass_count += 1;
 } else {
 println!("{p:?} FAIL");
-println!("actual (r-a) = {:?}", actual);
-println!("expected (rustc) = {:?}", expected);
+println!("actual (r-a) = {actual:?}");
+println!("expected (rustc) = {expected:?}");
 self.fail_count += 1;
 }
 }

@@ -199,7 +199,7 @@ impl Node {
 let _ = write!(out, " ({} calls)", self.count);
 }
-eprintln!("{}", out);
+eprintln!("{out}");
 for child in &self.children {
 child.go(level + 1, filter)

@@ -20,7 +20,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 let input_span = input.span();
 let (trait_attrs, salsa_attrs) = filter_attrs(input.attrs);
 if !salsa_attrs.is_empty() {
-return Error::new(input_span, format!("unsupported attributes: {:?}", salsa_attrs))
+return Error::new(input_span, format!("unsupported attributes: {salsa_attrs:?}"))
 .to_compile_error()
 .into();
 }
@@ -78,7 +78,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 num_storages += 1;
 }
 _ => {
-return Error::new(span, format!("unknown salsa attribute `{}`", name))
+return Error::new(span, format!("unknown salsa attribute `{name}`"))
 .to_compile_error()
 .into();
 }
@@ -111,7 +111,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 _ => {
 return Error::new(
 sig_span,
-format!("first argument of query `{}` must be `&self`", query_name),
+format!("first argument of query `{query_name}` must be `&self`"),
 )
 .to_compile_error()
 .into();
@@ -130,7 +130,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 arg => {
 return Error::new(
 arg.span(),
-format!("unsupported argument `{:?}` of `{}`", arg, query_name,),
+format!("unsupported argument `{arg:?}` of `{query_name}`",),
 )
 .to_compile_error()
 .into();
@@ -144,7 +144,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 ref ret => {
 return Error::new(
 ret.span(),
-format!("unsupported return type `{:?}` of `{}`", ret, query_name),
+format!("unsupported return type `{ret:?}` of `{query_name}`"),
 )
 .to_compile_error()
 .into();
@@ -169,7 +169,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 let lookup_keys = vec![(parse_quote! { key }, value.clone())];
 Some(Query {
 query_type: lookup_query_type,
-query_name: format!("{}", lookup_fn_name),
+query_name: format!("{lookup_fn_name}"),
 fn_name: lookup_fn_name,
 receiver: self_receiver.clone(),
 attrs: vec![], // FIXME -- some automatically generated docs on this method?
@@ -274,8 +274,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 *Note:* Setting values will trigger cancellation
 of any ongoing queries; this method blocks until
 those queries have been cancelled.
-",
-fn_name = fn_name
+"
 );
 let set_constant_fn_docs = format!(
@@ -290,8 +289,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
 *Note:* Setting values will trigger cancellation
 of any ongoing queries; this method blocks until
 those queries have been cancelled.
-",
-fn_name = fn_name
+"
 );
 query_fn_declarations.extend(quote! {

@@ -162,7 +162,7 @@ fn extract_cycle(f: impl FnOnce() + UnwindSafe) -> salsa::Cycle {
 return cycle.clone();
 }
 }
-panic!("unexpected value: {:?}", v)
+panic!("unexpected value: {v:?}")
 }
 #[test]

@@ -13,12 +13,12 @@ pub(crate) trait ConstantsDatabase: TestContext {
 }
 fn add(db: &dyn ConstantsDatabase, key1: char, key2: char) -> usize {
-db.log().add(format!("add({}, {})", key1, key2));
+db.log().add(format!("add({key1}, {key2})"));
 db.input(key1) + db.input(key2)
 }
 fn add3(db: &dyn ConstantsDatabase, key1: char, key2: char, key3: char) -> usize {
-db.log().add(format!("add3({}, {}, {})", key1, key2, key3));
+db.log().add(format!("add3({key1}, {key2}, {key3})"));
 db.add(key1, key2) + db.input(key3)
 }

@@ -26,7 +26,7 @@ pub(crate) struct TestContextImpl {
 impl TestContextImpl {
 #[track_caller]
 pub(crate) fn assert_log(&self, expected_log: &[&str]) {
-let expected_text = &format!("{:#?}", expected_log);
+let expected_text = &format!("{expected_log:#?}");
 let actual_text = &format!("{:#?}", self.log().take());
 if expected_text == actual_text {
@@ -36,9 +36,9 @@ impl TestContextImpl {
 #[allow(clippy::print_stdout)]
 for diff in dissimilar::diff(expected_text, actual_text) {
 match diff {
-dissimilar::Chunk::Delete(l) => println!("-{}", l),
-dissimilar::Chunk::Equal(l) => println!(" {}", l),
-dissimilar::Chunk::Insert(r) => println!("+{}", r),
+dissimilar::Chunk::Delete(l) => println!("-{l}"),
+dissimilar::Chunk::Equal(l) => println!(" {l}"),
+dissimilar::Chunk::Insert(r) => println!("+{r}"),
 }
 }

@@ -33,7 +33,7 @@ fn parallel_cycle_none_recover() {
 "#]]
 .assert_debug_eq(&c.unexpected_participants(&db));
 } else {
-panic!("b failed in an unexpected way: {:?}", err_b);
+panic!("b failed in an unexpected way: {err_b:?}");
 }
 // We expect A to propagate a panic, which causes us to use the sentinel

@@ -28,7 +28,7 @@ fn in_par_get_set_race() {
 // cancellation, it'll unwind.
 let result1 = thread1.join().unwrap();
 if let Ok(value1) = result1 {
-assert!(value1 == 111 || value1 == 1011, "illegal result {}", value1);
+assert!(value1 == 111 || value1 == 1011, "illegal result {value1}");
 }
 // thread2 can not observe a cancellation because it performs a

@@ -218,7 +218,7 @@ impl From<MacroFileId> for HirFileId {
 fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
 _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
 let id = id.as_u32();
-assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
+assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {id} is too large");
 HirFileId(id | Self::MACRO_FILE_TAG_MASK)
 }
 }

@@ -172,7 +172,7 @@ pub fn ty_alias(
 assignment: Option<(ast::Type, Option<ast::WhereClause>)>,
 ) -> ast::TypeAlias {
 let mut s = String::new();
-s.push_str(&format!("type {}", ident));
+s.push_str(&format!("type {ident}"));
 if let Some(list) = generic_param_list {
 s.push_str(&list.to_string());
@@ -297,7 +297,7 @@ pub fn impl_trait(
 };
 let where_clause = merge_where_clause(ty_where_clause, trait_where_clause)
-.map_or_else(|| " ".to_owned(), |wc| format!("\n{}\n", wc));
+.map_or_else(|| " ".to_owned(), |wc| format!("\n{wc}\n"));
 let body = match body {
 Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),

@@ -196,7 +196,7 @@ impl ChangeFixture {
 origin,
 );
 let prev = crates.insert(crate_name.clone(), crate_id);
-assert!(prev.is_none(), "multiple crates with same name: {}", crate_name);
+assert!(prev.is_none(), "multiple crates with same name: {crate_name}");
 for dep in meta.deps {
 let prelude = match &meta.extern_prelude {
 Some(v) => v.contains(&dep),

@@ -450,7 +450,7 @@ impl MiniCore {
 }
 if !active_regions.is_empty() {
-panic!("unclosed regions: {:?} Add an `endregion` comment", active_regions);
+panic!("unclosed regions: {active_regions:?} Add an `endregion` comment");
 }
 for flag in &self.valid_flags {

@@ -433,8 +433,7 @@ mod tests {
 initialize_start_test(TestCase {
 test_messages: vec![notification_msg.clone()],
 expected_resp: Err(ProtocolError::new(format!(
-"expected initialize request, got {:?}",
-notification_msg
+"expected initialize request, got {notification_msg:?}"
 ))),
 });
 }

@@ -64,7 +64,7 @@ impl flags::Metrics {
 };
 let mut file =
-fs::File::options().write(true).create(true).open(format!("target/{}.json", name))?;
+fs::File::options().write(true).create(true).open(format!("target/{name}.json"))?;
 writeln!(file, "{}", metrics.json())?;
 eprintln!("{metrics:#?}");
 Ok(())