Auto-attach database in Analysis calls

Lukas Wirth 2025-08-16 16:26:30 +02:00
parent becf04b67a
commit aed0fec1a9
12 changed files with 137 additions and 101 deletions

View file

@@ -46,7 +46,7 @@ impl<'db> GenericArg<'db> {
     pub fn expect_ty(self) -> Ty<'db> {
         match self.kind() {
             GenericArgKind::Type(ty) => ty,
-            _ => panic!("Expected ty, got {:?}", self),
+            _ => panic!("Expected ty, got {self:?}"),
         }
     }

View file

@@ -67,7 +67,7 @@ mod tests;
 pub mod utils;
 
 use hir::Semantics;
-use ide_db::{EditionedFileId, RootDatabase, base_db::salsa};
+use ide_db::{EditionedFileId, RootDatabase};
 use syntax::{Edition, TextRange};
 
 pub(crate) use crate::assist_context::{AssistContext, Assists};
@@ -93,11 +93,8 @@ pub fn assists(
         .unwrap_or_else(|| EditionedFileId::new(db, range.file_id, Edition::CURRENT));
     let ctx = AssistContext::new(sema, config, hir::FileRange { file_id, range: range.range });
     let mut acc = Assists::new(&ctx, resolve);
-    // the handlers may invoke trait solving related things which accesses salsa structs outside queries
-    salsa::attach(db, || {
-        handlers::all().iter().for_each(|handler| {
-            handler(&mut acc, &ctx);
-        });
+    handlers::all().iter().for_each(|handler| {
+        handler(&mut acc, &ctx);
     });
     acc.finish()
 }
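The comment removed above states why the attach existed: assist handlers can call trait-solver code that touches salsa structs outside of a tracked query, and such code can only find the database if one has been attached to the current thread. After this commit the entry point no longer attaches; its caller is expected to (the new `Analysis::with_db` further down does exactly that). A minimal, hypothetical sketch of the mechanism, assuming salsa's `with_attached_database` helper as the way code outside a query reaches the current database:

    use ide_db::{RootDatabase, base_db::salsa};

    // Hypothetical stand-in for code that runs outside a tracked query (for
    // example, parts of the trait solver): it can only see the database through
    // the thread-local set up by `salsa::attach`.
    fn outside_a_query() -> bool {
        salsa::with_attached_database(|_db| ()).is_some()
    }

    fn example(db: &RootDatabase) {
        assert!(!outside_a_query()); // assuming nothing is attached on this thread yet
        salsa::attach(db, || {
            // entry points like `assists` now run in here
            assert!(outside_a_query());
        });
    }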

View file

@@ -1,7 +1,7 @@
 mod generated;
 
 use expect_test::expect;
-use hir::{Semantics, setup_tracing};
+use hir::{Semantics, db::HirDatabase, setup_tracing};
 use ide_db::{
     EditionedFileId, FileRange, RootDatabase, SnippetCap,
     assists::ExprFillDefaultMode,
@@ -16,7 +16,7 @@ use test_utils::{assert_eq_text, extract_offset};
 
 use crate::{
     Assist, AssistConfig, AssistContext, AssistKind, AssistResolveStrategy, Assists, SingleResolve,
-    assists, handlers::Handler,
+    handlers::Handler,
 };
 
 pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
@@ -103,6 +103,18 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
     prefer_self_ty: false,
 };
 
+fn assists(
+    db: &RootDatabase,
+    config: &AssistConfig,
+    resolve: AssistResolveStrategy,
+    range: ide_db::FileRange,
+) -> Vec<Assist> {
+    salsa::attach(db, || {
+        HirDatabase::zalsa_register_downcaster(db);
+        crate::assists(db, config, resolve, range)
+    })
+}
+
 pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
     RootDatabase::with_single_file(text)
 }
@@ -320,6 +332,7 @@ fn check_with_config(
     };
     let mut acc = Assists::new(&ctx, resolve);
     salsa::attach(&db, || {
+        HirDatabase::zalsa_register_downcaster(&db);
        handler(&mut acc, &ctx);
     });
     let mut res = acc.finish();
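Both test-side changes above follow the same pattern: wrap the code under test in `salsa::attach` and call `HirDatabase::zalsa_register_downcaster` first, so downcasts to `dyn HirDatabase` made outside a query cannot panic. A generic form of that wrapper (the helper name is hypothetical; every call in it comes from the diff above):

    use hir::db::HirDatabase;
    use ide_db::{RootDatabase, base_db::salsa};

    // Hypothetical reusable test helper mirroring the wrappers added above.
    fn run_attached<T>(db: &RootDatabase, f: impl FnOnce() -> T) -> T {
        salsa::attach(db, || {
            // register the `dyn HirDatabase` downcaster before running `f`
            HirDatabase::zalsa_register_downcaster(db);
            f()
        })
    }

A test would then call, for example, `run_attached(&db, || crate::assists(&db, &config, resolve, range))` instead of spelling out the attach at every call site.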

View file

@@ -10,7 +10,6 @@ mod snippet;
 #[cfg(test)]
 mod tests;
 
-use base_db::salsa;
 use ide_db::{
     FilePosition, FxHashSet, RootDatabase,
     imports::insert_use::{self, ImportScope},
@@ -229,7 +228,7 @@ pub fn completions(
     {
         let acc = &mut completions;
 
-        salsa::attach(db, || match analysis {
+        match analysis {
             CompletionAnalysis::Name(name_ctx) => completions::complete_name(acc, ctx, name_ctx),
             CompletionAnalysis::NameRef(name_ref_ctx) => {
                 completions::complete_name_ref(acc, ctx, name_ref_ctx)
@@ -257,7 +256,7 @@ pub fn completions(
                 );
             }
             CompletionAnalysis::UnexpandedAttrTT { .. } | CompletionAnalysis::String { .. } => (),
-        })
+        }
     }
 
     Some(completions.into())

View file

@@ -24,7 +24,7 @@ mod type_pos;
 mod use_tree;
 mod visibility;
 
-use base_db::SourceDatabase;
+use base_db::{SourceDatabase, salsa};
 use expect_test::Expect;
 use hir::{PrefixKind, setup_tracing};
 use ide_db::{
@@ -243,7 +243,7 @@ pub(crate) fn check_edit_with_config(
     let ra_fixture_after = trim_indent(ra_fixture_after);
     let (db, position) = position(ra_fixture_before);
     let completions: Vec<CompletionItem> =
-        crate::completions(&db, &config, position, None).unwrap();
+        salsa::attach(&db, || crate::completions(&db, &config, position, None).unwrap());
     let (completion,) = completions
         .iter()
         .filter(|it| it.lookup() == what)
@@ -306,7 +306,7 @@ pub(crate) fn get_all_items(
     trigger_character: Option<char>,
 ) -> Vec<CompletionItem> {
     let (db, position) = position(code);
-    let res = crate::completions(&db, &config, position, trigger_character)
+    let res = salsa::attach(&db, || crate::completions(&db, &config, position, trigger_character))
         .map_or_else(Vec::default, Into::into);
     // validate
     res.iter().for_each(|it| {

View file

@@ -92,7 +92,7 @@ use hir::{
 use ide_db::{
     EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
     assists::{Assist, AssistId, AssistResolveStrategy, ExprFillDefaultMode},
-    base_db::{ReleaseChannel, RootQueryDb as _, salsa},
+    base_db::{ReleaseChannel, RootQueryDb as _},
     generated::lints::{CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS, DEFAULT_LINTS, Lint, LintGroup},
     imports::insert_use::InsertUseConfig,
     label::Label,
@@ -537,12 +537,10 @@ pub fn full_diagnostics(
     resolve: &AssistResolveStrategy,
     file_id: FileId,
 ) -> Vec<Diagnostic> {
-    salsa::attach(db, || {
-        let mut res = syntax_diagnostics(db, config, file_id);
-        let sema = semantic_diagnostics(db, config, resolve, file_id);
-        res.extend(sema);
-        res
-    })
+    let mut res = syntax_diagnostics(db, config, file_id);
+    let sema = semantic_diagnostics(db, config, resolve, file_id);
+    res.extend(sema);
+    res
 }
 
 /// Returns whether to keep this diagnostic (or remove it).

View file

@@ -6,7 +6,7 @@ use hir::setup_tracing;
 use ide_db::{
     LineIndexDatabase, RootDatabase,
     assists::{AssistResolveStrategy, ExprFillDefaultMode},
-    base_db::SourceDatabase,
+    base_db::{SourceDatabase, salsa},
 };
 use itertools::Itertools;
 use stdx::trim_indent;
@@ -74,14 +74,16 @@ fn check_nth_fix_with_config(
     let after = trim_indent(ra_fixture_after);
 
     let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
-    let diagnostic = super::full_diagnostics(
-        &db,
-        &config,
-        &AssistResolveStrategy::All,
-        file_position.file_id.file_id(&db),
-    )
-    .pop()
-    .expect("no diagnostics");
+    let diagnostic = salsa::attach(&db, || {
+        super::full_diagnostics(
+            &db,
+            &config,
+            &AssistResolveStrategy::All,
+            file_position.file_id.file_id(&db),
+        )
+        .pop()
+        .expect("no diagnostics")
+    });
     let fix = &diagnostic
         .fixes
         .unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
@@ -127,12 +129,14 @@ pub(crate) fn check_has_fix(
     let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
     let mut conf = DiagnosticsConfig::test_sample();
     conf.expr_fill_default = ExprFillDefaultMode::Default;
-    let fix = super::full_diagnostics(
-        &db,
-        &conf,
-        &AssistResolveStrategy::All,
-        file_position.file_id.file_id(&db),
-    )
+    let fix = salsa::attach(&db, || {
+        super::full_diagnostics(
+            &db,
+            &conf,
+            &AssistResolveStrategy::All,
+            file_position.file_id.file_id(&db),
+        )
+    })
     .into_iter()
     .find(|d| {
         d.fixes
@@ -166,12 +170,14 @@ pub(crate) fn check_has_fix(
 /// Checks that there's a diagnostic *without* fix at `$0`.
 pub(crate) fn check_no_fix(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
     let (db, file_position) = RootDatabase::with_position(ra_fixture);
-    let diagnostic = super::full_diagnostics(
-        &db,
-        &DiagnosticsConfig::test_sample(),
-        &AssistResolveStrategy::All,
-        file_position.file_id.file_id(&db),
-    )
+    let diagnostic = salsa::attach(&db, || {
+        super::full_diagnostics(
+            &db,
+            &DiagnosticsConfig::test_sample(),
+            &AssistResolveStrategy::All,
+            file_position.file_id.file_id(&db),
+        )
+    })
     .pop()
     .unwrap();
     assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {diagnostic:?}");
@@ -206,7 +212,13 @@ pub(crate) fn check_diagnostics_with_config(
         .iter()
         .copied()
         .flat_map(|file_id| {
-            super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.file_id(&db))
+            salsa::attach(&db, || {
+                super::full_diagnostics(
+                    &db,
+                    &config,
+                    &AssistResolveStrategy::All,
+                    file_id.file_id(&db),
+                )
                 .into_iter()
                 .map(|d| {
                     let mut annotation = String::new();
@@ -224,6 +236,7 @@ pub(crate) fn check_diagnostics_with_config(
                     annotation.push_str(&d.message);
                     (d.range, annotation)
                 })
+            })
         })
         .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation)))
         .into_group_map();
@@ -275,15 +288,19 @@ fn test_disabled_diagnostics() {
     let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
     let file_id = file_id.file_id(&db);
 
-    let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+    let diagnostics = salsa::attach(&db, || {
+        super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id)
+    });
     assert!(diagnostics.is_empty());
 
-    let diagnostics = super::full_diagnostics(
-        &db,
-        &DiagnosticsConfig::test_sample(),
-        &AssistResolveStrategy::All,
-        file_id,
-    );
+    let diagnostics = salsa::attach(&db, || {
+        super::full_diagnostics(
+            &db,
+            &DiagnosticsConfig::test_sample(),
+            &AssistResolveStrategy::All,
+            file_id,
+        )
+    });
     assert!(!diagnostics.is_empty());
 }

View file

@@ -10,7 +10,7 @@ use hir::{
 };
 use ide_db::{
     RootDatabase, SymbolKind,
-    base_db::{AnchoredPath, SourceDatabase, salsa},
+    base_db::{AnchoredPath, SourceDatabase},
     defs::{Definition, IdentClass},
     famous_defs::FamousDefs,
     helpers::pick_best_token,
@@ -108,7 +108,7 @@ pub(crate) fn goto_definition(
     }
 
     Some(
-        salsa::attach(sema.db, || IdentClass::classify_node(sema, &parent))?
+        IdentClass::classify_node(sema, &parent)?
             .definitions()
             .into_iter()
             .flat_map(|(def, _)| {

View file

@@ -12,7 +12,6 @@ use hir::{
 };
 use ide_db::{
     FileRange, FxIndexSet, Ranker, RootDatabase,
-    base_db::salsa,
     defs::{Definition, IdentClass, NameRefClass, OperatorClass},
     famous_defs::FamousDefs,
     helpers::pick_best_token,
@@ -137,20 +136,18 @@ pub(crate) fn hover(
     let edition =
         sema.attach_first_edition(file_id).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT);
     let display_target = sema.first_crate(file_id)?.to_display_target(db);
-    let mut res = salsa::attach(sema.db, || {
-        if range.is_empty() {
-            hover_offset(
-                sema,
-                FilePosition { file_id, offset: range.start() },
-                file,
-                config,
-                edition,
-                display_target,
-            )
-        } else {
-            hover_ranged(sema, frange, file, config, edition, display_target)
-        }
-    })?;
+    let mut res = if range.is_empty() {
+        hover_offset(
+            sema,
+            FilePosition { file_id, offset: range.start() },
+            file,
+            config,
+            edition,
+            display_target,
+        )
+    } else {
+        hover_ranged(sema, frange, file, config, edition, display_target)
+    }?;
 
     if let HoverDocFormat::PlainText = config.format {
         res.info.markup = remove_markdown(res.info.markup.as_str()).into();
@@ -293,7 +290,7 @@ fn hover_offset(
                 .into_iter()
                 .unique_by(|&((def, _), _, _, _)| def)
                 .map(|((def, subst), macro_arm, hovered_definition, node)| {
-                    salsa::attach(sema.db, || hover_for_definition(
+                    hover_for_definition(
                         sema,
                         file_id,
                         def,
@@ -304,7 +301,7 @@ fn hover_offset(
                         config,
                         edition,
                         display_target,
-                    ))
+                    )
                 })
                 .collect::<Vec<_>>(),
             )
@@ -583,13 +580,11 @@ fn goto_type_action_for_def(
         });
     }
 
-    salsa::attach(db, || {
-        if let Ok(generic_def) = GenericDef::try_from(def) {
-            generic_def.type_or_const_params(db).into_iter().for_each(|it| {
-                walk_and_push_ty(db, &it.ty(db), &mut push_new_def);
-            });
-        }
-    });
+    if let Ok(generic_def) = GenericDef::try_from(def) {
+        generic_def.type_or_const_params(db).into_iter().for_each(|it| {
+            walk_and_push_ty(db, &it.ty(db), &mut push_new_def);
+        });
+    }
 
     let ty = match def {
         Definition::Local(it) => Some(it.ty(db)),

View file

@@ -10,7 +10,6 @@ use hir::{
 };
 use ide_db::{
     RootDatabase,
-    base_db::salsa,
     defs::Definition,
     documentation::{DocsRangeMap, HasDocs},
     famous_defs::FamousDefs,
@@ -45,7 +44,7 @@ pub(super) fn type_info_of(
         Either::Left(expr) => sema.type_of_expr(expr)?,
         Either::Right(pat) => sema.type_of_pat(pat)?,
     };
-    salsa::attach(sema.db, || type_info(sema, _config, ty_info, edition, display_target))
+    type_info(sema, _config, ty_info, edition, display_target)
 }
 
 pub(super) fn closure_expr(
@@ -912,7 +911,7 @@ pub(super) fn literal(
     };
     let ty = ty.display(sema.db, display_target);
 
-    let mut s = salsa::attach(sema.db, || format!("```rust\n{ty}\n```\n___\n\n"));
+    let mut s = format!("```rust\n{ty}\n```\n___\n\n");
     match value {
         Ok(value) => {
             let backtick_len = value.chars().filter(|c| *c == '`').count();

View file

@@ -62,7 +62,7 @@ use std::panic::{AssertUnwindSafe, UnwindSafe};
 
 use cfg::CfgOptions;
 use fetch_crates::CrateInfo;
-use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, sym};
+use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, db::HirDatabase, sym};
 use ide_db::{
     FxHashMap, FxIndexSet, LineIndexDatabase,
     base_db::{
@@ -478,10 +478,12 @@ impl Analysis {
     /// Fuzzy searches for a symbol.
     pub fn symbol_search(&self, query: Query, limit: usize) -> Cancellable<Vec<NavigationTarget>> {
-        self.with_db(|db| {
-            symbol_index::world_symbols(db, query)
-                .into_iter()
-                .filter_map(|s| s.try_to_nav(db))
+        // `world_symbols` currently clones the database to run stuff in parallel, which will make any query panic
+        // if we were to attach it here.
+        // xx: should we make this a par iter?
+        Cancelled::catch(|| {
+            symbol_index::world_symbols(&self.db, query)
+                .into_iter()
+                .filter_map(|s| s.try_to_nav(&self.db))
                 .take(limit)
                 .map(UpmappingResult::call_site)
                 .collect::<Vec<_>>()
@@ -660,15 +662,6 @@ impl Analysis {
         })
     }
 
-    /// Computes syntax highlighting for the given file
-    pub fn highlight(
-        &self,
-        highlight_config: HighlightConfig,
-        file_id: FileId,
-    ) -> Cancellable<Vec<HlRange>> {
-        self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None))
-    }
-
     /// Computes all ranges to highlight for a given item in a file.
     pub fn highlight_related(
         &self,
@@ -682,20 +675,42 @@ impl Analysis {
         })
     }
 
+    /// Computes syntax highlighting for the given file
+    pub fn highlight(
+        &self,
+        highlight_config: HighlightConfig,
+        file_id: FileId,
+    ) -> Cancellable<Vec<HlRange>> {
+        // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database
+        // highlighting instead sets up the attach hook where necessary for the trait solver
+        Cancelled::catch(|| {
+            syntax_highlighting::highlight(&self.db, highlight_config, file_id, None)
+        })
+    }
+
     /// Computes syntax highlighting for the given file range.
     pub fn highlight_range(
         &self,
         highlight_config: HighlightConfig,
         frange: FileRange,
     ) -> Cancellable<Vec<HlRange>> {
-        self.with_db(|db| {
-            syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range))
+        // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database
+        // highlighting instead sets up the attach hook where necessary for the trait solver
+        Cancelled::catch(|| {
+            syntax_highlighting::highlight(
+                &self.db,
+                highlight_config,
+                frange.file_id,
+                Some(frange.range),
+            )
         })
     }
 
     /// Computes syntax highlighting for the given file.
     pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable<String> {
-        self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
+        // highlighting may construct a new database for "speculative" execution, so we can't currently attach the database
+        // highlighting instead sets up the attach hook where necessary for the trait solver
+        Cancelled::catch(|| syntax_highlighting::highlight_as_html(&self.db, file_id, rainbow))
     }
 
     /// Computes completions at the given position.
@@ -873,8 +888,12 @@ impl Analysis {
     where
         F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
     {
-        let snap = self.db.clone();
-        Cancelled::catch(|| f(&snap))
+        salsa::attach(&self.db, || {
+            // the trait solver code may invoke `as_view<HirDatabase>` outside of queries,
+            // so technically we might run into a panic in salsa if the downcaster has not yet been registered.
+            HirDatabase::zalsa_register_downcaster(&self.db);
+            Cancelled::catch(|| f(&self.db))
+        })
     }
 }
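With this change `with_db` bundles the three concerns in one place: it attaches the database, registers the `dyn HirDatabase` downcaster, and converts a cancellation unwind into `Err(Cancelled)`. The exceptions kept above, `symbol_search` and the highlighting entry points, only do the last step, because the code they run clones the database for parallel work or builds a separate "speculative" database, as their comments note. A condensed restatement of the two shapes as free functions (the function names are hypothetical; import paths and bounds are simplified from the surrounding code):

    use std::panic::UnwindSafe;

    use hir::db::HirDatabase;
    use ide_db::{RootDatabase, base_db::salsa::{self, Cancelled}};

    type Cancellable<T> = Result<T, Cancelled>;

    // Shape used by `with_db`: attach + register the downcaster + catch cancellation.
    fn attached<T>(
        db: &RootDatabase,
        f: impl FnOnce(&RootDatabase) -> T + UnwindSafe,
    ) -> Cancellable<T> {
        salsa::attach(db, || {
            HirDatabase::zalsa_register_downcaster(db);
            Cancelled::catch(|| f(db))
        })
    }

    // Shape used by `symbol_search` and the highlighting methods: catch only.
    fn detached<T>(f: impl FnOnce() -> T + UnwindSafe) -> Cancellable<T> {
        Cancelled::catch(f)
    }

Everything routed through the first shape can assume an attached database for the whole call; callers of the second shape remain responsible for attaching wherever the trait solver actually needs it.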

View file

@@ -289,8 +289,7 @@ impl<'a> Converter<'a> {
                 let error_msg = if has_unterminated {
                     format!(
-                        "unknown literal prefix `{}` (note: check for unterminated string literal)",
-                        token_text
+                        "unknown literal prefix `{token_text}` (note: check for unterminated string literal)"
                     )
                 } else {
                     "unknown literal prefix".to_owned()