Switch to home-made db attaching infrastructure

Instead of using Salsa's, as we can no longer get a `dyn HirDatabase` from the `dyn salsa::Database` Salsa provides.
Chayim Refael Friedman 2025-10-05 09:55:50 +03:00
parent a7234f8b3a
commit c6ef51e550
52 changed files with 499 additions and 418 deletions
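
A minimal sketch (illustrative, not part of the diff) of the call-site pattern this commit converts to, using the helpers it adds to `hir-ty`: `attach_db` replaces `salsa::attach`, and `with_attached_db` hands back a `&dyn HirDatabase` directly instead of an `Option<&dyn salsa::Database>` that had to be unwrapped and downcast.

use hir_ty::{attach_db, db::HirDatabase, with_attached_db};

fn compute_with_db(db: &dyn HirDatabase) {
    // Attach the database for the duration of the closure (stored in a thread-local).
    attach_db(db, || {
        // Anywhere inside the closure, code can recover the database without
        // threading it through explicitly.
        with_attached_db(|db| {
            // `db` is already a `&dyn HirDatabase`; with Salsa's own attach it was a
            // `&dyn salsa::Database` that could no longer be turned into one.
            let _ = db;
        });
    });
}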

Cargo.lock (generated)
View file

@ -235,7 +235,6 @@ name = "cfg"
version = "0.0.0"
dependencies = [
"arbitrary",
"derive_arbitrary",
"expect-test",
"intern",
"oorandom",
@ -1615,7 +1614,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54acf3a685220b533e437e264e4d932cfbdc4cc7ec0cd232ed73c08d03b8a7ca"
dependencies = [
"fixedbitset",
"hashbrown 0.15.4",
"hashbrown 0.15.5",
"indexmap",
]
@ -2145,8 +2144,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=e257df12eabd566825ba53bb12d782560b9a4dcd#e257df12eabd566825ba53bb12d782560b9a4dcd"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27956164373aeec733ac24ff1736de8541234e3a8e7e6f916b28175b5752af3b"
dependencies = [
"boxcar",
"crossbeam-queue",
@ -2169,13 +2169,15 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=e257df12eabd566825ba53bb12d782560b9a4dcd#e257df12eabd566825ba53bb12d782560b9a4dcd"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ca3b9d6e47c08b5de4b218e0c5f7ec910b51bce6314e651c8e7b9d154d174da"
[[package]]
name = "salsa-macros"
version = "0.23.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=e257df12eabd566825ba53bb12d782560b9a4dcd#e257df12eabd566825ba53bb12d782560b9a4dcd"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6337b62f2968be6b8afa30017d7564ecbde6832ada47ed2261fb14d0fd402ff4"
dependencies = [
"proc-macro2",
"quote",

View file

@ -137,13 +137,12 @@ rayon = "1.10.0"
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "e257df12eabd566825ba53bb12d782560b9a4dcd", default-features = true, features = [
salsa = { version = "0.24.0", default-features = true, features = [
"rayon",
"salsa_unstable",
"macros",
] }
# salsa-macros = "0.23.0"
salsa-macros = { git = "https://github.com/salsa-rs/salsa.git", rev = "e257df12eabd566825ba53bb12d782560b9a4dcd" }
salsa-macros = "0.24.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }

View file

@ -23,11 +23,7 @@ intern.workspace = true
[dev-dependencies]
expect-test = "1.5.1"
oorandom = "11.1.5"
# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
# supports `arbitrary`. This way, we avoid feature unification.
arbitrary = "1.4.1"
derive_arbitrary = "1.4.1"
arbitrary = { version = "1.4.1", features = ["derive"] }
# local deps
syntax-bridge.workspace = true

View file

@ -47,7 +47,7 @@ impl fmt::Display for CfgAtom {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(test, derive(derive_arbitrary::Arbitrary))]
#[cfg_attr(test, derive(arbitrary::Arbitrary))]
pub enum CfgExpr {
Invalid,
Atom(CfgAtom),

View file

@ -1,6 +1,5 @@
//! Database used for testing `hir_def`.
use salsa::database::AsDynDatabase;
use std::{fmt, panic, sync::Mutex};
use base_db::{
@ -8,7 +7,7 @@ use base_db::{
SourceDatabase, SourceRoot, SourceRootId, SourceRootInput,
};
use hir_expand::{InFile, files::FilePosition};
use salsa::{Durability, database::AsDynDatabase};
use salsa::Durability;
use span::FileId;
use syntax::{AstNode, algo, ast};
use triomphe::Arc;
@ -304,8 +303,7 @@ impl TestDB {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
salsa::EventKind::WillExecute { database_key } => {
let ingredient = self
.as_dyn_database()
let ingredient = (self as &dyn salsa::Database)
.ingredient_debug_name(database_key.ingredient_index());
Some(ingredient.to_string())
}

View file

@ -36,7 +36,7 @@ fn check_fail(
error: impl FnOnce(ConstEvalError<'_>) -> bool,
) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
salsa::attach(&db, || match eval_goal(&db, file_id) {
crate::attach_db(&db, || match eval_goal(&db, file_id) {
Ok(_) => panic!("Expected fail, but it succeeded"),
Err(e) => {
assert!(error(simplify(e.clone())), "Actual error was: {}", pretty_print_err(e, &db))
@ -79,7 +79,7 @@ fn check_answer(
check: impl FnOnce(&[u8], &MemoryMap<'_>),
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let r = match eval_goal(&db, file_id) {
Ok(t) => t,
@ -2506,8 +2506,10 @@ fn enums() {
const GOAL: E = E::A;
"#,
);
let r = eval_goal(&db, file_id).unwrap();
assert_eq!(try_const_usize(&db, &r), Some(1));
crate::attach_db(&db, || {
let r = eval_goal(&db, file_id).unwrap();
assert_eq!(try_const_usize(&db, &r), Some(1));
})
}
#[test]

View file

@ -57,7 +57,7 @@ fn check_dyn_compatibility<'a>(
};
let mut osvs = FxHashSet::default();
let db = &db;
salsa::attach(db, || {
crate::attach_db(db, || {
_ = dyn_compatibility_with_callback(db, trait_id, &mut |osv| {
osvs.insert(match osv {
DynCompatibilityViolation::SizedSelf => SizedSelf,

View file

@ -400,7 +400,7 @@ fn pointer_kind<'db>(
Ok(Some(PointerKind::Thin))
}
}
TyKind::Tuple(subst) => match subst.iter().last() {
TyKind::Tuple(subst) => match subst.iter().next_back() {
None => Ok(Some(PointerKind::Thin)),
Some(ty) => pointer_kind(ty, ctx),
},

View file

@ -385,7 +385,7 @@ fn struct_tail_erasing_lifetimes<'a>(db: &'a dyn HirDatabase, pointee: Ty<'a>) -
}
}
TyKind::Tuple(tys) => {
if let Some(last_field_ty) = tys.iter().last() {
if let Some(last_field_ty) = tys.iter().next_back() {
struct_tail_erasing_lifetimes(db, last_field_ty)
} else {
pointee

View file

@ -79,7 +79,7 @@ fn eval_goal(
Some(adt_or_type_alias_id)
})
.unwrap();
salsa::attach(&db, || {
crate::attach_db(&db, || {
let interner = DbInterner::new_with(&db, None, None);
let goal_ty = match adt_or_type_alias_id {
Either::Left(adt_id) => crate::next_solver::Ty::new_adt(
@ -112,29 +112,34 @@ fn eval_expr(
);
let (db, file_id) = TestDB::with_single_file(&ra_fixture);
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let function_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
let name =
db.function_signature(x).name.display_no_db(file_id.edition(&db)).to_smolstr();
(name == "main").then_some(x)
}
_ => None,
})
.unwrap();
let hir_body = db.body(function_id.into());
let b = hir_body
.bindings()
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());
let goal_ty = infer.type_of_binding[b];
salsa::attach(&db, || db.layout_of_ty(goal_ty, db.trait_environment(function_id.into())))
crate::attach_db(&db, || {
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let function_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
let name = db
.function_signature(x)
.name
.display_no_db(file_id.edition(&db))
.to_smolstr();
(name == "main").then_some(x)
}
_ => None,
})
.unwrap();
let hir_body = db.body(function_id.into());
let b = hir_body
.bindings()
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());
let goal_ty = infer.type_of_binding[b];
db.layout_of_ty(goal_ty, db.trait_environment(function_id.into()))
})
}
#[track_caller]

View file

@ -114,6 +114,7 @@ pub use mapping::{
to_foreign_def_id, to_placeholder_idx, to_placeholder_idx_no_index,
};
pub use method_resolution::check_orphan_rules;
pub use next_solver::interner::{attach_db, attach_db_allow_change, with_attached_db};
pub use target_feature::TargetFeatures;
pub use traits::TraitEnvironment;
pub use utils::{

View file

@ -12,7 +12,7 @@ use crate::{
use super::{MirEvalError, interpret_mir};
fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), MirEvalError<'_>> {
salsa::attach(db, || {
crate::attach_db(db, || {
let module_id = db.module_for_file(file_id.file_id(db));
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
@ -56,7 +56,7 @@ fn check_pass_and_stdio(
) {
let _tracing = setup_tracing();
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let x = eval_main(&db, file_id);
match x {
@ -102,7 +102,7 @@ fn check_pass_and_stdio(
fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic: &str) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err();
assert_eq!(
@ -117,7 +117,7 @@ fn check_error_with(
expect_err: impl FnOnce(MirEvalError<'_>) -> bool,
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err();
assert!(expect_err(e));

View file

@ -11,21 +11,24 @@ fn lower_mir(
) -> FxHashMap<String, Result<Arc<MirBody>, ()>> {
let _tracing = setup_tracing();
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let funcs = scope.declarations().filter_map(|x| match x {
hir_def::ModuleDefId::FunctionId(it) => Some(it),
_ => None,
});
funcs
.map(|func| {
let name = db.function_signature(func).name.display(&db, Edition::CURRENT).to_string();
let mir = db.mir_body(func.into());
(name, mir.map_err(drop))
})
.collect()
crate::attach_db(&db, || {
let file_id = *file_ids.last().unwrap();
let module_id = db.module_for_file(file_id.file_id(&db));
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
let funcs = scope.declarations().filter_map(|x| match x {
hir_def::ModuleDefId::FunctionId(it) => Some(it),
_ => None,
});
funcs
.map(|func| {
let name =
db.function_signature(func).name.display(&db, Edition::CURRENT).to_string();
let mir = db.mir_body(func.into());
(name, mir.map_err(drop))
})
.collect()
})
}
#[test]

View file

@ -41,13 +41,12 @@ impl<'db> Const<'db> {
}
pub fn inner(&self) -> &WithCachedTypeInfo<ConstKind<'db>> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Const<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
pub fn error(interner: DbInterner<'db>) -> Self {
@ -197,21 +196,19 @@ pub struct Valtree<'db> {
impl<'db> Valtree<'db> {
pub fn new(bytes: ConstBytes<'db>) -> Self {
salsa::with_attached_database(|db| unsafe {
crate::with_attached_db(|db| unsafe {
// SAFETY: ¯\_(ツ)_/¯
std::mem::transmute(Valtree::new_(db, bytes))
})
.unwrap()
}
pub fn inner(&self) -> &ConstBytes<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.bytes_(db);
// SAFETY: The caller already has access to a `Valtree<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}

View file

@ -1,6 +1,8 @@
//! Things related to the Interner in the next-trait-solver.
#![allow(unused)]
pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db};
use base_db::Crate;
use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variances};
use hir_def::lang_item::LangItem;
@ -127,11 +129,10 @@ macro_rules! _interned_vec_nolifetime_salsa {
pub fn inner(&self) -> &smallvec::SmallVec<[$ty; 2]> {
// SAFETY: ¯\_(ツ)_/¯
salsa::with_attached_database(|db| {
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -230,11 +231,10 @@ macro_rules! _interned_vec_db {
pub fn inner(&self) -> &smallvec::SmallVec<[$ty<'db>; 2]> {
// SAFETY: ¯\_(ツ)_/¯
salsa::with_attached_database(|db| {
$crate::with_attached_db(|db| {
let inner = self.inner_(db);
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -285,12 +285,11 @@ unsafe impl Sync for DbInterner<'_> {}
impl<'db> DbInterner<'db> {
// FIXME(next-solver): remove this method
pub fn conjure() -> DbInterner<'db> {
salsa::with_attached_database(|db| DbInterner {
db: unsafe { std::mem::transmute::<&dyn salsa::Database, &'db dyn HirDatabase>(db) },
crate::with_attached_db(|db| DbInterner {
db: unsafe { std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(db) },
krate: None,
block: None,
})
.expect("db is expected to be attached")
}
pub fn new_with(
@ -583,12 +582,11 @@ impl AdtDef {
}
pub fn inner(&self) -> &AdtDefInner {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.data_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
pub fn is_enum(&self) -> bool {
@ -706,21 +704,20 @@ impl<'db> inherent::AdtDef<DbInterner<'db>> for AdtDef {
impl fmt::Debug for AdtDef {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
salsa::with_attached_database(|db| match self.inner().id {
crate::with_attached_db(|db| match self.inner().id {
AdtId::StructId(struct_id) => {
let data = db.as_view::<dyn HirDatabase>().struct_signature(struct_id);
let data = db.struct_signature(struct_id);
f.write_str(data.name.as_str())
}
AdtId::UnionId(union_id) => {
let data = db.as_view::<dyn HirDatabase>().union_signature(union_id);
let data = db.union_signature(union_id);
f.write_str(data.name.as_str())
}
AdtId::EnumId(enum_id) => {
let data = db.as_view::<dyn HirDatabase>().enum_signature(enum_id);
let data = db.enum_signature(enum_id);
f.write_str(data.name.as_str())
}
})
.unwrap_or_else(|| f.write_str(&format!("AdtDef({:?})", self.inner().id)))
}
}
@ -776,13 +773,12 @@ impl<'db> Pattern<'db> {
}
pub fn inner(&self) -> &PatternKind<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -1018,17 +1014,7 @@ impl<'db> rustc_type_ir::Interner for DbInterner<'db> {
self,
f: impl FnOnce(&mut rustc_type_ir::search_graph::GlobalCache<Self>) -> R,
) -> R {
salsa::with_attached_database(|db| {
tls_cache::with_cache(
unsafe {
std::mem::transmute::<&dyn HirDatabase, &'db dyn HirDatabase>(
db.as_view::<dyn HirDatabase>(),
)
},
f,
)
})
.unwrap()
tls_cache::with_cache(self.db, f)
}
fn canonical_param_env_cache_get_or_insert<R>(
@ -2104,6 +2090,117 @@ TrivialTypeTraversalImpls! {
Placeholder<BoundVar>,
}
mod tls_db {
use std::{cell::Cell, ptr::NonNull};
use crate::db::HirDatabase;
struct Attached {
database: Cell<Option<NonNull<dyn HirDatabase>>>,
}
impl Attached {
#[inline]
fn attach<R>(&self, db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
struct DbGuard<'s> {
state: Option<&'s Attached>,
}
impl<'s> DbGuard<'s> {
#[inline]
fn new(attached: &'s Attached, db: &dyn HirDatabase) -> Self {
match attached.database.get() {
Some(current_db) => {
let new_db = NonNull::from(db);
if !std::ptr::addr_eq(current_db.as_ptr(), new_db.as_ptr()) {
panic!(
"Cannot change attached database. This is likely a bug.\n\
If this is not a bug, you can use `attach_db_allow_change()`."
);
}
Self { state: None }
}
None => {
// Otherwise, set the database.
attached.database.set(Some(NonNull::from(db)));
Self { state: Some(attached) }
}
}
}
}
impl Drop for DbGuard<'_> {
#[inline]
fn drop(&mut self) {
// Reset database to null if we did anything in `DbGuard::new`.
if let Some(attached) = self.state {
attached.database.set(None);
}
}
}
let _guard = DbGuard::new(self, db);
op()
}
#[inline]
fn attach_allow_change<R>(&self, db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
struct DbGuard<'s> {
state: &'s Attached,
prev: Option<NonNull<dyn HirDatabase>>,
}
impl<'s> DbGuard<'s> {
#[inline]
fn new(attached: &'s Attached, db: &dyn HirDatabase) -> Self {
let prev = attached.database.replace(Some(NonNull::from(db)));
Self { state: attached, prev }
}
}
impl Drop for DbGuard<'_> {
#[inline]
fn drop(&mut self) {
self.state.database.set(self.prev);
}
}
let _guard = DbGuard::new(self, db);
op()
}
#[inline]
fn with<R>(&self, op: impl FnOnce(&dyn HirDatabase) -> R) -> R {
let db = self.database.get().expect("Tried to use the attached db, but no db is attached");
// SAFETY: The db is attached, so it must be valid.
op(unsafe { db.as_ref() })
}
}
thread_local! {
static GLOBAL_DB: Attached = const { Attached { database: Cell::new(None) } };
}
#[inline]
pub fn attach_db<R>(db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
GLOBAL_DB.with(|global_db| global_db.attach(db, op))
}
#[inline]
pub fn attach_db_allow_change<R>(db: &dyn HirDatabase, op: impl FnOnce() -> R) -> R {
GLOBAL_DB.with(|global_db| global_db.attach_allow_change(db, op))
}
#[inline]
pub fn with_attached_db<R>(op: impl FnOnce(&dyn HirDatabase) -> R) -> R {
GLOBAL_DB.with(
#[inline]
|a| a.with(op),
)
}
}
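// A minimal usage sketch of the helpers above (illustrative, not part of this
// commit): re-attaching the same database is a no-op, attaching a different one
// panics, and `attach_db_allow_change` swaps the pointer in and restores the
// previous one when its guard drops.
//
//     fn demo(db1: &dyn HirDatabase, db2: &dyn HirDatabase) {
//         attach_db(db1, || {
//             attach_db(db1, || with_attached_db(|db| { let _ = db; })); // same db: fine
//             attach_db_allow_change(db2, || with_attached_db(|db| { let _ = db; }));
//         }); // thread-local reset to `None` here
//     }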
mod tls_cache {
use crate::db::HirDatabase;

View file

@ -16,10 +16,10 @@ impl<'db> IrPrint<ty::AliasTy<Self>> for DbInterner<'db> {
}
fn print_debug(t: &ty::AliasTy<Self>, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
salsa::with_attached_database(|db| match t.def_id {
crate::with_attached_db(|db| match t.def_id {
SolverDefId::TypeAliasId(id) => fmt.write_str(&format!(
"AliasTy({:?}[{:?}])",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.args
)),
SolverDefId::InternedOpaqueTyId(id) => {
@ -27,7 +27,6 @@ impl<'db> IrPrint<ty::AliasTy<Self>> for DbInterner<'db> {
}
_ => panic!("Expected TypeAlias or OpaqueTy."),
})
.unwrap_or_else(|| fmt.write_str(&format!("AliasTy({:?}[{:?}])", t.def_id, t.args)))
}
}
@ -37,10 +36,10 @@ impl<'db> IrPrint<ty::AliasTerm<Self>> for DbInterner<'db> {
}
fn print_debug(t: &ty::AliasTerm<Self>, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
salsa::with_attached_database(|db| match t.def_id {
crate::with_attached_db(|db| match t.def_id {
SolverDefId::TypeAliasId(id) => fmt.write_str(&format!(
"AliasTerm({:?}[{:?}])",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.args
)),
SolverDefId::InternedOpaqueTyId(id) => {
@ -48,7 +47,6 @@ impl<'db> IrPrint<ty::AliasTerm<Self>> for DbInterner<'db> {
}
_ => panic!("Expected TypeAlias or OpaqueTy."),
})
.unwrap_or_else(|| fmt.write_str(&format!("AliasTerm({:?}[{:?}])", t.def_id, t.args)))
}
}
impl<'db> IrPrint<ty::TraitRef<Self>> for DbInterner<'db> {
@ -57,29 +55,25 @@ impl<'db> IrPrint<ty::TraitRef<Self>> for DbInterner<'db> {
}
fn print_debug(t: &ty::TraitRef<Self>, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let trait_ = t.def_id.0;
let self_ty = &t.args.as_slice()[0];
let trait_args = &t.args.as_slice()[1..];
if trait_args.is_empty() {
let db = db.zalsa().views().downcaster_for::<dyn HirDatabase>();
db.downcast_unchecked(db)
fmt.write_str(&format!(
"{:?}: {}",
self_ty,
db.as_dyn_database().as_dyn_database(),
db.as_view::<dyn HirDatabase>().trait_signature(trait_).name.as_str()
db.trait_signature(trait_).name.as_str()
))
} else {
fmt.write_str(&format!(
"{:?}: {}<{:?}>",
self_ty,
db.as_view::<dyn HirDatabase>().trait_signature(trait_).name.as_str(),
db.trait_signature(trait_).name.as_str(),
trait_args
))
}
})
.unwrap_or_else(|| fmt.write_str(&format!("TraitRef({:?}[{:?}])", t.def_id, t.args)))
}
}
impl<'db> IrPrint<ty::TraitPredicate<Self>> for DbInterner<'db> {
@ -121,17 +115,14 @@ impl<'db> IrPrint<ty::ExistentialTraitRef<Self>> for DbInterner<'db> {
t: &ty::ExistentialTraitRef<Self>,
fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let trait_ = t.def_id.0;
fmt.write_str(&format!(
"ExistentialTraitRef({:?}[{:?}])",
db.as_view::<dyn HirDatabase>().trait_signature(trait_).name.as_str(),
db.trait_signature(trait_).name.as_str(),
t.args
))
})
.unwrap_or_else(|| {
fmt.write_str(&format!("ExistentialTraitRef({:?}[{:?}])", t.def_id, t.args))
})
}
}
impl<'db> IrPrint<ty::ExistentialProjection<Self>> for DbInterner<'db> {
@ -146,24 +137,18 @@ impl<'db> IrPrint<ty::ExistentialProjection<Self>> for DbInterner<'db> {
t: &ty::ExistentialProjection<Self>,
fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let id = match t.def_id {
SolverDefId::TypeAliasId(id) => id,
_ => panic!("Expected trait."),
};
fmt.write_str(&format!(
"ExistentialProjection(({:?}[{:?}]) -> {:?})",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.args,
t.term
))
})
.unwrap_or_else(|| {
fmt.write_str(&format!(
"ExistentialProjection(({:?}[{:?}]) -> {:?})",
t.def_id, t.args, t.term
))
})
}
}
impl<'db> IrPrint<ty::ProjectionPredicate<Self>> for DbInterner<'db> {
@ -178,24 +163,18 @@ impl<'db> IrPrint<ty::ProjectionPredicate<Self>> for DbInterner<'db> {
t: &ty::ProjectionPredicate<Self>,
fmt: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let id = match t.projection_term.def_id {
SolverDefId::TypeAliasId(id) => id,
_ => panic!("Expected trait."),
};
fmt.write_str(&format!(
"ProjectionPredicate(({:?}[{:?}]) -> {:?})",
db.as_view::<dyn HirDatabase>().type_alias_signature(id).name.as_str(),
db.type_alias_signature(id).name.as_str(),
t.projection_term.args,
t.term
))
})
.unwrap_or_else(|| {
fmt.write_str(&format!(
"ProjectionPredicate(({:?}[{:?}]) -> {:?})",
t.projection_term.def_id, t.projection_term.args, t.term
))
})
}
}
impl<'db> IrPrint<ty::NormalizesTo<Self>> for DbInterner<'db> {

View file

@ -24,12 +24,11 @@ impl<'db> PredefinedOpaques<'db> {
}
pub fn inner(&self) -> &PredefinedOpaquesData<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}
@ -96,12 +95,11 @@ impl<'db> ExternalConstraints<'db> {
}
pub fn inner(&self) -> &ExternalConstraintsData<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: ¯\_(ツ)_/¯
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}

View file

@ -232,13 +232,12 @@ impl<'db> Predicate<'db> {
}
pub fn inner(&self) -> &WithCachedTypeInfo<Binder<'db, PredicateKind<'db>>> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Predicate<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
/// Flips the polarity of a Predicate.
@ -303,13 +302,12 @@ impl<'db> Clauses<'db> {
}
pub fn inner(&self) -> &InternedClausesWrapper<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.inner_(db);
// SAFETY: The caller already has access to a `Clauses<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
}

View file

@ -35,13 +35,12 @@ impl<'db> Region<'db> {
}
pub fn inner(&self) -> &RegionKind<'db> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = self.kind_(db);
// SAFETY: The caller already has access to a `Region<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute::<&RegionKind<'_>, &RegionKind<'db>>(inner) }
})
.unwrap()
}
pub fn new_early_param(

View file

@ -68,13 +68,12 @@ impl<'db> Ty<'db> {
}
pub fn inner(&self) -> &WithCachedTypeInfo<TyKind<'db>> {
salsa::with_attached_database(|db| {
crate::with_attached_db(|db| {
let inner = &self.kind_(db).0;
// SAFETY: The caller already has access to a `Ty<'db>`, so borrowchecking will
// make sure that our returned value is valid for the lifetime `'db`.
unsafe { std::mem::transmute(inner) }
})
.unwrap()
}
pub fn new_adt(interner: DbInterner<'db>, adt_id: AdtId, args: GenericArgs<'db>) -> Self {

View file

@ -486,7 +486,7 @@ pub fn sizedness_constraint_for_ty<'db>(
Tuple(tys) => tys
.into_iter()
.last()
.next_back()
.and_then(|ty| sizedness_constraint_for_ty(interner, sizedness, ty)),
Adt(adt, args) => {

View file

@ -10,7 +10,7 @@ use base_db::{
use hir_def::{ModuleId, db::DefDatabase, nameres::crate_def_map};
use hir_expand::EditionedFileId;
use rustc_hash::FxHashMap;
use salsa::{AsDynDatabase, Durability};
use salsa::Durability;
use span::FileId;
use syntax::TextRange;
use test_utils::extract_annotations;
@ -191,8 +191,7 @@ impl TestDB {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
salsa::EventKind::WillExecute { database_key } => {
let ingredient = self
.as_dyn_database()
let ingredient = (self as &dyn salsa::Database)
.ingredient_debug_name(database_key.ingredient_index());
Some(ingredient.to_string())
}

View file

@ -79,7 +79,7 @@ fn check_impl(
let _tracing = setup_tracing();
let (db, files) = TestDB::with_many_files(ra_fixture);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let mut had_annotations = false;
let mut mismatches = FxHashMap::default();
let mut types = FxHashMap::default();
@ -283,7 +283,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let _tracing = setup_tracing();
let (db, file_id) = TestDB::with_single_file(content);
salsa::attach(&db, || {
crate::attach_db(&db, || {
let mut buf = String::new();
let mut infer_def = |inference_result: Arc<InferenceResult<'_>>,
@ -558,15 +558,17 @@ fn salsa_bug() {
",
);
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
crate::attach_db(&db, || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
});
});
});
@ -597,15 +599,17 @@ fn salsa_bug() {
db.set_file_text(pos.file_id.file_id(&db), new_text);
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
crate::attach_db(&db, || {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::StaticId(it) => it.into(),
_ => return,
});
});
});
})
}

View file

@ -18,96 +18,105 @@ use super::{setup_tracing, visit_module};
fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let _tracing = setup_tracing();
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
crate::attach_db(&db, || {
let module = db.module_for_file(file_id.file_id(&db));
let def_map = module.def_map(&db);
let mut defs = Vec::new();
visit_module(&db, def_map, module.local_id, &mut |it| defs.push(it));
let mut defs = Vec::new();
visit_module(&db, def_map, module.local_id, &mut |it| defs.push(it));
let mut captures_info = Vec::new();
for def in defs {
let def = match def {
hir_def::ModuleDefId::FunctionId(it) => it.into(),
hir_def::ModuleDefId::EnumVariantId(it) => it.into(),
hir_def::ModuleDefId::ConstId(it) => it.into(),
hir_def::ModuleDefId::StaticId(it) => it.into(),
_ => continue,
};
let infer = db.infer(def);
let db = &db;
captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
let closure = db.lookup_intern_closure(*closure_id);
let source_map = db.body_with_source_map(closure.0).1;
let closure_text_range = source_map
.expr_syntax(closure.1)
.expect("failed to map closure to SyntaxNode")
.value
.text_range();
captures.iter().map(move |capture| {
fn text_range<N: AstNode>(
db: &TestDB,
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
) -> TextRange {
let root = syntax.file_syntax(db);
syntax.value.to_node(&root).syntax().text_range()
}
// FIXME: Deduplicate this with hir::Local::sources().
let (body, source_map) = db.body_with_source_map(closure.0);
let local_text_range = match body.self_param.zip(source_map.self_param_syntax()) {
Some((param, source)) if param == capture.local() => {
format!("{:?}", text_range(db, source))
}
_ => source_map
.patterns_for_binding(capture.local())
.iter()
.map(|&definition| {
text_range(db, source_map.pat_syntax(definition).unwrap())
})
.map(|it| format!("{it:?}"))
.join(", "),
};
let place = capture.display_place(closure.0, db);
let capture_ty = salsa::attach(db, || {
capture
.ty
.skip_binder()
.display_test(db, DisplayTarget::from_crate(db, module.krate()))
.to_string()
});
let spans = capture
.spans()
.iter()
.flat_map(|span| match *span {
MirSpan::ExprId(expr) => {
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
let mut captures_info = Vec::new();
for def in defs {
let def = match def {
hir_def::ModuleDefId::FunctionId(it) => it.into(),
hir_def::ModuleDefId::EnumVariantId(it) => it.into(),
hir_def::ModuleDefId::ConstId(it) => it.into(),
hir_def::ModuleDefId::StaticId(it) => it.into(),
_ => continue,
};
let infer = db.infer(def);
let db = &db;
captures_info.extend(infer.closure_info.iter().flat_map(
|(closure_id, (captures, _))| {
let closure = db.lookup_intern_closure(*closure_id);
let source_map = db.body_with_source_map(closure.0).1;
let closure_text_range = source_map
.expr_syntax(closure.1)
.expect("failed to map closure to SyntaxNode")
.value
.text_range();
captures.iter().map(move |capture| {
fn text_range<N: AstNode>(
db: &TestDB,
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
) -> TextRange {
let root = syntax.file_syntax(db);
syntax.value.to_node(&root).syntax().text_range()
}
MirSpan::PatId(pat) => {
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
}
MirSpan::BindingId(binding) => source_map
.patterns_for_binding(binding)
// FIXME: Deduplicate this with hir::Local::sources().
let (body, source_map) = db.body_with_source_map(closure.0);
let local_text_range =
match body.self_param.zip(source_map.self_param_syntax()) {
Some((param, source)) if param == capture.local() => {
format!("{:?}", text_range(db, source))
}
_ => source_map
.patterns_for_binding(capture.local())
.iter()
.map(|&definition| {
text_range(db, source_map.pat_syntax(definition).unwrap())
})
.map(|it| format!("{it:?}"))
.join(", "),
};
let place = capture.display_place(closure.0, db);
let capture_ty = capture
.ty
.skip_binder()
.display_test(db, DisplayTarget::from_crate(db, module.krate()))
.to_string();
let spans = capture
.spans()
.iter()
.map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
.collect(),
MirSpan::SelfParam => {
vec![text_range(db, source_map.self_param_syntax().unwrap())]
}
MirSpan::Unknown => Vec::new(),
.flat_map(|span| match *span {
MirSpan::ExprId(expr) => {
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
}
MirSpan::PatId(pat) => {
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
}
MirSpan::BindingId(binding) => source_map
.patterns_for_binding(binding)
.iter()
.map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
.collect(),
MirSpan::SelfParam => {
vec![text_range(db, source_map.self_param_syntax().unwrap())]
}
MirSpan::Unknown => Vec::new(),
})
.sorted_by_key(|it| it.start())
.map(|it| format!("{it:?}"))
.join(",");
(
closure_text_range,
local_text_range,
spans,
place,
capture_ty,
capture.kind(),
)
})
.sorted_by_key(|it| it.start())
.map(|it| format!("{it:?}"))
.join(",");
},
));
}
captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
(closure_text_range.start(), local_text_range.clone())
});
(closure_text_range, local_text_range, spans, place, capture_ty, capture.kind())
})
}));
}
captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
(closure_text_range.start(), local_text_range.clone())
});
let rendered = captures_info
let rendered = captures_info
.iter()
.map(|(closure_text_range, local_text_range, spans, place, capture_ty, capture_kind)| {
format!(
@ -116,7 +125,8 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
})
.join("\n");
expect.assert_eq(&rendered);
expect.assert_eq(&rendered);
})
}
#[test]

View file

@ -710,8 +710,8 @@ fn execute_assert_events(
required: &[(&str, usize)],
expect: Expect,
) {
let (executed, events) = db.log_executed(f);
salsa::attach(db, || {
crate::attach_db(db, || {
let (executed, events) = db.log_executed(f);
for (event, count) in required {
let n = executed.iter().filter(|it| it.contains(event)).count();
assert_eq!(

View file

@ -1001,84 +1001,86 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
// ));
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let mut defs: Vec<GenericDefId> = Vec::new();
let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db);
crate::tests::visit_module(&db, def_map, module.local_id, &mut |it| {
defs.push(match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::AdtId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::TraitId(it) => it.into(),
ModuleDefId::TypeAliasId(it) => it.into(),
_ => return,
})
});
let defs = defs
.into_iter()
.filter_map(|def| {
Some((
def,
match def {
GenericDefId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::EnumId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::StructId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::UnionId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TraitId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TypeAliasId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::ImplId(_) => return None,
GenericDefId::ConstId(_) => return None,
GenericDefId::StaticId(_) => return None,
},
))
})
.sorted_by_key(|(_, n)| n.syntax().text_range().start());
let mut res = String::new();
for (def, name) in defs {
let Some(variances) = db.variances_of(def) else {
continue;
};
format_to!(
res,
"{name}[{}]\n",
generics(&db, def)
.iter()
.map(|(_, param)| match param {
GenericParamDataRef::TypeParamData(type_param_data) => {
type_param_data.name.as_ref().unwrap()
}
GenericParamDataRef::ConstParamData(const_param_data) =>
&const_param_data.name,
GenericParamDataRef::LifetimeParamData(lifetime_param_data) => {
&lifetime_param_data.name
}
})
.zip_eq(&*variances)
.format_with(", ", |(name, var), f| f(&format_args!(
"{}: {var}",
name.as_str()
)))
);
}
crate::attach_db(&db, || {
let mut defs: Vec<GenericDefId> = Vec::new();
let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db);
crate::tests::visit_module(&db, def_map, module.local_id, &mut |it| {
defs.push(match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::AdtId(it) => it.into(),
ModuleDefId::ConstId(it) => it.into(),
ModuleDefId::TraitId(it) => it.into(),
ModuleDefId::TypeAliasId(it) => it.into(),
_ => return,
})
});
let defs = defs
.into_iter()
.filter_map(|def| {
Some((
def,
match def {
GenericDefId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::EnumId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::StructId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::AdtId(AdtId::UnionId(it)) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TraitId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::TypeAliasId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.name().unwrap()
}
GenericDefId::ImplId(_) => return None,
GenericDefId::ConstId(_) => return None,
GenericDefId::StaticId(_) => return None,
},
))
})
.sorted_by_key(|(_, n)| n.syntax().text_range().start());
let mut res = String::new();
for (def, name) in defs {
let Some(variances) = db.variances_of(def) else {
continue;
};
format_to!(
res,
"{name}[{}]\n",
generics(&db, def)
.iter()
.map(|(_, param)| match param {
GenericParamDataRef::TypeParamData(type_param_data) => {
type_param_data.name.as_ref().unwrap()
}
GenericParamDataRef::ConstParamData(const_param_data) =>
&const_param_data.name,
GenericParamDataRef::LifetimeParamData(lifetime_param_data) => {
&lifetime_param_data.name
}
})
.zip_eq(&*variances)
.format_with(", ", |(name, var), f| f(&format_args!(
"{}: {var}",
name.as_str()
)))
);
}
expected.assert_eq(&res);
expected.assert_eq(&res);
})
}
}

View file

@ -157,7 +157,7 @@ pub use {
tt,
},
hir_ty::{
CastError, DropGlue, FnAbi, PointerCast, Variance,
CastError, DropGlue, FnAbi, PointerCast, Variance, attach_db, attach_db_allow_change,
consteval::ConstEvalError,
diagnostics::UnsafetyReason,
display::{ClosureStyle, DisplayTarget, HirDisplay, HirDisplayError, HirWrite},

View file

@ -14,7 +14,6 @@ use crate::{
db::HirDatabase,
semantics::{PathResolution, PathResolutionPerNs},
};
use base_db::salsa;
use either::Either;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId,
@ -1637,7 +1636,7 @@ fn resolve_hir_path_(
Some(unresolved) => resolver
.generic_def()
.and_then(|def| {
salsa::attach(db, || {
hir_ty::attach_db(db, || {
hir_ty::associated_type_shorthand_candidates(
db,
def,

View file

@ -5,7 +5,7 @@ use hir::{Semantics, db::HirDatabase, setup_tracing};
use ide_db::{
EditionedFileId, FileRange, RootDatabase, SnippetCap,
assists::ExprFillDefaultMode,
base_db::{SourceDatabase, salsa},
base_db::SourceDatabase,
imports::insert_use::{ImportGranularity, InsertUseConfig},
source_change::FileSystemEdit,
};
@ -109,7 +109,7 @@ fn assists(
resolve: AssistResolveStrategy,
range: ide_db::FileRange,
) -> Vec<Assist> {
salsa::attach(db, || {
hir::attach_db(db, || {
HirDatabase::zalsa_register_downcaster(db);
crate::assists(db, config, resolve, range)
})
@ -332,7 +332,7 @@ fn check_with_config(
_ => AssistResolveStrategy::All,
};
let mut acc = Assists::new(&ctx, resolve);
salsa::attach(&db, || {
hir::attach_db(&db, || {
HirDatabase::zalsa_register_downcaster(&db);
handler(&mut acc, &ctx);
});

View file

@ -1,7 +1,6 @@
//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;
use base_db::salsa;
use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
use itertools::Either;
@ -86,7 +85,7 @@ pub(super) fn expand_and_analyze<'db>(
let original_offset = expansion.original_offset + relative_offset;
let token = expansion.original_file.token_at_offset(original_offset).left_biased()?;
salsa::attach(sema.db, || analyze(sema, expansion, original_token, &token)).map(
hir::attach_db(sema.db, || analyze(sema, expansion, original_token, &token)).map(
|(analysis, expected, qualifier_ctx)| AnalysisResult {
analysis,
expected,

View file

@ -1,4 +1,3 @@
use base_db::salsa;
use expect_test::{Expect, expect};
use hir::HirDisplay;
@ -11,12 +10,12 @@ fn check_expected_type_and_name(#[rust_analyzer::rust_fixture] ra_fixture: &str,
let (db, pos) = position(ra_fixture);
let config = TEST_CONFIG;
let (completion_context, _analysis) =
salsa::attach(&db, || CompletionContext::new(&db, pos, &config).unwrap());
hir::attach_db(&db, || CompletionContext::new(&db, pos, &config).unwrap());
let ty = completion_context
.expected_type
.map(|t| {
salsa::attach(&db, || {
hir::attach_db(&db, || {
t.display_test(&db, completion_context.krate.to_display_target(&db)).to_string()
})
})

View file

@ -24,7 +24,7 @@ mod type_pos;
mod use_tree;
mod visibility;
use base_db::{SourceDatabase, salsa};
use base_db::SourceDatabase;
use expect_test::Expect;
use hir::db::HirDatabase;
use hir::{PrefixKind, setup_tracing};
@ -244,7 +244,7 @@ pub(crate) fn check_edit_with_config(
let ra_fixture_after = trim_indent(ra_fixture_after);
let (db, position) = position(ra_fixture_before);
let completions: Vec<CompletionItem> =
salsa::attach(&db, || crate::completions(&db, &config, position, None).unwrap());
hir::attach_db(&db, || crate::completions(&db, &config, position, None).unwrap());
let (completion,) = completions
.iter()
.filter(|it| it.lookup() == what)
@ -307,7 +307,7 @@ pub(crate) fn get_all_items(
trigger_character: Option<char>,
) -> Vec<CompletionItem> {
let (db, position) = position(code);
let res = salsa::attach(&db, || {
let res = hir::attach_db(&db, || {
HirDatabase::zalsa_register_downcaster(&db);
crate::completions(&db, &config, position, trigger_character)
})

View file

@ -1,4 +1,3 @@
use base_db::salsa;
use expect_test::{Expect, expect};
use crate::{
@ -20,7 +19,7 @@ fn check_with_config(
let (ctx, analysis) = crate::context::CompletionContext::new(&db, position, &config).unwrap();
let mut acc = crate::completions::Completions::default();
salsa::attach(ctx.db, || {
hir::attach_db(ctx.db, || {
if let CompletionAnalysis::Name(NameContext { kind: NameKind::IdentPat(pat_ctx), .. }) =
&analysis
{

View file

@ -134,7 +134,7 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar
let _p = tracing::info_span!("library_symbols").entered();
// We call this without attaching because this runs in parallel, so we need to attach here.
salsa::attach(db, || {
hir::attach_db(db, || {
let mut symbol_collector = SymbolCollector::new(db);
db.source_root_crates(source_root_id)
@ -153,7 +153,7 @@ fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex>
let _p = tracing::info_span!("module_symbols").entered();
// We call this without attaching because this runs in parallel, so we need to attach here.
salsa::attach(db, || Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module))))
hir::attach_db(db, || Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module))))
}
pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> {

View file

@ -473,7 +473,7 @@ mod tests {
frange.range,
"selection is not an expression(yet contained in one)"
);
let name = salsa::attach(sema.db, || NameGenerator::default().for_variable(&expr, &sema));
let name = hir::attach_db(sema.db, || NameGenerator::default().for_variable(&expr, &sema));
assert_eq!(&name, expected);
}

View file

@ -6,7 +6,7 @@ use hir::setup_tracing;
use ide_db::{
LineIndexDatabase, RootDatabase,
assists::{AssistResolveStrategy, ExprFillDefaultMode},
base_db::{SourceDatabase, salsa},
base_db::SourceDatabase,
};
use itertools::Itertools;
use stdx::trim_indent;
@ -74,7 +74,7 @@ fn check_nth_fix_with_config(
let after = trim_indent(ra_fixture_after);
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let diagnostic = salsa::attach(&db, || {
let diagnostic = hir::attach_db(&db, || {
super::full_diagnostics(
&db,
&config,
@ -129,7 +129,7 @@ pub(crate) fn check_has_fix(
let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
let mut conf = DiagnosticsConfig::test_sample();
conf.expr_fill_default = ExprFillDefaultMode::Default;
let fix = salsa::attach(&db, || {
let fix = hir::attach_db(&db, || {
super::full_diagnostics(
&db,
&conf,
@ -170,7 +170,7 @@ pub(crate) fn check_has_fix(
/// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let (db, file_position) = RootDatabase::with_position(ra_fixture);
let diagnostic = salsa::attach(&db, || {
let diagnostic = hir::attach_db(&db, || {
super::full_diagnostics(
&db,
&DiagnosticsConfig::test_sample(),
@ -212,7 +212,7 @@ pub(crate) fn check_diagnostics_with_config(
.iter()
.copied()
.flat_map(|file_id| {
salsa::attach(&db, || {
hir::attach_db(&db, || {
super::full_diagnostics(
&db,
&config,
@ -288,12 +288,12 @@ fn test_disabled_diagnostics() {
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
let file_id = file_id.file_id(&db);
let diagnostics = salsa::attach(&db, || {
let diagnostics = hir::attach_db(&db, || {
super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id)
});
assert!(diagnostics.is_empty());
let diagnostics = salsa::attach(&db, || {
let diagnostics = hir::attach_db(&db, || {
super::full_diagnostics(
&db,
&DiagnosticsConfig::test_sample(),

View file

@ -791,8 +791,6 @@ impl PatternIterator {
#[cfg(test)]
mod tests {
use ide_db::base_db::salsa;
use crate::{MatchFinder, SsrRule};
#[test]
@ -801,7 +799,7 @@ mod tests {
let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
let (db, position, selections) = crate::tests::single_file(input);
salsa::attach(&db, || {
hir::attach_db(&db, || {
let position = ide_db::FilePosition {
file_id: position.file_id.file_id(&db),
offset: position.offset,

View file

@ -1,7 +1,7 @@
//! This module is responsible for resolving paths within rules.
use hir::AsAssocItem;
use ide_db::{FxHashMap, base_db::salsa};
use ide_db::FxHashMap;
use parsing::Placeholder;
use syntax::{
SmolStr, SyntaxKind, SyntaxNode, SyntaxToken,
@ -48,7 +48,7 @@ impl<'db> ResolvedRule<'db> {
resolution_scope: &ResolutionScope<'db>,
index: usize,
) -> Result<ResolvedRule<'db>, SsrError> {
salsa::attach(resolution_scope.scope.db, || {
hir::attach_db(resolution_scope.scope.db, || {
let resolver = Resolver {
resolution_scope,
placeholders_by_stand_in: rule.placeholders_by_stand_in,

View file

@ -2,10 +2,7 @@ use expect_test::{Expect, expect};
use hir::{FilePosition, FileRange};
use ide_db::{
EditionedFileId, FxHashSet,
base_db::{
SourceDatabase,
salsa::{self, Durability},
},
base_db::{SourceDatabase, salsa::Durability},
};
use test_utils::RangeOrOffset;
use triomphe::Arc;
@ -101,7 +98,7 @@ fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
let (db, position, selections) = single_file(input);
salsa::attach(&db, || {
hir::attach_db(&db, || {
let position = ide_db::FilePosition {
file_id: position.file_id.file_id(&db),
offset: position.offset,
@ -149,7 +146,7 @@ fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: EditionedFile
fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
let (db, position, selections) = single_file(code);
salsa::attach(&db, || {
hir::attach_db(&db, || {
let mut match_finder = MatchFinder::in_context(
&db,
ide_db::FilePosition {
@ -177,7 +174,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
fn assert_no_match(pattern: &str, code: &str) {
let (db, position, selections) = single_file(code);
salsa::attach(&db, || {
hir::attach_db(&db, || {
let mut match_finder = MatchFinder::in_context(
&db,
ide_db::FilePosition {

View file

@ -4,7 +4,6 @@ use expect_test::{Expect, expect};
use hir::Semantics;
use ide_db::{
FilePosition, FileRange, RootDatabase,
base_db::salsa,
defs::Definition,
documentation::{DocsRangeMap, Documentation, HasDocs},
};
@ -48,7 +47,7 @@ fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect
let sema = &Semantics::new(&analysis.db);
let (cursor_def, docs, range) = def_under_cursor(sema, &position);
let res =
salsa::attach(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range)));
hir::attach_db(sema.db, || rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range)));
expect.assert_eq(&res)
}
@ -65,7 +64,7 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
.flat_map(|(text_range, link, ns)| {
let attr = range.map(text_range);
let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false);
let def = salsa::attach(sema.db, || {
let def = hir::attach_db(sema.db, || {
resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
.unwrap_or_else(|| panic!("Failed to resolve {link}"))
});

View file

@ -8,9 +8,7 @@ use hir::{
ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError,
HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym,
};
use ide_db::{
FileRange, RootDatabase, base_db::salsa, famous_defs::FamousDefs, text_edit::TextEditBuilder,
};
use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder};
use ide_db::{FxHashSet, text_edit::TextEdit};
use itertools::Itertools;
use smallvec::{SmallVec, smallvec};
@ -107,7 +105,7 @@ pub(crate) fn inlay_hints(
}
};
let mut preorder = file.preorder();
salsa::attach(sema.db, || {
hir::attach_db(sema.db, || {
while let Some(event) = preorder.next() {
if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none())
{
@ -739,7 +737,7 @@ fn label_of_ty(
config: &InlayHintsConfig,
display_target: DisplayTarget,
) -> Result<(), HirDisplayError> {
salsa::attach(sema.db, || {
hir::attach_db(sema.db, || {
let iter_item_type = hint_iterator(sema, famous_defs, ty);
match iter_item_type {
Some((iter_trait, item, ty)) => {

View file

@ -10,7 +10,7 @@ use hir::{
Adjust, Adjustment, AutoBorrow, DisplayTarget, HirDisplay, Mutability, OverloadedDeref,
PointerCast, Safety,
};
use ide_db::{base_db::salsa, famous_defs::FamousDefs};
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::TextEditBuilder;
use syntax::ast::{self, AstNode, prec::ExprPrecedence};
@ -216,7 +216,7 @@ pub(super) fn hints(
text: if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() },
linked_location: None,
tooltip: Some(config.lazy_tooltip(|| {
salsa::attach(sema.db, || {
hir::attach_db(sema.db, || {
InlayTooltip::Markdown(format!(
"`{}` → `{}`\n\n**{}**\n\n{}",
source.display(sema.db, display_target),

View file

@ -67,7 +67,7 @@ use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
salsa::{self, Cancelled},
salsa::Cancelled,
},
prime_caches, symbol_index,
};
@ -480,7 +480,7 @@ impl Analysis {
// if we were to attach it here.
Cancelled::catch(|| {
let symbols = symbol_index::world_symbols(&self.db, query);
salsa::attach(&self.db, || {
hir::attach_db(&self.db, || {
symbols
.into_iter()
.filter_map(|s| s.try_to_nav(&Semantics::new(&self.db)))
@ -899,7 +899,7 @@ impl Analysis {
where
F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
{
salsa::attach(&self.db, || {
hir::attach_db(&self.db, || {
// the trait solver code may invoke `as_view<HirDatabase>` outside of queries,
// so technically we might run into a panic in salsa if the downcaster has not yet been registered.
HirDatabase::zalsa_register_downcaster(&self.db);

View file

@ -10,7 +10,7 @@ use hir::{
};
use ide_db::{
FileId, FileRange, RootDatabase, SymbolKind,
base_db::{CrateOrigin, LangCrateOrigin, RootQueryDb, salsa},
base_db::{CrateOrigin, LangCrateOrigin, RootQueryDb},
defs::{Definition, find_std_module},
documentation::{Documentation, HasDocs},
famous_defs::FamousDefs,
@ -399,7 +399,7 @@ where
)
.map(|mut res| {
res.docs = self.docs(db);
res.description = salsa::attach(db, || {
res.description = hir::attach_db(db, || {
Some(self.display(db, self.krate(db).to_display_target(db)).to_string())
});
res.container_name = self.container_name(db);
@ -520,7 +520,7 @@ impl TryToNav for hir::Field {
NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map(
|mut res| {
res.docs = self.docs(db);
res.description = salsa::attach(db, || {
res.description = hir::attach_db(db, || {
Some(self.display(db, krate.to_display_target(db)).to_string())
});
res

View file

@ -10,7 +10,7 @@ use hir::{
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
FilePosition, FxHashMap, FxIndexMap, FxIndexSet, RootDatabase, SymbolKind,
base_db::{RootQueryDb, salsa},
base_db::RootQueryDb,
defs::Definition,
documentation::docs_from_attrs,
helpers::visit_file_defs,
@ -413,7 +413,7 @@ pub(crate) fn runnable_impl(
let ty = def.self_ty(sema.db);
let adt_name = ty.as_adt()?.name(sema.db);
let mut ty_args = ty.generic_parameters(sema.db, display_target).peekable();
let params = salsa::attach(sema.db, || {
let params = hir::attach_db(sema.db, || {
if ty_args.peek().is_some() {
format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)))
} else {
@ -522,7 +522,7 @@ fn module_def_doctest(sema: &Semantics<'_, RootDatabase>, def: Definition) -> Op
let mut ty_args = ty.generic_parameters(db, display_target).peekable();
format_to!(path, "{}", name.display(db, edition));
if ty_args.peek().is_some() {
salsa::attach(db, || {
hir::attach_db(db, || {
format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty)));
});
}

View file

@ -11,7 +11,6 @@ use hir::{
use ide_db::{
FilePosition, FxIndexMap,
active_parameter::{callable_for_arg_list, generic_def_for_node},
base_db::salsa,
documentation::{Documentation, HasDocs},
};
use itertools::Itertools;
@ -267,7 +266,7 @@ fn signature_help_for_call(
// In that case, fall back to render definitions of the respective parameters.
// This is overly conservative: we do not substitute known type vars
// (see FIXME in tests::impl_trait) and falling back on any unknowns.
salsa::attach(db, || match (p.ty().contains_unknown(), fn_params.as_deref()) {
hir::attach_db(db, || match (p.ty().contains_unknown(), fn_params.as_deref()) {
(true, Some(fn_params)) => {
format_to!(buf, "{}", fn_params[idx].ty().display(db, display_target))
}
@ -730,7 +729,7 @@ fn signature_help_for_tuple_pat_ish<'db>(
mod tests {
use expect_test::{Expect, expect};
use ide_db::{FilePosition, base_db::salsa};
use ide_db::FilePosition;
use stdx::format_to;
use test_fixture::ChangeFixture;
@ -759,7 +758,7 @@ mod tests {
"#
);
let (db, position) = position(&fixture);
let sig_help = salsa::attach(&db, || crate::signature_help::signature_help(&db, position));
let sig_help = hir::attach_db(&db, || crate::signature_help::signature_help(&db, position));
let actual = match sig_help {
Some(sig_help) => {
let mut rendered = String::new();

View file

@ -5,7 +5,7 @@ use arrayvec::ArrayVec;
use hir::{Crate, Module, Semantics, db::HirDatabase};
use ide_db::{
FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
base_db::{RootQueryDb, SourceDatabase, VfsPath, salsa},
base_db::{RootQueryDb, SourceDatabase, VfsPath},
defs::{Definition, IdentClass},
documentation::Documentation,
famous_defs::FamousDefs,
@ -276,7 +276,7 @@ impl StaticIndex<'_> {
for token in tokens {
let range = token.text_range();
let node = token.parent().unwrap();
match salsa::attach(self.db, || get_definitions(&sema, token.clone())) {
match hir::attach_db(self.db, || get_definitions(&sema, token.clone())) {
Some(it) => {
for i in it {
add_token(i, range, &node);
@ -293,7 +293,7 @@ impl StaticIndex<'_> {
vendored_libs_config: VendoredLibrariesConfig<'_>,
) -> StaticIndex<'a> {
let db = &analysis.db;
salsa::attach(db, || {
hir::attach_db(db, || {
let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source_file_id(db).original_file(db);
let source_root =

View file

@ -16,7 +16,7 @@ use std::ops::ControlFlow;
use either::Either;
use hir::{DefWithBody, EditionedFileId, InFile, InRealFile, MacroKind, Name, Semantics};
use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind, base_db::salsa};
use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind};
use syntax::{
AstNode, AstToken, NodeOrToken,
SyntaxKind::*,
@ -428,7 +428,7 @@ fn traverse(
Some(current_body) => {
let (ops, bindings) = per_body_cache.entry(current_body).or_insert_with(|| {
(
salsa::attach(sema.db, || sema.get_unsafe_ops(current_body)),
hir::attach_db(sema.db, || sema.get_unsafe_ops(current_body)),
Default::default(),
)
});
@ -440,7 +440,7 @@ fn traverse(
|node| unsafe_ops.contains(&InFile::new(descended_element.file_id, node));
let element = match descended_element.value {
NodeOrToken::Node(name_like) => {
let hl = salsa::attach(sema.db, || {
let hl = hir::attach_db(sema.db, || {
highlight::name_like(
sema,
krate,
@ -458,7 +458,7 @@ fn traverse(
}
hl
}
NodeOrToken::Token(token) => salsa::attach(sema.db, || {
NodeOrToken::Token(token) => hir::attach_db(sema.db, || {
highlight::token(sema, token, edition, &is_unsafe_node, tt_level > 0)
.zip(Some(None))
}),

View file

@ -5,7 +5,7 @@ use std::mem;
use either::Either;
use hir::{EditionedFileId, HirFileId, InFile, Semantics, sym};
use ide_db::{
SymbolKind, active_parameter::ActiveParameter, base_db::salsa, defs::Definition,
SymbolKind, active_parameter::ActiveParameter, defs::Definition,
documentation::docs_with_rangemap, rust_doc::is_rust_fence,
};
use syntax::{
@ -27,7 +27,7 @@ pub(super) fn ra_fixture(
expanded: &ast::String,
) -> Option<()> {
let active_parameter =
salsa::attach(sema.db, || ActiveParameter::at_token(sema, expanded.syntax().clone()))?;
hir::attach_db(sema.db, || ActiveParameter::at_token(sema, expanded.syntax().clone()))?;
let has_rust_fixture_attr = active_parameter.attrs().is_some_and(|attrs| {
attrs.filter_map(|attr| attr.as_simple_path()).any(|path| {
path.segments()
@ -128,7 +128,7 @@ pub(super) fn doc_comment(
// Extract intra-doc links and emit highlights for them.
if let Some((docs, doc_mapping)) = docs_with_rangemap(sema.db, &attributes) {
salsa::attach(sema.db, || {
hir::attach_db(sema.db, || {
extract_definitions_from_docs(&docs)
.into_iter()
.filter_map(|(range, link, ns)| {

View file

@ -213,14 +213,13 @@ mod tests {
use crate::fixture;
use expect_test::expect;
use ide_db::base_db::salsa;
fn make_memory_layout(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) -> Option<RecursiveMemoryLayout> {
let (analysis, position, _) = fixture::annotations(ra_fixture);
salsa::attach(&analysis.db, || view_memory_layout(&analysis.db, position))
hir::attach_db(&analysis.db, || view_memory_layout(&analysis.db, position))
}
#[test]

View file

@ -26,10 +26,7 @@ use ide::{
};
use ide_db::{
EditionedFileId, LineIndexDatabase, SnippetCap,
base_db::{
SourceDatabase,
salsa::{self, Database},
},
base_db::{SourceDatabase, salsa::Database},
};
use itertools::Itertools;
use load_cargo::{LoadCargoConfig, ProcMacroServerChoice, load_workspace};
@ -315,7 +312,7 @@ impl flags::AnalysisStats {
shuffle(&mut rng, &mut bodies);
}
salsa::attach(db, || {
hir::attach_db(db, || {
if !self.skip_lowering {
self.run_body_lowering(db, &vfs, &bodies, verbosity);
}

View file

@ -67,6 +67,16 @@ const _: () = {
self.parent.hash(state);
}
}
impl zalsa_::HasJar for SyntaxContext {
type Jar = zalsa_struct_::JarImpl<SyntaxContext>;
const KIND: zalsa_::JarKind = zalsa_::JarKind::Struct;
}
zalsa_::register_jar! {
zalsa_::ErasedJar::erase::<SyntaxContext>()
}
/// Key to use during hash lookups. Each field is some type that implements `Lookup<T>`
/// for the owned type. This permits interning with an `&str` when a `String` is required and so forth.
#[derive(Hash)]