internal: port rust-analyzer to new Salsa

David Barsky 2024-11-05 12:24:41 -05:00
parent 394374e769
commit 74620e64ec
161 changed files with 3075 additions and 2331 deletions

Cargo.lock (generated): 875 lines changed; file diff suppressed because it is too large.

View file

@ -72,7 +72,7 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
profile = { path = "./crates/profile", version = "0.0.0" }
project-model = { path = "./crates/project-model", version = "0.0.0" }
ra-salsa = { path = "./crates/ra-salsa", package = "salsa", version = "0.0.0" }
query-group = { package = "query-group-macro", path = "./crates/query-group-macro", version = "0.0.0" }
span = { path = "./crates/span", version = "0.0.0" }
stdx = { path = "./crates/stdx", version = "0.0.0" }
syntax = { path = "./crates/syntax", version = "0.0.0" }
@ -135,6 +135,7 @@ process-wrap = { version = "8.0.2", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.0", default-features = false }
rayon = "1.8.0"
salsa = "0.19"
rustc-hash = "2.0.0"
semver = "1.0.14"
serde = { version = "1.0.192" }

View file

@ -15,7 +15,9 @@ rust-version.workspace = true
lz4_flex = { version = "0.11", default-features = false }
la-arena.workspace = true
ra-salsa.workspace = true
dashmap.workspace = true
salsa.workspace = true
query-group.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
semver.workspace = true

View file

@ -3,15 +3,12 @@
use std::fmt;
use ra_salsa::Durability;
use rustc_hash::FxHashMap;
use salsa::Durability;
use triomphe::Arc;
use vfs::FileId;
use crate::{
CrateGraph, CrateId, CrateWorkspaceData, SourceDatabaseFileInputExt, SourceRoot,
SourceRootDatabase, SourceRootId,
};
use crate::{CrateGraph, CrateId, CrateWorkspaceData, RootQueryDb, SourceRoot, SourceRootId};
/// Encapsulate a bunch of raw `.set` calls on the database.
#[derive(Default)]
@ -59,7 +56,7 @@ impl FileChange {
self.ws_data = Some(data);
}
pub fn apply(self, db: &mut dyn SourceRootDatabase) {
pub fn apply(self, db: &mut dyn RootQueryDb) {
let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots {
for (idx, root) in roots.into_iter().enumerate() {
@ -68,14 +65,16 @@ impl FileChange {
for file_id in root.iter() {
db.set_file_source_root_with_durability(file_id, root_id, durability);
}
db.set_source_root_with_durability(root_id, Arc::new(root), durability);
}
}
for (file_id, text) in self.files_changed {
let source_root_id = db.file_source_root(file_id);
let source_root = db.source_root(source_root_id);
let durability = durability(&source_root);
let source_root = db.source_root(source_root_id.source_root_id(db));
let durability = durability(&source_root.source_root(db));
// XXX: can't actually remove the file, just reset the text
let text = text.unwrap_or_default();
db.set_file_text_with_durability(file_id, &text, durability)
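This projection pattern recurs throughout the port: queries that previously returned plain values now return salsa input structs, and each field is read back through an accessor that takes the database. A minimal before/after sketch of the lines above:

// before: the query returns the value directly
let source_root_id = db.file_source_root(file_id);
let source_root = db.source_root(source_root_id);

// after: the query returns an input struct; fields are projected out with `field(db)`
let source_root_id = db.file_source_root(file_id).source_root_id(db);
let source_root = db.source_root(source_root_id).source_root(db);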

View file

@ -3,14 +3,7 @@
mod change;
mod input;
use std::panic;
use ra_salsa::Durability;
use rustc_hash::FxHashMap;
use span::EditionedFileId;
use syntax::{ast, Parse, SourceFile, SyntaxError};
use triomphe::Arc;
use vfs::FileId;
use std::hash::BuildHasherDefault;
pub use crate::{
change::FileChange,
@ -20,20 +13,30 @@ pub use crate::{
TargetLayoutLoadResult,
},
};
pub use ra_salsa::{self, Cancelled};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath};
use dashmap::{mapref::entry::Entry, DashMap};
pub use query_group::{self};
use rustc_hash::{FxHashMap, FxHasher};
pub use salsa::{self};
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use syntax::{ast, Parse, SyntaxError};
use triomphe::Arc;
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
#[macro_export]
macro_rules! impl_intern_key {
($name:ident) => {
impl $crate::ra_salsa::InternKey for $name {
fn from_intern_id(v: $crate::ra_salsa::InternId) -> Self {
$name(v)
($id:ident, $loc:ident) => {
#[salsa::interned(no_debug, no_lifetime)]
pub struct $id {
pub loc: $loc,
}
fn as_intern_id(&self) -> $crate::ra_salsa::InternId {
self.0
// If we derive `Debug`, salsa prints the interned values recursively, which can blow the stack.
impl ::std::fmt::Debug for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.debug_tuple(stringify!($id))
.field(&format_args!("{:04x}", self.0.as_u32()))
.finish()
}
}
};
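A sketch of how an invocation is used, assuming the constructor and field getter (`new` and `loc`) that salsa generates for interned structs:

impl_intern_key!(FunctionId, FunctionLoc);

// hypothetical call site:
let id = FunctionId::new(db, loc.clone()); // interns the location
let stored = id.loc(db);                   // projects it back out
println!("{id:?}");                        // e.g. `FunctionId(0400)`, via the manual Debug impl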
@ -47,39 +50,213 @@ pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16;
pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
pub trait FileLoader {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
/// Crates whose root's source root is the same as the source root of `file_id`
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
#[derive(Debug, Default)]
pub struct Files {
files: Arc<DashMap<vfs::FileId, FileText, BuildHasherDefault<FxHasher>>>,
source_roots: Arc<DashMap<SourceRootId, SourceRootInput, BuildHasherDefault<FxHasher>>>,
file_source_roots: Arc<DashMap<vfs::FileId, FileSourceRootInput, BuildHasherDefault<FxHasher>>>,
}
impl Files {
pub fn file_text(&self, file_id: vfs::FileId) -> FileText {
*self.files.get(&file_id).expect("Unable to fetch file; this is a bug")
}
pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {
let files = Arc::clone(&self.files);
match files.entry(file_id) {
Entry::Occupied(mut occupied) => {
occupied.get_mut().set_text(db).to(Arc::from(text));
}
Entry::Vacant(vacant) => {
let text = FileText::new(db, Arc::from(text), file_id);
vacant.insert(text);
}
};
}
pub fn set_file_text_with_durability(
&self,
db: &mut dyn SourceDatabase,
file_id: vfs::FileId,
text: &str,
durability: Durability,
) {
let files = Arc::clone(&self.files);
match files.entry(file_id) {
Entry::Occupied(mut occupied) => {
occupied.get_mut().set_text(db).to(Arc::from(text));
}
Entry::Vacant(vacant) => {
let text =
FileText::builder(Arc::from(text), file_id).durability(durability).new(db);
vacant.insert(text);
}
};
}
/// Contents of the source root.
pub fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
let source_root = self
.source_roots
.get(&source_root_id)
.expect("Unable to fetch source root id; this is a bug");
*source_root
}
pub fn set_source_root_with_durability(
&self,
db: &mut dyn SourceDatabase,
source_root_id: SourceRootId,
source_root: Arc<SourceRoot>,
durability: Durability,
) {
let source_roots = Arc::clone(&self.source_roots);
match source_roots.entry(source_root_id) {
Entry::Occupied(mut occupied) => {
occupied.get_mut().set_source_root(db).to(source_root);
}
Entry::Vacant(vacant) => {
let source_root =
SourceRootInput::builder(source_root).durability(durability).new(db);
vacant.insert(source_root);
}
};
}
pub fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
let file_source_root = self
.file_source_roots
.get(&id)
.expect("Unable to fetch FileSourceRootInput; this is a bug");
*file_source_root
}
pub fn set_file_source_root_with_durability(
&self,
db: &mut dyn SourceDatabase,
id: vfs::FileId,
source_root_id: SourceRootId,
durability: Durability,
) {
let file_source_roots = Arc::clone(&self.file_source_roots);
match file_source_roots.entry(id) {
Entry::Occupied(mut occupied) => {
occupied.get_mut().set_source_root_id(db).to(source_root_id);
}
Entry::Vacant(vacant) => {
let file_source_root =
FileSourceRootInput::builder(source_root_id).durability(durability).new(db);
vacant.insert(file_source_root);
}
};
}
}
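`Files` deliberately lives outside salsa storage: the `DashMap`s hand out the current input handles through `&self`, while the setters additionally take the `&mut` database needed to record a new revision. A sketch of how a concrete database might hold and delegate to it (the `RootDatabase` name and the `Arc<Files>` field are illustrative; the `salsa::Database` plumbing is elided):

#[salsa::db]
#[derive(Default)]
struct RootDatabase {
    storage: salsa::Storage<Self>,
    files: Arc<Files>,
}

#[salsa::db]
impl SourceDatabase for RootDatabase {
    fn file_text(&self, file_id: vfs::FileId) -> FileText {
        self.files.file_text(file_id)
    }
    fn set_file_text(&mut self, file_id: vfs::FileId, text: &str) {
        // clone the handle first so `self` can be reborrowed mutably
        let files = Arc::clone(&self.files);
        files.set_file_text(self, file_id, text);
    }
    // ...the remaining trait methods delegate the same way
}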
#[salsa::interned(no_lifetime)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
}
impl EditionedFileId {
pub fn file_id(&self, db: &dyn salsa::Database) -> vfs::FileId {
let id = self.editioned_file_id(db);
id.file_id()
}
fn unpack(&self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
let id = self.editioned_file_id(db);
(id.file_id(), id.edition())
}
}
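Queries now key on this interned wrapper rather than on `span::EditionedFileId` itself, so call sites intern first; the updated tests further down follow this pattern:

use salsa::AsDynDatabase;

let file_id = base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let parse = db.parse(file_id);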
#[salsa::input]
pub struct FileText {
pub text: Arc<str>,
pub file_id: vfs::FileId,
}
#[salsa::input]
pub struct FileSourceRootInput {
pub source_root_id: SourceRootId,
}
#[salsa::input]
pub struct SourceRootInput {
pub source_root: Arc<SourceRoot>,
}
/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[ra_salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
#[ra_salsa::input]
fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
/// Text of the file.
#[ra_salsa::lru]
fn file_text(&self, file_id: FileId) -> Arc<str>;
#[query_group::query_group]
pub trait RootQueryDb: SourceDatabase + salsa::Database {
/// Parses the file into the syntax tree.
#[ra_salsa::lru]
#[salsa::invoke_actual(parse)]
#[salsa::lru(128)]
fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
/// Returns the set of errors obtained from parsing the file including validation errors.
fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;
/// The crate graph.
#[ra_salsa::input]
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
#[ra_salsa::input]
#[salsa::input]
fn crate_workspace_data(&self) -> Arc<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>;
#[ra_salsa::transparent]
#[salsa::transparent]
fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
/// Crates whose root file is in `id`.
fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
#[salsa::transparent]
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
}
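For the `#[salsa::input]` queries the query-group macro keeps the old setter surface, so existing call sites continue to compile. A hedged sketch, assuming the generated `set_*_with_durability` methods mirror the old salsa convention:

// assumed generated setter; name follows the old `set_<query>_with_durability` scheme
db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH);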
#[salsa::db]
pub trait SourceDatabase: salsa::Database {
/// Text of the file.
fn file_text(&self, file_id: vfs::FileId) -> FileText;
fn set_file_text(&mut self, file_id: vfs::FileId, text: &str);
fn set_file_text_with_durability(
&mut self,
file_id: vfs::FileId,
text: &str,
durability: Durability,
);
/// Contents of the source root.
fn source_root(&self, id: SourceRootId) -> SourceRootInput;
fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput;
fn set_file_source_root_with_durability(
&mut self,
id: vfs::FileId,
source_root_id: SourceRootId,
durability: Durability,
);
/// Sets the contents of the source root.
fn set_source_root_with_durability(
&mut self,
source_root_id: SourceRootId,
source_root: Arc<SourceRoot>,
durability: Durability,
);
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
// FIXME: this *somehow* should be platform agnostic...
let source_root = self.file_source_root(path.anchor);
let source_root = self.source_root(source_root.source_root_id(self));
source_root.source_root(self).resolve_path(path)
}
}
/// Crate related data shared by the whole workspace.
@ -91,7 +268,7 @@ pub struct CrateWorkspaceData {
pub toolchain: Option<Version>,
}
fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
fn toolchain_channel(db: &dyn RootQueryDb, krate: CrateId) -> Option<ReleaseChannel> {
db.crate_workspace_data()
.get(&krate)?
.toolchain
@ -99,14 +276,14 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
.and_then(|v| ReleaseChannel::from_str(&v.pre))
}
fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
let _p = tracing::info_span!("parse", ?file_id).entered();
let (file_id, edition) = file_id.unpack();
let text = db.file_text(file_id);
SourceFile::parse(&text, edition)
let (file_id, edition) = file_id.unpack(db.as_dyn_database());
let text = db.file_text(file_id).text(db);
ast::SourceFile::parse(&text, edition)
}
fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
let errors = db.parse(file_id).errors();
match &*errors {
[] => None,
@ -114,67 +291,13 @@ fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc
}
}
fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> {
let bytes = db.compressed_file_text(file_id);
let bytes =
lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
Arc::from(text)
}
/// We don't want to give HIR knowledge of source roots, hence we extract these
/// methods into a separate DB.
#[ra_salsa::query_group(SourceRootDatabaseStorage)]
pub trait SourceRootDatabase: SourceDatabase {
/// Path to a file, relative to the root of its source root.
/// Source root of the file.
#[ra_salsa::input]
fn file_source_root(&self, file_id: FileId) -> SourceRootId;
/// Contents of the source root.
#[ra_salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
/// Crates whose root file is in `id`.
fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
}
pub trait SourceDatabaseFileInputExt {
fn set_file_text(&mut self, file_id: FileId, text: &str) {
self.set_file_text_with_durability(file_id, text, Durability::LOW);
}
fn set_file_text_with_durability(
&mut self,
file_id: FileId,
text: &str,
durability: Durability,
);
}
impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db {
fn set_file_text_with_durability(
&mut self,
file_id: FileId,
text: &str,
durability: Durability,
) {
let bytes = text.as_bytes();
let compressed = lz4_flex::compress_prepend_size(bytes);
self.set_compressed_file_text_with_durability(
file_id,
Arc::from(compressed.as_slice()),
durability,
)
}
}
fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> {
fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[CrateId]> {
let graph = db.crate_graph();
let mut crates = graph
.iter()
.filter(|&krate| {
let root_file = graph[krate].root_file_id;
db.file_source_root(root_file) == id
db.file_source_root(root_file).source_root_id(db) == id
})
.collect::<Vec<_>>();
crates.sort();
@ -182,22 +305,9 @@ fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[Cra
crates.into_iter().collect()
}
// FIXME: Would be nice to get rid of this somehow
/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split
/// regarding FileLoader
pub struct FileLoaderDelegate<T>(pub T);
impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
// FIXME: this *somehow* should be platform agnostic...
let source_root = self.0.file_source_root(path.anchor);
let source_root = self.0.source_root(source_root);
source_root.resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[CrateId]> {
let _p = tracing::info_span!("relevant_crates").entered();
let source_root = self.0.file_source_root(file_id);
self.0.source_root_crates(source_root)
}
let source_root = db.file_source_root(file_id);
db.source_root_crates(source_root.source_root_id(db))
}
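With `FileLoaderDelegate` gone, `relevant_crates` becomes an ordinary transparent query on `RootQueryDb` instead of a `FileLoader` method:

// before: each database impl forwarded to the delegate workaround
FileLoaderDelegate(self).relevant_crates(file_id)

// after: a plain query on the database
db.relevant_crates(file_id)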

View file

@ -19,6 +19,16 @@ impl Edition {
/// The current latest stable edition, note this is usually not the right choice in code.
pub const CURRENT_FIXME: Edition = Edition::Edition2021;
pub fn from_u32(u32: u32) -> Edition {
match u32 {
0 => Edition::Edition2015,
1 => Edition::Edition2018,
2 => Edition::Edition2021,
3 => Edition::Edition2024,
_ => panic!("invalid edition"),
}
}
pub fn at_least_2024(self) -> bool {
self >= Edition::Edition2024
}
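A quick sanity check of the mapping (variants count up from 2015):

assert_eq!(Edition::from_u32(0), Edition::Edition2015);
assert_eq!(Edition::from_u32(3), Edition::Edition2024);
assert!(Edition::from_u32(3).at_least_2024());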

View file

@ -29,6 +29,8 @@ hashbrown.workspace = true
triomphe.workspace = true
rustc_apfloat = "0.2.0"
text-size.workspace = true
salsa.workspace = true
query-group.workspace = true
ra-ap-rustc_hashes.workspace = true
ra-ap-rustc_parse_format.workspace = true
@ -45,7 +47,6 @@ cfg.workspace = true
tt.workspace = true
span.workspace = true
[dev-dependencies]
expect-test.workspace = true
@ -53,6 +54,7 @@ expect-test.workspace = true
test-utils.workspace = true
test-fixture.workspace = true
syntax-bridge.workspace = true
[features]
in-rust-tree = ["hir-expand/in-rust-tree"]

View file

@ -601,17 +601,14 @@ impl<'attr> AttrQuery<'attr> {
fn any_has_attrs<'db>(
db: &(dyn DefDatabase + 'db),
id: impl Lookup<
Database<'db> = dyn DefDatabase + 'db,
Data = impl HasSource<Value = impl ast::HasAttrs>,
>,
id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
) -> InFile<ast::AnyHasAttrs> {
id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
}
fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>(
db: &(dyn DefDatabase + 'db),
lookup: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = impl ItemTreeLoc<Id = N>>,
lookup: impl Lookup<Database = dyn DefDatabase, Data = impl ItemTreeLoc<Id = N>>,
) -> RawAttrs {
let id = lookup.lookup(db).item_tree_id();
let tree = id.item_tree(db);

View file

@ -228,6 +228,7 @@ impl StructData {
None,
);
let types_map = strukt.types_map.clone();
(
Arc::new(StructData {
name: strukt.name.clone(),

View file

@ -1,5 +1,5 @@
//! Defines database & queries for name resolution.
use base_db::{ra_salsa, CrateId, SourceDatabase, Upcast};
use base_db::{CrateId, RootQueryDb, SourceDatabase, Upcast};
use either::Either;
use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
use intern::sym;
@ -33,178 +33,204 @@ use crate::{
UseId, UseLoc, VariantId,
};
#[ra_salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
// region: items
#[ra_salsa::interned]
fn intern_use(&self, loc: UseLoc) -> UseId;
#[ra_salsa::interned]
fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
#[ra_salsa::interned]
fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
#[ra_salsa::interned]
fn intern_struct(&self, loc: StructLoc) -> StructId;
#[ra_salsa::interned]
fn intern_union(&self, loc: UnionLoc) -> UnionId;
#[ra_salsa::interned]
fn intern_enum(&self, loc: EnumLoc) -> EnumId;
#[ra_salsa::interned]
fn intern_enum_variant(&self, loc: EnumVariantLoc) -> EnumVariantId;
#[ra_salsa::interned]
fn intern_const(&self, loc: ConstLoc) -> ConstId;
#[ra_salsa::interned]
fn intern_static(&self, loc: StaticLoc) -> StaticId;
#[ra_salsa::interned]
fn intern_trait(&self, loc: TraitLoc) -> TraitId;
#[ra_salsa::interned]
fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId;
#[ra_salsa::interned]
fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
#[ra_salsa::interned]
fn intern_impl(&self, loc: ImplLoc) -> ImplId;
#[ra_salsa::interned]
fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
#[ra_salsa::interned]
fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
#[ra_salsa::interned]
fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
#[ra_salsa::interned]
fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
// endregion: items
use salsa::plumbing::AsId;
#[ra_salsa::interned]
#[query_group::query_group(InternDatabaseStorage)]
pub trait InternDatabase: RootQueryDb {
// region: items
#[salsa::interned]
fn intern_use(&self, loc: UseLoc) -> UseId;
#[salsa::interned]
fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
#[salsa::interned]
fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
#[salsa::interned]
fn intern_struct(&self, loc: StructLoc) -> StructId;
#[salsa::interned]
fn intern_union(&self, loc: UnionLoc) -> UnionId;
#[salsa::interned]
fn intern_enum(&self, loc: EnumLoc) -> EnumId;
#[salsa::interned]
fn intern_enum_variant(&self, loc: EnumVariantLoc) -> EnumVariantId;
#[salsa::interned]
fn intern_const(&self, loc: ConstLoc) -> ConstId;
#[salsa::interned]
fn intern_static(&self, loc: StaticLoc) -> StaticId;
#[salsa::interned]
fn intern_trait(&self, loc: TraitLoc) -> TraitId;
#[salsa::interned]
fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId;
#[salsa::interned]
fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
#[salsa::interned]
fn intern_impl(&self, loc: ImplLoc) -> ImplId;
#[salsa::interned]
fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
#[salsa::interned]
fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
#[salsa::interned]
fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
#[salsa::interned]
fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
// endregion: items
#[salsa::interned]
fn intern_block(&self, loc: BlockLoc) -> BlockId;
#[ra_salsa::interned]
#[salsa::interned]
fn intern_anonymous_const(&self, id: ConstBlockLoc) -> ConstBlockId;
#[ra_salsa::interned]
#[salsa::interned]
fn intern_in_type_const(&self, id: InTypeConstLoc) -> InTypeConstId;
}
#[ra_salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
#[query_group::query_group]
pub trait DefDatabase:
InternDatabase
+ ExpandDatabase
+ SourceDatabase
+ Upcast<dyn ExpandDatabase>
+ Upcast<dyn RootQueryDb>
{
/// Whether to expand procedural macros during name resolution.
#[ra_salsa::input]
#[salsa::input]
fn expand_proc_attr_macros(&self) -> bool;
/// Computes an [`ItemTree`] for the given file or macro expansion.
#[ra_salsa::invoke(ItemTree::file_item_tree_query)]
#[salsa::invoke(ItemTree::file_item_tree_query)]
fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
#[ra_salsa::invoke(ItemTree::block_item_tree_query)]
#[salsa::invoke_actual(ItemTree::block_item_tree_query)]
fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
#[ra_salsa::invoke(ItemTree::file_item_tree_with_source_map_query)]
#[salsa::invoke(ItemTree::file_item_tree_with_source_map_query)]
fn file_item_tree_with_source_map(
&self,
file_id: HirFileId,
) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>);
#[ra_salsa::invoke(ItemTree::block_item_tree_with_source_map_query)]
#[salsa::invoke_actual(ItemTree::block_item_tree_with_source_map_query)]
fn block_item_tree_with_source_map(
&self,
block_id: BlockId,
) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>);
#[ra_salsa::invoke(DefMap::crate_def_map_query)]
#[salsa::invoke(DefMap::crate_def_map_query)]
fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
/// Computes the block-level `DefMap`.
#[ra_salsa::invoke(DefMap::block_def_map_query)]
#[salsa::invoke_actual(DefMap::block_def_map_query)]
fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
/// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
#[salsa::invoke_actual(macro_def)]
fn macro_def(&self, m: MacroId) -> MacroDefId;
// region:data
#[ra_salsa::transparent]
#[ra_salsa::invoke(StructData::struct_data_query)]
#[salsa::transparent]
#[salsa::invoke_actual(StructData::struct_data_query)]
fn struct_data(&self, id: StructId) -> Arc<StructData>;
#[ra_salsa::invoke(StructData::struct_data_with_diagnostics_query)]
#[salsa::invoke_actual(StructData::struct_data_with_diagnostics_query)]
fn struct_data_with_diagnostics(&self, id: StructId) -> (Arc<StructData>, DefDiagnostics);
#[ra_salsa::transparent]
#[ra_salsa::invoke(StructData::union_data_query)]
#[salsa::transparent]
#[salsa::invoke_actual(StructData::union_data_query)]
fn union_data(&self, id: UnionId) -> Arc<StructData>;
#[ra_salsa::invoke(StructData::union_data_with_diagnostics_query)]
#[salsa::invoke_actual(StructData::union_data_with_diagnostics_query)]
fn union_data_with_diagnostics(&self, id: UnionId) -> (Arc<StructData>, DefDiagnostics);
#[ra_salsa::invoke(EnumData::enum_data_query)]
#[salsa::invoke_actual(EnumData::enum_data_query)]
fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
#[ra_salsa::transparent]
#[ra_salsa::invoke(EnumVariantData::enum_variant_data_query)]
#[salsa::transparent]
#[salsa::invoke_actual(EnumVariantData::enum_variant_data_query)]
fn enum_variant_data(&self, id: EnumVariantId) -> Arc<EnumVariantData>;
#[ra_salsa::invoke(EnumVariantData::enum_variant_data_with_diagnostics_query)]
#[salsa::invoke_actual(EnumVariantData::enum_variant_data_with_diagnostics_query)]
fn enum_variant_data_with_diagnostics(
&self,
id: EnumVariantId,
) -> (Arc<EnumVariantData>, DefDiagnostics);
#[ra_salsa::transparent]
#[ra_salsa::invoke(VariantData::variant_data)]
#[salsa::transparent]
#[salsa::invoke_actual(VariantData::variant_data)]
fn variant_data(&self, id: VariantId) -> Arc<VariantData>;
#[ra_salsa::transparent]
#[ra_salsa::invoke(ImplData::impl_data_query)]
#[salsa::transparent]
#[salsa::invoke_actual(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
#[ra_salsa::invoke(ImplData::impl_data_with_diagnostics_query)]
#[salsa::invoke_actual(ImplData::impl_data_with_diagnostics_query)]
fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, DefDiagnostics);
#[ra_salsa::transparent]
#[ra_salsa::invoke(TraitData::trait_data_query)]
#[salsa::transparent]
#[salsa::invoke_actual(TraitData::trait_data_query)]
fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
#[ra_salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
#[salsa::invoke_actual(TraitData::trait_data_with_diagnostics_query)]
fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitData>, DefDiagnostics);
#[ra_salsa::invoke(TraitAliasData::trait_alias_query)]
#[salsa::invoke_actual(TraitAliasData::trait_alias_query)]
fn trait_alias_data(&self, e: TraitAliasId) -> Arc<TraitAliasData>;
#[ra_salsa::invoke(TypeAliasData::type_alias_data_query)]
#[salsa::invoke_actual(TypeAliasData::type_alias_data_query)]
fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
#[ra_salsa::invoke(FunctionData::fn_data_query)]
#[salsa::invoke_actual(FunctionData::fn_data_query)]
fn function_data(&self, func: FunctionId) -> Arc<FunctionData>;
#[ra_salsa::invoke(ConstData::const_data_query)]
#[salsa::invoke_actual(ConstData::const_data_query)]
fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
#[ra_salsa::invoke(StaticData::static_data_query)]
#[salsa::invoke_actual(StaticData::static_data_query)]
fn static_data(&self, statik: StaticId) -> Arc<StaticData>;
#[ra_salsa::invoke(Macro2Data::macro2_data_query)]
#[salsa::invoke_actual(Macro2Data::macro2_data_query)]
fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>;
#[ra_salsa::invoke(MacroRulesData::macro_rules_data_query)]
#[salsa::invoke_actual(MacroRulesData::macro_rules_data_query)]
fn macro_rules_data(&self, makro: MacroRulesId) -> Arc<MacroRulesData>;
#[ra_salsa::invoke(ProcMacroData::proc_macro_data_query)]
#[salsa::invoke_actual(ProcMacroData::proc_macro_data_query)]
fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
#[ra_salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)]
#[salsa::invoke_actual(ExternCrateDeclData::extern_crate_decl_data_query)]
fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc<ExternCrateDeclData>;
// endregion:data
#[ra_salsa::invoke(Body::body_with_source_map_query)]
#[ra_salsa::lru]
#[salsa::invoke(Body::body_with_source_map_query)]
#[salsa::lru(512)]
fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
#[ra_salsa::invoke(Body::body_query)]
#[salsa::invoke(Body::body_query)]
fn body(&self, def: DefWithBodyId) -> Arc<Body>;
#[ra_salsa::invoke(ExprScopes::expr_scopes_query)]
#[salsa::invoke_actual(ExprScopes::expr_scopes_query)]
fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
#[ra_salsa::invoke(GenericParams::generic_params_query)]
#[salsa::invoke_actual(GenericParams::generic_params_query)]
fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>;
/// If this returns `None` for the source map, that means it is the same as with the item tree.
#[ra_salsa::invoke(GenericParams::generic_params_with_source_map_query)]
#[salsa::invoke_actual(GenericParams::generic_params_with_source_map_query)]
fn generic_params_with_source_map(
&self,
def: GenericDefId,
@ -212,51 +238,51 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
// region:attrs
#[ra_salsa::invoke(Attrs::fields_attrs_query)]
#[salsa::invoke_actual(Attrs::fields_attrs_query)]
fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
// should this really be a query?
#[ra_salsa::invoke(crate::attr::fields_attrs_source_map)]
#[salsa::invoke_actual(crate::attr::fields_attrs_source_map)]
fn fields_attrs_source_map(
&self,
def: VariantId,
) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
#[ra_salsa::invoke(AttrsWithOwner::attrs_query)]
#[salsa::invoke(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
#[ra_salsa::transparent]
#[ra_salsa::invoke(lang_item::lang_attr)]
#[salsa::transparent]
#[salsa::invoke(lang_item::lang_attr)]
fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
// endregion:attrs
#[ra_salsa::invoke(LangItems::lang_item_query)]
#[salsa::invoke(LangItems::lang_item_query)]
fn lang_item(&self, start_crate: CrateId, item: LangItem) -> Option<LangItemTarget>;
#[ra_salsa::invoke(ImportMap::import_map_query)]
#[salsa::invoke(ImportMap::import_map_query)]
fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
// region:visibilities
#[ra_salsa::invoke(visibility::field_visibilities_query)]
#[salsa::invoke(visibility::field_visibilities_query)]
fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
// FIXME: unify function_visibility and const_visibility?
#[ra_salsa::invoke(visibility::function_visibility_query)]
#[salsa::invoke_actual(visibility::function_visibility_query)]
fn function_visibility(&self, def: FunctionId) -> Visibility;
#[ra_salsa::invoke(visibility::const_visibility_query)]
#[salsa::invoke_actual(visibility::const_visibility_query)]
fn const_visibility(&self, def: ConstId) -> Visibility;
// endregion:visibilities
#[ra_salsa::invoke(LangItems::crate_lang_items_query)]
#[salsa::invoke(LangItems::crate_lang_items_query)]
fn crate_lang_items(&self, krate: CrateId) -> Option<Arc<LangItems>>;
#[ra_salsa::invoke(crate::lang_item::notable_traits_in_deps)]
#[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
fn notable_traits_in_deps(&self, krate: CrateId) -> Arc<[Arc<[TraitId]>]>;
#[ra_salsa::invoke(crate::lang_item::crate_notable_traits)]
#[salsa::invoke(crate::lang_item::crate_notable_traits)]
fn crate_notable_traits(&self, krate: CrateId) -> Option<Arc<[TraitId]>>;
fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;

View file

@ -16,7 +16,7 @@ use hir_expand::{name::Name, ExpandError, InFile};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, MacroFileId, SyntaxContextData};
use span::{Edition, MacroFileId, SyntaxContext};
use syntax::{ast, AstPtr, SyntaxNodePtr};
use triomphe::Arc;
use tt::TextRange;
@ -49,8 +49,9 @@ impl HygieneId {
Self(ctx)
}
pub(crate) fn lookup(self, db: &dyn DefDatabase) -> SyntaxContextData {
db.lookup_intern_syntax_context(self.0)
// FIXME: Inline this
pub(crate) fn lookup(self) -> SyntaxContext {
self.0
}
pub(crate) fn is_root(self) -> bool {

View file

@ -1931,11 +1931,11 @@ impl ExprCollector<'_> {
None => (HygieneId::ROOT, None),
Some(span_map) => {
let span = span_map.span_at(lifetime.syntax().text_range().start());
let ctx = self.db.lookup_intern_syntax_context(span.ctx);
let hygiene_id = HygieneId::new(ctx.opaque_and_semitransparent);
let hygiene_info = ctx.outer_expn.map(|expansion| {
let ctx = span.ctx;
let hygiene_id = HygieneId::new(ctx.opaque_and_semitransparent(self.db));
let hygiene_info = ctx.outer_expn(self.db).map(|expansion| {
let expansion = self.db.lookup_intern_macro_call(expansion);
(ctx.parent, expansion.def)
(ctx.parent(self.db), expansion.def)
});
(hygiene_id, hygiene_info)
}
@ -1962,11 +1962,12 @@ impl ExprCollector<'_> {
// A macro is allowed to refer to labels from before its declaration.
// Therefore, if we got to the rib of its declaration, give up its hygiene
// and use its parent expansion.
let parent_ctx = self.db.lookup_intern_syntax_context(parent_ctx);
hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent);
hygiene_info = parent_ctx.outer_expn.map(|expansion| {
hygiene_id =
HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
let expansion = self.db.lookup_intern_macro_call(expansion);
(parent_ctx.parent, expansion.def)
(parent_ctx.parent(self.db), expansion.def)
});
}
}
@ -2593,7 +2594,7 @@ impl ExprCollector<'_> {
None => HygieneId::ROOT,
Some(span_map) => {
let ctx = span_map.span_at(span_start).ctx;
HygieneId::new(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent)
HygieneId::new(ctx.opaque_and_semitransparent(self.db))
}
}
}
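The hygiene changes above all share one shape: `SyntaxContext` is now itself the salsa handle, so the interned payload is no longer fetched wholesale and each field becomes an accessor that takes the database. A before/after sketch:

// before: look up the interned data, then read plain fields
let ctx = db.lookup_intern_syntax_context(span.ctx);
let parent = ctx.parent;

// after: project fields straight off the handle
let ctx = span.ctx;
let parent = ctx.parent(db);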

View file

@ -324,8 +324,9 @@ fn compute_expr_scopes(
#[cfg(test)]
mod tests {
use base_db::SourceDatabase;
use base_db::RootQueryDb;
use hir_expand::{name::AsName, InFile};
use salsa::AsDynDatabase;
use span::FileId;
use syntax::{algo::find_node_at_offset, ast, AstNode};
use test_fixture::WithFixture;
@ -357,18 +358,22 @@ mod tests {
};
let (db, position) = TestDB::with_position(&code);
let file_id = position.file_id;
let editioned_file_id = position.file_id;
let offset = position.offset;
let file_syntax = db.parse(file_id).syntax_node();
let (file_id, _) = editioned_file_id.unpack();
let editioned_file_id_wrapper =
base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let file_syntax = db.parse(editioned_file_id_wrapper).syntax_node();
let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
let function = find_function(&db, file_id.file_id());
let function = find_function(&db, file_id);
let scopes = db.expr_scopes(function.into());
let (_body, source_map) = db.body_with_source_map(function.into());
let expr_id = source_map
.node_expr(InFile { file_id: file_id.into(), value: &marker.into() })
.node_expr(InFile { file_id: editioned_file_id.into(), value: &marker.into() })
.unwrap()
.as_expr()
.unwrap();
@ -511,15 +516,19 @@ fn foo() {
fn do_check_local_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_offset: u32) {
let (db, position) = TestDB::with_position(ra_fixture);
let file_id = position.file_id;
let editioned_file_id = position.file_id;
let offset = position.offset;
let file = db.parse(file_id).ok().unwrap();
let (file_id, _) = editioned_file_id.unpack();
let file_id_wrapper =
base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let file = db.parse(file_id_wrapper).ok().unwrap();
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
let function = find_function(&db, file_id.file_id());
let function = find_function(&db, file_id);
let scopes = db.expr_scopes(function.into());
let (_, source_map) = db.body_with_source_map(function.into());
@ -527,7 +536,7 @@ fn foo() {
let expr_scope = {
let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
let expr_id = source_map
.node_expr(InFile { file_id: file_id.into(), value: &expr_ast })
.node_expr(InFile { file_id: editioned_file_id.into(), value: &expr_ast })
.unwrap()
.as_expr()
.unwrap();

View file

@ -189,8 +189,8 @@ fn f() {
}
"#,
expect![[r#"
BlockId(1) in BlockRelativeModuleId { block: Some(BlockId(0)), local_id: Idx::<ModuleData>(1) }
BlockId(0) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
BlockId(4c01) in BlockRelativeModuleId { block: Some(BlockId(4c00)), local_id: Idx::<ModuleData>(1) }
BlockId(4c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);

View file

@ -362,10 +362,7 @@ impl GenericParams {
};
fn id_to_generics<Id: GenericsItemTreeNode>(
db: &dyn DefDatabase,
id: impl for<'db> Lookup<
Database<'db> = dyn DefDatabase + 'db,
Data = impl ItemTreeLoc<Id = Id>,
>,
id: impl Lookup<Database = dyn DefDatabase, Data = impl ItemTreeLoc<Id = Id>>,
enabled_params: impl Fn(
&Arc<GenericParams>,
&ItemTree,
@ -378,6 +375,7 @@ impl GenericParams {
let id = id.lookup(db).item_tree_id();
let tree = id.item_tree(db);
let item = &tree[id.value];
(enabled_params(item.generic_params(), &tree, id.value.into()), None)
}

View file

@ -475,7 +475,7 @@ fn search_maps(
#[cfg(test)]
mod tests {
use base_db::{SourceDatabase, Upcast};
use base_db::{RootQueryDb, Upcast};
use expect_test::{expect, Expect};
use test_fixture::WithFixture;

View file

@ -546,7 +546,7 @@ impl Printer<'_> {
let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it];
let _ = writeln!(
self,
"// AstId: {:?}, SyntaxContext: {}, ExpandTo: {:?}",
"// AstId: {:?}, SyntaxContextId: {}, ExpandTo: {:?}",
ast_id.erase().into_raw(),
ctxt,
expand_to

View file

@ -270,7 +270,7 @@ m!();
// AstId: 2
pub macro m2 { ... }
// AstId: 3, SyntaxContext: 2, ExpandTo: Items
// AstId: 3, SyntaxContextId: 4294967037, ExpandTo: Items
m!(...);
"#]],
);

View file

@ -69,16 +69,9 @@ mod pretty;
#[cfg(test)]
mod test_db;
use std::{
hash::{Hash, Hasher},
panic::{RefUnwindSafe, UnwindSafe},
};
use std::hash::{Hash, Hasher};
use base_db::{
impl_intern_key,
ra_salsa::{self, InternValueTrivial},
CrateId,
};
use base_db::{impl_intern_key, CrateId};
use hir_expand::{
builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
@ -192,8 +185,7 @@ pub trait ItemTreeLoc {
macro_rules! impl_intern {
($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl_intern_key!($id);
impl InternValueTrivial for $loc {}
impl_intern_key!($id, $loc);
impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
};
}
@ -213,87 +205,58 @@ macro_rules! impl_loc {
};
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FunctionId(ra_salsa::InternId);
type FunctionLoc = AssocItemLoc<Function>;
impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
impl_loc!(FunctionLoc, id: Function, container: ItemContainerId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct StructId(ra_salsa::InternId);
type StructLoc = ItemLoc<Struct>;
impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
impl_loc!(StructLoc, id: Struct, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct UnionId(ra_salsa::InternId);
pub type UnionLoc = ItemLoc<Union>;
impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
impl_loc!(UnionLoc, id: Union, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct EnumId(ra_salsa::InternId);
pub type EnumLoc = ItemLoc<Enum>;
impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
impl_loc!(EnumLoc, id: Enum, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(ra_salsa::InternId);
type ConstLoc = AssocItemLoc<Const>;
impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const);
impl_loc!(ConstLoc, id: Const, container: ItemContainerId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(ra_salsa::InternId);
pub type StaticLoc = AssocItemLoc<Static>;
impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
impl_loc!(StaticLoc, id: Static, container: ItemContainerId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TraitId(ra_salsa::InternId);
pub type TraitLoc = ItemLoc<Trait>;
impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
impl_loc!(TraitLoc, id: Trait, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TraitAliasId(ra_salsa::InternId);
pub type TraitAliasLoc = ItemLoc<TraitAlias>;
impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias);
impl_loc!(TraitAliasLoc, id: TraitAlias, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAliasId(ra_salsa::InternId);
type TypeAliasLoc = AssocItemLoc<TypeAlias>;
impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
impl_loc!(TypeAliasLoc, id: TypeAlias, container: ItemContainerId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ImplId(ra_salsa::InternId);
type ImplLoc = ItemLoc<Impl>;
impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
impl_loc!(ImplLoc, id: Impl, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct UseId(ra_salsa::InternId);
type UseLoc = ItemLoc<Use>;
impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);
impl_loc!(UseLoc, id: Use, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ExternCrateId(ra_salsa::InternId);
type ExternCrateLoc = ItemLoc<ExternCrate>;
impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate);
impl_loc!(ExternCrateLoc, id: ExternCrate, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ExternBlockId(ra_salsa::InternId);
type ExternBlockLoc = ItemLoc<ExternBlock>;
impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
impl_loc!(ExternBlockLoc, id: ExternBlock, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumVariantId(ra_salsa::InternId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumVariantLoc {
pub id: ItemTreeId<Variant>,
@ -302,9 +265,6 @@ pub struct EnumVariantLoc {
}
impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant);
impl_loc!(EnumVariantLoc, id: Variant, parent: EnumId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct Macro2Id(ra_salsa::InternId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Macro2Loc {
pub container: ModuleId,
@ -316,8 +276,6 @@ pub struct Macro2Loc {
impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
impl_loc!(Macro2Loc, id: Macro2, container: ModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct MacroRulesId(ra_salsa::InternId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroRulesLoc {
pub container: ModuleId,
@ -345,8 +303,7 @@ pub enum MacroExpander {
BuiltInDerive(BuiltinDeriveExpander),
BuiltInEager(EagerExpander),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ProcMacroId(ra_salsa::InternId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ProcMacroLoc {
pub container: CrateRootModuleId,
@ -358,8 +315,6 @@ pub struct ProcMacroLoc {
impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro);
impl_loc!(ProcMacroLoc, id: Function, container: CrateRootModuleId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct BlockId(ra_salsa::InternId);
#[derive(Debug, Hash, PartialEq, Eq, Clone)]
pub struct BlockLoc {
pub ast_id: AstId<ast::BlockExpr>,
@ -368,10 +323,8 @@ pub struct BlockLoc {
}
impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
/// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and
/// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct ConstBlockId(ra_salsa::InternId);
// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and
// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent.
impl_intern!(ConstBlockId, ConstBlockLoc, intern_anonymous_const, lookup_intern_anonymous_const);
#[derive(Debug, Hash, PartialEq, Eq, Clone)]
@ -536,12 +489,11 @@ pub struct TupleFieldId {
pub index: u32,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TypeOrConstParamId {
pub parent: GenericDefId,
pub local_id: LocalTypeOrConstParamId,
}
impl InternValueTrivial for TypeOrConstParamId {}
/// A TypeOrConstParamId with an invariant that it actually belongs to a type
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -603,7 +555,6 @@ pub struct LifetimeParamId {
pub local_id: LocalLifetimeParamId,
}
pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
impl InternValueTrivial for LifetimeParamId {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ItemContainerId {
@ -615,7 +566,7 @@ pub enum ItemContainerId {
impl_from!(ModuleId for ItemContainerId);
/// A Data Type
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum AdtId {
StructId(StructId),
UnionId(UnionId),
@ -624,7 +575,7 @@ pub enum AdtId {
impl_from!(StructId, UnionId, EnumId for AdtId);
/// A macro
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum MacroId {
Macro2Id(Macro2Id),
MacroRulesId(MacroRulesId),
@ -760,9 +711,7 @@ impl From<GenericDefId> for TypeOwnerId {
/// currently only used in `InTypeConstId` for storing the type (which has type `Ty` defined in
/// the `hir-ty` crate) of the constant in its id, which is a temporary hack so we may want
/// to remove this after removing that.
pub trait OpaqueInternableThing:
std::any::Any + std::fmt::Debug + Sync + Send + UnwindSafe + RefUnwindSafe
{
pub trait OpaqueInternableThing: std::any::Any + std::fmt::Debug + Sync + Send {
fn as_any(&self) -> &dyn std::any::Any;
fn box_any(&self) -> Box<dyn std::any::Any>;
fn dyn_hash(&self, state: &mut dyn Hasher);
@ -809,11 +758,9 @@ impl Clone for Box<dyn OpaqueInternableThing> {
// and the name of the struct that contains this constant is resolved, so a query that only traverses the
// type owner by its syntax tree might have a hard time here.
/// A constant in a type as a substitution for const generics (like `Foo<{ 2 + 2 }>`) or as an array
/// length (like `[u8; 2 + 2]`). These constants are body owner and are a variant of `DefWithBodyId`. These
/// are not called `AnonymousConstId` to prevent confusion with [`ConstBlockId`].
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct InTypeConstId(ra_salsa::InternId);
// A constant in a type as a substitution for const generics (like `Foo<{ 2 + 2 }>`) or as an array
// length (like `[u8; 2 + 2]`). These constants are body owner and are a variant of `DefWithBodyId`. These
// are not called `AnonymousConstId` to prevent confusion with [`ConstBlockId`].
impl_intern!(InTypeConstId, InTypeConstLoc, intern_in_type_const, lookup_intern_in_type_const);
// We would like to set `derive(PartialEq)`
@ -838,8 +785,8 @@ impl InTypeConstId {
pub fn source(&self, db: &dyn DefDatabase) -> ast::ConstArg {
let src = self.lookup(db).id;
let file_id = src.file_id;
let root = &db.parse_or_expand(file_id);
db.ast_id_map(file_id).get(src.value).to_node(root)
let root = db.parse_or_expand(file_id);
db.ast_id_map(file_id).get(src.value).to_node(&root)
}
}
@ -884,7 +831,7 @@ impl GeneralConstId {
}
/// The defs which have a body.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum DefWithBodyId {
FunctionId(FunctionId),
StaticId(StaticId),
@ -892,7 +839,6 @@ pub enum DefWithBodyId {
InTypeConstId(InTypeConstId),
VariantId(EnumVariantId),
}
impl_from!(FunctionId, ConstId, StaticId, InTypeConstId for DefWithBodyId);
impl From<EnumVariantId> for DefWithBodyId {
@ -928,7 +874,7 @@ pub enum AssocItemId {
// casting them, and somehow making the constructors private, which would be annoying.
impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum GenericDefId {
AdtId(AdtId),
// consts can have type parameters from their parents (i.e. associated consts of traits)
@ -962,7 +908,7 @@ impl GenericDefId {
) -> (HirFileId, Option<ast::GenericParamList>) {
fn file_id_and_params_of_item_loc<Loc>(
db: &dyn DefDatabase,
def: impl for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = Loc>,
def: impl Lookup<Database = dyn DefDatabase, Data = Loc>,
) -> (HirFileId, Option<ast::GenericParamList>)
where
Loc: src::HasSource,
@ -1017,15 +963,13 @@ impl From<AssocItemId> for GenericDefId {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum CallableDefId {
FunctionId(FunctionId),
StructId(StructId),
EnumVariantId(EnumVariantId),
}
impl InternValueTrivial for CallableDefId {}
impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
impl From<CallableDefId> for ModuleDefId {
fn from(def: CallableDefId) -> ModuleDefId {
@ -1135,7 +1079,7 @@ impl From<VariantId> for AttrDefId {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum VariantId {
EnumVariantId(EnumVariantId),
StructId(StructId),
@ -1197,7 +1141,7 @@ pub trait HasModule {
impl<N, ItemId> HasModule for ItemId
where
N: ItemTreeNode,
ItemId: for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = ItemLoc<N>> + Copy,
ItemId: Lookup<Database = dyn DefDatabase, Data = ItemLoc<N>> + Copy,
{
#[inline]
fn module(&self, db: &dyn DefDatabase) -> ModuleId {
@ -1222,7 +1166,7 @@ where
#[inline]
fn module_for_assoc_item_loc<'db>(
db: &(dyn 'db + DefDatabase),
id: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<impl ItemTreeNode>>,
id: impl Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<impl ItemTreeNode>>,
) -> ModuleId {
id.lookup(db).container.module(db)
}
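The `salsa::Supertype` derive added to `AdtId`, `MacroId`, `DefWithBodyId`, `GenericDefId`, `CallableDefId`, and `VariantId` above is what lets an enum of salsa-interned ids serve directly as a query key. A usage sketch with the `body` query from the `DefDatabase` group:

let def: DefWithBodyId = DefWithBodyId::FunctionId(function_id);
let body = db.body(def); // enum keys work because DefWithBodyId derives salsa::Supertype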

View file

@ -35,9 +35,9 @@ macro_rules! f {
};
}
struct#0:1@58..64#4# MyTraitMap2#0:2@31..42#2# {#0:1@72..73#4#
map#0:1@86..89#4#:#0:1@89..90#4# #0:1@89..90#4#::#0:1@91..93#4#std#0:1@93..96#4#::#0:1@96..98#4#collections#0:1@98..109#4#::#0:1@109..111#4#HashSet#0:1@111..118#4#<#0:1@118..119#4#(#0:1@119..120#4#)#0:1@120..121#4#>#0:1@121..122#4#,#0:1@122..123#4#
}#0:1@132..133#4#
struct#0:1@58..64#20480# MyTraitMap2#0:2@31..42#4294967037# {#0:1@72..73#20480#
map#0:1@86..89#20480#:#0:1@89..90#20480# #0:1@89..90#20480#::#0:1@91..93#20480#std#0:1@93..96#20480#::#0:1@96..98#20480#collections#0:1@98..109#20480#::#0:1@109..111#20480#HashSet#0:1@111..118#20480#<#0:1@118..119#20480#(#0:1@119..120#20480#)#0:1@120..121#20480#>#0:1@121..122#20480#,#0:1@122..123#20480#
}#0:1@132..133#20480#
"#]],
);
}
@ -75,12 +75,12 @@ macro_rules! f {
};
}
fn#0:2@30..32#2# main#0:2@33..37#2#(#0:2@37..38#2#)#0:2@38..39#2# {#0:2@40..41#2#
1#0:2@50..51#2#;#0:2@51..52#2#
1.0#0:2@61..64#2#;#0:2@64..65#2#
(#0:2@74..75#2#(#0:2@75..76#2#1#0:2@76..77#2#,#0:2@77..78#2# )#0:2@78..79#2#,#0:2@79..80#2# )#0:2@80..81#2#.#0:2@81..82#2#0#0:2@82..85#2#.#0:2@82..85#2#0#0:2@82..85#2#;#0:2@85..86#2#
let#0:2@95..98#2# x#0:2@99..100#2# =#0:2@101..102#2# 1#0:2@103..104#2#;#0:2@104..105#2#
}#0:2@110..111#2#
fn#0:2@30..32#4294967037# main#0:2@33..37#4294967037#(#0:2@37..38#4294967037#)#0:2@38..39#4294967037# {#0:2@40..41#4294967037#
1#0:2@50..51#4294967037#;#0:2@51..52#4294967037#
1.0#0:2@61..64#4294967037#;#0:2@64..65#4294967037#
(#0:2@74..75#4294967037#(#0:2@75..76#4294967037#1#0:2@76..77#4294967037#,#0:2@77..78#4294967037# )#0:2@78..79#4294967037#,#0:2@79..80#4294967037# )#0:2@80..81#4294967037#.#0:2@81..82#4294967037#0#0:2@82..85#4294967037#.#0:2@82..85#4294967037#0#0:2@82..85#4294967037#;#0:2@85..86#4294967037#
let#0:2@95..98#4294967037# x#0:2@99..100#4294967037# =#0:2@101..102#4294967037# 1#0:2@103..104#4294967037#;#0:2@104..105#4294967037#
}#0:2@110..111#4294967037#
"#]],
@ -171,7 +171,7 @@ fn main(foo: ()) {
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#2#;
/* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#4294967037#;
}
}
@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
struct#1:1@59..65#4# Foo#0:2@32..35#2#(#1:1@70..71#4#u32#0:2@41..44#2#)#1:1@74..75#4#;#1:1@75..76#4#
struct#1:1@59..65#20480# Foo#0:2@32..35#4294967037#(#1:1@70..71#20480#u32#0:2@41..44#4294967037#)#1:1@74..75#20480#;#1:1@75..76#20480#
"#]],
);
}
@ -423,10 +423,10 @@ m! { foo, bar }
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
impl#\4# Bar#\4# {#\4#
fn#\4# foo#\2#(#\4#)#\4# {#\4#}#\4#
fn#\4# bar#\2#(#\4#)#\4# {#\4#}#\4#
}#\4#
impl#\20480# Bar#\20480# {#\20480#
fn#\20480# foo#\4294967037#(#\20480#)#\20480# {#\20480#}#\20480#
fn#\20480# bar#\4294967037#(#\20480#)#\20480# {#\20480#}#\20480#
}#\20480#
"#]],
);
}

View file

@ -16,7 +16,7 @@ mod proc_macros;
use std::{iter, ops::Range, sync};
use base_db::SourceDatabase;
use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
db::ExpandDatabase,
@ -26,6 +26,7 @@ use hir_expand::{
};
use intern::Symbol;
use itertools::Itertools;
use salsa::AsDynDatabase;
use span::{Edition, Span};
use stdx::{format_to, format_to_acc};
use syntax::{
@ -63,9 +64,13 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
MacroCallKind::Derive { ast_id, .. } => ast_id.map(|it| it.erase()),
MacroCallKind::Attr { ast_id, .. } => ast_id.map(|it| it.erase()),
};
let ast = db
.parse(ast_id.file_id.file_id().expect("macros inside macros are not supported"))
.syntax_node();
let editioned_file_id =
ast_id.file_id.file_id().expect("macros inside macros are not supported");
let editioned_file_id =
base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let ast = db.parse(editioned_file_id).syntax_node();
let ast_id_map = db.ast_id_map(ast_id.file_id);
let node = ast_id_map.get_erased(ast_id.value).to_node(&ast);
Some((node.text_range(), errors))

View file

@ -181,9 +181,9 @@ fn foo(&self) {
self.0. 1;
}
fn#0:1@45..47#2# foo#0:1@48..51#2#(#0:1@51..52#2#&#0:1@52..53#2#self#0:1@53..57#2# )#0:1@57..58#2# {#0:1@59..60#2#
self#0:1@65..69#2# .#0:1@69..70#2#0#0:1@70..71#2#.#0:1@71..72#2#1#0:1@73..74#2#;#0:1@74..75#2#
}#0:1@76..77#2#"#]],
fn#0:1@45..47#4294967037# foo#0:1@48..51#4294967037#(#0:1@51..52#4294967037#&#0:1@52..53#4294967037#self#0:1@53..57#4294967037# )#0:1@57..58#4294967037# {#0:1@59..60#4294967037#
self#0:1@65..69#4294967037# .#0:1@69..70#4294967037#0#0:1@70..71#4294967037#.#0:1@71..72#4294967037#1#0:1@73..74#4294967037#;#0:1@74..75#4294967037#
}#0:1@76..77#4294967037#"#]],
);
}

View file

@ -295,9 +295,12 @@ impl ModuleOrigin {
/// That is, a file or a `mod foo {}` with items.
pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
match self {
&ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
&ModuleOrigin::File { definition: editioned_file_id, .. }
| &ModuleOrigin::CrateRoot { definition: editioned_file_id } => {
let definition = base_db::EditionedFileId::new(db, editioned_file_id);
let sf = db.parse(definition).tree();
InFile::new(definition.into(), ModuleSource::SourceFile(sf))
InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf))
}
&ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
definition_tree_id.file_id(),

View file

@ -376,7 +376,7 @@ impl DefCollector<'_> {
'resolve_attr: loop {
let _p = tracing::info_span!("resolve_macros loop").entered();
'resolve_macros: loop {
self.db.unwind_if_cancelled();
self.db.unwind_if_revision_cancelled();
{
let _p = tracing::info_span!("resolve_imports loop").entered();
@ -977,7 +977,7 @@ impl DefCollector<'_> {
vis: Visibility,
import: Option<ImportOrExternCrate>,
) {
self.db.unwind_if_cancelled();
self.db.unwind_if_revision_cancelled();
self.update_recursive(module_id, resolutions, vis, import, 0)
}
@ -2517,7 +2517,7 @@ impl ModCollector<'_, '_> {
#[cfg(test)]
mod tests {
use base_db::SourceDatabase;
use base_db::RootQueryDb;
use test_fixture::WithFixture;
use crate::{nameres::DefMapCrateData, test_db::TestDB};

View file

@ -1,6 +1,6 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::AnchoredPath;
use base_db::{AnchoredPath, RootQueryDb};
use hir_expand::{name::Name, HirFileIdExt};
use span::EditionedFileId;
@ -80,7 +80,8 @@ impl ModDir {
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
for candidate in candidate_files.iter() {
let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
if let Some(file_id) = db.resolve_path(path) {
if let Some(file_id) = base_db::Upcast::<dyn RootQueryDb>::upcast(db).resolve_path(path)
{
let is_mod_rs = candidate.ends_with("/mod.rs");
let root_dir_owner = is_mod_rs || attr_path.is_some();

View file

@ -4,7 +4,7 @@ mod macros;
mod mod_resolution;
mod primitives;
use base_db::SourceDatabase;
use base_db::RootQueryDb;
use expect_test::{expect, Expect};
use test_fixture::WithFixture;
use triomphe::Arc;

View file

@@ -1,4 +1,4 @@
use base_db::SourceDatabaseFileInputExt as _;
use base_db::SourceDatabase;
use test_fixture::WithFixture;
use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};
@@ -255,10 +255,10 @@ m!(Z);
assert_eq!(module_data.scope.resolutions().count(), 4);
});
let n_recalculated_item_trees =
events.iter().filter(|it| it.contains("item_tree(")).count();
events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
assert_eq!(n_recalculated_item_trees, 6);
let n_reparsed_macros =
events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
assert_eq!(n_reparsed_macros, 3);
}
@@ -276,10 +276,11 @@ m!(Z);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 4);
});
let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
assert_eq!(n_recalculated_item_trees, 1);
let n_recalculated_item_trees =
events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
assert_eq!(n_recalculated_item_trees, 0);
let n_reparsed_macros =
events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
assert_eq!(n_reparsed_macros, 0);
}
}
@@ -310,14 +311,15 @@ pub type Ty = ();
let events = db.log_executed(|| {
db.file_item_tree(pos.file_id.into());
});
let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree(")).count();
let n_calculated_item_trees =
events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
assert_eq!(n_calculated_item_trees, 1);
let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count();
let n_parsed_files = events.iter().filter(|it| it.contains("parse")).count();
assert_eq!(n_parsed_files, 1);
}
// Delete the parse tree.
base_db::ParseQuery.in_db(&db).purge();
// FIXME(salsa-transition): bring this back
// base_db::ParseQuery.in_db(&db).purge();
{
let events = db.log_executed(|| {

View file

@@ -185,7 +185,7 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
if segments.len() == 1 && kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctxt = ctx.span_map().span_for_range(path.segment()?.syntax().text_range()).ctx;
if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
if let Some(macro_call_id) = syn_ctxt.outer_expn(ctx.db) {
if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
Some(crate_root) => PathKind::DollarCrate(crate_root),

View file

@@ -207,7 +207,13 @@ impl Resolver {
return self.module_scope.resolve_path_in_type_ns(db, path);
}
let remaining_idx = || if path.segments().len() == 1 { None } else { Some(1) };
let remaining_idx = || {
if path.segments().len() == 1 {
None
} else {
Some(1)
}
};
for scope in self.scopes() {
match scope {
@@ -314,7 +320,7 @@ impl Resolver {
None,
),
ResolvePathResultPrefixInfo::default(),
))
));
}
Path::LangItem(l, Some(_)) => {
let type_ns = match *l {
@@ -889,11 +895,10 @@ fn handle_macro_def_scope(
// A macro is allowed to refer to variables from before its declaration.
// Therefore, if we got to the rib of its declaration, give up its hygiene
// and use its parent expansion.
let parent_ctx = db.lookup_intern_syntax_context(*parent_ctx);
*hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent);
*hygiene_info = parent_ctx.outer_expn.map(|expansion| {
*hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
*hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion);
(parent_ctx.parent, expansion.def)
(parent_ctx.parent(db), expansion.def)
});
}
}
@@ -905,10 +910,10 @@ fn hygiene_info(
hygiene_id: HygieneId,
) -> Option<(SyntaxContextId, MacroDefId)> {
if !hygiene_id.is_root() {
let ctx = hygiene_id.lookup(db);
ctx.outer_expn.map(|expansion| {
let ctx = hygiene_id.lookup();
ctx.outer_expn(db).map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion);
(ctx.parent, expansion.def)
(ctx.parent(db), expansion.def)
})
} else {
None
@@ -1438,7 +1443,7 @@ impl HasResolver for MacroRulesId {
fn lookup_resolver<'db>(
db: &(dyn DefDatabase + 'db),
lookup: impl Lookup<
Database<'db> = dyn DefDatabase + 'db,
Database = dyn DefDatabase,
Data = impl ItemTreeLoc<Container = impl HasResolver>,
>,
) -> Resolver {

View file

@@ -3,10 +3,11 @@
use std::{fmt, panic, sync::Mutex};
use base_db::{
ra_salsa::{self, Durability},
AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
CrateId, FileSourceRootInput, FileText, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId,
SourceRootInput, Upcast,
};
use hir_expand::{db::ExpandDatabase, files::FilePosition, InFile};
use salsa::{AsDynDatabase, Durability};
use span::{EditionedFileId, FileId};
use syntax::{algo, ast, AstNode};
use triomphe::Arc;
@@ -18,43 +19,58 @@ use crate::{
LocalModuleId, Lookup, ModuleDefId, ModuleId,
};
#[ra_salsa::database(
base_db::SourceRootDatabaseStorage,
base_db::SourceDatabaseStorage,
hir_expand::db::ExpandDatabaseStorage,
crate::db::InternDatabaseStorage,
crate::db::DefDatabaseStorage
)]
#[salsa::db]
#[derive(Clone)]
pub(crate) struct TestDB {
storage: ra_salsa::Storage<TestDB>,
events: Mutex<Option<Vec<ra_salsa::Event>>>,
storage: salsa::Storage<Self>,
files: Arc<base_db::Files>,
events: Arc<Mutex<Option<Vec<salsa::Event>>>>,
}
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
let mut this = Self {
storage: Default::default(),
events: Default::default(),
files: Default::default(),
};
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
}
impl Upcast<dyn ExpandDatabase> for TestDB {
#[inline]
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
self
}
}
impl Upcast<dyn DefDatabase> for TestDB {
#[inline]
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
self
}
}
impl ra_salsa::Database for TestDB {
fn salsa_event(&self, event: ra_salsa::Event) {
impl Upcast<dyn RootQueryDb> for TestDB {
fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
self
}
}
impl Upcast<dyn SourceDatabase> for TestDB {
fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
self
}
}
#[salsa::db]
impl salsa::Database for TestDB {
fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
let mut events = self.events.lock().unwrap();
if let Some(events) = &mut *events {
let event = event();
events.push(event);
}
}
@@ -68,12 +84,54 @@ impl fmt::Debug for TestDB {
impl panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
#[salsa::db]
impl SourceDatabase for TestDB {
fn file_text(&self, file_id: base_db::FileId) -> FileText {
self.files.file_text(file_id)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
FileLoaderDelegate(self).relevant_crates(file_id)
fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) {
let files = Arc::clone(&self.files);
files.set_file_text(self, file_id, text);
}
fn set_file_text_with_durability(
&mut self,
file_id: base_db::FileId,
text: &str,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_file_text_with_durability(self, file_id, text, durability);
}
/// Source root of the file.
fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
self.files.source_root(source_root_id)
}
fn set_source_root_with_durability(
&mut self,
source_root_id: SourceRootId,
source_root: Arc<SourceRoot>,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_source_root_with_durability(self, source_root_id, source_root, durability);
}
fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput {
self.files.file_source_root(id)
}
fn set_file_source_root_with_durability(
&mut self,
id: base_db::FileId,
source_root_id: SourceRootId,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_file_source_root_with_durability(self, id, source_root_id, durability);
}
}
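Everything in this `SourceDatabase` impl delegates to a shared `base_db::Files` store: with query groups gone for these inputs, the database keeps a side table mapping raw `FileId`s to Salsa input structs, created on first use and shared (via `Arc`) between cloned database handles. A rough sketch of that idea, with the concrete layout of `Files` assumed rather than copied from `base_db`:

```rust
use dashmap::DashMap;

// Stand-in for a generated salsa input struct holding a file's text.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct FileTextInput(u32);

// Assumed shape: a concurrent map from raw file ids to input structs.
#[derive(Default)]
pub struct Files {
    file_text: DashMap<u32, FileTextInput>,
}

impl Files {
    // Create-on-first-use: later lookups return the same input struct, so a
    // set_file_text call updates the existing input in place instead of
    // minting a new one, which would defeat incrementality.
    pub fn file_text(&self, file_id: u32) -> FileTextInput {
        *self.file_text.entry(file_id).or_insert(FileTextInput(file_id))
    }
}

fn main() {
    let files = Files::default();
    assert_eq!(files.file_text(1), files.file_text(1));
}
```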
@@ -92,8 +150,10 @@ impl TestDB {
}
pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
let crate_def_map = db.crate_def_map(krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
return crate_def_map.module_id(local_id);
@@ -104,8 +164,10 @@ impl TestDB {
}
pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
let file_module = self.module_for_file(position.file_id.file_id());
let mut def_map = file_module.def_map(self);
let mut def_map = file_module.def_map(db);
let module = self.mod_at_position(&def_map, position);
def_map = match self.block_at_position(&def_map, position) {
@@ -128,10 +190,11 @@ impl TestDB {
/// Finds the smallest/innermost module in `def_map` containing `position`.
fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModuleId {
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
let mut size = None;
let mut res = DefMap::ROOT;
for (module, data) in def_map.modules() {
let src = data.definition_source(self);
let src = data.definition_source(db);
if src.file_id != position.file_id {
continue;
}
@@ -167,17 +230,18 @@ impl TestDB {
}
fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
// Find the smallest (innermost) function in `def_map` containing the cursor.
let mut size = None;
let mut fn_def = None;
for (_, module) in def_map.modules() {
let file_id = module.definition_source(self).file_id;
let file_id = module.definition_source(db).file_id;
if file_id != position.file_id {
continue;
}
for decl in module.scope.declarations() {
if let ModuleDefId::FunctionId(it) = decl {
let range = it.lookup(self).source(self).value.syntax().text_range();
let range = it.lookup(db).source(db).value.syntax().text_range();
if !range.contains(position.offset) {
continue;
@@ -203,10 +267,13 @@ impl TestDB {
// Find the innermost block expression that has a `DefMap`.
let def_with_body = fn_def?.into();
let (_, source_map) = self.body_with_source_map(def_with_body);
let scopes = self.expr_scopes(def_with_body);
let root = self.parse(position.file_id);
let source_map = db.body_with_source_map(def_with_body).1;
let scopes = db.expr_scopes(def_with_body);
let editioned_file_id_wrapper =
base_db::EditionedFileId::new(db.as_dyn_database(), position.file_id);
let root = db.parse(editioned_file_id_wrapper);
let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset)
.filter_map(|node| {
let block = ast::BlockExpr::cast(node)?;
@@ -223,7 +290,7 @@ impl TestDB {
let mut containing_blocks =
scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));
if let Some(block) = containing_blocks.next().map(|block| self.block_def_map(block)) {
if let Some(block) = containing_blocks.next().map(|block| db.block_def_map(block)) {
return Some(block);
}
}
@@ -231,7 +298,7 @@ impl TestDB {
None
}
pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<ra_salsa::Event> {
pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
*self.events.lock().unwrap() = Some(Vec::new());
f();
self.events.lock().unwrap().take().unwrap()
@@ -244,8 +311,11 @@ impl TestDB {
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
ra_salsa::EventKind::WillExecute { database_key } => {
Some(format!("{:?}", database_key.debug(self)))
salsa::EventKind::WillExecute { database_key } => {
let ingredient = self
.as_dyn_database()
.ingredient_debug_name(database_key.ingredient_index());
Some(ingredient.to_string())
}
_ => None,
})
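There is a trade-off here: old salsa could pretty-print the full query key via `database_key.debug(self)`, while new Salsa only exposes the ingredient's debug name. That is why the incremental tests earlier in this diff now match on names like `file_item_tree_shim` instead of `item_tree(`. A small sketch of how such an event log ends up being asserted on, mirroring the `log_executed` usage in those tests:

```rust
// Events are now bare ingredient names, so tests count substring matches
// instead of parsing pretty-printed query keys.
fn count_executions(events: &[String], ingredient: &str) -> usize {
    events.iter().filter(|it| it.contains(ingredient)).count()
}

fn main() {
    let events =
        vec!["parse".to_owned(), "file_item_tree_shim".to_owned(), "parse".to_owned()];
    assert_eq!(count_executions(&events, "file_item_tree_shim"), 1);
    assert_eq!(count_executions(&events, "parse"), 2);
}
```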

View file

@@ -21,6 +21,8 @@ itertools.workspace = true
hashbrown.workspace = true
smallvec.workspace = true
triomphe.workspace = true
query-group.workspace = true
salsa.workspace = true
# local deps
stdx.workspace = true

View file

@@ -3,9 +3,12 @@
use base_db::AnchoredPath;
use cfg::CfgExpr;
use either::Either;
use intern::{sym, Symbol};
use intern::{
sym::{self},
Symbol,
};
use mbe::{expect_fragment, DelimiterKind};
use span::{Edition, EditionedFileId, Span};
use span::{Edition, EditionedFileId, FileId, Span};
use stdx::format_to;
use syntax::{
format_smolstr,
@@ -401,7 +404,7 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
// stack that does not have #[allow_internal_unstable(edition_panic)].
// (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.)
loop {
let Some(expn) = db.lookup_intern_syntax_context(span.ctx).outer_expn else {
let Some(expn) = span.ctx.outer_expn(db) else {
break false;
};
let expn = db.lookup_intern_macro_call(expn);
@@ -656,10 +659,10 @@ fn relative_file(
allow_recursion: bool,
err_span: Span,
) -> Result<EditionedFileId, ExpandError> {
let lookup = call_id.lookup(db);
let lookup = db.lookup_intern_macro_call(call_id);
let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
let path = AnchoredPath { anchor: call_site, path: path_str };
let res = db
let res: FileId = db
.resolve_path(path)
.ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?;
// Prevent include itself
@@ -725,8 +728,10 @@ fn include_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok(it) => it,
let (file_id_wrapper, editioned_file_id) = match include_input_to_file_id(db, arg_id, tt) {
Ok(editioned_file_id) => {
(base_db::EditionedFileId::new(db, editioned_file_id), editioned_file_id)
}
Err(e) => {
return ExpandResult::new(
tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
@@ -734,10 +739,10 @@
)
}
};
let span_map = db.real_span_map(file_id);
let span_map = db.real_span_map(editioned_file_id);
// FIXME: Parse errors
ExpandResult::ok(syntax_node_to_token_tree(
&db.parse(file_id).syntax_node(),
&db.parse(file_id_wrapper).syntax_node(),
SpanMap::RealSpanMap(span_map),
span,
syntax_bridge::DocCommentDesugarMode::ProcMacro,
@@ -800,7 +805,7 @@ fn include_str_expand(
};
let text = db.file_text(file_id.file_id());
let text = &*text;
let text = &*text.text(db);
ExpandResult::ok(quote!(span =>#text))
}
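`file_text` now returns an input struct rather than the text itself, so the payload is read through a field accessor (`text.text(db)`), which records a dependency on that field. A minimal sketch of the input-struct pattern, assuming the real `base_db::FileText` carries more fields than this:

```rust
// Hypothetical reduced FileText; `#[return_ref]` makes the accessor hand out
// a reference instead of cloning the payload.
#[salsa::input]
pub struct FileText {
    #[return_ref]
    pub text: String,
}

fn char_count(db: &dyn salsa::Database, file_text: FileText) -> usize {
    // A tracked read: a later `set_text` on this input invalidates every
    // query that went through this accessor.
    file_text.text(db).chars().count()
}
```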

View file

@@ -277,8 +277,8 @@ mod tests {
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:#?}");
expect![[r#"
SUBTREE $$ 937550:0@0..0#2 937550:0@0..0#2
IDENT hello 937550:0@0..0#2"#]]
SUBTREE $$ 937550:0@0..0#4294967037 937550:0@0..0#4294967037
IDENT hello 937550:0@0..0#4294967037"#]]
.assert_eq(&t);
}

View file

@@ -1,10 +1,8 @@
//! Defines a unit of change that can be applied to the database to get the next
//! state. Changes are transactional.
use base_db::{
ra_salsa::Durability, CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot,
SourceRootDatabase,
};
use base_db::{CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot};
use rustc_hash::FxHashMap;
use salsa::Durability;
use span::FileId;
use triomphe::Arc;
@@ -21,7 +19,7 @@ impl ChangeWithProcMacros {
Self::default()
}
pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) {
pub fn apply(self, db: &mut impl ExpandDatabase) {
self.source_change.apply(db);
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);

View file

@@ -1,10 +1,14 @@
//! Defines database & queries for macro expansion.
use base_db::{ra_salsa, CrateId, SourceDatabase};
use base_db::{CrateId, RootQueryDb};
use either::Either;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
use salsa::plumbing::AsId;
use span::{
AstIdMap, Edition, EditionedFileId, HirFileId, HirFileIdRepr, MacroCallId, MacroFileId, Span,
SyntaxContextId,
};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
use triomphe::Arc;
@@ -19,12 +23,11 @@ use crate::{
span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
SyntaxContextExt as _,
},
proc_macro::ProcMacros,
span_map::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
CustomProcMacroExpander, EagerCallInfo, EagerExpander, ExpandError, ExpandResult, ExpandTo,
ExpansionSpanMap, HirFileId, HirFileIdRepr, Lookup, MacroCallId, MacroCallKind, MacroCallLoc,
MacroDefId, MacroDefKind, MacroFileId,
proc_macro::{CustomProcMacroExpander, ProcMacros},
span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
EagerExpander, ExpandError, ExpandResult, ExpandTo, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind,
};
/// This is just to ensure the types of smart_macro_arg and macro_arg are the same
type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
@@ -52,32 +55,32 @@ pub enum TokenExpander {
ProcMacro(CustomProcMacroExpander),
}
#[ra_salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
#[query_group::query_group]
pub trait ExpandDatabase: RootQueryDb {
/// The proc macros.
#[ra_salsa::input]
#[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>;
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
#[ra_salsa::transparent]
#[salsa::transparent]
fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
/// Implementation for the macro case.
#[ra_salsa::lru]
#[salsa::lru(512)]
fn parse_macro_expansion(
&self,
macro_file: MacroFileId,
macro_file: span::MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[ra_salsa::transparent]
#[ra_salsa::invoke(SpanMap::new)]
#[salsa::transparent]
#[salsa::invoke(SpanMap::new)]
fn span_map(&self, file_id: HirFileId) -> SpanMap;
#[ra_salsa::transparent]
#[ra_salsa::invoke(crate::span_map::expansion_span_map)]
#[salsa::transparent]
#[salsa::invoke(crate::span_map::expansion_span_map)]
fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
#[ra_salsa::invoke(crate::span_map::real_span_map)]
#[salsa::invoke(crate::span_map::real_span_map)]
fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
@@ -85,43 +88,47 @@ pub trait ExpandDatabase: SourceDatabase {
///
/// We encode macro definitions into ids of macro calls; this is what allows us
/// to be incremental.
#[ra_salsa::interned]
#[salsa::transparent]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
#[ra_salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
#[salsa::transparent]
fn lookup_intern_macro_call(&self, macro_call: MacroCallId) -> MacroCallLoc;
#[ra_salsa::transparent]
fn setup_syntax_context_root(&self) -> ();
#[ra_salsa::transparent]
#[ra_salsa::invoke(crate::hygiene::dump_syntax_contexts)]
#[salsa::transparent]
#[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
fn dump_syntax_contexts(&self) -> String;
/// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
#[deprecated = "calling this is incorrect, call `macro_arg_considering_derives` instead"]
#[salsa::invoke(macro_arg)]
fn macro_arg(&self, id: MacroCallId) -> MacroArgResult;
#[ra_salsa::transparent]
#[salsa::transparent]
fn macro_arg_considering_derives(
&self,
id: MacroCallId,
kind: &MacroCallKind,
) -> MacroArgResult;
/// Fetches the expander for this macro.
#[ra_salsa::transparent]
#[ra_salsa::invoke(TokenExpander::macro_expander)]
#[salsa::transparent]
#[salsa::invoke(TokenExpander::macro_expander)]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
/// Fetches (and compiles) the expander of this decl macro.
#[ra_salsa::invoke(DeclarativeMacroExpander::expander)]
#[salsa::invoke(DeclarativeMacroExpander::expander)]
fn decl_macro_expander(
&self,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
/// @edwin0cheng heroically debugged this once! See #4315 for details
#[salsa::invoke(expand_proc_macro)]
fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::TopSubtree>>;
/// Retrieves the span to be used for a proc-macro expansion's spans.
/// This is a firewall query as it requires parsing the file, which we don't want proc-macros to
@@ -129,22 +136,42 @@ pub trait ExpandDatabase: SourceDatabase {
/// parse queries being LRU cached. If they weren't the invalidations would only happen if the
/// user wrote in the file that defines the proc-macro.
fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
/// Firewall query that returns the errors from the `parse_macro_expansion` query.
#[salsa::invoke(parse_macro_expansion_error)]
fn parse_macro_expansion_error(
&self,
macro_call: MacroCallId,
) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>>;
#[ra_salsa::transparent]
#[salsa::transparent]
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContextId;
}
#[salsa::interned(no_lifetime, id = span::MacroCallId)]
pub struct MacroCallWrapper {
pub loc: MacroCallLoc,
}
fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
MacroCallWrapper::new(db, macro_call).0
}
fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
MacroCallWrapper::ingredient(db).data(db.as_dyn_database(), macro_call.as_id()).0.clone()
}
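The wrapper keeps `intern_macro_call`'s public contract while storing the data in a Salsa interned ingredient whose id type is pinned to `span::MacroCallId` via `id =`. The invariant downstream code relies on, written out as a sketch against the queries defined above (assumes `MacroCallLoc`'s derived `Clone`/`Eq`/`Debug`):

```rust
fn roundtrip(db: &dyn ExpandDatabase, loc: MacroCallLoc) {
    let id = db.intern_macro_call(loc.clone());
    // Deduplication: interning an equal loc yields the same id.
    assert_eq!(db.intern_macro_call(loc.clone()), id);
    // Lookup is the inverse of interning.
    assert_eq!(db.lookup_intern_macro_call(id), loc);
}
```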
#[salsa::interned(no_lifetime, id = span::SyntaxContextId)]
pub struct SyntaxContextWrapper {
pub data: SyntaxContextId,
}
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContextId {
match file.repr() {
HirFileIdRepr::FileId(_) => SyntaxContextId::root(edition),
HirFileIdRepr::MacroFile(m) => {
db.macro_arg_considering_derives(m.macro_call_id, &m.macro_call_id.lookup(db).kind)
.2
.ctx
let kind = db.lookup_intern_macro_call(m.macro_call_id).kind;
db.macro_arg_considering_derives(m.macro_call_id, &kind).2.ctx
}
}
}
@@ -322,9 +349,15 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Ar
triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
db.parse(file_id).syntax_node()
}
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@@ -376,8 +409,13 @@ pub(crate) fn parse_with_map(
file_id: HirFileId,
) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
(
db.parse(file_id).to_syntax(),
SpanMap::RealSpanMap(db.real_span_map(editioned_file_id)),
)
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
@@ -597,7 +635,7 @@ fn macro_expand(
let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
MacroDefKind::ProcMacro(..) => {
return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None)
return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None);
}
_ => {
let (macro_arg, undo_info, span) =
@@ -728,12 +766,7 @@ pub(crate) fn token_tree_to_syntax_node(
ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
};
syntax_bridge::token_tree_to_syntax_node(
tt,
entry_point,
&mut |ctx| ctx.lookup(db).edition,
edition,
)
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db), edition)
}
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
@@ -754,9 +787,3 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
})
}
}
fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
for edition in Edition::iter() {
db.intern_syntax_context(SyntaxContextData::root(edition));
}
}

View file

@@ -102,12 +102,13 @@ impl DeclarativeMacroExpander {
};
let ctx_edition = |ctx: SyntaxContextId| {
let crate_graph = db.crate_graph();
if ctx.is_root() {
crate_graph[def_crate].edition
} else {
let data = db.lookup_intern_syntax_context(ctx);
// UNWRAP-SAFETY: Only the root context has no outer expansion
crate_graph[data.outer_expn.unwrap().lookup(db).def.krate].edition
let krate = db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap()).def.krate;
crate_graph[krate].edition
}
};
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {

View file

@@ -28,7 +28,7 @@ use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
mod_path::ModPath,
AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile,
MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};
@@ -47,13 +47,13 @@ pub fn expand_eager_macro_input(
// When `lazy_expand` is called, its *parent* file must already exist.
// Here we store an eager macro id for the argument expanded subtree
// for that purpose.
let arg_id = MacroCallLoc {
let loc = MacroCallLoc {
def,
krate,
kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None },
ctxt: call_site,
}
.intern(db);
};
let arg_id = db.intern_macro_call(loc);
#[allow(deprecated)] // builtin eager macros are never derives
let (_, _, span) = db.macro_arg(arg_id);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
@@ -107,7 +107,7 @@ pub fn expand_eager_macro_input(
ctxt: call_site,
};
ExpandResult { value: Some(loc.intern(db)), err }
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
}
fn lazy_expand(

View file

@@ -159,7 +159,9 @@ trait FileIdToSyntax: Copy {
impl FileIdToSyntax for EditionedFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse(self).syntax_node()
let file_id = base_db::EditionedFileId::new(db, self);
db.parse(file_id).syntax_node()
}
}
impl FileIdToSyntax for MacroFileId {
@@ -274,7 +276,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.borrow().clone() })
return Some(InRealFile { file_id, value: self.value.borrow().clone() });
}
HirFileIdRepr::MacroFile(m)
if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
@@ -284,12 +286,14 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
_ => return None,
};
let FileRange { file_id, range } = map_node_range_up_rooted(
let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted(
db,
&db.expansion_span_map(file_id),
self.value.borrow().text_range(),
)?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let kind = self.kind();
let value = db
.parse(file_id)
@@ -298,7 +302,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
.ancestors()
.take_while(|it| it.text_range() == range)
.find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value))
Some(InRealFile::new(editioned_file_id, value))
}
}
@@ -453,7 +457,7 @@ impl<N: AstNode> InFile<N> {
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value })
return Some(InRealFile { file_id, value: self.value });
}
HirFileIdRepr::MacroFile(m) => m,
};
@@ -461,16 +465,18 @@ impl<N: AstNode> InFile<N> {
return None;
}
let FileRange { file_id, range } = map_node_range_up_rooted(
let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted(
db,
&db.expansion_span_map(file_id),
self.value.syntax().text_range(),
)?;
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(file_id, value))
Some(InRealFile::new(editioned_file_id, value))
}
}

View file

@@ -22,11 +22,11 @@
// FIXME: Move this into the span crate? Not quite possible today as that depends on `MacroCallLoc`
// which contains a bunch of unrelated things
use std::iter;
use std::{convert::identity, iter};
use span::{Edition, MacroCallId, Span, SyntaxContextData, SyntaxContextId};
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use crate::db::{ExpandDatabase, InternSyntaxContextQuery};
use crate::db::{ExpandDatabase, MacroCallWrapper};
pub use span::Transparency;
@@ -72,8 +72,8 @@ fn span_with_ctxt_from_mark(
pub(super) fn apply_mark(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: MacroCallId,
ctxt: span::SyntaxContextId,
call_id: span::MacroCallId,
transparency: Transparency,
edition: Edition,
) -> SyntaxContextId {
@@ -114,92 +114,75 @@ fn apply_mark_internal(
transparency: Transparency,
edition: Edition,
) -> SyntaxContextId {
use base_db::ra_salsa;
let call_id = Some(call_id);
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = syntax_context_data.opaque;
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
let mut opaque = ctxt.opaque(db);
let mut opaque_and_semitransparent = ctxt.opaque_and_semitransparent(db);
if transparency >= Transparency::Opaque {
let parent = opaque;
opaque = ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency, edition),
|new_opaque| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
edition,
},
);
opaque =
SyntaxContextId::new(db, call_id, transparency, edition, parent, identity, identity);
}
if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent;
opaque_and_semitransparent =
ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency, edition),
|new_opaque_and_semitransparent| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
edition,
},
);
SyntaxContextId::new(db, call_id, transparency, edition, parent, |_| opaque, identity);
}
let parent = ctxt;
db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent,
SyntaxContextId::new(
db,
call_id,
transparency,
edition,
})
parent,
|_| opaque,
|_| opaque_and_semitransparent,
)
}
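The closure arguments to `SyntaxContextId::new` exist because `opaque` and `opaque_and_semitransparent` may need to refer to the very context being created: the constructor hands the freshly allocated id to the field initializers, which is what the `identity` arguments express. A toy model of that self-referential interning shape (deduplication omitted; the real constructor lives in the `span` crate):

```rust
// Toy interner whose stored value may embed its own id, mirroring how the
// opaque-field closures receive the id that is about to be allocated.
struct Interner<T> {
    items: Vec<T>,
}

impl<T> Interner<T> {
    fn new_with(&mut self, init: impl FnOnce(usize) -> T) -> usize {
        let id = self.items.len();
        let value = init(id); // init sees the id before the value is stored
        self.items.push(value);
        id
    }
}

fn main() {
    let mut interner = Interner { items: Vec::new() };
    // An "opaque" entry pointing at itself, as in the root-like case above.
    let id = interner.new_with(|me| (me, me));
    assert_eq!(interner.items[id], (id, id));
}
```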
pub trait SyntaxContextExt {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>;
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId;
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId;
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId;
fn remove_mark(&mut self, db: &dyn ExpandDatabase)
-> (Option<span::MacroCallId>, Transparency);
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)>;
fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
}
impl SyntaxContextExt for SyntaxContextId {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).opaque_and_semitransparent
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId {
self.opaque_and_semitransparent(db)
}
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).opaque
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId {
self.opaque(db)
}
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).parent
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId {
self.parent(db)
}
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(self);
(data.outer_expn, data.outer_transparency)
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency) {
let data = self;
(data.outer_expn(db), data.outer_transparency(db))
}
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(*self);
*self = data.parent;
(data.outer_expn, data.outer_transparency)
fn remove_mark(
&mut self,
db: &dyn ExpandDatabase,
) -> (Option<span::MacroCallId>, Transparency) {
let data = *self;
*self = data.parent(db);
(data.outer_expn(db), data.outer_transparency(db))
}
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)> {
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)> {
let mut marks = marks_rev(self, db).collect::<Vec<_>>();
marks.reverse();
marks
}
fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
!self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque()
!self.is_root() && self.outer_transparency(db).is_opaque()
}
}
@@ -207,7 +190,7 @@ impl SyntaxContextExt for SyntaxContextId {
pub fn marks_rev(
ctxt: SyntaxContextId,
db: &dyn ExpandDatabase,
) -> impl Iterator<Item = (MacroCallId, Transparency)> + '_ {
) -> impl Iterator<Item = (span::MacroCallId, Transparency)> + '_ {
iter::successors(Some(ctxt), move |&mark| Some(mark.parent_ctxt(db)))
.take_while(|&it| !it.is_root())
.map(|ctx| {
@@ -219,18 +202,14 @@ pub fn marks_rev(
}
pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery};
use base_db::ra_salsa::debug::DebugQueryTable;
let mut s = String::from("Expansions:");
let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
let id = e.key;
let expn_data = e.value.as_ref().unwrap();
let entries =
MacroCallWrapper::ingredient(db).entries(db.as_dyn_database()).collect::<Vec<_>>();
for loc in entries {
let expn_data = &loc.fields().0;
s.push_str(&format!(
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
id,
"parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
expn_data.kind.file_id(),
expn_data.ctxt,
expn_data.kind.descr(),
@@ -238,28 +217,25 @@ pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
}
s.push_str("\n\nSyntaxContexts:\n");
let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
let entries = SyntaxContextId::ingredient(db).entries(db.as_dyn_database()).collect::<Vec<_>>();
for e in entries {
struct SyntaxContextDebug<'a>(
&'a dyn ExpandDatabase,
SyntaxContextId,
&'a SyntaxContextData,
&'a span::SyntaxContextUnderlyingData,
);
impl std::fmt::Debug for SyntaxContextDebug<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fancy_debug(self.2, self.1, self.0, f)
fancy_debug(self.1, self.0, f)
}
}
fn fancy_debug(
this: &SyntaxContextData,
self_id: SyntaxContextId,
this: &span::SyntaxContextUnderlyingData,
db: &dyn ExpandDatabase,
f: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
write!(f, "#{self_id} parent: #{}, outer_mark: (", this.parent)?;
write!(f, "parent: #{}, outer_mark: (", this.parent)?;
match this.outer_expn {
Some(id) => {
write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
@@ -269,7 +245,8 @@ pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
write!(f, ", {:?})", this.outer_transparency)
}
stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
let dbg = SyntaxContextDebug(db, e.fields());
stdx::format_to!(s, "{:?}\n", dbg);
}
s
}

View file

@@ -33,11 +33,11 @@ use triomphe::Arc;
use core::fmt;
use std::hash::Hash;
use base_db::{ra_salsa::InternValueTrivial, CrateId};
use base_db::CrateId;
use either::Either;
use span::{
Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
SyntaxContextData, SyntaxContextId,
SyntaxContextId,
};
use syntax::{
ast::{self, AstNode},
@@ -89,17 +89,17 @@ pub mod tt {
macro_rules! impl_intern_lookup {
($db:ident, $id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
impl $crate::Intern for $loc {
type Database<'db> = dyn $db + 'db;
type Database = dyn $db;
type ID = $id;
fn intern(self, db: &Self::Database<'_>) -> $id {
fn intern(self, db: &Self::Database) -> Self::ID {
db.$intern(self)
}
}
impl $crate::Lookup for $id {
type Database<'db> = dyn $db + 'db;
type Database = dyn $db;
type Data = $loc;
fn lookup(&self, db: &Self::Database<'_>) -> $loc {
fn lookup(&self, db: &Self::Database) -> Self::Data {
db.$lookup(*self)
}
}
@@ -108,15 +108,15 @@ macro_rules! impl_intern_lookup {
// ideally these would be defined in base-db, but the orphan rule doesn't let us
pub trait Intern {
type Database<'db>: ?Sized;
type Database: ?Sized;
type ID;
fn intern(self, db: &Self::Database<'_>) -> Self::ID;
fn intern(self, db: &Self::Database) -> Self::ID;
}
pub trait Lookup {
type Database<'db>: ?Sized;
type Database: ?Sized;
type Data;
fn lookup(&self, db: &Self::Database<'_>) -> Self::Data;
fn lookup(&self, db: &Self::Database) -> Self::Data;
}
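Both traits drop the `Database<'db>` generic associated type for a plain unsized associated type: the new Salsa database objects (`dyn DefDatabase`, `dyn ExpandDatabase`) carry no lifetime parameter, so the GAT was pure bookkeeping. A standalone illustration of the simplified shape, with toy types in place of the real database traits:

```rust
trait Lookup {
    type Database: ?Sized; // plain unsized associated type, no lifetime
    type Data;
    fn lookup(&self, db: &Self::Database) -> Self::Data;
}

trait Db {
    fn resolve(&self, id: u32) -> String;
}

struct Id(u32);

impl Lookup for Id {
    type Database = dyn Db; // previously spelled via `type Database<'db>`
    type Data = String;
    fn lookup(&self, db: &Self::Database) -> String {
        db.resolve(self.0)
    }
}

struct MapDb;
impl Db for MapDb {
    fn resolve(&self, id: u32) -> String {
        format!("item-{id}")
    }
}

fn main() {
    assert_eq!(Id(7).lookup(&MapDb), "item-7");
}
```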
impl_intern_lookup!(
@@ -127,14 +127,6 @@ impl_intern_lookup!(
lookup_intern_macro_call
);
impl_intern_lookup!(
ExpandDatabase,
SyntaxContextId,
SyntaxContextData,
intern_syntax_context,
lookup_intern_syntax_context
);
pub type ExpandResult<T> = ValueResult<T, ExpandError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -262,7 +254,6 @@ pub struct MacroCallLoc {
pub kind: MacroCallKind,
pub ctxt: SyntaxContextId,
}
impl InternValueTrivial for MacroCallLoc {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId {
@@ -357,7 +348,7 @@ impl HirFileIdExt for HirFileId {
fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self.repr() {
HirFileIdRepr::FileId(file_id) => file_id.edition(),
HirFileIdRepr::MacroFile(m) => m.macro_call_id.lookup(db).def.edition,
HirFileIdRepr::MacroFile(m) => db.lookup_intern_macro_call(m.macro_call_id).def.edition,
}
}
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
@@ -366,7 +357,7 @@ impl HirFileIdExt for HirFileId {
match file_id.repr() {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
file_id = macro_call_id.lookup(db).kind.file_id();
file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id()
}
}
}
@@ -409,7 +400,7 @@ impl HirFileIdExt for HirFileId {
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind {
MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
_ => return None,
@@ -467,7 +458,7 @@ impl MacroFileIdExt for MacroFileId {
let mut level = 0;
let mut macro_file = self;
loop {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
level += 1;
macro_file = match loc.kind.file_id().repr() {
@@ -477,7 +468,7 @@
}
}
fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
self.macro_call_id.lookup(db).kind.file_id()
db.lookup_intern_macro_call(self.macro_call_id).kind.file_id()
}
/// Return expansion information if it is a macro-expansion file
@@ -538,7 +529,7 @@ impl MacroDefId {
kind: MacroCallKind,
ctxt: SyntaxContextId,
) -> MacroCallId {
MacroCallLoc { def: self, krate, kind, ctxt }.intern(db)
db.intern_macro_call(MacroCallLoc { def: self, krate, kind, ctxt })
}
pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {

View file

@@ -277,7 +277,7 @@ fn convert_path(
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
let syn_ctx = span_for_range(segment.syntax().text_range());
if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
mod_path.kind = match resolve_crate_root(db, syn_ctx) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
@@ -336,7 +336,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Optio
pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
// we don't want to pretend that the `macro_rules!` definition is in the `macro`
// as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
// as described in `SyntaxContextId::apply_mark`, so we ignore prepended opaque marks.
// FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
// definitions actually produced by `macro` and `macro` definitions produced by
// `macro_rules!`, but at least such configurations are not stable yet.

View file

@@ -25,9 +25,8 @@ pub fn prettify_macro_expansion(
let ctx = span_map.span_at(dollar_crate.text_range().start() + span_offset).ctx;
let replacement =
syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
let ctx_data = db.lookup_intern_syntax_context(ctx);
let macro_call_id =
ctx_data.outer_expn.expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
let macro_call = db.lookup_intern_macro_call(macro_call_id);
let macro_def_crate = macro_call.def.krate;
// First, if this is the same crate as the macro, nothing will work but `crate`.

View file

@@ -79,10 +79,16 @@ impl SpanMapRef<'_> {
}
}
pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -> Arc<RealSpanMap> {
pub(crate) fn real_span_map(
db: &dyn ExpandDatabase,
editioned_file_id: EditionedFileId,
) -> Arc<RealSpanMap> {
use syntax::ast::HasModuleItem;
let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
let ast_id_map = db.ast_id_map(editioned_file_id.into());
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
let tree = db.parse(file_id).tree();
// This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
// would mean we'd invalidate everything whenever we type. So instead we make the text ranges
@@ -134,7 +140,7 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -
});
Arc::new(RealSpanMap::from_file(
file_id,
editioned_file_id,
pairs.into_boxed_slice(),
tree.syntax().text_range().end(),
))

View file

@@ -31,8 +31,11 @@ la-arena.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
typed-arena = "2.0.1"
dashmap.workspace = true
indexmap.workspace = true
rustc_apfloat = "0.2.0"
query-group.workspace = true
salsa.workspace = true
ra-ap-rustc_abi.workspace = true
ra-ap-rustc_index.workspace = true

View file

@@ -451,10 +451,10 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string()
}
fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
format!("Opaque_{}", opaque_ty_id.0)
format!("Opaque_{:?}", opaque_ty_id.0)
}
fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
format!("fn_{}", fn_def_id.0)
format!("fn_{:?}", fn_def_id.0)
}
fn coroutine_datum(
&self,

View file

@@ -1,6 +1,6 @@
//! Constant evaluation details
use base_db::{ra_salsa::Cycle, CrateId};
use base_db::CrateId;
use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
use hir_def::{
expr_store::{Body, HygieneId},
@@ -11,14 +11,19 @@ use hir_def::{
ConstBlockLoc, EnumVariantId, GeneralConstId, HasModule as _, StaticId,
};
use hir_expand::Lookup;
use salsa::Cycle;
use stdx::never;
use triomphe::Arc;
use crate::{
db::HirDatabase, display::DisplayTarget, generics::Generics, infer::InferenceContext,
lower::ParamLoweringMode, mir::monomorphize_mir_body_bad, to_placeholder_idx, Const, ConstData,
ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty,
TyBuilder,
db::{HirDatabase, HirDatabaseData},
display::DisplayTarget,
generics::Generics,
infer::InferenceContext,
lower::ParamLoweringMode,
mir::monomorphize_mir_body_bad,
to_placeholder_idx, Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap,
Substitution, TraitEnvironment, Ty, TyBuilder,
};
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@@ -224,9 +229,10 @@ pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: &GeneralConstId,
_: &Substitution,
_: &Option<Arc<TraitEnvironment>>,
_: HirDatabaseData,
_: GeneralConstId,
_: Substitution,
_: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
@@ -234,7 +240,7 @@ pub(crate) fn const_eval_recover(
pub(crate) fn const_eval_static_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: &StaticId,
_: StaticId,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
@@ -242,7 +248,7 @@ pub(crate) fn const_eval_static_recover(
pub(crate) fn const_eval_discriminant_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: &EnumVariantId,
_: EnumVariantId,
) -> Result<i128, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
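The recovery functions change shape here: keys now arrive by value, and multi-key queries additionally receive a generated `HirDatabaseData` handle, where `ra-salsa` passed everything by reference. The contract itself is unchanged: when the engine detects a cycle it short-circuits to the handler, which ignores its inputs and returns a fixed error. A toy model of that contract, not the salsa implementation:

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
enum Error {
    Loop,
}

// Stand-in recovery handler: owned parameters, constant error result,
// mirroring const_eval_recover above.
fn recover(_key: u32) -> Result<i128, Error> {
    Err(Error::Loop)
}

struct Evaluator {
    deps: HashMap<u32, Vec<u32>>, // key -> keys it reads
    in_progress: Vec<u32>,        // evaluation stack for cycle detection
}

impl Evaluator {
    fn eval(&mut self, key: u32) -> Result<i128, Error> {
        if self.in_progress.contains(&key) {
            return recover(key); // cycle: route to the recovery function
        }
        self.in_progress.push(key);
        let mut result = 1;
        for dep in self.deps.get(&key).cloned().unwrap_or_default() {
            result += self.eval(dep)?;
        }
        self.in_progress.pop();
        Ok(result)
    }
}

fn main() {
    let deps = HashMap::from([(0, vec![1]), (1, vec![0])]);
    let mut ev = Evaluator { deps, in_progress: Vec::new() };
    assert_eq!(ev.eval(0), Err(Error::Loop));
}
```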

View file

@@ -1,4 +1,4 @@
use base_db::SourceDatabase;
use base_db::RootQueryDb;
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
use rustc_apfloat::{

View file

@@ -3,11 +3,7 @@
use std::sync;
use base_db::{
impl_intern_key,
ra_salsa::{self, InternValueTrivial},
CrateId, Upcast,
};
use base_db::{impl_intern_key, CrateId, Upcast};
use hir_def::{
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId,
ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
@@ -15,6 +11,7 @@ use hir_def::{
};
use hir_expand::name::Name;
use la_arena::ArenaMap;
use salsa::plumbing::AsId;
use smallvec::SmallVec;
use triomphe::Arc;
@@ -31,22 +28,22 @@ use crate::{
PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
};
#[ra_salsa::query_group(HirDatabaseStorage)]
pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[ra_salsa::invoke(crate::infer::infer_query)]
#[query_group::query_group]
pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> + std::fmt::Debug {
#[salsa::invoke_actual(crate::infer::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
// region:mir
#[ra_salsa::invoke(crate::mir::mir_body_query)]
#[ra_salsa::cycle(crate::mir::mir_body_recover)]
#[salsa::invoke_actual(crate::mir::mir_body_query)]
#[salsa::cycle(crate::mir::mir_body_recover)]
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
#[ra_salsa::invoke(crate::mir::mir_body_for_closure_query)]
#[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure(&self, def: ClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[ra_salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[ra_salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
fn monomorphized_mir_body(
&self,
def: DefWithBodyId,
@@ -54,7 +51,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[ra_salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
fn monomorphized_mir_body_for_closure(
&self,
def: ClosureId,
@@ -62,12 +59,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[ra_salsa::invoke(crate::mir::borrowck_query)]
#[ra_salsa::lru]
#[salsa::invoke(crate::mir::borrowck_query)]
#[salsa::lru(2024)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
#[ra_salsa::invoke(crate::consteval::const_eval_query)]
#[ra_salsa::cycle(crate::consteval::const_eval_recover)]
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
fn const_eval(
&self,
def: GeneralConstId,
@@ -75,15 +72,15 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError>;
#[ra_salsa::invoke(crate::consteval::const_eval_static_query)]
#[ra_salsa::cycle(crate::consteval::const_eval_static_recover)]
#[salsa::invoke_actual(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)]
fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
#[ra_salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[ra_salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
#[salsa::invoke_actual(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
#[ra_salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method(
&self,
env: Arc<TraitEnvironment>,
@@ -93,8 +90,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
// endregion:mir
#[ra_salsa::invoke(crate::layout::layout_of_adt_query)]
#[ra_salsa::cycle(crate::layout::layout_of_adt_recover)]
#[salsa::invoke(crate::layout::layout_of_adt_query)]
#[salsa::cycle(crate::layout::layout_of_adt_recover)]
fn layout_of_adt(
&self,
def: AdtId,
@@ -102,63 +99,67 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[ra_salsa::invoke(crate::layout::layout_of_ty_query)]
#[ra_salsa::cycle(crate::layout::layout_of_ty_recover)]
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
#[ra_salsa::invoke(crate::layout::target_data_layout_query)]
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
#[ra_salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
#[salsa::invoke_actual(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
#[ra_salsa::invoke(crate::lower::ty_query)]
#[ra_salsa::cycle(crate::lower::ty_recover)]
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
fn ty(&self, def: TyDefId) -> Binders<Ty>;
#[ra_salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
#[salsa::invoke_actual(crate::lower::type_for_type_alias_with_diagnostics_query)]
fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
/// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
/// a `StructId` or `EnumVariantId` with a record constructor.
#[ra_salsa::invoke(crate::lower::value_ty_query)]
#[salsa::invoke_actual(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
#[ra_salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
#[ra_salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
#[salsa::invoke_actual(crate::lower::impl_self_ty_with_diagnostics_query)]
#[salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics);
#[ra_salsa::invoke(crate::lower::impl_self_ty_query)]
#[salsa::invoke_actual(crate::lower::impl_self_ty_query)]
fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
#[ra_salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)]
#[salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)]
fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
#[ra_salsa::invoke(crate::lower::const_param_ty_query)]
#[salsa::invoke(crate::lower::const_param_ty_query)]
fn const_param_ty(&self, def: ConstParamId) -> Ty;
#[ra_salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
#[salsa::invoke_actual(crate::lower::impl_trait_with_diagnostics_query)]
fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>;
#[ra_salsa::invoke(crate::lower::impl_trait_query)]
#[salsa::invoke_actual(crate::lower::impl_trait_query)]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
#[ra_salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
#[salsa::invoke_actual(crate::lower::field_types_with_diagnostics_query)]
fn field_types_with_diagnostics(
&self,
var: VariantId,
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics);
#[ra_salsa::invoke(crate::lower::field_types_query)]
#[salsa::invoke_actual(crate::lower::field_types_query)]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
#[ra_salsa::invoke(crate::lower::callable_item_sig)]
#[salsa::invoke_actual(crate::lower::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
#[ra_salsa::invoke(crate::lower::return_type_impl_traits)]
#[salsa::invoke_actual(crate::lower::return_type_impl_traits)]
fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>;
#[ra_salsa::invoke(crate::lower::type_alias_impl_traits)]
#[salsa::invoke_actual(crate::lower::type_alias_impl_traits)]
fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option<Arc<Binders<ImplTraits>>>;
#[ra_salsa::invoke(crate::lower::generic_predicates_for_param_query)]
#[ra_salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
#[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
fn generic_predicates_for_param(
&self,
def: GenericDefId,
@@ -166,132 +167,139 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
assoc_name: Option<Name>,
) -> GenericPredicates;
#[ra_salsa::invoke(crate::lower::generic_predicates_query)]
#[salsa::invoke_actual(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
#[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
fn generic_predicates_without_parent_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericPredicates, Diagnostics);
#[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
#[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_query)]
fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
#[ra_salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[ra_salsa::transparent]
#[salsa::invoke_actual(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
#[ra_salsa::invoke(crate::lower::trait_environment_query)]
#[salsa::invoke_actual(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
#[ra_salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
#[ra_salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
#[salsa::invoke_actual(crate::lower::generic_defaults_with_diagnostics_query)]
#[salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
fn generic_defaults_with_diagnostics(
&self,
def: GenericDefId,
) -> (GenericDefaults, Diagnostics);
#[ra_salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::invoke_actual(crate::lower::generic_defaults_query)]
fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
#[ra_salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
#[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
#[ra_salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
#[salsa::invoke_actual(InherentImpls::inherent_impls_in_block_query)]
fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
/// Collects all crates in the dependency graph that have impls for the
/// given fingerprint. This is only used for primitive types and types
/// annotated with `rustc_has_incoherent_inherent_impls`; for other types
/// we just look at the crate where the type is defined.
#[ra_salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
#[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
fn incoherent_inherent_impl_crates(
&self,
krate: CrateId,
fp: TyFingerprint,
) -> SmallVec<[CrateId; 2]>;
#[ra_salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
#[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
#[ra_salsa::invoke(TraitImpls::trait_impls_in_block_query)]
#[salsa::invoke_actual(TraitImpls::trait_impls_in_block_query)]
fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
#[ra_salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
#[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<[Arc<TraitImpls>]>;
// Interned IDs for Chalk integration
#[ra_salsa::interned]
#[salsa::interned]
fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
#[ra_salsa::interned]
#[salsa::interned]
fn intern_type_or_const_param_id(
&self,
param_id: TypeOrConstParamId,
) -> InternedTypeOrConstParamId;
#[ra_salsa::interned]
#[salsa::interned]
fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
#[ra_salsa::interned]
#[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
#[ra_salsa::interned]
#[salsa::interned]
fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
#[ra_salsa::interned]
#[salsa::interned]
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
#[ra_salsa::invoke(chalk_db::associated_ty_data_query)]
#[salsa::invoke(chalk_db::associated_ty_data_query)]
fn associated_ty_data(
&self,
id: chalk_db::AssocTypeId,
) -> sync::Arc<chalk_db::AssociatedTyDatum>;
#[ra_salsa::invoke(chalk_db::trait_datum_query)]
#[salsa::invoke(chalk_db::trait_datum_query)]
fn trait_datum(
&self,
krate: CrateId,
trait_id: chalk_db::TraitId,
) -> sync::Arc<chalk_db::TraitDatum>;
#[ra_salsa::invoke(chalk_db::adt_datum_query)]
#[salsa::invoke(chalk_db::adt_datum_query)]
fn adt_datum(
&self,
krate: CrateId,
struct_id: chalk_db::AdtId,
) -> sync::Arc<chalk_db::AdtDatum>;
#[ra_salsa::invoke(chalk_db::impl_datum_query)]
#[salsa::invoke(chalk_db::impl_datum_query)]
fn impl_datum(
&self,
krate: CrateId,
impl_id: chalk_db::ImplId,
) -> sync::Arc<chalk_db::ImplDatum>;
#[ra_salsa::invoke(chalk_db::fn_def_datum_query)]
#[salsa::invoke(chalk_db::fn_def_datum_query)]
fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>;
#[ra_salsa::invoke(chalk_db::fn_def_variance_query)]
#[salsa::invoke(chalk_db::fn_def_variance_query)]
fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
#[ra_salsa::invoke(chalk_db::adt_variance_query)]
#[salsa::invoke(chalk_db::adt_variance_query)]
fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
#[ra_salsa::invoke(crate::variance::variances_of)]
#[ra_salsa::cycle(crate::variance::variances_of_cycle)]
#[salsa::invoke_actual(crate::variance::variances_of)]
#[salsa::cycle(crate::variance::variances_of_cycle)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
#[ra_salsa::invoke(chalk_db::associated_ty_value_query)]
#[salsa::invoke(chalk_db::associated_ty_value_query)]
fn associated_ty_value(
&self,
krate: CrateId,
id: chalk_db::AssociatedTyValueId,
) -> sync::Arc<chalk_db::AssociatedTyValue>;
#[ra_salsa::invoke(crate::traits::normalize_projection_query)]
#[ra_salsa::transparent]
#[salsa::invoke(crate::traits::normalize_projection_query)]
#[salsa::transparent]
fn normalize_projection(
&self,
projection: crate::ProjectionTy,
env: Arc<TraitEnvironment>,
) -> Ty;
#[ra_salsa::invoke(crate::traits::trait_solve_query)]
#[salsa::invoke(crate::traits::trait_solve_query)]
fn trait_solve(
&self,
krate: CrateId,
@ -299,7 +307,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
) -> Option<crate::Solution>;
#[ra_salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
#[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
fn program_clauses_for_chalk_env(
&self,
krate: CrateId,
@ -307,9 +315,9 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
env: chalk_ir::Environment<Interner>,
) -> chalk_ir::ProgramClauses<Interner>;
#[ra_salsa::invoke(crate::drop::has_drop_glue)]
#[ra_salsa::cycle(crate::drop::has_drop_glue_recover)]
fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
#[salsa::invoke(crate::drop::has_drop_glue)]
#[salsa::cycle(crate::drop::has_drop_glue_recover)]
fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
}
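
The annotation swap above is mechanical: `ra_salsa::invoke` becomes `salsa::invoke` or `salsa::invoke_actual`, while `cycle`, `transparent`, and `interned` keep their names under the new `salsa::` prefix. A minimal sketch of the resulting shape, assuming the workspace's `query-group-macro` crate exposes a `query_group` attribute that understands these annotations (the macro's exact entry point is not shown in this diff):

#[query_group::query_group]
pub trait ExampleDatabase: salsa::Database {
    // `invoke` routes the query to a free function, exactly as before;
    // `invoke_actual` is this macro's variant of the same routing.
    #[salsa::invoke(double)]
    fn double(&self, x: u32) -> u32;
}

fn double(_db: &dyn ExampleDatabase, x: u32) -> u32 {
    x * 2
}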
#[test]
@ -317,41 +325,22 @@ fn hir_database_is_dyn_compatible() {
fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedTypeOrConstParamId(ra_salsa::InternId);
impl_intern_key!(InternedTypeOrConstParamId);
impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedLifetimeParamId(ra_salsa::InternId);
impl_intern_key!(InternedLifetimeParamId);
impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedConstParamId(ra_salsa::InternId);
impl_intern_key!(InternedConstParamId);
impl_intern_key!(InternedConstParamId, ConstParamId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedOpaqueTyId(ra_salsa::InternId);
impl_intern_key!(InternedOpaqueTyId);
impl_intern_key!(InternedOpaqueTyId, ImplTraitId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedClosureId(ra_salsa::InternId);
impl_intern_key!(InternedClosureId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
impl_intern_key!(InternedClosureId, InternedClosure);
impl InternValueTrivial for InternedClosure {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedCoroutineId(ra_salsa::InternId);
impl_intern_key!(InternedCoroutineId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
impl InternValueTrivial for InternedCoroutine {}
impl_intern_key!(InternedCoroutineId, InternedCoroutine);
/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
/// we have different IDs for struct and enum variant constructors.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct InternedCallableDefId(ra_salsa::InternId);
impl_intern_key!(InternedCallableDefId);
// This exists just for Chalk, because Chalk just has a single `FnDefId` where
// we have different IDs for struct and enum variant constructors.
impl_intern_key!(InternedCallableDefId, CallableDefId);
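
Worth noting in the hunk above: the hand-written `InternId` newtypes are gone, and `impl_intern_key!` now takes both the id type and the value it interns, so the macro can define the id itself on top of `salsa::Id`. A plausible expansion, for orientation only (the real macro lives elsewhere in the workspace and may differ):

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InternedClosureId(salsa::Id);

// The `AsId`/`FromId` plumbing impls are what the chalk mapping code later
// in this diff relies on.
impl salsa::plumbing::AsId for InternedClosureId {
    fn as_id(&self) -> salsa::Id {
        self.0
    }
}

impl salsa::plumbing::FromId for InternedClosureId {
    fn from_id(id: salsa::Id) -> Self {
        Self(id)
    }
}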

View file

@ -251,7 +251,7 @@ impl<'a> DeclValidator<'a> {
return;
}
let (_, source_map) = self.db.body_with_source_map(func.into());
let source_map = self.db.body_with_source_map(func.into()).1;
for (id, replacement) in pats_replacements {
let Ok(source_ptr) = source_map.pat_syntax(id) else {
continue;
@ -597,7 +597,7 @@ impl<'a> DeclValidator<'a> {
) where
N: AstNode + HasName + fmt::Debug,
S: HasSource<Value = N>,
L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a> + HasModule + Copy,
L: Lookup<Data = S, Database = dyn DefDatabase> + HasModule + Copy,
{
let to_expected_case_type = match expected_case {
CaseType::LowerSnakeCase => to_lower_snake_case,

View file

@ -434,7 +434,7 @@ impl ExprValidator {
let last_then_expr_ty = &self.infer[last_then_expr];
if last_then_expr_ty.is_never() {
// Only look at sources if the then branch diverges and we have an else branch.
let (_, source_map) = db.body_with_source_map(self.owner);
let source_map = db.body_with_source_map(self.owner).1;
let Ok(source_ptr) = source_map.expr_syntax(id) else {
return;
};

View file

@ -1,6 +1,5 @@
//! Utilities for computing drop info about types.
use base_db::ra_salsa;
use chalk_ir::cast::Cast;
use hir_def::data::adt::StructFlags;
use hir_def::lang_item::LangItem;
@ -8,6 +7,7 @@ use hir_def::AdtId;
use stdx::never;
use triomphe::Arc;
use crate::db::HirDatabaseData;
use crate::{
db::HirDatabase, method_resolution::TyFingerprint, AliasTy, Canonical, CanonicalVarKinds,
InEnvironment, Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind,
@ -201,9 +201,10 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
pub(crate) fn has_drop_glue_recover(
_db: &dyn HirDatabase,
_cycle: &ra_salsa::Cycle,
_ty: &Ty,
_env: &Arc<TraitEnvironment>,
_cycle: &salsa::Cycle,
_: HirDatabaseData,
_ty: Ty,
_env: Arc<TraitEnvironment>,
) -> DropGlue {
DropGlue::None
}
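
The recover function above shows the cycle-recovery calling convention used throughout this commit: new salsa threads a macro-generated database-data handle (`HirDatabaseData` here) through, and passes the query's keys by value rather than by reference. Compressed to a before/after comparison:

// Old ra-salsa shape: keys by reference.
//   fn recover(db: &dyn HirDatabase, cycle: &ra_salsa::Cycle,
//              ty: &Ty, env: &Arc<TraitEnvironment>) -> DropGlue
//
// New salsa shape: extra data handle, keys by value.
//   fn recover(db: &dyn HirDatabase, cycle: &salsa::Cycle, data: HirDatabaseData,
//              ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue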

View file

@ -289,7 +289,7 @@ impl InferenceContext<'_> {
expected: &Expectation,
is_read: ExprIsRead,
) -> Ty {
self.db.unwind_if_cancelled();
self.db.unwind_if_revision_cancelled();
let ty = match &self.body[tgt_expr] {
Expr::Missing => self.err_ty(),

View file

@ -8,7 +8,6 @@ use crate::{
ProgramClauseData, ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses,
Substitution, Ty, TyData, TyKind, VariableKind, VariableKinds,
};
use base_db::ra_salsa::InternId;
use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance};
use hir_def::TypeAliasId;
use intern::{impl_internable, Interned};
@ -68,7 +67,7 @@ impl chalk_ir::interner::Interner for Interner {
type InternedCanonicalVarKinds = Interned<InternedWrapper<Vec<CanonicalVarKind>>>;
type InternedConstraints = Vec<InEnvironment<Constraint>>;
type InternedVariances = SmallVec<[Variance; 16]>;
type DefId = InternId;
type DefId = salsa::Id;
type InternedAdtId = hir_def::AdtId;
type Identifier = TypeAliasId;
type FnAbi = FnAbi;
@ -98,7 +97,7 @@ impl chalk_ir::interner::Interner for Interner {
opaque_ty_id: OpaqueTyId,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
Some(write!(fmt, "OpaqueTy#{:?}", opaque_ty_id.0))
}
fn debug_fn_def_id(fn_def_id: FnDefId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {

View file

@ -2,7 +2,6 @@
use std::fmt;
use base_db::ra_salsa::Cycle;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
@ -14,21 +13,20 @@ use hir_def::{
use la_arena::{Idx, RawIdx};
use rustc_abi::AddressSpace;
use rustc_index::IndexVec;
use salsa::Cycle;
use triomphe::Arc;
use crate::{
consteval::try_const_usize,
db::{HirDatabase, InternedClosure},
db::{HirDatabase, HirDatabaseData, InternedClosure},
infer::normalize,
utils::ClosureSubst,
Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
};
pub use self::{
adt::{layout_of_adt_query, layout_of_adt_recover},
target::target_data_layout_query,
};
pub(crate) use self::adt::layout_of_adt_recover;
pub use self::{adt::layout_of_adt_query, target::target_data_layout_query};
mod adt;
mod target;
@ -367,11 +365,12 @@ pub fn layout_of_ty_query(
Ok(Arc::new(result))
}
pub fn layout_of_ty_recover(
pub(crate) fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: &Ty,
_: &Arc<TraitEnvironment>,
_: HirDatabaseData,
_: Ty,
_: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}

View file

@ -2,7 +2,6 @@
use std::{cmp, ops::Bound};
use base_db::ra_salsa::Cycle;
use hir_def::{
data::adt::VariantData,
layout::{Integer, ReprOptions, TargetDataLayout},
@ -10,6 +9,7 @@ use hir_def::{
};
use intern::sym;
use rustc_index::IndexVec;
use salsa::Cycle;
use smallvec::SmallVec;
use triomphe::Arc;
@ -20,7 +20,7 @@ use crate::{
Substitution, TraitEnvironment,
};
use super::LayoutCx;
use super::{HirDatabaseData, LayoutCx};
pub fn layout_of_adt_query(
db: &dyn HirDatabase,
@ -131,12 +131,13 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
)
}
pub fn layout_of_adt_recover(
pub(crate) fn layout_of_adt_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: &AdtId,
_: &Substitution,
_: &Arc<TraitEnvironment>,
_: HirDatabaseData,
_: AdtId,
_: Substitution,
_: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
Err(LayoutError::RecursiveTypeWithoutIndirection)
}

View file

@ -56,7 +56,6 @@ mod variance;
use std::hash::Hash;
use base_db::ra_salsa::InternValueTrivial;
use chalk_ir::{
fold::{Shift, TypeFoldable},
interner::HasInterner,
@ -610,7 +609,6 @@ pub enum ImplTraitId {
TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
}
impl InternValueTrivial for ImplTraitId {}
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTraits {

View file

@ -14,7 +14,7 @@ use std::{
ops::{self, Not as _},
};
use base_db::{ra_salsa::Cycle, CrateId};
use base_db::CrateId;
use chalk_ir::{
cast::Cast,
fold::{Shift, TypeFoldable},
@ -47,6 +47,7 @@ use hir_expand::{name::Name, ExpandResult};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashSet;
use rustc_pattern_analysis::Captures;
use salsa::Cycle;
use stdx::{impl_from, never};
use syntax::ast;
use triomphe::{Arc, ThinArc};
@ -57,7 +58,7 @@ use crate::{
intern_const_ref, intern_const_scalar, path_to_const, unknown_const,
unknown_const_as_generic,
},
db::HirDatabase,
db::{HirDatabase, HirDatabaseData},
error_lifetime,
generics::{generics, trait_self_param_idx, Generics},
lower::{
@ -1111,10 +1112,11 @@ pub(crate) fn generic_predicates_for_param_query(
pub(crate) fn generic_predicates_for_param_recover(
_db: &dyn HirDatabase,
_cycle: &Cycle,
_def: &GenericDefId,
_param_id: &TypeOrConstParamId,
_assoc_name: &Option<Name>,
_cycle: &salsa::Cycle,
_: HirDatabaseData,
_def: GenericDefId,
_param_id: TypeOrConstParamId,
_assoc_name: Option<Name>,
) -> GenericPredicates {
GenericPredicates(None)
}
@ -1271,6 +1273,7 @@ where
);
};
}
(
GenericPredicates(predicates.is_empty().not().then(|| predicates.into())),
create_diagnostics(ctx.diagnostics),
@ -1414,9 +1417,9 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
pub(crate) fn generic_defaults_with_diagnostics_recover(
db: &dyn HirDatabase,
_cycle: &Cycle,
def: &GenericDefId,
def: GenericDefId,
) -> (GenericDefaults, Diagnostics) {
let generic_params = generics(db.upcast(), *def);
let generic_params = generics(db.upcast(), def);
if generic_params.len() == 0 {
return (GenericDefaults(None), None);
}
@ -1591,6 +1594,7 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query(
.map(|type_ref| ctx.lower_ty(type_ref))
.unwrap_or_else(|| TyKind::Error.intern(Interner))
};
(make_binders(db, &generics, inner), create_diagnostics(ctx.diagnostics))
}
@ -1602,7 +1606,7 @@ pub enum TyDefId {
}
impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
pub enum ValueTyDefId {
FunctionId(FunctionId),
StructId(StructId),
@ -1638,8 +1642,13 @@ pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
}
}
pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders<Ty> {
let generics = match *def {
pub(crate) fn ty_recover(
db: &dyn HirDatabase,
_cycle: &salsa::Cycle,
_: HirDatabaseData,
def: TyDefId,
) -> Binders<Ty> {
let generics = match def {
TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
@ -1703,10 +1712,10 @@ pub(crate) fn const_param_ty_with_diagnostics_query(
pub(crate) fn impl_self_ty_with_diagnostics_recover(
db: &dyn HirDatabase,
_cycle: &Cycle,
impl_id: &ImplId,
_cycle: &salsa::Cycle,
impl_id: ImplId,
) -> (Binders<Ty>, Diagnostics) {
let generics = generics(db.upcast(), (*impl_id).into());
let generics = generics(db.upcast(), (impl_id).into());
(make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
}

View file

@ -5,8 +5,11 @@
use chalk_solve::rust_ir;
use base_db::ra_salsa::{self, InternKey};
use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId};
use salsa::{
plumbing::{AsId, FromId},
Id,
};
use crate::{
chalk_db, db::HirDatabase, AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId,
@ -30,11 +33,11 @@ impl ToChalk for hir_def::ImplId {
type Chalk = chalk_db::ImplId;
fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId {
chalk_ir::ImplId(self.as_intern_id())
chalk_ir::ImplId(self.as_id())
}
fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId {
InternKey::from_intern_id(impl_id.0)
FromId::from_id(impl_id.0.as_id())
}
}
@ -56,84 +59,84 @@ impl ToChalk for TypeAliasAsValue {
type Chalk = chalk_db::AssociatedTyValueId;
fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId {
rust_ir::AssociatedTyValueId(self.0.as_intern_id())
rust_ir::AssociatedTyValueId(self.0.as_id())
}
fn from_chalk(
_db: &dyn HirDatabase,
assoc_ty_value_id: chalk_db::AssociatedTyValueId,
) -> TypeAliasAsValue {
TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
TypeAliasAsValue(TypeAliasId::from_id(assoc_ty_value_id.0))
}
}
impl From<FnDefId> for crate::db::InternedCallableDefId {
fn from(fn_def_id: FnDefId) -> Self {
InternKey::from_intern_id(fn_def_id.0)
Self::from_id(fn_def_id.0)
}
}
impl From<crate::db::InternedCallableDefId> for FnDefId {
fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
chalk_ir::FnDefId(callable_def_id.as_intern_id())
chalk_ir::FnDefId(callable_def_id.as_id())
}
}
impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
fn from(id: OpaqueTyId) -> Self {
InternKey::from_intern_id(id.0)
FromId::from_id(id.0)
}
}
impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
fn from(id: crate::db::InternedOpaqueTyId) -> Self {
chalk_ir::OpaqueTyId(id.as_intern_id())
chalk_ir::OpaqueTyId(id.as_id())
}
}
impl From<chalk_ir::ClosureId<Interner>> for crate::db::InternedClosureId {
fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
Self::from_intern_id(id.0)
FromId::from_id(id.0)
}
}
impl From<crate::db::InternedClosureId> for chalk_ir::ClosureId<Interner> {
fn from(id: crate::db::InternedClosureId) -> Self {
chalk_ir::ClosureId(id.as_intern_id())
chalk_ir::ClosureId(id.as_id())
}
}
impl From<chalk_ir::CoroutineId<Interner>> for crate::db::InternedCoroutineId {
fn from(id: chalk_ir::CoroutineId<Interner>) -> Self {
Self::from_intern_id(id.0)
Self::from_id(id.0)
}
}
impl From<crate::db::InternedCoroutineId> for chalk_ir::CoroutineId<Interner> {
fn from(id: crate::db::InternedCoroutineId) -> Self {
chalk_ir::CoroutineId(id.as_intern_id())
chalk_ir::CoroutineId(id.as_id())
}
}
pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId {
chalk_ir::ForeignDefId(ra_salsa::InternKey::as_intern_id(&id))
chalk_ir::ForeignDefId(id.as_id())
}
pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId {
ra_salsa::InternKey::from_intern_id(id.0)
FromId::from_id(id.0)
}
pub fn to_assoc_type_id(id: TypeAliasId) -> AssocTypeId {
chalk_ir::AssocTypeId(ra_salsa::InternKey::as_intern_id(&id))
chalk_ir::AssocTypeId(id.as_id())
}
pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
ra_salsa::InternKey::from_intern_id(id.0)
FromId::from_id(id.0)
}
pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx));
let interned_id = FromId::from_id(Id::from_u32(idx.idx.try_into().unwrap()));
db.lookup_intern_type_or_const_param_id(interned_id)
}
@ -141,13 +144,13 @@ pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Place
let interned_id = db.intern_type_or_const_param_id(id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(),
idx: interned_id.as_id().as_u32() as usize,
}
}
pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx));
let interned_id = FromId::from_id(Id::from_u32(idx.idx.try_into().unwrap()));
db.lookup_intern_lifetime_param_id(interned_id)
}
@ -155,14 +158,14 @@ pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> Place
let interned_id = db.intern_lifetime_param_id(id);
PlaceholderIndex {
ui: chalk_ir::UniverseIndex::ROOT,
idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(),
idx: interned_id.as_id().as_u32() as usize,
}
}
pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId {
chalk_ir::TraitId(ra_salsa::InternKey::as_intern_id(&id))
chalk_ir::TraitId(id.as_id())
}
pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId {
ra_salsa::InternKey::from_intern_id(id.0)
FromId::from_id(id.0)
}
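
With `InternId` gone, every chalk interop point above goes through `salsa::Id` and the `AsId`/`FromId` plumbing traits, and placeholder indices round-trip through `u32`. The conversion kernel, isolated (helper names are illustrative):

use salsa::Id;

// chalk stores placeholder indices as usize; salsa ids are u32-backed.
fn id_to_idx(id: Id) -> usize {
    id.as_u32() as usize
}

fn idx_to_id(idx: usize) -> Id {
    // Panics when the index exceeds u32::MAX, mirroring the
    // `.try_into().unwrap()` in the hunk above.
    Id::from_u32(idx.try_into().unwrap())
}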

View file

@ -714,6 +714,7 @@ pub(crate) fn lookup_impl_method_query(
else {
return (func, fn_subst);
};
(
impl_fn,
Substitution::from_iter(

View file

@ -32,17 +32,18 @@ pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{
interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap,
};
pub use lower::{
lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
pub use lower::{lower_to_mir, mir_body_for_closure_query, mir_body_query, MirLowerError};
pub use monomorphization::{
monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query,
monomorphized_mir_body_query, monomorphized_mir_body_recover,
monomorphized_mir_body_query,
};
use rustc_hash::FxHashMap;
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
pub(crate) use lower::mir_body_recover;
pub(crate) use monomorphization::monomorphized_mir_body_recover;
use super::consteval::{intern_const_scalar, try_const_usize};
pub type BasicBlockId = Idx<BasicBlock>;

View file

@ -145,7 +145,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
Operand::Constant(_) | Operand::Static(_) => (),
};
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(_, r) => match r {
@ -235,7 +235,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
Operand::Constant(_) | Operand::Static(_) => (),
};
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(_, r) => match r {
@ -306,7 +306,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
let mut borrows = FxHashMap::default();
for (_, block) in body.basic_blocks.iter() {
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
for statement in &block.statements {
if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
borrows
@ -477,7 +477,7 @@ fn ever_initialized_map(
dfs(db, body, l, &mut stack, &mut result);
}
for l in body.locals.iter().map(|it| it.0) {
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
if !result[body.start_block].contains_idx(l) {
result[body.start_block].insert(l, false);
stack.clear();

View file

@ -2558,6 +2558,7 @@ impl Evaluator<'_> {
} else {
let (imp, generic_args) =
self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone());
let mir_body = self
.db
.monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone())

View file

@ -2,7 +2,7 @@
use std::{fmt::Write, iter, mem};
use base_db::{ra_salsa::Cycle, CrateId};
use base_db::{salsa::Cycle, CrateId};
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
use hir_def::{
data::adt::{StructKind, VariantData},
@ -2149,10 +2149,10 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
Ok(Arc::new(result))
}
pub fn mir_body_recover(
pub(crate) fn mir_body_recover(
_db: &dyn HirDatabase,
_cycle: &Cycle,
_def: &DefWithBodyId,
_def: DefWithBodyId,
) -> Result<Arc<MirBody>> {
Err(MirLowerError::Loop)
}

View file

@ -9,7 +9,6 @@
use std::mem;
use base_db::ra_salsa::Cycle;
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
@ -19,7 +18,7 @@ use triomphe::Arc;
use crate::{
consteval::{intern_const_scalar, unknown_const},
db::{HirDatabase, InternedClosure},
db::{HirDatabase, HirDatabaseData, InternedClosure},
from_placeholder_idx,
generics::{generics, Generics},
infer::normalize,
@ -314,12 +313,13 @@ pub fn monomorphized_mir_body_query(
Ok(Arc::new(body))
}
pub fn monomorphized_mir_body_recover(
pub(crate) fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
_: &Cycle,
_: &DefWithBodyId,
_: &Substitution,
_: &Arc<crate::TraitEnvironment>,
_: &salsa::Cycle,
_: HirDatabaseData,
_: DefWithBodyId,
_: Substitution,
_: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
Err(MirLowerError::Loop)
}

View file

@ -3,34 +3,34 @@
use std::{fmt, panic, sync::Mutex};
use base_db::{
ra_salsa::{self, Durability},
AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
FileSourceRootInput, FileText, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId,
SourceRootInput, Upcast,
};
use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::ExpandDatabase;
use rustc_hash::FxHashMap;
use salsa::{AsDynDatabase, Durability};
use span::{EditionedFileId, FileId};
use syntax::TextRange;
use test_utils::extract_annotations;
use triomphe::Arc;
#[ra_salsa::database(
base_db::SourceRootDatabaseStorage,
base_db::SourceDatabaseStorage,
hir_expand::db::ExpandDatabaseStorage,
hir_def::db::InternDatabaseStorage,
hir_def::db::DefDatabaseStorage,
crate::db::HirDatabaseStorage
)]
#[salsa::db]
#[derive(Clone)]
pub(crate) struct TestDB {
storage: ra_salsa::Storage<TestDB>,
events: Mutex<Option<Vec<ra_salsa::Event>>>,
storage: salsa::Storage<Self>,
files: Arc<base_db::Files>,
events: Arc<Mutex<Option<Vec<salsa::Event>>>>,
}
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
let mut this = Self {
storage: Default::default(),
events: Default::default(),
files: Default::default(),
};
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
@ -54,35 +54,81 @@ impl Upcast<dyn DefDatabase> for TestDB {
}
}
impl ra_salsa::Database for TestDB {
fn salsa_event(&self, event: ra_salsa::Event) {
let mut events = self.events.lock().unwrap();
if let Some(events) = &mut *events {
events.push(event);
}
impl Upcast<dyn RootQueryDb> for TestDB {
fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
self
}
}
impl ra_salsa::ParallelDatabase for TestDB {
fn snapshot(&self) -> ra_salsa::Snapshot<TestDB> {
ra_salsa::Snapshot::new(TestDB {
storage: self.storage.snapshot(),
events: Default::default(),
})
impl Upcast<dyn SourceDatabase> for TestDB {
fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
self
}
}
#[salsa::db]
impl SourceDatabase for TestDB {
fn file_text(&self, file_id: base_db::FileId) -> FileText {
self.files.file_text(file_id)
}
fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) {
let files = Arc::clone(&self.files);
files.set_file_text(self, file_id, text);
}
fn set_file_text_with_durability(
&mut self,
file_id: base_db::FileId,
text: &str,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_file_text_with_durability(self, file_id, text, durability);
}
/// Source root of the file.
fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
self.files.source_root(source_root_id)
}
fn set_source_root_with_durability(
&mut self,
source_root_id: SourceRootId,
source_root: Arc<SourceRoot>,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_source_root_with_durability(self, source_root_id, source_root, durability);
}
fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput {
self.files.file_source_root(id)
}
fn set_file_source_root_with_durability(
&mut self,
id: base_db::FileId,
source_root_id: SourceRootId,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_file_source_root_with_durability(self, id, source_root_id, durability);
}
}
#[salsa::db]
impl salsa::Database for TestDB {
fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
let mut events = self.events.lock().unwrap();
if let Some(events) = &mut *events {
events.push(event());
}
}
}
impl panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}
impl TestDB {
pub(crate) fn module_for_file_opt(&self, file_id: impl Into<FileId>) -> Option<ModuleId> {
let file_id = file_id.into();
@ -117,7 +163,7 @@ impl TestDB {
.into_iter()
.filter_map(|file_id| {
let text = self.file_text(file_id.file_id());
let annotations = extract_annotations(&text);
let annotations = extract_annotations(&text.text(self));
if annotations.is_empty() {
return None;
}
@ -128,7 +174,7 @@ impl TestDB {
}
impl TestDB {
pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<ra_salsa::Event> {
pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
*self.events.lock().unwrap() = Some(Vec::new());
f();
self.events.lock().unwrap().take().unwrap()
@ -141,8 +187,11 @@ impl TestDB {
.filter_map(|e| match e.kind {
// This is pretty horrible, but `Debug` is the only way to inspect
// QueryDescriptor at the moment.
ra_salsa::EventKind::WillExecute { database_key } => {
Some(format!("{:?}", database_key.debug(self)))
salsa::EventKind::WillExecute { database_key } => {
let ingredient = self
.as_dyn_database()
.ingredient_debug_name(database_key.ingredient_index());
Some(ingredient.to_string())
}
_ => None,
})
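
TestDB's rewrite above is the template every database in this commit follows: the `ra_salsa::database(...)` storage list and the `ParallelDatabase`/snapshot machinery disappear, replaced by a `#[salsa::db]` attribute on a `Clone` struct plus a `#[salsa::db]` impl of `salsa::Database`. Boiled down to the smallest shape, a sketch using only items visible in this hunk:

#[salsa::db]
#[derive(Default, Clone)]
struct MiniDb {
    storage: salsa::Storage<Self>,
}

#[salsa::db]
impl salsa::Database for MiniDb {
    // Events now arrive lazily, as a closure to call only if you care.
    fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
}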

View file

@ -15,7 +15,7 @@ mod type_alias_impl_traits;
use std::env;
use std::sync::LazyLock;
use base_db::{CrateId, SourceDatabaseFileInputExt as _};
use base_db::{CrateId, SourceDatabase};
use expect_test::Expect;
use hir_def::{
db::DefDatabase,

View file

@ -1,8 +1,8 @@
use base_db::ra_salsa::InternKey;
use expect_test::{expect, Expect};
use hir_def::db::DefDatabase;
use hir_expand::files::InFileWrapper;
use itertools::Itertools;
use salsa::plumbing::FromId;
use span::{HirFileId, TextRange};
use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture;
@ -34,8 +34,8 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
let infer = db.infer(def);
let db = &db;
captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0));
let (_, source_map) = db.body_with_source_map(closure.0);
let closure = db.lookup_intern_closure(InternedClosureId::from_id(closure_id.0));
let source_map = db.body_with_source_map(closure.0).1;
let closure_text_range = source_map
.expr_syntax(closure.1)
.expect("failed to map closure to SyntaxNode")

View file

@ -1,4 +1,4 @@
use base_db::SourceDatabaseFileInputExt as _;
use base_db::SourceDatabase;
use hir_def::ModuleDefId;
use test_fixture::WithFixture;
@ -25,7 +25,7 @@ fn foo() -> i32 {
}
});
});
assert!(format!("{events:?}").contains("infer"))
assert!(format!("{events:?}").contains("infer_shim"))
}
let new_text = "
@ -47,7 +47,7 @@ fn foo() -> i32 {
}
});
});
assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
assert!(!format!("{events:?}").contains("infer_shim"), "{events:#?}")
}
}
@ -76,7 +76,7 @@ fn baz() -> i32 {
}
});
});
assert!(format!("{events:?}").contains("infer"))
assert!(format!("{events:?}").contains("infer_shim"))
}
let new_text = "
@ -103,6 +103,6 @@ fn baz() -> i32 {
}
});
});
assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
assert!(format!("{events:?}").matches("infer_shim").count() == 1, "{events:#?}")
}
}

View file

@ -160,7 +160,7 @@ fn solve(
let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
let should_continue = || {
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
let remaining = fuel.get();
fuel.set(remaining - 1);
if remaining == 0 {

View file

@ -19,7 +19,7 @@ use crate::{
AliasTy, Const, ConstScalar, DynTyExt, GenericArg, GenericArgData, Interner, Lifetime,
LifetimeData, Ty, TyKind,
};
use base_db::ra_salsa::Cycle;
use base_db::salsa::Cycle;
use chalk_ir::Mutability;
use hir_def::data::adt::StructFlags;
use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId};
@ -58,9 +58,9 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
pub(crate) fn variances_of_cycle(
db: &dyn HirDatabase,
_cycle: &Cycle,
def: &GenericDefId,
def: GenericDefId,
) -> Option<Arc<[Variance]>> {
let generics = generics(db.upcast(), *def);
let generics = generics(db.upcast(), def);
let count = generics.len();
if count == 0 {

View file

@ -3,44 +3,43 @@
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::{
AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery,
BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery,
CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase,
DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery,
ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery,
FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery,
FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
GenericParamsWithSourceMapQuery, ImplDataWithDiagnosticsQuery, ImportMapQuery,
IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery,
InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery,
InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery,
InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery,
InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery,
InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery,
MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery,
StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataWithDiagnosticsQuery,
TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery,
RealSpanMapQuery,
};
pub use hir_ty::db::{
AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery,
FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery,
GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage,
ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery,
InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery,
MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery,
MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery,
TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery,
TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery,
TypeAliasImplTraitsQuery, ValueTyQuery,
};
pub use hir_def::db::DefDatabase;
// AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery,
// BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery,
// CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase,
// DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery,
// ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery,
// FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery,
// FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
// GenericParamsWithSourceMapQuery, ImplDataWithDiagnosticsQuery, ImportMapQuery,
// IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery,
// InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery,
// InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery,
// InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery,
// InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery,
// InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery,
// MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery,
// StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataWithDiagnosticsQuery,
// TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
// };
pub use hir_expand::db::ExpandDatabase;
// AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
// ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
// ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery,
// RealSpanMapQuery,
pub use hir_ty::db::HirDatabase;
// AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
// CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
// ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery,
// FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery,
// GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage,
// ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery,
// InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
// InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
// InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery,
// MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery,
// MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery,
// TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery,
// TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery,
// TypeAliasImplTraitsQuery, ValueTyQuery,
// };

View file

@ -306,7 +306,7 @@ impl HasSource for ExternCrateDecl {
impl HasSource for InlineAsmOperand {
type Ast = ast::AsmOperandNamed;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let (_body, source_map) = db.body_with_source_map(self.owner);
let source_map = db.body_with_source_map(self.owner).1;
if let Ok(src) = source_map.expr_syntax(self.expr) {
let root = src.file_syntax(db.upcast());
return src

View file

@ -662,6 +662,7 @@ impl Module {
db.field_types_with_diagnostics(s.id.into()).1,
tree_source_maps.strukt(tree_id.value).item(),
);
for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
emit_def_diagnostic(db, acc, diag, edition);
}
@ -675,6 +676,7 @@ impl Module {
db.field_types_with_diagnostics(u.id.into()).1,
tree_source_maps.union(tree_id.value).item(),
);
for diag in db.union_data_with_diagnostics(u.id).1.iter() {
emit_def_diagnostic(db, acc, diag, edition);
}
@ -1906,6 +1908,7 @@ impl DefWithBody {
let krate = self.module(db).id.krate();
let (body, source_map) = db.body_with_source_map(self.into());
let item_tree_source_maps;
let outer_types_source_map = match self {
DefWithBody::Function(function) => {
@ -1955,7 +1958,7 @@ impl DefWithBody {
None
};
MacroError {
node: (*node).map(|it| it.into()),
node: (node).map(|it| it.into()),
precise_location,
message,
error,
@ -3346,7 +3349,7 @@ fn as_assoc_item<'db, ID, DEF, LOC>(
id: ID,
) -> Option<AssocItem>
where
ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<LOC>>,
ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
DEF: From<ID>,
LOC: ItemTreeNode,
{
@ -3362,7 +3365,7 @@ fn as_extern_assoc_item<'db, ID, DEF, LOC>(
id: ID,
) -> Option<ExternAssocItem>
where
ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<LOC>>,
ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
DEF: From<ID>,
LOC: ItemTreeNode,
{
@ -4656,6 +4659,7 @@ pub struct CaptureUsages {
impl CaptureUsages {
pub fn sources(&self, db: &dyn HirDatabase) -> Vec<CaptureUsageSource> {
let (body, source_map) = db.body_with_source_map(self.parent);
let mut result = Vec::with_capacity(self.spans.len());
for &span in self.spans.iter() {
let is_ref = span.is_ref_span(&body);

View file

@ -307,9 +307,10 @@ impl<'db> SemanticsImpl<'db> {
SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
}
pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
pub fn parse(&self, file_id: base_db::EditionedFileId) -> ast::SourceFile {
let hir_file_id = file_id.editioned_file_id(self.db).into();
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
self.cache(tree.syntax().clone(), hir_file_id);
tree
}
@ -329,11 +330,14 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
let file_id = self
let editioned_file_id = self
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(file_id));
let file_id = base_db::EditionedFileId::new(self.db, editioned_file_id);
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
self.cache(tree.syntax().clone(), editioned_file_id.into());
tree
}
@ -1903,7 +1907,9 @@ fn macro_call_to_macro_id(
match loc.def.ast_id() {
Either::Left(it) => {
let node = match it.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
@ -1915,7 +1921,9 @@ fn macro_call_to_macro_id(
}
Either::Right(it) => {
let node = match it.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
HirFileIdRepr::FileId(editioned_file_id) => {
let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {

View file

@ -254,7 +254,7 @@ fn insert_item_loc<ID, N, Data>(
id: ID,
key: Key<N::Source, ID>,
) where
ID: for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = Data> + 'static,
ID: Lookup<Database = dyn DefDatabase, Data = Data> + 'static,
Data: ItemTreeLoc<Id = N>,
N: ItemTreeNode,
N::Source: 'static,

View file

@ -85,6 +85,7 @@
//! active crate for a given position, and then provide an API to resolve all
//! syntax nodes against this specific crate.
use base_db::{RootQueryDb, Upcast};
use either::Either;
use hir_def::{
dyn_map::{
@ -99,11 +100,11 @@ use hir_def::{
};
use hir_expand::{
attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId,
MacroFileIdExt,
MacroFileId, MacroFileIdExt,
};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{EditionedFileId, FileId, MacroFileId};
use span::{EditionedFileId, FileId};
use stdx::impl_from;
use syntax::{
ast::{self, HasName},
@ -142,7 +143,7 @@ impl SourceToDefCache {
return m;
}
self.included_file_cache.insert(file, None);
for &crate_id in db.relevant_crates(file.into()).iter() {
for &crate_id in Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(file.into()).iter() {
db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
});
@ -176,7 +177,9 @@ impl SourceToDefCtx<'_, '_> {
let _p = tracing::info_span!("SourceToDefCtx::file_to_def").entered();
self.cache.file_to_def_cache.entry(file).or_insert_with(|| {
let mut mods = SmallVec::new();
for &crate_id in self.db.relevant_crates(file).iter() {
for &crate_id in Upcast::<dyn RootQueryDb>::upcast(self.db).relevant_crates(file).iter()
{
// Note: `mod` declarations in block modules cannot be supported here
let crate_def_map = self.db.crate_def_map(crate_id);
let n_mods = mods.len();
@ -344,7 +347,7 @@ impl SourceToDefCtx<'_, '_> {
})
.position(|it| it == *src.value)?;
let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (_, source_map) = self.db.body_with_source_map(container);
let source_map = self.db.body_with_source_map(container).1;
let expr = source_map.node_expr(src.with_value(&ast::Expr::AsmExpr(asm)))?.as_expr()?;
Some(InlineAsmOperand { owner: container, expr, index })
}
@ -377,7 +380,8 @@ impl SourceToDefCtx<'_, '_> {
src: InFile<&ast::Label>,
) -> Option<(DefWithBodyId, LabelId)> {
let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (_body, source_map) = self.db.body_with_source_map(container);
let source_map = self.db.body_with_source_map(container).1;
let label_id = source_map.node_label(src)?;
Some((container, label_id))
}

View file

@ -1635,8 +1635,7 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
};
let span_map = db.expansion_span_map(macro_file);
let ctx = span_map.span_at(name.value.text_range().start()).ctx;
let ctx = db.lookup_intern_syntax_context(ctx);
HygieneId::new(ctx.opaque_and_semitransparent)
HygieneId::new(ctx.opaque_and_semitransparent(db))
}
fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {

View file

@ -111,7 +111,7 @@ impl<'a> SymbolCollector<'a> {
fn do_work(&mut self, work: SymbolCollectorWork) {
let _p = tracing::info_span!("SymbolCollector::do_work", ?work).entered();
tracing::info!(?work, "SymbolCollector::do_work");
self.db.unwind_if_cancelled();
self.db.unwind_if_revision_cancelled();
let parent_name = work.parent.map(|name| name.as_str().to_smolstr());
self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
@ -346,9 +346,9 @@ impl<'a> SymbolCollector<'a> {
}
}
fn push_decl<'db, L>(&mut self, id: L, name: &Name, is_assoc: bool)
fn push_decl<L>(&mut self, id: L, name: &Name, is_assoc: bool)
where
L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>,
L: Lookup<Database = dyn DefDatabase> + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource,
<<L as Lookup>::Data as HasSource>::Value: HasName,
{

View file

@ -1,6 +1,7 @@
//! See [`AssistContext`].
use hir::{FileRange, Semantics};
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::EditionedFileId;
use ide_db::{label::Label, FileId, RootDatabase};
use syntax::Edition;
@ -64,7 +65,10 @@ impl<'a> AssistContext<'a> {
config: &'a AssistConfig,
frange: FileRange,
) -> AssistContext<'a> {
let source_file = sema.parse(frange.file_id);
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), frange.file_id);
let source_file = sema.parse(editioned_file_id);
let start = frange.range.start();
let end = frange.range.end();
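
`Semantics::parse` now wants the salsa-interned `ide_db::base_db::EditionedFileId` wrapper instead of the raw `span::EditionedFileId`, which is why every call site in this commit first constructs the wrapper. The repeated boilerplate, written once as a hypothetical helper (not part of this commit):

use hir::Semantics;
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{EditionedFileId, RootDatabase};
use syntax::ast;

fn parse_editioned(
    sema: &Semantics<'_, RootDatabase>,
    file_id: EditionedFileId,
) -> ast::SourceFile {
    // Intern the (FileId, Edition) pair into the database, then parse.
    let wrapper = ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
    sema.parse(wrapper)
}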

View file

@ -2,6 +2,7 @@ use std::iter::{self, Peekable};
use either::Either;
use hir::{sym, Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics};
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::syntax_helpers::suggest_name;
use ide_db::RootDatabase;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
@ -256,7 +257,12 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
// Just replace the element that the original range came from
let old_place = {
// Find the original element
let file = ctx.sema.parse(arm_list_range.file_id);
let editioned_file_id = ide_db::base_db::EditionedFileId::new(
ctx.sema.db.as_dyn_database(),
arm_list_range.file_id,
);
let file = ctx.sema.parse(editioned_file_id);
let old_place = file.syntax().covering_element(arm_list_range.range);
match old_place {

View file

@ -1136,7 +1136,7 @@ fn foo() {
}
//- /main.rs
use foo::Foo;
use foo::{Bool, Foo};
mod foo;

View file

@ -590,7 +590,7 @@ fn handle_call(
let indent =
if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() };
// FIXME: This text manipulation seems risky.
let text = ctx.db().file_text(file_id.file_id());
let text = ctx.db().file_text(file_id.file_id()).text(ctx.db());
let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end();
if !text.ends_with(')') {
return None;
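
`file_text` now returns a `FileText` salsa input rather than the text itself, hence the extra `.text(db)` hop here and in the hunks below. As a one-function illustration (`file_len` is a hypothetical helper):

use ide_db::base_db::SourceDatabase;
use ide_db::RootDatabase;

fn file_len(db: &RootDatabase, file_id: vfs::FileId) -> usize {
    // `.text(db)` resolves the FileText input handle to the underlying string.
    db.file_text(file_id).text(db).len()
}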

View file

@ -2,6 +2,7 @@ use std::iter;
use either::Either;
use hir::{HasSource, HirFileIdExt, ModuleSource};
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{
assists::{AssistId, AssistKind},
defs::{Definition, NameClass, NameRefClass},
@ -331,7 +332,10 @@ impl Module {
let mut use_stmts_set = FxHashSet::default();
for (file_id, refs) in node_def.usages(&ctx.sema).all() {
let source_file = ctx.sema.parse(file_id);
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let source_file = ctx.sema.parse(editioned_file_id);
let usages = refs.into_iter().filter_map(|FileReference { range, .. }| {
// handle normal usages
let name_ref = find_node_at_range::<ast::NameRef>(source_file.syntax(), range)?;
@ -457,7 +461,11 @@ impl Module {
let selection_range = ctx.selection_trimmed();
let file_id = ctx.file_id();
let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all();
let file = ctx.sema.parse(file_id);
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let file = ctx.sema.parse(editioned_file_id);
// track uses which do not exist in `Use`
let mut uses_exist_in_sel = false;

View file

@ -2,6 +2,7 @@ use hir::{
Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics,
StructKind, Type, TypeInfo,
};
use ide_db::base_db::salsa::AsDynDatabase;
use ide_db::{
defs::{Definition, NameRefClass},
famous_defs::FamousDefs,
@ -205,7 +206,11 @@ fn get_adt_source(
fn_name: &str,
) -> Option<(Option<ast::Impl>, FileId)> {
let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
let file = ctx.sema.parse(range.file_id);
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), range.file_id);
let file = ctx.sema.parse(editioned_file_id);
let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()])

View file

@ -1,4 +1,6 @@
use ide_db::{defs::Definition, search::FileReference, EditionedFileId};
use ide_db::{
base_db::salsa::AsDynDatabase, defs::Definition, search::FileReference, EditionedFileId,
};
use syntax::{
algo::{find_node_at_range, least_common_ancestor_element},
ast::{self, HasArgList},
@ -102,7 +104,11 @@ fn process_usages(
arg_to_remove: usize,
is_self_present: bool,
) {
let source_file = ctx.sema.parse(file_id);
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
let source_file = ctx.sema.parse(editioned_file_id_wrapper);
builder.edit_file(file_id);
let possible_ranges = references
.into_iter()
.filter_map(|usage| process_usage(&source_file, usage, arg_to_remove, is_self_present));

View file

@ -3,7 +3,7 @@ mod generated;
use expect_test::expect;
use hir::{FileRange, Semantics};
use ide_db::{
base_db::{SourceDatabase, SourceRootDatabase},
base_db::SourceDatabase,
imports::insert_use::{ImportGranularity, InsertUseConfig},
source_change::FileSystemEdit,
EditionedFileId, RootDatabase, SnippetCap,
@ -222,7 +222,7 @@ pub(crate) fn check_assist_unresolved(
fn check_doc_test(assist_id: &str, before: &str, after: &str) {
let after = trim_indent(after);
let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
let before = db.file_text(file_id.file_id()).to_string();
let before = db.file_text(file_id.file_id()).text(&db).to_string();
let frange = FileRange { file_id, range: selection.into() };
let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange.into())
@ -281,7 +281,7 @@ fn check_with_config(
) {
let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
db.enable_proc_attr_macros();
let text_without_caret = db.file_text(file_with_caret_id.into()).to_string();
let text_without_caret = db.file_text(file_with_caret_id.into()).text(&db).to_string();
let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
@ -311,14 +311,14 @@ fn check_with_config(
let mut buf = String::new();
for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
let mut text = db.file_text(file_id).as_ref().to_owned();
let mut text = db.file_text(file_id).text(&db).as_ref().to_owned();
edit.apply(&mut text);
if let Some(snippet_edit) = snippet_edit {
snippet_edit.apply(&mut text);
}
if !skip_header {
let sr = db.file_source_root(file_id);
let sr = db.source_root(sr);
let source_root_id = db.file_source_root(file_id).source_root_id(&db);
let sr = db.source_root(source_root_id).source_root(&db);
let path = sr.path_for_file(&file_id).unwrap();
format_to!(buf, "//- {}\n", path)
}
@ -329,15 +329,16 @@ fn check_with_config(
let (dst, contents) = match file_system_edit {
FileSystemEdit::CreateFile { dst, initial_contents } => (dst, initial_contents),
FileSystemEdit::MoveFile { src, dst } => {
(dst, db.file_text(src).as_ref().to_owned())
(dst, db.file_text(src).text(&db).as_ref().to_owned())
}
FileSystemEdit::MoveDir { src, src_id, dst } => {
// temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet.
(dst, format!("{src_id:?}\n{src:?}"))
}
};
let sr = db.file_source_root(dst.anchor);
let sr = db.source_root(sr);
let source_root_id = db.file_source_root(dst.anchor).source_root_id(&db);
let sr = db.source_root(source_root_id).source_root(&db);
let mut base = sr.path_for_file(&dst.anchor).unwrap().clone();
base.pop();
let created_file_path = base.join(&dst.path).unwrap();
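A pattern worth calling out once for the whole diff: `file_text` no longer returns the text itself but the `FileText` Salsa input struct, and the contents are read through its `.text(db)` accessor, which is also what records the dependency on that input. A sketch of the new access path:

use ide_db::base_db::SourceDatabase;

// Sketch: reading file contents through the FileText input struct.
fn file_contents(db: &ide_db::RootDatabase, file_id: vfs::FileId) -> String {
    let file_text = db.file_text(file_id); // the input struct, not the text
    file_text.text(db).to_string() // tracked read of the contents
}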

View file

@ -4,7 +4,7 @@ use std::iter;
use hir::{HirFileIdExt, Module};
use ide_db::{
base_db::{SourceRootDatabase, VfsPath},
base_db::{SourceDatabase, VfsPath},
FxHashSet, RootDatabase, SymbolKind,
};
use syntax::{ast, AstNode, SyntaxKind};
@ -43,7 +43,10 @@ pub(crate) fn complete_mod(
let module_definition_file =
current_module.definition_source_file_id(ctx.db).original_file(ctx.db);
let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file.file_id()));
let source_root_id =
ctx.db.file_source_root(module_definition_file.file_id()).source_root_id(ctx.db);
let source_root = ctx.db.source_root(source_root_id).source_root(ctx.db);
let directory_to_look_for_submodules = directory_to_look_for_submodules(
current_module,
ctx.db,

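Source-root lookups likewise become a two-step chain through input structs: `file_source_root` yields a `FileSourceRootInput` read with `.source_root_id(db)`, and `source_root` yields a `SourceRootInput` read with `.source_root(db)`. A sketch of the chain, mirroring `is_editable_crate` later in this diff:

use ide_db::base_db::SourceDatabase;

// Sketch: file id -> owning source root, via the two input structs.
fn is_library_file(db: &ide_db::RootDatabase, file_id: vfs::FileId) -> bool {
    // Step 1: FileSourceRootInput -> the id of the owning source root.
    let source_root_id = db.file_source_root(file_id).source_root_id(db);
    // Step 2: SourceRootInput -> the SourceRoot value itself.
    db.source_root(source_root_id).source_root(db).is_library
}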
View file

@ -277,7 +277,7 @@ fn get_receiver_text(
range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.'))
}
let file_text = sema.db.file_text(range.file_id.file_id());
let mut text = file_text[range.range].to_owned();
let mut text = file_text.text(sema.db)[range.range].to_owned();
// The receiver texts should be interpreted as-is, as they are expected to be
// normal Rust expressions.

View file

@ -6,13 +6,14 @@ mod tests;
use std::{iter, ops::ControlFlow};
use base_db::{salsa::AsDynDatabase, RootQueryDb as _};
use hir::{
DisplayTarget, HasAttrs, Local, ModPath, ModuleDef, ModuleSource, Name, PathResolution,
ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
};
use ide_db::{
base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition,
FxHashMap, FxHashSet, RootDatabase,
famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition, FxHashMap, FxHashSet,
RootDatabase,
};
use syntax::{
ast::{self, AttrKind, NameOrNameRef},
@ -706,15 +707,19 @@ impl<'a> CompletionContext<'a> {
let _p = tracing::info_span!("CompletionContext::new").entered();
let sema = Semantics::new(db);
let file_id = sema.attach_first_edition(file_id)?;
let original_file = sema.parse(file_id);
let editioned_file_id = sema.attach_first_edition(file_id)?;
let editioned_file_id_wrapper =
ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), editioned_file_id);
let original_file = sema.parse(editioned_file_id_wrapper);
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file_with_fake_ident = {
let parse = db.parse(file_id);
parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, file_id.edition()).tree()
let (_, edition) = editioned_file_id.unpack();
let parse = db.parse(editioned_file_id_wrapper);
parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, edition).tree()
};
// always pick the token to the immediate left of the cursor, as that is what we are actually

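Parsing goes through the interned wrapper, but `reparse` still wants a plain `Edition`, so the edition is recovered from the original `span::EditionedFileId` via `unpack()`, which splits the packed value into its `FileId` and `Edition` halves. A one-function sketch:

// Sketch: EditionedFileId packs a FileId and an Edition into one value;
// unpack() splits them when only one half is needed, as above.
fn edition_of(editioned_file_id: span::EditionedFileId) -> span::Edition {
    let (_file_id, edition) = editioned_file_id.unpack();
    edition
}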
View file

@ -11,6 +11,7 @@ mod snippet;
mod tests;
use ide_db::{
base_db::salsa::AsDynDatabase,
imports::insert_use::{self, ImportScope},
syntax_helpers::tree_diff::diff,
text_edit::TextEdit,
@ -275,7 +276,11 @@ pub fn resolve_completion_edits(
let _p = tracing::info_span!("resolve_completion_edits").entered();
let sema = hir::Semantics::new(db);
let original_file = sema.parse(sema.attach_first_edition(file_id)?);
let editioned_file_id = sema.attach_first_edition(file_id)?;
let editioned_file_id =
ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
let original_file = sema.parse(editioned_file_id);
let original_token =
syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?;
let position_for_import = &original_token.parent()?;

View file

@ -2042,8 +2042,8 @@ fn f() { A { bar: b$0 }; }
expect![[r#"
fn bar() fn() -> u8 [type+name]
fn baz() fn() -> u8 [type]
ex bar() [type]
ex baz() [type]
ex bar() [type]
st A A []
fn f() fn() []
"#]],

View file

@ -246,7 +246,7 @@ pub(crate) fn check_edit_with_config(
.filter(|it| it.lookup() == what)
.collect_tuple()
.unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}"));
let mut actual = db.file_text(position.file_id).to_string();
let mut actual = db.file_text(position.file_id).text(&db).to_string();
let mut combined_edit = completion.text_edit.clone();

View file

@ -7,7 +7,7 @@ use crate::tests::{completion_list_with_config_raw, position, TEST_CONFIG};
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let completions = completion_list_with_config_raw(TEST_CONFIG, ra_fixture, true, None);
let (db, position) = position(ra_fixture);
let mut actual = db.file_text(position.file_id).to_string();
let mut actual = db.file_text(position.file_id).text(&db).to_string();
completions
.into_iter()
.exactly_one()

View file

@ -15,6 +15,7 @@ rust-version.workspace = true
cov-mark = "2.0.0-pre.1"
crossbeam-channel.workspace = true
tracing.workspace = true
dashmap.workspace = true
rayon.workspace = true
fst = { version = "0.4.7", default-features = false }
rustc-hash.workspace = true
@ -23,6 +24,8 @@ itertools.workspace = true
arrayvec.workspace = true
indexmap.workspace = true
memchr = "2.6.4"
salsa.workspace = true
query-group.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
bitflags.workspace = true
@ -34,6 +37,7 @@ profile.workspace = true
stdx.workspace = true
syntax.workspace = true
span.workspace = true
vfs.workspace = true
# ide should depend only on the top-level `hir` package. if you need
# something from some `hir-xxx` subpackage, reexport the API via `hir`.
hir.workspace = true

View file

@ -1,14 +1,9 @@
//! Applies changes to the IDE state transactionally.
use base_db::{
ra_salsa::{
debug::{DebugQueryTable, TableEntry},
Database, Durability, Query, QueryTable,
},
SourceRootId,
};
use profile::{memory_usage, Bytes};
use base_db::SourceRootId;
use profile::Bytes;
use rustc_hash::FxHashSet;
use salsa::{Database as _, Durability};
use triomphe::Arc;
use crate::{symbol_index::SymbolsDatabase, ChangeWithProcMacros, RootDatabase};
@ -52,23 +47,23 @@ impl RootDatabase {
pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes, usize)> {
let mut acc: Vec<(String, Bytes, usize)> = vec![];
fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize
where
QueryTable<'q, Q>: DebugQueryTable,
Q: Query,
<Q as Query>::Storage: 'q,
{
struct EntryCounter(usize);
impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
fn from_iter<T>(iter: T) -> EntryCounter
where
T: IntoIterator<Item = TableEntry<K, V>>,
{
EntryCounter(iter.into_iter().count())
}
}
table.entries::<EntryCounter>().0
}
// fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize
// where
// QueryTable<'q, Q>: DebugQueryTable,
// Q: Query,
// <Q as Query>::Storage: 'q,
// {
// struct EntryCounter(usize);
// impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
// fn from_iter<T>(iter: T) -> EntryCounter
// where
// T: IntoIterator<Item = TableEntry<K, V>>,
// {
// EntryCounter(iter.into_iter().count())
// }
// }
// table.entries::<EntryCounter>().0
// }
macro_rules! purge_each_query {
($($q:path)*) => {$(
@ -83,170 +78,170 @@ impl RootDatabase {
)*}
}
purge_each_query![
// SymbolsDatabase
crate::symbol_index::ModuleSymbolsQuery
crate::symbol_index::LibrarySymbolsQuery
crate::symbol_index::LocalRootsQuery
crate::symbol_index::LibraryRootsQuery
// HirDatabase
hir::db::AdtDatumQuery
hir::db::AdtVarianceQuery
hir::db::AssociatedTyDataQuery
hir::db::AssociatedTyValueQuery
hir::db::BorrowckQuery
hir::db::CallableItemSignatureQuery
hir::db::ConstEvalDiscriminantQuery
hir::db::ConstEvalQuery
hir::db::ConstEvalStaticQuery
hir::db::ConstParamTyQuery
hir::db::DynCompatibilityOfTraitQuery
hir::db::FieldTypesQuery
hir::db::FnDefDatumQuery
hir::db::FnDefVarianceQuery
hir::db::GenericDefaultsQuery
hir::db::GenericPredicatesForParamQuery
hir::db::GenericPredicatesQuery
hir::db::GenericPredicatesWithoutParentQuery
hir::db::ImplDatumQuery
hir::db::ImplSelfTyQuery
hir::db::ImplTraitQuery
hir::db::IncoherentInherentImplCratesQuery
hir::db::InferQuery
hir::db::InherentImplsInBlockQuery
hir::db::InherentImplsInCrateQuery
hir::db::InternCallableDefQuery
hir::db::InternClosureQuery
hir::db::InternCoroutineQuery
hir::db::InternImplTraitIdQuery
hir::db::InternLifetimeParamIdQuery
hir::db::InternTypeOrConstParamIdQuery
hir::db::LayoutOfAdtQuery
hir::db::LayoutOfTyQuery
hir::db::LookupImplMethodQuery
hir::db::MirBodyForClosureQuery
hir::db::MirBodyQuery
hir::db::MonomorphizedMirBodyForClosureQuery
hir::db::MonomorphizedMirBodyQuery
hir::db::ProgramClausesForChalkEnvQuery
hir::db::ReturnTypeImplTraitsQuery
hir::db::TargetDataLayoutQuery
hir::db::TraitDatumQuery
hir::db::TraitEnvironmentQuery
hir::db::TraitImplsInBlockQuery
hir::db::TraitImplsInCrateQuery
hir::db::TraitImplsInDepsQuery
hir::db::TraitSolveQuery
hir::db::TyQuery
hir::db::TypeAliasImplTraitsQuery
hir::db::ValueTyQuery
// // SymbolsDatabase
// crate::symbol_index::ModuleSymbolsQuery
// crate::symbol_index::LibrarySymbolsQuery
// crate::symbol_index::LocalRootsQuery
// crate::symbol_index::LibraryRootsQuery
// // HirDatabase
// hir::db::AdtDatumQuery
// hir::db::AdtVarianceQuery
// hir::db::AssociatedTyDataQuery
// hir::db::AssociatedTyValueQuery
// hir::db::BorrowckQuery
// hir::db::CallableItemSignatureQuery
// hir::db::ConstEvalDiscriminantQuery
// hir::db::ConstEvalQuery
// hir::db::ConstEvalStaticQuery
// hir::db::ConstParamTyQuery
// hir::db::DynCompatibilityOfTraitQuery
// hir::db::FieldTypesQuery
// hir::db::FnDefDatumQuery
// hir::db::FnDefVarianceQuery
// hir::db::GenericDefaultsQuery
// hir::db::GenericPredicatesForParamQuery
// hir::db::GenericPredicatesQuery
// hir::db::GenericPredicatesWithoutParentQuery
// hir::db::ImplDatumQuery
// hir::db::ImplSelfTyQuery
// hir::db::ImplTraitQuery
// hir::db::IncoherentInherentImplCratesQuery
// hir::db::InferQuery
// hir::db::InherentImplsInBlockQuery
// hir::db::InherentImplsInCrateQuery
// hir::db::InternCallableDefQuery
// hir::db::InternClosureQuery
// hir::db::InternCoroutineQuery
// hir::db::InternImplTraitIdQuery
// hir::db::InternLifetimeParamIdQuery
// hir::db::InternTypeOrConstParamIdQuery
// hir::db::LayoutOfAdtQuery
// hir::db::LayoutOfTyQuery
// hir::db::LookupImplMethodQuery
// hir::db::MirBodyForClosureQuery
// hir::db::MirBodyQuery
// hir::db::MonomorphizedMirBodyForClosureQuery
// hir::db::MonomorphizedMirBodyQuery
// hir::db::ProgramClausesForChalkEnvQuery
// hir::db::ReturnTypeImplTraitsQuery
// hir::db::TargetDataLayoutQuery
// hir::db::TraitDatumQuery
// hir::db::TraitEnvironmentQuery
// hir::db::TraitImplsInBlockQuery
// hir::db::TraitImplsInCrateQuery
// hir::db::TraitImplsInDepsQuery
// hir::db::TraitSolveQuery
// hir::db::TyQuery
// hir::db::TypeAliasImplTraitsQuery
// hir::db::ValueTyQuery
// DefDatabase
hir::db::AttrsQuery
hir::db::BlockDefMapQuery
hir::db::BlockItemTreeQuery
hir::db::BlockItemTreeWithSourceMapQuery
hir::db::BodyQuery
hir::db::BodyWithSourceMapQuery
hir::db::ConstDataQuery
hir::db::ConstVisibilityQuery
hir::db::CrateDefMapQuery
hir::db::CrateLangItemsQuery
hir::db::CrateNotableTraitsQuery
hir::db::CrateSupportsNoStdQuery
hir::db::EnumDataQuery
hir::db::EnumVariantDataWithDiagnosticsQuery
hir::db::ExpandProcAttrMacrosQuery
hir::db::ExprScopesQuery
hir::db::ExternCrateDeclDataQuery
hir::db::FieldVisibilitiesQuery
hir::db::FieldsAttrsQuery
hir::db::FieldsAttrsSourceMapQuery
hir::db::FileItemTreeQuery
hir::db::FileItemTreeWithSourceMapQuery
hir::db::FunctionDataQuery
hir::db::FunctionVisibilityQuery
hir::db::GenericParamsQuery
hir::db::GenericParamsWithSourceMapQuery
hir::db::ImplDataWithDiagnosticsQuery
hir::db::ImportMapQuery
hir::db::IncludeMacroInvocQuery
hir::db::InternAnonymousConstQuery
hir::db::InternBlockQuery
hir::db::InternConstQuery
hir::db::InternEnumQuery
hir::db::InternExternBlockQuery
hir::db::InternExternCrateQuery
hir::db::InternFunctionQuery
hir::db::InternImplQuery
hir::db::InternInTypeConstQuery
hir::db::InternMacro2Query
hir::db::InternMacroRulesQuery
hir::db::InternProcMacroQuery
hir::db::InternStaticQuery
hir::db::InternStructQuery
hir::db::InternTraitAliasQuery
hir::db::InternTraitQuery
hir::db::InternTypeAliasQuery
hir::db::InternUnionQuery
hir::db::InternUseQuery
hir::db::LangItemQuery
hir::db::Macro2DataQuery
hir::db::MacroDefQuery
hir::db::MacroRulesDataQuery
hir::db::NotableTraitsInDepsQuery
hir::db::ProcMacroDataQuery
hir::db::StaticDataQuery
hir::db::StructDataWithDiagnosticsQuery
hir::db::TraitAliasDataQuery
hir::db::TraitDataWithDiagnosticsQuery
hir::db::TypeAliasDataQuery
hir::db::UnionDataWithDiagnosticsQuery
// // DefDatabase
// hir::db::AttrsQuery
// hir::db::BlockDefMapQuery
// hir::db::BlockItemTreeQuery
// hir::db::BlockItemTreeWithSourceMapQuery
// hir::db::BodyQuery
// hir::db::BodyWithSourceMapQuery
// hir::db::ConstDataQuery
// hir::db::ConstVisibilityQuery
// hir::db::CrateDefMapQuery
// hir::db::CrateLangItemsQuery
// hir::db::CrateNotableTraitsQuery
// hir::db::CrateSupportsNoStdQuery
// hir::db::EnumDataQuery
// hir::db::EnumVariantDataWithDiagnosticsQuery
// hir::db::ExpandProcAttrMacrosQuery
// hir::db::ExprScopesQuery
// hir::db::ExternCrateDeclDataQuery
// hir::db::FieldVisibilitiesQuery
// hir::db::FieldsAttrsQuery
// hir::db::FieldsAttrsSourceMapQuery
// hir::db::FileItemTreeQuery
// hir::db::FileItemTreeWithSourceMapQuery
// hir::db::FunctionDataQuery
// hir::db::FunctionVisibilityQuery
// hir::db::GenericParamsQuery
// hir::db::GenericParamsWithSourceMapQuery
// hir::db::ImplDataWithDiagnosticsQuery
// hir::db::ImportMapQuery
// hir::db::IncludeMacroInvocQuery
// hir::db::InternAnonymousConstQuery
// hir::db::InternBlockQuery
// hir::db::InternConstQuery
// hir::db::InternEnumQuery
// hir::db::InternExternBlockQuery
// hir::db::InternExternCrateQuery
// hir::db::InternFunctionQuery
// hir::db::InternImplQuery
// hir::db::InternInTypeConstQuery
// hir::db::InternMacro2Query
// hir::db::InternMacroRulesQuery
// hir::db::InternProcMacroQuery
// hir::db::InternStaticQuery
// hir::db::InternStructQuery
// hir::db::InternTraitAliasQuery
// hir::db::InternTraitQuery
// hir::db::InternTypeAliasQuery
// hir::db::InternUnionQuery
// hir::db::InternUseQuery
// hir::db::LangItemQuery
// hir::db::Macro2DataQuery
// hir::db::MacroDefQuery
// hir::db::MacroRulesDataQuery
// hir::db::NotableTraitsInDepsQuery
// hir::db::ProcMacroDataQuery
// hir::db::StaticDataQuery
// hir::db::StructDataWithDiagnosticsQuery
// hir::db::TraitAliasDataQuery
// hir::db::TraitDataWithDiagnosticsQuery
// hir::db::TypeAliasDataQuery
// hir::db::UnionDataWithDiagnosticsQuery
// InternDatabase
hir::db::InternFunctionQuery
hir::db::InternStructQuery
hir::db::InternUnionQuery
hir::db::InternEnumQuery
hir::db::InternConstQuery
hir::db::InternStaticQuery
hir::db::InternTraitQuery
hir::db::InternTraitAliasQuery
hir::db::InternTypeAliasQuery
hir::db::InternImplQuery
hir::db::InternExternBlockQuery
hir::db::InternBlockQuery
hir::db::InternMacro2Query
hir::db::InternProcMacroQuery
hir::db::InternMacroRulesQuery
// // InternDatabase
// hir::db::InternFunctionQuery
// hir::db::InternStructQuery
// hir::db::InternUnionQuery
// hir::db::InternEnumQuery
// hir::db::InternConstQuery
// hir::db::InternStaticQuery
// hir::db::InternTraitQuery
// hir::db::InternTraitAliasQuery
// hir::db::InternTypeAliasQuery
// hir::db::InternImplQuery
// hir::db::InternExternBlockQuery
// hir::db::InternBlockQuery
// hir::db::InternMacro2Query
// hir::db::InternProcMacroQuery
// hir::db::InternMacroRulesQuery
// ExpandDatabase
hir::db::AstIdMapQuery
hir::db::DeclMacroExpanderQuery
hir::db::ExpandProcMacroQuery
hir::db::InternMacroCallQuery
hir::db::InternSyntaxContextQuery
hir::db::MacroArgQuery
hir::db::ParseMacroExpansionErrorQuery
hir::db::ParseMacroExpansionQuery
hir::db::ProcMacroSpanQuery
hir::db::ProcMacrosQuery
hir::db::RealSpanMapQuery
// // ExpandDatabase
// hir::db::AstIdMapQuery
// hir::db::DeclMacroExpanderQuery
// hir::db::ExpandProcMacroQuery
// hir::db::InternMacroCallQuery
// hir::db::InternSyntaxContextQuery
// hir::db::MacroArgQuery
// hir::db::ParseMacroExpansionErrorQuery
// hir::db::ParseMacroExpansionQuery
// hir::db::ProcMacroSpanQuery
// hir::db::ProcMacrosQuery
// hir::db::RealSpanMapQuery
// LineIndexDatabase
crate::LineIndexQuery
// // LineIndexDatabase
// crate::LineIndexQuery
// SourceDatabase
base_db::ParseQuery
base_db::ParseErrorsQuery
base_db::CrateGraphQuery
base_db::CrateWorkspaceDataQuery
// // SourceDatabase
// base_db::ParseQuery
// base_db::ParseErrorsQuery
// base_db::CrateGraphQuery
// base_db::CrateWorkspaceDataQuery
// SourceDatabaseExt
base_db::FileTextQuery
base_db::CompressedFileTextQuery
base_db::FileSourceRootQuery
base_db::SourceRootQuery
base_db::SourceRootCratesQuery
// // SourceDatabaseExt
// base_db::FileTextQuery
// base_db::CompressedFileTextQuery
// base_db::FileSourceRootQuery
// base_db::SourceRootQuery
// base_db::SourceRootCratesQuery
];
acc.sort_by_key(|it| std::cmp::Reverse(it.1));

View file

@ -1,6 +1,6 @@
//! See [`FamousDefs`].
use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase};
use base_db::{CrateOrigin, LangCrateOrigin, RootQueryDb as _};
use hir::{Crate, Enum, Function, Macro, Module, ScopeDef, Semantics, Trait};
use crate::RootDatabase;

View file

@ -2,7 +2,7 @@
use std::collections::VecDeque;
use base_db::SourceRootDatabase;
use base_db::SourceDatabase;
use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics};
use span::{Edition, FileId};
use syntax::{
@ -108,8 +108,8 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool {
let root_file = krate.root_file(db);
let source_root_id = db.file_source_root(root_file);
!db.source_root(source_root_id).is_library
let source_root_id = db.file_source_root(root_file).source_root_id(db);
!db.source_root(source_root_id).source_root(db).is_library
}
// FIXME: This is a weird function

View file

@ -1,3 +1,4 @@
use salsa::AsDynDatabase;
use stdx::trim_indent;
use test_fixture::WithFixture;
use test_utils::{assert_eq_text, CURSOR_MARKER};
@ -1250,9 +1251,15 @@ fn check_with_config(
) {
let (db, file_id, pos) = if ra_fixture_before.contains(CURSOR_MARKER) {
let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture_before);
let file_id = crate::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
(db, file_id, Some(range_or_offset))
} else {
let (db, file_id) = RootDatabase::with_single_file(ra_fixture_before);
let file_id = crate::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
(db, file_id, None)
};
let sema = &Semantics::new(&db);

View file

@ -46,13 +46,14 @@ pub mod syntax_helpers {
}
pub use hir::ChangeWithProcMacros;
use salsa::Durability;
use std::{fmt, mem::ManuallyDrop};
use base_db::{
ra_salsa::{self, Durability},
AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
DEFAULT_FILE_TEXT_LRU_CAP,
query_group::{self},
FileSourceRootInput, FileText, Files, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId,
SourceRootInput, Upcast,
};
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
@ -76,22 +77,21 @@ pub type FxIndexMap<K, V> =
pub type FilePosition = FilePositionWrapper<FileId>;
pub type FileRange = FileRangeWrapper<FileId>;
#[ra_salsa::database(
base_db::SourceRootDatabaseStorage,
base_db::SourceDatabaseStorage,
hir::db::ExpandDatabaseStorage,
hir::db::DefDatabaseStorage,
hir::db::HirDatabaseStorage,
hir::db::InternDatabaseStorage,
LineIndexDatabaseStorage,
symbol_index::SymbolsDatabaseStorage
)]
#[salsa::db]
pub struct RootDatabase {
// We use `ManuallyDrop` here because every codegen unit that contains a
// `&RootDatabase -> &dyn OtherDatabase` cast will instantiate its drop glue in the vtable,
// which duplicates `Weak::drop` and `Arc::drop` tens of thousands of times and makes
// compile times of all `ide_*` and downstream crates suffer greatly.
storage: ManuallyDrop<ra_salsa::Storage<RootDatabase>>,
storage: ManuallyDrop<salsa::Storage<Self>>,
files: Arc<Files>,
}
impl std::panic::RefUnwindSafe for RootDatabase {}
#[salsa::db]
impl salsa::Database for RootDatabase {
fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
}
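Old Salsa's `#[ra_salsa::database(...)]` attribute had to enumerate every query group's storage; new Salsa replaces it with a bare `#[salsa::db]` attribute on the struct and on each database impl, plus a single `salsa::Storage<Self>` field, as above. A minimal self-contained sketch of that shape, with illustrative names:

// Sketch of a new-Salsa database definition, mirroring RootDatabase.
#[salsa::db]
#[derive(Default, Clone)]
struct ExampleDatabase {
    storage: salsa::Storage<Self>,
}

#[salsa::db]
impl salsa::Database for ExampleDatabase {
    // Event hook; RootDatabase above ignores events the same way.
    fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
}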
impl Drop for RootDatabase {
@ -100,6 +100,12 @@ impl Drop for RootDatabase {
}
}
impl Clone for RootDatabase {
fn clone(&self) -> Self {
Self { storage: self.storage.clone(), files: self.files.clone() }
}
}
impl fmt::Debug for RootDatabase {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RootDatabase").finish()
@ -127,16 +133,68 @@ impl Upcast<dyn HirDatabase> for RootDatabase {
}
}
impl FileLoader for RootDatabase {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
FileLoaderDelegate(self).relevant_crates(file_id)
impl Upcast<dyn RootQueryDb> for RootDatabase {
fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
self
}
}
impl ra_salsa::Database for RootDatabase {}
impl Upcast<dyn SourceDatabase> for RootDatabase {
fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
self
}
}
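The blanket `FileLoader` impl gives way to `Upcast` impls, which let callers view a `&RootDatabase` as `&dyn RootQueryDb` or `&dyn SourceDatabase` without relying on trait-object upcasting. A self-contained sketch of the pattern, with illustrative names standing in for the base_db traits:

// Sketch of the Upcast pattern: a concrete type hands out references
// to itself as each trait object it implements.
trait QueryDb {
    fn revision(&self) -> u64;
}

trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

struct Db {
    revision: u64,
}

impl QueryDb for Db {
    fn revision(&self) -> u64 {
        self.revision
    }
}

impl Upcast<dyn QueryDb> for Db {
    fn upcast(&self) -> &(dyn QueryDb + 'static) {
        self
    }
}

fn main() {
    let db = Db { revision: 1 };
    let dyn_db: &dyn QueryDb = db.upcast();
    assert_eq!(dyn_db.revision(), 1);
}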
#[salsa::db]
impl SourceDatabase for RootDatabase {
fn file_text(&self, file_id: vfs::FileId) -> FileText {
self.files.file_text(file_id)
}
fn set_file_text(&mut self, file_id: vfs::FileId, text: &str) {
let files = Arc::clone(&self.files);
files.set_file_text(self, file_id, text);
}
fn set_file_text_with_durability(
&mut self,
file_id: vfs::FileId,
text: &str,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_file_text_with_durability(self, file_id, text, durability);
}
/// Source root of the file.
fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
self.files.source_root(source_root_id)
}
fn set_source_root_with_durability(
&mut self,
source_root_id: SourceRootId,
source_root: Arc<SourceRoot>,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_source_root_with_durability(self, source_root_id, source_root, durability);
}
fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
self.files.file_source_root(id)
}
fn set_file_source_root_with_durability(
&mut self,
id: vfs::FileId,
source_root_id: SourceRootId,
durability: Durability,
) {
let files = Arc::clone(&self.files);
files.set_file_source_root_with_durability(self, id, source_root_id, durability);
}
}
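Inputs change shape as well: instead of `#[salsa::input]` methods on a query-group trait, `RootDatabase` forwards to a `Files` side table that lazily creates one Salsa input struct per file and mutates it through the generated setters (hence the new `dashmap` dependency). `base_db::Files` itself is not shown in this diff; below is a hypothetical sketch of the pattern, with illustrative names and the durability plumbing elided:

use dashmap::DashMap;

// Hypothetical sketch of the side-table pattern behind Files.
#[salsa::input]
struct FileTextInput {
    #[return_ref]
    text: String,
}

#[derive(Default)]
struct FileTable {
    texts: DashMap<u32, FileTextInput>, // keyed by raw file id
}

impl FileTable {
    fn set_text(&self, db: &mut dyn salsa::Database, file_id: u32, text: &str) {
        let existing = self.texts.get(&file_id).map(|entry| *entry);
        match existing {
            // Existing input: overwrite the tracked field, bumping the
            // revision for everything that read this file's text.
            // (The real code also attaches a Durability here.)
            Some(input) => {
                input.set_text(db).to(text.to_owned());
            }
            // First sighting of the file: create the input and keep it.
            None => {
                let input = FileTextInput::new(db, text.to_owned());
                self.texts.insert(file_id, input);
            }
        }
    }
}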
impl Default for RootDatabase {
fn default() -> RootDatabase {
@ -146,14 +204,16 @@ impl Default for RootDatabase {
impl RootDatabase {
pub fn new(lru_capacity: Option<u16>) -> RootDatabase {
let mut db = RootDatabase { storage: ManuallyDrop::new(ra_salsa::Storage::default()) };
let mut db = RootDatabase {
storage: ManuallyDrop::new(salsa::Storage::default()),
files: Default::default(),
};
db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
db.set_proc_macros_with_durability(Default::default(), Durability::HIGH);
db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
db.update_base_query_lru_capacities(lru_capacity);
db.setup_syntax_context_root();
db
}
@ -161,57 +221,54 @@ impl RootDatabase {
self.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
}
pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<u16>) {
let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
// macro expansions are usually rather small, so we can afford to keep more of them alive
hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP);
hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
pub fn update_base_query_lru_capacities(&mut self, _lru_capacity: Option<u16>) {
// let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
// base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
// base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
// // macro expansions are usually rather small, so we can afford to keep more of them alive
// hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
// hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP);
// hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
}
pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, u16>) {
use hir::db as hir_db;
pub fn update_lru_capacities(&mut self, _lru_capacities: &FxHashMap<Box<str>, u16>) {
// FIXME(salsa-transition): bring this back; allow changing LRU settings at runtime.
// use hir::db as hir_db;
base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
lru_capacities
.get(stringify!(ParseQuery))
.copied()
.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP),
);
hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(
lru_capacities
.get(stringify!(ParseMacroExpansionQuery))
.copied()
.unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
);
hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity(
lru_capacities
.get(stringify!(BorrowckQuery))
.copied()
.unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP),
);
hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
// base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
// base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
// lru_capacities
// .get(stringify!(ParseQuery))
// .copied()
// .unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP),
// );
// hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(
// lru_capacities
// .get(stringify!(ParseMacroExpansionQuery))
// .copied()
// .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
// );
// hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity(
// lru_capacities
// .get(stringify!(BorrowckQuery))
// .copied()
// .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP),
// );
// hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
}
pub fn snapshot(&self) -> Self {
Self { storage: self.storage.clone(), files: self.files.clone() }
}
}
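`ParallelDatabase` and `Snapshot` are gone: a new-Salsa database handle is cheaply cloneable, and a clone plays the role of the old snapshot, which is all the `snapshot` shim above does now. A sketch of handing a handle to a worker thread under that model:

// Sketch: a clone of the database is the new equivalent of the old
// ParallelDatabase::snapshot() call.
fn spawn_worker(db: &ide_db::RootDatabase) -> std::thread::JoinHandle<()> {
    let db = db.clone();
    std::thread::spawn(move || {
        // ... run read-only queries against `db` here ...
        drop(db);
    })
}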
impl ra_salsa::ParallelDatabase for RootDatabase {
fn snapshot(&self) -> ra_salsa::Snapshot<RootDatabase> {
ra_salsa::Snapshot::new(RootDatabase {
storage: ManuallyDrop::new(self.storage.snapshot()),
})
}
}
#[ra_salsa::query_group(LineIndexDatabaseStorage)]
pub trait LineIndexDatabase: base_db::SourceDatabase {
#[query_group::query_group]
pub trait LineIndexDatabase: base_db::RootQueryDb {
fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
}
fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
let text = db.file_text(file_id);
let text = db.file_text(file_id).text(db);
Arc::new(LineIndex::new(&text))
}
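`#[query_group::query_group]` comes from the `query-group-macro` crate added by this commit: it keeps the old trait-per-group surface while expanding to new-Salsa tracked functions, which is why `LineIndexDatabase` changes only its attribute and supertrait. A minimal sketch of the same shape, with illustrative names:

use triomphe::Arc;

// Sketch of a query group under the porting shim; with no explicit
// `invoke`, the macro wires each method to the same-named free
// function, as with `line_index` above.
#[query_group::query_group]
pub trait ExampleDatabase: salsa::Database {
    fn uppercased(&self, text: String) -> Arc<String>;
}

fn uppercased(_db: &dyn ExampleDatabase, text: String) -> Arc<String> {
    Arc::new(text.to_uppercase())
}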

View file

@ -8,12 +8,10 @@ use std::time::Duration;
use hir::{db::DefDatabase, Symbol};
use itertools::Itertools;
use salsa::{Cancelled, Database};
use crate::{
base_db::{
ra_salsa::{Database, ParallelDatabase, Snapshot},
Cancelled, CrateId, SourceDatabase,
},
base_db::{CrateId, RootQueryDb},
symbol_index::SymbolsDatabase,
FxIndexMap, RootDatabase,
};
@ -66,7 +64,7 @@ pub fn parallel_prime_caches(
let (work_sender, progress_receiver) = {
let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
let (work_sender, work_receiver) = crossbeam_channel::unbounded();
let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
let prime_caches_worker = move |db: RootDatabase| {
while let Ok((crate_id, crate_name, kind)) = work_receiver.recv() {
progress_sender
.send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
@ -90,7 +88,7 @@ pub fn parallel_prime_caches(
stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
.allow_leak(true)
.name(format!("PrimeCaches#{id}"))
.spawn(move || Cancelled::catch(|| worker(db)))
.spawn(move || Cancelled::catch(|| worker(db.snapshot())))
.expect("failed to spawn thread");
}
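Cancellation now comes from the `salsa` crate itself: `salsa::Cancelled::catch` runs a closure and converts Salsa's cancellation unwind back into a `Result`, and each worker receives a database clone instead of a `Snapshot`. A sketch of the worker-side pattern used above:

use salsa::Cancelled;

// Sketch: each worker owns a clone of the database; Cancelled::catch
// turns the cancellation unwind back into an Err.
fn prime_on_worker(db: ide_db::RootDatabase) -> Result<(), Cancelled> {
    Cancelled::catch(move || {
        // ... cancellable query work against `db` goes here ...
        drop(db);
    })
}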
@ -108,7 +106,7 @@ pub fn parallel_prime_caches(
let mut additional_phases = vec![];
while crates_done < crates_total {
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
for crate_id in &mut crates_to_prime {
let krate = &graph[crate_id];
@ -145,7 +143,7 @@ pub fn parallel_prime_caches(
}
Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
// our workers may have died from a cancelled task, so we'll check and re-raise here.
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
break;
}
};
@ -177,7 +175,7 @@ pub fn parallel_prime_caches(
}
while crates_done < crates_total {
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
// recv_timeout is somewhat of a hack; we need a way, from this thread, to check whether the current salsa revision
// has been cancelled on a regular basis. Workers will only exit if they are processing a task that is cancelled, or
@ -189,7 +187,7 @@ pub fn parallel_prime_caches(
}
Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
// our workers may have died from a cancelled task, so we'll check and re-raise here.
db.unwind_if_cancelled();
db.unwind_if_revision_cancelled();
break;
}
};
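The polling loops above also switch from `unwind_if_cancelled` to new Salsa's `unwind_if_revision_cancelled`, which unwinds with `Cancelled` once a newer revision has superseded the current one; the `Cancelled::catch` shown earlier converts that unwind back into an error. A sketch of such a loop, with the channel payload reduced to a unit type:

use salsa::Database as _;

// Sketch: periodically yielding to cancellation from a long-running
// coordinator loop, as parallel_prime_caches does above.
fn wait_for_workers(db: &ide_db::RootDatabase, rx: &crossbeam_channel::Receiver<()>) {
    loop {
        // Unwinds (raising Cancelled) if a newer Salsa revision started.
        db.unwind_if_revision_cancelled();
        match rx.recv_timeout(std::time::Duration::from_millis(100)) {
            Ok(()) => break,
            Err(crossbeam_channel::RecvTimeoutError::Timeout) => continue,
            Err(crossbeam_channel::RecvTimeoutError::Disconnected) => break,
        }
    }
}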

Some files were not shown because too many files have changed in this diff.