make analysis sync

Aleksey Kladov 2018-10-15 22:29:24 +03:00
parent 76c51fae77
commit bbcd998433
4 changed files with 37 additions and 19 deletions

View file

@@ -30,6 +30,20 @@ impl salsa::Database for RootDatabase {
     }
 }
+
+impl salsa::ParallelDatabase for RootDatabase {
+    fn fork(&self) -> Self {
+        RootDatabase {
+            runtime: self.runtime.fork(),
+        }
+    }
+}
+
+impl Clone for RootDatabase {
+    fn clone(&self) -> RootDatabase {
+        salsa::ParallelDatabase::fork(self)
+    }
+}
 
 salsa::database_storage! {
     pub(crate) struct RootDatabaseStorage for RootDatabase {
         impl FilesDatabase {
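With Clone now delegating to salsa::ParallelDatabase::fork, cloning a RootDatabase hands out an independent handle onto the same storage, which is what lets the analysis types in the later hunks drop their Arc/RwLock wrappers and be plain Send values. A minimal sketch of the pattern this enables; the helper name is hypothetical, and it assumes it lives inside this crate so that db::RootDatabase, FileId, and LineIndex are in scope and are Send/Sync, as the new analysis_is_send test implies:

    use std::sync::Arc;
    use std::thread;

    // Hypothetical illustration, not part of this commit: clone (i.e. fork) the
    // database and run a query on a background thread. `clone` forks the salsa
    // runtime, so the moved handle is independent of the caller's.
    fn lines_in_background(db: &db::RootDatabase, file_id: FileId) -> thread::JoinHandle<Arc<LineIndex>> {
        let db = db.clone();
        thread::spawn(move || db.file_lines(file_id))
    }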

View file

@@ -78,13 +78,13 @@ impl Default for FileResolverImp {
 #[derive(Debug)]
 pub(crate) struct AnalysisHostImpl {
-    data: Arc<WorldData>
+    data: WorldData
 }
 
 impl AnalysisHostImpl {
     pub fn new() -> AnalysisHostImpl {
         AnalysisHostImpl {
-            data: Arc::new(WorldData::default()),
+            data: WorldData::default(),
         }
     }
     pub fn analysis(&self) -> AnalysisImpl {
@@ -114,18 +114,18 @@ impl AnalysisHostImpl {
         self.data_mut().libs.push(Arc::new(root));
     }
     fn data_mut(&mut self) -> &mut WorldData {
-        Arc::make_mut(&mut self.data)
+        &mut self.data
     }
 }
 
 pub(crate) struct AnalysisImpl {
     needs_reindex: AtomicBool,
-    data: Arc<WorldData>,
+    data: WorldData,
 }
 
 impl fmt::Debug for AnalysisImpl {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        (&*self.data).fmt(f)
+        self.data.fmt(f)
     }
 }
@@ -133,7 +133,7 @@ impl Clone for AnalysisImpl {
     fn clone(&self) -> AnalysisImpl {
         AnalysisImpl {
             needs_reindex: AtomicBool::new(self.needs_reindex.load(SeqCst)),
-            data: Arc::clone(&self.data),
+            data: self.data.clone(),
         }
     }
 }
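For context on the data_mut change above: Arc::make_mut is std's clone-on-write helper, so with data: Arc<WorldData> a mutation copied the whole WorldData whenever another handle to the Arc was still alive, while with data: WorldData the host owns the value and can hand out &mut directly. A standalone sketch of that clone-on-write behaviour, illustrative only and using an integer in place of WorldData:

    use std::sync::Arc;

    fn main() {
        let mut a = Arc::new(1);
        let b = Arc::clone(&a);        // a second handle keeps the value shared
        *Arc::make_mut(&mut a) += 1;   // shared, so make_mut clones before mutating
        assert_eq!(*a, 2);             // `a` now points at its own private copy
        assert_eq!(*b, 1);             // `b` still sees the original value
    }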

View file

@@ -258,3 +258,9 @@ impl LibraryData {
         LibraryData { root }
     }
 }
+
+#[test]
+fn analysis_is_send() {
+    fn is_send<T: Send>() {}
+    is_send::<Analysis>();
+}

View file

@@ -2,7 +2,6 @@ use std::{
     sync::Arc,
     panic,
 };
 
-use parking_lot::RwLock;
 use once_cell::sync::OnceCell;
 use rayon::prelude::*;
@@ -30,7 +29,7 @@ pub(crate) trait SourceRoot {
 #[derive(Default, Debug, Clone)]
 pub(crate) struct WritableSourceRoot {
-    db: Arc<RwLock<db::RootDatabase>>,
+    db: db::RootDatabase,
 }
 
 impl WritableSourceRoot {
@@ -39,7 +38,6 @@ impl WritableSourceRoot {
         changes: &mut dyn Iterator<Item=(FileId, Option<String>)>,
         file_resolver: Option<FileResolverImp>,
     ) {
-        let db = self.db.write();
         let mut changed = FxHashSet::default();
         let mut removed = FxHashSet::default();
         for (file_id, text) in changes {
@@ -48,13 +46,13 @@ impl WritableSourceRoot {
                     removed.insert(file_id);
                 }
                 Some(text) => {
-                    db.query(db::FileTextQuery)
+                    self.db.query(db::FileTextQuery)
                         .set(file_id, Arc::new(text));
                     changed.insert(file_id);
                 }
             }
         }
-        let file_set = db.file_set(());
+        let file_set = self.db.file_set(());
         let mut files: FxHashSet<FileId> = file_set
             .files
             .clone();
@@ -63,28 +61,28 @@
         }
         files.extend(changed);
         let resolver = file_resolver.unwrap_or_else(|| file_set.resolver.clone());
-        db.query(db::FileSetQuery)
+        self.db.query(db::FileSetQuery)
             .set((), Arc::new(db::FileSet { files, resolver }));
     }
 }
 
 impl SourceRoot for WritableSourceRoot {
     fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
-        self.db.read().module_tree(())
+        self.db.module_tree(())
     }
     fn contains(&self, file_id: FileId) -> bool {
-        let db = self.db.read();
-        let files = &db.file_set(()).files;
-        files.contains(&file_id)
+        self.db.file_set(())
+            .files
+            .contains(&file_id)
     }
     fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
-        self.db.read().file_lines(file_id)
+        self.db.file_lines(file_id)
    }
     fn syntax(&self, file_id: FileId) -> File {
-        self.db.read().file_syntax(file_id)
+        self.db.file_syntax(file_id)
     }
     fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
-        let db = self.db.read();
+        let db = &self.db;
         let symbols = db.file_set(());
         let symbols = symbols
             .files