mirror of https://github.com/google/gn-language-server.git
synced 2025-12-23 12:26:43 +00:00

Support parallel analysis (again)

This commit is contained in: parent 8d66390609, commit 3fd94dc3b6

10 changed files with 88 additions and 57 deletions
@@ -51,7 +51,7 @@ The server uses a freshness-checking mechanism to avoid re-analyzing unchanged files
 
 ### Concurrency
 
-The server is built on `tokio` to handle multiple LSP requests concurrently without blocking. Shared state, such as the `DocumentStorage` and `Analyzer`, is managed safely across threads using `Arc<Mutex<T>>`.
+The server is built on `tokio` to handle multiple LSP requests concurrently without blocking. Shared state is managed safely across threads. `DocumentStorage` uses `Arc<Mutex<T>>`, while the `Analyzer` uses `RwLock` and fine-grained internal locking to allow concurrent analysis of multiple files.
 
 ### Background Indexing
 
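The locking layout that the updated paragraph describes looks roughly like this. A minimal sketch, assuming placeholder `Storage` and `Workspace` types in place of the real `DocumentStorage` and `WorkspaceAnalyzer`; the `workspace_for` body mirrors the read-then-write pattern visible in the hunks below, not the actual server code:

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, RwLock};

struct Storage; // stands in for DocumentStorage
struct Workspace; // stands in for WorkspaceAnalyzer

impl Workspace {
    // &self, not &mut self: many threads may analyze files in the same
    // workspace at once; internal state uses its own fine-grained locks.
    fn analyze(&self, _path: &Path) {}
}

struct Analyzer {
    // Coarse lock: document accesses are short and exclusive.
    storage: Arc<Mutex<Storage>>,
    // Many readers, rare writers; values are plain Arcs, no outer Mutex.
    workspaces: RwLock<BTreeMap<PathBuf, Arc<Workspace>>>,
}

impl Analyzer {
    fn workspace_for(&self, root: &Path) -> Arc<Workspace> {
        // Fast path under the read lock; the guard is released before the
        // returned handle is used, so lookups never block ongoing analysis.
        if let Some(ws) = self.workspaces.read().unwrap().get(root) {
            return ws.clone();
        }
        // Slow path: take the write lock only to insert a new workspace.
        let mut map = self.workspaces.write().unwrap();
        map.entry(root.to_path_buf())
            .or_insert_with(|| Arc::new(Workspace))
            .clone()
    }
}

fn main() {
    let analyzer = Analyzer {
        storage: Arc::new(Mutex::new(Storage)),
        workspaces: RwLock::new(BTreeMap::new()),
    };
    analyzer
        .workspace_for(Path::new("/ws"))
        .analyze(Path::new("/ws/BUILD.gn"));
}
```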

@@ -58,7 +58,7 @@ mod utils;
 
 pub struct Analyzer {
     storage: Arc<Mutex<DocumentStorage>>,
     workspace_finder: WorkspaceFinder,
-    workspaces: RwLock<BTreeMap<PathBuf, Arc<Mutex<WorkspaceAnalyzer>>>>,
+    workspaces: RwLock<BTreeMap<PathBuf, Arc<WorkspaceAnalyzer>>>,
 }
 
 impl Analyzer {

@@ -71,11 +71,7 @@ impl Analyzer {
     }
 
     pub fn analyze_file(&self, path: &Path, request_time: Instant) -> Result<Arc<AnalyzedFile>> {
-        Ok(self
-            .workspace_for(path)?
-            .lock()
-            .unwrap()
-            .analyze_file(path, request_time))
+        Ok(self.workspace_for(path)?.analyze_file(path, request_time))
     }
 
     pub fn analyze_at(

@@ -86,12 +82,10 @@ impl Analyzer {
     ) -> Result<OwnedEnvironment> {
         Ok(self
             .workspace_for(&file.document.path)?
-            .lock()
-            .unwrap()
             .analyze_at(file, pos, request_time))
     }
 
-    pub fn workspaces(&self) -> BTreeMap<PathBuf, Arc<Mutex<WorkspaceAnalyzer>>> {
+    pub fn workspaces(&self) -> BTreeMap<PathBuf, Arc<WorkspaceAnalyzer>> {
         self.workspaces.read().unwrap().clone()
     }
 

@@ -99,7 +93,7 @@ impl Analyzer {
         &self.workspace_finder
     }
 
-    pub fn workspace_for(&self, path: &Path) -> Result<Arc<Mutex<WorkspaceAnalyzer>>> {
+    pub fn workspace_for(&self, path: &Path) -> Result<Arc<WorkspaceAnalyzer>> {
         if !path.is_absolute() {
             return Err(Error::General("Path must be absolute".to_string()));
         }

@@ -117,7 +111,7 @@ impl Analyzer {
     {
         let read_lock = self.workspaces.read().unwrap();
         if let Some(analyzer) = read_lock.get(workspace_root) {
-            if analyzer.lock().unwrap().context().dot_gn_version == dot_gn_version {
+            if analyzer.context().dot_gn_version == dot_gn_version {
                 return Ok(analyzer.clone());
             }
         }

@@ -135,7 +129,7 @@ impl Analyzer {
             build_config,
         };
 
-        let analyzer = Arc::new(Mutex::new(WorkspaceAnalyzer::new(&context, &self.storage)));
+        let analyzer = Arc::new(WorkspaceAnalyzer::new(&context, &self.storage));
 
         let mut write_lock = self.workspaces.write().unwrap();
         Ok(write_lock

@@ -148,7 +142,8 @@ impl Analyzer {
 pub struct WorkspaceAnalyzer {
     context: WorkspaceContext,
     storage: Arc<Mutex<DocumentStorage>>,
-    cache: BTreeMap<PathBuf, Arc<AnalyzedFile>>,
+    #[allow(clippy::type_complexity)]
+    cache: RwLock<BTreeMap<PathBuf, Arc<Mutex<Option<Arc<AnalyzedFile>>>>>>,
 }
 
 impl WorkspaceAnalyzer {

@@ -166,18 +161,38 @@ impl WorkspaceAnalyzer {
 
     pub fn cached_files_for_symbols(&self) -> Vec<Arc<AnalyzedFile>> {
         self.cache
+            .read()
+            .unwrap()
             .values()
+            .filter_map(|entry| entry.lock().unwrap().clone())
             .filter(|file| !file.external)
-            .cloned()
             .collect()
     }
 
     pub fn cached_files_for_references(&self) -> Vec<Arc<AnalyzedFile>> {
-        self.cache.values().cloned().collect()
+        self.cache
+            .read()
+            .unwrap()
+            .values()
+            .filter_map(|entry| entry.lock().unwrap().clone())
+            .collect()
     }
 
-    pub fn analyze_file(&mut self, path: &Path, request_time: Instant) -> Arc<AnalyzedFile> {
-        if let Some(cached_file) = self.cache.get(path) {
+    pub fn analyze_file(&self, path: &Path, request_time: Instant) -> Arc<AnalyzedFile> {
+        let entry = {
+            let read = self.cache.read().unwrap();
+            if let Some(entry) = read.get(path) {
+                entry.clone()
+            } else {
+                drop(read);
+                let mut write = self.cache.write().unwrap();
+                write.entry(path.to_path_buf()).or_default().clone()
+            }
+        };
+
+        let mut entry = entry.lock().unwrap();
+
+        if let Some(cached_file) = entry.as_ref() {
             if cached_file
                 .key
                 .verify(request_time, &self.storage.lock().unwrap())
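The new cache type is dense, but each layer has one job: the outer `RwLock` guards the map itself, and each `Arc<Mutex<Option<...>>>` slot serializes work on a single file, so threads analyzing different files contend only briefly on the map while duplicate requests for the same file wait on that file's slot. A self-contained sketch of the same double-checked pattern, using a placeholder `File` type and a hypothetical `get_or_compute` helper rather than the real `AnalyzedFile` machinery:

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, RwLock};

struct File;

struct Cache {
    entries: RwLock<BTreeMap<PathBuf, Arc<Mutex<Option<Arc<File>>>>>>,
}

impl Cache {
    fn get_or_compute(&self, path: &Path, compute: impl FnOnce() -> File) -> Arc<File> {
        // Step 1: grab the per-path slot, taking the map's write lock only
        // on a miss. The map lock is never held while computing.
        let slot = {
            let read = self.entries.read().unwrap();
            if let Some(slot) = read.get(path) {
                slot.clone()
            } else {
                drop(read);
                // Another thread may have inserted meanwhile; entry() makes
                // the insert race-safe.
                let mut write = self.entries.write().unwrap();
                write.entry(path.to_path_buf()).or_default().clone()
            }
        };

        // Step 2: lock only this file's slot. Requests for other files
        // proceed in parallel; duplicate requests for this file block here
        // instead of recomputing.
        let mut slot = slot.lock().unwrap();
        if let Some(file) = slot.as_ref() {
            return file.clone();
        }
        let file = Arc::new(compute());
        *slot = Some(file.clone());
        file
    }
}

fn main() {
    let cache = Cache { entries: RwLock::new(BTreeMap::new()) };
    let _f = cache.get_or_compute(Path::new("/ws/BUILD.gn"), || File);
}
```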

@@ -187,11 +202,11 @@ impl WorkspaceAnalyzer {
         }
 
         let new_file = Arc::new(self.analyze_file_uncached(path, request_time));
-        self.cache.insert(path.to_path_buf(), new_file.clone());
+        *entry = Some(new_file.clone());
         new_file
     }
 
-    pub fn analyze_files(&mut self, path: &Path, request_time: Instant) -> OwnedEnvironment {
+    pub fn analyze_files(&self, path: &Path, request_time: Instant) -> OwnedEnvironment {
         let mut files: Vec<Arc<AnalyzedFile>> = Vec::new();
         self.collect_imports(path, request_time, &mut files, &mut HashSet::new());
 

@@ -210,7 +225,7 @@ impl WorkspaceAnalyzer {
     }
 
     pub fn analyze_at(
-        &mut self,
+        &self,
         file: &Arc<AnalyzedFile>,
         pos: usize,
         request_time: Instant,

@@ -256,7 +271,7 @@ impl WorkspaceAnalyzer {
     }
 
     fn collect_imports(
-        &mut self,
+        &self,
         path: &Path,
         request_time: Instant,
         files: &mut Vec<Arc<AnalyzedFile>>,

@@ -272,7 +287,7 @@ impl WorkspaceAnalyzer {
         }
     }
 
-    fn analyze_file_uncached(&mut self, path: &Path, request_time: Instant) -> AnalyzedFile {
+    fn analyze_file_uncached(&self, path: &Path, request_time: Instant) -> AnalyzedFile {
         let document = self.storage.lock().unwrap().read(path);
         let ast = OwnedBlock::new(document.clone(), |document| parse(&document.data));
 

@@ -299,11 +314,7 @@ impl WorkspaceAnalyzer {
         )
     }
 
-    fn analyze_block<'p>(
-        &mut self,
-        block: &'p Block<'p>,
-        document: &'p Document,
-    ) -> AnalyzedBlock<'p> {
+    fn analyze_block<'p>(&self, block: &'p Block<'p>, document: &'p Document) -> AnalyzedBlock<'p> {
         let mut statements: Vec<AnalyzedStatement> = Vec::new();
 
         for statement in &block.statements {

@@ -347,7 +358,7 @@ impl WorkspaceAnalyzer {
     }
 
     fn analyze_call<'p>(
-        &mut self,
+        &self,
         call: &'p Call<'p>,
         document: &'p Document,
     ) -> AnalyzedStatement<'p> {

@@ -458,7 +469,7 @@ impl WorkspaceAnalyzer {
     }
 
     fn analyze_condition<'p>(
-        &mut self,
+        &self,
         condition: &'p Condition<'p>,
         document: &'p Document,
     ) -> AnalyzedCondition<'p> {

@@ -482,7 +493,7 @@ impl WorkspaceAnalyzer {
     }
 
     fn analyze_expr<'p>(
-        &mut self,
+        &self,
         expr: &'p Expr<'p>,
         document: &'p Document,
     ) -> Vec<AnalyzedBlock<'p>> {

@@ -526,11 +537,7 @@ impl WorkspaceAnalyzer {
         }
     }
 
-    fn analyze_exports<'p>(
-        &mut self,
-        block: &'p Block<'p>,
-        document: &'p Document,
-    ) -> FileExports<'p> {
+    fn analyze_exports<'p>(&self, block: &'p Block<'p>, document: &'p Document) -> FileExports<'p> {
         let mut exports = MutableFileExports::new();
         let mut declare_args_stack: Vec<&Call> = Vec::new();
 

@@ -46,4 +46,5 @@ pub struct ExperimentalConfigurations {
     pub undefined_variable_analysis: bool,
     pub workspace_symbols: bool,
     pub target_lens: bool,
+    pub parallel_indexing: bool,
 }

@@ -115,7 +115,7 @@ impl<'p> PrimaryExpr<'p> {
     fn collect_undefined_identifiers(
         &self,
         file: &'p AnalyzedFile,
-        analyzer: &mut WorkspaceAnalyzer,
+        analyzer: &WorkspaceAnalyzer,
         request_time: Instant,
         tracker: &EnvironmentTracker,
         diagnostics: &mut Vec<Diagnostic>,

@@ -188,7 +188,7 @@ impl<'p> Expr<'p> {
     fn collect_undefined_identifiers(
         &self,
         file: &'p AnalyzedFile,
-        analyzer: &mut WorkspaceAnalyzer,
+        analyzer: &WorkspaceAnalyzer,
         request_time: Instant,
         tracker: &EnvironmentTracker,
         diagnostics: &mut Vec<Diagnostic>,

@@ -236,7 +236,7 @@ impl<'p> AnalyzedBlock<'p> {
     fn collect_undefined_identifiers(
         &self,
         file: &AnalyzedFile,
-        analyzer: &mut WorkspaceAnalyzer,
+        analyzer: &WorkspaceAnalyzer,
         request_time: Instant,
         tracker: &mut EnvironmentTracker,
         diagnostics: &mut Vec<Diagnostic>,

@@ -397,7 +397,6 @@ pub fn collect_undefined_identifiers(
     let Ok(analyzer) = analyzer.workspace_for(&file.workspace_root) else {
         return Vec::new();
     };
-    let mut analyzer = analyzer.lock().unwrap();
 
     // Process BUILDCONFIG.gn.
     let mut tracker = EnvironmentTracker::new();

@@ -408,7 +407,7 @@ pub fn collect_undefined_identifiers(
     let mut diagnostics: Vec<Diagnostic> = Vec::new();
     file.analyzed_root.get().collect_undefined_identifiers(
         file,
-        &mut analyzer,
+        &analyzer,
         request_time,
         &mut tracker,
         &mut diagnostics,

@@ -14,6 +14,7 @@
 
 use std::{path::Path, time::Instant};
 
+use futures::{future::join_all, FutureExt};
 use tower_lsp::lsp_types::MessageType;
 
 use crate::{

@@ -24,7 +25,7 @@ use crate::{
     server::RequestContext,
 };
 
-pub async fn index(context: &RequestContext, workspace_root: &Path) {
+pub async fn index(context: &RequestContext, workspace_root: &Path, parallel: bool) {
     context
         .client
         .log_message(

@@ -34,16 +35,29 @@ pub async fn index(context: &RequestContext, workspace_root: &Path) {
         .await;
 
     let start_time = Instant::now();
+    let request_time = context.request_time;
+    let mut tasks = Vec::new();
     let mut count = 0;
 
     for path in find_gn_in_workspace_for_scan(workspace_root) {
-        context
-            .analyzer
-            .analyze_file(&path, context.request_time)
-            .ok();
+        let analyzer = context.analyzer.clone();
+        let task = async move {
+            analyzer.analyze_file(&path, request_time).ok();
+        };
+        let task = if parallel {
+            async move {
+                tokio::spawn(task);
+            }
+            .boxed()
+        } else {
+            task.boxed()
+        };
+        tasks.push(task);
         count += 1;
     }
 
+    join_all(tasks).await;
+
     let elapsed = start_time.elapsed();
     context
         .client
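Both indexing modes build one future per file and drive the whole batch with `join_all`; `.boxed()` erases the two differently shaped futures into a common `BoxFuture` so they fit in one `Vec`. Note that the parallel arm in the commit spawns without awaiting the `JoinHandle`, so `join_all` returns once everything is spawned and the analyses finish in the background. A runnable sketch of the same shape; the per-file closure is a stand-in for `analyzer.analyze_file(...)`, and unlike the commit this sketch awaits the handle so the program waits for the work:

```rust
use futures::{future::join_all, FutureExt};

#[tokio::main]
async fn main() {
    let parallel = true;
    let mut tasks = Vec::new();

    for i in 0..4u32 {
        // Stand-in for the per-file analysis work.
        let work = async move {
            println!("analyzed file {i}");
        };
        let task = if parallel {
            // Run on the multi-threaded runtime; awaiting the JoinHandle
            // here is a deliberate difference from the fire-and-forget
            // spawn in the commit above.
            async move {
                tokio::spawn(work).await.ok();
            }
            .boxed()
        } else {
            // Drive the future directly; with synchronous work inside,
            // join_all effectively runs these one after another.
            work.boxed()
        };
        tasks.push(task);
    }

    join_all(tasks).await;
}
```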

@@ -118,7 +118,12 @@ impl Backend {
         }
     }
 
-    async fn maybe_index_workspace_for(&self, context: &RequestContext, path: &Path) {
+    async fn maybe_index_workspace_for(
+        &self,
+        context: &RequestContext,
+        path: &Path,
+        parallel_indexing: bool,
+    ) {
         let Some(workspace_root) = context.analyzer.workspace_finder().find_for(path) else {
             return;
         };

@@ -136,7 +141,7 @@ impl Backend {
 
         let context = context.clone();
         spawn(async move {
-            indexing::index(&context, &workspace_root).await;
+            indexing::index(&context, &workspace_root, parallel_indexing).await;
             indexed.set();
         });
     }

@@ -209,7 +214,12 @@ impl LanguageServer for Backend {
         };
         let configurations = self.context.client.configurations().await;
         if configurations.background_indexing {
-            self.maybe_index_workspace_for(&context, &path).await;
+            self.maybe_index_workspace_for(
+                &context,
+                &path,
+                configurations.experimental.parallel_indexing,
+            )
+            .await;
         }
         providers::document::did_open(&self.context.request(), params).await;
     }

@@ -88,11 +88,8 @@ fn compute_import_actions(
     let Ok(workspace) = context.analyzer.workspace_for(path) else {
         return Vec::new();
     };
-    let current_file = workspace
-        .lock()
-        .unwrap()
-        .analyze_file(path, context.request_time);
-    let workspace_files = workspace.lock().unwrap().cached_files_for_symbols();
+    let current_file = workspace.analyze_file(path, context.request_time);
+    let workspace_files = workspace.cached_files_for_symbols();
     let imports: Vec<String> = workspace_files
         .into_iter()
         .filter(|file| file.exports.get().variables.contains_key(name))

@@ -41,8 +41,6 @@ pub fn target_references(
     let cached_files = context
         .analyzer
        .workspace_for(&current_file.workspace_root)?
-        .lock()
-        .unwrap()
         .cached_files_for_references();
 
     let mut references: Vec<Location> = Vec::new();

@@ -48,7 +48,7 @@ pub async fn workspace_symbol(
         signal.wait().await;
     }
 
-    let files = workspace.lock().unwrap().cached_files_for_symbols();
+    let files = workspace.cached_files_for_symbols();
     for file in files {
         symbols.extend(extract_symbols(&file, &query));
     }

@@ -78,6 +78,11 @@
       "default": true,
       "description": "Reports syntax errors."
     },
+    "gn.experimental.parallelIndexing": {
+      "type": "boolean",
+      "default": false,
+      "description": "Enables parallel indexing (experimental)."
+    },
     "gn.experimental.targetLens": {
       "type": "boolean",
       "default": false,
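For reference, the new `gn.experimental.parallelIndexing` client setting (camelCase) corresponds to the `parallel_indexing` field added to `ExperimentalConfigurations` above. How the server actually deserializes client settings is not part of this diff; a hypothetical serde-based mapping might look like:

```rust
use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase", default)]
struct ExperimentalConfigurations {
    // Would be populated from "gn.experimental.parallelIndexing";
    // the serde wiring here is an assumption, not the repo's code.
    parallel_indexing: bool,
}

fn main() {
    let cfg: ExperimentalConfigurations =
        serde_json::from_str(r#"{"parallelIndexing": true}"#).unwrap();
    assert!(cfg.parallel_indexing);
}
```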