Auto merge of #12808 - Veykril:check-workspace, r=Veykril

feat: Only flycheck workspace that belongs to saved file

Supersedes https://github.com/rust-lang/rust-analyzer/pull/11038

There is still the problem that all the diagnostics are cleared; only clearing the diagnostics of the relevant workspace isn't easily doable, I think, so I will have to dig into that.
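
Roughly, the idea behind the change, as a minimal sketch (the types and the `on_did_save` helper below are invented for illustration and are much simpler than the real `FlycheckHandle`/`GlobalState` code): each flycheck handle remembers the id of the workspace it was spawned for, and on save only the handles whose workspace contains the saved file are restarted, with a fallback to restarting all of them when the file can't be attributed to any workspace.

```rust
// Simplified sketch of the per-workspace flycheck idea; the real rust-analyzer
// types (FlycheckHandle, GlobalState, ...) are considerably more involved.
struct FlycheckHandle {
    id: usize, // index of the workspace this checker belongs to
}

impl FlycheckHandle {
    fn id(&self) -> usize {
        self.id
    }
    fn update(&self) {
        // In the real code this sends a `Restart` message to the flycheck actor.
        println!("restarting cargo check for workspace {}", self.id);
    }
}

/// Restart only the checkers whose workspace contains the saved file;
/// if the file cannot be attributed to any workspace, restart all of them.
fn on_did_save(flychecks: &[FlycheckHandle], saved_file_workspaces: &[usize]) {
    let mut updated = false;
    for flycheck in flychecks {
        if saved_file_workspaces.contains(&flycheck.id()) {
            flycheck.update();
            updated = true;
        }
    }
    if !updated {
        flychecks.iter().for_each(FlycheckHandle::update);
    }
}

fn main() {
    let flychecks = vec![FlycheckHandle { id: 0 }, FlycheckHandle { id: 1 }];
    // Pretend the saved file belongs to workspace 1 only.
    on_did_save(&flychecks, &[1]);
}
```
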
bors 2022-08-04 12:57:04 +00:00
commit 0fe3bcfd35
7 changed files with 123 additions and 23 deletions

View file

@@ -57,6 +57,7 @@ pub struct FlycheckHandle {
     // XXX: drop order is significant
     sender: Sender<Restart>,
     _thread: jod_thread::JoinHandle,
+    id: usize,
 }

 impl FlycheckHandle {
@@ -72,18 +73,22 @@ impl FlycheckHandle {
             .name("Flycheck".to_owned())
             .spawn(move || actor.run(receiver))
             .expect("failed to spawn thread");
-        FlycheckHandle { sender, _thread: thread }
+        FlycheckHandle { id, sender, _thread: thread }
     }

     /// Schedule a re-start of the cargo check worker.
     pub fn update(&self) {
         self.sender.send(Restart).unwrap();
     }
+
+    pub fn id(&self) -> usize {
+        self.id
+    }
 }

 pub enum Message {
     /// Request adding a diagnostic with fixes included to a file
-    AddDiagnostic { workspace_root: AbsPathBuf, diagnostic: Diagnostic },
+    AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },

     /// Request check progress notification to client
     Progress {
@@ -96,8 +101,9 @@ pub enum Message {
 impl fmt::Debug for Message {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            Message::AddDiagnostic { workspace_root, diagnostic } => f
+            Message::AddDiagnostic { id, workspace_root, diagnostic } => f
                 .debug_struct("AddDiagnostic")
+                .field("id", id)
                 .field("workspace_root", workspace_root)
                 .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
                 .finish(),
@@ -183,7 +189,7 @@ impl FlycheckActor {
                     }
                 }
                 Event::CheckEvent(None) => {
-                    tracing::debug!("flycheck finished");
+                    tracing::debug!(flycheck_id = self.id, "flycheck finished");

                     // Watcher finished
                     let cargo_handle = self.cargo_handle.take().unwrap();
@@ -203,6 +209,7 @@ impl FlycheckActor {
                     CargoMessage::Diagnostic(msg) => {
                         self.send(Message::AddDiagnostic {
+                            id: self.id,
                             workspace_root: self.workspace_root.clone(),
                             diagnostic: msg,
                         });

View file

@@ -106,6 +106,14 @@ impl AsRef<Path> for AbsPath {
     }
 }

+impl ToOwned for AbsPath {
+    type Owned = AbsPathBuf;
+
+    fn to_owned(&self) -> Self::Owned {
+        AbsPathBuf(self.0.to_owned())
+    }
+}
+
 impl<'a> TryFrom<&'a Path> for &'a AbsPath {
     type Error = &'a Path;
     fn try_from(path: &'a Path) -> Result<&'a AbsPath, &'a Path> {

View file

@@ -8,7 +8,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use crate::lsp_ext;

-pub(crate) type CheckFixes = Arc<FxHashMap<FileId, Vec<Fix>>>;
+pub(crate) type CheckFixes = Arc<FxHashMap<usize, FxHashMap<FileId, Vec<Fix>>>>;

 #[derive(Debug, Default, Clone)]
 pub struct DiagnosticsMapConfig {
@@ -22,7 +22,7 @@ pub(crate) struct DiagnosticCollection {
     // FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
     pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
     // FIXME: should be Vec<flycheck::Diagnostic>
-    pub(crate) check: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
+    pub(crate) check: FxHashMap<usize, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
     pub(crate) check_fixes: CheckFixes,
     changes: FxHashSet<FileId>,
 }
@@ -35,9 +35,19 @@ pub(crate) struct Fix {
 }

 impl DiagnosticCollection {
-    pub(crate) fn clear_check(&mut self) {
+    pub(crate) fn clear_check(&mut self, flycheck_id: usize) {
+        if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
+            it.clear();
+        }
+        if let Some(it) = self.check.get_mut(&flycheck_id) {
+            self.changes.extend(it.drain().map(|(key, _value)| key));
+        }
+    }
+
+    pub(crate) fn clear_check_all(&mut self) {
         Arc::make_mut(&mut self.check_fixes).clear();
-        self.changes.extend(self.check.drain().map(|(key, _value)| key))
+        self.changes
+            .extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key)))
     }

     pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
@@ -47,11 +57,12 @@ impl DiagnosticCollection {
     pub(crate) fn add_check_diagnostic(
         &mut self,
+        flycheck_id: usize,
         file_id: FileId,
         diagnostic: lsp_types::Diagnostic,
         fix: Option<Fix>,
     ) {
-        let diagnostics = self.check.entry(file_id).or_default();
+        let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default();
         for existing_diagnostic in diagnostics.iter() {
             if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
                 return;
@@ -59,7 +70,7 @@ impl DiagnosticCollection {
         }

         let check_fixes = Arc::make_mut(&mut self.check_fixes);
-        check_fixes.entry(file_id).or_default().extend(fix);
+        check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().extend(fix);
         diagnostics.push(diagnostic);
         self.changes.insert(file_id);
     }
@@ -89,7 +100,8 @@ impl DiagnosticCollection {
         file_id: FileId,
     ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
         let native = self.native.get(&file_id).into_iter().flatten();
-        let check = self.check.get(&file_id).into_iter().flatten();
+        let check =
+            self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
         native.chain(check)
     }

View file

@@ -192,6 +192,7 @@ impl GlobalState {
                 if let Some(path) = vfs.file_path(file.file_id).as_path() {
                     let path = path.to_path_buf();
                     if reload::should_refresh_for_change(&path, file.change_kind) {
+                        tracing::warn!("fetch-fiel_change");
                         self.fetch_workspaces_queue
                             .request_op(format!("vfs file change: {}", path.display()));
                     }
@@ -201,6 +202,7 @@ impl GlobalState {
                 }
             }

+            // Clear native diagnostics when their file gets deleted
             if !file.exists() {
                 self.diagnostics.clear_native_for(file.file_id);
             }

View file

@@ -1094,7 +1094,9 @@ pub(crate) fn handle_code_action(
     }

     // Fixes from `cargo check`.
-    for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() {
+    for fix in
+        snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
+    {
         // FIXME: this mapping is awkward and shouldn't exist. Refactor
         // `snap.check_fixes` to not convert to LSP prematurely.
         let intersect_fix_range = fix

View file

@@ -2,13 +2,15 @@
 //! requests/replies and notifications back to the client.
 use std::{
     fmt,
+    ops::Deref,
     sync::Arc,
     time::{Duration, Instant},
 };

 use always_assert::always;
 use crossbeam_channel::{select, Receiver};
-use ide_db::base_db::{SourceDatabaseExt, VfsPath};
+use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
+use itertools::Itertools;
 use lsp_server::{Connection, Notification, Request};
 use lsp_types::notification::Notification as _;
 use vfs::{ChangeKind, FileId};
@@ -371,7 +373,7 @@ impl GlobalState {
                 let _p = profile::span("GlobalState::handle_event/flycheck");
                 loop {
                     match task {
-                        flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => {
+                        flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
                             let snap = self.snapshot();
                             let diagnostics =
                                 crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
@@ -383,6 +385,7 @@ impl GlobalState {
                             for diag in diagnostics {
                                 match url_to_file_id(&self.vfs.read().0, &diag.url) {
                                     Ok(file_id) => self.diagnostics.add_check_diagnostic(
+                                        id,
                                         file_id,
                                         diag.diagnostic,
                                         diag.fix,
@@ -400,7 +403,7 @@ impl GlobalState {
                     flycheck::Message::Progress { id, progress } => {
                         let (state, message) = match progress {
                             flycheck::Progress::DidStart => {
-                                self.diagnostics.clear_check();
+                                self.diagnostics.clear_check(id);
                                 (Progress::Begin, None)
                             }
                             flycheck::Progress::DidCheckCrate(target) => {
@@ -444,7 +447,10 @@ impl GlobalState {
         let memdocs_added_or_removed = self.mem_docs.take_changes();

         if self.is_quiescent() {
-            if !was_quiescent {
+            if !was_quiescent
+                && !self.fetch_workspaces_queue.op_requested()
+                && !self.fetch_build_data_queue.op_requested()
+            {
                 for flycheck in &self.flycheck {
                     flycheck.update();
                 }
@@ -734,13 +740,76 @@ impl GlobalState {
                 Ok(())
             })?
             .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
-                for flycheck in &this.flycheck {
-                    flycheck.update();
-                }
-                if let Ok(abs_path) = from_proto::abs_path(&params.text_document.uri) {
-                    if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
-                        this.fetch_workspaces_queue
-                            .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+                let mut updated = false;
+                if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
+                    let (vfs, _) = &*this.vfs.read();
+                    if let Some(file_id) = vfs.file_id(&vfs_path) {
+                        let analysis = this.analysis_host.analysis();
+                        // Crates containing or depending on the saved file
+                        let crate_ids: Vec<_> = analysis
+                            .crate_for(file_id)?
+                            .into_iter()
+                            .flat_map(|id| {
+                                this.analysis_host
+                                    .raw_database()
+                                    .crate_graph()
+                                    .transitive_rev_deps(id)
+                            })
+                            .sorted()
+                            .unique()
+                            .collect();
+                        let crate_root_paths: Vec<_> = crate_ids
+                            .iter()
+                            .filter_map(|&crate_id| {
+                                analysis
+                                    .crate_root(crate_id)
+                                    .map(|file_id| {
+                                        vfs.file_path(file_id).as_path().map(ToOwned::to_owned)
+                                    })
+                                    .transpose()
+                            })
+                            .collect::<ide::Cancellable<_>>()?;
+                        let crate_root_paths: Vec<_> =
+                            crate_root_paths.iter().map(Deref::deref).collect();
+                        // Find all workspaces that have at least one target containing the saved file
+                        let workspace_ids =
+                            this.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
+                                project_model::ProjectWorkspace::Cargo { cargo, .. } => {
+                                    cargo.packages().any(|pkg| {
+                                        cargo[pkg].targets.iter().any(|&it| {
+                                            crate_root_paths.contains(&cargo[it].root.as_path())
+                                        })
+                                    })
+                                }
+                                project_model::ProjectWorkspace::Json { project, .. } => project
+                                    .crates()
+                                    .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
+                                project_model::ProjectWorkspace::DetachedFiles { .. } => false,
+                            });
+                        // Find and trigger corresponding flychecks
+                        for flycheck in &this.flycheck {
+                            for (id, _) in workspace_ids.clone() {
+                                if id == flycheck.id() {
+                                    updated = true;
+                                    flycheck.update();
+                                    continue;
+                                }
+                            }
+                        }
+                    }
+                    if let Some(abs_path) = vfs_path.as_path() {
+                        if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
+                            this.fetch_workspaces_queue
+                                .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+                        }
+                    }
+                }
+                if !updated {
+                    for flycheck in &this.flycheck {
+                        flycheck.update();
                     }
                 }
                 Ok(())
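
The `DidSaveTextDocument` handler above resolves the saved file to workspaces in three steps: find the crates containing the file, expand them to their transitive reverse dependencies, then match those crates' root files against each workspace's targets. A toy sketch of that resolution follows; the `CrateGraph` and `Workspace` types here are made up for illustration, while the real code goes through `Analysis::crate_for`, `CrateGraph::transitive_rev_deps` and `project_model::ProjectWorkspace`.

```rust
use std::collections::{HashMap, HashSet};

type CrateId = usize;
type FileId = u32;

/// Toy crate graph: which crates directly depend on which, plus each crate's root file.
struct CrateGraph {
    reverse_deps: HashMap<CrateId, Vec<CrateId>>, // crate -> crates that depend on it
    root_file: HashMap<CrateId, FileId>,
}

impl CrateGraph {
    /// The crate itself plus everything that (transitively) depends on it.
    fn transitive_rev_deps(&self, start: CrateId) -> HashSet<CrateId> {
        let mut seen = HashSet::from([start]);
        let mut stack = vec![start];
        while let Some(krate) = stack.pop() {
            for &dep in self.reverse_deps.get(&krate).into_iter().flatten() {
                if seen.insert(dep) {
                    stack.push(dep);
                }
            }
        }
        seen
    }
}

/// Toy workspace: just the set of root files of its targets.
struct Workspace {
    target_roots: HashSet<FileId>,
}

/// Workspaces that own at least one crate containing or depending on the saved file.
fn workspaces_for_saved_file(
    graph: &CrateGraph,
    workspaces: &[Workspace],
    crates_containing_file: &[CrateId],
) -> Vec<usize> {
    let crate_ids: HashSet<CrateId> = crates_containing_file
        .iter()
        .flat_map(|&id| graph.transitive_rev_deps(id))
        .collect();
    let crate_roots: HashSet<FileId> =
        crate_ids.iter().filter_map(|id| graph.root_file.get(id).copied()).collect();
    workspaces
        .iter()
        .enumerate()
        .filter(|(_, ws)| ws.target_roots.iter().any(|root| crate_roots.contains(root)))
        .map(|(id, _)| id)
        .collect()
}

fn main() {
    // Crate 0 (a library) is depended on by crate 1 (a binary in another workspace).
    let graph = CrateGraph {
        reverse_deps: HashMap::from([(0, vec![1])]),
        root_file: HashMap::from([(0, 10), (1, 20)]),
    };
    let workspaces = vec![
        Workspace { target_roots: HashSet::from([10]) },
        Workspace { target_roots: HashSet::from([20]) },
    ];
    // Saving a file in crate 0 should flycheck both workspaces.
    assert_eq!(workspaces_for_saved_file(&graph, &workspaces, &[0]), vec![0, 1]);
}
```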

View file

@@ -458,7 +458,7 @@ impl GlobalState {
             Some(it) => it,
             None => {
                 self.flycheck = Vec::new();
-                self.diagnostics.clear_check();
+                self.diagnostics.clear_check_all();
                 return;
             }
         };