Removed LatestDocument and replaced it with a OnceLock

Signed-off-by: faldor20 <eli.jambu@yahoo.com>

Removed unused imports

Signed-off-by: faldor20 <eli.jambu@yahoo.com>
Eli Dowling 2023-12-21 21:29:30 +10:00 committed by faldor20
parent 8a6855f29c
commit c081bb7016
No known key found for this signature in database
GPG key ID: F2216079B890CD57
3 changed files with 124 additions and 119 deletions
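
For readers skimming the diff, here is a minimal, self-contained sketch of the pattern this commit adopts. The names mirror the types in the diff, but the definitions are illustrative, not the actual roc_ls code: the pending analysis result becomes a OnceLock<Arc<AnalyzedDocument>> that starts empty, is filled exactly once when analysis completes, and is polled by readers, instead of a tokio::sync::RwLock<Option<...>> whose write guard had to be held open across the analysis.

use std::sync::{Arc, OnceLock};

#[derive(Debug, Clone)]
struct DocInfo {
    version: i32,
}

#[derive(Debug)]
struct AnalyzedDocument {
    doc_info: DocInfo,
}

#[derive(Debug)]
struct DocumentPair {
    info: DocInfo,
    latest_document: OnceLock<Arc<AnalyzedDocument>>,
}

impl DocumentPair {
    // A new edit arrives: keep the latest DocInfo, leave the analysis slot empty.
    fn pending(info: DocInfo) -> Self {
        Self {
            info,
            latest_document: OnceLock::new(),
        }
    }

    // Analysis finished: fill the slot exactly once (set fails if already filled).
    fn publish(&self, doc: Arc<AnalyzedDocument>) {
        let _ = self.latest_document.set(doc);
    }

    // Readers poll get(); None means the analysis has not landed yet.
    fn latest(&self) -> Option<Arc<AnalyzedDocument>> {
        self.latest_document.get().cloned()
    }
}

fn main() {
    let pair = DocumentPair::pending(DocInfo { version: 2 });
    assert_eq!(pair.info.version, 2);
    assert!(pair.latest().is_none()); // analysis still pending

    pair.publish(Arc::new(AnalyzedDocument {
        doc_info: DocInfo { version: 2 },
    }));
    assert_eq!(pair.latest().unwrap().doc_info.version, 2);
}

With this shape, requests that need the latest analysis poll the slot (in the diff below, latest_document_by_url retries for up to five seconds) rather than awaiting a write guard held by the analysis task.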

View file

@@ -1,11 +1,6 @@
 //! Traversals over the can ast.
-use std::process;
-use roc_module::{
-    ident::Lowercase,
-    symbol::{Interns, Symbol},
-};
+use roc_module::{ident::Lowercase, symbol::Symbol};
 use roc_region::all::{Loc, Position, Region};
 use roc_types::{subs::Variable, types::MemberImpl};

View file

@@ -1,7 +1,12 @@
-use log::{debug, info};
-use std::{collections::HashMap, future::Future, sync::Arc};
-use tokio::sync::{Mutex, MutexGuard, RwLock, RwLockWriteGuard};
+use log::{debug, info, trace, warn};
+use std::{
+    collections::HashMap,
+    sync::{Arc, OnceLock},
+};
+use tokio::sync::{Mutex, MutexGuard};
 use tower_lsp::lsp_types::{
     CompletionResponse, Diagnostic, GotoDefinitionResponse, Hover, Position, SemanticTokensResult,
     TextEdit, Url,
@@ -9,42 +14,26 @@ use tower_lsp::lsp_types::{
 use crate::analysis::{AnalyzedDocument, DocInfo};
-#[derive(Debug)]
-pub(crate) struct LatestDocument {
-    pub info: DocInfo,
-    analyzed: tokio::sync::RwLock<Option<Arc<AnalyzedDocument>>>,
-}
-impl LatestDocument {
-    pub(crate) async fn get_latest(&self) -> Arc<AnalyzedDocument> {
-        self.analyzed.read().await.as_ref().unwrap().clone()
-    }
-    pub(crate) fn get_lock(
-        &self,
-    ) -> impl Future<Output = RwLockWriteGuard<Option<Arc<AnalyzedDocument>>>> {
-        self.analyzed.write()
-    }
-    pub(crate) fn new(doc_info: DocInfo) -> LatestDocument {
-        let val = RwLock::new(None);
-        LatestDocument {
-            info: doc_info,
-            analyzed: val,
-        }
-    }
-    pub(crate) fn new_initialised(analyzed: Arc<AnalyzedDocument>) -> LatestDocument {
-        LatestDocument {
-            info: analyzed.doc_info.clone(),
-            analyzed: RwLock::new(Some(analyzed)),
-        }
-    }
-}
 #[derive(Debug)]
 pub(crate) struct DocumentPair {
-    latest_document: Arc<LatestDocument>,
+    info: DocInfo,
+    latest_document: OnceLock<Arc<AnalyzedDocument>>,
     last_good_document: Arc<AnalyzedDocument>,
 }
+impl DocumentPair {
+    pub(crate) fn new(
+        latest_doc: Arc<AnalyzedDocument>,
+        last_good_document: Arc<AnalyzedDocument>,
+    ) -> Self {
+        Self {
+            info: latest_doc.doc_info.clone(),
+            latest_document: OnceLock::from(latest_doc),
+            last_good_document,
+        }
+    }
+}
 #[derive(Debug, Default)]
 pub(crate) struct Registry {
     documents: Mutex<HashMap<Url, DocumentPair>>,
@@ -56,46 +45,33 @@ impl Registry {
             .lock()
             .await
             .get(&url)
-            .map(|x| x.latest_document.info.version)
+            .map(|x| x.info.version)
     }
     fn update_document<'a>(
         documents: &mut MutexGuard<'a, HashMap<Url, DocumentPair>>,
        document: Arc<AnalyzedDocument>,
     ) {
         let url = document.url().clone();
-        let latest_doc = Arc::new(LatestDocument::new_initialised(document.clone()));
         match documents.get_mut(&url) {
             Some(old_doc) => {
                 if document.type_checked() {
-                    *old_doc = DocumentPair {
-                        latest_document: latest_doc,
-                        last_good_document: document,
-                    };
+                    *old_doc = DocumentPair::new(document.clone(), document);
                 } else {
-                    *old_doc = DocumentPair {
-                        latest_document: latest_doc,
-                        last_good_document: old_doc.last_good_document.clone(),
-                    };
+                    debug!(
+                        "Document typechecking failed at version {:?}, not updating last_good_document",
+                        &document.doc_info.version
+                    );
+                    *old_doc = DocumentPair::new(document, old_doc.last_good_document.clone());
                 }
             }
             None => {
-                documents.insert(
-                    url.clone(),
-                    DocumentPair {
-                        latest_document: latest_doc,
-                        last_good_document: document,
-                    },
-                );
+                documents.insert(url.clone(), DocumentPair::new(document.clone(), document));
             }
         }
     }
-    pub async fn apply_changes<'a>(
-        &self,
-        analysed_docs: Vec<AnalyzedDocument>,
-        mut partial_writer: RwLockWriteGuard<'a, Option<Arc<AnalyzedDocument>>>,
-        updating_url: Url,
-    ) {
+    pub async fn apply_changes<'a>(&self, analysed_docs: Vec<AnalyzedDocument>, updating_url: Url) {
         let mut documents = self.documents.lock().await;
         debug!(
             "finised doc analysis for doc: {:?}",
@@ -106,13 +82,15 @@ impl Registry {
             let document = Arc::new(document);
             //Write the newly analysed document into the partial document that any request requiring the latest document will be waiting on
             if document.doc_info.url == updating_url {
-                *partial_writer = Some(document.clone());
+                documents
+                    .get_mut(&updating_url)
+                    .map(|a| a.latest_document.set(document.clone()).unwrap());
             }
             Registry::update_document(&mut documents, document);
         }
     }
-    pub async fn apply_doc_info_changes(&self, url: Url, partial: Arc<LatestDocument>) {
+    pub async fn apply_doc_info_changes(&self, url: Url, info: DocInfo) {
         let mut documents_lock = self.documents.lock().await;
         let doc = documents_lock.get_mut(&url);
         match doc {
@@ -120,27 +98,41 @@ impl Registry {
                 debug!(
                     "set the docInfo for {:?} to version:{:?}",
                     url.as_str(),
-                    partial.info.version
+                    info.version
                 );
-                a.latest_document = partial;
+                *a = DocumentPair {
+                    info,
+                    last_good_document: a.last_good_document.clone(),
+                    latest_document: OnceLock::new(),
+                };
             }
             None => debug!("no existing docinfo for {:?} ", url.as_str()),
         }
     }
-    fn document_info_by_url(&self, url: &Url) -> Option<DocInfo> {
-        self.documents
-            .blocking_lock()
-            .get(url)
-            .map(|a| a.latest_document.info.clone())
+    async fn document_info_by_url(&self, url: &Url) -> Option<DocInfo> {
+        self.documents.lock().await.get(url).map(|a| a.info.clone())
     }
+    ///Tries to get the latest document from analysis.
+    ///Gives up and returns none after 5 seconds.
     async fn latest_document_by_url(&self, url: &Url) -> Option<Arc<AnalyzedDocument>> {
-        match self.documents.lock().await.get(url) {
-            Some(a) => Some(a.latest_document.get_latest().await),
-            None => None,
-        }
-    }
+        let start = std::time::Instant::now();
+        let duration = std::time::Duration::from_secs(5);
+        while start.elapsed() < duration {
+            match self.documents.lock().await.get(url) {
+                Some(a) => match a.latest_document.get() {
+                    Some(a) => return Some(a.clone()),
+                    None => (),
+                },
+                None => return None,
+            }
+        }
+        warn!("Timed out tring to get latest document");
+        None
+    }
     pub async fn diagnostics(&self, url: &Url) -> Vec<Diagnostic> {
         let Some( document) = self.latest_document_by_url(url).await else {
@@ -166,13 +158,13 @@ impl Registry {
         def_document.definition(symbol)
     }
-    pub fn formatting(&self, url: &Url) -> Option<Vec<TextEdit>> {
-        let document = self.document_info_by_url(url)?;
+    pub async fn formatting(&self, url: &Url) -> Option<Vec<TextEdit>> {
+        let document = self.document_info_by_url(url).await?;
         document.format()
     }
-    pub fn semantic_tokens(&self, url: &Url) -> Option<SemanticTokensResult> {
-        let document = self.document_info_by_url(url)?;
+    pub async fn semantic_tokens(&self, url: &Url) -> Option<SemanticTokensResult> {
+        let document = self.document_info_by_url(url).await?;
         document.semantic_tokens()
     }
     pub async fn completion_items(
@@ -180,9 +172,11 @@ impl Registry {
         url: &Url,
         position: Position,
     ) -> Option<CompletionResponse> {
+        trace!("starting completion ");
         let lock = self.documents.lock().await;
         let pair = lock.get(url)?;
-        let latest_doc_info = &pair.latest_document.info;
+        let latest_doc_info = &pair.info;
         info!(
             "using document version:{:?} for completion ",
             latest_doc_info.version

View file

@@ -1,16 +1,14 @@
 use analysis::HIGHLIGHT_TOKENS_LEGEND;
-use log::debug;
+use log::{debug, trace};
 use registry::Registry;
 use std::future::Future;
-use tokio::sync::RwLock;
-use std::sync::Arc;
+use std::time::Duration;
 use tower_lsp::jsonrpc::Result;
 use tower_lsp::lsp_types::*;
 use tower_lsp::{Client, LanguageServer, LspService, Server};
 use crate::analysis::global_analysis;
-use crate::registry::LatestDocument;
 mod analysis;
 mod convert;
@@ -18,12 +16,14 @@ mod registry;
 #[derive(Debug)]
 struct RocLs {
-    pub inner: Arc<Inner>,
+    pub inner: Inner,
     client: Client,
 }
+///This exists so we can test most of RocLs without anything LSP related
 #[derive(Debug)]
 struct Inner {
-    registry: RwLock<Registry>,
+    registry: Registry,
 }
 impl std::panic::RefUnwindSafe for RocLs {}
@@ -31,7 +31,7 @@ impl std::panic::RefUnwindSafe for RocLs {}
 impl RocLs {
     pub fn new(client: Client) -> Self {
         Self {
-            inner: Arc::new(Inner::new()),
+            inner: Inner::new(),
             client,
         }
     }
@@ -69,7 +69,7 @@ impl RocLs {
             full: Some(SemanticTokensFullOptions::Bool(true)),
         });
         let completion_provider = CompletionOptions {
-            resolve_provider: Some(true),
+            resolve_provider: Some(false),
             trigger_characters: Some(vec![".".to_string()]),
             //TODO: what is this?
             all_commit_characters: None,
@@ -99,7 +99,7 @@ impl RocLs {
         }
         debug!("applied_change getting and returning diagnostics");
-        let diagnostics = self.inner.registry().await.diagnostics(&fi).await;
+        let diagnostics = self.inner.registry.diagnostics(&fi).await;
         self.client
             .publish_diagnostics(fi, diagnostics, Some(version))
@@ -110,12 +110,12 @@ impl RocLs {
 impl Inner {
     pub fn new() -> Inner {
         Self {
-            registry: RwLock::new(Registry::default()),
+            registry: Registry::default(),
         }
     }
-    fn registry(&self) -> impl Future<Output = tokio::sync::RwLockReadGuard<Registry>> {
-        self.registry.read()
+    async fn registry(&self) -> &Registry {
+        &self.registry
     }
     async fn close(&self, _fi: Url) {
@@ -128,30 +128,41 @@ impl Inner {
         text: String,
         version: i32,
     ) -> std::result::Result<(), String> {
-        debug!("starting change");
-        let registry_write_lock = self.registry.write().await;
-        debug!("change aquired registry lock");
+        debug!("V{:?}:starting change", version);
+        //was write lock
+        debug!("V{:?}:change aquired registry lock", version);
         let (results, partial) = global_analysis(fi.clone(), text, version);
-        let partial_document = Arc::new(LatestDocument::new(partial.clone()));
-        //TODO check if allowing context switching here is an issue
-        let partial_doc_write_lock = partial_document.get_lock().await;
-        registry_write_lock
-            .apply_doc_info_changes(fi.clone(), partial_document.clone())
+        self.registry
+            .apply_doc_info_changes(fi.clone(), partial.clone())
             .await;
         //Now that we've got our new partial document written and we hold the exclusive write_handle to its analysis we can allow other tasks to access the registry and the doc_info inside this partial document
-        drop(registry_write_lock);
-        debug!("finished updating docinfo, starting analysis ",);
-        let inner_ref = self.clone();
+        debug!(
+            "V{:?}:finished updating docinfo, starting analysis ",
+            version
+        );
+        let inner_ref = self;
         let updating_result = async {
+            //This reduces wasted computation by waiting to see if a new change comes in, but does delay the final analysis. Ideally this would be replaced with cancelling the analysis when a new one comes in.
+            tokio::time::sleep(Duration::from_millis(100)).await;
+            let is_latest = inner_ref
+                .registry
+                .get_latest_version(fi)
+                .await
+                .map(|latest| latest == version)
+                .unwrap_or(true);
+            if !is_latest {
+                return Err("Not latest version skipping analysis".to_string());
+            }
            let results = match tokio::task::spawn_blocking(results).await {
                 Err(e) => return Err(format!("Document analysis failed. reason:{:?}", e)),
                 Ok(a) => a,
             };
-            let latest_version = inner_ref.registry.read().await.get_latest_version(fi).await;
+            let latest_version = inner_ref.registry.get_latest_version(fi).await;
             //if this version is not the latest another change must have come in and this analysis is useless
             //if there is no older version we can just proceed with the update
@@ -163,18 +174,16 @@ impl Inner {
                     ));
                 }
             }
-            debug!("finished updating documents returning ",);
-            inner_ref
-                .registry
-                .write()
-                .await
-                .apply_changes(results, partial_doc_write_lock, fi.clone())
-                .await;
+            debug!(
+                "V{:?}:finished document analysis applying changes ",
+                version
+            );
+            inner_ref.registry.apply_changes(results, fi.clone()).await;
             Ok(())
         }
         .await;
-        debug!("finished updating documents returning ",);
+        debug!("V{:?}:finished document change process", version);
         updating_result
     }
 }
@@ -209,6 +218,7 @@ impl LanguageServer for RocLs {
         let TextDocumentContentChangeEvent { text, .. } =
             params.content_changes.into_iter().next().unwrap();
+        trace!("got did_change");
         self.change(uri, text, version).await;
     }
@@ -233,8 +243,7 @@ impl LanguageServer for RocLs {
         panic_wrapper_async(|| async {
             self.inner
-                .registry()
-                .await
+                .registry
                 .hover(&text_document.uri, position)
                 .await
         })
@@ -272,7 +281,13 @@ impl LanguageServer for RocLs {
             work_done_progress_params: _,
         } = params;
-        panic_wrapper_async(|| async { self.inner.registry().await.formatting(&text_document.uri) })
+        panic_wrapper_async(|| async {
+            self.inner
+                .registry()
+                .await
+                .formatting(&text_document.uri)
+                .await
+        })
         .await
     }
@@ -291,15 +306,16 @@ impl LanguageServer for RocLs {
                 .registry()
                 .await
                 .semantic_tokens(&text_document.uri)
+                .await
         })
         .await
     }
     async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
         let doc = params.text_document_position;
+        trace!("got completion request");
         let res = panic_wrapper_async(|| async {
             self.inner
-                .registry()
-                .await
+                .registry
                 .completion_items(&doc.text_document.uri, doc.position)
                 .await
         })
@@ -320,7 +336,7 @@ where
     }
 }
-#[tokio::main]
+#[tokio::main(flavor = "multi_thread")]
 async fn main() {
     env_logger::init();
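
The change handler in the last file also gains a simple debounce: sleep briefly, then skip the expensive analysis if a newer document version has already been registered. A minimal, illustrative sketch of that flow follows; the single static version slot and the function names are assumptions made for the example, not the actual roc_ls API.

use std::sync::Mutex;
use std::time::Duration;

// Latest known version (one global slot keeps the sketch small; the real
// registry tracks a version per document URL).
static LATEST_VERSION: Mutex<Option<i32>> = Mutex::new(None);

async fn change(version: i32) -> Result<(), String> {
    // Register this edit as the newest one.
    *LATEST_VERSION.lock().unwrap() = Some(version);

    // Wait briefly so a rapid follow-up edit can supersede this one.
    tokio::time::sleep(Duration::from_millis(100)).await;

    // If a newer version arrived while we slept, skip the expensive analysis.
    let latest = *LATEST_VERSION.lock().unwrap();
    let is_latest = latest.map(|latest| latest == version).unwrap_or(true);
    if !is_latest {
        return Err("Not latest version skipping analysis".to_string());
    }

    // The real server would run the analysis here via tokio::task::spawn_blocking.
    Ok(())
}

#[tokio::main]
async fn main() {
    // Two edits in quick succession: only the second survives the debounce.
    let (first, second) = tokio::join!(change(1), change(2));
    println!("v1: {:?}, v2: {:?}", first, second);
}

Run under the tokio runtime, version 1 is reported as stale and only version 2 proceeds, which is the intended effect: rapid successive edits collapse into a single analysis of the newest text.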