mirror of
https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-11-23 12:46:43 +00:00
feat: introspect and show compilation statistics happening in the language server (#1958)
Some checks are pending
tinymist::auto_tag / auto-tag (push) Waiting to run
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / announce (push) Blocked by required conditions
tinymist::ci / build (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run
Some checks are pending
tinymist::auto_tag / auto-tag (push) Waiting to run
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / announce (push) Blocked by required conditions
tinymist::ci / build (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run
Adds the capability to introspect compilations happening in the language server, to help improve efficiency. I expect most compilations are caused by tracing for analyzing dynamic expressions, but I haven't really profiled a document yet. The introspection will help confirm or refute that expectation.
This commit is contained in:
parent
f13532964d
commit
7c00eba127
14 changed files with 229 additions and 201 deletions
|
|
@ -1,7 +1,6 @@
|
|||
//! Semantic static and dynamic analysis of the source code.
|
||||
|
||||
mod bib;
|
||||
|
||||
pub(crate) use bib::*;
|
||||
pub mod call;
|
||||
pub use call::*;
|
||||
|
|
@ -15,30 +14,30 @@ pub mod doc_highlight;
|
|||
pub use doc_highlight::*;
|
||||
pub mod link_expr;
|
||||
pub use link_expr::*;
|
||||
pub mod stats;
|
||||
pub use stats::*;
|
||||
pub mod definition;
|
||||
pub use definition::*;
|
||||
pub mod signature;
|
||||
pub use signature::*;
|
||||
pub mod semantic_tokens;
|
||||
pub use semantic_tokens::*;
|
||||
use tinymist_std::error::WithContextUntyped;
|
||||
mod post_tyck;
|
||||
mod tyck;
|
||||
pub(crate) use crate::ty::*;
|
||||
pub(crate) use post_tyck::*;
|
||||
pub(crate) use tyck::*;
|
||||
mod prelude;
|
||||
|
||||
mod global;
|
||||
mod post_tyck;
|
||||
mod prelude;
|
||||
mod tyck;
|
||||
|
||||
pub(crate) use crate::ty::*;
|
||||
pub use global::*;
|
||||
pub(crate) use post_tyck::*;
|
||||
pub(crate) use tinymist_analysis::stats::{AnalysisStats, QueryStatGuard};
|
||||
pub(crate) use tyck::*;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use ecow::eco_format;
|
||||
use lsp_types::Url;
|
||||
use tinymist_project::LspComputeGraph;
|
||||
use tinymist_std::error::WithContextUntyped;
|
||||
use tinymist_std::{Result, bail};
|
||||
use tinymist_world::{EntryReader, EntryState, TaskInputs};
|
||||
use typst::diag::{FileError, FileResult, StrResult};
|
||||
|
|
|
|||
|
|
@ -1115,13 +1115,7 @@ impl SharedContext {
|
|||
}
|
||||
|
||||
fn query_stat(&self, id: TypstFileId, query: &'static str) -> QueryStatGuard {
|
||||
let stats = &self.analysis.stats.query_stats;
|
||||
let entry = stats.entry(id).or_default();
|
||||
let entry = entry.entry(query).or_default();
|
||||
QueryStatGuard {
|
||||
bucket: entry.clone(),
|
||||
since: tinymist_std::time::Instant::now(),
|
||||
}
|
||||
self.analysis.stats.stat(id, query)
|
||||
}
|
||||
|
||||
/// Check on a module before really needing them. But we likely use them
|
||||
|
|
|
|||
|
|
@ -1,115 +0,0 @@
|
|||
//! Statistics about the analyzers
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use parking_lot::Mutex;
|
||||
use tinymist_std::hash::FxDashMap;
|
||||
use tinymist_std::time::Duration;
|
||||
use typst::syntax::FileId;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct QueryStatBucketData {
|
||||
pub query: u64,
|
||||
pub missing: u64,
|
||||
pub total: Duration,
|
||||
pub min: Duration,
|
||||
pub max: Duration,
|
||||
}
|
||||
|
||||
impl Default for QueryStatBucketData {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
query: 0,
|
||||
missing: 0,
|
||||
total: Duration::from_secs(0),
|
||||
min: Duration::from_secs(u64::MAX),
|
||||
max: Duration::from_secs(0),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Statistics about some query
|
||||
#[derive(Default, Clone)]
|
||||
pub(crate) struct QueryStatBucket {
|
||||
pub data: Arc<Mutex<QueryStatBucketData>>,
|
||||
}
|
||||
|
||||
pub(crate) struct QueryStatGuard {
|
||||
pub bucket: QueryStatBucket,
|
||||
pub since: tinymist_std::time::Instant,
|
||||
}
|
||||
|
||||
impl Drop for QueryStatGuard {
|
||||
fn drop(&mut self) {
|
||||
let elapsed = self.since.elapsed();
|
||||
let mut data = self.bucket.data.lock();
|
||||
data.query += 1;
|
||||
data.total += elapsed;
|
||||
data.min = data.min.min(elapsed);
|
||||
data.max = data.max.max(elapsed);
|
||||
}
|
||||
}
|
||||
|
||||
impl QueryStatGuard {
|
||||
pub(crate) fn miss(&self) {
|
||||
let mut data = self.bucket.data.lock();
|
||||
data.missing += 1;
|
||||
}
|
||||
}
|
||||
|
||||
/// Statistics about the analyzers
|
||||
#[derive(Default)]
|
||||
pub struct AnalysisStats {
|
||||
pub(crate) query_stats: FxDashMap<FileId, FxDashMap<&'static str, QueryStatBucket>>,
|
||||
}
|
||||
|
||||
impl AnalysisStats {
|
||||
/// Report the statistics of the analysis.
|
||||
pub fn report(&self) -> String {
|
||||
let stats = &self.query_stats;
|
||||
let mut data = Vec::new();
|
||||
for refs in stats.iter() {
|
||||
let id = refs.key();
|
||||
let queries = refs.value();
|
||||
for refs2 in queries.iter() {
|
||||
let query = refs2.key();
|
||||
let bucket = refs2.value().data.lock().clone();
|
||||
let name = format!("{id:?}:{query}").replace('\\', "/");
|
||||
data.push((name, bucket));
|
||||
}
|
||||
}
|
||||
|
||||
// sort by query duration
|
||||
data.sort_by(|x, y| y.1.max.cmp(&x.1.max));
|
||||
|
||||
// format to html
|
||||
|
||||
let mut html = String::new();
|
||||
html.push_str(r#"<div>
|
||||
<style>
|
||||
table.analysis-stats { width: 100%; border-collapse: collapse; }
|
||||
table.analysis-stats th, table.analysis-stats td { border: 1px solid black; padding: 8px; text-align: center; }
|
||||
table.analysis-stats th.name-column, table.analysis-stats td.name-column { text-align: left; }
|
||||
table.analysis-stats tr:nth-child(odd) { background-color: rgba(242, 242, 242, 0.8); }
|
||||
@media (prefers-color-scheme: dark) {
|
||||
table.analysis-stats tr:nth-child(odd) { background-color: rgba(50, 50, 50, 0.8); }
|
||||
}
|
||||
</style>
|
||||
<table class="analysis-stats"><tr><th class="query-column">Query</th><th>Count</th><th>Missing</th><th>Total</th><th>Min</th><th>Max</th></tr>"#);
|
||||
|
||||
for (name, bucket) in data {
|
||||
html.push_str("<tr>");
|
||||
html.push_str(&format!(r#"<td class="query-column">{name}</td>"#));
|
||||
html.push_str(&format!("<td>{}</td>", bucket.query));
|
||||
html.push_str(&format!("<td>{}</td>", bucket.missing));
|
||||
html.push_str(&format!("<td>{:?}</td>", bucket.total));
|
||||
html.push_str(&format!("<td>{:?}</td>", bucket.min));
|
||||
html.push_str(&format!("<td>{:?}</td>", bucket.max));
|
||||
html.push_str("</tr>");
|
||||
}
|
||||
html.push_str("</table>");
|
||||
html.push_str("</div>");
|
||||
|
||||
html
|
||||
}
|
||||
}
|
||||
|
|
@ -127,6 +127,7 @@ mod tests {
|
|||
source: &Source,
|
||||
request_range: &LspRange,
|
||||
) -> CodeActionContext {
|
||||
// todo: reuse world compute graph APIs.
|
||||
let Warned {
|
||||
output,
|
||||
warnings: compiler_warnings,
|
||||
|
|
@ -144,8 +145,8 @@ mod tests {
|
|||
CodeActionContext {
|
||||
// The filtering here matches the LSP specification and VS Code behavior;
|
||||
// see https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#codeActionContext:
|
||||
// `diagnostics`: An array of diagnostics known on the client side overlapping the range
|
||||
// provided to the textDocument/codeAction request [...]
|
||||
// `diagnostics`: An array of diagnostics known on the client side overlapping the
|
||||
// range provided to the textDocument/codeAction request [...]
|
||||
diagnostics: diagnostics
|
||||
.filter(|diag| ranges_overlap(&diag.range, request_range))
|
||||
.collect(),
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@ pub mod index;
|
|||
pub mod package;
|
||||
pub mod syntax;
|
||||
pub mod testing;
|
||||
pub use tinymist_analysis::{ty, upstream};
|
||||
pub use tinymist_analysis::{stats::GLOBAL_STATS, ty, upstream};
|
||||
|
||||
/// The physical position in a document.
|
||||
pub type FramePosition = typst::layout::Position;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
use std::{collections::BTreeMap, ops::Deref, sync::LazyLock};
|
||||
|
||||
use ecow::eco_format;
|
||||
use tinymist_analysis::stats::GLOBAL_STATS;
|
||||
use typst::foundations::{IntoValue, Module, Str, Type};
|
||||
|
||||
use crate::{StrRef, adt::interner::Interned};
|
||||
|
|
@ -20,6 +21,8 @@ pub(crate) fn do_compute_docstring(
|
|||
docs: String,
|
||||
kind: DefKind,
|
||||
) -> Option<DocString> {
|
||||
let _guard = GLOBAL_STATS.stat(fid, "compute_docstring");
|
||||
|
||||
let checker = DocsChecker {
|
||||
fid,
|
||||
ctx,
|
||||
|
|
|
|||
|
|
@ -460,19 +460,6 @@ pub(crate) fn file_path_(uri: &lsp_types::Url) -> String {
|
|||
unix_slash(&rel_path)
|
||||
}
|
||||
|
||||
pub struct HashRepr<T>(pub T);
|
||||
|
||||
// sha256
|
||||
impl fmt::Display for HashRepr<JsonRepr> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
use sha2::{Digest, Sha256};
|
||||
|
||||
let res = self.0.to_string();
|
||||
let hash = Sha256::digest(res).to_vec();
|
||||
write!(f, "sha256:{}", hex::encode(hash))
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension methods for `Regex` that operate on `Cow<str>` instead of `&str`.
|
||||
pub trait RegexCowExt {
|
||||
/// [`Regex::replace_all`], but taking text as `Cow<str>` instead of `&str`.
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue