Mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-07-24 13:13:43 +00:00)
dev: collect server information for summary (#162)
* dev: collect server information for summary
* dev: humanize font variant to show
* fix: let focus state correct
This commit is contained in: parent 6722b2501f, commit 703c8b4c1d
13 changed files with 467 additions and 46 deletions
@@ -15,8 +15,7 @@ use typst::syntax::Source;
 use super::SearchCtx;
 use crate::syntax::{
-    find_source_by_import_path, get_lexical_hierarchy, IdentRef, LexicalHierarchy, LexicalKind,
-    LexicalScopeKind, LexicalVarKind, ModSrc,
+    find_source_by_import_path, IdentRef, LexicalHierarchy, LexicalKind, LexicalVarKind, ModSrc,
 };
 use crate::{adt::snapshot_map::SnapshotMap, syntax::LexicalModKind};
@@ -57,6 +56,23 @@ pub struct DefUseInfo {
 }
 
 impl DefUseInfo {
+    /// Get the estimated memory usage of the def-use information.
+    pub fn estimated_memory(&self) -> usize {
+        std::mem::size_of::<Self>()
+            + self.ident_defs.capacity()
+                * (std::mem::size_of::<IdentDef>() + std::mem::size_of::<IdentRef>() + 32)
+            + self.external_refs.capacity()
+                * (std::mem::size_of::<(TypstFileId, Option<String>)>()
+                    + std::mem::size_of::<Vec<(Option<DefId>, IdentRef)>>()
+                    + 32)
+            + self.ident_refs.capacity()
+                * (std::mem::size_of::<IdentRef>() + std::mem::size_of::<DefId>() + 32)
+            + (self.undefined_refs.capacity() * std::mem::size_of::<IdentRef>() + 32)
+            + (self.exports_refs.capacity() * std::mem::size_of::<DefId>() + 32)
+            + self.exports_defs.capacity()
+                * (std::mem::size_of::<String>() + std::mem::size_of::<DefId>() + 32)
+    }
+
     /// Get the definition id of a symbol by its name reference.
     pub fn get_ref(&self, ident: &IdentRef) -> Option<DefId> {
         self.ident_refs.get(ident).copied()
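Note: the `estimated_memory` additions in this commit all follow the same heuristic: take each container's reserved capacity, multiply it by the size of one entry, and add a flat 32-byte allowance for per-entry bookkeeping. A minimal sketch of that pattern, using a hypothetical `Example` type rather than the real `DefUseInfo` fields:

```rust
use std::collections::HashMap;

/// `Example` and its `labels` field are hypothetical; the point is the formula:
/// reserved slots x size of one (key, value) pair, plus a flat 32-byte
/// allowance per slot for allocator and hash-table bookkeeping.
struct Example {
    labels: HashMap<String, u64>,
}

impl Example {
    fn estimated_memory(&self) -> usize {
        std::mem::size_of::<Self>()
            + self.labels.capacity()
                * (std::mem::size_of::<String>() + std::mem::size_of::<u64>() + 32)
    }
}

fn main() {
    let example = Example {
        labels: HashMap::with_capacity(16),
    };
    println!("~{} bytes reserved", example.estimated_memory());
}
```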
@@ -112,7 +128,7 @@ pub(super) fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<A
         return None;
     }
 
-    let e = get_lexical_hierarchy(source, LexicalScopeKind::DefUse)?;
+    let e = ctx.ctx.def_use_lexical_hierarchy(source)?;
 
     let mut collector = DefUseCollector {
         ctx,
@@ -1,9 +1,11 @@
 use std::{
     collections::{HashMap, HashSet},
+    hash::Hash,
     path::{Path, PathBuf},
     sync::Arc,
 };
 
+use ecow::EcoVec;
 use once_cell::sync::OnceCell;
 use reflexo::{cow_mut::CowMut, debug_loc::DataSource, ImmutPath};
 use typst::syntax::FileId as TypstFileId;
@@ -17,7 +19,9 @@ use typst::{
 use super::{get_def_use_inner, DefUseInfo};
 use crate::{
     lsp_to_typst,
-    syntax::{construct_module_dependencies, scan_workspace_files, ModuleDependency},
+    syntax::{
+        construct_module_dependencies, scan_workspace_files, LexicalHierarchy, ModuleDependency,
+    },
     typst_to_lsp, LspPosition, LspRange, PositionEncoding, TypstRange,
 };
@@ -59,6 +63,102 @@ pub struct Analysis {
     pub root: ImmutPath,
     /// The position encoding for the workspace.
     pub position_encoding: PositionEncoding,
+    /// The global caches for analysis.
+    pub caches: AnalysisGlobalCaches,
 }
 
+impl Analysis {
+    /// Get estimated memory usage of the analysis data.
+    pub fn estimated_memory(&self) -> usize {
+        self.caches.modules.capacity() * 32
+            + self
+                .caches
+                .modules
+                .values()
+                .map(|v| {
+                    v.def_use_lexical_heirarchy
+                        .output
+                        .as_ref()
+                        .map_or(0, |e| e.iter().map(|e| e.estimated_memory()).sum())
+                })
+                .sum::<usize>()
+    }
+}
+
+struct ComputingNode<Inputs, Output> {
+    name: &'static str,
+    inputs: Option<Inputs>,
+    output: Option<Output>,
+}
+
+pub(crate) trait ComputeDebug {
+    fn compute_debug_repr(&self) -> impl std::fmt::Debug;
+}
+
+impl ComputeDebug for Source {
+    fn compute_debug_repr(&self) -> impl std::fmt::Debug {
+        self.id()
+    }
+}
+
+impl<Inputs, Output> ComputingNode<Inputs, Output> {
+    fn new(name: &'static str) -> Self {
+        Self {
+            name,
+            inputs: None,
+            output: None,
+        }
+    }
+
+    fn compute(
+        &mut self,
+        inputs: Inputs,
+        compute: impl FnOnce(Option<Inputs>, Inputs) -> Option<Output>,
+    ) -> Option<Output>
+    where
+        Inputs: ComputeDebug + Hash + Clone,
+        Output: Clone,
+    {
+        match &self.inputs {
+            Some(s) if reflexo::hash::hash128(&inputs) == reflexo::hash::hash128(&s) => {
+                log::debug!(
+                    "{}({:?}): hit cache",
+                    self.name,
+                    inputs.compute_debug_repr()
+                );
+                self.output.clone()
+            }
+            _ => {
+                log::info!("{}({:?}): compute", self.name, inputs.compute_debug_repr());
+                let output = compute(self.inputs.clone(), inputs.clone());
+                self.output = output.clone();
+                self.inputs = Some(inputs);
+                output
+            }
+        }
+    }
+}
+
+/// A cache for module-level analysis results of a module.
+///
+/// You should not holds across requests, because source code may change.
+pub struct ModuleAnalysisGlobalCache {
+    def_use_lexical_heirarchy: ComputingNode<Source, EcoVec<LexicalHierarchy>>,
+}
+
+impl Default for ModuleAnalysisGlobalCache {
+    fn default() -> Self {
+        Self {
+            def_use_lexical_heirarchy: ComputingNode::new("def_use_lexical_heirarchy"),
+        }
+    }
+}
+
+/// A global (compiler server spanned) cache for all level of analysis results
+/// of a module.
+#[derive(Default)]
+pub struct AnalysisGlobalCaches {
+    modules: HashMap<TypstFileId, ModuleAnalysisGlobalCache>,
+}
+
 /// A cache for all level of analysis results of a module.
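Note: `ComputingNode` is a single-slot memoizer: it remembers the last input and output, and recomputes only when the input's hash changes. A rough sketch of the same idea, using the std `DefaultHasher` in place of `reflexo::hash::hash128` and a hypothetical `Memo` type:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Single-slot memoizer in the spirit of `ComputingNode`: keep only the last
/// input/output pair and recompute when the input's fingerprint changes.
struct Memo<I, O> {
    input: Option<I>,
    output: Option<O>,
}

fn fingerprint<T: Hash>(value: &T) -> u64 {
    let mut h = DefaultHasher::new();
    value.hash(&mut h);
    h.finish()
}

impl<I: Hash + Clone, O: Clone> Memo<I, O> {
    fn new() -> Self {
        Self { input: None, output: None }
    }

    fn compute(&mut self, input: I, f: impl FnOnce(&I) -> O) -> O {
        match &self.input {
            // Same fingerprint as last time: reuse the stored output.
            Some(prev) if fingerprint(prev) == fingerprint(&input) => {
                self.output.clone().expect("output stored with input")
            }
            // New or changed input: recompute and remember the result.
            _ => {
                let out = f(&input);
                self.input = Some(input);
                self.output = Some(out.clone());
                out
            }
        }
    }
}

fn main() {
    let mut node = Memo::new();
    let a = node.compute("source".to_string(), |s| s.len()); // computes
    let b = node.compute("source".to_string(), |s| s.len()); // cache hit
    assert_eq!(a, b);
}
```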
@@ -233,6 +333,21 @@ impl<'w> AnalysisContext<'w> {
     pub fn to_lsp_range(&self, position: TypstRange, src: &Source) -> LspRange {
         typst_to_lsp::range(position, src, self.analysis.position_encoding)
     }
+
+    pub(crate) fn def_use_lexical_hierarchy(
+        &mut self,
+        source: Source,
+    ) -> Option<EcoVec<LexicalHierarchy>> {
+        self.analysis
+            .caches
+            .modules
+            .entry(source.id())
+            .or_default()
+            .def_use_lexical_heirarchy
+            .compute(source, |_before, after| {
+                crate::syntax::get_lexical_hierarchy(after, crate::syntax::LexicalScopeKind::DefUse)
+            })
+    }
 }
 
 /// The context for searching in the workspace.
@@ -3,7 +3,7 @@ use std::{collections::HashMap, path::PathBuf};
 
 use reflexo::debug_loc::DataSource;
 use serde::{Deserialize, Serialize};
-use typst::text::Font;
+use typst::text::{Font, FontStretch, FontStyle, FontWeight};
 use typst::{
     layout::{Frame, FrameItem},
     model::Document,
@@ -39,6 +39,12 @@ pub struct DocumentFontInfo {
     /// The display name of the font, which is computed by this crate and
     /// unnecessary from any fields of the font file.
     pub name: String,
+    /// The style of the font.
+    pub style: FontStyle,
+    /// The weight of the font.
+    pub weight: FontWeight,
+    /// The stretch of the font.
+    pub stretch: FontStretch,
     /// The PostScript name of the font.
     pub postscript_name: Option<String>,
     /// The Family in font file.
@@ -165,12 +171,16 @@ impl<'a, 'w> DocumentMetricsWorker<'a, 'w> {
             .into_iter()
             .map(|(font, uses)| {
                 let extra = self.ctx.resources.font_info(font.clone());
+                let info = &font.info();
                 DocumentFontInfo {
-                    name: format!("{} ({:?})", font.info().family, font.info().variant),
+                    name: info.family.clone(),
+                    style: info.variant.style,
+                    weight: info.variant.weight,
+                    stretch: info.variant.stretch,
                     postscript_name: font.find_name(POST_SCRIPT_NAME),
                     full_name: font.find_name(FULL_NAME),
                     family: font.find_name(FAMILY),
-                    fixed_family: Some(font.info().family.clone()),
+                    fixed_family: Some(info.family.clone()),
                     source: extra.map(|e| self.internal_source(e)),
                     index: Some(font.index()),
                     uses_scale: Some(uses),
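Note: the "humanize font variant" part of the commit replaces the old debug-formatted `name` (family plus `{:?}` of the variant) with separate `style`, `weight`, and `stretch` fields. A small sketch of the before/after shape, with a hypothetical `VariantSketch` standing in for `typst::text::FontVariant`:

```rust
/// `VariantSketch` is a hypothetical stand-in for `typst::text::FontVariant`,
/// used only to illustrate the change in the reported shape.
#[derive(Debug)]
struct VariantSketch {
    style: &'static str,
    weight: u16,
    stretch: f32,
}

fn main() {
    let family = "Libertinus Serif";
    let variant = VariantSketch { style: "normal", weight: 400, stretch: 1.0 };

    // Before: one string baked together with the Debug representation.
    let before = format!("{} ({:?})", family, variant);

    // After: structured fields that a client can format however it likes.
    let (name, style, weight, stretch) =
        (family.to_owned(), variant.style, variant.weight, variant.stretch);

    println!("{before}");
    println!("{name} / {style} / {weight} / {stretch}");
}
```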
@@ -113,6 +113,7 @@ pub trait StatefulRequest {
 mod polymorphic {
     use lsp_types::TextEdit;
     use serde::{Deserialize, Serialize};
+    use typst::foundations::Dict;
 
     use super::prelude::*;
     use super::*;
@@ -161,6 +162,19 @@ mod polymorphic {
         pub path: PathBuf,
     }
 
+    #[derive(Debug, Clone)]
+    pub struct ServerInfoRequest {}
+
+    #[derive(Debug, Clone, Serialize, Deserialize)]
+    pub struct ServerInfoReponse {
+        pub root: Option<PathBuf>,
+        #[serde(rename = "fontPaths")]
+        pub font_paths: Vec<PathBuf>,
+        pub inputs: Dict,
+        #[serde(rename = "estimatedMemoryUsage")]
+        pub estimated_memory_usage: HashMap<String, usize>,
+    }
+
     #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
     pub enum FoldRequestFeature {
         PinnedFirst,
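Note: `ServerInfoReponse` uses `#[serde(rename = ...)]` so the summary client receives camelCase keys. A self-contained sketch of what the serialized payload looks like; the `inputs` dictionary is replaced by a plain string map here to avoid depending on `typst::foundations::Dict`, and `ServerInfoSketch` is a hypothetical stand-in:

```rust
use std::collections::HashMap;
use std::path::PathBuf;

use serde::Serialize;

#[derive(Serialize)]
struct ServerInfoSketch {
    root: Option<PathBuf>,
    #[serde(rename = "fontPaths")]
    font_paths: Vec<PathBuf>,
    inputs: HashMap<String, String>,
    #[serde(rename = "estimatedMemoryUsage")]
    estimated_memory_usage: HashMap<String, usize>,
}

fn main() {
    let info = ServerInfoSketch {
        root: Some(PathBuf::from("/workspace")),
        font_paths: vec![],
        inputs: HashMap::new(),
        estimated_memory_usage: HashMap::from([("vfs".to_owned(), 1024)]),
    };
    // Serializes with camelCase keys, e.g. {"root":"/workspace","fontPaths":[],...}
    println!("{}", serde_json::to_string(&info).unwrap());
}
```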
@@ -192,6 +206,7 @@ mod polymorphic {
         SelectionRange(SelectionRangeRequest),
 
         DocumentMetrics(DocumentMetricsRequest),
+        ServerInfo(ServerInfoRequest),
     }
 
     impl CompilerQueryRequest {
@@ -219,6 +234,7 @@ mod polymorphic {
             CompilerQueryRequest::SelectionRange(..) => ContextFreeUnique,
 
             CompilerQueryRequest::DocumentMetrics(..) => PinnedFirst,
+            CompilerQueryRequest::ServerInfo(..) => Mergable,
         }
     }
@@ -245,6 +261,7 @@ mod polymorphic {
             CompilerQueryRequest::SelectionRange(req) => &req.path,
 
             CompilerQueryRequest::DocumentMetrics(req) => &req.path,
+            CompilerQueryRequest::ServerInfo(..) => return None,
         })
     }
 }
@@ -272,6 +289,7 @@ mod polymorphic {
         SelectionRange(Option<Vec<SelectionRange>>),
 
         DocumentMetrics(Option<DocumentMetricsResponse>),
+        ServerInfo(Option<HashMap<String, ServerInfoReponse>>),
     }
 }
@@ -233,6 +233,18 @@ pub(crate) struct LexicalHierarchy {
     pub children: Option<LazyHash<EcoVec<LexicalHierarchy>>>,
 }
 
+impl LexicalHierarchy {
+    pub fn estimated_memory(&self) -> usize {
+        std::mem::size_of::<Self>()
+            + std::mem::size_of::<LexicalInfo>()
+            + self.info.name.len()
+            + self
+                .children
+                .as_ref()
+                .map_or(0, |c| c.iter().map(|e| e.estimated_memory()).sum())
+    }
+}
+
 impl Serialize for LexicalHierarchy {
     fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
         use serde::ser::SerializeStruct;
@@ -74,6 +74,7 @@ pub fn snapshot_testing(name: &str, f: &impl Fn(&mut AnalysisContext, PathBuf))
         Analysis {
             root,
             position_encoding: PositionEncoding::Utf16,
+            caches: Default::default(),
         },
     );
     ctx.test_files(|| paths);
@@ -6,6 +6,9 @@ pub mod render;
 pub mod typ_client;
 pub mod typ_server;
 
+use std::path::Path;
+
+use tinymist_query::analysis::Analysis;
 use tinymist_query::ExportKind;
 use tokio::sync::{broadcast, watch};
 use typst::util::Deferred;
@@ -107,7 +110,11 @@ impl CompileServer {
         let driver = CompileDriver {
             inner: driver,
             handler,
-            position_encoding,
+            analysis: Analysis {
+                position_encoding,
+                root: Path::new("").into(),
+                caches: Default::default(),
+            },
         };
 
         // Create the actor
@@ -26,6 +26,8 @@
 //! information to other actors.
 
 use std::{
+    collections::HashMap,
+    ops::Deref,
     path::{Path, PathBuf},
     sync::Arc,
 };
@@ -35,7 +37,7 @@ use log::{error, info, trace};
 use parking_lot::Mutex;
 use tinymist_query::{
     analysis::{Analysis, AnalysisContext, AnaylsisResources},
-    DiagnosticsMap, ExportKind, PositionEncoding, VersionedDocument,
+    DiagnosticsMap, ExportKind, ServerInfoReponse, VersionedDocument,
 };
 use tokio::sync::{broadcast, mpsc, oneshot, watch};
 use typst::{
@@ -143,7 +145,7 @@ pub struct CompileDriver {
     pub(super) inner: CompileDriverInner,
     #[allow(unused)]
     pub(super) handler: CompileHandler,
-    pub(super) position_encoding: PositionEncoding,
+    pub(super) analysis: Analysis,
 }
 
 impl CompileMiddleware for CompileDriver {
@@ -208,7 +210,6 @@ impl CompileDriver {
         &mut self,
         f: impl FnOnce(&mut AnalysisContext<'_>) -> T,
     ) -> anyhow::Result<T> {
-        let enc = self.position_encoding;
         let w = self.inner.world_mut();
 
         let Some(main) = w.main_id() else {
@@ -252,19 +253,15 @@ impl CompileDriver {
         }
 
         let w = WrapWorld(w);
-        Ok(f(&mut AnalysisContext::new(
-            &w,
-            Analysis {
-                root,
-                position_encoding: enc,
-            },
-        )))
+
+        self.analysis.root = root;
+        Ok(f(&mut AnalysisContext::new_borrow(&w, &mut self.analysis)))
     }
 }
 
 pub struct CompileClientActor {
-    diag_group: String,
-    config: CompileConfig,
+    pub diag_group: String,
+    pub config: CompileConfig,
     entry: Arc<Mutex<EntryState>>,
     inner: Deferred<CompileClient>,
     render_tx: broadcast::Sender<RenderActorRequest>,
@@ -419,6 +416,29 @@ impl CompileClientActor {
         }))
         .unwrap();
     }
+
+    pub fn collect_server_info(&self) -> anyhow::Result<HashMap<String, ServerInfoReponse>> {
+        let dg = self.diag_group.clone();
+        let res = self.steal(move |c| {
+            let cc = &c.compiler.compiler;
+
+            let info = ServerInfoReponse {
+                root: cc.world().entry.root().map(|e| e.as_ref().to_owned()),
+                // todo: font paths
+                // font_paths: cc.world().font_resolver.inner,
+                font_paths: vec![],
+                inputs: cc.world().inputs.as_ref().deref().clone(),
+                estimated_memory_usage: HashMap::from_iter([
+                    ("vfs".to_owned(), { cc.world().vfs.memory_usage() }),
+                    ("analysis".to_owned(), cc.analysis.estimated_memory()),
+                ]),
+            };
+
+            HashMap::from_iter([(dg, info)])
+        })?;
+
+        Ok(res)
+    }
 }
 
 impl CompileClientActor {
@@ -602,6 +602,7 @@ impl TypstLanguageServer {
             redirected_command!("tinymist.doInitTemplate", Self::init_template),
             redirected_command!("tinymist.doGetTemplateEntry", Self::do_get_template_entry),
             redirected_command!("tinymist.getDocumentMetrics", Self::get_document_metrics),
+            redirected_command!("tinymist.getServerInfo", Self::get_server_info),
            // For Documentations
             redirected_command!("tinymist.getResources", Self::get_resources),
         ])
@@ -650,8 +651,7 @@ impl TypstLanguageServer {
         Ok(res)
     }
 
-    /// Export the current document as some format. The client is responsible
-    /// for passing the correct absolute path of typst document.
+    /// Get the metrics of the document.
     pub fn get_document_metrics(&self, arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
         let path = parse_path(arguments.first())?.as_ref().to_owned();
@@ -662,6 +662,16 @@ impl TypstLanguageServer {
         Ok(res)
     }
 
+    /// Get the server info.
+    pub fn get_server_info(&self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
+        let res = run_query!(self.ServerInfo())?;
+
+        let res = serde_json::to_value(res)
+            .map_err(|e| internal_error(format!("Cannot serialize response {e}")))?;
+
+        Ok(res)
+    }
+
     /// Clear all cached resources.
     ///
     /// # Errors
@@ -151,9 +151,9 @@ impl TypstLanguageServer {
 
 #[macro_export]
 macro_rules! run_query {
-    ($self: ident.$query: ident ($($arg_key:ident),+ $(,)?)) => {{
+    ($self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
         use tinymist_query::*;
-        let req = paste! { [<$query Request>] { $($arg_key),+ } };
+        let req = paste! { [<$query Request>] { $($arg_key),* } };
         $self
             .query(CompilerQueryRequest::$query(req.clone()))
             .map_err(|err| {
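Note: the `run_query!` change swaps the `+` repetition for `*`, so the macro also accepts an invocation with zero request fields, which is what `run_query!(self.ServerInfo())` above needs. A tiny sketch of the difference with a hypothetical `arg_names!` macro:

```rust
/// With `+` the repetition must match at least once, so an empty argument list
/// is rejected; with `*` it may match zero times. `arg_names!` exists only to
/// show the difference.
macro_rules! arg_names {
    ($($arg_key:ident),* $(,)?) => {
        vec![$(stringify!($arg_key)),*]
    };
}

fn main() {
    let some: Vec<&str> = arg_names!(path, position);
    let none: Vec<&str> = arg_names!(); // a `+` repetition would not accept this
    println!("{some:?} {none:?}");
}
```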
@@ -268,6 +268,10 @@ impl TypstLanguageServer {
             Symbol(req) => query_world!(client, Symbol, req),
 
             DocumentMetrics(req) => query_state!(client, DocumentMetrics, req),
+            ServerInfo(_) => {
+                let res = client.collect_server_info()?;
+                Ok(CompilerQueryResponse::ServerInfo(Some(res)))
+            }
 
             FoldingRange(..)
             | SelectionRange(..)