feat: provide package view and local documentation (#596)

* feat: move featured components

* feat: provide package view and local documentation

* stage

* fix: compile errors caused by merged commits
This commit is contained in:
Myriad-Dreamin 2024-09-12 21:17:07 +08:00 committed by GitHub
parent b06447ffe2
commit 78f3893185
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
70 changed files with 3422 additions and 593 deletions

24
Cargo.lock generated
View file

@ -3945,6 +3945,23 @@ dependencies = [
"walkdir",
]
[[package]]
name = "tinymist-analysis"
version = "0.11.20"
dependencies = [
"base64 0.22.1",
"comemo 0.4.0",
"ecow 0.2.2",
"insta",
"log",
"regex",
"tinymist-world",
"toml 0.8.14",
"typst",
"typst-svg",
"typst-syntax 0.11.1",
]
[[package]]
name = "tinymist-assets"
version = "0.11.20"
@ -3960,6 +3977,7 @@ name = "tinymist-query"
version = "0.11.20"
dependencies = [
"anyhow",
"base64 0.22.1",
"biblatex",
"chrono",
"comemo 0.4.0",
@ -3991,9 +4009,12 @@ dependencies = [
"sha2",
"siphasher 1.0.1",
"strum 0.26.3",
"tinymist-analysis",
"tinymist-world",
"toml 0.8.14",
"triomphe",
"ttf-parser",
"typlite",
"typst",
"typst-assets",
"typst-shim",
@ -4300,7 +4321,7 @@ dependencies = [
"ecow 0.2.2",
"insta",
"regex",
"tinymist-query",
"tinymist-analysis",
"tinymist-world",
"typst",
"typst-svg",
@ -4312,6 +4333,7 @@ name = "typlite-cli"
version = "0.0.0"
dependencies = [
"clap",
"ecow 0.2.2",
"tinymist-world",
"typlite",
]

View file

@ -139,6 +139,7 @@ insta = { version = "1.39", features = ["glob"] }
typst-preview = { path = "./crates/typst-preview/" }
tinymist-assets = { version = "0.11.20" }
tinymist = { path = "./crates/tinymist/" }
tinymist-analysis = { path = "./crates/tinymist-analysis/" }
tinymist-query = { path = "./crates/tinymist-query/" }
tinymist-world = { path = "./crates/tinymist-world/" }
tinymist-render = { path = "./crates/tinymist-render/" }

View file

@ -12,3 +12,4 @@ path = "src/main.rs"
typlite.workspace = true
clap.workspace = true
tinymist-world.workspace = true
ecow.workspace = true

View file

@ -1,6 +1,7 @@
#![doc = include_str!("../README.md")]
extern crate clap;
extern crate ecow;
extern crate tinymist_world;
extern crate typlite;
@ -10,6 +11,8 @@ use std::{
};
use clap::Parser;
use ecow::{eco_format, EcoString};
use typlite::value::*;
use typlite::{CompileOnceArgs, Typlite};
/// Common arguments of compile, watch, and query.
@ -41,7 +44,8 @@ fn main() -> typlite::Result<()> {
let universe = args.compile.resolve().map_err(|e| format!("{e:?}"))?;
let world = universe.snapshot();
let conv = Typlite::new(Arc::new(world)).convert();
let converter = Typlite::new(Arc::new(world)).with_library(lib());
let conv = converter.convert();
match (conv, output) {
(Ok(conv), None) => println!("{}", conv),
@ -54,3 +58,25 @@ fn main() -> typlite::Result<()> {
Ok(())
}
/// Builds the typlite evaluation library, extended with the CLI's helpers.
fn lib() -> Arc<typlite::scopes::Scopes<Value>> {
    // Start from the default typlite library and register our extension.
    // todo: how to import this function correctly?
    let mut lib = typlite::library::library();
    lib.define("cross-link", cross_link as RawFunc);
    Arc::new(lib)
}
/// Evaluate a `cross-link`.
pub fn cross_link(mut args: Args) -> typlite::Result<Value> {
    let dest = get_pos_named!(args, dest: EcoString);
    let body = get_pos_named!(args, body: Content);

    // Rewrite the destination to its rendered `.html` counterpart.
    let target = std::path::Path::new(dest.as_str()).with_extension("html");
    Ok(Value::Content(eco_format!(
        "[{body}](https://myriad-dreamin.github.io/tinymist/{dest})",
        dest = target.to_string_lossy()
    )))
}

View file

@ -0,0 +1,30 @@
[package]
name = "tinymist-analysis"
description = "Typst Static Analyzers for Tinymist."
categories = ["compilers"]
keywords = ["language", "typst"]
authors.workspace = true
version.workspace = true
license.workspace = true
edition.workspace = true
homepage.workspace = true
repository.workspace = true
[dependencies]
typst-syntax.workspace = true
tinymist-world.workspace = true
ecow.workspace = true
comemo.workspace = true
log.workspace = true
toml.workspace = true
typst.workspace = true
typst-svg.workspace = true
base64.workspace = true
[dev-dependencies]
insta.workspace = true
regex.workspace = true
[lints]
workspace = true

View file

@ -1,3 +1,5 @@
//! Import resolution utilities.
use crate::prelude::*;
/// Resolve a file id by its import path.

View file

@ -0,0 +1,4 @@
//! Tinymist Analysis
pub mod import;
mod prelude;

View file

@ -0,0 +1,10 @@
pub use std::path::Path;
pub use typst::diag::FileError;
pub use typst::syntax::FileId as TypstFileId;
pub use typst::syntax::{
ast::{self},
package::{PackageManifest, PackageSpec},
Source, VirtualPath,
};
pub use typst::World;

View file

@ -51,6 +51,10 @@ dashmap.workspace = true
rustc-hash.workspace = true
hashbrown.workspace = true
triomphe.workspace = true
base64.workspace = true
typlite.workspace = true
tinymist-world.workspace = true
tinymist-analysis.workspace = true
[dev-dependencies]
once_cell.workspace = true

View file

@ -333,7 +333,7 @@ mod lexical_hierarchy_tests {
undefined_refs.sort();
let entry = DefUseEntry {
def: &IdentDef {
name: "<nil>".to_string(),
name: "<nil>".into(),
kind: LexicalKind::Block,
range: 0..0,
},

View file

@ -2,7 +2,7 @@
use std::{collections::HashMap, ops::Range, sync::Arc};
use ecow::EcoVec;
use ecow::{EcoString, EcoVec};
use reflexo::hash::hash128;
use super::{prelude::*, ImportInfo};
@ -18,7 +18,7 @@ enum Ns {
Value,
}
type ExternalRefMap = HashMap<(TypstFileId, Option<String>), Vec<(Option<DefId>, IdentRef)>>;
type ExternalRefMap = HashMap<(TypstFileId, Option<EcoString>), Vec<(Option<DefId>, IdentRef)>>;
/// The def-use information of a source file.
#[derive(Default)]
@ -29,11 +29,11 @@ pub struct DefUseInfo {
/// The references to defined symbols.
pub ident_refs: HashMap<IdentRef, DefId>,
/// The references of labels.
pub label_refs: HashMap<String, Vec<Range<usize>>>,
pub label_refs: HashMap<EcoString, Vec<Range<usize>>>,
/// The references to undefined symbols.
pub undefined_refs: Vec<IdentRef>,
exports_refs: Vec<DefId>,
exports_defs: HashMap<String, DefId>,
exports_defs: HashMap<EcoString, DefId>,
self_id: Option<TypstFileId>,
self_hash: u128,
@ -93,7 +93,7 @@ impl DefUseInfo {
pub fn get_external_refs(
&self,
ext_id: TypstFileId,
ext_name: Option<String>,
ext_name: Option<EcoString>,
) -> impl Iterator<Item = &(Option<DefId>, IdentRef)> {
self.external_refs
.get(&(ext_id, ext_name))
@ -160,8 +160,8 @@ pub(super) fn get_def_use_inner(
struct DefUseCollector<'a, 'b, 'w> {
ctx: &'a mut SearchCtx<'b, 'w>,
info: DefUseInfo,
label_scope: SnapshotMap<String, DefId>,
id_scope: SnapshotMap<String, DefId>,
label_scope: SnapshotMap<EcoString, DefId>,
id_scope: SnapshotMap<EcoString, DefId>,
import: Arc<ImportInfo>,
current_id: TypstFileId,
@ -317,7 +317,7 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
}
}
fn insert_extern(&mut self, name: String, range: Range<usize>, redefine_id: Option<DefId>) {
fn insert_extern(&mut self, name: EcoString, range: Range<usize>, redefine_id: Option<DefId>) {
if let Some(src) = &self.ext_src {
self.info.external_refs.insert(
(src.id(), Some(name.clone())),

View file

@ -174,6 +174,11 @@ pub trait AnalysisResources {
None
}
/// Get the local packages and their descriptions.
fn local_packages(&self) -> EcoVec<PackageSpec> {
EcoVec::new()
}
/// Resolve telescope image at the given position.
fn periscope_at(
&self,
@ -596,32 +601,7 @@ impl<'w> AnalysisContext<'w> {
}
pub(crate) fn with_vm<T>(&self, f: impl FnOnce(&mut typst::eval::Vm) -> T) -> T {
use comemo::Track;
use typst::engine::*;
use typst::eval::*;
use typst::foundations::*;
use typst::introspection::*;
let mut locator = Locator::default();
let introspector = Introspector::default();
let mut tracer = Tracer::new();
let engine = Engine {
world: self.world().track(),
route: Route::default(),
introspector: introspector.track(),
locator: &mut locator,
tracer: tracer.track_mut(),
};
let context = Context::none();
let mut vm = Vm::new(
engine,
context.track(),
Scopes::new(Some(self.world().library())),
Span::detached(),
);
f(&mut vm)
crate::upstream::with_vm(self.world(), f)
}
pub(crate) fn const_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
@ -651,17 +631,26 @@ impl<'w> AnalysisContext<'w> {
Some(analyze_dyn_signature(self, func.clone()).type_sig())
}
pub(crate) fn user_type_of_def(&mut self, source: &Source, def: &DefinitionLink) -> Option<Ty> {
let def_at = def.def_at.clone()?;
pub(crate) fn user_type_of_ident(
&mut self,
source: &Source,
def_fid: TypstFileId,
def_ident: &IdentRef,
) -> Option<Ty> {
let ty_chk = self.type_check(source.clone())?;
let def_use = self.def_use(source.clone())?;
let (def_id, _) = def_use.get_def(def_fid, def_ident)?;
ty_chk.type_of_def(def_id)
}
pub(crate) fn user_type_of_def(&mut self, source: &Source, def: &DefinitionLink) -> Option<Ty> {
let def_at = def.def_at.clone()?;
let def_ident = IdentRef {
name: def.name.clone(),
range: def_at.1,
};
let (def_id, _) = def_use.get_def(def_at.0, &def_ident)?;
ty_chk.type_of_def(def_id)
self.user_type_of_ident(source, def_at.0, &def_ident)
}
pub(crate) fn type_of_span(&mut self, s: Span) -> Option<Ty> {

View file

@ -26,7 +26,7 @@ pub struct DefinitionLink {
/// A possible instance of the definition.
pub value: Option<Value>,
/// The name of the definition.
pub name: String,
pub name: EcoString,
/// The location of the definition.
pub def_at: Option<(TypstFileId, Range<usize>)>,
/// The range of the name of the definition.
@ -54,7 +54,7 @@ pub fn find_definition(
let source = find_source_by_expr(ctx.world(), def_fid, import_node.source())?;
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathVar),
name: String::new(),
name: EcoString::new(),
value: None,
def_at: Some((source.id(), LinkedNode::new(source.root()).range())),
name_range: None,
@ -67,7 +67,7 @@ pub fn find_definition(
let source = find_source_by_expr(ctx.world(), def_fid, include_node.source())?;
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathInclude),
name: String::new(),
name: EcoString::new(),
value: None,
def_at: Some((source.id(), (LinkedNode::new(source.root())).range())),
name_range: None,
@ -125,7 +125,7 @@ pub fn find_definition(
Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Label),
name: ref_node.to_owned(),
name: ref_node.into(),
value: Some(Value::Content(elem)),
def_at,
name_range,
@ -140,11 +140,11 @@ pub fn find_definition(
// Lexical reference
let ident_ref = match use_site.cast::<ast::Expr>()? {
ast::Expr::Ident(e) => Some(IdentRef {
name: e.get().to_string(),
name: e.get().clone(),
range: use_site.range(),
}),
ast::Expr::MathIdent(e) => Some(IdentRef {
name: e.get().to_string(),
name: e.get().clone(),
range: use_site.range(),
}),
ast::Expr::FieldAccess(..) => {
@ -173,12 +173,7 @@ pub fn find_definition(
// Global definition
let Some((def_fid, def)) = def_info else {
return resolve_global_value(ctx, use_site.clone(), false).and_then(move |f| {
value_to_def(
ctx,
f,
|| Some(use_site.get().clone().into_text().to_string()),
None,
)
value_to_def(ctx, f, || Some(use_site.get().clone().into_text()), None)
});
};
@ -237,7 +232,7 @@ fn find_bib_definition(bib_elem: Arc<BibInfo>, key: &str) -> Option<DefinitionLi
let entry = entry?;
Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::BibKey),
name: key.to_string(),
name: key.into(),
value: None,
def_at: Some((entry.file_id, entry.span.clone())),
// todo: rename with regard to string format: yaml-key/bib etc.
@ -448,7 +443,7 @@ pub(crate) fn resolve_global_value(
fn value_to_def(
ctx: &mut AnalysisContext,
value: Value,
name: impl FnOnce() -> Option<String>,
name: impl FnOnce() -> Option<EcoString>,
name_range: Option<Range<usize>>,
) -> Option<DefinitionLink> {
let mut def_at = |span: Span| {
@ -460,7 +455,7 @@ fn value_to_def(
Some(match value {
Value::Func(func) => {
let name = func.name().map(|e| e.to_owned()).or_else(name)?;
let name = func.name().map(|e| e.into()).or_else(name)?;
let span = func.span();
DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Function),
@ -471,7 +466,7 @@ fn value_to_def(
}
}
Value::Module(module) => {
let name = module.name().to_string();
let name = module.name().clone();
DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Variable),
name,

View file

@ -441,7 +441,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
fn to_ident_ref(root: &LinkedNode, c: ast::Ident) -> Option<IdentRef> {
Some(IdentRef {
name: c.get().to_string(),
name: c.get().clone(),
range: root.find(c.span())?.range(),
})
}

View file

@ -182,7 +182,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
fn check_ident(&mut self, root: LinkedNode<'_>, mode: InterpretMode) -> Option<Ty> {
let ident: ast::Ident = root.cast()?;
let ident_ref = IdentRef {
name: ident.get().to_string(),
name: ident.get().clone(),
range: root.range(),
};

View file

@ -0,0 +1,24 @@
use std::sync::Arc;
use ecow::eco_format;
use typlite::value::*;
/// Builds the typlite library used for converting docstrings to markdown.
pub(super) fn lib() -> Arc<typlite::scopes::Scopes<Value>> {
    // Extend the default typlite library with docs-specific helpers.
    // todo: how to import this function correctly?
    let mut lib = typlite::library::library();
    lib.define("example", example as RawFunc);
    Arc::new(lib)
}
/// Evaluate an `example`: re-fences the code block so it is highlighted as
/// `typ` in the generated markdown.
pub fn example(mut args: Args) -> typlite::Result<Value> {
    let body = get_pos_named!(args, body: Content).0;
    let trimmed = body.trim();
    // Count the leading backticks (ASCII, one byte each) so the output fence
    // matches the input's fence length.
    let tick_count = trimmed.bytes().take_while(|b| *b == b'`').count();
    let (ticks, rest) = trimmed.split_at(tick_count);
    Ok(Value::Content(eco_format!("{ticks}typ{rest}")))
}

View file

@ -0,0 +1,816 @@
//! Package management tools.
mod library;
mod tidy;
use core::fmt::{self, Write};
use std::collections::{HashMap, HashSet};
use std::ops::Range;
use std::path::PathBuf;
use std::sync::Arc;
use comemo::Track;
use ecow::{eco_vec, EcoString, EcoVec};
use indexmap::IndexSet;
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use tinymist_world::base::{EntryState, ShadowApi, TaskInputs};
use tinymist_world::LspWorld;
use typst::diag::{eco_format, StrResult};
use typst::engine::Route;
use typst::eval::Tracer;
use typst::foundations::{Bytes, Value};
use typst::syntax::package::{PackageManifest, PackageSpec};
use typst::syntax::{FileId, Span, VirtualPath};
use typst::World;
use self::tidy::*;
use crate::analysis::analyze_dyn_signature;
use crate::syntax::{find_docs_of, get_non_strict_def_target, IdentRef};
use crate::ty::Ty;
use crate::upstream::truncated_doc_repr;
use crate::AnalysisContext;
/// Information about a package.
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageInfo {
    /// The path to the package if any.
    pub path: PathBuf,
    /// The namespace the package lives in.
    pub namespace: EcoString,
    /// The name of the package within its namespace.
    pub name: EcoString,
    /// The package's version, stored as a string and parsed on demand.
    pub version: String,
}

impl From<(PathBuf, PackageSpec)> for PackageInfo {
    fn from((path, spec): (PathBuf, PackageSpec)) -> Self {
        Self {
            path,
            namespace: spec.namespace,
            name: spec.name,
            version: spec.version.to_string(),
        }
    }
}
/// Docs about a symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind")]
pub enum Docs {
    /// Docs about a function.
    #[serde(rename = "func")]
    Function(TidyFuncDocs),
    /// Docs about a variable.
    #[serde(rename = "var")]
    Variable(TidyVarDocs),
    /// Docs about a module.
    #[serde(rename = "module")]
    Module(TidyModuleDocs),
    /// Other kinds of docs, kept as unparsed markdown.
    #[serde(rename = "plain")]
    Plain(EcoString),
}

impl Docs {
    /// Get the markdown representation of the docs.
    pub fn docs(&self) -> &str {
        match self {
            Self::Function(docs) => docs.docs.as_str(),
            Self::Variable(docs) => docs.docs.as_str(),
            Self::Module(docs) => docs.docs.as_str(),
            Self::Plain(docs) => docs.as_str(),
        }
    }
}
/// Describes a primary function signature.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DocSignature {
    /// The positional parameters.
    pub pos: Vec<DocParamSpec>,
    /// The named parameters.
    pub named: HashMap<String, DocParamSpec>,
    /// The rest parameter.
    pub rest: Option<DocParamSpec>,
    /// The return type, as a (description, debug representation) pair.
    pub ret_ty: Option<(String, String)>,
}

/// Describes a function parameter.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DocParamSpec {
    /// The parameter's name.
    pub name: String,
    /// Documentation for the parameter.
    pub docs: String,
    /// Inferred type of the parameter, as a (description, debug
    /// representation) pair.
    pub cano_type: Option<(String, String)>,
    /// The parameter's default name as type.
    pub type_repr: Option<EcoString>,
    /// The parameter's default name as value.
    pub expr: Option<EcoString>,
    /// Is the parameter positional?
    pub positional: bool,
    /// Is the parameter named?
    ///
    /// Can be true even if `positional` is true if the parameter can be given
    /// in both variants.
    pub named: bool,
    /// Can the parameter be given any number of times?
    pub variadic: bool,
    /// Is the parameter settable with a set rule?
    pub settable: bool,
}
/// Information about a symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolInfoHead {
    /// The name of the symbol.
    pub name: EcoString,
    /// The kind of the symbol.
    pub kind: EcoString,
    /// The location (file, start, end) of the symbol.
    pub loc: Option<(usize, usize, usize)>,
    /// The raw documentation of the symbol.
    pub docs: Option<String>,
    /// The signature of the symbol.
    pub signature: Option<DocSignature>,
    /// The parsed documentation of the symbol.
    pub parsed_docs: Option<Docs>,
    /// A truncated representation of the symbol's constant value; `None` for
    /// functions.
    #[serde(skip)]
    pub constant: Option<EcoString>,
    /// The span of the symbol.
    #[serde(skip)]
    pub span: Option<Span>,
    /// The name range of the symbol.
    #[serde(skip)]
    pub name_range: Option<Range<usize>>,
    /// The value of the symbol.
    #[serde(skip)]
    pub value: Option<Value>,
}

/// Information about a symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolInfo {
    /// The primary information about the symbol.
    #[serde(flatten)]
    pub head: SymbolInfoHead,
    /// The children of the symbol.
    pub children: EcoVec<SymbolInfo>,
}
/// Information about a package.
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageMeta {
    /// The namespace the package lives in.
    pub namespace: EcoString,
    /// The name of the package within its namespace.
    pub name: EcoString,
    /// The package's version.
    pub version: String,
    /// The package's manifest information.
    pub manifest: Option<PackageManifest>,
}

/// Trailing metadata listing the packages and files referenced by the
/// generated documentation.
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageMetaEnd {
    // Packages referenced by the generated symbols.
    packages: Vec<PackageMeta>,
    // Files referenced by the generated symbols.
    files: Vec<FileMeta>,
}

/// Information about a file referenced by the generated documentation.
#[derive(Debug, Serialize, Deserialize)]
pub struct FileMeta {
    // Index into `PackageMetaEnd::packages`, if the file belongs to a package.
    package: Option<usize>,
    // The file's virtual path, without the leading root.
    path: PathBuf,
}
/// Parses the `typst.toml` manifest identified by `toml_id`.
pub fn get_manifest(world: &LspWorld, toml_id: FileId) -> StrResult<PackageManifest> {
    // Read the raw bytes, decode as UTF-8, then parse the TOML.
    let bytes = world
        .file(toml_id)
        .map_err(|err| eco_format!("failed to read package manifest ({})", err))?;
    let content = std::str::from_utf8(&bytes)
        .map_err(|err| eco_format!("package manifest is not valid UTF-8 ({})", err))?;
    toml::from_str(content)
        .map_err(|err| eco_format!("package manifest is malformed ({})", err.message()))
}
/// List all symbols in a package.
///
/// Evaluates the package's entry point module and recursively walks its
/// scope (see [`symbol`]).
pub fn list_symbols(world: &LspWorld, spec: &PackageInfo) -> StrResult<SymbolInfo> {
    // The manifest lives at `typst.toml` in the package root.
    let toml_id = FileId::new(
        Some(PackageSpec {
            namespace: spec.namespace.clone(),
            name: spec.name.clone(),
            version: spec.version.parse()?,
        }),
        VirtualPath::new("typst.toml"),
    );
    let manifest = get_manifest(world, toml_id)?;

    let entry_point = toml_id.join(&manifest.package.entrypoint);
    let source = world.source(entry_point).map_err(|e| eco_format!("{e}"))?;
    let route = Route::default();
    let mut tracer = Tracer::default();
    let w: &dyn typst::World = world;

    // Evaluate the entry point to obtain its module value.
    let src = typst::eval::eval(w.track(), route.track(), tracer.track_mut(), &source)
        .map_err(|e| eco_format!("{e:?}"))?;

    // Only symbols defined inside this package are collected.
    let for_spec = PackageSpec {
        namespace: spec.namespace.clone(),
        name: spec.name.clone(),
        version: spec.version.parse()?,
    };
    Ok(symbol(world, Some(&for_spec), "root", &Value::Module(src)))
}
/// Serializes `s` to JSON and encodes the result as standard base64.
fn jbase64<T: Serialize>(s: &T) -> String {
    use base64::Engine;
    base64::engine::general_purpose::STANDARD.encode(serde_json::to_string(s).unwrap())
}
// Unfortunately, we have only 65536 possible file ids and we cannot revoke
// them. So we share a global file id for all docs conversion.
static DOCS_CONVERT_ID: std::sync::LazyLock<Mutex<FileId>> = std::sync::LazyLock::new(|| {
    Mutex::new(FileId::new(None, VirtualPath::new("__tinymist_docs__.typ")))
});

/// Converts a raw docstring into markdown using typlite.
fn convert_docs(world: &LspWorld, content: &str) -> StrResult<EcoString> {
    // The typlite scopes extended with docs helpers (see `library::lib`),
    // built once and shared across conversions.
    static DOCS_LIB: std::sync::LazyLock<Arc<typlite::scopes::Scopes<typlite::value::Value>>> =
        std::sync::LazyLock::new(library::lib);

    // Holding the lock serializes conversions, since they all shadow the
    // same shared file id.
    let conv_id = DOCS_CONVERT_ID.lock();
    let entry = EntryState::new_rootless(conv_id.vpath().as_rooted_path().into()).unwrap();
    let entry = entry.select_in_workspace(*conv_id);

    // Fork the world with the conversion entry selected.
    let mut w = world.task(TaskInputs {
        entry: Some(entry),
        inputs: None,
    });
    // Shadow the shared file id with the docstring content.
    w.map_shadow_by_id(*conv_id, Bytes::from(content.as_bytes().to_owned()))?;
    // todo: bad performance
    w.source_db.take_state();

    let conv = typlite::Typlite::new(Arc::new(w))
        .with_library(DOCS_LIB.clone())
        .annotate_elements(true)
        .convert()
        .map_err(|e| eco_format!("failed to convert to markdown: {e}"))?;

    Ok(conv)
}
#[derive(Serialize, Deserialize)]
struct ConvertResult {
    // Errors collected while converting individual docstrings.
    errors: Vec<String>,
}

/// Generate full documents in markdown format
///
/// The output interleaves human-readable markdown with HTML comments
/// (`<!-- begin:... -->` / `<!-- end:... -->`) carrying base64-encoded JSON
/// metadata, so a consumer can reconstruct the package structure from the
/// generated file.
pub fn generate_md_docs(
    ctx: &mut AnalysisContext,
    world: &LspWorld,
    spec: &PackageInfo,
) -> StrResult<String> {
    log::info!("generate_md_docs {spec:?}");
    let toml_id = FileId::new(
        Some(PackageSpec {
            namespace: spec.namespace.clone(),
            name: spec.name.clone(),
            version: spec.version.parse()?,
        }),
        VirtualPath::new("typst.toml"),
    );

    let mut md = String::new();
    let sym = list_symbols(world, spec)?;

    let title = format!("@{}/{}:{}", spec.namespace, spec.name, spec.version);

    let mut errors = vec![];

    writeln!(md, "# {title}").unwrap();
    md.push('\n');
    writeln!(md, "This documentation is generated locally. Please submit issues to [tinymist](https://github.com/Myriad-Dreamin/tinymist/issues) if you see **incorrect** information in it.").unwrap();
    md.push('\n');
    md.push('\n');

    let manifest = get_manifest(world, toml_id)?;

    let meta = PackageMeta {
        namespace: spec.namespace.clone(),
        name: spec.name.clone(),
        version: spec.version.to_string(),
        manifest: Some(manifest),
    };
    let package_meta = jbase64(&meta);
    let _ = writeln!(md, "<!-- begin:package {package_meta} -->");

    let mut key = 0;

    // Breadth-first walk over the module tree, starting at the root module.
    let mut modules_to_generate = vec![(EcoString::new(), sym.head.name.clone(), sym)];
    let mut generated_modules = HashSet::new();
    let mut file_ids = IndexSet::new();

    while !modules_to_generate.is_empty() {
        for (prefix, parent_ident, sym) in std::mem::take(&mut modules_to_generate) {
            // parent_ident, symbols
            let symbols = sym.children;
            if !prefix.is_empty() {
                let _ = writeln!(md, "---\n## Module: {prefix}");
            }

            let module_val = sym.head.value.as_ref().unwrap();
            let module = match module_val {
                Value::Module(m) => m,
                _ => todo!(),
            };
            let fid = module.file_id();
            // Def-use and type-check info for the module's source, if it has
            // one; used below to compute typed signatures.
            let type_info = None.or_else(|| {
                let file_id = fid?;
                let src = world.source(file_id).ok()?;
                let def_use = ctx.def_use(src.clone())?;
                let ty_chck = ctx.type_check(src)?;
                Some((def_use, ty_chck))
            });
            let type_info = type_info.as_ref();

            // Intern the file id; `loc` fields refer to this index.
            let persist_fid = fid.map(|f| file_ids.insert_full(f).0);

            #[derive(Serialize)]
            struct ModuleInfo {
                prefix: EcoString,
                name: EcoString,
                loc: Option<usize>,
                parent_ident: EcoString,
            }
            let m = jbase64(&ModuleInfo {
                prefix: prefix.clone(),
                name: sym.head.name.clone(),
                loc: persist_fid,
                parent_ident: parent_ident.clone(),
            });
            let _ = writeln!(md, "<!-- begin:module {parent_ident} {m} -->");

            for mut sym in symbols {
                // Resolve the symbol's span to (file index, start, end).
                let span = sym.head.span.and_then(|v| {
                    v.id().and_then(|e| {
                        let fid = file_ids.insert_full(e).0;
                        let src = world.source(e).ok()?;
                        let rng = src.range(v)?;
                        Some((fid, rng.start, rng.end))
                    })
                });
                sym.head.loc = span;

                let mut convert_err = None;
                if let Some(docs) = &sym.head.docs {
                    match convert_docs(world, docs) {
                        Ok(content) => {
                            // Try to parse tidy-style structured docs; fall
                            // back to the plain converted markdown.
                            let docs = match sym.head.kind.as_str() {
                                "function" => {
                                    let t = identify_tidy_func_docs(&content).ok();
                                    t.map(Docs::Function).unwrap_or(Docs::Plain(content))
                                }
                                "variable" => {
                                    let t = identify_tidy_var_docs(&content).ok();
                                    t.map(Docs::Variable).unwrap_or(Docs::Plain(content))
                                }
                                "module" => {
                                    let t = identify_tidy_module_docs(&content).ok();
                                    t.map(Docs::Module).unwrap_or(Docs::Plain(content))
                                }
                                _ => Docs::Plain(content),
                            };

                            sym.head.parsed_docs = Some(docs.clone());
                            sym.head.docs = None;
                        }
                        Err(e) => {
                            let err = format!("failed to convert docs in {title}: {e}").replace(
                                "-->", "—>", // avoid markdown comment
                            );
                            log::error!("{err}");
                            convert_err = Some(err);
                        }
                    }
                }

                // For functions with parsed docs, compute a full signature by
                // combining the dynamic signature with type-check results.
                let signature =
                    match &sym.head.parsed_docs {
                        Some(Docs::Function(TidyFuncDocs {
                            params, return_ty, ..
                        })) => sym.head.value.clone().and_then(|e| {
                            let func = match e {
                                Value::Func(f) => f,
                                _ => return None,
                            };
                            let sig = analyze_dyn_signature(ctx, func.clone());
                            let type_sig = type_info.and_then(|(def_use, ty_chk)| {
                                let def_fid = func.span().id()?;
                                let def_ident = IdentRef {
                                    name: sym.head.name.clone(),
                                    range: sym.head.name_range.clone()?,
                                };
                                let (def_id, _) = def_use.get_def(def_fid, &def_ident)?;
                                ty_chk.type_of_def(def_id)
                            });
                            let type_sig = type_sig.and_then(|type_sig| type_sig.sig_repr(true));

                            // Pair each dynamic param with its checked type.
                            let pos_in = sig.primary().pos.iter().enumerate().map(|(i, pos)| {
                                (pos, type_sig.as_ref().and_then(|sig| sig.pos(i)))
                            });
                            let named_in = sig
                                .primary()
                                .named
                                .iter()
                                .map(|x| (x, type_sig.as_ref().and_then(|sig| sig.named(x.0))));
                            let rest_in =
                                sig.primary().rest.as_ref().map(|x| {
                                    (x, type_sig.as_ref().and_then(|sig| sig.rest_param()))
                                });

                            let ret_in = type_sig
                                .as_ref()
                                .and_then(|sig| sig.body.as_ref())
                                .or_else(|| sig.primary().ret_ty.as_ref());

                            // Renders a type as (description, debug repr).
                            let doc_ty = |ty: Option<&Ty>| {
                                ty.and_then(|ty| ty.describe().map(|e| (e, format!("{ty:?}"))))
                            };

                            let _ = params;
                            let _ = return_ty;

                            let pos = pos_in
                                .map(|(param, ty)| DocParamSpec {
                                    name: param.name.as_ref().to_owned(),
                                    docs: param.docs.as_ref().to_owned(),
                                    cano_type: doc_ty(ty),
                                    type_repr: param.type_repr.clone(),
                                    expr: param.expr.clone(),
                                    positional: param.positional,
                                    named: param.named,
                                    variadic: param.variadic,
                                    settable: param.settable,
                                })
                                .collect();

                            let named = named_in
                                .map(|((name, param), ty)| {
                                    (
                                        name.as_ref().to_owned(),
                                        DocParamSpec {
                                            name: param.name.as_ref().to_owned(),
                                            docs: param.docs.as_ref().to_owned(),
                                            cano_type: doc_ty(ty),
                                            type_repr: param.type_repr.clone(),
                                            expr: param.expr.clone(),
                                            positional: param.positional,
                                            named: param.named,
                                            variadic: param.variadic,
                                            settable: param.settable,
                                        },
                                    )
                                })
                                .collect();

                            let rest = rest_in.map(|(param, ty)| DocParamSpec {
                                name: param.name.as_ref().to_owned(),
                                docs: param.docs.as_ref().to_owned(),
                                cano_type: doc_ty(ty),
                                type_repr: param.type_repr.clone(),
                                expr: param.expr.clone(),
                                positional: param.positional,
                                named: param.named,
                                variadic: param.variadic,
                                settable: param.settable,
                            });

                            let ret_ty = doc_ty(ret_in);

                            Some(DocSignature {
                                pos,
                                named,
                                rest,
                                ret_ty,
                            })
                        }),
                        _ => None,
                    };
                sym.head.signature = signature;

                let _ = writeln!(md, "### {}: {}", sym.head.kind, sym.head.name);

                let ident = eco_format!("symbol-{}-{}-{key}", sym.head.kind, sym.head.name);
                key += 1;
                let head = jbase64(&sym.head);
                let _ = writeln!(md, "<!-- begin:symbol {ident} {head} -->");

                if let Some(sig) = &sym.head.signature {
                    let _ = writeln!(md, "<!-- begin:sig -->");
                    let _ = writeln!(md, "```typc");
                    let _ = writeln!(
                        md,
                        "let {name}({params});",
                        name = sym.head.name,
                        params = ParamTooltip(sig)
                    );
                    let _ = writeln!(md, "```");
                    let _ = writeln!(md, "<!-- end:sig -->");
                }

                match (&sym.head.parsed_docs, convert_err) {
                    (_, Some(err)) => {
                        let err = format!("failed to convert docs in {title}: {err}").replace(
                            "-->", "—>", // avoid markdown comment
                        );
                        let _ = writeln!(md, "<!-- convert-error: {err} -->");
                        errors.push(err);
                    }
                    (Some(docs), _) => {
                        let _ = writeln!(md, "{}", remove_list_annotations(docs.docs()));
                        if let Docs::Function(f) = docs {
                            for param in &f.params {
                                let _ = writeln!(md, "<!-- begin:param {} -->", param.name);
                                let _ = writeln!(
                                    md,
                                    "#### {} ({})\n<!-- begin:param-doc {} -->\n{}\n<!-- end:param-doc {} -->",
                                    param.name, param.types, param.name, param.docs, param.name
                                );
                                let _ = writeln!(md, "<!-- end:param -->");
                            }
                        }
                    }
                    (None, None) => {}
                }

                // Raw (unconverted) docs are emitted verbatim, fenced when
                // they contain code blocks.
                if let Some(docs) = &sym.head.docs {
                    let contains_code = docs.contains("```");
                    if contains_code {
                        let _ = writeln!(md, "`````typ");
                    }
                    let _ = writeln!(md, "{docs}");
                    if contains_code {
                        let _ = writeln!(md, "`````");
                    }
                }

                // Queue child modules for a later round, skipping ones we
                // have already generated.
                if !sym.children.is_empty() {
                    let mut full_path = prefix.clone();
                    if !full_path.is_empty() {
                        full_path.push_str(".");
                    }
                    full_path.push_str(&sym.head.name);
                    let link = format!("Module-{full_path}").replace(".", "-");
                    let _ = writeln!(md, "[Module Docs](#{link})\n");

                    if generated_modules.insert(full_path.clone()) {
                        modules_to_generate.push((full_path, ident.clone(), sym));
                    }
                }

                let _ = writeln!(md, "<!-- end:symbol {ident} -->");
            }

            let _ = writeln!(md, "<!-- end:module {parent_ident} -->");
        }
    }

    let res = ConvertResult { errors };
    let err = jbase64(&res);
    let _ = writeln!(md, "<!-- begin:errors {err} -->");
    let _ = writeln!(md, "## Errors");
    for e in res.errors {
        let _ = writeln!(md, "- {e}");
    }
    let _ = writeln!(md, "<!-- end:errors -->");

    // Emit the trailing metadata: all referenced files, and the packages
    // they belong to (by index).
    let mut packages = IndexSet::new();
    let files = file_ids
        .into_iter()
        .map(|e| {
            let pkg = e.package().map(|e| packages.insert_full(e.clone()).0);
            FileMeta {
                package: pkg,
                path: e.vpath().as_rootless_path().to_owned(),
            }
        })
        .collect();

    let packages = packages
        .into_iter()
        .map(|e| PackageMeta {
            namespace: e.namespace.clone(),
            name: e.name.clone(),
            version: e.version.to_string(),
            manifest: None,
        })
        .collect();

    let meta = PackageMetaEnd { packages, files };
    let package_meta = jbase64(&meta);
    let _ = writeln!(md, "<!-- end:package {package_meta} -->");

    Ok(md)
}
/// Maps a value to the symbol-kind string used in the generated docs.
fn kind_of(val: &Value) -> EcoString {
    let kind = match val {
        Value::Module(_) => "module",
        Value::Type(_) => "struct",
        Value::Func(_) => "function",
        Value::Label(_) => "reference",
        _ => "constant",
    };
    kind.into()
}
/// Recursively collects symbol information for `val`, descending into module
/// scopes that belong to `for_spec`.
fn symbol(world: &LspWorld, for_spec: Option<&PackageSpec>, key: &str, val: &Value) -> SymbolInfo {
    let children = if let Value::Module(module) = val {
        // Only generate docs for modules of the same package.
        let same_package = module.file_id().map_or(false, |e| e.package() == for_spec);
        if same_package {
            module
                .scope()
                .iter()
                .map(|(k, v)| symbol(world, for_spec, k, v))
                .collect()
        } else {
            eco_vec![]
        }
    } else {
        eco_vec![]
    };
    SymbolInfo {
        head: create_head(world, key, val),
        children,
    }
}
/// Builds the head metadata for the symbol named `k` bound to value `v`.
///
/// For functions, tries to locate the definition in source to recover its
/// docs, name range, and span; other values get a truncated constant
/// representation instead.
fn create_head(world: &LspWorld, k: &str, v: &Value) -> SymbolInfoHead {
    let kind = kind_of(v);
    let (docs, name_range, span) = match v {
        Value::Func(f) => {
            let mut span = None;
            let mut name_range = None;
            let docs = None.or_else(|| {
                let source = world.source(f.span().id()?).ok()?;
                let node = source.find(f.span())?;
                log::debug!("node: {k} -> {:?}", node.parent());
                // use parent of params, todo: reliable way to get the def target
                let def = get_non_strict_def_target(node.parent()?.clone())?;
                span = Some(def.node().span());
                name_range = def.name_range();
                find_docs_of(&source, def)
            });
            // Fall back to the function's own span if no def target was found.
            (docs, name_range, span.or(Some(f.span())))
        }
        _ => (None, None, None),
    };

    SymbolInfoHead {
        // `&str` converts to `EcoString` directly; the previous
        // `k.to_string().into()` allocated an intermediate `String`.
        name: k.into(),
        kind,
        loc: None,
        constant: None.or_else(|| match v {
            // Functions are described by their signature instead.
            Value::Func(_) => None,
            t => Some(truncated_doc_repr(t)),
        }),
        signature: None,
        parsed_docs: None,
        docs,
        name_range,
        span,
        value: Some(v.clone()),
    }
}
// todo: hover with `with_stack`
/// Renders a [`DocSignature`]'s parameter list as `a, b, key: ty, ...` for
/// the `let name(params);` line in the generated markdown.
struct ParamTooltip<'a>(&'a DocSignature);

impl<'a> fmt::Display for ParamTooltip<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut is_first = true;
        // Writes a `, ` separator before every item except the first.
        let mut write_sep = |f: &mut fmt::Formatter<'_>| {
            if is_first {
                is_first = false;
                return Ok(());
            }
            f.write_str(", ")
        };

        let primary_sig = self.0;

        // Positional parameters first, then the rest parameter, then named.
        for p in &primary_sig.pos {
            write_sep(f)?;
            write!(f, "{}", p.name)?;
        }
        if let Some(rest) = &primary_sig.rest {
            write_sep(f)?;
            write!(f, "{}", rest.name)?;
        }

        if !primary_sig.named.is_empty() {
            // Sort named parameters by name for deterministic output.
            let mut name_prints = vec![];
            for v in primary_sig.named.values() {
                name_prints.push((v.name.clone(), v.type_repr.clone()))
            }
            name_prints.sort();
            for (k, v) in name_prints {
                write_sep(f)?;
                let v = v.as_deref().unwrap_or("any");
                let mut v = v.trim();
                // Abbreviate overly long type representations.
                if v.starts_with('{') && v.ends_with('}') && v.len() > 30 {
                    v = "{ ... }"
                }
                if v.starts_with('`') && v.ends_with('`') && v.len() > 30 {
                    v = "raw"
                }
                if v.starts_with('[') && v.ends_with(']') && v.len() > 30 {
                    v = "content"
                }
                write!(f, "{k}: {v}")?;
            }
        }

        Ok(())
    }
}
/// Strips typlite list-item boundary markers
/// (`<!-- typlite:begin:list-item N -->` / `<!-- typlite:end:list-item N -->`)
/// from converted markdown.
fn remove_list_annotations(s: &str) -> String {
    static REG: std::sync::LazyLock<regex::Regex> = std::sync::LazyLock::new(|| {
        regex::Regex::new(r"<!-- typlite:(?:begin|end):[\w\-]+ \d+ -->").unwrap()
    });
    // `replace_all` borrows `s` directly and returns a `Cow<str>`; the
    // previous intermediate `s.to_string()` allocation was unnecessary.
    REG.replace_all(s, "").into_owned()
}
#[cfg(test)]
mod tests {
    use reflexo_typst::package::{PackageRegistry, PackageSpec};

    use super::{generate_md_docs, PackageInfo};
    use crate::tests::*;

    /// Generates markdown docs for `pkg` and writes the result under
    /// `target/` for manual inspection.
    ///
    /// NOTE(review): `registry.resolve` may hit the on-disk package cache or
    /// the network registry — confirm these tests are meant to run with
    /// packages available locally.
    fn test(pkg: PackageSpec) {
        run_with_sources("", |verse: &mut LspUniverse, p| {
            let w = verse.snapshot();
            let path = verse.registry.resolve(&pkg).unwrap();
            let pi = PackageInfo {
                path: path.as_ref().to_owned(),
                namespace: pkg.namespace,
                name: pkg.name,
                version: pkg.version.to_string(),
            };
            run_with_ctx(verse, p, &|a, _p| {
                let d = generate_md_docs(a, &w, &pi).unwrap();
                // One output file per package, e.g. `preview-tidy-0.3.0.md`.
                let dest = format!(
                    "../../target/{}-{}-{}.md",
                    pi.namespace, pi.name, pi.version
                );
                std::fs::write(dest, d).unwrap();
            })
        })
    }

    #[test]
    fn tidy() {
        test(PackageSpec {
            namespace: "preview".into(),
            name: "tidy".into(),
            version: "0.3.0".parse().unwrap(),
        });
    }

    #[test]
    fn touying() {
        test(PackageSpec {
            namespace: "preview".into(),
            name: "touying".into(),
            version: "0.5.2".parse().unwrap(),
        });
    }

    #[test]
    fn cetz() {
        test(PackageSpec {
            namespace: "preview".into(),
            name: "cetz".into(),
            version: "0.2.2".parse().unwrap(),
        });
    }
}

View file

@ -0,0 +1,359 @@
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use typst::diag::StrResult;
/// Documentation of a single function parameter, parsed from tidy-style docs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyParamDocs {
    /// The parameter's name.
    pub name: String,
    /// The parameter's description body.
    pub docs: String,
    /// The accepted types as written in the docs, e.g. `auto, string`.
    pub types: String,
    /// The documented default value, if any.
    pub default: Option<String>,
}

/// Documentation of a function, parsed from tidy-style docs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyFuncDocs {
    /// The description body of the function.
    pub docs: String,
    /// The trailing `-> type` return annotation, if present.
    pub return_ty: Option<String>,
    /// Docs for each parameter, in document order.
    pub params: Vec<TidyParamDocs>,
}

/// Documentation of a variable, parsed from tidy-style docs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyVarDocs {
    /// The description body of the variable.
    pub docs: String,
    /// The trailing `-> type` annotation, if present.
    pub return_ty: Option<String>,
}

/// Documentation of a module, parsed from tidy-style docs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyModuleDocs {
    /// The description body of the module (the whole converted text).
    pub docs: String,
}
/// Parses tidy-style function documentation out of markdown produced by
/// typlite.
///
/// Working bottom-up through `converted`, it extracts (1) an optional
/// trailing `-> type` return annotation and (2) the trailing run of
/// `- name (types): docs` list items (delimited by
/// `<!-- typlite:begin:list-item 0 -->` / `<!-- typlite:end:list-item 0 -->`
/// markers) as parameter docs. Everything above that section becomes the
/// function's docs body.
pub fn identify_tidy_func_docs(converted: &str) -> StrResult<TidyFuncDocs> {
    let lines = converted.lines().collect::<Vec<_>>();

    let mut matching_return_ty = true;
    let mut buf = vec![];
    let mut params = vec![];
    let mut return_ty = None;
    let mut break_line = None;

    let mut i = lines.len();
    'search: loop {
        if i == 0 {
            break;
        }
        i -= 1;
        let line = lines[i];
        if line.is_empty() {
            continue;
        }

        loop {
            // The bottom-most non-empty line may carry the return type
            // annotation; it is only looked for once.
            if matching_return_ty {
                matching_return_ty = false;
                let Some(w) = line.trim_start().strip_prefix("->") else {
                    // break_line = Some(i);
                    continue;
                };

                return_ty = Some(w.trim().to_string());
                break;
            }

            // A parameter item must end with a top-level (`0`) list-item end
            // marker; the first line that doesn't terminates the section.
            let Some(mut line) = line
                .trim_end()
                .strip_suffix("<!-- typlite:end:list-item 0 -->")
            else {
                break_line = Some(i + 1);
                break 'search;
            };

            // Collect the (possibly multi-line) item bottom-up into `buf`
            // until its begin marker is found.
            let mut current_line_no = i;
            loop {
                // <!-- typlite:begin:list-item -->
                let t = line
                    .trim_start()
                    .strip_prefix("- ")
                    .and_then(|t| t.trim().strip_prefix("<!-- typlite:begin:list-item 0 -->"));

                let line_content = match t {
                    Some(t) => {
                        buf.push(t);
                        break;
                    }
                    None => line,
                };

                buf.push(line_content);

                // Ran off the top of the document without a begin marker.
                if current_line_no == 0 {
                    break_line = Some(i + 1);
                    break 'search;
                }
                current_line_no -= 1;
                line = lines[current_line_no];
            }

            // `buf` was filled bottom-up; restore document order.
            let mut buf = std::mem::take(&mut buf);
            buf.reverse();

            let Some(first_line) = buf.first_mut() else {
                break_line = Some(i + 1);
                break 'search;
            };
            *first_line = first_line.trim();

            // The first line must look like `name (types): docs...`; the
            // remainder after the colon stays as the first docs line.
            let Some(param_line) = None.or_else(|| {
                let (param_name, rest) = first_line.split_once(" ")?;
                let (type_content, rest) = match_brace(rest.trim_start().strip_prefix("(")?)?;
                let (_, rest) = rest.split_once(":")?;
                *first_line = rest.trim();
                Some((param_name.into(), type_content.into()))
            }) else {
                break_line = Some(i + 1);
                break 'search;
            };

            i = current_line_no;
            params.push(TidyParamDocs {
                name: param_line.0,
                types: param_line.1,
                default: None,
                docs: buf.into_iter().join("\n"),
            });

            break;
        }
    }

    // Everything above the parameter/return section is the docs body.
    let docs = match break_line {
        Some(line_no) => (lines[..line_no]).iter().copied().join("\n"),
        None => converted.to_owned(),
    };

    // Parameters were collected bottom-up; restore document order.
    params.reverse();
    Ok(TidyFuncDocs {
        docs,
        return_ty,
        params,
    })
}
/// Splits tidy-style variable documentation into the description body and
/// an optional trailing `-> type` return annotation.
pub fn identify_tidy_var_docs(converted: &str) -> StrResult<TidyVarDocs> {
    let lines: Vec<&str> = converted.lines().collect();

    let mut return_ty = None;
    let mut break_line = None;

    // Scan upwards from the last line, skipping trailing blank lines. The
    // first non-blank line either carries the `-> type` annotation (which is
    // excluded from the docs body) or marks the end of the docs body itself.
    for idx in (0..lines.len()).rev() {
        let line = lines[idx];
        if line.is_empty() {
            continue;
        }
        if let Some(ty) = line.trim_start().strip_prefix("->") {
            return_ty = Some(ty.trim().to_string());
            break_line = Some(idx);
        } else {
            break_line = Some(idx + 1);
        }
        break;
    }

    let docs = match break_line {
        Some(line_no) => lines[..line_no].join("\n"),
        None => converted.to_owned(),
    };

    Ok(TidyVarDocs { docs, return_ty })
}
/// Wraps tidy-style module documentation; the entire converted text is used
/// as the docs body, unchanged.
pub fn identify_tidy_module_docs(converted: &str) -> StrResult<TidyModuleDocs> {
    let docs = converted.to_string();
    Ok(TidyModuleDocs { docs })
}
/// Finds the matching closing parenthesis for an already-consumed `(`.
///
/// `trim_start` is the text immediately following the opening parenthesis.
/// Returns the content up to (but excluding) the matching `)` together with
/// the remainder starting at that `)`, or `None` when unbalanced.
fn match_brace(trim_start: &str) -> Option<(&str, &str)> {
    // Depth starts at 1 because the opening `(` was stripped by the caller.
    let mut depth: i32 = 1;
    for (idx, ch) in trim_start.char_indices() {
        depth += match ch {
            '(' => 1,
            ')' => -1,
            _ => 0,
        };
        if depth == 0 {
            // Split right at the closing paren so the caller can keep parsing.
            return Some(trim_start.split_at(idx));
        }
    }
    None
}
#[cfg(test)]
mod tests {
    use std::fmt::Write;

    use super::TidyParamDocs;

    /// Renders parsed function docs into a plain-text fixture format
    /// (`>> docs: .. << docs`, `>>return`, `>>arg`) for snapshot testing.
    fn func(s: &str) -> String {
        let f = super::identify_tidy_func_docs(s).unwrap();
        let mut res = format!(">> docs:\n{}\n<< docs", f.docs);
        if let Some(t) = f.return_ty {
            res.push_str(&format!("\n>>return\n{t}\n<<return"));
        }
        for TidyParamDocs {
            name,
            types,
            docs,
            default: _,
        } in f.params
        {
            let _ = write!(res, "\n>>arg {name}: {types}\n{docs}\n<< arg");
        }
        res
    }

    /// Renders parsed variable docs into the same fixture format as `func`.
    fn var(s: &str) -> String {
        let f = super::identify_tidy_var_docs(s).unwrap();
        let mut res = format!(">> docs:\n{}\n<< docs", f.docs);
        if let Some(t) = f.return_ty {
            res.push_str(&format!("\n>>return\n{t}\n<<return"));
        }
        res
    }

    // Flat list items with types and a return annotation.
    #[test]
    fn test_identify_tidy_docs() {
        insta::assert_snapshot!(func(r###"These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`types` (optional): A list of accepted argument types.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`default` (optional): Default value for this argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
- <!-- typlite:begin:list-item 0 -->content (string): Content of `.typ` file to analyze for docstrings.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->require-all-parameters (boolean): Require that all parameters of a
functions are documented and fail if some are not.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->scope (dictionary): A dictionary of definitions that are then available
in all function and parameter descriptions.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->preamble (string): Code to prepend to all code snippets shown with `#example()`.
This can for instance be used to import something from the scope.<!-- typlite:end:list-item 0 -->
-> string"###), @r###"
>> docs:
These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`types` (optional): A list of accepted argument types.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`default` (optional): Default value for this argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
<< docs
>>return
string
<<return
>>arg content: string
Content of `.typ` file to analyze for docstrings.
<< arg
>>arg name: string
The name for the module.
<< arg
>>arg label-prefix: auto, string
The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
<< arg
>>arg require-all-parameters: boolean
Require that all parameters of a
functions are documented and fail if some are not.
<< arg
>>arg scope: dictionary
A dictionary of definitions that are then available
in all function and parameter descriptions.
<< arg
>>arg preamble: string
Code to prepend to all code snippets shown with `#example()`.
This can for instance be used to import something from the scope.
<< arg
"###);
    }

    // Nested (depth-1) list items stay inside the enclosing parameter's docs.
    #[test]
    fn test_identify_tidy_docs_nested() {
        insta::assert_snapshot!(func(r###"These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
  - <!-- typlite:begin:list-item 1 -->nested something<!-- typlite:end:list-item 1 -->
  - <!-- typlite:begin:list-item 1 -->nested something 2<!-- typlite:end:list-item 1 --><!-- typlite:end:list-item 0 -->
-> string"###), @r###"
>> docs:
These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
<< docs
>>return
string
<<return
>>arg name: string
The name for the module.
<< arg
>>arg label-prefix: auto, string
The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
  - <!-- typlite:begin:list-item 1 -->nested something<!-- typlite:end:list-item 1 -->
  - <!-- typlite:begin:list-item 1 -->nested something 2<!-- typlite:end:list-item 1 -->
<< arg
"###);
    }

    // Variable docs with only a return annotation.
    #[test]
    fn test_identify_tidy_docs3() {
        insta::assert_snapshot!(var(r###"See @@show-module() for outputting the results of this function.
-> string"###), @r###"
>> docs:
See @@show-module() for outputting the results of this function.
<< docs
>>return
string
<<return
"###);
    }

    // Variable docs keep list-item markers verbatim (no param parsing).
    #[test]
    fn test_identify_tidy_docs4() {
        insta::assert_snapshot!(var(r###"
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
-> string"###), @r###"
>> docs:

- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
<< docs
>>return
string
<<return
"###);
    }
}

View file

@ -49,7 +49,7 @@ fn filter_document_symbols(
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
DocumentSymbol {
name: e.info.name.clone(),
name: e.info.name.to_string(),
detail: None,
kind: e.info.kind.clone().try_into().unwrap(),
tags: None,

View file

@ -113,7 +113,7 @@ fn calc_folding_range(
end_line: rng.end.line,
end_character: Some(rng.end.character),
kind: None,
collapsed_text: Some(e.info.name.clone()),
collapsed_text: Some(e.info.name.to_string()),
};
let next_start = if is_not_last_range {

View file

@ -9,14 +9,13 @@
mod adt;
pub mod analysis;
pub mod docs;
pub mod syntax;
pub mod ty;
mod upstream;
use std::sync::Arc;
pub use analysis::AnalysisContext;
use typst::{model::Document as TypstDocument, syntax::Source};
pub use upstream::with_vm;
mod diagnostics;
pub use diagnostics::*;
@ -80,6 +79,10 @@ pub use lsp_features::*;
mod prelude;
use std::sync::Arc;
use typst::{model::Document as TypstDocument, syntax::Source};
/// The physical position in a document.
pub type FramePosition = typst::layout::Position;

View file

@ -20,13 +20,12 @@ pub use lsp_types::{
};
pub use reflexo::vector::ir::DefId;
pub use serde_json::Value as JsonValue;
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::diag::{EcoString, FileResult, Tracepoint};
pub use typst::foundations::{Func, Value};
pub use typst::syntax::FileId as TypstFileId;
pub use typst::syntax::{
ast::{self, AstNode},
package::{PackageManifest, PackageSpec},
LinkedNode, Source, Spanned, SyntaxKind, VirtualPath,
LinkedNode, Source, Spanned, SyntaxKind,
};
pub use typst::World;

View file

@ -47,7 +47,7 @@ impl StatefulRequest for PrepareRenameRequest {
debug!("prepare_rename: {}", lnk.name);
Some(PrepareRenameResponse::RangeWithPlaceholder {
range: origin_selection_range,
placeholder: lnk.name,
placeholder: lnk.name.to_string(),
})
}
}

View file

@ -159,7 +159,7 @@ impl SemanticRequest for SignatureHelpRequest {
Some(SignatureHelp {
signatures: vec![SignatureInformation {
label,
label: label.to_string(),
documentation,
parameters: Some(params),
active_parameter: active_parameter.map(|x| x as u32),

View file

@ -85,7 +85,7 @@ fn filter_document_symbols(
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
Some(SymbolInformation {
name: e.info.name.clone(),
name: e.info.name.to_string(),
kind: e.info.kind.clone().try_into().unwrap(),
tags: None,
deprecated: None,

View file

@ -5,6 +5,8 @@ use typst_shim::syntax::LinkedNodeExt;
use crate::prelude::*;
use crate::syntax::get_def_target;
use super::DefTarget;
fn extract_document_between(
node: &LinkedNode,
rng: Range<usize>,
@ -93,6 +95,11 @@ pub fn find_docs_before(src: &Source, cursor: usize) -> Option<String> {
let root = LinkedNode::new(src.root());
let leaf = root.leaf_at_compat(cursor)?;
let def_target = get_def_target(leaf.clone())?;
find_docs_of(src, def_target)
}
pub fn find_docs_of(src: &Source, def_target: DefTarget) -> Option<String> {
let root = LinkedNode::new(src.root());
log::debug!("found docs target: {:?}", def_target.node().kind());
// todo: import node
let target = def_target.node().clone();

View file

@ -4,7 +4,7 @@ use std::{
};
use anyhow::{anyhow, Context};
use ecow::{eco_vec, EcoVec};
use ecow::{eco_vec, EcoString, EcoVec};
use log::info;
use lsp_types::SymbolKind;
use serde::{Deserialize, Serialize};
@ -30,7 +30,7 @@ pub(crate) fn get_lexical_hierarchy(
};
worker.stack.push((
LexicalInfo {
name: "deadbeef".to_string(),
name: "deadbeef".into(),
kind: LexicalKind::Heading(-1),
range: 0..0,
},
@ -223,7 +223,7 @@ impl LexicalScopeKind {
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalInfo {
pub name: String,
pub name: EcoString,
pub kind: LexicalKind,
pub range: Range<usize>,
}
@ -451,14 +451,14 @@ impl LexicalHierarchyWorker {
let symbol = if self.g == LexicalScopeKind::DefUse {
// DefUse mode does not nest symbols inside of functions
LexicalInfo {
name: String::new(),
name: EcoString::new(),
kind: LexicalKind::Block,
range: body.range(),
}
} else if current == self.stack.last().unwrap().1.len() {
// Closure has no updated symbol stack
LexicalInfo {
name: "<anonymous>".to_string(),
name: "<anonymous>".into(),
kind: LexicalKind::function(),
range: node.range(),
}
@ -495,9 +495,9 @@ impl LexicalHierarchyWorker {
let target_name_node = node.find(target_name.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: origin_name.get().to_string(),
name: origin_name.get().clone(),
kind: LexicalKind::module_import_alias(IdentRef {
name: target_name.get().to_string(),
name: target_name.get().clone(),
range: target_name_node.range(),
}),
range: origin_name_node.range(),
@ -590,19 +590,19 @@ impl LexicalHierarchyWorker {
let ast_node = node
.cast::<ast::Label>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
let name = ast_node.get().into();
(name, LexicalKind::label())
}
SyntaxKind::RefMarker if self.g.affect_ref() => {
let name = node.text().trim_start_matches('@').to_owned();
let name = node.text().trim_start_matches('@').into();
(name, LexicalKind::label_ref())
}
SyntaxKind::Ident if self.g.affect_symbol() => {
let ast_node = node
.cast::<ast::Ident>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
let name = ast_node.get().clone();
let kind = match self.ident_context {
IdentContext::Ref if self.g.affect_ref() => LexicalKind::val_ref(),
IdentContext::Func => LexicalKind::function(),
@ -616,10 +616,10 @@ impl LexicalHierarchyWorker {
SyntaxKind::Equation | SyntaxKind::Raw | SyntaxKind::BlockComment
if self.g.affect_markup() =>
{
(String::new(), LexicalKind::Block)
(EcoString::new(), LexicalKind::Block)
}
SyntaxKind::CodeBlock | SyntaxKind::ContentBlock if self.g.affect_block() => {
(String::new(), LexicalKind::Block)
(EcoString::new(), LexicalKind::Block)
}
SyntaxKind::Parenthesized
| SyntaxKind::Destructuring
@ -628,7 +628,7 @@ impl LexicalHierarchyWorker {
| SyntaxKind::Dict
if self.g.affect_expr() =>
{
(String::new(), LexicalKind::Block)
(EcoString::new(), LexicalKind::Block)
}
SyntaxKind::ModuleImport if self.g.affect_import() => {
let src = node
@ -639,23 +639,23 @@ impl LexicalHierarchyWorker {
match src {
ast::Expr::Str(e) => {
let e = e.get();
(String::new(), LexicalKind::module(e.as_ref().into()))
(EcoString::new(), LexicalKind::module(e.as_ref().into()))
}
src => {
let e = node
.find(src.span())
.ok_or_else(|| anyhow!("find expression failed: {:?}", src))?;
let e = IdentRef {
name: String::new(),
name: EcoString::new(),
range: e.range(),
};
(String::new(), LexicalKind::module_expr(e.into()))
(EcoString::new(), LexicalKind::module_expr(e.into()))
}
}
}
SyntaxKind::Markup => {
let name = node.get().to_owned().into_text().to_string();
let name = node.get().to_owned().into_text();
if name.is_empty() {
return Ok(None);
}
@ -703,7 +703,7 @@ impl LexicalHierarchyWorker {
// ^^^
let import_node = node.find(name.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: name.get().to_string(),
name: name.get().clone(),
kind: LexicalKind::module_as(),
range: import_node.range(),
});
@ -721,14 +721,14 @@ impl LexicalHierarchyWorker {
let spec = e
.parse::<PackageSpec>()
.map_err(|e| anyhow!("parse package spec failed: {:?}", e))?;
spec.name.to_string()
spec.name.clone()
} else {
let e = Path::new(e.as_ref())
.file_name()
.context("no file name")?
.to_string_lossy();
let e = e.as_ref();
e.strip_suffix(".typ").context("no suffix")?.to_owned()
e.strip_suffix(".typ").context("no suffix")?.into()
};
// return (e == name).then_some(ImportRef::Path(v));
@ -757,7 +757,7 @@ impl LexicalHierarchyWorker {
.context("no star")?;
let v = node.find(wildcard.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: "*".to_string(),
name: "*".into(),
kind: LexicalKind::module_star(),
range: v.range(),
});

View file

@ -1,3 +1,5 @@
use std::ops::Range;
use ecow::EcoVec;
use serde::Serialize;
use typst::{
@ -211,6 +213,29 @@ impl<'a> DefTarget<'a> {
DefTarget::Import(node) => node,
}
}
    /// Returns the byte range of the bound name for this definition target,
    /// if one can be determined.
    ///
    /// Only `let` bindings with a closure name or a plain identifier pattern
    /// are supported; other patterns and imports yield `None`.
    pub fn name_range(&self) -> Option<Range<usize>> {
        match self {
            DefTarget::Let(node) => {
                let lb: ast::LetBinding<'_> = node.cast()?;
                let names = match lb.kind() {
                    ast::LetBindingKind::Closure(name) => node.find(name.span())?,
                    ast::LetBindingKind::Normal(ast::Pattern::Normal(name)) => {
                        node.find(name.span())?
                    }
                    // Destructuring patterns have no single name to report.
                    _ => return None,
                };
                Some(names.range())
            }
            DefTarget::Import(_node) => {
                // let ident = node.cast::<ast::ImportItem>()?;
                // Some(ident.span().into())
                // todo: implement this
                None
            }
        }
    }
}
// todo: whether we should distinguish between strict and non-strict def targets

View file

@ -5,8 +5,8 @@
// todo: remove this
#![allow(missing_docs)]
pub(crate) mod import;
pub use import::*;
use ecow::EcoString;
pub use tinymist_analysis::import::*;
pub(crate) mod lexical_hierarchy;
pub use lexical_hierarchy::*;
pub(crate) mod matcher;
@ -28,7 +28,7 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct IdentRef {
/// The name of the symbol.
pub name: String,
pub name: EcoString,
/// The byte range of the symbol in the source file.
pub range: Range<usize>,
}
@ -89,7 +89,7 @@ impl<'de> Deserialize<'de> for IdentRef {
(name, st_ed[0]..st_ed[1])
};
Ok(IdentRef {
name: name.to_string(),
name: name.into(),
range,
})
}
@ -101,7 +101,7 @@ impl<'de> Deserialize<'de> for IdentRef {
#[derive(Debug, Clone, Serialize)]
pub struct IdentDef {
/// The name of the symbol.
pub name: String,
pub name: EcoString,
/// The kind of the symbol.
pub kind: LexicalKind,
/// The byte range of the symbol in the source file.

View file

@ -1,7 +1,6 @@
use core::fmt;
use std::sync::Arc;
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
ops::Range,
path::{Path, PathBuf},
@ -9,21 +8,19 @@ use std::{
use ecow::EcoVec;
use once_cell::sync::Lazy;
use reflexo_typst::config::CompileOpts;
use reflexo_typst::package::{PackageRegistry, PackageSpec};
use reflexo_typst::world::{EntryOpts, EntryState};
use reflexo_typst::{
CompileDriver, EntryManager, EntryReader, ShadowApi, TypstSystemUniverse, WorldDeps,
};
use reflexo_typst::world::EntryState;
use reflexo_typst::{CompileDriverImpl, EntryManager, EntryReader, ShadowApi, WorldDeps};
use serde_json::{ser::PrettyFormatter, Serializer, Value};
use tinymist_world::CompileFontArgs;
use typst::syntax::ast::{self, AstNode};
use typst::syntax::{FileId as TypstFileId, LinkedNode, Source, SyntaxKind, VirtualPath};
use typst::{diag::PackageError, foundations::Bytes};
pub use insta::assert_snapshot;
pub use reflexo_typst::TypstSystemWorld;
pub use serde::Serialize;
pub use serde_json::json;
pub use tinymist_world::{LspUniverse, LspUniverseBuilder, LspWorld};
use typst_shim::syntax::LinkedNodeExt;
use crate::{
@ -32,7 +29,9 @@ use crate::{
typst_to_lsp, LspPosition, PositionEncoding, VersionedDocument,
};
struct WrapWorld<'a>(&'a mut TypstSystemWorld);
type CompileDriver<C> = CompileDriverImpl<C, tinymist_world::LspCompilerFeat>;
struct WrapWorld<'a>(&'a mut LspWorld);
impl<'a> AnalysisResources for WrapWorld<'a> {
fn world(&self) -> &dyn typst::World {
@ -64,27 +63,33 @@ pub fn snapshot_testing(name: &str, f: &impl Fn(&mut AnalysisContext, PathBuf))
#[cfg(windows)]
let contents = contents.replace("\r\n", "\n");
run_with_sources(&contents, |w: &mut TypstSystemUniverse, p| {
let root = w.workspace_root().unwrap();
let paths = w
.shadow_paths()
.into_iter()
.map(|p| {
TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap()))
})
.collect::<Vec<_>>();
let mut w = w.snapshot();
let w = WrapWorld(&mut w);
let a = Analysis::default();
let mut ctx = AnalysisContext::new(root, &w, &a);
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p);
run_with_sources(&contents, |w, p| {
run_with_ctx(w, p, f);
});
});
});
}
pub fn run_with_ctx<T>(
w: &mut LspUniverse,
p: PathBuf,
f: &impl Fn(&mut AnalysisContext, PathBuf) -> T,
) -> T {
let root = w.workspace_root().unwrap();
let paths = w
.shadow_paths()
.into_iter()
.map(|p| TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap())))
.collect::<Vec<_>>();
let mut w = w.snapshot();
let w = WrapWorld(&mut w);
let a = Analysis::default();
let mut ctx = AnalysisContext::new(root, &w, &a);
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p)
}
pub fn get_test_properties(s: &str) -> HashMap<&'_ str, &'_ str> {
let mut props = HashMap::new();
for line in s.lines() {
@ -116,21 +121,24 @@ pub fn compile_doc_for_test(
})
}
pub fn run_with_sources<T>(
source: &str,
f: impl FnOnce(&mut TypstSystemUniverse, PathBuf) -> T,
) -> T {
pub fn run_with_sources<T>(source: &str, f: impl FnOnce(&mut LspUniverse, PathBuf) -> T) -> T {
let root = if cfg!(windows) {
PathBuf::from("C:\\")
} else {
PathBuf::from("/")
};
let mut world = TypstSystemUniverse::new(CompileOpts {
entry: EntryOpts::new_rooted(root.as_path().into(), None),
with_embedded_fonts: typst_assets::fonts().map(Cow::Borrowed).collect(),
no_system_fonts: true,
..Default::default()
})
let mut world = LspUniverseBuilder::build(
EntryState::new_rooted(root.as_path().into(), None),
Arc::new(
LspUniverseBuilder::resolve_fonts(CompileFontArgs {
ignore_system_fonts: true,
..Default::default()
})
.unwrap(),
),
Default::default(),
None,
)
.unwrap();
let sources = source.split("-----");

View file

@ -8,7 +8,6 @@ use serde::{Deserialize, Serialize};
use typst::foundations::{fields_on, format_str, repr, Repr, StyleChain, Styles, Value};
use typst::model::Document;
use typst::syntax::ast::AstNode;
use typst::syntax::package::PackageSpec;
use typst::syntax::{ast, is_id_continue, is_id_start, is_ident, LinkedNode, Source, SyntaxKind};
use typst::text::RawElem;
use typst_shim::syntax::LinkedNodeExt;
@ -1048,74 +1047,21 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
}
}
/// Get local packages
fn local_packages(&mut self) -> Vec<(PackageSpec, Option<EcoString>)> {
// search packages locally. We only search in the data
// directory and not the cache directory, because the latter is not
// intended for storage of local packages.
let mut packages = vec![];
let Some(data_dir) = dirs::data_dir() else {
return packages;
};
let local_path = data_dir.join("typst/packages");
if !local_path.exists() {
return packages;
}
// namespace/package_name/version
// 1. namespace
let namespaces = std::fs::read_dir(local_path).unwrap();
for namespace in namespaces {
let namespace = namespace.unwrap();
if !namespace.file_type().unwrap().is_dir() {
continue;
}
// start with . are hidden directories
if namespace.file_name().to_string_lossy().starts_with('.') {
continue;
}
// 2. package_name
let package_names = std::fs::read_dir(namespace.path()).unwrap();
for package in package_names {
let package = package.unwrap();
if !package.file_type().unwrap().is_dir() {
continue;
}
if package.file_name().to_string_lossy().starts_with('.') {
continue;
}
// 3. version
let versions = std::fs::read_dir(package.path()).unwrap();
for version in versions {
let version = version.unwrap();
if !version.file_type().unwrap().is_dir() {
continue;
}
if version.file_name().to_string_lossy().starts_with('.') {
continue;
}
let version = version.file_name().to_string_lossy().parse().unwrap();
let spec = PackageSpec {
namespace: namespace.file_name().to_string_lossy().into(),
name: package.file_name().to_string_lossy().into(),
version,
};
let description = eco_format!("{} v{}", spec.name, spec.version);
let package = (spec, Some(description));
packages.push(package);
}
}
}
packages
}
/// Add completions for all available packages.
fn package_completions(&mut self, all_versions: bool) {
let mut packages: Vec<_> = self.world().packages().iter().collect();
let mut packages: Vec<_> = self
.world()
.packages()
.iter()
.map(|e| (&e.0, e.1.clone()))
.collect();
// local_packages to references and add them to the packages
let local_packages = self.local_packages();
let local_packages_refs: Vec<&(PackageSpec, Option<EcoString>)> =
local_packages.iter().collect();
packages.extend(local_packages_refs);
let local_packages_refs = self.ctx.resources.local_packages();
packages.extend(
local_packages_refs
.iter()
.map(|spec| (spec, Some(eco_format!("{} v{}", spec.name, spec.version)))),
);
packages.sort_by_key(|(spec, _)| (&spec.namespace, &spec.name, Reverse(spec.version)));
if !all_versions {

View file

@ -8,6 +8,7 @@ use typst::{
diag::{bail, StrResult},
foundations::{Content, Func, Module, Type, Value},
introspection::MetadataElem,
syntax::Span,
text::{FontInfo, FontStyle},
Library,
};
@ -397,8 +398,7 @@ fn summarize_font_family<'a>(variants: impl Iterator<Item = &'a FontInfo>) -> Ec
detail
}
pub fn truncated_repr(value: &Value) -> EcoString {
const _10MB: usize = 100 * 1024 * 1024;
pub fn truncated_repr_<const SZ_LIMIT: usize>(value: &Value) -> EcoString {
use typst::foundations::Repr;
let data: Option<Content> = value.clone().cast().ok();
@ -411,13 +411,53 @@ pub fn truncated_repr(value: &Value) -> EcoString {
value.repr()
};
if repr.len() > _10MB {
if repr.len() > SZ_LIMIT {
eco_format!("[truncated-repr: {} bytes]", repr.len())
} else {
repr
}
}
/// Returns `repr(value)` truncated to a generous 100 MiB limit, for
/// interactive inspection where huge values must not be fully rendered.
pub fn truncated_repr(value: &Value) -> EcoString {
    // Fix: the constant was named `_10MB`, but its value is 100 MiB.
    const _100MB: usize = 100 * 1024 * 1024;
    truncated_repr_::<_100MB>(value)
}

/// Returns `repr(value)` truncated to 128 bytes, for embedding in generated
/// documentation.
pub fn truncated_doc_repr(value: &Value) -> EcoString {
    const _128B: usize = 128;
    truncated_repr_::<_128B>(value)
}
/// Run a function with a VM instance in the world
///
/// Builds a one-off evaluation environment: default `Introspector`,
/// `Locator`, `Tracer`, and `Route`, scopes seeded from the world's library,
/// and a detached span.
///
/// NOTE(review): the `Introspector` is `default()`, so introspection queries
/// inside `f` will see no document content — confirm callers don't rely on
/// introspection.
pub fn with_vm<T>(world: &dyn typst::World, f: impl FnOnce(&mut typst::eval::Vm) -> T) -> T {
    use comemo::Track;
    use typst::engine::*;
    use typst::eval::*;
    use typst::foundations::*;
    use typst::introspection::*;

    let mut locator = Locator::default();
    let introspector = Introspector::default();
    let mut tracer = Tracer::new();
    let engine = Engine {
        world: world.track(),
        route: Route::default(),
        introspector: introspector.track(),
        locator: &mut locator,
        tracer: tracer.track_mut(),
    };

    // No styling/bibliography context is available for the evaluation.
    let context = Context::none();
    let mut vm = Vm::new(
        engine,
        context.track(),
        Scopes::new(Some(world.library())),
        Span::detached(),
    );
    f(&mut vm)
}
#[cfg(test)]
mod tests {
#[test]

View file

@ -67,7 +67,7 @@ fn filter_document_labels(
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
Some(SymbolInformation {
name: e.info.name.clone(),
name: e.info.name.to_string(),
kind: e.info.kind.clone().try_into().unwrap(),
tags: None,
deprecated: None,

View file

@ -31,5 +31,8 @@ flate2 = "1"
tar = "0.4"
reqwest = "^0.11"
[features]
no-content-hint = ["reflexo-typst/no-content-hint"]
[lints]
workspace = true

View file

@ -1,3 +1,5 @@
//! Https registry for tinymist.
pub use reflexo_typst::font::FontResolverImpl;
use std::path::Path;
@ -6,21 +8,18 @@ use std::{path::PathBuf, sync::Arc};
use reflexo_typst::vfs::system::SystemAccessModel;
use reflexo_typst::{CompilerFeat, CompilerUniverse, CompilerWorld};
use std::sync::OnceLock;
use log::error;
use parking_lot::Mutex;
use reflexo_typst::package::{DummyNotifier, Notifier, PackageError, PackageRegistry, PackageSpec};
use reflexo_typst::typst::{
diag::{eco_format, EcoString},
syntax::package::PackageVersion,
};
use reqwest::{
blocking::Response,
Certificate
};
use log::error;
use parking_lot::Mutex;
use reflexo_typst::package::{DummyNotifier, Notifier, PackageError, PackageSpec, PackageRegistry};
use reqwest::{blocking::Response, Certificate};
use std::sync::OnceLock;
/// Compiler feature for LSP universe and worlds without typst.ts to implement more for tinymist.
/// type trait of [`TypstSystemWorld`].
/// Compiler feature for LSP universe and worlds without typst.ts to implement
/// more for tinymist. type trait of [`TypstSystemWorld`].
#[derive(Debug, Clone, Copy)]
pub struct SystemCompilerFeatExtend;
@ -62,6 +61,7 @@ impl Default for HttpsRegistry {
}
impl HttpsRegistry {
/// Create a new registry.
pub fn new(cert_path: Option<PathBuf>) -> Self {
Self {
notifier: Arc::new(Mutex::<DummyNotifier>::default()),
@ -101,7 +101,6 @@ impl HttpsRegistry {
res
}
/// Make a package available in the on-disk cache.
pub fn prepare_package(&self, spec: &PackageSpec) -> Result<Arc<Path>, PackageError> {
let subdir = format!(
@ -220,11 +219,13 @@ fn threaded_http<T: Send + Sync>(
f: impl FnOnce(Result<Response, reqwest::Error>) -> T + Send + Sync,
) -> Option<T> {
std::thread::scope(|s| {
s.spawn(move || {
s.spawn(move || {
let client_builder = reqwest::blocking::Client::builder();
let client = if let Some(cert_path) = cert_path {
let cert = std::fs::read(cert_path).ok().and_then(|buf| Certificate::from_pem(&buf).ok());
let cert = std::fs::read(cert_path)
.ok()
.and_then(|buf| Certificate::from_pem(&buf).ok());
if let Some(cert) = cert {
client_builder.add_root_certificate(cert).build().unwrap()
} else {

View file

@ -20,8 +20,10 @@ use reflexo_typst::vfs::{system::SystemAccessModel, Vfs};
use reflexo_typst::TypstDict;
use serde::{Deserialize, Serialize};
mod https;
use https::{SystemCompilerFeatExtend, TypstSystemUniverseExtend, TypstSystemWorldExtend, HttpsRegistry};
pub mod https;
use https::{
HttpsRegistry, SystemCompilerFeatExtend, TypstSystemUniverseExtend, TypstSystemWorldExtend,
};
const ENV_PATH_SEP: char = if cfg!(windows) { ';' } else { ':' };
@ -81,12 +83,9 @@ pub struct CompileOnceArgs {
)]
pub creation_timestamp: Option<DateTime<Utc>>,
/// Path to CA certificate file for network access, especially for downloading typst packages.
#[clap(
long = "cert",
env = "TYPST_CERT",
value_name = "CERT_PATH"
)]
/// Path to CA certificate file for network access, especially for
/// downloading typst packages.
#[clap(long = "cert", env = "TYPST_CERT", value_name = "CERT_PATH")]
pub certification: Option<PathBuf>,
}
@ -102,8 +101,13 @@ impl CompileOnceArgs {
.collect();
let cert_path = self.certification.clone();
LspUniverseBuilder::build(entry, Arc::new(fonts), Arc::new(Prehashed::new(inputs)), cert_path)
.context("failed to create universe")
LspUniverseBuilder::build(
entry,
Arc::new(fonts),
Arc::new(Prehashed::new(inputs)),
cert_path,
)
.context("failed to create universe")
}
/// Get the entry options from the arguments.

View file

@ -32,7 +32,7 @@ use std::{
use anyhow::{anyhow, bail};
use log::{error, info, trace};
use reflexo_typst::{
debug_loc::DataSource, error::prelude::*, typst::prelude::EcoVec, vfs::notify::MemoryEvent,
debug_loc::DataSource, error::prelude::*, typst::prelude::*, vfs::notify::MemoryEvent,
world::EntryState, CompileReport, EntryReader, Error, ImmutPath, TaskInputs, TypstFont,
};
use sync_lsp::{just_future, QueryFuture};
@ -230,6 +230,17 @@ impl CompileHandler {
self.0.font_resolver.describe_font(&font)
}
/// Get the local packages and their descriptions.
fn local_packages(&self) -> EcoVec<PackageSpec> {
crate::tool::package::list_package_by_namespace(
&self.0.registry,
eco_format!("local"),
)
.into_iter()
.map(|(_, spec)| spec)
.collect()
}
/// Resolve periscope image at the given position.
fn periscope_at(
&self,

View file

@ -10,8 +10,9 @@ use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use task::TraceParams;
use tinymist_assets::TYPST_PREVIEW_HTML;
use tinymist_query::docs::PackageInfo;
use tinymist_query::{ExportKind, PageSelection};
use typst::diag::StrResult;
use typst::diag::{EcoString, StrResult};
use typst::syntax::package::{PackageSpec, VersionlessPackageSpec};
use super::server::*;
@ -498,19 +499,94 @@ impl LanguageState {
Err(method_not_found())
}
/// Get directory of local pacakges
pub fn resource_local_packages(
/// Get directory of pacakges
pub fn resource_package_dirs(&mut self, _arguments: Vec<JsonValue>) -> AnySchedulableResponse {
let snap = self.primary().snapshot().map_err(z_internal_error)?;
just_future(async move {
let snap = snap.receive().await.map_err(z_internal_error)?;
let paths = snap.world.registry.paths();
serde_json::to_value(paths).map_err(|e| internal_error(e.to_string()))
})
}
/// Get writable directory of pacakges
pub fn resource_local_package_dir(
&mut self,
_arguments: Vec<JsonValue>,
) -> AnySchedulableResponse {
let Some(data_dir) = dirs::data_dir() else {
return just_ok(JsonValue::Null);
};
let local_path = data_dir.join("typst/packages");
if !local_path.exists() {
return just_ok(JsonValue::Null);
}
let local_path = local_path.to_string_lossy().to_string();
just_ok(JsonValue::String(local_path))
let snap = self.primary().snapshot().map_err(z_internal_error)?;
just_future(async move {
let snap = snap.receive().await.map_err(z_internal_error)?;
let paths = snap
.world
.registry
.local_path()
.into_iter()
.collect::<Vec<_>>();
serde_json::to_value(paths).map_err(|e| internal_error(e.to_string()))
})
}
/// Get writable directory of pacakges
pub fn resource_package_by_ns(
&mut self,
mut arguments: Vec<JsonValue>,
) -> AnySchedulableResponse {
let ns = get_arg!(arguments[1] as EcoString);
let snap = self.primary().snapshot().map_err(z_internal_error)?;
just_future(async move {
let snap = snap.receive().await.map_err(z_internal_error)?;
let packages = tool::package::list_package_by_namespace(&snap.world.registry, ns)
.into_iter()
.map(PackageInfo::from)
.collect::<Vec<_>>();
serde_json::to_value(packages).map_err(|e| internal_error(e.to_string()))
})
}
/// Get the all valid symbols
pub fn resource_package_symbols(
&mut self,
mut arguments: Vec<JsonValue>,
) -> AnySchedulableResponse {
let info = get_arg!(arguments[1] as PackageInfo);
let snap = self.primary().snapshot().map_err(z_internal_error)?;
just_future(async move {
let snap = snap.receive().await.map_err(z_internal_error)?;
let w = snap.world.as_ref();
let symbols = tinymist_query::docs::list_symbols(w, &info)
.map_err(map_string_err("failed to list symbols"))
.map_err(z_internal_error)?;
serde_json::to_value(symbols).map_err(|e| internal_error(e.to_string()))
})
}
/// Get the all symbol docs
pub fn resource_package_docs(
&mut self,
mut arguments: Vec<JsonValue>,
) -> AnySchedulableResponse {
let info = get_arg!(arguments[1] as PackageInfo);
let handle = self.primary().handle.clone();
let snap = handle.snapshot().map_err(z_internal_error)?;
just_future(async move {
let snap = snap.receive().await.map_err(z_internal_error)?;
let w = snap.world.as_ref();
let res = handle.run_analysis(w, |a| {
let symbols = tinymist_query::docs::generate_md_docs(a, w, &info)
.map_err(map_string_err("failed to list symbols"))
.map_err(z_internal_error)?;
serde_json::to_value(symbols).map_err(|e| internal_error(e.to_string()))
});
res.map_err(|e| internal_error(e.to_string()))?
})
}
}

View file

@ -262,7 +262,11 @@ impl LanguageState {
.with_resource("/symbols", State::resource_symbols)
.with_resource("/preview/index.html", State::resource_preview_html)
.with_resource("/tutorial", State::resource_tutoral)
.with_resource("/dirs/local-packages", State::resource_local_packages);
.with_resource("/package/by-namespace", State::resource_package_by_ns)
.with_resource("/package/symbol", State::resource_package_symbols)
.with_resource("/package/docs", State::resource_package_docs)
.with_resource("/dir/package", State::resource_package_dirs)
.with_resource("/dir/package/local", State::resource_local_package_dir);
// todo: generalize me
provider.args.add_commands(

View file

@ -4,8 +4,9 @@ use std::io::Write;
use std::path::{Path, PathBuf};
use reflexo_typst::{Bytes, ImmutPath, TypstFileId};
use tinymist_query::docs::get_manifest;
use typst::diag::{bail, eco_format, FileError, FileResult, StrResult};
use typst::syntax::package::{PackageManifest, PackageSpec, TemplateInfo};
use typst::syntax::package::{PackageSpec, TemplateInfo};
use typst::syntax::VirtualPath;
use typst::World;
@ -31,10 +32,7 @@ pub fn get_entry(world: &LspWorld, tmpl: TemplateSource) -> StrResult<Bytes> {
let TemplateSource::Package(spec) = tmpl;
let toml_id = TypstFileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
// Parse the manifest.
let manifest = parse_manifest(world, toml_id)?;
manifest.validate(&spec)?;
let manifest = get_manifest(world, toml_id)?;
// Ensure that it is indeed a template.
let Some(tmpl_info) = &manifest.template else {
@ -56,10 +54,7 @@ pub fn init(world: &LspWorld, task: InitTask) -> StrResult<PathBuf> {
.unwrap_or_else(|| Path::new(spec.name.as_str()).into());
let toml_id = TypstFileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
// Parse the manifest.
let manifest = parse_manifest(world, toml_id)?;
manifest.validate(&spec)?;
let manifest = get_manifest(world, toml_id)?;
// Ensure that it is indeed a template.
let Some(template) = &manifest.template else {
@ -78,19 +73,6 @@ pub fn init(world: &LspWorld, task: InitTask) -> StrResult<PathBuf> {
Ok(entry_point)
}
/// Parses the manifest of the package located at `package_path`.
fn parse_manifest(world: &LspWorld, toml_id: TypstFileId) -> StrResult<PackageManifest> {
let toml_data = world
.file(toml_id)
.map_err(|err| eco_format!("failed to read package manifest ({})", err))?;
let string = std::str::from_utf8(&toml_data)
.map_err(|err| eco_format!("package manifest is not valid UTF-8 ({})", err))?;
toml::from_str(string)
.map_err(|err| eco_format!("package manifest is malformed ({})", err.message()))
}
/// Creates the project directory with the template's contents and returns the
/// path at which it was created.
fn scaffold_project(

View file

@ -1,7 +1,11 @@
//! Package management tools.
use reflexo_typst::package::PackageRegistry;
use typst::diag::{eco_format, StrResult};
use std::path::PathBuf;
use reflexo_typst::package::{PackageRegistry, PackageSpec};
use reflexo_typst::typst::prelude::*;
use tinymist_world::https::HttpsRegistry;
use typst::diag::{eco_format, EcoString, StrResult};
use typst::syntax::package::{PackageVersion, VersionlessPackageSpec};
use crate::LspWorld;
@ -40,3 +44,58 @@ pub fn determine_latest_version(
.ok_or_else(|| eco_format!("please specify the desired version"))
}
}
/// Get the packages in namespaces and their descriptions.
pub fn list_package_by_namespace(
registry: &HttpsRegistry,
ns: EcoString,
) -> EcoVec<(PathBuf, PackageSpec)> {
// search packages locally. We only search in the data
// directory and not the cache directory, because the latter is not
// intended for storage of local packages.
let mut packages = eco_vec![];
log::info!(
"searching for packages in namespace {ns} in paths {:?}",
registry.paths()
);
for dir in registry.paths() {
let local_path = dir.join(ns.as_str());
if !local_path.exists() || !local_path.is_dir() {
continue;
}
// namespace/package_name/version
// 2. package_name
let package_names = std::fs::read_dir(local_path).unwrap();
for package in package_names {
let package = package.unwrap();
if !package.file_type().unwrap().is_dir() {
continue;
}
if package.file_name().to_string_lossy().starts_with('.') {
continue;
}
// 3. version
let versions = std::fs::read_dir(package.path()).unwrap();
for version in versions {
let version = version.unwrap();
if !version.file_type().unwrap().is_dir() {
continue;
}
if version.file_name().to_string_lossy().starts_with('.') {
continue;
}
let path = version.path();
let version = version.file_name().to_string_lossy().parse().unwrap();
let spec = PackageSpec {
namespace: ns.clone(),
name: package.file_name().to_string_lossy().into(),
version,
};
packages.push((path, spec));
}
}
}
packages
}

View file

@ -12,8 +12,8 @@ repository.workspace = true
[dependencies]
typst-syntax.workspace = true
tinymist-query.workspace = true
tinymist-world.workspace = true
tinymist-analysis.workspace = true
tinymist-world = { workspace = true, features = ["no-content-hint"] }
ecow.workspace = true
comemo.workspace = true
@ -25,5 +25,5 @@ base64.workspace = true
insta.workspace = true
regex.workspace = true
[lints]
workspace = true
# [lints]
# workspace = true

View file

@ -1,10 +1,11 @@
//! # Typlite
mod error;
mod library;
pub mod library;
pub mod scopes;
mod value;
pub mod value;
use std::fmt::Write;
use std::sync::Arc;
pub use error::*;
@ -30,6 +31,10 @@ pub use tinymist_world::CompileOnceArgs;
pub struct Typlite {
/// The universe to use for the conversion.
world: Arc<LspWorld>,
/// library to use for the conversion.
library: Option<Arc<Scopes<Value>>>,
/// Documentation style to use for annotating the document.
do_annotate: bool,
/// Whether to enable GFM (GitHub Flavored Markdown) features.
gfm: bool,
}
@ -40,11 +45,31 @@ impl Typlite {
/// This is useful when you have a [`Source`] instance and you can avoid
/// reparsing the content.
pub fn new(world: Arc<LspWorld>) -> Self {
Self { world, gfm: false }
Self {
world,
library: None,
do_annotate: false,
gfm: false,
}
}
/// Set library to use for the conversion.
pub fn with_library(mut self, library: Arc<Scopes<Value>>) -> Self {
self.library = Some(library);
self
}
/// Annotate the elements for identification.
pub fn annotate_elements(mut self, do_annotate: bool) -> Self {
self.do_annotate = do_annotate;
self
}
/// Convert the content to a markdown string.
pub fn convert(self) -> Result<EcoString> {
static DEFAULT_LIB: std::sync::LazyLock<Arc<Scopes<Value>>> =
std::sync::LazyLock::new(|| Arc::new(library::library()));
let main = self.world.entry_state().main();
let current = main.ok_or("no main file in workspace")?;
let world = self.world;
@ -56,7 +81,13 @@ impl Typlite {
let worker = TypliteWorker {
current,
gfm: self.gfm,
scopes: Arc::new(library::library()),
do_annotate: self.do_annotate,
list_depth: 0,
scopes: self
.library
.as_ref()
.unwrap_or_else(|| &*DEFAULT_LIB)
.clone(),
world,
};
@ -64,12 +95,15 @@ impl Typlite {
}
}
/// Typlite worker
#[derive(Clone)]
struct TypliteWorker {
pub struct TypliteWorker {
current: FileId,
gfm: bool,
do_annotate: bool,
scopes: Arc<Scopes<Value>>,
world: Arc<LspWorld>,
list_depth: usize,
}
impl TypliteWorker {
@ -110,7 +144,8 @@ impl TypliteWorker {
}
// Text nodes
Text | Space | Linebreak | Parbreak => Self::str(node),
Text | Space | Parbreak => Self::str(node),
Linebreak => Self::char('\n'),
// Semantic nodes
Escape => Self::escape(node),
@ -394,7 +429,22 @@ impl TypliteWorker {
}
fn list_item(&mut self, node: &SyntaxNode) -> Result<Value> {
self.reduce(node)
let mut s = EcoString::new();
let list_item = node.cast::<ast::ListItem>().unwrap();
s.push_str("- ");
if self.do_annotate {
let _ = write!(s, "<!-- typlite:begin:list-item {} -->", self.list_depth);
self.list_depth += 1;
}
s.push_str(&Self::value(self.eval(list_item.body().to_untyped())?));
if self.do_annotate {
self.list_depth -= 1;
let _ = write!(s, "<!-- typlite:end:list-item {} -->", self.list_depth);
}
Ok(Value::Content(s))
}
fn enum_item(&mut self, node: &SyntaxNode) -> Result<Value> {
@ -460,7 +510,7 @@ impl TypliteWorker {
let path = include.source();
let src =
tinymist_query::syntax::find_source_by_expr(self.world.as_ref(), self.current, path)
tinymist_analysis::import::find_source_by_expr(self.world.as_ref(), self.current, path)
.ok_or_else(|| format!("failed to find source on path {path:?}"))?;
self.clone().sub_file(src).map(Value::Content)

View file

@ -1,3 +1,5 @@
//! # Typlite Library
use super::*;
use ecow::eco_format;
use value::*;
@ -6,11 +8,11 @@ pub fn library() -> Scopes<Value> {
let mut scopes = Scopes::new();
scopes.define("link", link as RawFunc);
scopes.define("kbd", kbd as RawFunc);
// todo: how to import this function correctly?
scopes.define("cross-link", cross_link as RawFunc);
scopes.define("md-alter", md_alter as RawFunc);
scopes.define("image", image as RawFunc);
scopes.define("figure", figure as RawFunc);
scopes.define("raw", raw as RawFunc);
scopes.define("pad", pad as RawFunc);
scopes.define("note-box", note as RawFunc);
scopes.define("tip-box", tip as RawFunc);
scopes.define("important-box", important_box as RawFunc);
@ -52,6 +54,18 @@ pub fn figure(mut args: Args) -> Result<Value> {
}
}
/// Evaluate a raw.
pub fn raw(mut args: Args) -> Result<Value> {
let content = get_pos_named!(args, content: EcoString);
Ok(Value::Content(eco_format!("```` {content} ````")))
}
/// Evaluate a padded content.
pub fn pad(mut args: Args) -> Result<Value> {
Ok(get_pos_named!(args, path: Value))
}
/// Evaluate a `kbd` element.
pub fn kbd(mut args: Args) -> Result<Value> {
let key = get_pos_named!(args, key: EcoString);
@ -59,19 +73,6 @@ pub fn kbd(mut args: Args) -> Result<Value> {
Ok(Value::Content(eco_format!("<kbd>{key}</kbd>")))
}
/// Evaluate a `cross-link`.
pub fn cross_link(mut args: Args) -> Result<Value> {
let dest = get_pos_named!(args, dest: EcoString);
let body = get_pos_named!(args, body: Content);
let dest = std::path::Path::new(dest.as_str()).with_extension("html");
Ok(Value::Content(eco_format!(
"[{body}](https://myriad-dreamin.github.io/tinymist/{dest})",
dest = dest.to_string_lossy()
)))
}
/// Evaluate a markdown alteration.
pub fn md_alter(mut args: Args) -> Result<Value> {
let _: () = get_pos_named!(args, left: ());

View file

@ -9,7 +9,7 @@ use typst_syntax::Source;
use super::*;
fn conv(s: &str) -> EcoString {
fn conv_(s: &str, for_docs: bool) -> EcoString {
static FONT_RESOLVER: LazyLock<Result<Arc<FontResolverImpl>>> = LazyLock::new(|| {
Ok(Arc::new(
LspUniverseBuilder::resolve_fonts(CompileFontArgs::default())
@ -32,7 +32,8 @@ fn conv(s: &str) -> EcoString {
.unwrap();
let world = universe.snapshot();
let res = Typlite::new(Arc::new(world)).convert().unwrap();
let converter = Typlite::new(Arc::new(world)).annotate_elements(for_docs);
let res = converter.convert().unwrap();
static REG: OnceLock<Regex> = OnceLock::new();
let reg = REG.get_or_init(|| Regex::new(r#"data:image/svg\+xml;base64,([^"]+)"#).unwrap());
let res = reg.replace(&res, |_captures: &regex::Captures| {
@ -47,6 +48,14 @@ fn conv(s: &str) -> EcoString {
res.into()
}
fn conv(s: &str) -> EcoString {
conv_(s, false)
}
fn conv_docs(s: &str) -> EcoString {
conv_(s, true)
}
#[test]
fn test_converted() {
insta::assert_snapshot!(conv(r###"
@ -86,3 +95,75 @@ $
$
"###), @r###"<p align="center"><img src="data:image-hash/svg+xml;base64,redacted" alt="typst-block" /></p>"###);
}
#[test]
fn test_converted_docs() {
insta::assert_snapshot!(conv_docs(r###"
These again are dictionaries with the keys
- `description` (optional): The description for the argument.
- `types` (optional): A list of accepted argument types.
- `default` (optional): Default value for this argument.
See @@show-module() for outputting the results of this function.
- content (string): Content of `.typ` file to analyze for docstrings.
- name (string): The name for the module.
- label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
- require-all-parameters (boolean): Require that all parameters of a
functions are documented and fail if some are not.
- scope (dictionary): A dictionary of definitions that are then available
in all function and parameter descriptions.
- preamble (string): Code to prepend to all code snippets shown with `#example()`.
This can for instance be used to import something from the scope.
-> string
"###), @r###"
These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`types` (optional): A list of accepted argument types.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`default` (optional): Default value for this argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
- <!-- typlite:begin:list-item 0 -->content (string): Content of `.typ` file to analyze for docstrings.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->require-all-parameters (boolean): Require that all parameters of a
functions are documented and fail if some are not.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->scope (dictionary): A dictionary of definitions that are then available
in all function and parameter descriptions.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->preamble (string): Code to prepend to all code snippets shown with `#example()`.
This can for instance be used to import something from the scope.<!-- typlite:end:list-item 0 -->
-> string
"###);
insta::assert_snapshot!(conv_docs(r###"
These again are dictionaries with the keys
- `description` (optional): The description for the argument.
See @@show-module() for outputting the results of this function.
- name (string): The name for the module.
- label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
- nested something
- nested something 2
-> string
"###), @r###"
These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
- <!-- typlite:begin:list-item 1 -->nested something<!-- typlite:end:list-item 1 -->
- <!-- typlite:begin:list-item 1 -->nested something 2<!-- typlite:end:list-item 1 --><!-- typlite:end:list-item 0 -->
-> string
"###);
}

View file

@ -1,3 +1,5 @@
//! # Typlite Values
use core::fmt;
use crate::*;
@ -88,6 +90,7 @@ impl<'a> Args<'a> {
}
}
#[macro_export]
macro_rules! get_pos_named {
(
$args:expr,
@ -97,8 +100,9 @@ macro_rules! get_pos_named {
$args.parse::<$ty>(raw)?
}};
}
pub(crate) use get_pos_named;
pub use get_pos_named;
#[macro_export]
macro_rules! get_named {
(
$args:expr,
@ -128,7 +132,7 @@ macro_rules! get_named {
}
}};
}
pub(crate) use get_named;
pub use get_named;
/// Evaluate an expression.
pub trait Eval<'a>: Sized {

View file

@ -46,6 +46,11 @@
"name": "DevKit View",
"when": "ext.tinymistActivated && ext.tinymistDevKit"
},
{
"id": "tinymist.package-view",
"name": "Package",
"when": "ext.tinymistActivated"
},
{
"id": "tinymist.preview.content-preview",
"type": "webview",

View file

@ -1,10 +1,10 @@
import * as vscode from "vscode";
import * as path from "path";
import { readFile, writeFile } from "fs/promises";
import { getFocusingFile, getLastFocusingDoc } from "./extension";
import { tinymist } from "./lsp";
import { extensionState, ExtensionContext } from "./state";
async function loadHTMLFile(context: vscode.ExtensionContext, relativePath: string) {
async function loadHTMLFile(context: ExtensionContext, relativePath: string) {
const filePath = path.resolve(context.extensionPath, relativePath);
const fileContents = await readFile(filePath, "utf8");
return fileContents;
@ -25,7 +25,7 @@ export interface PackageData {
};
}
export function getUserPackageData(context: vscode.ExtensionContext) {
export function getUserPackageData(context: ExtensionContext) {
const defaultPackageData: Versioned<PackageData> = {
version: USER_PACKAGE_VERSION,
data: {},
@ -106,7 +106,7 @@ export const fontsExportDefaultConfigure: fontsExportConfigure = {
},
};
export function getFontsExportConfigure(context: vscode.ExtensionContext) {
export function getFontsExportConfigure(context: ExtensionContext) {
const defaultConfigure: Versioned<fontsExportConfigure> = {
version: FONTS_EXPORT_CONFIGURE_VERSION,
data: fontsExportDefaultConfigure,
@ -120,14 +120,19 @@ export function getFontsExportConfigure(context: vscode.ExtensionContext) {
return configure;
}
export type EditorToolName = "template-gallery" | "tracing" | "summary" | "symbol-view";
export async function activateEditorTool(context: vscode.ExtensionContext, tool: EditorToolName) {
const Standalone: Partial<Record<EditorToolName, boolean>> = {
"symbol-view": true,
} as const;
export type EditorToolName = "template-gallery" | "tracing" | "summary" | "symbol-view" | "docs";
export async function editorTool(context: ExtensionContext, tool: EditorToolName, opts?: any) {
// Create and show a new WebView
const title = {
"template-gallery": "Template Gallery",
"symbol-view": "Symbol View",
tracing: "Tracing",
summary: "Summary",
docs: `@${opts?.pkg?.namespace}/${opts?.pkg?.name}:${opts?.pkg?.version} (Docs)`,
}[tool];
const panel = vscode.window.createWebviewPanel(
`tinymist-${tool}`,
@ -139,16 +144,17 @@ export async function activateEditorTool(context: vscode.ExtensionContext, tool:
{
enableScripts: true,
retainContextWhenHidden: true,
enableFindWidget: tool === "docs",
},
);
await activateEditorToolAt(context, tool, panel);
await editorToolAt(context, tool, panel, opts);
}
export class SymbolViewProvider implements vscode.WebviewViewProvider {
static readonly Name = "tinymist.side-symbol-view";
constructor(private context: vscode.ExtensionContext) {}
constructor(private context: ExtensionContext) {}
public resolveWebviewView(
webviewView: vscode.WebviewView,
@ -160,14 +166,15 @@ export class SymbolViewProvider implements vscode.WebviewViewProvider {
enableScripts: true,
};
activateEditorToolAt(this.context, "symbol-view", webviewView);
editorToolAt(this.context, "symbol-view", webviewView);
}
}
async function activateEditorToolAt(
context: vscode.ExtensionContext,
tool: "template-gallery" | "tracing" | "summary" | "symbol-view",
async function editorToolAt(
context: ExtensionContext,
tool: EditorToolName,
panel: vscode.WebviewView | vscode.WebviewPanel,
opts?: any,
) {
const dispose = () => {
// if has dispose method
@ -220,7 +227,7 @@ async function activateEditorToolAt(
break;
}
case "editText": {
const activeDocument = getLastFocusingDoc();
const activeDocument = extensionState.getFocusingDoc();
if (!activeDocument) {
await vscode.window.showErrorMessage("No focusing document");
return;
@ -333,7 +340,8 @@ async function activateEditorToolAt(
disposed = true;
});
let html = await loadHTMLFile(context, "./out/editor-tools/index.html");
const appDir = Standalone[tool] ? tool : "default";
let html = await loadHTMLFile(context, `./out/editor-tools/${appDir}/index.html`);
// packageData
html = html.replace(
@ -350,7 +358,7 @@ async function activateEditorToolAt(
html = html.replace(":[[preview:FavoritePlaceholder]]:", btoa(packageData));
break;
case "tracing": {
const focusingFile = getFocusingFile();
const focusingFile = extensionState.getFocusingFile();
if (focusingFile === undefined) {
await vscode.window.showErrorMessage("No focusing typst file");
return;
@ -406,6 +414,10 @@ async function activateEditorToolAt(
html = html.replace(":[[preview:SymbolInformation]]:", btoa(symbolInfo));
break;
}
case "docs": {
html = html.replace(":[[preview:DocContent]]:", btoa(encodeURIComponent(opts.content)));
break;
}
}
panel.webview.html = html;
@ -420,7 +432,7 @@ async function fetchSummaryInfo(): Promise<[any | undefined, any | undefined]> {
let res: [any | undefined, any | undefined] = [undefined, undefined];
for (const to of waitTimeList) {
const focusingFile = getFocusingFile();
const focusingFile = extensionState.getFocusingFile();
if (focusingFile === undefined) {
await vscode.window.showErrorMessage("No focusing typst file");
return res;

View file

@ -1,7 +1,7 @@
// The module 'vscode' contains the VS Code extensibility API
// Import the module and reference it with the alias vscode in your code below
import * as vscode from "vscode";
import { previewActivate, previewDeactivate } from "./preview";
import { previewActivate, previewDeactivate } from "./features/preview";
// This method is called when your extension is activated
// Your extension is activated the very first time the command is executed

View file

@ -20,25 +20,26 @@ import { loadTinymistConfig, substVscodeVarsInConfig } from "./config";
import {
EditorToolName,
SymbolViewProvider as SymbolViewProvider,
activateEditorTool,
editorTool,
getUserPackageData,
} from "./editor-tools";
import { triggerStatusBar, wordCountItemProcess } from "./ui-extends";
import { setIsTinymist as previewSetIsTinymist } from "./preview-compat";
import { setIsTinymist as previewSetIsTinymist } from "./features/preview-compat";
import {
previewActivate,
previewDeactivate,
previewPreload,
previewProcessOutline,
} from "./preview";
} from "./features/preview";
import { commandCreateLocalPackage, commandOpenLocalPackage } from "./package-manager";
import { activeTypstEditor, DisposeList, getSensibleTextEditorColumn } from "./util";
import { client, getClient, setClient, tinymist } from "./lsp";
import { taskActivate } from "./tasks";
import { taskActivate } from "./features/tasks";
import { onEnterHandler } from "./lsp.on-enter";
import { extensionState } from "./state";
import { devKitActivate } from "./dev-kit";
import { labelViewActivate } from "./label";
import { devKitFeatureActivate } from "./features/dev-kit";
import { labelFeatureActivate } from "./features/label";
import { packageFeatureActivate } from "./features/package";
export async function activate(context: ExtensionContext): Promise<void> {
try {
@ -68,12 +69,13 @@ export async function doActivate(context: ExtensionContext): Promise<void> {
const client = initClient(context, config);
setClient(client);
// Activates features
labelViewActivate(context);
labelFeatureActivate(context);
packageFeatureActivate(context);
if (extensionState.features.task) {
taskActivate(context);
}
if (extensionState.features.devKit) {
devKitActivate(context);
devKitFeatureActivate(context);
}
if (extensionState.features.preview) {
const typstPreviewExtension = vscode.extensions.getExtension("mgt19937.typst-preview");
@ -224,15 +226,15 @@ async function startClient(client: LanguageClient, context: ExtensionContext): P
);
context.subscriptions.push(
vscode.workspace.onDidCloseTextDocument((doc: vscode.TextDocument) => {
if (focusingDoc === doc) {
focusingDoc = undefined;
if (extensionState.mut.focusingDoc === doc) {
extensionState.mut.focusingDoc = undefined;
commandActivateDoc(undefined);
}
}),
);
const editorToolCommand = (tool: EditorToolName) => async () => {
await activateEditorTool(context, tool);
await editorTool(context, tool);
};
const initTemplateCommand =
@ -651,15 +653,6 @@ async function initTemplate(context: vscode.ExtensionContext, inPlace: boolean,
}
}
let focusingFile: string | undefined = undefined;
let focusingDoc: vscode.TextDocument | undefined = undefined;
export function getFocusingFile() {
return focusingFile;
}
export function getLastFocusingDoc() {
return focusingDoc;
}
async function commandActivateDoc(doc: vscode.TextDocument | undefined): Promise<void> {
await commandActivateDocPath(doc, doc?.uri.fsPath);
}
@ -669,15 +662,15 @@ async function commandActivateDocPath(
fsPath: string | undefined,
): Promise<void> {
// console.log("focus main", fsPath, new Error().stack);
focusingFile = fsPath;
extensionState.mut.focusingFile = fsPath;
if (fsPath) {
focusingDoc = doc;
extensionState.mut.focusingDoc = doc;
}
if (focusingDoc?.isClosed) {
focusingDoc = undefined;
if (extensionState.mut.focusingDoc?.isClosed) {
extensionState.mut.focusingDoc = undefined;
}
// remove the status bar until the last focusing file is closed
triggerStatusBar(!!(fsPath || focusingDoc?.isClosed === false));
triggerStatusBar(!!(fsPath || extensionState.mut.focusingDoc?.isClosed === false));
await client?.sendRequest("workspace/executeCommand", {
command: "tinymist.focusMain",
arguments: [fsPath],

View file

@ -1,15 +1,15 @@
import * as vscode from "vscode";
export function devKitActivate(context: vscode.ExtensionContext) {
export function devKitFeatureActivate(context: vscode.ExtensionContext) {
vscode.commands.executeCommand("setContext", "ext.tinymistDevKit", true);
const devKitProvider = new DevKitProvider();
const devKitProvider = new DevKitViewProvider();
context.subscriptions.push(
vscode.window.registerTreeDataProvider("tinymist.dev-kit", devKitProvider),
);
}
class DevKitProvider implements vscode.TreeDataProvider<DevKitItem> {
class DevKitViewProvider implements vscode.TreeDataProvider<DevKitItem> {
constructor() {}
refresh(): void {}

View file

@ -1,9 +1,8 @@
import * as vscode from "vscode";
import { tinymist } from "./lsp";
import { WorkspaceSymbol } from "vscode-languageclient";
import { tinymist } from "../lsp";
export function labelViewActivate(context: vscode.ExtensionContext) {
const labelViewProvider = new LabelViewProviderProvider();
export function labelFeatureActivate(context: vscode.ExtensionContext) {
const labelViewProvider = new LabelViewProvider();
context.subscriptions.push(
vscode.window.registerTreeDataProvider("tinymist.label-view", labelViewProvider),
// tinymist.syncLabel
@ -13,7 +12,7 @@ export function labelViewActivate(context: vscode.ExtensionContext) {
);
}
class LabelViewProviderProvider implements vscode.TreeDataProvider<LabelViewItem> {
class LabelViewProvider implements vscode.TreeDataProvider<LabelViewItem> {
changeTreeDataEvent = new vscode.EventEmitter<LabelViewItem | undefined>();
onDidChangeTreeData = this.changeTreeDataEvent.event;

View file

@ -0,0 +1,261 @@
import * as vscode from "vscode";
import { PackageInfo, SymbolInfo, tinymist } from "../lsp";
import { getTargetViewColumn } from "../util";
import { editorTool } from "../editor-tools";
/**
 * Activates the package feature: registers the package tree view and the
 * internal command that renders a package's documentation in an editor tool.
 */
export function packageFeatureActivate(context: vscode.ExtensionContext) {
  // Command handler: fetch the generated docs for `pkg`, render them to HTML
  // with the built-in markdown renderer, then show them in the "docs" tool.
  const showPackageDocs = async (pkg: PackageInfo) => {
    try {
      const docs = await tinymist.getResource("/package/docs", pkg);
      const content = (await vscode.commands.executeCommand(
        "markdown.api.render",
        docs,
      )) as string;
      await editorTool(context, "docs", { pkg, content });
    } catch (e) {
      console.error("show package docs error", e);
      vscode.window.showErrorMessage(`Failed to show package documentation: ${e}`);
    }
  };

  context.subscriptions.push(
    vscode.window.registerTreeDataProvider("tinymist.package-view", new PackageViewProvider()),
    vscode.commands.registerCommand("tinymist.showPackageDocsInternal", showPackageDocs),
  );
}
/**
 * Tree data provider backing the "tinymist.package-view" view.
 *
 * Tree layout:
 *   commands          – create/open local-package actions
 *   @<namespace>      – one entry per known namespace
 *     <package name>  – packages grouped by name
 *       <version>     – one entry per installed version
 *         action items plus a "symbols" subtree
 */
class PackageViewProvider implements vscode.TreeDataProvider<vscode.TreeItem> {
  constructor() {}

  refresh(): void {}

  getTreeItem(element: vscode.TreeItem): vscode.TreeItem {
    return element;
  }

  getChildren(element?: vscode.TreeItem): Thenable<vscode.TreeItem[]> {
    if (element && CommandsItem.is(element)) {
      return this.getCommands();
    } else if (element && NamespaceItem.is(element)) {
      return this.getNsPackages(element.namespace);
    } else if (element && PackageGroupItem.is(element)) {
      return Promise.resolve(element.packages);
    } else if (element && PackageItem.is(element)) {
      return this.getPackageActions(element);
    } else if (element && SymbolsItem.is(element)) {
      return this.getPackageSymbols(element);
    } else if (element && SymbolItem.is(element)) {
      // Leaf symbols may carry no children array at all.
      if (!element.info.children) {
        return Promise.resolve([]);
      }
      return Promise.resolve(createPackageSymbols(element.pkg, element.info.children));
    } else if (element) {
      return Promise.resolve([]);
    }
    // Root level: the command folder plus the known namespaces.
    return Promise.resolve([
      new CommandsItem(),
      ...["preview", "local"].map((ns) => new NamespaceItem(ns)),
    ]);
  }

  /** Action items shown under the "commands" folder. */
  private async getCommands(): Promise<CommandItem[]> {
    return [
      new CommandItem({
        title: "Create Local Package",
        command: "tinymist.createLocalPackage",
        tooltip: `Create a Typst local package.`,
      }),
      new CommandItem({
        title: "Open Local Package",
        command: "tinymist.openLocalPackage",
        tooltip: `Open a Typst local package.`,
      }),
    ];
  }

  /** Lists the packages of a namespace, grouped by package name. */
  private async getNsPackages(ns: string): Promise<PackageGroupItem[]> {
    const packages = await tinymist.getResource("/package/by-namespace", ns);
    // Collect all installed versions of the same package name into one group.
    const groups = new Map<string, PackageItem[]>();
    for (const pkg of packages) {
      const group = groups.get(pkg.name) || [];
      group.push(new PackageItem(pkg));
      groups.set(pkg.name, group);
    }
    return Array.from(groups.entries()).map(([name, packages]) => {
      return new PackageGroupItem(ns, name, packages);
    });
  }

  /** Fetches the exported symbols of a package and wraps them as tree items. */
  async getPackageSymbols(element: SymbolsItem): Promise<vscode.TreeItem[]> {
    return createPackageSymbols(
      element.pkg,
      await tinymist.getResource("/package/symbol", element.pkg.pkg),
    );
  }

  /** Per-version actions (docs, open, reveal) plus the symbols subtree. */
  private async getPackageActions(pkg: PackageItem): Promise<vscode.TreeItem[]> {
    return [
      new CommandItem({
        title: "Documentation",
        command: "tinymist.showPackageDocsInternal",
        arguments: [pkg.pkg],
        tooltip: `Open package documentation to side.`,
      }),
      new CommandItem({
        title: "Open",
        command: "vscode.openFolder",
        arguments: [vscode.Uri.file(pkg.pkg.path), { forceNewWindow: true }],
        tooltip: `Open the package directory in editor.`,
      }),
      new CommandItem({
        title: "Reveal in File Explorer",
        command: "revealFileInOS",
        arguments: [vscode.Uri.file(pkg.pkg.path)],
        tooltip: `Reveal the directory of the package in File Explorer.`,
      }),
      new SymbolsItem(pkg),
    ];
  }
}
/** Collapsible "commands" folder shown at the root of the package view. */
export class CommandsItem extends vscode.TreeItem {
  contextValue = "package-commands";

  /** Type guard keyed on the item's context value. */
  static is(element: vscode.TreeItem): element is CommandsItem {
    return element.contextValue === "package-commands";
  }

  constructor(public description = "") {
    super(`commands`, vscode.TreeItemCollapsibleState.Collapsed);
    this.tooltip = `package commands`;
  }
}
/** Leaf item that runs a VS Code command when selected. */
export class CommandItem extends vscode.TreeItem {
  iconPath = new vscode.ThemeIcon("tools");
  contextValue = "package-command";

  constructor(
    public readonly command: vscode.Command,
    public description = "",
  ) {
    super(command.title, vscode.TreeItemCollapsibleState.None);
    this.tooltip = this.command.tooltip || ``;
  }
}
/** Tree item for a package namespace (e.g. `@preview`, `@local`). */
export class NamespaceItem extends vscode.TreeItem {
  contextValue = "package-namespace-item";

  /** Type guard keyed on the item's context value. */
  static is(element: vscode.TreeItem): element is NamespaceItem {
    return element.contextValue === "package-namespace-item";
  }

  constructor(
    public readonly namespace: string,
    public description = "",
  ) {
    super(`@${namespace}`, vscode.TreeItemCollapsibleState.Collapsed);
    this.tooltip = `namespace: ${namespace}`;
  }
}
/** Groups every installed version of one package (`@namespace/name`). */
export class PackageGroupItem extends vscode.TreeItem {
  contextValue = "package-group-item";

  /** Type guard keyed on the item's context value. */
  static is(element: vscode.TreeItem): element is PackageGroupItem {
    return element.contextValue === "package-group-item";
  }

  constructor(
    public readonly namespace: string,
    public readonly name: string,
    public readonly packages: PackageItem[],
    public description = `@${namespace}/${name}`,
  ) {
    super(`${name}`, vscode.TreeItemCollapsibleState.Collapsed);
    this.tooltip = `package: @${namespace}/${name}`;
  }
}
/** One installed version of a package; expands to actions and symbols. */
export class PackageItem extends vscode.TreeItem {
  contextValue = "package-item";

  /** Type guard keyed on the item's context value. */
  static is(element: vscode.TreeItem): element is PackageItem {
    return element.contextValue === "package-item";
  }

  constructor(
    public readonly pkg: PackageInfo,
    public description = "",
  ) {
    super(`${pkg.version}`, vscode.TreeItemCollapsibleState.Collapsed);
    this.tooltip = `package: @${pkg.namespace}/${pkg.name}:${pkg.version}`;
  }

  /** Fully qualified package id, e.g. `@local/mypkg:0.1.0`. */
  pkgId() {
    return `@${this.pkg.namespace}/${this.pkg.name}:${this.pkg.version}`;
  }
}
/** Collapsible "symbols" entry shown under a package version. */
export class SymbolsItem extends vscode.TreeItem {
  contextValue = "package-symbols-item";

  /** Type guard keyed on the item's context value. */
  static is(element: vscode.TreeItem): element is SymbolsItem {
    return element.contextValue === "package-symbols-item";
  }

  constructor(
    public readonly pkg: PackageItem,
    public description = "",
  ) {
    super(`symbols`, vscode.TreeItemCollapsibleState.Collapsed);
    this.tooltip = `symbols in package: ${pkg.pkgId()}`;
  }
}
/** Tree item for a single symbol exported by a package. */
export class SymbolItem extends vscode.TreeItem {
  /** Type guard keyed on the item's context value. */
  static is(element: vscode.TreeItem): element is SymbolItem {
    return element.contextValue === "package-symbol-item";
  }

  constructor(
    public readonly pkg: PackageItem,
    public readonly info: SymbolInfo,
    public description = "",
  ) {
    // `children` may be absent on leaf symbols — the tree provider already
    // guards `!element.info.children` — so do not assume an array here.
    const state =
      (info.children?.length ?? 0) > 0
        ? vscode.TreeItemCollapsibleState.Collapsed
        : vscode.TreeItemCollapsibleState.None;
    super(`${info.name}`, state);
    this.tooltip = `a symbol \`${info.name}\` in package: ${pkg.pkgId()}`;
    // The kind string doubles as the codicon suffix, e.g. "symbol-function".
    this.iconPath = new vscode.ThemeIcon("symbol-" + info.kind);
  }

  contextValue = "package-symbol-item";
}
/** Wraps raw symbol infos into tree items, ordered by kind, then by name. */
function createPackageSymbols(pkgItem: PackageItem, bases: SymbolInfo[]): vscode.TreeItem[] {
  return bases
    .map((info) => new SymbolItem(pkgItem, info))
    .sort(
      (a, b) =>
        a.info.kind.localeCompare(b.info.kind) || a.info.name.localeCompare(b.info.name),
    );
}

View file

@ -12,8 +12,8 @@ import {
launchPreviewInWebView,
previewProcessOutline,
} from "./preview";
import { tinymist } from "./lsp";
import { loadHTMLFile } from "./util";
import { tinymist } from "../lsp";
import { loadHTMLFile } from "../util";
const vscodeVariables = require("vscode-variables");

View file

@ -2,7 +2,7 @@
// Import the module and reference it with the alias vscode in your code below
import * as vscode from "vscode";
import * as path from "path";
import { DisposeList, getSensibleTextEditorColumn, getTargetViewColumn } from "./util";
import { DisposeList, getSensibleTextEditorColumn, getTargetViewColumn } from "../util";
import {
launchPreviewCompat,
previewActiveCompat as previewPostActivateCompat,
@ -18,8 +18,8 @@ import {
commandScrollPreview,
commandStartPreview,
registerPreviewTaskDispose,
} from "./extension";
import { isGitpod, translateGitpodURL } from "./gitpod";
} from "../extension";
import { isGitpod, translateGitpodURL } from "../gitpod";
function translateExternalURL(urlstr: string): string {
if (isGitpod()) {

View file

@ -1,7 +1,7 @@
import * as vscode from "vscode";
import { tinymist } from "./lsp";
import { getFocusingFile } from "./extension";
import { VirtualConsole } from "./util";
import { tinymist } from "../lsp";
import { VirtualConsole } from "../util";
import { extensionState } from "../state";
type ExportFormat = "pdf" | "png" | "svg" | "html" | "markdown" | "text" | "query" | "pdfpc";
@ -98,7 +98,7 @@ const exportOps = (exportArgs: ExportArgs) => ({
resolveInputPath() {
const inputPath = exportArgs.inputPath;
if (inputPath === "$focused" || inputPath === undefined) {
return getFocusingFile();
return extensionState.getFocusingFile();
}
return inputPath;

View file

@ -17,10 +17,27 @@ export async function getClient(): Promise<LanguageClient> {
return clientPromise;
}
/** Metadata of an installed Typst package, as reported by the language server. */
export interface PackageInfo {
  /** Path of the package directory on disk (consumed via `vscode.Uri.file`). */
  path: string;
  /** Namespace the package lives in, e.g. "preview" or "local". */
  namespace: string;
  name: string;
  version: string;
}
/** A symbol exported by a package; `children` makes this a tree. */
export interface SymbolInfo {
  name: string;
  /** Symbol kind, e.g. "function" or "constant"; also used as an icon-name suffix. */
  kind: string;
  children: SymbolInfo[];
}
interface ResourceRoutes {
"/symbols": any;
"/preview/index.html": string;
"/dirs/local-packages": string;
"/dir/package": string;
"/dir/package/local": string;
"/package/by-namespace": PackageInfo[];
"/package/symbol": SymbolInfo[];
"/package/docs": string;
}
export const tinymist = {
@ -43,8 +60,8 @@ export const tinymist = {
arguments: args,
});
},
getResource<T extends keyof ResourceRoutes>(path: T) {
return tinymist.executeCommand<ResourceRoutes[T]>("tinymist.getResources", [path]);
getResource<T extends keyof ResourceRoutes>(path: T, ...args: any[]) {
return tinymist.executeCommand<ResourceRoutes[T]>("tinymist.getResources", [path, ...args]);
},
getWorkspaceLabels() {
return tinymist.executeCommand<SymbolInformation[]>("tinymist.getWorkspaceLabels", []);

View file

@ -1,190 +1,193 @@
import { window, workspace } from 'vscode';
import { tinymist } from './lsp';
import * as fs from 'fs';
import { window, workspace } from "vscode";
import { tinymist } from "./lsp";
import * as fs from "fs";
// error message
export const dataDirErrorMessage = 'Can not find package directory.';
export const dataDirErrorMessage = "Can not find package directory.";
// todo: there is a bug since we can have both @local in data dir and cache dir
export async function getLocalPackagesDir() {
const packagesDir = await tinymist.getResource('/dirs/local-packages');
return packagesDir ? `${packagesDir}/local` : null;
const packageDir = (await tinymist.getResource("/dir/package/local"))?.[0];
return packageDir ? `${packageDir}/local` : undefined;
}
// typst.toml template
const typstTomlTemplate = (name: string, version: string, entrypoint: string) => {
return `[package]\nname = "${name}"\nversion = "${version}"\nentrypoint = "${entrypoint}"`;
return `[package]\nname = "${name}"\nversion = "${version}"\nentrypoint = "${entrypoint}"`;
};
// versionCompare
function versionCompare(a: string, b: string) {
const aArr = a.split('.');
const bArr = b.split('.');
for (let i = 0; i < 3; i++) {
const aNum = Number(aArr[i]);
const bNum = Number(bArr[i]);
if (aNum !== bNum) {
return bNum - aNum;
}
const aArr = a.split(".");
const bArr = b.split(".");
for (let i = 0; i < 3; i++) {
const aNum = Number(aArr[i]);
const bNum = Number(bArr[i]);
if (aNum !== bNum) {
return bNum - aNum;
}
return 0;
}
return 0;
}
/**
* get local packages list
*/
export async function getLocalPackagesList() {
const localPackagesDir = await getLocalPackagesDir();
// return list of local packages like ['@local/mypkg:1.0.0']
if (!localPackagesDir) {
return [];
const localPackagesDir = await getLocalPackagesDir();
// return list of local packages like ['@local/mypkg:1.0.0']
if (!localPackagesDir) {
return [];
}
// if localPackagesDir doesn't exist, return []
try {
await fs.promises.access(localPackagesDir);
} catch (err) {
return [];
}
const localPackagesList = await fs.promises.readdir(localPackagesDir);
// get all version
const res = [] as {
package: string;
namespace: string;
name: string;
version: string;
}[];
for (const localPackage of localPackagesList) {
// if localPackage is not a directory, continue
const stat = await fs.promises.stat(`${localPackagesDir}/${localPackage}`);
if (!stat.isDirectory()) {
continue;
}
// if localPackagesDir doesn't exist, return []
try {
await fs.promises.access(localPackagesDir);
} catch (err) {
return [];
// filter versions only valid version like '0.1.0'
const versions = (await fs.promises.readdir(`${localPackagesDir}/${localPackage}`)).filter(
(version) => {
const versionReg = /^\d+\.\d+\.\d+$/;
return versionReg.test(version);
},
);
// sort versions like ['1.0.0', '0.2.0', '0.1.0', '0.0.2', '0.0.1']
versions.sort(versionCompare);
for (const version of versions) {
res.push({
package: `@local/${localPackage}:${version}`,
namespace: "local",
name: localPackage,
version,
});
}
const localPackagesList = await fs.promises.readdir(localPackagesDir);
// get all version
const res = [] as {
package: string,
namespace: string,
name: string,
version: string,
}[];
for (const localPackage of localPackagesList) {
// if localPackage is not a directory, continue
const stat = await fs.promises.stat(`${localPackagesDir}/${localPackage}`);
if (!stat.isDirectory()) {
continue;
}
// filter versions only valid version like '0.1.0'
const versions = (await fs.promises.readdir(`${localPackagesDir}/${localPackage}`)).filter(version => {
const versionReg = /^\d+\.\d+\.\d+$/;
return versionReg.test(version);
});
// sort versions like ['1.0.0', '0.2.0', '0.1.0', '0.0.2', '0.0.1']
versions.sort(versionCompare);
for (const version of versions) {
res.push({
package: `@local/${localPackage}:${version}`,
namespace: 'local',
name: localPackage,
version,
});
}
}
return res;
}
return res;
}
/**
* create local package
*/
export async function commandCreateLocalPackage() {
const localPackagesDir = await getLocalPackagesDir();
if (!localPackagesDir) {
window.showErrorMessage(dataDirErrorMessage);
return;
}
// 1. input package name
const packageName = await window.showInputBox({
value: '',
placeHolder: 'Please input package name',
validateInput: text => {
return text ? null : 'Please input package name';
}
});
if (!packageName) {
return;
}
// 2. input package version
const packageVersion = await window.showInputBox({
value: '0.1.0',
placeHolder: 'Please input package version',
validateInput: text => {
if (!text) {
return 'Please input package version';
}
// make sure it is valid version like '0.1.0'
const versionReg = /^\d+\.\d+\.\d+$/;
if (!versionReg.test(text)) {
return 'Please input valid package version like 0.1.0';
}
return null;
}
});
if (!packageVersion) {
return;
}
// 3. input entrypoint
const entrypoint = await window.showInputBox({
value: 'lib.typ',
placeHolder: 'Please input entrypoint',
validateInput: text => {
if (!text) {
return 'Please input entrypoint';
}
// make sure it is valid entrypoint end with .typ
if (!text.endsWith('.typ')) {
return 'Please input valid entrypoint end with .typ';
}
return null;
}
});
if (!entrypoint) {
return;
}
// 4. create localPackagesDir/name/version/typst.toml
const packageDir = `${localPackagesDir}/${packageName}/${packageVersion}`;
const typstToml = typstTomlTemplate(packageName, packageVersion, entrypoint);
await fs.promises.mkdir(packageDir, { recursive: true });
await fs.promises.writeFile(`${packageDir}/typst.toml`, typstToml);
// 5. create localPackagesDir/name/version/entrypoint
await fs.promises.writeFile(`${packageDir}/${entrypoint}`, '#let add(a, b) = { a + b }');
// 6. open localPackagesDir/name/version/entrypoint
const document = await workspace.openTextDocument(`${packageDir}/${entrypoint}`);
await window.showTextDocument(document);
const localPackagesDir = await getLocalPackagesDir();
if (!localPackagesDir) {
window.showErrorMessage(dataDirErrorMessage);
return;
}
// 1. input package name
const packageName = await window.showInputBox({
value: "",
placeHolder: "Please input package name",
validateInput: (text) => {
return text ? null : "Please input package name";
},
});
if (!packageName) {
return;
}
// 2. input package version
const packageVersion = await window.showInputBox({
value: "0.1.0",
placeHolder: "Please input package version",
validateInput: (text) => {
if (!text) {
return "Please input package version";
}
// make sure it is valid version like '0.1.0'
const versionReg = /^\d+\.\d+\.\d+$/;
if (!versionReg.test(text)) {
return "Please input valid package version like 0.1.0";
}
return null;
},
});
if (!packageVersion) {
return;
}
// 3. input entrypoint
const entrypoint = await window.showInputBox({
value: "lib.typ",
placeHolder: "Please input entrypoint",
validateInput: (text) => {
if (!text) {
return "Please input entrypoint";
}
// make sure it is valid entrypoint end with .typ
if (!text.endsWith(".typ")) {
return "Please input valid entrypoint end with .typ";
}
return null;
},
});
if (!entrypoint) {
return;
}
// 4. create localPackagesDir/name/version/typst.toml
const packageDir = `${localPackagesDir}/${packageName}/${packageVersion}`;
const typstToml = typstTomlTemplate(packageName, packageVersion, entrypoint);
await fs.promises.mkdir(packageDir, { recursive: true });
await fs.promises.writeFile(`${packageDir}/typst.toml`, typstToml);
// 5. create localPackagesDir/name/version/entrypoint
await fs.promises.writeFile(`${packageDir}/${entrypoint}`, "#let add(a, b) = { a + b }");
// 6. open localPackagesDir/name/version/entrypoint
const document = await workspace.openTextDocument(`${packageDir}/${entrypoint}`);
await window.showTextDocument(document);
}
/**
* open local package in editor
*/
export async function commandOpenLocalPackage() {
const localPackagesDir = await getLocalPackagesDir();
if (!localPackagesDir) {
window.showErrorMessage(dataDirErrorMessage);
return;
}
// 1. select local package
const localPackagesList = await getLocalPackagesList();
const localPackages = localPackagesList.map(pkg => pkg.package);
const selected = await window.showQuickPick(localPackages, {
placeHolder: 'Please select a local package to open'
});
if (!selected) {
return;
}
// 2. read localPackagesDir/name/version/typst.toml
const name = localPackagesList.filter(pkg => pkg.package === selected)[0].name;
const version = localPackagesList.filter(pkg => pkg.package === selected)[0].version;
const packageDir = `${localPackagesDir}/${name}/${version}`;
// if typst.toml doesn't exist, return
try {
await fs.promises.access(`${packageDir}/typst.toml`);
} catch (err) {
window.showErrorMessage('Can not find typst.toml.');
return;
}
const typstToml = await fs.readFileSync(`${packageDir}/typst.toml`, 'utf-8');
// parse typst.toml
const entrypoint = typstToml.match(/entrypoint\s*=\s*"(.*)"/)?.[1];
if (!entrypoint) {
// open typst.toml if entrypoint is not set
const document = await workspace.openTextDocument(`${packageDir}/typst.toml`);
await window.showTextDocument(document);
return;
}
// 3. open localPackagesDir/name/version/entrypoint
const document = await workspace.openTextDocument(`${packageDir}/${entrypoint}`);
const localPackagesDir = await getLocalPackagesDir();
if (!localPackagesDir) {
window.showErrorMessage(dataDirErrorMessage);
return;
}
// 1. select local package
const localPackagesList = await getLocalPackagesList();
const localPackages = localPackagesList.map((pkg) => pkg.package);
const selected = await window.showQuickPick(localPackages, {
placeHolder: "Please select a local package to open",
});
if (!selected) {
return;
}
// 2. read localPackagesDir/name/version/typst.toml
const name = localPackagesList.filter((pkg) => pkg.package === selected)[0].name;
const version = localPackagesList.filter((pkg) => pkg.package === selected)[0].version;
const packageDir = `${localPackagesDir}/${name}/${version}`;
// if typst.toml doesn't exist, return
try {
await fs.promises.access(`${packageDir}/typst.toml`);
} catch (err) {
window.showErrorMessage("Can not find typst.toml.");
return;
}
const typstToml = await fs.readFileSync(`${packageDir}/typst.toml`, "utf-8");
// parse typst.toml
const entrypoint = typstToml.match(/entrypoint\s*=\s*"(.*)"/)?.[1];
if (!entrypoint) {
// open typst.toml if entrypoint is not set
const document = await workspace.openTextDocument(`${packageDir}/typst.toml`);
await window.showTextDocument(document);
return;
}
// 3. open localPackagesDir/name/version/entrypoint
const document = await workspace.openTextDocument(`${packageDir}/${entrypoint}`);
await window.showTextDocument(document);
}

View file

@ -1,8 +1,37 @@
export const extensionState = {
import * as vscode from "vscode";
export type ExtensionContext = vscode.ExtensionContext;
/** Shape of the shared extension-state singleton. */
interface ExtensionState {
  /** Feature flags toggling optional parts of the extension. */
  features: {
    task: boolean;
    devKit: boolean;
    onEnter: boolean;
    preview: boolean;
  };
  /** Mutable fields tracking the currently focused file/document. */
  mut: {
    focusingFile: string | undefined;
    focusingDoc: vscode.TextDocument | undefined;
  };
  getFocusingFile(): string | undefined;
  getFocusingDoc(): vscode.TextDocument | undefined;
}
/** Singleton extension state; write through `extensionState.mut`, read via getters. */
export const extensionState: ExtensionState = {
  features: {
    task: true,
    devKit: false,
    onEnter: false,
    preview: false,
  },
  mut: {
    focusingFile: undefined,
    focusingDoc: undefined,
  },
  getFocusingFile() {
    return extensionState.mut.focusingFile;
  },
  getFocusingDoc() {
    return extensionState.mut.focusingDoc;
  },
};

View file

@ -154,7 +154,7 @@
);
}
</script>
<script type="module" src="/src/main.ts"></script>
<script type="module" src="%VITE_ENTRY%"></script>
<div id="tinymist-app"></div>
</body>
</html>

View file

@ -6,7 +6,7 @@
"license": "Apache-2.0",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"build": "tsc && vite build -- --component=symbol-view && vite build -- ",
"preview": "vite preview",
"test": "vitest",
"coverage": "vitest run --coverage"

View file

@ -1 +1,2 @@
symbol-view.mock.ts
docs.mock.ts

View file

@ -0,0 +1,105 @@
/* Styles for the locally generated package documentation view. */
/* Merriweather (serif) is loaded for the documentation body text. */
@import url("https://fonts.googleapis.com/css2?family=Merriweather:ital,wght@0,300;0,700;0,900;1,300;1,700&display=swap");
.tinymist-docs {
  /* Accent color for the detail-header left border. */
  --mainLight: hsl(250, 68%, 74%);
  --textDetailAccent: var(--mainLight);
  font-family:
    Merriweather,
    serif,
    "Inter var experimental",
    "Inter var",
    -apple-system,
    BlinkMacSystemFont,
    "Segoe UI",
    Roboto,
    Oxygen,
    Ubuntu,
    Cantarell,
    "Fira Sans",
    "Droid Sans",
    "Helvetica Neue",
    sans-serif;
}
/* Monospace stack for inline and block code. */
.tinymist-docs code {
  font-family: Menlo, Monaco, "JetBrains Mono", "Fira Code", Consolas,
    "Courier New", monospace;
}
/* Heading scale for the docs page. */
.tinymist-docs h1 {
  font-size: 3em;
}
.tinymist-docs h2 {
  font-size: 1.7em;
  margin-block-start: 1em;
  margin-block-end: 0.83em;
}
.tinymist-docs h3 {
  font-size: 1.17em;
  margin-block-start: 1.17em;
  margin-block-end: 0.83em;
}
/* Boxed, accent-bordered header shown before each documented symbol. */
.tinymist-docs .detail-header {
  margin: 1em 0;
  margin-top: 1.5em;
  padding: 0.5em 1em;
  background-color: rgba(255, 255, 255, 0.05);
  border-left: 3px solid var(--textDetailAccent);
  font-size: 1em;
  position: relative;
}
.tinymist-docs .detail-header h3.doc-symbol-name {
  font-size: 1em;
  font-weight: 700;
  margin: 0;
}
.tinymist-docs .detail-header .doc-param-title {
  font-size: 1em;
  font-weight: 600;
  margin-block-start: 1.17em;
  margin-block-end: 0.5em;
}
/* Token colors for type/code spans in rendered signatures. */
.tinymist-docs .type-int {
  color: #e7d9ff;
}
.tinymist-docs .type-float {
  color: #e7d9ff;
}
.tinymist-docs .type-builtin {
  color: #d2a8ff;
}
.tinymist-docs .type-none {
  color: #ff6d00;
}
.tinymist-docs .code-kw,
.tinymist-docs .type-auto {
  color: #ff6d00;
}
.tinymist-docs .code-func {
  color: #79c0ff;
}
.tinymist-docs .code-op {
  color: #79c0ff;
}
/* Inferred types get a hover highlight (with a short fade transition). */
.tinymist-docs .type-inferred-as,
.tinymist-docs .code-kw.type-inferred {
  transition: background-color 0.1s;
}
.tinymist-docs .type-inferred-as:hover,
.tinymist-docs .code-kw.type-inferred:hover {
  background-color: #344134;
}

View file

@ -0,0 +1,804 @@
import "./docs.css";
import van, { State, ChildDom } from "vanjs-core";
const { div, h1, h2, h3, code, a, p, i, span, strong } = van.tags;
// import { docsMock } from "./docs.mock";
const docsMock = "";
/** Root component rendering locally generated package documentation. */
export const Docs = () => {
  // Reactive documentation tree; starts as an empty placeholder element.
  const docTree: State<DocElement> = van.state({
    contents: [],
    children: [],
    kind: DocKind.None,
    id: "",
    data: null,
  } as DocElement);

  // The host substitutes encoded content for this marker string at load time;
  // if it still starts with ":" (unsubstituted), fall back to mock content.
  const injected = `:[[preview:DocContent]]:`;
  van.derive(async () => {
    const source = injected.startsWith(":")
      ? docsMock
      : decodeURIComponent(atob(injected));
    if (!source) {
      return;
    }
    docTree.val = await recoverDocsStructure(source);
  });

  // Re-rendered whenever the parsed tree changes.
  const renderBody = (_dom?: Element) => {
    const v = docTree.val;
    console.log("updated", v);
    return div(MakeDoc(v));
  };

  return div(
    {
      class: "tinymist-docs flex-col",
      style: "justify-content: center; align-items: center; gap: 10px;",
    },
    div({ style: "flex: 1; width: 100%; padding: 10px" }, renderBody)
  );
};
const enum TokenKind {
Text,
PackageStart,
PackageEnd,
ParamDocStart,
ParamDocEnd,
ErrorStart,
ErrorEnd,
ModuleStart,
ModuleEnd,
SymbolStart,
SymbolEnd,
SigStart,
SigEnd,
ParamStart,
ParamEnd,
Comment,
}
const enum DocKind {
None,
Package,
Module,
Symbol,
Param,
SigOrParam,
}
interface DocElement {
contents: string[];
children: DocElement[];
kind: DocKind;
id: string;
data: any;
}
/**
 * Rebuilds the hierarchical documentation tree from flat rendered content.
 *
 * The docs generator interleaves plain text with structural HTML comment
 * markers (`<!-- begin:package … -->`, `<!-- end:symbol … -->`, …). This
 * function tokenizes the content into text/marker tokens, then replays the
 * begin/end markers against a stack to recover the
 * package → module → symbol → param hierarchy.
 */
async function recoverDocsStructure(content: string) {
  // Tokenize: alternate plain-text spans with decoded comment markers.
  const reg = /<!--(.*?)-->/g;
  let tokenPromises = [];
  let match;
  let lastIndex = 0;
  while ((match = reg.exec(content))) {
    tokenPromises.push(
      Promise.resolve([TokenKind.Text, content.slice(lastIndex, match.index)])
    );
    tokenPromises.push(identifyCommentToken(match[1]));
    lastIndex = reg.lastIndex;
  }
  // Fix: the trailing text must be a [TokenKind.Text, …] tuple like every
  // other token. Previously the bare string was pushed, so `token[0]` was a
  // character and all content after the last marker was silently dropped.
  tokenPromises.push(Promise.resolve([TokenKind.Text, content.slice(lastIndex)]));
  const tokens = await Promise.all(tokenPromises);

  // Replay tokens against a stack of partially built elements.
  let packageStack = [];
  let structStack = [];
  let current = {
    contents: [],
    children: [],
    kind: DocKind.None,
    id: "",
    data: {},
  } as DocElement;
  // Innermost package element; error markers attach to it.
  let currentPkg = current;
  for (const token of tokens) {
    switch (token[0]) {
      case TokenKind.PackageStart:
        structStack.push(current);
        packageStack.push(currentPkg);
        current = {
          contents: [],
          children: [],
          kind: DocKind.Package,
          id: "",
          data: token[1],
        };
        currentPkg = current;
        break;
      case TokenKind.PackageEnd: {
        const pkg = current;
        current = structStack.pop()!;
        currentPkg = packageStack.pop()!;
        current.children.push(pkg);
        break;
      }
      case TokenKind.ErrorStart:
        // Generation errors are attached to the enclosing package element.
        currentPkg.data.error = token[1];
        break;
      case TokenKind.ErrorEnd:
        break;
      case TokenKind.ParamDocStart: {
        structStack.push(current);
        // Find the nearest enclosing symbol so the rendered param doc can be
        // registered on it, keyed by `<symbol id>-param-<name>`.
        let sym = undefined;
        for (let i = structStack.length - 1; i >= 0; i--) {
          if (structStack[i].kind === DocKind.Symbol) {
            sym = structStack[i];
            break;
          }
        }
        current = {
          contents: [],
          children: [],
          kind: DocKind.Param,
          id: token[1],
          data: {
            name: token[1],
          },
        };
        if (sym) {
          current.id = `${sym.id}-param-${token[1]}`;
          const renderedParams = (sym.data.renderedParams =
            sym.data.renderedParams || {});
          renderedParams[current.id] = current;
        }
        break;
      }
      case TokenKind.ParamDocEnd: {
        current = structStack.pop()!;
        break;
      }
      case TokenKind.ModuleStart:
        structStack.push(current);
        current = {
          contents: [],
          children: [],
          kind: DocKind.Module,
          id: token[1],
          data: token[2],
        };
        break;
      case TokenKind.ModuleEnd: {
        const module = current;
        current = structStack.pop()!;
        current.children.push(module);
        break;
      }
      case TokenKind.SymbolStart:
        structStack.push(current);
        current = {
          contents: [],
          children: [],
          kind: DocKind.Symbol,
          id: token[1],
          data: token[2],
        };
        break;
      case TokenKind.SymbolEnd: {
        const symbol = current;
        current = structStack.pop()!;
        current.children.push(symbol);
        break;
      }
      case TokenKind.SigStart:
      case TokenKind.ParamStart:
        // Signature/param wrappers collect text that is rendered elsewhere;
        // the wrapper element itself is discarded on the matching end marker.
        structStack.push(current);
        current = {
          contents: [],
          children: [],
          kind: DocKind.SigOrParam,
          id: "",
          data: {},
        };
        break;
      case TokenKind.SigEnd:
      case TokenKind.ParamEnd:
        current = structStack.pop()!;
        break;
      case TokenKind.Comment:
        // Unrecognized comment markers are logged and otherwise ignored.
        console.log("Comment", token[1]);
        break;
      case TokenKind.Text:
        current.contents.push(token[1]);
        break;
    }
  }
  return current;
}
/**
 * Classifies one structural HTML comment into a token tuple: the token kind,
 * optionally followed by an id and/or a base64-decoded JSON payload.
 */
async function identifyCommentToken(comment: string) {
  const parts = comment.trim().split(" ");
  switch (parts[0]) {
    case "begin:package":
      return [TokenKind.PackageStart, JSON.parse(await base64ToUtf8(parts[1]))];
    case "end:package":
      return [TokenKind.PackageEnd, JSON.parse(await base64ToUtf8(parts[1]))];
    case "begin:param-doc":
      return [TokenKind.ParamDocStart, parts[1]];
    case "end:param-doc":
      return [TokenKind.ParamDocEnd, parts[1]];
    case "begin:errors":
      return [TokenKind.ErrorStart, JSON.parse(await base64ToUtf8(parts[1]))];
    case "end:errors":
      return [TokenKind.ErrorEnd, parts[1]];
    case "begin:module":
      return [TokenKind.ModuleStart, parts[1], JSON.parse(await base64ToUtf8(parts[2]))];
    case "end:module":
      return [TokenKind.ModuleEnd, parts[1]];
    case "begin:symbol":
      return [TokenKind.SymbolStart, parts[1], JSON.parse(await base64ToUtf8(parts[2]))];
    case "end:symbol":
      return [TokenKind.SymbolEnd, parts[1]];
    case "begin:sig":
      return [TokenKind.SigStart, parts[1]];
    case "end:sig":
      return [TokenKind.SigEnd, parts[1]];
    case "begin:param":
      return [TokenKind.ParamStart, parts[1]];
    case "end:param":
      return [TokenKind.ParamEnd, parts[1]];
    // typlite list-item markers carry no structure; treat them as empty text.
    case "typlite:begin:list-item":
    case "typlite:end:list-item":
      return [TokenKind.Text, ""];
    default:
      return [TokenKind.Comment, comment];
  }
}
/** Decodes a base64 payload into a UTF-8 string via a `data:` URL fetch. */
async function base64ToUtf8(base64: string) {
  const response = await fetch(`data:text/plain;base64,${base64}`);
  return await response.text();
}
/** Recursively collects the ids of all module elements in the tree into `s`. */
function getKnownModules(v: DocElement, s: Set<string>) {
  v.children.forEach((child) => {
    if (child.kind === DocKind.Module) {
      s.add(child.id);
    }
    getKnownModules(child, s);
  });
}
function MakeDoc(v: DocElement) {
const knownModules = new Set<string>();
getKnownModules(v, knownModules);
console.log("MakeDoc", v, knownModules);
function Item(v: DocElement): ChildDom {
switch (v.kind) {
case DocKind.Package:
return PackageItem(v);
case DocKind.Module:
if (!v.data.prefix) {
return ModuleBody(v);
}
return ModuleItem(v);
case DocKind.Symbol:
const kind = v.data.kind;
switch (kind) {
case "function":
return FuncItem(v);
case "constant":
return ConstItem(v);
case "module":
return ModuleRefItem(v);
default:
return div();
}
case DocKind.None:
return div(...v.children.map(Item));
default:
return div();
}
}
function ModuleBody(v: DocElement) {
const modules = [];
const functions = [];
const constants = [];
const unknowns = [];
for (const child of v.children) {
switch (child.kind) {
case DocKind.Module:
modules.push(child);
break;
case DocKind.Symbol:
switch (child.data.kind) {
case "function":
functions.push(child);
break;
case "constant":
constants.push(child);
break;
case "module":
modules.push(child);
break;
default:
unknowns.push(child);
break;
}
break;
}
}
const chs = [];
if (modules.length > 0) {
chs.push(h2("Modules"), div(...modules.map(ModuleRefItem)));
}
if (constants.length > 0) {
chs.push(h2("Constants"), div(...constants.map(Item)));
}
if (functions.length > 0) {
chs.push(h2("Functions"), div(...functions.map(Item)));
}
if (unknowns.length > 0) {
chs.push(h2("Unknowns"), div(...unknowns.map(Item)));
}
return div(...chs);
}
function ModuleItem(v: DocElement) {
return div(
{ class: "tinymist-module" },
h1({ id: `module-${v.id}` }, `Module: ${v.data.prefix}`),
ModuleBody(v)
);
}
function PackageItem(v: DocElement) {
console.log("PackageItem", v);
return div(
h1(`@${v.data.namespace}/${v.data.name}:${v.data.version}`),
p(
span(
"This documentation is generated locally. Please submit issues to "
),
a(
{ href: "https://github.com/Myriad-Dreamin/tinymist/issues" },
"tinymist"
),
span(" if you see "),
strong(i("incorrect")),
span(" information in it.")
),
// ModuleBody(v)
...v.children.map(Item)
);
}
/**
 * Renders a compact reference card for a module. Modules indexed on this
 * page link to their `#module-…` anchor; others are labeled external.
 */
function ModuleRefItem(v: DocElement) {
  // A module is internal iff it was recorded while rendering this page.
  const body = knownModules.has(v.id)
    ? code(a({ href: `#module-${v.id}` }, v.data.name))
    : code("external ", v.data.name);
  return div(
    { class: "tinymist-module-ref" },
    div(
      { class: `detail-header doc-symbol-${v.data.kind}` },
      h3({ class: "doc-symbol-name" }, body)
    )
  );
}
/** A function parameter as reported by the analyzer. */
interface DocParam {
  /** The parameter name. */
  name: string;
  /** Inferred type pair: element 0 is split and highlighted inline,
   * element 1 is shown as a hover title (see `sigTypeHighlighted`). */
  cano_type: [string, string];
  /** The default-value expression, if the parameter declares one. */
  expr?: string;
}
/**
 * Renders a function symbol: a `name(p1, p2, ..) -> ret` header, a
 * signature preview, the rendered doc body, then per-parameter docs.
 */
function FuncItem(v: DocElement) {
  const sig = v.data.signature;
  // Header: positional parameter names, with `..` standing in for any
  // rest/named parameters.
  const funcTitle = [code(v.data.name), "("];
  if (sig) {
    for (let i = 0; i < sig.pos.length; i++) {
      if (i > 0) {
        funcTitle.push(code(", "));
      }
      funcTitle.push(code(sig.pos[i].name));
    }
    if (sig.rest || Object.keys(sig.named).length > 0) {
      if (sig.pos.length > 0) {
        funcTitle.push(code(", "));
      }
      funcTitle.push(code(".."));
    }
  }
  funcTitle.push(code(")"));
  // Append the return type only when the parsed docs declare one.
  if (v.data.parsed_docs?.return_ty) {
    funcTitle.push(code(" -> "));
    typeHighlighted(v.data.parsed_docs.return_ty, funcTitle);
  }
  return div(
    { class: "tinymist-symbol" },
    div(
      { class: `detail-header doc-symbol-${v.data.kind}` },
      h3({ class: "doc-symbol-name" }, code(...funcTitle))
    ),
    ...SigPreview(v),
    // The pre-rendered (HTML) body of the function's documentation.
    div({
      style: "margin-left: 0.62em",
      innerHTML: v.contents.join(""),
    }),
    ...SigDocs(v)
  );
}
/** A function signature as reported by the analyzer. */
interface DocSignature {
  /** Positional parameters, in declaration order. */
  pos: DocParam[];
  /** The rest (variadic) parameter.
   * NOTE(review): callers guard with `sig.rest ||`, so it can be absent
   * at runtime — consider marking it optional. */
  rest: DocParam;
  /** Named parameters, keyed by their names. */
  named: Record<string, DocParam>;
  /** Inferred return type pair; see `sigTypeHighlighted` for rendering. */
  ret_ty?: [string, string];
}
/**
 * Renders the "Resultant" (return type) and "Parameters" documentation
 * sections for a function symbol. Returns an empty list when the symbol
 * has no signature.
 */
function SigDocs(v: DocElement): ChildDom[] {
  const sig: DocSignature = v.data.signature;
  const parsed_docs = v.data.parsed_docs;
  const res: ChildDom[] = [];
  if (!sig) {
    return res;
  }
  // Merge per-parameter info from two sources, keyed by parameter name:
  // the docstring parser (`parsed_docs.params`, provides `types`) and the
  // pre-rendered markdown bodies (`renderedParams`, provides `docs`).
  const docsMapping = new Map<string, any>();
  if (parsed_docs?.params) {
    for (const param of parsed_docs.params) {
      docsMapping.set(param.name, param);
    }
  }
  if (v.data.renderedParams) {
    for (const p of Object.values(v.data.renderedParams)) {
      const param = p as DocElement;
      const prev = docsMapping.get(param.data.name) || {};
      prev.docs = param.contents.join("");
      docsMapping.set(param.data.name, prev);
    }
  }
  interface TaggedParam {
    kind: string;
    param: DocParam;
  }
  // Flatten positional, rest and named parameters into one tagged list.
  const paramsAll: TaggedParam[] = [
    ...sig.pos.map((param: DocParam) => ({ kind: "pos", param })),
    ...(sig.rest ? [{ kind: "rest", param: sig.rest }] : []),
    ...Object.entries(sig.named).map(([, param]) => ({
      kind: "named",
      param,
    })),
  ];
  // Return type: prefer the documented type, fall back to the inferred one.
  if (parsed_docs?.return_ty || sig.ret_ty) {
    const paramTitle = [codeHl("op", "-> ")];
    sigTypeHighlighted(parsed_docs.return_ty, sig.ret_ty, paramTitle);
    res.push(h3("Resultant"));
    res.push(
      div(
        { style: "margin-left: 0.62em" },
        div(
          { style: "margin-left: 0.62em" },
          div({ class: "doc-param-title" }, strong(paramTitle))
        )
      )
    );
  }
  if (paramsAll.length) {
    res.push(h3("Parameters"));
  }
  for (const { kind, param } of paramsAll) {
    const docsMeta = docsMapping.get(param.name);
    // The id is the anchor target for the links emitted by SigPreview.
    const paramTitle = [
      code({ id: `param-${v.id}-${param.name}` }, param.name),
    ];
    if (docsMeta?.types || param.cano_type) {
      paramTitle.push(code(": "));
      sigTypeHighlighted(docsMeta?.types, param.cano_type, paramTitle);
    }
    if (param.expr) {
      paramTitle.push(codeHl("op", " = "));
      paramTitle.push(code(param.expr));
    }
    if (kind == "pos") {
      paramTitle.push(code(" (positional)"));
    } else if (kind == "rest") {
      paramTitle.push(code(" (rest)"));
    }
    const docsAll = docsMeta?.docs ? docsMeta.docs : "";
    res.push(
      div(
        { style: "margin-left: 0.62em" },
        div({ class: "doc-param-title" }, strong(code(paramTitle))),
        div({
          style: "margin-left: 0.62em",
          // Fall back to a dash when the parameter has no prose docs.
          innerHTML: docsAll ? docsAll : "<p>-</p>",
        })
      )
    );
  }
  return res;
}
/**
 * Renders a `let name(params..) -> ret;` signature preview in which every
 * parameter links to its anchor in the parameter documentation (SigDocs).
 * Returns an empty list when the symbol has no signature.
 */
function SigPreview(v: DocElement): ChildDom[] {
  const sig = v.data.signature;
  if (!sig) {
    return [];
  }
  const res: ChildDom[] = [];
  // Display order: positional, named (suffixed `?`), then rest (prefixed `..`).
  const paramsAll = [
    ...sig.pos.map((param: DocParam) => ({ kind: "pos", param })),
    ...Object.entries(sig.named).map(([name, param]) => ({
      kind: "named",
      name,
      param,
    })),
    ...(sig.rest ? [{ kind: "rest", param: sig.rest }] : []),
  ];
  const sigTitle = [
    code(kwHl("let")),
    code(" "),
    code(fnHl(v.data.name)),
    code("("),
  ];
  for (let i = 0; i < paramsAll.length; i++) {
    if (i > 0) {
      sigTitle.push(code(", "));
    }
    const paramTitle = [];
    if (paramsAll[i].kind == "rest") {
      paramTitle.push(code(".."));
    }
    paramTitle.push(code(paramsAll[i].param.name));
    if (paramsAll[i].kind == "named") {
      paramTitle.push(code("?"));
    }
    // Each parameter links to its detailed entry rendered by SigDocs.
    sigTitle.push(
      a(
        { href: `#param-${v.id}-${paramsAll[i].param.name}` },
        ...paramTitle
      )
    );
  }
  sigTitle.push(code(")"));
  // Append the return type only when the parsed docs declare one.
  if (v.data.parsed_docs?.return_ty) {
    sigTitle.push(code(" -> "));
    typeHighlighted(v.data.parsed_docs.return_ty, sigTitle);
  }
  sigTitle.push(code(";"));
  res.push(
    div(
      { style: "margin-left: 0.62em" },
      div({
        style: "font-size: 1.5em; margin: 0.5em 0",
      }),
      div({ style: "margin: 0 1em" }, code(...sigTitle))
    )
  );
  return res;
}
/** Renders a constant symbol: its name header plus the rendered docs. */
function ConstItem(v: DocElement) {
  const header = div(
    { class: `detail-header doc-symbol-${v.data.kind}` },
    h3({ class: "doc-symbol-name" }, code(`${v.data.name}`))
  );
  // The pre-rendered (HTML) documentation body of the constant.
  const body = div({
    style: "margin-left: 0.62em",
    innerHTML: v.contents.join(""),
  });
  return div({ class: "tinymist-symbol" }, header, body);
}
return Item(v);
}
/**
 * Appends a highlighted type annotation to `target`.
 *
 * Prefers the documented type string `types`; otherwise falls back to
 * the checker-inferred pair, rendering `inferred[0]` inline (split on
 * `|`) with the long representation `inferred[1]` as a hover title.
 */
function sigTypeHighlighted(
  types: string | undefined,
  inferred: [string, string] | undefined,
  target: ChildDom[]
) {
  if (types) {
    typeHighlighted(types, target);
  } else if (inferred) {
    const rendered: ChildDom[] = [];
    typeHighlighted(inferred[0], rendered, "|");
    // Mark the annotation as inferred so CSS can style it distinctly.
    const infer = span(
      { class: "code-kw type-inferred", title: "inferred by type checker" },
      "infer"
    );
    target.push(
      code(
        { class: "type-inferred" },
        infer,
        code(" "),
        span({ class: "type-inferred-as", title: inferred[1] }, ...rendered)
      )
    );
  }
}
/**
 * Splits a type string on `by` and appends each alternative to `target`
 * as a `code` element, colorized via a CSS class for well-known type
 * names. Alternatives are joined with ` | ` separators.
 */
function typeHighlighted(
  types: string,
  target: ChildDom[],
  by: RegExp | string = /[|,]/g
) {
  // CSS class per recognized type name (matched after trimming).
  const typeClass = new Map<string, string>([
    ["int", "type-int"],
    ["integer", "type-int"],
    ["float", "type-float"],
    ["string", "type-builtin"],
    ["array", "type-builtin"],
    ["dictionary", "type-builtin"],
    ["content", "type-builtin"],
    ["str", "type-builtin"],
    ["bool", "type-builtin"],
    ["boolean", "type-builtin"],
    ["auto", "type-auto"],
    ["none", "type-none"],
  ]);
  types.split(by).forEach((part, i) => {
    if (i > 0) {
      target.push(code(" | "));
    }
    const ty = part.trim();
    const cls = typeClass.get(ty);
    if (cls) {
      target.push(code({ class: cls }, ty));
    } else {
      // Unrecognized types keep their original (untrimmed) spelling.
      target.push(code(part));
    }
  });
}
/** Highlights a keyword token (e.g. `let`) via the `code-kw` CSS class. */
function kwHl(kw: string) {
  return code({ class: "code-kw" }, kw);
}
/** Highlights a function name via the `code-func` CSS class. */
function fnHl(fn: string) {
  return code({ class: "code-func" }, fn);
}
/** Highlights an arbitrary token `c` via the `code-<cls>` CSS class. */
function codeHl(cls: string, c: string) {
  return code({ class: `code-${cls}` }, c);
}

View file

@ -0,0 +1,50 @@
import "./style.css";
import van, { ChildDom } from "vanjs-core";
import { setupVscodeChannel } from "./vscode";
/// The components (pages) that can be rendered by the frontend.
/// Typically, each component corresponds to a single tool (Application).
type PageComponent =
  | "template-gallery"
  | "tracing"
  | "summary"
  | "diagnostics"
  | "symbol-view"
  | "docs";

/// The frontend arguments that are passed from the backend.
interface Arguments {
  /// The page (tool) to render.
  page: PageComponent;
}
/// Retrieves the frontend arguments that the backend injects into the
/// served HTML by textual substitution.
function retrieveArgs(): Arguments {
  /// The string `editor-tools-args:{}` is a placeholder.
  /// It will be replaced by the actual arguments before serving:
  /// ```rs
  /// let frontend_html = frontend_html.replace(
  ///   "editor-tools-args:{}", ...);
  /// ```
  let mode = `editor-tools-args:{"page": "docs"}`;
  /// Remove the placeholder prefix.
  mode = mode.replace("editor-tools-args:", "");
  /// Parse and return the `Arguments` object.
  return JSON.parse(mode);
}
/// Maps each page name to the component factory that renders it.
type Registry = Partial<Record<PageComponent, () => ChildDom>>;

/// Shared entry point of the webview bundles: sets up the editor channel,
/// reads the injected arguments, looks up the requested page component in
/// `components`, and mounts it onto the `#tinymist-app` element.
export function mainHarness(components: Registry) {
  setupVscodeChannel();
  const { page } = retrieveArgs();
  const Component = components[page];
  if (!Component) {
    throw new Error(`Unknown page: ${page}`);
  }
  van.add(document.querySelector("#tinymist-app")!, Component());
}

View file

@ -0,0 +1,6 @@
import { mainHarness } from "./main.base";
import { SymbolPicker } from "./features/symbol-view";
// Entry point for the standalone symbol-view webview bundle: only the
// symbol picker component is registered.
mainHarness({
  "symbol-view": SymbolPicker,
});

View file

@ -1,69 +1,14 @@
import "./style.css";
import van from "vanjs-core";
import { setupVscodeChannel } from "./vscode";
import { mainHarness } from "./main.base";
import { TemplateGallery } from "./features/template-gallery";
import { Tracing } from "./features/tracing";
import { Summary } from "./features/summary";
import { Diagnostics } from "./features/diagnostics";
import { SymbolPicker } from "./features/symbol-view";
import { Docs } from "./features/docs";
/// The components that can be rendered by the frontend.
/// Typically, each component corresponds to a single tool (Application).
type PageComponent =
| "template-gallery"
| "tracing"
| "summary"
| "diagnostics"
| "symbol-view";
/// The frontend arguments that are passed from the backend.
interface Arguments {
/// The page to render.
page: PageComponent;
}
/// Placeholders for editor-tools program initializing frontend
/// arguments.
function retrieveArgs(): Arguments {
/// The string `editor-tools-args:{}` is a placeholder
/// It will be replaced by the actual arguments.
/// ```rs
/// let frontend_html = frontend_html.replace(
/// "editor-tools-args:{}", ...);
/// ```
let mode = `editor-tools-args:{"page": "symbol-view"}`;
/// Remove the placeholder prefix.
mode = mode.replace("editor-tools-args:", "");
/// Return a `WsArgs` object.
return JSON.parse(mode);
}
function main() {
setupVscodeChannel();
const args = retrieveArgs();
const appHook = document.querySelector("#tinymist-app")!;
switch (args.page) {
case "template-gallery":
van.add(appHook, TemplateGallery());
break;
case "tracing":
van.add(appHook, Tracing());
break;
case "summary":
van.add(appHook, Summary());
break;
case "diagnostics":
van.add(appHook, Diagnostics());
break;
case "symbol-view":
van.add(appHook, SymbolPicker());
break;
default:
throw new Error(`Unknown page: ${args.page}`);
}
}
main();
mainHarness({
"template-gallery": TemplateGallery,
tracing: Tracing,
summary: Summary,
diagnostics: Diagnostics,
docs: Docs,
});

View file

@ -183,3 +183,8 @@ body.typst-preview-light .tinymist-button.warning.activated {
.hidden {
display: none;
}
/* Also hide spans marked aria-hidden="true" from visual rendering.
   NOTE(review): aria-hidden only removes content from assistive tech;
   this rule additionally suppresses it visually — confirm that is
   intended for the icon/helper spans it targets. */
span[aria-hidden="true"] {
display: none;
}

View file

@ -1,18 +1,25 @@
import { defineConfig } from "vite";
import { viteSingleFile } from "vite-plugin-singlefile";
// Select the Vite entry point and output directory from the CLI flag
// `--component=<name>`: builds `/src/main.<name>.ts` into `dist/<name>`.
// Without the flag, the default entry `/src/main.ts` goes to `dist/default`.
const compPrefix = '--component=';
const componentArgs = process.argv.find(arg => arg.startsWith(compPrefix));
let output = 'dist/default';
if (componentArgs) {
  const component = componentArgs.substring(compPrefix.length);
  process.env.VITE_ENTRY = `/src/main.${component}.ts`;
  output = `dist/${component}`;
} else {
  process.env.VITE_ENTRY = '/src/main.ts';
}
export default defineConfig({
plugins: [viteSingleFile()],
assetsInclude: ["**/*.onnx"],
build: {
minify: false,
rollupOptions: {
output: {
assetFileNames: `typst-webview-assets/[name]-[hash][extname]`,
chunkFileNames: "typst-webview-assets/[name]-[hash].js",
entryFileNames: "typst-webview-assets/[name]-[hash].js",
},
},
outDir: output
},
optimizeDeps: {
esbuildOptions: {