feat: provide package view and local documentation (#596)

* feat: move featured components

* feat: provide package view and local documentation

* stage

* fix: compile errors caused by merged commits
Myriad-Dreamin 2024-09-12 21:17:07 +08:00 committed by GitHub
parent b06447ffe2
commit 78f3893185
70 changed files with 3422 additions and 593 deletions

View file

@@ -333,7 +333,7 @@ mod lexical_hierarchy_tests {
undefined_refs.sort();
let entry = DefUseEntry {
def: &IdentDef {
name: "<nil>".to_string(),
name: "<nil>".into(),
kind: LexicalKind::Block,
range: 0..0,
},

View file

@@ -2,7 +2,7 @@
use std::{collections::HashMap, ops::Range, sync::Arc};
use ecow::EcoVec;
use ecow::{EcoString, EcoVec};
use reflexo::hash::hash128;
use super::{prelude::*, ImportInfo};
@@ -18,7 +18,7 @@ enum Ns {
Value,
}
type ExternalRefMap = HashMap<(TypstFileId, Option<String>), Vec<(Option<DefId>, IdentRef)>>;
type ExternalRefMap = HashMap<(TypstFileId, Option<EcoString>), Vec<(Option<DefId>, IdentRef)>>;
/// The def-use information of a source file.
#[derive(Default)]
@@ -29,11 +29,11 @@ pub struct DefUseInfo {
/// The references to defined symbols.
pub ident_refs: HashMap<IdentRef, DefId>,
/// The references of labels.
pub label_refs: HashMap<String, Vec<Range<usize>>>,
pub label_refs: HashMap<EcoString, Vec<Range<usize>>>,
/// The references to undefined symbols.
pub undefined_refs: Vec<IdentRef>,
exports_refs: Vec<DefId>,
exports_defs: HashMap<String, DefId>,
exports_defs: HashMap<EcoString, DefId>,
self_id: Option<TypstFileId>,
self_hash: u128,
@@ -93,7 +93,7 @@ impl DefUseInfo {
pub fn get_external_refs(
&self,
ext_id: TypstFileId,
ext_name: Option<String>,
ext_name: Option<EcoString>,
) -> impl Iterator<Item = &(Option<DefId>, IdentRef)> {
self.external_refs
.get(&(ext_id, ext_name))
@@ -160,8 +160,8 @@ pub(super) fn get_def_use_inner(
struct DefUseCollector<'a, 'b, 'w> {
ctx: &'a mut SearchCtx<'b, 'w>,
info: DefUseInfo,
label_scope: SnapshotMap<String, DefId>,
id_scope: SnapshotMap<String, DefId>,
label_scope: SnapshotMap<EcoString, DefId>,
id_scope: SnapshotMap<EcoString, DefId>,
import: Arc<ImportInfo>,
current_id: TypstFileId,
@@ -317,7 +317,7 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
}
}
fn insert_extern(&mut self, name: String, range: Range<usize>, redefine_id: Option<DefId>) {
fn insert_extern(&mut self, name: EcoString, range: Range<usize>, redefine_id: Option<DefId>) {
if let Some(src) = &self.ext_src {
self.info.external_refs.insert(
(src.id(), Some(name.clone())),
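
Many hunks in this commit repeat one pattern: `String` becomes `ecow::EcoString` for identifier names that get cloned on every def-use lookup. A standalone sketch of the property being relied on (not part of the diff): `EcoString` stores short strings inline and reference-counts long ones, so `name.clone()` is cheap either way.

```rust
use ecow::EcoString;

fn main() {
    // Short identifiers such as "<nil>" are stored inline; no allocation.
    let short: EcoString = "<nil>".into();
    // Longer strings are reference-counted, so cloning is O(1).
    let long: EcoString = "a".repeat(1024).into();
    let shared = long.clone(); // bumps a refcount and shares the buffer
    assert_eq!(short.as_str(), "<nil>");
    assert_eq!(long, shared);
}
```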

View file

@@ -174,6 +174,11 @@ pub trait AnalysisResources {
None
}
/// Get the specs of the locally installed packages.
fn local_packages(&self) -> EcoVec<PackageSpec> {
EcoVec::new()
}
/// Resolve periscope image at the given position.
fn periscope_at(
&self,
@@ -596,32 +601,7 @@ impl<'w> AnalysisContext<'w> {
}
pub(crate) fn with_vm<T>(&self, f: impl FnOnce(&mut typst::eval::Vm) -> T) -> T {
use comemo::Track;
use typst::engine::*;
use typst::eval::*;
use typst::foundations::*;
use typst::introspection::*;
let mut locator = Locator::default();
let introspector = Introspector::default();
let mut tracer = Tracer::new();
let engine = Engine {
world: self.world().track(),
route: Route::default(),
introspector: introspector.track(),
locator: &mut locator,
tracer: tracer.track_mut(),
};
let context = Context::none();
let mut vm = Vm::new(
engine,
context.track(),
Scopes::new(Some(self.world().library())),
Span::detached(),
);
f(&mut vm)
crate::upstream::with_vm(self.world(), f)
}
pub(crate) fn const_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
@@ -651,17 +631,26 @@ impl<'w> AnalysisContext<'w> {
Some(analyze_dyn_signature(self, func.clone()).type_sig())
}
pub(crate) fn user_type_of_def(&mut self, source: &Source, def: &DefinitionLink) -> Option<Ty> {
let def_at = def.def_at.clone()?;
pub(crate) fn user_type_of_ident(
&mut self,
source: &Source,
def_fid: TypstFileId,
def_ident: &IdentRef,
) -> Option<Ty> {
let ty_chk = self.type_check(source.clone())?;
let def_use = self.def_use(source.clone())?;
let (def_id, _) = def_use.get_def(def_fid, def_ident)?;
ty_chk.type_of_def(def_id)
}
pub(crate) fn user_type_of_def(&mut self, source: &Source, def: &DefinitionLink) -> Option<Ty> {
let def_at = def.def_at.clone()?;
let def_ident = IdentRef {
name: def.name.clone(),
range: def_at.1,
};
let (def_id, _) = def_use.get_def(def_at.0, &def_ident)?;
ty_chk.type_of_def(def_id)
self.user_type_of_ident(source, def_at.0, &def_ident)
}
pub(crate) fn type_of_span(&mut self, s: Span) -> Option<Ty> {

View file

@@ -26,7 +26,7 @@ pub struct DefinitionLink {
/// A possible instance of the definition.
pub value: Option<Value>,
/// The name of the definition.
pub name: String,
pub name: EcoString,
/// The location of the definition.
pub def_at: Option<(TypstFileId, Range<usize>)>,
/// The range of the name of the definition.
@@ -54,7 +54,7 @@ pub fn find_definition(
let source = find_source_by_expr(ctx.world(), def_fid, import_node.source())?;
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathVar),
name: String::new(),
name: EcoString::new(),
value: None,
def_at: Some((source.id(), LinkedNode::new(source.root()).range())),
name_range: None,
@@ -67,7 +67,7 @@ pub fn find_definition(
let source = find_source_by_expr(ctx.world(), def_fid, include_node.source())?;
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathInclude),
name: String::new(),
name: EcoString::new(),
value: None,
def_at: Some((source.id(), (LinkedNode::new(source.root())).range())),
name_range: None,
@@ -125,7 +125,7 @@ pub fn find_definition(
Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Label),
name: ref_node.to_owned(),
name: ref_node.into(),
value: Some(Value::Content(elem)),
def_at,
name_range,
@@ -140,11 +140,11 @@ pub fn find_definition(
// Lexical reference
let ident_ref = match use_site.cast::<ast::Expr>()? {
ast::Expr::Ident(e) => Some(IdentRef {
name: e.get().to_string(),
name: e.get().clone(),
range: use_site.range(),
}),
ast::Expr::MathIdent(e) => Some(IdentRef {
name: e.get().to_string(),
name: e.get().clone(),
range: use_site.range(),
}),
ast::Expr::FieldAccess(..) => {
@@ -173,12 +173,7 @@ pub fn find_definition(
// Global definition
let Some((def_fid, def)) = def_info else {
return resolve_global_value(ctx, use_site.clone(), false).and_then(move |f| {
value_to_def(
ctx,
f,
|| Some(use_site.get().clone().into_text().to_string()),
None,
)
value_to_def(ctx, f, || Some(use_site.get().clone().into_text()), None)
});
};
@@ -237,7 +232,7 @@ fn find_bib_definition(bib_elem: Arc<BibInfo>, key: &str) -> Option<DefinitionLi
let entry = entry?;
Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::BibKey),
name: key.to_string(),
name: key.into(),
value: None,
def_at: Some((entry.file_id, entry.span.clone())),
// todo: rename with regard to string format: yaml-key/bib etc.
@@ -448,7 +443,7 @@ pub(crate) fn resolve_global_value(
fn value_to_def(
ctx: &mut AnalysisContext,
value: Value,
name: impl FnOnce() -> Option<String>,
name: impl FnOnce() -> Option<EcoString>,
name_range: Option<Range<usize>>,
) -> Option<DefinitionLink> {
let mut def_at = |span: Span| {
@@ -460,7 +455,7 @@ fn value_to_def(
Some(match value {
Value::Func(func) => {
let name = func.name().map(|e| e.to_owned()).or_else(name)?;
let name = func.name().map(|e| e.into()).or_else(name)?;
let span = func.span();
DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Function),
@ -471,7 +466,7 @@ fn value_to_def(
}
}
Value::Module(module) => {
let name = module.name().to_string();
let name = module.name().clone();
DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Variable),
name,

View file

@@ -441,7 +441,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
fn to_ident_ref(root: &LinkedNode, c: ast::Ident) -> Option<IdentRef> {
Some(IdentRef {
name: c.get().to_string(),
name: c.get().clone(),
range: root.find(c.span())?.range(),
})
}

View file

@@ -182,7 +182,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
fn check_ident(&mut self, root: LinkedNode<'_>, mode: InterpretMode) -> Option<Ty> {
let ident: ast::Ident = root.cast()?;
let ident_ref = IdentRef {
name: ident.get().to_string(),
name: ident.get().clone(),
range: root.range(),
};

View file

@@ -0,0 +1,24 @@
use std::sync::Arc;
use ecow::eco_format;
use typlite::value::*;
pub(super) fn lib() -> Arc<typlite::scopes::Scopes<Value>> {
let mut scopes = typlite::library::library();
// todo: how to import this function correctly?
scopes.define("example", example as RawFunc);
Arc::new(scopes)
}
/// Evaluate an `example`.
pub fn example(mut args: Args) -> typlite::Result<Value> {
let body = get_pos_named!(args, body: Content).0;
let body = body.trim();
let ticks = body.chars().take_while(|t| *t == '`').collect::<String>();
let body = &body[ticks.len()..];
let body = eco_format!("{ticks}typ{body}");
Ok(Value::Content(body))
}
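
In effect, `example` re-tags a fenced raw block as Typst code so that downstream markdown renderers highlight it. A string-level sketch of the same transformation (`retag_example` is a hypothetical helper, not part of the diff):

```rust
/// Hypothetical plain-string equivalent of `example` above: keep the
/// original fence length and insert `typ` after the opening backticks.
fn retag_example(body: &str) -> String {
    let body = body.trim();
    let ticks: String = body.chars().take_while(|&c| c == '`').collect();
    let rest = &body[ticks.len()..];
    format!("{ticks}typ{rest}")
}

fn main() {
    assert_eq!(
        retag_example("```\n#lorem(10)\n```"),
        "```typ\n#lorem(10)\n```"
    );
}
```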

View file

@@ -0,0 +1,816 @@
//! Package management tools.
mod library;
mod tidy;
use core::fmt::{self, Write};
use std::collections::{HashMap, HashSet};
use std::ops::Range;
use std::path::PathBuf;
use std::sync::Arc;
use comemo::Track;
use ecow::{eco_vec, EcoString, EcoVec};
use indexmap::IndexSet;
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use tinymist_world::base::{EntryState, ShadowApi, TaskInputs};
use tinymist_world::LspWorld;
use typst::diag::{eco_format, StrResult};
use typst::engine::Route;
use typst::eval::Tracer;
use typst::foundations::{Bytes, Value};
use typst::syntax::package::{PackageManifest, PackageSpec};
use typst::syntax::{FileId, Span, VirtualPath};
use typst::World;
use self::tidy::*;
use crate::analysis::analyze_dyn_signature;
use crate::syntax::{find_docs_of, get_non_strict_def_target, IdentRef};
use crate::ty::Ty;
use crate::upstream::truncated_doc_repr;
use crate::AnalysisContext;
/// Information about a package.
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageInfo {
/// The path to the package if any.
pub path: PathBuf,
/// The namespace the package lives in.
pub namespace: EcoString,
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: String,
}
impl From<(PathBuf, PackageSpec)> for PackageInfo {
fn from((path, spec): (PathBuf, PackageSpec)) -> Self {
Self {
path,
namespace: spec.namespace,
name: spec.name,
version: spec.version.to_string(),
}
}
}
/// Docs about a symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind")]
pub enum Docs {
/// Docs about a function.
#[serde(rename = "func")]
Function(TidyFuncDocs),
/// Docs about a variable.
#[serde(rename = "var")]
Variable(TidyVarDocs),
/// Docs about a module.
#[serde(rename = "module")]
Module(TidyModuleDocs),
/// Other kinds of docs.
#[serde(rename = "plain")]
Plain(EcoString),
}
impl Docs {
/// Get the markdown representation of the docs.
pub fn docs(&self) -> &str {
match self {
Self::Function(docs) => docs.docs.as_str(),
Self::Variable(docs) => docs.docs.as_str(),
Self::Module(docs) => docs.docs.as_str(),
Self::Plain(docs) => docs.as_str(),
}
}
}
/// Describes a primary function signature.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DocSignature {
/// The positional parameters.
pub pos: Vec<DocParamSpec>,
/// The named parameters.
pub named: HashMap<String, DocParamSpec>,
/// The rest parameter.
pub rest: Option<DocParamSpec>,
/// The return type.
pub ret_ty: Option<(String, String)>,
}
/// Describes a function parameter.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DocParamSpec {
/// The parameter's name.
pub name: String,
/// Documentation for the parameter.
pub docs: String,
/// Inferred type of the parameter.
pub cano_type: Option<(String, String)>,
/// The parameter's type representation.
pub type_repr: Option<EcoString>,
/// The parameter's default value expression.
pub expr: Option<EcoString>,
/// Is the parameter positional?
pub positional: bool,
/// Is the parameter named?
///
/// Can be true even if `positional` is true if the parameter can be given
/// in both variants.
pub named: bool,
/// Can the parameter be given any number of times?
pub variadic: bool,
/// Is the parameter settable with a set rule?
pub settable: bool,
}
/// Information about a symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolInfoHead {
/// The name of the symbol.
pub name: EcoString,
/// The kind of the symbol.
pub kind: EcoString,
/// The location (file, start, end) of the symbol.
pub loc: Option<(usize, usize, usize)>,
/// The raw documentation of the symbol.
pub docs: Option<String>,
/// The signature of the symbol.
pub signature: Option<DocSignature>,
/// The parsed documentation of the symbol.
pub parsed_docs: Option<Docs>,
/// The truncated repr of the symbol's constant value.
#[serde(skip)]
pub constant: Option<EcoString>,
/// The span of the symbol.
#[serde(skip)]
pub span: Option<Span>,
/// The name range of the symbol.
#[serde(skip)]
pub name_range: Option<Range<usize>>,
/// The value of the symbol.
#[serde(skip)]
pub value: Option<Value>,
}
/// Information about a symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolInfo {
/// The primary information about the symbol.
#[serde(flatten)]
pub head: SymbolInfoHead,
/// The children of the symbol.
pub children: EcoVec<SymbolInfo>,
}
/// Information about a package.
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageMeta {
/// The namespace the package lives in.
pub namespace: EcoString,
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: String,
/// The package's manifest information.
pub manifest: Option<PackageManifest>,
}
/// Trailing metadata listing the packages and files referenced by the generated docs.
#[derive(Debug, Serialize, Deserialize)]
pub struct PackageMetaEnd {
packages: Vec<PackageMeta>,
files: Vec<FileMeta>,
}
/// Information about a file referenced by the generated docs.
#[derive(Debug, Serialize, Deserialize)]
pub struct FileMeta {
package: Option<usize>,
path: PathBuf,
}
/// Parses the manifest of the package identified by `toml_id`.
pub fn get_manifest(world: &LspWorld, toml_id: FileId) -> StrResult<PackageManifest> {
let toml_data = world
.file(toml_id)
.map_err(|err| eco_format!("failed to read package manifest ({})", err))?;
let string = std::str::from_utf8(&toml_data)
.map_err(|err| eco_format!("package manifest is not valid UTF-8 ({})", err))?;
toml::from_str(string)
.map_err(|err| eco_format!("package manifest is malformed ({})", err.message()))
}
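
Both call sites below (`list_symbols` and `generate_md_docs`) build the manifest file id the same way; a hedged helper sketch under that observation (`manifest_id_for` is a made-up name):

```rust
use typst::diag::StrResult;
use typst::syntax::package::PackageSpec;
use typst::syntax::{FileId, VirtualPath};

/// Hypothetical helper: the `typst.toml` file id for a `PackageInfo`.
fn manifest_id_for(spec: &PackageInfo) -> StrResult<FileId> {
    Ok(FileId::new(
        Some(PackageSpec {
            namespace: spec.namespace.clone(),
            name: spec.name.clone(),
            version: spec.version.parse()?,
        }),
        VirtualPath::new("typst.toml"),
    ))
}
```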
/// List all symbols in a package.
pub fn list_symbols(world: &LspWorld, spec: &PackageInfo) -> StrResult<SymbolInfo> {
let toml_id = FileId::new(
Some(PackageSpec {
namespace: spec.namespace.clone(),
name: spec.name.clone(),
version: spec.version.parse()?,
}),
VirtualPath::new("typst.toml"),
);
let manifest = get_manifest(world, toml_id)?;
let entry_point = toml_id.join(&manifest.package.entrypoint);
let source = world.source(entry_point).map_err(|e| eco_format!("{e}"))?;
let route = Route::default();
let mut tracer = Tracer::default();
let w: &dyn typst::World = world;
let src = typst::eval::eval(w.track(), route.track(), tracer.track_mut(), &source)
.map_err(|e| eco_format!("{e:?}"))?;
let for_spec = PackageSpec {
namespace: spec.namespace.clone(),
name: spec.name.clone(),
version: spec.version.parse()?,
};
Ok(symbol(world, Some(&for_spec), "root", &Value::Module(src)))
}
fn jbase64<T: Serialize>(s: &T) -> String {
use base64::Engine;
let content = serde_json::to_string(s).unwrap();
base64::engine::general_purpose::STANDARD.encode(content)
}
// Unfortunately, we have only 65536 possible file ids and we cannot revoke
// them. So we share a global file id for all docs conversion.
static DOCS_CONVERT_ID: std::sync::LazyLock<Mutex<FileId>> = std::sync::LazyLock::new(|| {
Mutex::new(FileId::new(None, VirtualPath::new("__tinymist_docs__.typ")))
});
fn convert_docs(world: &LspWorld, content: &str) -> StrResult<EcoString> {
static DOCS_LIB: std::sync::LazyLock<Arc<typlite::scopes::Scopes<typlite::value::Value>>> =
std::sync::LazyLock::new(library::lib);
let conv_id = DOCS_CONVERT_ID.lock();
let entry = EntryState::new_rootless(conv_id.vpath().as_rooted_path().into()).unwrap();
let entry = entry.select_in_workspace(*conv_id);
let mut w = world.task(TaskInputs {
entry: Some(entry),
inputs: None,
});
w.map_shadow_by_id(*conv_id, Bytes::from(content.as_bytes().to_owned()))?;
// todo: bad performance
w.source_db.take_state();
let conv = typlite::Typlite::new(Arc::new(w))
.with_library(DOCS_LIB.clone())
.annotate_elements(true)
.convert()
.map_err(|e| eco_format!("failed to convert to markdown: {e}"))?;
Ok(conv)
}
#[derive(Serialize, Deserialize)]
struct ConvertResult {
errors: Vec<String>,
}
/// Generate full documentation in markdown format.
pub fn generate_md_docs(
ctx: &mut AnalysisContext,
world: &LspWorld,
spec: &PackageInfo,
) -> StrResult<String> {
log::info!("generate_md_docs {spec:?}");
let toml_id = FileId::new(
Some(PackageSpec {
namespace: spec.namespace.clone(),
name: spec.name.clone(),
version: spec.version.parse()?,
}),
VirtualPath::new("typst.toml"),
);
let mut md = String::new();
let sym = list_symbols(world, spec)?;
let title = format!("@{}/{}:{}", spec.namespace, spec.name, spec.version);
let mut errors = vec![];
writeln!(md, "# {title}").unwrap();
md.push('\n');
writeln!(md, "This documentation is generated locally. Please submit issues to [tinymist](https://github.com/Myriad-Dreamin/tinymist/issues) if you see **incorrect** information in it.").unwrap();
md.push('\n');
md.push('\n');
let manifest = get_manifest(world, toml_id)?;
let meta = PackageMeta {
namespace: spec.namespace.clone(),
name: spec.name.clone(),
version: spec.version.to_string(),
manifest: Some(manifest),
};
let package_meta = jbase64(&meta);
let _ = writeln!(md, "<!-- begin:package {package_meta} -->");
let mut key = 0;
let mut modules_to_generate = vec![(EcoString::new(), sym.head.name.clone(), sym)];
let mut generated_modules = HashSet::new();
let mut file_ids = IndexSet::new();
while !modules_to_generate.is_empty() {
for (prefix, parent_ident, sym) in std::mem::take(&mut modules_to_generate) {
// parent_ident, symbols
let symbols = sym.children;
if !prefix.is_empty() {
let _ = writeln!(md, "---\n## Module: {prefix}");
}
let module_val = sym.head.value.as_ref().unwrap();
let module = match module_val {
Value::Module(m) => m,
_ => todo!(),
};
let fid = module.file_id();
let type_info = None.or_else(|| {
let file_id = fid?;
let src = world.source(file_id).ok()?;
let def_use = ctx.def_use(src.clone())?;
let ty_chck = ctx.type_check(src)?;
Some((def_use, ty_chck))
});
let type_info = type_info.as_ref();
let persist_fid = fid.map(|f| file_ids.insert_full(f).0);
#[derive(Serialize)]
struct ModuleInfo {
prefix: EcoString,
name: EcoString,
loc: Option<usize>,
parent_ident: EcoString,
}
let m = jbase64(&ModuleInfo {
prefix: prefix.clone(),
name: sym.head.name.clone(),
loc: persist_fid,
parent_ident: parent_ident.clone(),
});
let _ = writeln!(md, "<!-- begin:module {parent_ident} {m} -->");
for mut sym in symbols {
let span = sym.head.span.and_then(|v| {
v.id().and_then(|e| {
let fid = file_ids.insert_full(e).0;
let src = world.source(e).ok()?;
let rng = src.range(v)?;
Some((fid, rng.start, rng.end))
})
});
sym.head.loc = span;
let mut convert_err = None;
if let Some(docs) = &sym.head.docs {
match convert_docs(world, docs) {
Ok(content) => {
let docs = match sym.head.kind.as_str() {
"function" => {
let t = identify_tidy_func_docs(&content).ok();
t.map(Docs::Function).unwrap_or(Docs::Plain(content))
}
"variable" => {
let t = identify_tidy_var_docs(&content).ok();
t.map(Docs::Variable).unwrap_or(Docs::Plain(content))
}
"module" => {
let t = identify_tidy_module_docs(&content).ok();
t.map(Docs::Module).unwrap_or(Docs::Plain(content))
}
_ => Docs::Plain(content),
};
sym.head.parsed_docs = Some(docs.clone());
sym.head.docs = None;
}
Err(e) => {
let err = format!("failed to convert docs in {title}: {e}").replace(
"-->", "—>", // avoid markdown comment
);
log::error!("{err}");
convert_err = Some(err);
}
}
}
let signature =
match &sym.head.parsed_docs {
Some(Docs::Function(TidyFuncDocs {
params, return_ty, ..
})) => sym.head.value.clone().and_then(|e| {
let func = match e {
Value::Func(f) => f,
_ => return None,
};
let sig = analyze_dyn_signature(ctx, func.clone());
let type_sig = type_info.and_then(|(def_use, ty_chk)| {
let def_fid = func.span().id()?;
let def_ident = IdentRef {
name: sym.head.name.clone(),
range: sym.head.name_range.clone()?,
};
let (def_id, _) = def_use.get_def(def_fid, &def_ident)?;
ty_chk.type_of_def(def_id)
});
let type_sig = type_sig.and_then(|type_sig| type_sig.sig_repr(true));
let pos_in = sig.primary().pos.iter().enumerate().map(|(i, pos)| {
(pos, type_sig.as_ref().and_then(|sig| sig.pos(i)))
});
let named_in = sig
.primary()
.named
.iter()
.map(|x| (x, type_sig.as_ref().and_then(|sig| sig.named(x.0))));
let rest_in =
sig.primary().rest.as_ref().map(|x| {
(x, type_sig.as_ref().and_then(|sig| sig.rest_param()))
});
let ret_in = type_sig
.as_ref()
.and_then(|sig| sig.body.as_ref())
.or_else(|| sig.primary().ret_ty.as_ref());
let doc_ty = |ty: Option<&Ty>| {
ty.and_then(|ty| ty.describe().map(|e| (e, format!("{ty:?}"))))
};
let _ = params;
let _ = return_ty;
let pos = pos_in
.map(|(param, ty)| DocParamSpec {
name: param.name.as_ref().to_owned(),
docs: param.docs.as_ref().to_owned(),
cano_type: doc_ty(ty),
type_repr: param.type_repr.clone(),
expr: param.expr.clone(),
positional: param.positional,
named: param.named,
variadic: param.variadic,
settable: param.settable,
})
.collect();
let named = named_in
.map(|((name, param), ty)| {
(
name.as_ref().to_owned(),
DocParamSpec {
name: param.name.as_ref().to_owned(),
docs: param.docs.as_ref().to_owned(),
cano_type: doc_ty(ty),
type_repr: param.type_repr.clone(),
expr: param.expr.clone(),
positional: param.positional,
named: param.named,
variadic: param.variadic,
settable: param.settable,
},
)
})
.collect();
let rest = rest_in.map(|(param, ty)| DocParamSpec {
name: param.name.as_ref().to_owned(),
docs: param.docs.as_ref().to_owned(),
cano_type: doc_ty(ty),
type_repr: param.type_repr.clone(),
expr: param.expr.clone(),
positional: param.positional,
named: param.named,
variadic: param.variadic,
settable: param.settable,
});
let ret_ty = doc_ty(ret_in);
Some(DocSignature {
pos,
named,
rest,
ret_ty,
})
}),
_ => None,
};
sym.head.signature = signature;
let _ = writeln!(md, "### {}: {}", sym.head.kind, sym.head.name);
let ident = eco_format!("symbol-{}-{}-{key}", sym.head.kind, sym.head.name);
key += 1;
let head = jbase64(&sym.head);
let _ = writeln!(md, "<!-- begin:symbol {ident} {head} -->");
if let Some(sig) = &sym.head.signature {
let _ = writeln!(md, "<!-- begin:sig -->");
let _ = writeln!(md, "```typc");
let _ = writeln!(
md,
"let {name}({params});",
name = sym.head.name,
params = ParamTooltip(sig)
);
let _ = writeln!(md, "```");
let _ = writeln!(md, "<!-- end:sig -->");
}
match (&sym.head.parsed_docs, convert_err) {
(_, Some(err)) => {
let err = format!("failed to convert docs in {title}: {err}").replace(
"-->", "—>", // avoid markdown comment
);
let _ = writeln!(md, "<!-- convert-error: {err} -->");
errors.push(err);
}
(Some(docs), _) => {
let _ = writeln!(md, "{}", remove_list_annotations(docs.docs()));
if let Docs::Function(f) = docs {
for param in &f.params {
let _ = writeln!(md, "<!-- begin:param {} -->", param.name);
let _ = writeln!(
md,
"#### {} ({})\n<!-- begin:param-doc {} -->\n{}\n<!-- end:param-doc {} -->",
param.name, param.types, param.name, param.docs, param.name
);
let _ = writeln!(md, "<!-- end:param -->");
}
}
}
(None, None) => {}
}
if let Some(docs) = &sym.head.docs {
let contains_code = docs.contains("```");
if contains_code {
let _ = writeln!(md, "`````typ");
}
let _ = writeln!(md, "{docs}");
if contains_code {
let _ = writeln!(md, "`````");
}
}
if !sym.children.is_empty() {
let mut full_path = prefix.clone();
if !full_path.is_empty() {
full_path.push_str(".");
}
full_path.push_str(&sym.head.name);
let link = format!("Module-{full_path}").replace(".", "-");
let _ = writeln!(md, "[Module Docs](#{link})\n");
if generated_modules.insert(full_path.clone()) {
modules_to_generate.push((full_path, ident.clone(), sym));
}
}
let _ = writeln!(md, "<!-- end:symbol {ident} -->");
}
let _ = writeln!(md, "<!-- end:module {parent_ident} -->");
}
}
let res = ConvertResult { errors };
let err = jbase64(&res);
let _ = writeln!(md, "<!-- begin:errors {err} -->");
let _ = writeln!(md, "## Errors");
for e in res.errors {
let _ = writeln!(md, "- {e}");
}
let _ = writeln!(md, "<!-- end:errors -->");
let mut packages = IndexSet::new();
let files = file_ids
.into_iter()
.map(|e| {
let pkg = e.package().map(|e| packages.insert_full(e.clone()).0);
FileMeta {
package: pkg,
path: e.vpath().as_rootless_path().to_owned(),
}
})
.collect();
let packages = packages
.into_iter()
.map(|e| PackageMeta {
namespace: e.namespace.clone(),
name: e.name.clone(),
version: e.version.to_string(),
manifest: None,
})
.collect();
let meta = PackageMetaEnd { packages, files };
let package_meta = jbase64(&meta);
let _ = writeln!(md, "<!-- end:package {package_meta} -->");
Ok(md)
}
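
For orientation, the markdown emitted by this function follows a fixed, machine-parsable skeleton. A sketch with the base64 payloads elided and a placeholder symbol name (`foo` is invented):

````markdown
# @preview/pkg:0.1.0

This documentation is generated locally. …

<!-- begin:package <base64 PackageMeta> -->
<!-- begin:module root <base64 ModuleInfo> -->
### function: foo
<!-- begin:symbol symbol-function-foo-0 <base64 SymbolInfoHead> -->
<!-- begin:sig -->
```typc
let foo(bar, baz: any);
```
<!-- end:sig -->
…converted docs and per-parameter sections…
<!-- end:symbol symbol-function-foo-0 -->
<!-- end:module root -->
<!-- begin:errors <base64 ConvertResult> -->
## Errors
<!-- end:errors -->
<!-- end:package <base64 PackageMetaEnd> -->
````

Submodules repeat the module block, each introduced by a `---` rule and a `## Module: {path}` heading.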
fn kind_of(val: &Value) -> EcoString {
match val {
Value::Module(_) => "module",
Value::Type(_) => "struct",
Value::Func(_) => "function",
Value::Label(_) => "reference",
_ => "constant",
}
.into()
}
fn symbol(world: &LspWorld, for_spec: Option<&PackageSpec>, key: &str, val: &Value) -> SymbolInfo {
let children = match val {
Value::Module(module) => {
// only generate docs for the same package
if module.file_id().map_or(true, |e| e.package() != for_spec) {
eco_vec![]
} else {
let symbols = module.scope().iter();
symbols
.map(|(k, v)| symbol(world, for_spec, k, v))
.collect()
}
}
_ => eco_vec![],
};
SymbolInfo {
head: create_head(world, key, val),
children,
}
}
fn create_head(world: &LspWorld, k: &str, v: &Value) -> SymbolInfoHead {
let kind = kind_of(v);
let (docs, name_range, span) = match v {
Value::Func(f) => {
let mut span = None;
let mut name_range = None;
let docs = None.or_else(|| {
let source = world.source(f.span().id()?).ok()?;
let node = source.find(f.span())?;
log::debug!("node: {k} -> {:?}", node.parent());
// use parent of params, todo: reliable way to get the def target
let def = get_non_strict_def_target(node.parent()?.clone())?;
span = Some(def.node().span());
name_range = def.name_range();
find_docs_of(&source, def)
});
(docs, name_range, span.or(Some(f.span())))
}
_ => (None, None, None),
};
SymbolInfoHead {
name: k.to_string().into(),
kind,
loc: None,
constant: None.or_else(|| match v {
Value::Func(_) => None,
t => Some(truncated_doc_repr(t)),
}),
signature: None,
parsed_docs: None,
docs,
name_range,
span,
value: Some(v.clone()),
}
}
// todo: hover with `with_stack`
struct ParamTooltip<'a>(&'a DocSignature);
impl<'a> fmt::Display for ParamTooltip<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut is_first = true;
let mut write_sep = |f: &mut fmt::Formatter<'_>| {
if is_first {
is_first = false;
return Ok(());
}
f.write_str(", ")
};
let primary_sig = self.0;
for p in &primary_sig.pos {
write_sep(f)?;
write!(f, "{}", p.name)?;
}
if let Some(rest) = &primary_sig.rest {
write_sep(f)?;
write!(f, "{}", rest.name)?;
}
if !primary_sig.named.is_empty() {
let mut name_prints = vec![];
for v in primary_sig.named.values() {
name_prints.push((v.name.clone(), v.type_repr.clone()))
}
name_prints.sort();
for (k, v) in name_prints {
write_sep(f)?;
let v = v.as_deref().unwrap_or("any");
let mut v = v.trim();
if v.starts_with('{') && v.ends_with('}') && v.len() > 30 {
v = "{ ... }"
}
if v.starts_with('`') && v.ends_with('`') && v.len() > 30 {
v = "raw"
}
if v.starts_with('[') && v.ends_with(']') && v.len() > 30 {
v = "content"
}
write!(f, "{k}: {v}")?;
}
}
Ok(())
}
}
fn remove_list_annotations(s: &str) -> String {
let s = s.to_string();
static REG: std::sync::LazyLock<regex::Regex> = std::sync::LazyLock::new(|| {
regex::Regex::new(r"<!-- typlite:(?:begin|end):[\w\-]+ \d+ -->").unwrap()
});
REG.replace_all(&s, "").to_string()
}
#[cfg(test)]
mod tests {
use reflexo_typst::package::{PackageRegistry, PackageSpec};
use super::{generate_md_docs, PackageInfo};
use crate::tests::*;
fn test(pkg: PackageSpec) {
run_with_sources("", |verse: &mut LspUniverse, p| {
let w = verse.snapshot();
let path = verse.registry.resolve(&pkg).unwrap();
let pi = PackageInfo {
path: path.as_ref().to_owned(),
namespace: pkg.namespace,
name: pkg.name,
version: pkg.version.to_string(),
};
run_with_ctx(verse, p, &|a, _p| {
let d = generate_md_docs(a, &w, &pi).unwrap();
let dest = format!(
"../../target/{}-{}-{}.md",
pi.namespace, pi.name, pi.version
);
std::fs::write(dest, d).unwrap();
})
})
}
#[test]
fn tidy() {
test(PackageSpec {
namespace: "preview".into(),
name: "tidy".into(),
version: "0.3.0".parse().unwrap(),
});
}
#[test]
fn touying() {
test(PackageSpec {
namespace: "preview".into(),
name: "touying".into(),
version: "0.5.2".parse().unwrap(),
});
}
#[test]
fn cetz() {
test(PackageSpec {
namespace: "preview".into(),
name: "cetz".into(),
version: "0.2.2".parse().unwrap(),
});
}
}

View file

@@ -0,0 +1,359 @@
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use typst::diag::StrResult;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyParamDocs {
pub name: String,
pub docs: String,
pub types: String,
pub default: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyFuncDocs {
pub docs: String,
pub return_ty: Option<String>,
pub params: Vec<TidyParamDocs>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyVarDocs {
pub docs: String,
pub return_ty: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyModuleDocs {
pub docs: String,
}
pub fn identify_tidy_func_docs(converted: &str) -> StrResult<TidyFuncDocs> {
let lines = converted.lines().collect::<Vec<_>>();
let mut matching_return_ty = true;
let mut buf = vec![];
let mut params = vec![];
let mut return_ty = None;
let mut break_line = None;
let mut i = lines.len();
'search: loop {
if i == 0 {
break;
}
i -= 1;
let line = lines[i];
if line.is_empty() {
continue;
}
loop {
if matching_return_ty {
matching_return_ty = false;
let Some(w) = line.trim_start().strip_prefix("->") else {
// break_line = Some(i);
continue;
};
return_ty = Some(w.trim().to_string());
break;
}
let Some(mut line) = line
.trim_end()
.strip_suffix("<!-- typlite:end:list-item 0 -->")
else {
break_line = Some(i + 1);
break 'search;
};
let mut current_line_no = i;
loop {
// <!-- typlite:begin:list-item -->
let t = line
.trim_start()
.strip_prefix("- ")
.and_then(|t| t.trim().strip_prefix("<!-- typlite:begin:list-item 0 -->"));
let line_content = match t {
Some(t) => {
buf.push(t);
break;
}
None => line,
};
buf.push(line_content);
if current_line_no == 0 {
break_line = Some(i + 1);
break 'search;
}
current_line_no -= 1;
line = lines[current_line_no];
}
let mut buf = std::mem::take(&mut buf);
buf.reverse();
let Some(first_line) = buf.first_mut() else {
break_line = Some(i + 1);
break 'search;
};
*first_line = first_line.trim();
let Some(param_line) = None.or_else(|| {
let (param_name, rest) = first_line.split_once(" ")?;
let (type_content, rest) = match_brace(rest.trim_start().strip_prefix("(")?)?;
let (_, rest) = rest.split_once(":")?;
*first_line = rest.trim();
Some((param_name.into(), type_content.into()))
}) else {
break_line = Some(i + 1);
break 'search;
};
i = current_line_no;
params.push(TidyParamDocs {
name: param_line.0,
types: param_line.1,
default: None,
docs: buf.into_iter().join("\n"),
});
break;
}
}
let docs = match break_line {
Some(line_no) => (lines[..line_no]).iter().copied().join("\n"),
None => converted.to_owned(),
};
params.reverse();
Ok(TidyFuncDocs {
docs,
return_ty,
params,
})
}
pub fn identify_tidy_var_docs(converted: &str) -> StrResult<TidyVarDocs> {
let lines = converted.lines().collect::<Vec<_>>();
let mut return_ty = None;
let mut break_line = None;
let mut i = lines.len();
loop {
if i == 0 {
break;
}
i -= 1;
let line = lines[i];
if line.is_empty() {
continue;
}
let Some(w) = line.trim_start().strip_prefix("->") else {
break_line = Some(i + 1);
break;
};
return_ty = Some(w.trim().to_string());
break_line = Some(i);
break;
}
let docs = match break_line {
Some(line_no) => (lines[..line_no]).iter().copied().join("\n"),
None => converted.to_owned(),
};
Ok(TidyVarDocs { docs, return_ty })
}
pub fn identify_tidy_module_docs(converted: &str) -> StrResult<TidyModuleDocs> {
Ok(TidyModuleDocs {
docs: converted.to_owned(),
})
}
fn match_brace(trim_start: &str) -> Option<(&str, &str)> {
let mut brace_count = 1;
let mut end = 0;
for (i, c) in trim_start.char_indices() {
match c {
'(' => brace_count += 1,
')' => brace_count -= 1,
_ => {}
}
if brace_count == 0 {
end = i;
break;
}
}
if brace_count != 0 {
return None;
}
let (type_content, rest) = trim_start.split_at(end);
Some((type_content, rest))
}
#[cfg(test)]
mod tests {
use std::fmt::Write;
use super::TidyParamDocs;
fn func(s: &str) -> String {
let f = super::identify_tidy_func_docs(s).unwrap();
let mut res = format!(">> docs:\n{}\n<< docs", f.docs);
if let Some(t) = f.return_ty {
res.push_str(&format!("\n>>return\n{t}\n<<return"));
}
for TidyParamDocs {
name,
types,
docs,
default: _,
} in f.params
{
let _ = write!(res, "\n>>arg {name}: {types}\n{docs}\n<< arg");
}
res
}
fn var(s: &str) -> String {
let f = super::identify_tidy_var_docs(s).unwrap();
let mut res = format!(">> docs:\n{}\n<< docs", f.docs);
if let Some(t) = f.return_ty {
res.push_str(&format!("\n>>return\n{t}\n<<return"));
}
res
}
#[test]
fn test_identify_tidy_docs() {
insta::assert_snapshot!(func(r###"These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`types` (optional): A list of accepted argument types.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`default` (optional): Default value for this argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
- <!-- typlite:begin:list-item 0 -->content (string): Content of `.typ` file to analyze for docstrings.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->require-all-parameters (boolean): Require that all parameters of a
functions are documented and fail if some are not.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->scope (dictionary): A dictionary of definitions that are then available
in all function and parameter descriptions.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->preamble (string): Code to prepend to all code snippets shown with `#example()`.
This can for instance be used to import something from the scope.<!-- typlite:end:list-item 0 -->
-> string"###), @r###"
>> docs:
These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`types` (optional): A list of accepted argument types.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->`default` (optional): Default value for this argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
<< docs
>>return
string
<<return
>>arg content: string
Content of `.typ` file to analyze for docstrings.
<< arg
>>arg name: string
The name for the module.
<< arg
>>arg label-prefix: auto, string
The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
<< arg
>>arg require-all-parameters: boolean
Require that all parameters of a
functions are documented and fail if some are not.
<< arg
>>arg scope: dictionary
A dictionary of definitions that are then available
in all function and parameter descriptions.
<< arg
>>arg preamble: string
Code to prepend to all code snippets shown with `#example()`.
This can for instance be used to import something from the scope.
<< arg
"###);
}
#[test]
fn test_identify_tidy_docs_nested() {
insta::assert_snapshot!(func(r###"These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
- <!-- typlite:begin:list-item 0 -->label-prefix (auto, string): The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
- <!-- typlite:begin:list-item 1 -->nested something<!-- typlite:end:list-item 1 -->
- <!-- typlite:begin:list-item 1 -->nested something 2<!-- typlite:end:list-item 1 --><!-- typlite:end:list-item 0 -->
-> string"###), @r###"
>> docs:
These again are dictionaries with the keys
- <!-- typlite:begin:list-item 0 -->`description` (optional): The description for the argument.<!-- typlite:end:list-item 0 -->
See @@show-module() for outputting the results of this function.
<< docs
>>return
string
<<return
>>arg name: string
The name for the module.
<< arg
>>arg label-prefix: auto, string
The label-prefix for internal function
references. If `auto`, the label-prefix name will be the module name.
- <!-- typlite:begin:list-item 1 -->nested something<!-- typlite:end:list-item 1 -->
- <!-- typlite:begin:list-item 1 -->nested something 2<!-- typlite:end:list-item 1 -->
<< arg
"###);
}
#[test]
fn test_identify_tidy_docs3() {
insta::assert_snapshot!(var(r###"See @@show-module() for outputting the results of this function.
-> string"###), @r###"
>> docs:
See @@show-module() for outputting the results of this function.
<< docs
>>return
string
<<return
"###);
}
#[test]
fn test_identify_tidy_docs4() {
insta::assert_snapshot!(var(r###"
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
-> string"###), @r###"
>> docs:
- <!-- typlite:begin:list-item 0 -->name (string): The name for the module.<!-- typlite:end:list-item 0 -->
<< docs
>>return
string
<<return
"###);
}
}

View file

@@ -49,7 +49,7 @@ fn filter_document_symbols(
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
DocumentSymbol {
name: e.info.name.clone(),
name: e.info.name.to_string(),
detail: None,
kind: e.info.kind.clone().try_into().unwrap(),
tags: None,

View file

@@ -113,7 +113,7 @@ fn calc_folding_range(
end_line: rng.end.line,
end_character: Some(rng.end.character),
kind: None,
collapsed_text: Some(e.info.name.clone()),
collapsed_text: Some(e.info.name.to_string()),
};
let next_start = if is_not_last_range {

View file

@@ -9,14 +9,13 @@
mod adt;
pub mod analysis;
pub mod docs;
pub mod syntax;
pub mod ty;
mod upstream;
use std::sync::Arc;
pub use analysis::AnalysisContext;
use typst::{model::Document as TypstDocument, syntax::Source};
pub use upstream::with_vm;
mod diagnostics;
pub use diagnostics::*;
@@ -80,6 +79,10 @@ pub use lsp_features::*;
mod prelude;
use std::sync::Arc;
use typst::{model::Document as TypstDocument, syntax::Source};
/// The physical position in a document.
pub type FramePosition = typst::layout::Position;

View file

@@ -20,13 +20,12 @@ pub use lsp_types::{
};
pub use reflexo::vector::ir::DefId;
pub use serde_json::Value as JsonValue;
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::diag::{EcoString, FileResult, Tracepoint};
pub use typst::foundations::{Func, Value};
pub use typst::syntax::FileId as TypstFileId;
pub use typst::syntax::{
ast::{self, AstNode},
package::{PackageManifest, PackageSpec},
LinkedNode, Source, Spanned, SyntaxKind, VirtualPath,
LinkedNode, Source, Spanned, SyntaxKind,
};
pub use typst::World;

View file

@@ -47,7 +47,7 @@ impl StatefulRequest for PrepareRenameRequest {
debug!("prepare_rename: {}", lnk.name);
Some(PrepareRenameResponse::RangeWithPlaceholder {
range: origin_selection_range,
placeholder: lnk.name,
placeholder: lnk.name.to_string(),
})
}
}

View file

@@ -159,7 +159,7 @@ impl SemanticRequest for SignatureHelpRequest {
Some(SignatureHelp {
signatures: vec![SignatureInformation {
label,
label: label.to_string(),
documentation,
parameters: Some(params),
active_parameter: active_parameter.map(|x| x as u32),

View file

@@ -85,7 +85,7 @@ fn filter_document_symbols(
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
Some(SymbolInformation {
name: e.info.name.clone(),
name: e.info.name.to_string(),
kind: e.info.kind.clone().try_into().unwrap(),
tags: None,
deprecated: None,

View file

@@ -5,6 +5,8 @@ use typst_shim::syntax::LinkedNodeExt;
use crate::prelude::*;
use crate::syntax::get_def_target;
use super::DefTarget;
fn extract_document_between(
node: &LinkedNode,
rng: Range<usize>,
@@ -93,6 +95,11 @@ pub fn find_docs_before(src: &Source, cursor: usize) -> Option<String> {
let root = LinkedNode::new(src.root());
let leaf = root.leaf_at_compat(cursor)?;
let def_target = get_def_target(leaf.clone())?;
find_docs_of(src, def_target)
}
pub fn find_docs_of(src: &Source, def_target: DefTarget) -> Option<String> {
let root = LinkedNode::new(src.root());
log::debug!("found docs target: {:?}", def_target.node().kind());
// todo: import node
let target = def_target.node().clone();

View file

@@ -1,45 +0,0 @@
use crate::prelude::*;
/// Resolve a file id by its import path.
pub fn resolve_id_by_path(
world: &dyn World,
current: TypstFileId,
import_path: &str,
) -> Option<TypstFileId> {
if import_path.starts_with('@') {
let spec = import_path.parse::<PackageSpec>().ok()?;
// Evaluate the manifest.
let manifest_id = TypstFileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
let bytes = world.file(manifest_id).ok()?;
let string = std::str::from_utf8(&bytes).map_err(FileError::from).ok()?;
let manifest: PackageManifest = toml::from_str(string).ok()?;
manifest.validate(&spec).ok()?;
// Evaluate the entry point.
return Some(manifest_id.join(&manifest.package.entrypoint));
}
let path = Path::new(import_path);
let vpath = if path.is_relative() {
current.vpath().join(path)
} else {
VirtualPath::new(path)
};
Some(TypstFileId::new(current.package().cloned(), vpath))
}
/// Find a source instance by its import node.
pub fn find_source_by_expr(
world: &dyn World,
current: TypstFileId,
e: ast::Expr,
) -> Option<Source> {
// todo: this could be valid: import("path.typ"), where v is parenthesized
match e {
ast::Expr::Str(s) => world
.source(resolve_id_by_path(world, current, s.get().as_str())?)
.ok(),
_ => None,
}
}

View file

@@ -4,7 +4,7 @@ use std::{
};
use anyhow::{anyhow, Context};
use ecow::{eco_vec, EcoVec};
use ecow::{eco_vec, EcoString, EcoVec};
use log::info;
use lsp_types::SymbolKind;
use serde::{Deserialize, Serialize};
@@ -30,7 +30,7 @@ pub(crate) fn get_lexical_hierarchy(
};
worker.stack.push((
LexicalInfo {
name: "deadbeef".to_string(),
name: "deadbeef".into(),
kind: LexicalKind::Heading(-1),
range: 0..0,
},
@@ -223,7 +223,7 @@ impl LexicalScopeKind {
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalInfo {
pub name: String,
pub name: EcoString,
pub kind: LexicalKind,
pub range: Range<usize>,
}
@@ -451,14 +451,14 @@ impl LexicalHierarchyWorker {
let symbol = if self.g == LexicalScopeKind::DefUse {
// DefUse mode does not nest symbols inside of functions
LexicalInfo {
name: String::new(),
name: EcoString::new(),
kind: LexicalKind::Block,
range: body.range(),
}
} else if current == self.stack.last().unwrap().1.len() {
// Closure has no updated symbol stack
LexicalInfo {
name: "<anonymous>".to_string(),
name: "<anonymous>".into(),
kind: LexicalKind::function(),
range: node.range(),
}
@@ -495,9 +495,9 @@ impl LexicalHierarchyWorker {
let target_name_node = node.find(target_name.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: origin_name.get().to_string(),
name: origin_name.get().clone(),
kind: LexicalKind::module_import_alias(IdentRef {
name: target_name.get().to_string(),
name: target_name.get().clone(),
range: target_name_node.range(),
}),
range: origin_name_node.range(),
@@ -590,19 +590,19 @@ impl LexicalHierarchyWorker {
let ast_node = node
.cast::<ast::Label>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
let name = ast_node.get().into();
(name, LexicalKind::label())
}
SyntaxKind::RefMarker if self.g.affect_ref() => {
let name = node.text().trim_start_matches('@').to_owned();
let name = node.text().trim_start_matches('@').into();
(name, LexicalKind::label_ref())
}
SyntaxKind::Ident if self.g.affect_symbol() => {
let ast_node = node
.cast::<ast::Ident>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
let name = ast_node.get().clone();
let kind = match self.ident_context {
IdentContext::Ref if self.g.affect_ref() => LexicalKind::val_ref(),
IdentContext::Func => LexicalKind::function(),
@@ -616,10 +616,10 @@ impl LexicalHierarchyWorker {
SyntaxKind::Equation | SyntaxKind::Raw | SyntaxKind::BlockComment
if self.g.affect_markup() =>
{
(String::new(), LexicalKind::Block)
(EcoString::new(), LexicalKind::Block)
}
SyntaxKind::CodeBlock | SyntaxKind::ContentBlock if self.g.affect_block() => {
(String::new(), LexicalKind::Block)
(EcoString::new(), LexicalKind::Block)
}
SyntaxKind::Parenthesized
| SyntaxKind::Destructuring
@@ -628,7 +628,7 @@
| SyntaxKind::Dict
if self.g.affect_expr() =>
{
(String::new(), LexicalKind::Block)
(EcoString::new(), LexicalKind::Block)
}
SyntaxKind::ModuleImport if self.g.affect_import() => {
let src = node
@@ -639,23 +639,23 @@
match src {
ast::Expr::Str(e) => {
let e = e.get();
(String::new(), LexicalKind::module(e.as_ref().into()))
(EcoString::new(), LexicalKind::module(e.as_ref().into()))
}
src => {
let e = node
.find(src.span())
.ok_or_else(|| anyhow!("find expression failed: {:?}", src))?;
let e = IdentRef {
name: String::new(),
name: EcoString::new(),
range: e.range(),
};
(String::new(), LexicalKind::module_expr(e.into()))
(EcoString::new(), LexicalKind::module_expr(e.into()))
}
}
}
SyntaxKind::Markup => {
let name = node.get().to_owned().into_text().to_string();
let name = node.get().to_owned().into_text();
if name.is_empty() {
return Ok(None);
}
@@ -703,7 +703,7 @@ impl LexicalHierarchyWorker {
// ^^^
let import_node = node.find(name.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: name.get().to_string(),
name: name.get().clone(),
kind: LexicalKind::module_as(),
range: import_node.range(),
});
@@ -721,14 +721,14 @@
let spec = e
.parse::<PackageSpec>()
.map_err(|e| anyhow!("parse package spec failed: {:?}", e))?;
spec.name.to_string()
spec.name.clone()
} else {
let e = Path::new(e.as_ref())
.file_name()
.context("no file name")?
.to_string_lossy();
let e = e.as_ref();
e.strip_suffix(".typ").context("no suffix")?.to_owned()
e.strip_suffix(".typ").context("no suffix")?.into()
};
// return (e == name).then_some(ImportRef::Path(v));
@@ -757,7 +757,7 @@ impl LexicalHierarchyWorker {
.context("no star")?;
let v = node.find(wildcard.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: "*".to_string(),
name: "*".into(),
kind: LexicalKind::module_star(),
range: v.range(),
});

View file

@@ -1,3 +1,5 @@
use std::ops::Range;
use ecow::EcoVec;
use serde::Serialize;
use typst::{
@@ -211,6 +213,29 @@ impl<'a> DefTarget<'a> {
DefTarget::Import(node) => node,
}
}
pub fn name_range(&self) -> Option<Range<usize>> {
match self {
DefTarget::Let(node) => {
let lb: ast::LetBinding<'_> = node.cast()?;
let names = match lb.kind() {
ast::LetBindingKind::Closure(name) => node.find(name.span())?,
ast::LetBindingKind::Normal(ast::Pattern::Normal(name)) => {
node.find(name.span())?
}
_ => return None,
};
Some(names.range())
}
DefTarget::Import(_node) => {
// let ident = node.cast::<ast::ImportItem>()?;
// Some(ident.span().into())
// todo: implement this
None
}
}
}
}
// todo: whether we should distinguish between strict and non-strict def targets

View file

@@ -5,8 +5,8 @@
// todo: remove this
#![allow(missing_docs)]
pub(crate) mod import;
pub use import::*;
use ecow::EcoString;
pub use tinymist_analysis::import::*;
pub(crate) mod lexical_hierarchy;
pub use lexical_hierarchy::*;
pub(crate) mod matcher;
@@ -28,7 +28,7 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct IdentRef {
/// The name of the symbol.
pub name: String,
pub name: EcoString,
/// The byte range of the symbol in the source file.
pub range: Range<usize>,
}
@@ -89,7 +89,7 @@ impl<'de> Deserialize<'de> for IdentRef {
(name, st_ed[0]..st_ed[1])
};
Ok(IdentRef {
name: name.to_string(),
name: name.into(),
range,
})
}
@@ -101,7 +101,7 @@ impl<'de> Deserialize<'de> for IdentRef {
#[derive(Debug, Clone, Serialize)]
pub struct IdentDef {
/// The name of the symbol.
pub name: String,
pub name: EcoString,
/// The kind of the symbol.
pub kind: LexicalKind,
/// The byte range of the symbol in the source file.

View file

@@ -1,7 +1,6 @@
use core::fmt;
use std::sync::Arc;
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
ops::Range,
path::{Path, PathBuf},
@@ -9,21 +8,19 @@ use std::{
use ecow::EcoVec;
use once_cell::sync::Lazy;
use reflexo_typst::config::CompileOpts;
use reflexo_typst::package::{PackageRegistry, PackageSpec};
use reflexo_typst::world::{EntryOpts, EntryState};
use reflexo_typst::{
CompileDriver, EntryManager, EntryReader, ShadowApi, TypstSystemUniverse, WorldDeps,
};
use reflexo_typst::world::EntryState;
use reflexo_typst::{CompileDriverImpl, EntryManager, EntryReader, ShadowApi, WorldDeps};
use serde_json::{ser::PrettyFormatter, Serializer, Value};
use tinymist_world::CompileFontArgs;
use typst::syntax::ast::{self, AstNode};
use typst::syntax::{FileId as TypstFileId, LinkedNode, Source, SyntaxKind, VirtualPath};
use typst::{diag::PackageError, foundations::Bytes};
pub use insta::assert_snapshot;
pub use reflexo_typst::TypstSystemWorld;
pub use serde::Serialize;
pub use serde_json::json;
pub use tinymist_world::{LspUniverse, LspUniverseBuilder, LspWorld};
use typst_shim::syntax::LinkedNodeExt;
use crate::{
@@ -32,7 +29,9 @@ use crate::{
typst_to_lsp, LspPosition, PositionEncoding, VersionedDocument,
};
struct WrapWorld<'a>(&'a mut TypstSystemWorld);
type CompileDriver<C> = CompileDriverImpl<C, tinymist_world::LspCompilerFeat>;
struct WrapWorld<'a>(&'a mut LspWorld);
impl<'a> AnalysisResources for WrapWorld<'a> {
fn world(&self) -> &dyn typst::World {
@@ -64,27 +63,33 @@ pub fn snapshot_testing(name: &str, f: &impl Fn(&mut AnalysisContext, PathBuf))
#[cfg(windows)]
let contents = contents.replace("\r\n", "\n");
run_with_sources(&contents, |w: &mut TypstSystemUniverse, p| {
let root = w.workspace_root().unwrap();
let paths = w
.shadow_paths()
.into_iter()
.map(|p| {
TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap()))
})
.collect::<Vec<_>>();
let mut w = w.snapshot();
let w = WrapWorld(&mut w);
let a = Analysis::default();
let mut ctx = AnalysisContext::new(root, &w, &a);
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p);
run_with_sources(&contents, |w, p| {
run_with_ctx(w, p, f);
});
});
});
}
pub fn run_with_ctx<T>(
w: &mut LspUniverse,
p: PathBuf,
f: &impl Fn(&mut AnalysisContext, PathBuf) -> T,
) -> T {
let root = w.workspace_root().unwrap();
let paths = w
.shadow_paths()
.into_iter()
.map(|p| TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap())))
.collect::<Vec<_>>();
let mut w = w.snapshot();
let w = WrapWorld(&mut w);
let a = Analysis::default();
let mut ctx = AnalysisContext::new(root, &w, &a);
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p)
}
pub fn get_test_properties(s: &str) -> HashMap<&'_ str, &'_ str> {
let mut props = HashMap::new();
for line in s.lines() {
@@ -116,21 +121,24 @@ pub fn compile_doc_for_test(
})
}
pub fn run_with_sources<T>(
source: &str,
f: impl FnOnce(&mut TypstSystemUniverse, PathBuf) -> T,
) -> T {
pub fn run_with_sources<T>(source: &str, f: impl FnOnce(&mut LspUniverse, PathBuf) -> T) -> T {
let root = if cfg!(windows) {
PathBuf::from("C:\\")
} else {
PathBuf::from("/")
};
let mut world = TypstSystemUniverse::new(CompileOpts {
entry: EntryOpts::new_rooted(root.as_path().into(), None),
with_embedded_fonts: typst_assets::fonts().map(Cow::Borrowed).collect(),
no_system_fonts: true,
..Default::default()
})
let mut world = LspUniverseBuilder::build(
EntryState::new_rooted(root.as_path().into(), None),
Arc::new(
LspUniverseBuilder::resolve_fonts(CompileFontArgs {
ignore_system_fonts: true,
..Default::default()
})
.unwrap(),
),
Default::default(),
None,
)
.unwrap();
let sources = source.split("-----");

View file

@@ -8,7 +8,6 @@ use serde::{Deserialize, Serialize};
use typst::foundations::{fields_on, format_str, repr, Repr, StyleChain, Styles, Value};
use typst::model::Document;
use typst::syntax::ast::AstNode;
use typst::syntax::package::PackageSpec;
use typst::syntax::{ast, is_id_continue, is_id_start, is_ident, LinkedNode, Source, SyntaxKind};
use typst::text::RawElem;
use typst_shim::syntax::LinkedNodeExt;
@@ -1048,74 +1047,21 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
}
}
/// Get local packages
fn local_packages(&mut self) -> Vec<(PackageSpec, Option<EcoString>)> {
// search packages locally. We only search in the data
// directory and not the cache directory, because the latter is not
// intended for storage of local packages.
let mut packages = vec![];
let Some(data_dir) = dirs::data_dir() else {
return packages;
};
let local_path = data_dir.join("typst/packages");
if !local_path.exists() {
return packages;
}
// namespace/package_name/version
// 1. namespace
let namespaces = std::fs::read_dir(local_path).unwrap();
for namespace in namespaces {
let namespace = namespace.unwrap();
if !namespace.file_type().unwrap().is_dir() {
continue;
}
// directories starting with '.' are hidden
if namespace.file_name().to_string_lossy().starts_with('.') {
continue;
}
// 2. package_name
let package_names = std::fs::read_dir(namespace.path()).unwrap();
for package in package_names {
let package = package.unwrap();
if !package.file_type().unwrap().is_dir() {
continue;
}
if package.file_name().to_string_lossy().starts_with('.') {
continue;
}
// 3. version
let versions = std::fs::read_dir(package.path()).unwrap();
for version in versions {
let version = version.unwrap();
if !version.file_type().unwrap().is_dir() {
continue;
}
if version.file_name().to_string_lossy().starts_with('.') {
continue;
}
let version = version.file_name().to_string_lossy().parse().unwrap();
let spec = PackageSpec {
namespace: namespace.file_name().to_string_lossy().into(),
name: package.file_name().to_string_lossy().into(),
version,
};
let description = eco_format!("{} v{}", spec.name, spec.version);
let package = (spec, Some(description));
packages.push(package);
}
}
}
packages
}
/// Add completions for all available packages.
fn package_completions(&mut self, all_versions: bool) {
let mut packages: Vec<_> = self.world().packages().iter().collect();
let mut packages: Vec<_> = self
.world()
.packages()
.iter()
.map(|e| (&e.0, e.1.clone()))
.collect();
// convert the local packages to references and add them to the package list
let local_packages = self.local_packages();
let local_packages_refs: Vec<&(PackageSpec, Option<EcoString>)> =
local_packages.iter().collect();
packages.extend(local_packages_refs);
let local_packages_refs = self.ctx.resources.local_packages();
packages.extend(
local_packages_refs
.iter()
.map(|spec| (spec, Some(eco_format!("{} v{}", spec.name, spec.version)))),
);
packages.sort_by_key(|(spec, _)| (&spec.namespace, &spec.name, Reverse(spec.version)));
if !all_versions {
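
The directory walk deleted above now lives behind `AnalysisResources::local_packages` (see the trait default earlier in this commit). A hedged sketch of what an implementation might look like; hidden-directory and file-type checks are elided, and `scan_local_packages` is a made-up name:

```rust
use ecow::{EcoString, EcoVec};
use typst::syntax::package::PackageSpec;

/// Hypothetical `local_packages` backend mirroring the removed walk over
/// {data-dir}/typst/packages/{namespace}/{name}/{version}.
fn scan_local_packages() -> EcoVec<PackageSpec> {
    let mut packages = EcoVec::new();
    let Some(data_dir) = dirs::data_dir() else {
        return packages;
    };
    let root = data_dir.join("typst/packages");
    for ns in std::fs::read_dir(root).into_iter().flatten().flatten() {
        for pkg in std::fs::read_dir(ns.path()).into_iter().flatten().flatten() {
            for ver in std::fs::read_dir(pkg.path()).into_iter().flatten().flatten() {
                let (Some(namespace), Some(name), Ok(version)) = (
                    ns.file_name().to_str().map(EcoString::from),
                    pkg.file_name().to_str().map(EcoString::from),
                    ver.file_name().to_string_lossy().parse(),
                ) else {
                    continue;
                };
                packages.push(PackageSpec { namespace, name, version });
            }
        }
    }
    packages
}
```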

View file

@@ -8,6 +8,7 @@ use typst::{
diag::{bail, StrResult},
foundations::{Content, Func, Module, Type, Value},
introspection::MetadataElem,
syntax::Span,
text::{FontInfo, FontStyle},
Library,
};
@@ -397,8 +398,7 @@ fn summarize_font_family<'a>(variants: impl Iterator<Item = &'a FontInfo>) -> Ec
detail
}
pub fn truncated_repr(value: &Value) -> EcoString {
const _10MB: usize = 100 * 1024 * 1024;
pub fn truncated_repr_<const SZ_LIMIT: usize>(value: &Value) -> EcoString {
use typst::foundations::Repr;
let data: Option<Content> = value.clone().cast().ok();
@@ -411,13 +411,53 @@ pub fn truncated_repr(value: &Value) -> EcoString {
value.repr()
};
if repr.len() > _10MB {
if repr.len() > SZ_LIMIT {
eco_format!("[truncated-repr: {} bytes]", repr.len())
} else {
repr
}
}
pub fn truncated_repr(value: &Value) -> EcoString {
const _10MB: usize = 100 * 1024 * 1024;
truncated_repr_::<_10MB>(value)
}
pub fn truncated_doc_repr(value: &Value) -> EcoString {
const _128B: usize = 128;
truncated_repr_::<_128B>(value)
}
/// Run a function with a scratch VM instance built from the given world.
pub fn with_vm<T>(world: &dyn typst::World, f: impl FnOnce(&mut typst::eval::Vm) -> T) -> T {
use comemo::Track;
use typst::engine::*;
use typst::eval::*;
use typst::foundations::*;
use typst::introspection::*;
let mut locator = Locator::default();
let introspector = Introspector::default();
let mut tracer = Tracer::new();
let engine = Engine {
world: world.track(),
route: Route::default(),
introspector: introspector.track(),
locator: &mut locator,
tracer: tracer.track_mut(),
};
let context = Context::none();
let mut vm = Vm::new(
engine,
context.track(),
Scopes::new(Some(world.library())),
Span::detached(),
);
f(&mut vm)
}
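
A hedged usage sketch, matching how `AnalysisContext::with_vm` forwards here: `world` is any `&dyn typst::World` and `expr` is an `ast::Expr` borrowed from a parsed `Source` (this assumes typst 0.11, where the `Eval` trait is public). Because the VM is built with a detached span and an empty introspector, it only suits pure, layout-free evaluation.

```rust
use typst::eval::Eval;

// Evaluate an expression against the standard library scope; error spans
// are detached and thus meaningless, so failures are simply discarded.
let value: Option<typst::foundations::Value> =
    with_vm(world, |vm| expr.eval(vm).ok());
```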
#[cfg(test)]
mod tests {
#[test]

View file

@@ -67,7 +67,7 @@ fn filter_document_labels(
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
Some(SymbolInformation {
name: e.info.name.clone(),
name: e.info.name.to_string(),
kind: e.info.kind.clone().try_into().unwrap(),
tags: None,
deprecated: None,