feat: conditionally render code in docs (#824)

* feat: conditionally render code in docs

* fix: fmt

* fix: config
Myriad-Dreamin authored on 2024-11-15 13:40:30 +08:00, committed by GitHub
parent 8b3a0e986a
commit da1e68ad1f
17 changed files with 226 additions and 71 deletions

View file

@@ -1,6 +1,8 @@
//! Semantic static and dynamic analysis of the source code.
mod bib;
use std::path::Path;
pub(crate) use bib::*;
pub mod call;
pub use call::*;
@@ -16,6 +18,8 @@ pub mod signature;
pub use signature::*;
pub mod semantic_tokens;
pub use semantic_tokens::*;
use typst::syntax::{Source, VirtualPath};
use typst::World;
mod post_tyck;
mod tyck;
pub(crate) use crate::ty::*;
@@ -30,8 +34,8 @@ pub use global::*;
use ecow::eco_format;
use lsp_types::Url;
use reflexo_typst::TypstFileId;
use typst::diag::FileError;
use reflexo_typst::{EntryReader, TypstFileId};
use typst::diag::{FileError, FileResult};
use typst::foundations::{Func, Value};
use crate::path_to_url;
@@ -52,12 +56,36 @@ impl ToFunc for Value {
/// Extension trait for `typst::World`.
pub trait LspWorldExt {
/// Get file's id by its path
fn file_id_by_path(&self, p: &Path) -> FileResult<TypstFileId>;
/// Get the source of a file by file path.
fn source_by_path(&self, p: &Path) -> FileResult<Source>;
/// Resolve the uri for a file id.
fn uri_for_id(&self, id: TypstFileId) -> Result<Url, FileError>;
fn uri_for_id(&self, id: TypstFileId) -> FileResult<Url>;
}
impl LspWorldExt for tinymist_world::LspWorld {
/// Get file's id by its path
fn file_id_by_path(&self, p: &Path) -> FileResult<TypstFileId> {
// todo: source in packages
let root = self.workspace_root().ok_or_else(|| {
let reason = eco_format!("workspace root not found");
FileError::Other(Some(reason))
})?;
let relative_path = p.strip_prefix(&root).map_err(|_| {
let reason = eco_format!("access denied, path: {p:?}, root: {root:?}");
FileError::Other(Some(reason))
})?;
Ok(TypstFileId::new(None, VirtualPath::new(relative_path)))
}
fn source_by_path(&self, p: &Path) -> FileResult<Source> {
// todo: source cache
self.source(self.file_id_by_path(p)?)
}
fn uri_for_id(&self, id: TypstFileId) -> Result<Url, FileError> {
self.path_for_id(id).and_then(|e| {
path_to_url(&e)
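
The trait methods above resolve an absolute editor path into a workspace-relative `TypstFileId`. Below is a minimal standalone sketch of just that path mapping; the `virtual_path_of` helper and the example paths are illustrative and not part of the commit, and the real `file_id_by_path` reports the failure cases as `FileError::Other`.

```rust
use std::path::Path;

/// Illustrative helper: the workspace-relative path that `file_id_by_path`
/// would wrap in a `VirtualPath`. Returns `None` where the real code reports
/// "access denied" for paths outside the workspace root.
fn virtual_path_of<'a>(root: &Path, p: &'a Path) -> Option<&'a Path> {
    p.strip_prefix(root).ok()
}

fn main() {
    let root = Path::new("/ws");
    assert_eq!(
        virtual_path_of(root, Path::new("/ws/main.typ")),
        Some(Path::new("main.typ"))
    );
    assert_eq!(virtual_path_of(root, Path::new("/other/x.typ")), None);
}
```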

View file

@@ -53,6 +53,8 @@ pub struct Analysis {
pub allow_overlapping_token: bool,
/// Whether to allow multiline semantic tokens.
pub allow_multiline_token: bool,
/// Whether to remove html from markup content in responses.
pub remove_html: bool,
/// The editor's color theme.
pub color_theme: ColorTheme,
/// The periscope provider.
@@ -496,17 +498,7 @@ impl SharedContext {
/// Get file's id by its path
pub fn file_id_by_path(&self, p: &Path) -> FileResult<TypstFileId> {
// todo: source in packages
let root = self.world.workspace_root().ok_or_else(|| {
let reason = eco_format!("workspace root not found");
FileError::Other(Some(reason))
})?;
let relative_path = p.strip_prefix(&root).map_err(|_| {
let reason = eco_format!("access denied, path: {p:?}, root: {root:?}");
FileError::Other(Some(reason))
})?;
Ok(TypstFileId::new(None, VirtualPath::new(relative_path)))
self.world.file_id_by_path(p)
}
/// Get the content of a file by file id.
@@ -521,8 +513,7 @@ impl SharedContext {
/// Get the source of a file by file path.
pub fn source_by_path(&self, p: &Path) -> FileResult<Source> {
// todo: source cache
self.source_by_id(self.file_id_by_path(p)?)
self.world.source_by_path(p)
}
/// Get a syntax object at a position.
@@ -894,6 +885,20 @@ impl SharedContext {
res.get_or_init(|| compute(self)).clone()
}
/// Remove html tags from markup content if necessary.
pub fn remove_html(&self, markup: EcoString) -> EcoString {
if !self.analysis.remove_html {
return markup;
}
static REMOVE_HTML_COMMENT_REGEX: LazyLock<regex::Regex> =
LazyLock::new(|| regex::Regex::new(r#"<!--[\s\S]*?-->"#).unwrap());
REMOVE_HTML_COMMENT_REGEX
.replace_all(&markup, "")
.trim()
.into()
}
fn query_stat(&self, id: TypstFileId, query: &'static str) -> QueryStatGuard {
let stats = &self.analysis.stats.query_stats;
let entry = stats.entry(id).or_default();
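
The new `remove_html` helper above is deliberately narrow: it only strips HTML comments, which is enough to drop the `typlite:begin:list-item` / `typlite:end:list-item` markers visible in the hover snapshots further down. A standalone sketch of that behavior, assuming only the `regex` crate as a dependency (the function name here is illustrative):

```rust
use std::sync::LazyLock;

static REMOVE_HTML_COMMENT_REGEX: LazyLock<regex::Regex> =
    LazyLock::new(|| regex::Regex::new(r#"<!--[\s\S]*?-->"#).unwrap());

/// Strip HTML comments and trim, mirroring what `SharedContext::remove_html`
/// does when the `remove_html` setting is enabled.
fn strip_html_comments(markup: &str) -> String {
    REMOVE_HTML_COMMENT_REGEX.replace_all(markup, "").trim().to_owned()
}

fn main() {
    let docs = "<!-- typlite:begin:list-item 1 -->the condition<!-- typlite:end:list-item 1 -->";
    assert_eq!(strip_html_comments(docs), "the condition");
}
```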

View file

@@ -5,6 +5,7 @@ use parking_lot::Mutex;
use tinymist_world::base::{EntryState, ShadowApi, TaskInputs};
use typlite::scopes::Scopes;
use typlite::value::Value;
use typlite::TypliteFeat;
use typst::foundations::Bytes;
use typst::{
diag::StrResult,
@@ -36,8 +37,13 @@ pub(crate) fn convert_docs(ctx: &SharedContext, content: &str) -> StrResult<EcoS
let conv = typlite::Typlite::new(Arc::new(w))
.with_library(DOCS_LIB.clone())
.with_color_theme(ctx.analysis.color_theme)
.annotate_elements(true)
.with_feature(TypliteFeat {
color_theme: Some(ctx.analysis.color_theme),
annotate_elem: true,
soft_error: true,
remove_html: ctx.analysis.remove_html,
..Default::default()
})
.convert()
.map_err(|e| eco_format!("failed to convert to markdown: {e}"))?;

View file

@@ -0,0 +1,13 @@
/// Lambda constructor.
///
/// Typing Rule:
///
/// $ (Γ , x : A ⊢ M : B #h(2em) Γ ⊢ a:B)/(Γ ⊢ λ (x : A) → M : π (x : A) → B) $
///
/// - A (type): The type of the argument.
/// - It can be also regarded as the condition of the proposition.
/// - B (type): The type of the body.
/// - It can be also regarded as the conclusion of the proposition.
#let lam(A, B) = (kind: "lambda", args: A, body: B)
#(/* ident after */ lam);

View file

@@ -0,0 +1,15 @@
/// html: false
/// Lambda constructor.
///
/// Typing Rule:
///
/// $ (Γ , x : A ⊢ M : B #h(2em) Γ ⊢ a:B)/(Γ ⊢ λ (x : A) → M : π (x : A) → B) $
///
/// - A (type): The type of the argument.
/// - It can be also regarded as the condition of the proposition.
/// - B (type): The type of the body.
/// - It can be also regarded as the conclusion of the proposition.
#let lam(A, B) = (kind: "lambda", args: A, body: B)
#(/* ident after */ lam);

View file

@@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/hover.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/render_equation.typ
---
{
"contents": "```typc\nlet lam(\n A: type,\n B: type,\n) = dictionary;\n```\n\n---\nLambda constructor.\n\n Typing Rule:\n\n <p align=\"center\"><img alt=\"typst-block\" src=\"data:image-hash/svg+xml;base64,redacted\" /></p>\n\n# Positional Parameters\n\n## A\n\n```typc\ntype: type\n```\n\nThe type of the argument.\n - <!-- typlite:begin:list-item 1 -->It can be also regarded as the condition of the proposition.<!-- typlite:end:list-item 1 -->\n\n## B (positional)\n\n```typc\ntype: type\n```\n\nThe type of the body.\n - <!-- typlite:begin:list-item 1 -->It can be also regarded as the conclusion of the proposition.<!-- typlite:end:list-item 1 -->",
"range": "12:20:12:23"
}

View file

@@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/hover.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/render_equation_no_html.typ
---
{
"contents": "```typc\nlet lam(\n A: type,\n B: type,\n) = dictionary;\n```\n\n---\nLambda constructor.\n\n Typing Rule:\n\n ```typc\n$ (Γ , x : A ⊢ M : B #h(2em) Γ ⊢ a:B)/(Γ ⊢ λ (x : A) → M : π (x : A) → B) $\n```\n\n# Positional Parameters\n\n## A\n\n```typc\ntype: type\n```\n\nThe type of the argument.\n - It can be also regarded as the condition of the proposition.\n\n## B (positional)\n\n```typc\ntype: type\n```\n\nThe type of the body.\n - It can be also regarded as the conclusion of the proposition.",
"range": "14:20:14:23"
}

View file

@@ -104,6 +104,7 @@ struct DocsChecker<'a> {
static EMPTY_MODULE: LazyLock<Module> =
LazyLock::new(|| Module::new("stub", typst::foundations::Scope::new()));
impl<'a> DocsChecker<'a> {
pub fn check_pat_docs(mut self, docs: String) -> Option<DocString> {
let converted =
@@ -122,7 +123,7 @@ impl<'a> DocsChecker<'a> {
params.insert(
param.name.into(),
VarDoc {
docs: param.docs,
docs: self.ctx.remove_html(param.docs),
ty: self.check_type_strings(module, &param.types),
},
);
@@ -133,7 +134,7 @@ impl<'a> DocsChecker<'a> {
.and_then(|ty| self.check_type_strings(module, &ty));
Some(DocString {
docs: Some(converted.docs),
docs: Some(self.ctx.remove_html(converted.docs)),
var_bounds: self.vars,
vars: params,
res_ty,
@@ -149,7 +150,7 @@ impl<'a> DocsChecker<'a> {
};
Some(DocString {
docs: Some(converted.docs),
docs: Some(self.ctx.remove_html(converted.docs)),
var_bounds: self.vars,
vars: BTreeMap::new(),
res_ty: None,

View file

@@ -1,5 +1,5 @@
use core::fmt;
use std::sync::Arc;
use std::sync::{Arc, OnceLock};
use std::{
collections::{HashMap, HashSet},
ops::Range,
@@ -21,6 +21,8 @@ pub use serde_json::json;
pub use tinymist_world::{LspUniverse, LspUniverseBuilder};
use typst_shim::syntax::LinkedNodeExt;
use crate::syntax::find_module_level_docs;
use crate::LspWorldExt;
use crate::{
analysis::Analysis, prelude::LocalContext, typst_to_lsp, LspPosition, PositionEncoding,
VersionedDocument,
@@ -60,7 +62,22 @@ pub fn run_with_ctx<T>(
.map(|p| TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap())))
.collect::<Vec<_>>();
let mut ctx = Arc::new(Analysis::default()).snapshot(w.snapshot());
let w = w.snapshot();
let source = w.source_by_path(&p).ok().unwrap();
let docs = find_module_level_docs(&source).unwrap_or_default();
let properties = get_test_properties(&docs);
let supports_html = properties
.get("html")
.map(|v| v.trim() == "true")
.unwrap_or(true);
let mut ctx = Arc::new(Analysis {
remove_html: !supports_html,
..Analysis::default()
})
.snapshot(w);
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p)
@@ -71,8 +88,10 @@ pub fn get_test_properties(s: &str) -> HashMap<&'_ str, &'_ str> {
for line in s.lines() {
let mut line = line.splitn(2, ':');
let key = line.next().unwrap().trim();
let value = line.next().unwrap().trim();
props.insert(key, value);
let Some(value) = line.next() else {
continue;
};
props.insert(key, value.trim());
}
props
}
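
For reference, the `/// html: false` line in the `render_equation_no_html.typ` fixture flows through `get_test_properties` into the `remove_html` field of the test `Analysis`. A self-contained sketch of that wiring; here `docs` stands in for the module-level doc text that `find_module_level_docs` extracts, and the `main` body is illustrative:

```rust
use std::collections::HashMap;

fn get_test_properties(s: &str) -> HashMap<&str, &str> {
    let mut props = HashMap::new();
    for line in s.lines() {
        let mut kv = line.splitn(2, ':');
        let key = kv.next().unwrap().trim();
        // Lines without a `key: value` shape are skipped instead of panicking.
        let Some(value) = kv.next() else { continue };
        props.insert(key, value.trim());
    }
    props
}

fn main() {
    let docs = "html: false";
    let props = get_test_properties(docs);
    let supports_html = props.get("html").map(|v| v.trim() == "true").unwrap_or(true);
    // `html: false` in a fixture becomes `remove_html: true` in the test `Analysis`.
    assert!(!supports_html);
}
```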
@@ -293,6 +312,7 @@ pub fn find_test_position_(s: &Source, offset: usize) -> LspPosition {
pub static REDACT_LOC: Lazy<RedactFields> = Lazy::new(|| {
RedactFields::from_iter([
"location",
"contents",
"uri",
"oldUri",
"newUri",
@@ -385,6 +405,18 @@ impl Redact for RedactFields {
format!("{}:{}", pos(&t["start"]), pos(&t["end"])).into(),
);
}
"contents" => {
let res = t.as_str().unwrap();
static REG: OnceLock<regex::Regex> = OnceLock::new();
let reg = REG.get_or_init(|| {
regex::Regex::new(r#"data:image/svg\+xml;base64,([^"]+)"#).unwrap()
});
let res = reg.replace_all(res, |_captures: &regex::Captures| {
"data:image-hash/svg+xml;base64,redacted"
});
m.insert(k.to_owned(), res.into());
}
_ => {}
}
}
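
The new `contents` redaction keeps the hover snapshots stable even though the HTML path embeds the rendered equation as a base64 SVG data URI. A standalone sketch of the substitution, assuming the `regex` crate; `redact_contents` is an illustrative name:

```rust
fn redact_contents(contents: &str) -> String {
    let reg = regex::Regex::new(r#"data:image/svg\+xml;base64,([^"]+)"#).unwrap();
    // The replacement string contains no `$`, so it is safe as a literal here;
    // the diff above uses a closure for the same effect.
    reg.replace_all(contents, "data:image-hash/svg+xml;base64,redacted")
        .into_owned()
}

fn main() {
    let contents = r#"<img alt="typst-block" src="data:image/svg+xml;base64,PHN2ZyAvPg==" />"#;
    assert_eq!(
        redact_contents(contents),
        r#"<img alt="typst-block" src="data:image-hash/svg+xml;base64,redacted" />"#
    );
}
```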