mirror of
https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-11-24 05:06:41 +00:00
dev: drop if_chain and collapse if statements (#2097)
Some checks are pending
tinymist::auto_tag / auto-tag (push) Waiting to run
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / announce (push) Blocked by required conditions
tinymist::ci / build (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run
This commit is contained in:
parent
1c9db1ce69
commit
ce447185d1
42 changed files with 520 additions and 545 deletions
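The change is mechanical across the touched files: nested `if` / `if let` blocks, and `if_chain!` macro blocks from the dropped if_chain crate, are collapsed into a single `if` whose conditions are chained with `&&`. This relies on Rust let-chains, which only compile on a sufficiently recent toolchain (note the "Check Minimum Rust version" CI job above). A minimal before/after sketch of the pattern; the function and variable names below are illustrative and not taken from the commit:

// Before: nested conditionals, or the equivalent if_chain! { ... then { ... } } block.
fn classify_before(node: Option<&str>) -> Option<usize> {
    if let Some(text) = node {
        if text.starts_with('.') {
            return Some(text.len());
        }
    }
    None
}

// After: one let-chain; behavior is unchanged and the if_chain dependency goes away.
fn classify_after(node: Option<&str>) -> Option<usize> {
    if let Some(text) = node
        && text.starts_with('.')
    {
        return Some(text.len());
    }
    None
}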
Cargo.lock (generated), 2 changed lines

@@ -4205,7 +4205,6 @@ dependencies = [
 "ecow",
 "ena",
 "hashbrown 0.14.5",
-"if_chain",
 "insta",
 "itertools 0.13.0",
 "log",

@@ -4487,7 +4486,6 @@ dependencies = [
 "ena",
 "hayagriva",
 "hex",
-"if_chain",
 "indexmap 2.10.0",
 "itertools 0.13.0",
 "log",
@@ -18,7 +18,6 @@ members = ["benches/*", "crates/*", "tests"]

 # Basic Infra
 anyhow = "1"
-if_chain = "1"
 itertools = "0.13"
 paste = "1.0"
 cfg-if = "1.0"
@@ -17,7 +17,6 @@ dashmap.workspace = true
 ecow.workspace = true
 ena.workspace = true
 hashbrown.workspace = true
-if_chain.workspace = true
 itertools.workspace = true
 log.workspace = true
 lsp-types.workspace = true
@@ -228,10 +228,10 @@ pub fn previous_decls<T>(
 if let Some(t) = recv(PreviousDecl::Ident(new_name)) {
 return Some(t);
 }
-} else if import.imports().is_none() {
-if let Some(t) = recv(PreviousDecl::ImportSource(import.source())) {
-return Some(t);
-}
+} else if import.imports().is_none()
+&& let Some(t) = recv(PreviousDecl::ImportSource(import.source()))
+{
+return Some(t);
 }
 }
 (PreviousItem::Parent(parent, child), ast::Expr::For(for_expr)) => {
@@ -269,10 +269,10 @@ pub fn previous_decls<T>(
 }
 }
 ast::Param::Spread(spread) => {
-if let Some(sink_ident) = spread.sink_ident() {
-if let Some(t) = recv(PreviousDecl::Ident(sink_ident)) {
-return Some(t);
-}
+if let Some(sink_ident) = spread.sink_ident()
+&& let Some(t) = recv(PreviousDecl::Ident(sink_ident))
+{
+return Some(t);
 }
 }
 }
@@ -506,12 +506,11 @@ pub fn adjust_expr(mut node: LinkedNode) -> Option<LinkedNode> {
 while let Some(paren_expr) = node.cast::<ast::Parenthesized>() {
 node = node.find(paren_expr.expr().span())?;
 }
-if let Some(parent) = node.parent() {
-if let Some(field_access) = parent.cast::<ast::FieldAccess>() {
-if node.span() == field_access.field().span() {
-return Some(parent.clone());
-}
-}
+if let Some(parent) = node.parent()
+&& let Some(field_access) = parent.cast::<ast::FieldAccess>()
+&& node.span() == field_access.field().span()
+{
+return Some(parent.clone());
 }
 Some(node)
 }
@@ -789,26 +788,26 @@ pub fn classify_syntax(node: LinkedNode<'_>, cursor: usize) -> Option<SyntaxClas
 None
 }

-if node.offset() + 1 == cursor && {
-// Check if the cursor is exactly after single dot.
-matches!(node.kind(), SyntaxKind::Dot)
-|| (matches!(
-node.kind(),
-SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::Error
-) && node.text().starts_with("."))
-} {
-if let Some(dot_access) = classify_dot_access(&node) {
-return Some(dot_access);
+if node.offset() + 1 == cursor
+&& {
+// Check if the cursor is exactly after single dot.
+matches!(node.kind(), SyntaxKind::Dot)
+|| (matches!(
+node.kind(),
+SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::Error
+) && node.text().starts_with("."))
 }
+&& let Some(dot_access) = classify_dot_access(&node)
+{
+return Some(dot_access);
 }

 if node.offset() + 1 == cursor
 && matches!(node.kind(), SyntaxKind::Dots)
 && matches!(node.parent_kind(), Some(SyntaxKind::Spread))
+&& let Some(dot_access) = classify_dot_access(&node)
 {
-if let Some(dot_access) = classify_dot_access(&node) {
-return Some(dot_access);
-}
+return Some(dot_access);
 }

 /// Matches ref parsing broken by a colon.
@@ -830,17 +829,18 @@ pub fn classify_syntax(node: LinkedNode<'_>, cursor: usize) -> Option<SyntaxClas
 None
 }

-if node.offset() + 1 == cursor && {
-// Check if the cursor is exactly after single dot.
-matches!(node.kind(), SyntaxKind::Colon)
-|| (matches!(
-node.kind(),
-SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::Error
-) && node.text().starts_with(":"))
-} {
-if let Some(ref_syntax) = classify_ref(&node) {
-return Some(ref_syntax);
+if node.offset() + 1 == cursor
+&& {
+// Check if the cursor is exactly after single dot.
+matches!(node.kind(), SyntaxKind::Colon)
+|| (matches!(
+node.kind(),
+SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::Error
+) && node.text().starts_with(":"))
 }
+&& let Some(ref_syntax) = classify_ref(&node)
+{
+return Some(ref_syntax);
 }

 // todo: check if we can remove Text here
@@ -1385,16 +1385,16 @@ fn arg_context<'a>(
 }
 _ => {
 let parent = node.parent();
-if let Some(parent) = parent {
-if parent.kind() == SyntaxKind::Named {
-let param_ident = parent.cast::<ast::Named>()?;
-let name = param_ident.name();
-let init = param_ident.expr();
-let init = parent.find(init.span())?;
-if init.range().contains(&node.offset()) {
-let name = args_node.find(name.span())?;
-return Some(ArgClass::Named(name));
-}
+if let Some(parent) = parent
+&& parent.kind() == SyntaxKind::Named
+{
+let param_ident = parent.cast::<ast::Named>()?;
+let name = param_ident.name();
+let init = param_ident.expr();
+let init = parent.find(init.span())?;
+if init.range().contains(&node.offset()) {
+let name = args_node.find(name.span())?;
+return Some(ArgClass::Named(name));
 }
 }
 }

@@ -13,10 +13,11 @@ use typst_shim::eval::Vm;

 /// Try to determine a set of possible values for an expression.
 pub fn analyze_expr(world: &dyn World, node: &LinkedNode) -> EcoVec<(Value, Option<Styles>)> {
-if let Some(parent) = node.parent() {
-if parent.kind() == SyntaxKind::FieldAccess && node.index() > 0 {
-return analyze_expr(world, parent);
-}
+if let Some(parent) = node.parent()
+&& parent.kind() == SyntaxKind::FieldAccess
+&& node.index() > 0
+{
+return analyze_expr(world, parent);
 }

 analyze_expr_(world, node.get())
@@ -38,10 +39,10 @@ pub fn analyze_expr_(world: &dyn World, node: &SyntaxNode) -> EcoVec<(Value, Opt
 ast::Expr::Numeric(v) => Value::numeric(v.get()),
 ast::Expr::Str(v) => Value::Str(v.get().into()),
 _ => {
-if node.kind() == SyntaxKind::Contextual {
-if let Some(child) = node.children().last() {
-return analyze_expr_(world, child);
-}
+if node.kind() == SyntaxKind::Contextual
+&& let Some(child) = node.children().last()
+{
+return analyze_expr_(world, child);
 }

 return typst::trace::<TypstPagedDocument>(world, node.span());
@@ -226,14 +226,13 @@ fn resolve_definition(head: &str, base: &str) -> StrResult<String> {
 if let Ok(field) = value.field(next, ()) {
 route.push_str("/#definitions-");
 route.push_str(next);
-if let Some(next) = parts.next() {
-if field
+if let Some(next) = parts.next()
+&& field
 .cast::<Func>()
 .is_ok_and(|func| func.param(next).is_some())
 {
 route.push('-');
 route.push_str(next);
-}
 }
 }
 } else if value
 .clone()
@@ -1,7 +1,6 @@
 use std::fmt::Write;

 use ecow::{EcoString, eco_format};
-use if_chain::if_chain;
 use typst::World;
 use typst::engine::Sink;
 use typst::foundations::{Capturer, Value, repr};
@@ -55,10 +54,10 @@ pub fn expr_tooltip(world: &dyn World, leaf: &LinkedNode) -> Option<Tooltip> {

 let values = analyze_expr(world, ancestor);

-if let [(Value::Length(length), _)] = values.as_slice() {
-if let Some(tooltip) = length_tooltip(*length) {
-return Some(tooltip);
-}
+if let [(Value::Length(length), _)] = values.as_slice()
+&& let Some(tooltip) = length_tooltip(*length)
+{
+return Some(tooltip);
 }

 if expr.is_literal() {
@@ -105,10 +104,10 @@ pub fn expr_tooltip(world: &dyn World, leaf: &LinkedNode) -> Option<Tooltip> {
 return None;
 }

-if let Some((_, count)) = last {
-if count > 1 {
-write!(pieces.last_mut().unwrap(), " (x{count})").unwrap();
-}
+if let Some((_, count)) = last
+&& count > 1
+{
+write!(pieces.last_mut().unwrap(), " (x{count})").unwrap();
 }

 if iter.next().is_some() {
@@ -170,27 +169,24 @@ fn length_tooltip(length: Length) -> Option<Tooltip> {

 /// Tooltip for font.
 fn font_tooltip(world: &dyn World, leaf: &LinkedNode) -> Option<Tooltip> {
-if_chain! {
-// Ensure that we are on top of a string.
-if let Some(string) = leaf.cast::<ast::Str>();
-let lower = string.get().to_lowercase();
+// Ensure that we are on top of a string.
+if let Some(string) = leaf.cast::<ast::Str>()
+&&let lower = string.get().to_lowercase()

 // Ensure that we are in the arguments to the text function.
-if let Some(parent) = leaf.parent();
-if let Some(named) = parent.cast::<ast::Named>();
-if named.name().as_str() == "font";
+&& let Some(parent) = leaf.parent()
+&& let Some(named) = parent.cast::<ast::Named>()
+&& named.name().as_str() == "font"

 // Find the font family.
-if let Some((_, iter)) = world
+&& let Some((_, iter)) = world
 .book()
 .families()
-.find(|&(family, _)| family.to_lowercase().as_str() == lower.as_str());
-
-then {
-let detail = summarize_font_family(iter);
-return Some(Tooltip::Text(detail));
-}
-};
+.find(|&(family, _)| family.to_lowercase().as_str() == lower.as_str())
+{
+let detail = summarize_font_family(iter);
+return Some(Tooltip::Text(detail));
+}

 None
 }
@@ -211,13 +211,13 @@ pub fn __cov_pc(span: Span, pc: i64) {
 return;
 };
 let mut map = COVERAGE_MAP.lock();
-if let Some(last_hit) = map.last_hit.as_ref() {
-if last_hit.0 == fid {
-let mut hits = last_hit.1.hits.lock();
-let c = &mut hits[pc as usize];
-*c = c.saturating_add(1);
-return;
-}
+if let Some(last_hit) = map.last_hit.as_ref()
+&& last_hit.0 == fid
+{
+let mut hits = last_hit.1.hits.lock();
+let c = &mut hits[pc as usize];
+*c = c.saturating_add(1);
+return;
 }

 let region = map.regions.entry(fid).or_default();
@@ -218,13 +218,13 @@ impl<'w> Linter<'w> {

 fn check_variable_font<'a>(&mut self, args: impl IntoIterator<Item = ast::Arg<'a>>) {
 for arg in args {
-if let ast::Arg::Named(arg) = arg {
-if arg.name().as_str() == "font" {
-self.check_variable_font_object(arg.expr().to_untyped());
-if let Some(array) = arg.expr().to_untyped().cast::<ast::Array>() {
-for item in array.items() {
-self.check_variable_font_object(item.to_untyped());
-}
+if let ast::Arg::Named(arg) = arg
+&& arg.name().as_str() == "font"
+{
+self.check_variable_font_object(arg.expr().to_untyped());
+if let Some(array) = arg.expr().to_untyped().cast::<ast::Array>() {
+for item in array.items() {
+self.check_variable_font_object(item.to_untyped());
 }
 }
 }
@@ -234,10 +234,10 @@ impl<'w> Linter<'w> {
 fn check_variable_font_object(&mut self, expr: &SyntaxNode) -> Option<()> {
 if let Some(font_dict) = expr.cast::<ast::Dict>() {
 for item in font_dict.items() {
-if let ast::DictItem::Named(arg) = item {
-if arg.name().as_str() == "name" {
-self.check_variable_font_str(arg.expr().to_untyped());
-}
+if let ast::DictItem::Named(arg) = item
+&& arg.name().as_str() == "name"
+{
+self.check_variable_font_str(arg.expr().to_untyped());
 }
 }
 }
@@ -169,25 +169,25 @@ impl EntryResolver {
 pub fn resolve_default(&self) -> Option<ImmutPath> {
 let entry = self.entry.as_ref();
 // todo: pre-compute this when updating config
-if let Some(entry) = entry {
-if entry.is_relative() {
-let root = self.root(None)?;
-return Some(root.join(entry).as_path().into());
-}
+if let Some(entry) = entry
+&& entry.is_relative()
+{
+let root = self.root(None)?;
+return Some(root.join(entry).as_path().into());
 }
 entry.cloned()
 }

 /// Validates the configuration.
 pub fn validate(&self) -> Result<()> {
-if let Some(root) = &self.root_path {
-if !root.is_absolute() {
-tinymist_l10n::bail!(
-"tinymist-project.validate-error.root-path-not-absolute",
-"rootPath or typstExtraArgs.root must be an absolute path: {root:?}",
-root = root.debug_l10n()
-);
-}
+if let Some(root) = &self.root_path
+&& !root.is_absolute()
+{
+tinymist_l10n::bail!(
+"tinymist-project.validate-error.root-path-not-absolute",
+"rootPath or typstExtraArgs.root must be an absolute path: {root:?}",
+root = root.debug_l10n()
+);
 }

 Ok(())
@@ -102,10 +102,10 @@ pub struct ProjectInput {
 impl ProjectInput {
 /// Returns a new project input relative to the provided lock directory.
 pub fn relative_to(&self, that: &Path) -> Self {
-if let Some(lock_dir) = &self.lock_dir {
-if lock_dir == that {
-return self.clone();
-}
+if let Some(lock_dir) = &self.lock_dir
+&& lock_dir == that
+{
+return self.clone();
 }

 todo!()
@@ -22,7 +22,6 @@ dirs.workspace = true
 ena.workspace = true
 ecow.workspace = true
 hayagriva.workspace = true
-if_chain.workspace = true
 itertools.workspace = true
 indexmap.workspace = true
 log.workspace = true
@@ -88,10 +88,10 @@ impl<T> RevisionManager<T> {
 .filter(|slot| slot.revision <= revision.get())
 .reduce(|x, y| if x.revision > y.revision { x } else { y });

-if let Some(slot) = slot_base {
-if slot.revision == revision.get() {
-return slot.clone();
-}
+if let Some(slot) = slot_base
+&& slot.revision == revision.get()
+{
+return slot.clone();
 }

 let slot = Arc::new(RevisionSlot {
@@ -62,14 +62,13 @@ impl<'a> CodeActionWorker<'a> {
 range: &Range<usize>,
 context: &lsp_types::CodeActionContext,
 ) -> Option<()> {
-if let Some(only) = &context.only {
-if !only.is_empty()
+if let Some(only) = &context.only
+&& !only.is_empty()
 && !only
 .iter()
 .any(|kind| *kind == CodeActionKind::EMPTY || *kind == CodeActionKind::QUICKFIX)
 {
 return None;
-}
 }

 for diag in &context.diagnostics {
@@ -546,10 +545,10 @@ impl<'a> CodeActionWorker<'a> {
 ),
 ];

-if !new_text.is_empty() {
-if let Some((_, edit)) = &punc_modify {
-edits.push(edit.clone());
-}
+if !new_text.is_empty()
+&& let Some((_, edit)) = &punc_modify
+{
+edits.push(edit.clone());
 }

 Some(CodeAction {
@@ -5,7 +5,6 @@ use std::collections::{BTreeMap, HashSet};
 use std::ops::Range;

 use ecow::{EcoString, eco_format};
-use if_chain::if_chain;
 use lsp_types::InsertTextFormat;
 use regex::{Captures, Regex};
 use serde::{Deserialize, Serialize};
@@ -300,10 +299,10 @@ impl<'a> CompletionCursor<'a> {
 /// Gets the LSP range of a given range with caching.
 fn lsp_range_of(&mut self, rng: Range<usize>) -> LspRange {
 // self.ctx.to_lsp_range(rng, &self.source)
-if let Some((last_rng, last_lsp_rng)) = &self.last_lsp_range_pair {
-if *last_rng == rng {
-return *last_lsp_rng;
-}
+if let Some((last_rng, last_lsp_rng)) = &self.last_lsp_range_pair
+&& *last_rng == rng
+{
+return *last_lsp_rng;
 }

 let lsp_rng = self.ctx.to_lsp_range(rng.clone(), &self.source);
@@ -6,81 +6,78 @@ impl CompletionPair<'_, '_, '_> {
 pub fn complete_imports(&mut self) -> bool {
 // On the colon marker of an import list:
 // "#import "path.typ":|"
-if_chain! {
-if matches!(self.cursor.leaf.kind(), SyntaxKind::Colon);
-if let Some(parent) = self.cursor.leaf.clone().parent();
-if let Some(ast::Expr::Import(import)) = parent.get().cast();
-if !matches!(import.imports(), Some(ast::Imports::Wildcard));
-if let Some(source) = parent.children().find(|child| child.is::<ast::Expr>());
-then {
-let items = match import.imports() {
-Some(ast::Imports::Items(items)) => items,
-_ => Default::default(),
-};
+if matches!(self.cursor.leaf.kind(), SyntaxKind::Colon)
+&& let Some(parent) = self.cursor.leaf.clone().parent()
+&& let Some(ast::Expr::Import(import)) = parent.get().cast()
+&& !matches!(import.imports(), Some(ast::Imports::Wildcard))
+&& let Some(source) = parent.children().find(|child| child.is::<ast::Expr>())
+{
+let items = match import.imports() {
+Some(ast::Imports::Items(items)) => items,
+_ => Default::default(),
+};

 self.cursor.from = self.cursor.cursor;

 self.import_item_completions(items, vec![], &source);
 if items.iter().next().is_some() {
 self.worker.enrich("", ", ");
-}
-return true;
 }
+return true;
 }

 // Behind an import list:
 // "#import "path.typ": |",
 // "#import "path.typ": a, b, |".
-if_chain! {
-if let Some(prev) = self.cursor.leaf.prev_sibling();
-if let Some(ast::Expr::Import(import)) = prev.get().cast();
-if !self.cursor.text[prev.offset()..self.cursor.cursor].contains('\n');
-if let Some(ast::Imports::Items(items)) = import.imports();
-if let Some(source) = prev.children().find(|child| child.is::<ast::Expr>());
-then {
-self. cursor.from = self.cursor.cursor;
+if let Some(prev) = self.cursor.leaf.prev_sibling()
+&& let Some(ast::Expr::Import(import)) = prev.get().cast()
+&& !self.cursor.text[prev.offset()..self.cursor.cursor].contains('\n')
+&& let Some(ast::Imports::Items(items)) = import.imports()
+&& let Some(source) = prev.children().find(|child| child.is::<ast::Expr>())
+{
+self.cursor.from = self.cursor.cursor;
 self.import_item_completions(items, vec![], &source);
 return true;
-}
 }

 // Behind a comma in an import list:
 // "#import "path.typ": this,|".
-if_chain! {
-if matches!(self.cursor.leaf.kind(), SyntaxKind::Comma);
-if let Some(parent) = self.cursor.leaf.clone().parent();
-if parent.kind() == SyntaxKind::ImportItems;
-if let Some(grand) = parent.parent();
-if let Some(ast::Expr::Import(import)) = grand.get().cast();
-if let Some(ast::Imports::Items(items)) = import.imports();
-if let Some(source) = grand.children().find(|child| child.is::<ast::Expr>());
-then {
-self.import_item_completions(items, vec![], &source);
-self.worker.enrich(" ", "");
-return true;
-}
+if matches!(self.cursor.leaf.kind(), SyntaxKind::Comma)
+&& let Some(parent) = self.cursor.leaf.clone().parent()
+&& parent.kind() == SyntaxKind::ImportItems
+&& let Some(grand) = parent.parent()
+&& let Some(ast::Expr::Import(import)) = grand.get().cast()
+&& let Some(ast::Imports::Items(items)) = import.imports()
+&& let Some(source) = grand.children().find(|child| child.is::<ast::Expr>())
+{
+self.import_item_completions(items, vec![], &source);
+self.worker.enrich(" ", "");
+return true;
 }

 // Behind a half-started identifier in an import list:
 // "#import "path.typ": th|".
-if_chain! {
-if matches!(self.cursor.leaf.kind(), SyntaxKind::Ident | SyntaxKind::Dot);
-if let Some(path_ctx) = self.cursor.leaf.clone().parent();
-if path_ctx.kind() == SyntaxKind::ImportItemPath;
-if let Some(parent) = path_ctx.parent();
-if parent.kind() == SyntaxKind::ImportItems;
-if let Some(grand) = parent.parent();
-if let Some(ast::Expr::Import(import)) = grand.get().cast();
-if let Some(ast::Imports::Items(items)) = import.imports();
-if let Some(source) = grand.children().find(|child| child.is::<ast::Expr>());
-then {
-if self.cursor.leaf.kind() == SyntaxKind::Ident {
-self.cursor.from = self.cursor.leaf.offset();
-}
-let path = path_ctx.cast::<ast::ImportItemPath>().map(|path| path.iter().take_while(|ident| ident.span() != self.cursor.leaf.span()).collect());
-self.import_item_completions( items, path.unwrap_or_default(), &source);
-return true;
+if matches!(self.cursor.leaf.kind(), SyntaxKind::Ident | SyntaxKind::Dot)
+&& let Some(path_ctx) = self.cursor.leaf.clone().parent()
+&& path_ctx.kind() == SyntaxKind::ImportItemPath
+&& let Some(parent) = path_ctx.parent()
+&& parent.kind() == SyntaxKind::ImportItems
+&& let Some(grand) = parent.parent()
+&& let Some(ast::Expr::Import(import)) = grand.get().cast()
+&& let Some(ast::Imports::Items(items)) = import.imports()
+&& let Some(source) = grand.children().find(|child| child.is::<ast::Expr>())
+{
+if self.cursor.leaf.kind() == SyntaxKind::Ident {
+self.cursor.from = self.cursor.leaf.offset();
 }
+let path = path_ctx.cast::<ast::ImportItemPath>().map(|path| {
+path.iter()
+.take_while(|ident| ident.span() != self.cursor.leaf.span())
+.collect()
+});
+self.import_item_completions(items, path.unwrap_or_default(), &source);
+return true;
 }

 false
@@ -6,67 +6,63 @@ impl CompletionPair<'_, '_, '_> {
 pub fn complete_comments(&mut self) -> bool {
 let text = self.cursor.leaf.get().text();
 // check if next line defines a function
-if_chain! {
-if text == "///" || text == "/// ";
+if (text == "///" || text == "/// ")
 // hash node
-if let Some(hash_node) = self.cursor.leaf.next_leaf();
+&& let Some(hash_node) = self.cursor.leaf.next_leaf()
 // let node
-if let Some(let_node) = hash_node.next_leaf();
-if let Some(let_closure) = let_node.next_leaf();
-if matches!(let_closure.parent_kind(), Some(SyntaxKind::Closure));
-if let Some(closure) = let_closure.parent();
-if let Some(closure) = closure.cast::<ast::Expr>();
-if let ast::Expr::Closure(c) = closure;
-then {
+&& let Some(let_node) = hash_node.next_leaf()
+&& let Some(let_closure) = let_node.next_leaf()
+&& matches!(let_closure.parent_kind(), Some(SyntaxKind::Closure))
+&& let Some(closure) = let_closure.parent()
+&& let Some(closure) = closure.cast::<ast::Expr>()
+&& let ast::Expr::Closure(c) = closure
+{
 // Only completes if the next line is a function definition
 let rng = self.cursor.leaf.offset()..hash_node.offset();
 let text_between = &self.cursor.source.text()[rng];
 let mut line_count = 0;
 for ch in text_between.chars() {
 if ch == '\n' {
 line_count += 1;
-}
-if line_count > 1 {
-return false;
-}
 }
-let mut doc_snippet: String = if text == "///" {
-" $0\n///".to_string()
-} else {
-"$0\n///".to_string()
-};
-let mut i = 0;
-for param in c.params().children() {
-// TODO: Properly handle Pos and Spread argument
-let param: &EcoString = match param {
-Param::Pos(p) => {
-match p {
-ast::Pattern::Normal(ast::Expr::Ident(ident)) => ident.get(),
-_ => &"_".into()
-}
-}
-Param::Named(n) => n.name().get(),
-Param::Spread(s) => {
-if let Some(ident) = s.sink_ident() {
-&eco_format!("{}", ident.get())
-} else {
-&EcoString::new()
-}
-}
-};
-log::info!("param: {param}, index: {i}");
-doc_snippet += &format!("\n/// - {param} (${}): ${}", i + 1, i + 2);
-i += 2;
+if line_count > 1 {
+return false;
 }
-doc_snippet += &format!("\n/// -> ${}", i + 1);
-self.push_completion(Completion {
-label: "Document function".into(),
-apply: Some(doc_snippet.into()),
-..Completion::default()
-});
 }
-};
+
+let mut doc_snippet: String = if text == "///" {
+" $0\n///".to_string()
+} else {
+"$0\n///".to_string()
+};
+let mut i = 0;
+for param in c.params().children() {
+// TODO: Properly handle Pos and Spread argument
+let param: &EcoString = match param {
+Param::Pos(p) => match p {
+ast::Pattern::Normal(ast::Expr::Ident(ident)) => ident.get(),
+_ => &"_".into(),
+},
+Param::Named(n) => n.name().get(),
+Param::Spread(s) => {
+if let Some(ident) = s.sink_ident() {
+&eco_format!("{}", ident.get())
+} else {
+&EcoString::new()
+}
+}
+};
+log::info!("param: {param}, index: {i}");
+doc_snippet += &format!("\n/// - {param} (${}): ${}", i + 1, i + 2);
+i += 2;
+}
+doc_snippet += &format!("\n/// -> ${}", i + 1);
+self.push_completion(Completion {
+label: "Document function".into(),
+apply: Some(doc_snippet.into()),
+..Completion::default()
+});
+}

 true
 }
@@ -77,26 +73,25 @@ impl CompletionPair<'_, '_, '_> {
 node_ancestors(&self.cursor.leaf).find(|node| matches!(node.kind(), SyntaxKind::Raw));

 // Behind a half-completed binding: "#let x = |" or `#let f(x) = |`.
-if_chain! {
-if let Some(prev) = self.cursor.leaf.prev_leaf();
-if matches!(prev.kind(), SyntaxKind::Eq | SyntaxKind::Arrow);
-if matches!( prev.parent_kind(), Some(SyntaxKind::LetBinding | SyntaxKind::Closure));
-then {
-self.cursor.from = self.cursor.cursor;
-self.code_completions( false);
-return true;
-}
+if let Some(prev) = self.cursor.leaf.prev_leaf()
+&& matches!(prev.kind(), SyntaxKind::Eq | SyntaxKind::Arrow)
+&& matches!(
+prev.parent_kind(),
+Some(SyntaxKind::LetBinding | SyntaxKind::Closure)
+)
+{
+self.cursor.from = self.cursor.cursor;
+self.code_completions(false);
+return true;
 }

 // Behind a half-completed context block: "#context |".
-if_chain! {
-if let Some(prev) = self.cursor.leaf.prev_leaf();
-if prev.kind() == SyntaxKind::Context;
-then {
-self.cursor.from = self.cursor.cursor;
-self.code_completions(false);
-return true;
-}
+if let Some(prev) = self.cursor.leaf.prev_leaf()
+&& prev.kind() == SyntaxKind::Context
+{
+self.cursor.from = self.cursor.cursor;
+self.code_completions(false);
+return true;
 }

 // Directly after a raw block.
@@ -171,14 +166,12 @@ impl CompletionPair<'_, '_, '_> {
 }

 // Behind a half-completed context block: "context |".
-if_chain! {
-if let Some(prev) = self.cursor.leaf.prev_leaf();
-if prev.kind() == SyntaxKind::Context;
-then {
-self.cursor.from = self.cursor.cursor;
-self.code_completions(false);
-return true;
-}
+if let Some(prev) = self.cursor.leaf.prev_leaf()
+&& prev.kind() == SyntaxKind::Context
+{
+self.cursor.from = self.cursor.cursor;
+self.code_completions(false);
+return true;
 }

 // An existing identifier: "{ pa| }".
@@ -139,10 +139,10 @@ impl CompletionPair<'_, '_, '_> {
 SurroundingSyntax::SetRule => 'set_rule: {
 // todo: user defined elements
 for func in &checker.functions {
-if let Some(elem) = func.element() {
-if elem.params().iter().any(|param| param.settable) {
-break 'set_rule true;
-}
+if let Some(elem) = func.element()
+&& elem.params().iter().any(|param| param.settable)
+{
+break 'set_rule true;
 }
 }

@@ -196,10 +196,10 @@ impl CompletionPair<'_, '_, '_> {
 }

 fn analyze_import_source(ctx: &LocalContext, types: &TypeInfo, s: ast::Expr) -> Option<Ty> {
-if let Some(res) = types.type_of_span(s.span()) {
-if !matches!(res.value(), Some(Value::Str(..))) {
-return Some(types.simplify(res, false));
-}
+if let Some(res) = types.type_of_span(s.span())
+&& !matches!(res.value(), Some(Value::Str(..)))
+{
+return Some(types.simplify(res, false));
 }

 let m = ctx.module_by_syntax(s.to_untyped())?;
@@ -226,16 +226,18 @@ impl CompletionPair<'_, '_, '_> {
 apply = Some(eco_format!("at(\"{label}\")"));
 } else {
 let apply_label = &mut label.as_str();
-if apply_label.ends_with('"') && self.cursor.after.starts_with('"') {
-if let Some(trimmed) = apply_label.strip_suffix('"') {
-*apply_label = trimmed;
-}
+if apply_label.ends_with('"')
+&& self.cursor.after.starts_with('"')
+&& let Some(trimmed) = apply_label.strip_suffix('"')
+{
+*apply_label = trimmed;
 }
 let from_before = slice_at(self.cursor.text, 0..self.cursor.from);
-if apply_label.starts_with('"') && from_before.ends_with('"') {
-if let Some(trimmed) = apply_label.strip_prefix('"') {
-*apply_label = trimmed;
-}
+if apply_label.starts_with('"')
+&& from_before.ends_with('"')
+&& let Some(trimmed) = apply_label.strip_prefix('"')
+{
+*apply_label = trimmed;
 }

 if apply_label.len() != label.len() {
@@ -653,10 +653,10 @@ impl SharedContext {
 // e.g. `f(x|)`, we will select the `x`
 if cursor == node.offset() + 1 && is_mark(node.kind()) {
 let prev_leaf = node.prev_leaf();
-if let Some(prev_leaf) = prev_leaf {
-if prev_leaf.range().end == node.offset() {
-node = prev_leaf;
-}
+if let Some(prev_leaf) = prev_leaf
+&& prev_leaf.range().end == node.offset()
+{
+node = prev_leaf;
 }
 }

@@ -141,10 +141,10 @@ impl LinkStrWorker {
 for item in call.args().items() {
 match item {
 ast::Arg::Named(named) if named.name().get().as_str() == "style" => {
-if let ast::Expr::Str(style) = named.expr() {
-if hayagriva::archive::ArchivedStyle::by_name(&style.get()).is_some() {
-return Some(());
-}
+if let ast::Expr::Str(style) = named.expr()
+&& hayagriva::archive::ArchivedStyle::by_name(&style.get()).is_some()
+{
+return Some(());
 }
 self.analyze_path_expr(node, named.expr());
 return Some(());
@@ -349,18 +349,19 @@ impl Tokenizer {
 .map(|token_type| Token::new(token_type, modifiers, range.clone()));

 // Push start
-if let Some(prev_token) = self.token.as_mut() {
-if !prev_token.range.is_empty() && prev_token.range.start < range.start {
-let end = prev_token.range.end.min(range.start);
-let sliced = Token {
-token_type: prev_token.token_type,
-modifiers: prev_token.modifiers,
-range: prev_token.range.start..end,
-};
-// Slice the previous token
-prev_token.range.start = end;
-self.push(sliced);
-}
+if let Some(prev_token) = self.token.as_mut()
+&& !prev_token.range.is_empty()
+&& prev_token.range.start < range.start
+{
+let end = prev_token.range.end.min(range.start);
+let sliced = Token {
+token_type: prev_token.token_type,
+modifiers: prev_token.modifiers,
+range: prev_token.range.start..end,
+};
+// Slice the previous token
+prev_token.range.start = end;
+self.push(sliced);
 }

 if !is_leaf {
@@ -372,14 +373,14 @@ impl Tokenizer {
 }

 // Push end
-if let Some(token) = token.clone() {
-if !token.range.is_empty() {
-// Slice the previous token
-if let Some(prev_token) = self.token.as_mut() {
-prev_token.range.start = token.range.end;
-}
-self.push(token);
+if let Some(token) = token.clone()
+&& !token.range.is_empty()
+{
+// Slice the previous token
+if let Some(prev_token) = self.token.as_mut() {
+prev_token.range.start = token.range.end;
 }
+self.push(token);
 }
 }

@@ -21,20 +21,18 @@ impl ApplyChecker for ApplyTypeChecker<'_, '_> {
 sig => (sig, false),
 };

-if !is_partialize {
-if let Some(ty) = sig.call(args, pol, self.base) {
-self.resultant.push(ty);
-}
+if !is_partialize && let Some(ty) = sig.call(args, pol, self.base) {
+self.resultant.push(ty);
 }

 // todo: remove this after we implemented dependent types
 match sig {
 Sig::TypeCons { val, .. } => {
-if *val == typst::foundations::Type::of::<typst::foundations::Type>() {
-if let Some(p0) = args.pos(0) {
-self.resultant
-.push(Ty::Unary(TypeUnary::new(UnaryOp::TypeOf, p0.clone())));
-}
+if *val == typst::foundations::Type::of::<typst::foundations::Type>()
+&& let Some(p0) = args.pos(0)
+{
+self.resultant
+.push(Ty::Unary(TypeUnary::new(UnaryOp::TypeOf, p0.clone())));
 }
 }
 Sig::Builtin(BuiltinSig::TupleMap(this)) => {
@@ -25,17 +25,17 @@ impl TypeChecker<'_> {
 };
 let mut vars = vars;
 for (_name, doc) in vars.iter_mut() {
-if let Some(ty) = &mut doc.ty {
-if let Some(mutated) = ty.mutate(true, &mut renamer) {
-*ty = mutated;
-}
-}
-}
-if let Some(ty) = res_ty.as_mut() {
-if let Some(mutated) = ty.mutate(true, &mut renamer) {
+if let Some(ty) = &mut doc.ty
+&& let Some(mutated) = ty.mutate(true, &mut renamer)
+{
 *ty = mutated;
 }
 }
+if let Some(ty) = res_ty.as_mut()
+&& let Some(mutated) = ty.mutate(true, &mut renamer)
+{
+*ty = mutated;
+}
 DocString {
 docs,
 var_bounds,
@@ -34,12 +34,12 @@ pub(crate) fn convert_docs(
 }

 let mut imports = Vec::new();
-if WorkspaceResolver::is_package_file(fid) {
-if let Some(pkg) = fid.package() {
-let pkg_spec = pkg.to_string();
-imports.push(format!("#import {pkg_spec:?}"));
-imports.push(format!("#import {pkg_spec:?}: *"));
-}
+if WorkspaceResolver::is_package_file(fid)
+&& let Some(pkg) = fid.package()
+{
+let pkg_spec = pkg.to_string();
+imports.push(format!("#import {pkg_spec:?}"));
+imports.push(format!("#import {pkg_spec:?}: *"));
 }
 imports.push(format!(
 "#import {:?}: *",
@@ -229,12 +229,11 @@ impl ScanDefCtx<'_> {
 };

 if let Some((span, mod_fid)) = head.decl.as_ref().and_then(|decl| decl.file_id()).zip(site)
+&& span != *mod_fid
 {
-if span != *mod_fid {
-head.is_external = true;
-head.oneliner = head.docs.map(|docs| oneliner(&docs).to_owned());
-head.docs = None;
-}
+head.is_external = true;
+head.oneliner = head.docs.map(|docs| oneliner(&docs).to_owned());
+head.docs = None;
 }

 // Insert module that is not exported
@@ -130,31 +130,30 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Pac
 eco_format!("symbol-{}-{}", child.kind, child.name)
 };

-if child.is_external {
-if let Some(fid) = child_fid {
-let lnk = if fid.package() == Some(for_spec) {
-let sub_aka = akas(fid);
-let sub_primary = sub_aka.first().cloned().unwrap_or_default();
-child.external_link = Some(format!(
-"#symbol-{}-{sub_primary}.{}",
-child.kind, child.name
-));
-format!("#{}-{}-in-{sub_primary}", child.kind, child.name)
-.replace(".", "")
+if child.is_external
+&& let Some(fid) = child_fid
+{
+let lnk = if fid.package() == Some(for_spec) {
+let sub_aka = akas(fid);
+let sub_primary = sub_aka.first().cloned().unwrap_or_default();
+child.external_link = Some(format!(
+"#symbol-{}-{sub_primary}.{}",
+child.kind, child.name
+));
+format!("#{}-{}-in-{sub_primary}", child.kind, child.name).replace(".", "")
 } else if let Some(spec) = fid.package() {
 let lnk = format!(
 "https://typst.app/universe/package/{}/{}",
 spec.name, spec.version
 );
 child.external_link = Some(lnk.clone());
 lnk
 } else {
 let lnk: String = "https://typst.app/docs".into();
 child.external_link = Some(lnk.clone());
 lnk
 };
 child.symbol_link = Some(lnk);
-}
 }

 let child_children = std::mem::take(&mut child.children);
@@ -327,10 +327,10 @@ impl ExternalDocLink {
 fn get(def: &Definition) -> Option<CommandLink> {
 let value = def.value();

-if matches!(value, Some(Value::Func(..))) {
-if let Some(builtin) = Self::builtin_func_tooltip("https://typst.app/docs/", def) {
-return Some(builtin);
-}
+if matches!(value, Some(Value::Func(..)))
+&& let Some(builtin) = Self::builtin_func_tooltip("https://typst.app/docs/", def)
+{
+return Some(builtin);
 };

 value.and_then(|value| Self::builtin_value_tooltip("https://typst.app/docs/", &value))
@@ -22,11 +22,11 @@ pub fn jump_from_click(
 ) -> Option<(SourceSpanOffset, SourceSpanOffset)> {
 // Try to find a link first.
 for (pos, item) in frame.items() {
-if let FrameItem::Link(_dest, size) = item {
-if is_in_rect(*pos, *size, click) {
-// todo: url reaction
-return None;
-}
+if let FrameItem::Link(_dest, size) = item
+&& is_in_rect(*pos, *size, click)
+{
+// todo: url reaction
+return None;
 }
 }

@@ -140,10 +140,10 @@ fn jump_from_cursor_(
 // In a page, we try to find a closer span than the existing found one.
 let mut p_dis = min_dis;

-if let Some(point) = find_in_frame(&page.frame, span, &mut p_dis, &mut min_point) {
-if let Some(page) = NonZeroUsize::new(idx + 1) {
-positions.push(Position { page, point });
-}
+if let Some(point) = find_in_frame(&page.frame, span, &mut p_dis, &mut min_point)
+&& let Some(page) = NonZeroUsize::new(idx + 1)
+{
+positions.push(Position { page, point });
 }

 // In this page, we found a closer span and update.
@@ -55,12 +55,11 @@ pub(crate) fn expr_of(

 // If there is a cycle, the expression will be stable as the source is
 // unchanged.
-if let Some(exports) = ei {
-if prev_exports.size() != exports.size()
-|| hash128(&prev_exports) != hash128(&exports)
+if let Some(exports) = ei
+&& (prev_exports.size() != exports.size()
+|| hash128(&prev_exports) != hash128(&exports))
 {
 return None;
-}
 }
 }

@@ -384,10 +384,10 @@ impl LexicalHierarchyWorker {
 SyntaxKind::Closure => {
 let first_child = node.children().next();
 let current = self.stack.last_mut().unwrap().1.len();
-if let Some(first_child) = first_child {
-if first_child.kind() == SyntaxKind::Ident {
-self.check_node_with(first_child, IdentContext::Func)?;
-}
+if let Some(first_child) = first_child
+&& first_child.kind() == SyntaxKind::Ident
+{
+self.check_node_with(first_child, IdentContext::Func)?;
 }
 let body = node
 .children()
@@ -251,20 +251,20 @@ fn match_by_pos(mut n: LinkedNode, prev: bool, ident: bool) -> usize {
 match n.kind() {
 SyntaxKind::Closure => {
 let closure = n.cast::<ast::Closure>().unwrap();
-if let Some(name) = closure.name() {
-if let Some(m) = n.find(name.span()) {
-n = m;
-break 'match_loop;
-}
+if let Some(name) = closure.name()
+&& let Some(m) = n.find(name.span())
+{
+n = m;
+break 'match_loop;
 }
 }
 SyntaxKind::LetBinding => {
 let let_binding = n.cast::<ast::LetBinding>().unwrap();
-if let Some(name) = let_binding.kind().bindings().first() {
-if let Some(m) = n.find(name.span()) {
-n = m;
-break 'match_loop;
-}
+if let Some(name) = let_binding.kind().bindings().first()
+&& let Some(m) = n.find(name.span())
+{
+n = m;
+break 'match_loop;
 }
 }
 _ => {}
@@ -56,10 +56,10 @@ pub fn typst_typographic_family(mut family: &str) -> &str {

 // Also allow an extra modifier, but apply it only if it is separated it
 // from the text before it (to prevent false positives).
-if let Some(t) = MODIFIERS.iter().find_map(|s| t.strip_suffix(s)) {
-if let Some(stripped) = t.strip_suffix(SEPARATORS) {
-trimmed = stripped;
-}
+if let Some(t) = MODIFIERS.iter().find_map(|s| t.strip_suffix(s))
+&& let Some(stripped) = t.strip_suffix(SEPARATORS)
+{
+trimmed = stripped;
 }
 }

@@ -188,11 +188,11 @@ fn infer_info_from_web_font(
 _ => None,
 };

-if let Some(guess_stretch) = guess_stretch {
-if idx == 0 {
-stretch = Some(guess_stretch);
-break 'searchLoop;
-}
+if let Some(guess_stretch) = guess_stretch
+&& idx == 0
+{
+stretch = Some(guess_stretch);
+break 'searchLoop;
 }
 }
 }
@@ -131,15 +131,15 @@ impl<F: CompilerFeat + 'static> CompileSnapshot<F> {
 /// tasks break this assumption.
 pub fn task(mut self, inputs: TaskInputs) -> Self {
 'check_changed: {
-if let Some(entry) = &inputs.entry {
-if *entry != self.world.entry_state() {
-break 'check_changed;
-}
+if let Some(entry) = &inputs.entry
+&& *entry != self.world.entry_state()
+{
+break 'check_changed;
 }
-if let Some(inputs) = &inputs.inputs {
-if inputs.clone() != self.world.inputs() {
-break 'check_changed;
-}
+if let Some(inputs) = &inputs.inputs
+&& inputs.clone() != self.world.inputs()
+{
+break 'check_changed;
 }

 return self;
@@ -11,7 +11,6 @@ use serde::{Deserialize, Serialize};
 use tinymist_project::CompileReport;
 use tinymist_query::DiagnosticsMap;
 use tokio::sync::mpsc;
-use typst::utils::OptionExt;

 use crate::project::ProjectInsId;
 use crate::{tool::word_count::WordsCount, LspClient};
@@ -116,7 +115,8 @@ impl EditorActor {

 self.status.path = compile_status
 .compiling_id
-.map_or_default(|fid| unix_slash(fid.vpath().as_rooted_path()));
+.map(|fid| unix_slash(fid.vpath().as_rooted_path()))
+.unwrap_or_default();
 self.status.page_count = compile_status.page_count;
 self.status.status = match &compile_status.status {
 Compiling => CompileStatusEnum::Compiling,
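The hunk above also swaps one helper: `.map_or_default(...)` came from the `typst::utils::OptionExt` extension trait, whose import is dropped in the preceding hunk, and it is replaced by the plain standard-library combination. A small sketch of the equivalence as the replacement suggests it; the `path` value and closure here are made-up examples, not code from the commit:

// `map_or_default(f)` maps a Some value with `f`, otherwise yields the type's default.
// Without the extension trait, the same thing is spelled with std combinators:
let path: Option<&str> = None;
let rendered: String = path.map(|p| p.replace('\\', "/")).unwrap_or_default();
assert_eq!(rendered, String::new());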
@@ -81,10 +81,10 @@ fn run(args: CompileArgs, world: Arc<LspWorld>) -> Result<()> {
 _ => Format::Md,
 };

-if let Some(assets_path) = args.assets_path.as_ref() {
-if !assets_path.exists() {
-std::fs::create_dir_all(assets_path).context("failed to create assets directory")?;
-}
+if let Some(assets_path) = args.assets_path.as_ref()
+&& !assets_path.exists()
+{
+std::fs::create_dir_all(assets_path).context("failed to create assets directory")?;
 }

 let doc = Typlite::new(world.clone())
@@ -25,58 +25,57 @@ impl ListParser {
         let mut all_items = Vec::new();

         for child in &element.children {
-            if let HtmlNode::Element(li) = child {
-                if li.tag == tag::li {
-                    let attrs = ListItemAttr::parse(&li.attrs)?;
-                    let mut item_content = Vec::new();
-
-                    let mut li_buffer = Vec::new();
-
-                    if parser.feat.annotate_elem {
-                        li_buffer.push(Node::Custom(Box::new(super::core::Comment(eco_format!(
-                            "typlite:begin:list-item {}",
-                            parser.list_level - 1
-                        )))));
-                    }
-
-                    for li_child in &li.children {
-                        match li_child {
-                            HtmlNode::Text(text, _) => {
-                                li_buffer.push(Node::Text(text.clone()));
-                            }
-                            HtmlNode::Element(child_elem) => {
-                                let element_content =
-                                    parser.process_list_item_element(child_elem)?;
-
-                                if !element_content.is_empty() {
-                                    li_buffer.extend(element_content);
-                                }
-                            }
-                            _ => {}
-                        }
-                    }
-
-                    if parser.feat.annotate_elem {
-                        li_buffer.push(Node::Custom(Box::new(super::core::Comment(eco_format!(
-                            "typlite:end:list-item {}",
-                            parser.list_level - 1
-                        )))));
-                    }
-
-                    if !li_buffer.is_empty() {
-                        item_content.push(Node::Paragraph(li_buffer));
-                    }
-                    if !item_content.is_empty() {
-                        if is_ordered {
-                            all_items.push(ListItem::Ordered {
-                                number: attrs.value,
-                                content: item_content,
-                            });
-                        } else {
-                            all_items.push(ListItem::Unordered {
-                                content: item_content,
-                            });
-                        }
-                    }
-                }
-            }
+            if let HtmlNode::Element(li) = child
+                && li.tag == tag::li
+            {
+                let attrs = ListItemAttr::parse(&li.attrs)?;
+                let mut item_content = Vec::new();
+
+                let mut li_buffer = Vec::new();
+
+                if parser.feat.annotate_elem {
+                    li_buffer.push(Node::Custom(Box::new(super::core::Comment(eco_format!(
+                        "typlite:begin:list-item {}",
+                        parser.list_level - 1
+                    )))));
+                }
+
+                for li_child in &li.children {
+                    match li_child {
+                        HtmlNode::Text(text, _) => {
+                            li_buffer.push(Node::Text(text.clone()));
+                        }
+                        HtmlNode::Element(child_elem) => {
+                            let element_content = parser.process_list_item_element(child_elem)?;
+
+                            if !element_content.is_empty() {
+                                li_buffer.extend(element_content);
+                            }
+                        }
+                        _ => {}
+                    }
+                }
+
+                if parser.feat.annotate_elem {
+                    li_buffer.push(Node::Custom(Box::new(super::core::Comment(eco_format!(
+                        "typlite:end:list-item {}",
+                        parser.list_level - 1
+                    )))));
+                }
+
+                if !li_buffer.is_empty() {
+                    item_content.push(Node::Paragraph(li_buffer));
+                }
+                if !item_content.is_empty() {
+                    if is_ordered {
+                        all_items.push(ListItem::Ordered {
+                            number: attrs.value,
+                            content: item_content,
+                        });
+                    } else {
+                        all_items.push(ListItem::Unordered {
+                            content: item_content,
+                        });
+                    }
+                }
+            }
@@ -59,15 +59,15 @@ impl TableParser {

     fn find_table_in_grid(grid_element: &HtmlElement) -> Option<&HtmlElement> {
         for child in &grid_element.children {
-            if let HtmlNode::Element(table_elem) = child {
-                if table_elem.tag == md_tag::table {
-                    // Find table tag within m1table
-                    for inner_child in &table_elem.children {
-                        if let HtmlNode::Element(inner) = inner_child {
-                            if inner.tag == tag::table {
-                                return Some(inner);
-                            }
-                        }
-                    }
-                }
-            }
+            if let HtmlNode::Element(table_elem) = child
+                && table_elem.tag == md_tag::table
+            {
+                // Find table tag within m1table
+                for inner_child in &table_elem.children {
+                    if let HtmlNode::Element(inner) = inner_child
+                        && inner.tag == tag::table
+                    {
+                        return Some(inner);
+                    }
+                }
+            }
@@ -77,10 +77,10 @@ impl TableParser {

     fn find_table_direct(element: &HtmlElement) -> Option<&HtmlElement> {
         for child in &element.children {
-            if let HtmlNode::Element(table_elem) = child {
-                if table_elem.tag == tag::table {
-                    return Some(table_elem);
-                }
-            }
+            if let HtmlNode::Element(table_elem) = child
+                && table_elem.tag == tag::table
+            {
+                return Some(table_elem);
+            }
         }
         None
@@ -134,14 +134,14 @@ impl TableParser {
         is_header_section: bool,
     ) -> Result<()> {
         for row_node in &section.children {
-            if let HtmlNode::Element(row_elem) = row_node {
-                if row_elem.tag == tag::tr {
-                    let current_row =
-                        Self::process_table_row(parser, row_elem, is_header_section, headers)?;
-
-                    if !is_header_section && !current_row.is_empty() {
-                        rows.push(current_row);
-                    }
-                }
-            }
+            if let HtmlNode::Element(row_elem) = row_node
+                && row_elem.tag == tag::tr
+            {
+                let current_row =
+                    Self::process_table_row(parser, row_elem, is_header_section, headers)?;
+
+                if !is_header_section && !current_row.is_empty() {
+                    rows.push(current_row);
+                }
+            }
         }
@@ -158,20 +158,20 @@ impl TableParser {

         // Process cells in this row
         for cell_node in &row_elem.children {
-            if let HtmlNode::Element(cell) = cell_node {
-                if cell.tag == tag::td || cell.tag == tag::th {
-                    let mut cell_content = Vec::new();
-                    parser.convert_children_into(&mut cell_content, cell)?;
-
-                    // Merge cell content into a single node
-                    let merged_cell = Self::merge_cell_content(cell_content);
-
-                    // Add to appropriate section
-                    if is_header || cell.tag == tag::th {
-                        headers.push(merged_cell);
-                    } else {
-                        current_row.push(merged_cell);
-                    }
-                }
-            }
+            if let HtmlNode::Element(cell) = cell_node
+                && (cell.tag == tag::td || cell.tag == tag::th)
+            {
+                let mut cell_content = Vec::new();
+                parser.convert_children_into(&mut cell_content, cell)?;
+
+                // Merge cell content into a single node
+                let merged_cell = Self::merge_cell_content(cell_content);
+
+                // Add to appropriate section
+                if is_header || cell.tag == tag::th {
+                    headers.push(merged_cell);
+                } else {
+                    current_row.push(merged_cell);
+                }
+            }
         }
@@ -214,10 +214,11 @@ impl TableParser {

     fn check_section_for_complex_cells(section: &HtmlElement) -> bool {
         for row_node in &section.children {
-            if let HtmlNode::Element(row_elem) = row_node {
-                if row_elem.tag == tag::tr && Self::check_row_for_complex_cells(row_elem) {
-                    return true;
-                }
-            }
+            if let HtmlNode::Element(row_elem) = row_node
+                && row_elem.tag == tag::tr
+                && Self::check_row_for_complex_cells(row_elem)
+            {
+                return true;
+            }
         }
         false
@@ -225,15 +226,14 @@ impl TableParser {

     fn check_row_for_complex_cells(row_elem: &HtmlElement) -> bool {
         for cell_node in &row_elem.children {
-            if let HtmlNode::Element(cell) = cell_node {
-                if (cell.tag == tag::td || cell.tag == tag::th)
-                    && cell.attrs.0.iter().any(|(name, _)| {
-                        let name = name.into_inner();
-                        name == PicoStr::constant("colspan") || name == PicoStr::constant("rowspan")
-                    })
-                {
-                    return true;
-                }
-            }
+            if let HtmlNode::Element(cell) = cell_node
+                && (cell.tag == tag::td || cell.tag == tag::th)
+                && cell.attrs.0.iter().any(|(name, _)| {
+                    let name = name.into_inner();
+                    name == PicoStr::constant("colspan") || name == PicoStr::constant("rowspan")
+                })
+            {
+                return true;
+            }
         }
         false
@@ -397,13 +397,13 @@ impl DocxWriter {
                 block_type: _,
             } => {
                 // Add language information
-                if let Some(lang) = language {
-                    if !lang.is_empty() {
-                        let lang_para = Paragraph::new()
-                            .style("CodeBlock")
-                            .add_run(Run::new().add_text(lang));
-                        docx = docx.add_paragraph(lang_para);
-                    }
-                }
+                if let Some(lang) = language
+                    && !lang.is_empty()
+                {
+                    let lang_para = Paragraph::new()
+                        .style("CodeBlock")
+                        .add_run(Run::new().add_text(lang));
+                    docx = docx.add_paragraph(lang_para);
+                }

                 // Process code line by line, preserving line breaks
@@ -155,10 +155,10 @@ impl TextWriter {
                 }
             }
             node if node.is_custom_type::<ExternalFrameNode>() => {
-                if let Some(external_frame) = node.as_custom_type::<ExternalFrameNode>() {
-                    if !external_frame.alt_text.is_empty() {
-                        output.push_str(&external_frame.alt_text);
-                    }
-                }
+                if let Some(external_frame) = node.as_custom_type::<ExternalFrameNode>()
+                    && !external_frame.alt_text.is_empty()
+                {
+                    output.push_str(&external_frame.alt_text);
+                }
             }
             node if node.is_custom_type::<crate::common::HighlightNode>() => {
@@ -225,12 +225,11 @@ impl RenderActor {
             // order
             let range_res = {
                 let mut range_res = range_res;
-                if let Some(info) = &mut range_res {
-                    if let Some((x, y)) = info.start.zip(info.end) {
-                        if y <= x {
-                            std::mem::swap(&mut info.start, &mut info.end);
-                        }
-                    }
-                }
+                if let Some(info) = &mut range_res
+                    && let Some((x, y)) = info.start.zip(info.end)
+                    && y <= x
+                {
+                    std::mem::swap(&mut info.start, &mut info.end);
+                }

                 range_res
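Every hunk above applies the same mechanical rewrite: a nested if / if-let pair is collapsed into a single if whose conditions are chained with &&, i.e. a Rust let-chain. A minimal sketch of that before/after shape on hypothetical code (the functions and names below are illustrative only, not taken from this diff; the chained form assumes edition 2024, where let-chains are stable):

// Before: one level of nesting per check.
fn effective_port_nested(port: Option<u16>) -> u16 {
    if let Some(p) = port {
        if p != 0 {
            return p;
        }
    }
    8080
}

// After: the same logic as a single if with &&-chained conditions.
fn effective_port_chained(port: Option<u16>) -> u16 {
    if let Some(p) = port
        && p != 0
    {
        return p;
    }
    8080
}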