mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-11-23 12:46:43 +00:00)
dev: drop if_chain and collapse if statements (#2097)
parent 1c9db1ce69
commit ce447185d1
42 changed files with 520 additions and 545 deletions
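The pattern applied across the 42 files is the same: nested if / if let blocks, and the if_chain! macro that expands to such nesting, are collapsed into a single condition using Rust's let-chains (an edition-2024 feature), and the if_chain dependency is dropped from Cargo.toml. A minimal, self-contained sketch of the before/after shape (illustrative only, not code taken from the repository; the helper and function names are made up for this sketch):

// Hypothetical helper used only for this sketch.
fn first_word(text: &str) -> Option<&str> {
    text.split_whitespace().next()
}

// Before: nested conditions, which is also what if_chain! expands to.
fn starts_long_nested(text: &str) -> bool {
    if let Some(word) = first_word(text) {
        if word.len() > 3 {
            return true;
        }
    }
    false
}

// After: a single let-chain, mirroring the collapsed code in the hunks below.
fn starts_long_chained(text: &str) -> bool {
    if let Some(word) = first_word(text)
        && word.len() > 3
    {
        return true;
    }
    false
}

fn main() {
    // Both forms short-circuit identically.
    assert_eq!(starts_long_nested("hello world"), starts_long_chained("hello world"));
    assert!(!starts_long_chained("a b c"));
}

The chained form removes one level of nesting per condition, which is what lets the crate drop its if_chain dependency.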
@@ -22,7 +22,6 @@ dirs.workspace = true
 ena.workspace = true
 ecow.workspace = true
 hayagriva.workspace = true
-if_chain.workspace = true
 itertools.workspace = true
 indexmap.workspace = true
 log.workspace = true

@@ -88,10 +88,10 @@ impl<T> RevisionManager<T> {
             .filter(|slot| slot.revision <= revision.get())
             .reduce(|x, y| if x.revision > y.revision { x } else { y });
 
-        if let Some(slot) = slot_base {
-            if slot.revision == revision.get() {
-                return slot.clone();
-            }
-        }
+        if let Some(slot) = slot_base
+            && slot.revision == revision.get()
+        {
+            return slot.clone();
+        }
 
         let slot = Arc::new(RevisionSlot {

@@ -62,14 +62,13 @@ impl<'a> CodeActionWorker<'a> {
         range: &Range<usize>,
         context: &lsp_types::CodeActionContext,
     ) -> Option<()> {
-        if let Some(only) = &context.only {
-            if !only.is_empty()
-                && !only
-                    .iter()
-                    .any(|kind| *kind == CodeActionKind::EMPTY || *kind == CodeActionKind::QUICKFIX)
-            {
-                return None;
-            }
-        }
+        if let Some(only) = &context.only
+            && !only.is_empty()
+            && !only
+                .iter()
+                .any(|kind| *kind == CodeActionKind::EMPTY || *kind == CodeActionKind::QUICKFIX)
+        {
+            return None;
+        }
 
         for diag in &context.diagnostics {

@@ -546,10 +545,10 @@ impl<'a> CodeActionWorker<'a> {
             ),
         ];
 
-        if !new_text.is_empty() {
-            if let Some((_, edit)) = &punc_modify {
-                edits.push(edit.clone());
-            }
-        }
+        if !new_text.is_empty()
+            && let Some((_, edit)) = &punc_modify
+        {
+            edits.push(edit.clone());
+        }
 
         Some(CodeAction {

@@ -5,7 +5,6 @@ use std::collections::{BTreeMap, HashSet};
 use std::ops::Range;
 
 use ecow::{EcoString, eco_format};
-use if_chain::if_chain;
 use lsp_types::InsertTextFormat;
 use regex::{Captures, Regex};
 use serde::{Deserialize, Serialize};

@@ -300,10 +299,10 @@ impl<'a> CompletionCursor<'a> {
     /// Gets the LSP range of a given range with caching.
     fn lsp_range_of(&mut self, rng: Range<usize>) -> LspRange {
         // self.ctx.to_lsp_range(rng, &self.source)
-        if let Some((last_rng, last_lsp_rng)) = &self.last_lsp_range_pair {
-            if *last_rng == rng {
-                return *last_lsp_rng;
-            }
-        }
+        if let Some((last_rng, last_lsp_rng)) = &self.last_lsp_range_pair
+            && *last_rng == rng
+        {
+            return *last_lsp_rng;
+        }
 
         let lsp_rng = self.ctx.to_lsp_range(rng.clone(), &self.source);

@@ -6,81 +6,78 @@ impl CompletionPair<'_, '_, '_> {
     pub fn complete_imports(&mut self) -> bool {
        // On the colon marker of an import list:
        // "#import "path.typ":|"
-        if_chain! {
-            if matches!(self.cursor.leaf.kind(), SyntaxKind::Colon);
-            if let Some(parent) = self.cursor.leaf.clone().parent();
-            if let Some(ast::Expr::Import(import)) = parent.get().cast();
-            if !matches!(import.imports(), Some(ast::Imports::Wildcard));
-            if let Some(source) = parent.children().find(|child| child.is::<ast::Expr>());
-            then {
-                let items = match import.imports() {
-                    Some(ast::Imports::Items(items)) => items,
-                    _ => Default::default(),
-                };
-
-                self.cursor.from = self.cursor.cursor;
-
-                self.import_item_completions(items, vec![], &source);
-                if items.iter().next().is_some() {
-                    self.worker.enrich("", ", ");
-                }
-                return true;
-            }
-        }
+        if matches!(self.cursor.leaf.kind(), SyntaxKind::Colon)
+            && let Some(parent) = self.cursor.leaf.clone().parent()
+            && let Some(ast::Expr::Import(import)) = parent.get().cast()
+            && !matches!(import.imports(), Some(ast::Imports::Wildcard))
+            && let Some(source) = parent.children().find(|child| child.is::<ast::Expr>())
+        {
+            let items = match import.imports() {
+                Some(ast::Imports::Items(items)) => items,
+                _ => Default::default(),
+            };
+
+            self.cursor.from = self.cursor.cursor;
+
+            self.import_item_completions(items, vec![], &source);
+            if items.iter().next().is_some() {
+                self.worker.enrich("", ", ");
+            }
+            return true;
+        }
 
        // Behind an import list:
        // "#import "path.typ": |",
        // "#import "path.typ": a, b, |".
-        if_chain! {
-            if let Some(prev) = self.cursor.leaf.prev_sibling();
-            if let Some(ast::Expr::Import(import)) = prev.get().cast();
-            if !self.cursor.text[prev.offset()..self.cursor.cursor].contains('\n');
-            if let Some(ast::Imports::Items(items)) = import.imports();
-            if let Some(source) = prev.children().find(|child| child.is::<ast::Expr>());
-            then {
-                self.cursor.from = self.cursor.cursor;
-                self.import_item_completions(items, vec![], &source);
-                return true;
-            }
-        }
+        if let Some(prev) = self.cursor.leaf.prev_sibling()
+            && let Some(ast::Expr::Import(import)) = prev.get().cast()
+            && !self.cursor.text[prev.offset()..self.cursor.cursor].contains('\n')
+            && let Some(ast::Imports::Items(items)) = import.imports()
+            && let Some(source) = prev.children().find(|child| child.is::<ast::Expr>())
+        {
+            self.cursor.from = self.cursor.cursor;
+            self.import_item_completions(items, vec![], &source);
+            return true;
+        }
 
        // Behind a comma in an import list:
        // "#import "path.typ": this,|".
-        if_chain! {
-            if matches!(self.cursor.leaf.kind(), SyntaxKind::Comma);
-            if let Some(parent) = self.cursor.leaf.clone().parent();
-            if parent.kind() == SyntaxKind::ImportItems;
-            if let Some(grand) = parent.parent();
-            if let Some(ast::Expr::Import(import)) = grand.get().cast();
-            if let Some(ast::Imports::Items(items)) = import.imports();
-            if let Some(source) = grand.children().find(|child| child.is::<ast::Expr>());
-            then {
-                self.import_item_completions(items, vec![], &source);
-                self.worker.enrich(" ", "");
-                return true;
-            }
-        }
+        if matches!(self.cursor.leaf.kind(), SyntaxKind::Comma)
+            && let Some(parent) = self.cursor.leaf.clone().parent()
+            && parent.kind() == SyntaxKind::ImportItems
+            && let Some(grand) = parent.parent()
+            && let Some(ast::Expr::Import(import)) = grand.get().cast()
+            && let Some(ast::Imports::Items(items)) = import.imports()
+            && let Some(source) = grand.children().find(|child| child.is::<ast::Expr>())
+        {
+            self.import_item_completions(items, vec![], &source);
+            self.worker.enrich(" ", "");
+            return true;
+        }
 
        // Behind a half-started identifier in an import list:
        // "#import "path.typ": th|".
-        if_chain! {
-            if matches!(self.cursor.leaf.kind(), SyntaxKind::Ident | SyntaxKind::Dot);
-            if let Some(path_ctx) = self.cursor.leaf.clone().parent();
-            if path_ctx.kind() == SyntaxKind::ImportItemPath;
-            if let Some(parent) = path_ctx.parent();
-            if parent.kind() == SyntaxKind::ImportItems;
-            if let Some(grand) = parent.parent();
-            if let Some(ast::Expr::Import(import)) = grand.get().cast();
-            if let Some(ast::Imports::Items(items)) = import.imports();
-            if let Some(source) = grand.children().find(|child| child.is::<ast::Expr>());
-            then {
-                if self.cursor.leaf.kind() == SyntaxKind::Ident {
-                    self.cursor.from = self.cursor.leaf.offset();
-                }
-                let path = path_ctx.cast::<ast::ImportItemPath>().map(|path| path.iter().take_while(|ident| ident.span() != self.cursor.leaf.span()).collect());
-                self.import_item_completions(items, path.unwrap_or_default(), &source);
-                return true;
-            }
-        }
+        if matches!(self.cursor.leaf.kind(), SyntaxKind::Ident | SyntaxKind::Dot)
+            && let Some(path_ctx) = self.cursor.leaf.clone().parent()
+            && path_ctx.kind() == SyntaxKind::ImportItemPath
+            && let Some(parent) = path_ctx.parent()
+            && parent.kind() == SyntaxKind::ImportItems
+            && let Some(grand) = parent.parent()
+            && let Some(ast::Expr::Import(import)) = grand.get().cast()
+            && let Some(ast::Imports::Items(items)) = import.imports()
+            && let Some(source) = grand.children().find(|child| child.is::<ast::Expr>())
+        {
+            if self.cursor.leaf.kind() == SyntaxKind::Ident {
+                self.cursor.from = self.cursor.leaf.offset();
+            }
+            let path = path_ctx.cast::<ast::ImportItemPath>().map(|path| {
+                path.iter()
+                    .take_while(|ident| ident.span() != self.cursor.leaf.span())
+                    .collect()
+            });
+            self.import_item_completions(items, path.unwrap_or_default(), &source);
+            return true;
+        }
 
         false

@@ -6,67 +6,63 @@ impl CompletionPair<'_, '_, '_> {
     pub fn complete_comments(&mut self) -> bool {
         let text = self.cursor.leaf.get().text();
         // check if next line defines a function
-        if_chain! {
-            if text == "///" || text == "/// ";
+        if (text == "///" || text == "/// ")
             // hash node
-            if let Some(hash_node) = self.cursor.leaf.next_leaf();
+            && let Some(hash_node) = self.cursor.leaf.next_leaf()
             // let node
-            if let Some(let_node) = hash_node.next_leaf();
-            if let Some(let_closure) = let_node.next_leaf();
-            if matches!(let_closure.parent_kind(), Some(SyntaxKind::Closure));
-            if let Some(closure) = let_closure.parent();
-            if let Some(closure) = closure.cast::<ast::Expr>();
-            if let ast::Expr::Closure(c) = closure;
-            then {
-                // Only completes if the next line is a function definition
-                let rng = self.cursor.leaf.offset()..hash_node.offset();
-                let text_between = &self.cursor.source.text()[rng];
-                let mut line_count = 0;
-                for ch in text_between.chars() {
-                    if ch == '\n' {
-                        line_count += 1;
-                    }
-                    if line_count > 1 {
-                        return false;
-                    }
-                }
-
-                let mut doc_snippet: String = if text == "///" {
-                    " $0\n///".to_string()
-                } else {
-                    "$0\n///".to_string()
-                };
-                let mut i = 0;
-                for param in c.params().children() {
-                    // TODO: Properly handle Pos and Spread argument
-                    let param: &EcoString = match param {
-                        Param::Pos(p) => {
-                            match p {
-                                ast::Pattern::Normal(ast::Expr::Ident(ident)) => ident.get(),
-                                _ => &"_".into()
-                            }
-                        }
-                        Param::Named(n) => n.name().get(),
-                        Param::Spread(s) => {
-                            if let Some(ident) = s.sink_ident() {
-                                &eco_format!("{}", ident.get())
-                            } else {
-                                &EcoString::new()
-                            }
-                        }
-                    };
-                    log::info!("param: {param}, index: {i}");
-                    doc_snippet += &format!("\n/// - {param} (${}): ${}", i + 1, i + 2);
-                    i += 2;
-                }
-                doc_snippet += &format!("\n/// -> ${}", i + 1);
-                self.push_completion(Completion {
-                    label: "Document function".into(),
-                    apply: Some(doc_snippet.into()),
-                    ..Completion::default()
-                });
-            }
-        };
+            && let Some(let_node) = hash_node.next_leaf()
+            && let Some(let_closure) = let_node.next_leaf()
+            && matches!(let_closure.parent_kind(), Some(SyntaxKind::Closure))
+            && let Some(closure) = let_closure.parent()
+            && let Some(closure) = closure.cast::<ast::Expr>()
+            && let ast::Expr::Closure(c) = closure
+        {
+            // Only completes if the next line is a function definition
+            let rng = self.cursor.leaf.offset()..hash_node.offset();
+            let text_between = &self.cursor.source.text()[rng];
+            let mut line_count = 0;
+            for ch in text_between.chars() {
+                if ch == '\n' {
+                    line_count += 1;
+                }
+                if line_count > 1 {
+                    return false;
+                }
+            }
+
+            let mut doc_snippet: String = if text == "///" {
+                " $0\n///".to_string()
+            } else {
+                "$0\n///".to_string()
+            };
+            let mut i = 0;
+            for param in c.params().children() {
+                // TODO: Properly handle Pos and Spread argument
+                let param: &EcoString = match param {
+                    Param::Pos(p) => match p {
+                        ast::Pattern::Normal(ast::Expr::Ident(ident)) => ident.get(),
+                        _ => &"_".into(),
+                    },
+                    Param::Named(n) => n.name().get(),
+                    Param::Spread(s) => {
+                        if let Some(ident) = s.sink_ident() {
+                            &eco_format!("{}", ident.get())
+                        } else {
+                            &EcoString::new()
+                        }
+                    }
+                };
+                log::info!("param: {param}, index: {i}");
+                doc_snippet += &format!("\n/// - {param} (${}): ${}", i + 1, i + 2);
+                i += 2;
+            }
+            doc_snippet += &format!("\n/// -> ${}", i + 1);
+            self.push_completion(Completion {
+                label: "Document function".into(),
+                apply: Some(doc_snippet.into()),
+                ..Completion::default()
+            });
+        }
 
         true
     }

@@ -77,26 +73,25 @@ impl CompletionPair<'_, '_, '_> {
             node_ancestors(&self.cursor.leaf).find(|node| matches!(node.kind(), SyntaxKind::Raw));
 
         // Behind a half-completed binding: "#let x = |" or `#let f(x) = |`.
-        if_chain! {
-            if let Some(prev) = self.cursor.leaf.prev_leaf();
-            if matches!(prev.kind(), SyntaxKind::Eq | SyntaxKind::Arrow);
-            if matches!(prev.parent_kind(), Some(SyntaxKind::LetBinding | SyntaxKind::Closure));
-            then {
-                self.cursor.from = self.cursor.cursor;
-                self.code_completions(false);
-                return true;
-            }
-        }
+        if let Some(prev) = self.cursor.leaf.prev_leaf()
+            && matches!(prev.kind(), SyntaxKind::Eq | SyntaxKind::Arrow)
+            && matches!(
+                prev.parent_kind(),
+                Some(SyntaxKind::LetBinding | SyntaxKind::Closure)
+            )
+        {
+            self.cursor.from = self.cursor.cursor;
+            self.code_completions(false);
+            return true;
+        }
 
         // Behind a half-completed context block: "#context |".
-        if_chain! {
-            if let Some(prev) = self.cursor.leaf.prev_leaf();
-            if prev.kind() == SyntaxKind::Context;
-            then {
-                self.cursor.from = self.cursor.cursor;
-                self.code_completions(false);
-                return true;
-            }
-        }
+        if let Some(prev) = self.cursor.leaf.prev_leaf()
+            && prev.kind() == SyntaxKind::Context
+        {
+            self.cursor.from = self.cursor.cursor;
+            self.code_completions(false);
+            return true;
+        }
 
         // Directly after a raw block.

@@ -171,14 +166,12 @@ impl CompletionPair<'_, '_, '_> {
         }
 
         // Behind a half-completed context block: "context |".
-        if_chain! {
-            if let Some(prev) = self.cursor.leaf.prev_leaf();
-            if prev.kind() == SyntaxKind::Context;
-            then {
-                self.cursor.from = self.cursor.cursor;
-                self.code_completions(false);
-                return true;
-            }
-        }
+        if let Some(prev) = self.cursor.leaf.prev_leaf()
+            && prev.kind() == SyntaxKind::Context
+        {
+            self.cursor.from = self.cursor.cursor;
+            self.code_completions(false);
+            return true;
+        }
 
         // An existing identifier: "{ pa| }".

@@ -139,10 +139,10 @@ impl CompletionPair<'_, '_, '_> {
             SurroundingSyntax::SetRule => 'set_rule: {
                 // todo: user defined elements
                 for func in &checker.functions {
-                    if let Some(elem) = func.element() {
-                        if elem.params().iter().any(|param| param.settable) {
-                            break 'set_rule true;
-                        }
-                    }
+                    if let Some(elem) = func.element()
+                        && elem.params().iter().any(|param| param.settable)
+                    {
+                        break 'set_rule true;
+                    }
                 }
 

@@ -196,10 +196,10 @@ impl CompletionPair<'_, '_, '_> {
     }
 
     fn analyze_import_source(ctx: &LocalContext, types: &TypeInfo, s: ast::Expr) -> Option<Ty> {
-        if let Some(res) = types.type_of_span(s.span()) {
-            if !matches!(res.value(), Some(Value::Str(..))) {
-                return Some(types.simplify(res, false));
-            }
-        }
+        if let Some(res) = types.type_of_span(s.span())
+            && !matches!(res.value(), Some(Value::Str(..)))
+        {
+            return Some(types.simplify(res, false));
+        }
 
         let m = ctx.module_by_syntax(s.to_untyped())?;

@@ -226,16 +226,18 @@ impl CompletionPair<'_, '_, '_> {
             apply = Some(eco_format!("at(\"{label}\")"));
         } else {
             let apply_label = &mut label.as_str();
-            if apply_label.ends_with('"') && self.cursor.after.starts_with('"') {
-                if let Some(trimmed) = apply_label.strip_suffix('"') {
-                    *apply_label = trimmed;
-                }
-            }
+            if apply_label.ends_with('"')
+                && self.cursor.after.starts_with('"')
+                && let Some(trimmed) = apply_label.strip_suffix('"')
+            {
+                *apply_label = trimmed;
+            }
             let from_before = slice_at(self.cursor.text, 0..self.cursor.from);
-            if apply_label.starts_with('"') && from_before.ends_with('"') {
-                if let Some(trimmed) = apply_label.strip_prefix('"') {
-                    *apply_label = trimmed;
-                }
-            }
+            if apply_label.starts_with('"')
+                && from_before.ends_with('"')
+                && let Some(trimmed) = apply_label.strip_prefix('"')
+            {
+                *apply_label = trimmed;
+            }
 
             if apply_label.len() != label.len() {

@@ -653,10 +653,10 @@ impl SharedContext {
         // e.g. `f(x|)`, we will select the `x`
         if cursor == node.offset() + 1 && is_mark(node.kind()) {
             let prev_leaf = node.prev_leaf();
-            if let Some(prev_leaf) = prev_leaf {
-                if prev_leaf.range().end == node.offset() {
-                    node = prev_leaf;
-                }
-            }
+            if let Some(prev_leaf) = prev_leaf
+                && prev_leaf.range().end == node.offset()
+            {
+                node = prev_leaf;
+            }
         }
 

@@ -141,10 +141,10 @@ impl LinkStrWorker {
         for item in call.args().items() {
             match item {
                 ast::Arg::Named(named) if named.name().get().as_str() == "style" => {
-                    if let ast::Expr::Str(style) = named.expr() {
-                        if hayagriva::archive::ArchivedStyle::by_name(&style.get()).is_some() {
-                            return Some(());
-                        }
-                    }
+                    if let ast::Expr::Str(style) = named.expr()
+                        && hayagriva::archive::ArchivedStyle::by_name(&style.get()).is_some()
+                    {
+                        return Some(());
+                    }
                     self.analyze_path_expr(node, named.expr());
                     return Some(());

@@ -349,18 +349,19 @@ impl Tokenizer {
             .map(|token_type| Token::new(token_type, modifiers, range.clone()));
 
         // Push start
-        if let Some(prev_token) = self.token.as_mut() {
-            if !prev_token.range.is_empty() && prev_token.range.start < range.start {
-                let end = prev_token.range.end.min(range.start);
-                let sliced = Token {
-                    token_type: prev_token.token_type,
-                    modifiers: prev_token.modifiers,
-                    range: prev_token.range.start..end,
-                };
-                // Slice the previous token
-                prev_token.range.start = end;
-                self.push(sliced);
-            }
-        }
+        if let Some(prev_token) = self.token.as_mut()
+            && !prev_token.range.is_empty()
+            && prev_token.range.start < range.start
+        {
+            let end = prev_token.range.end.min(range.start);
+            let sliced = Token {
+                token_type: prev_token.token_type,
+                modifiers: prev_token.modifiers,
+                range: prev_token.range.start..end,
+            };
+            // Slice the previous token
+            prev_token.range.start = end;
+            self.push(sliced);
+        }
 
         if !is_leaf {

@@ -372,14 +373,14 @@ impl Tokenizer {
         }
 
         // Push end
-        if let Some(token) = token.clone() {
-            if !token.range.is_empty() {
-                // Slice the previous token
-                if let Some(prev_token) = self.token.as_mut() {
-                    prev_token.range.start = token.range.end;
-                }
-                self.push(token);
-            }
-        }
+        if let Some(token) = token.clone()
+            && !token.range.is_empty()
+        {
+            // Slice the previous token
+            if let Some(prev_token) = self.token.as_mut() {
+                prev_token.range.start = token.range.end;
+            }
+            self.push(token);
+        }
     }
 

@@ -21,20 +21,18 @@ impl ApplyChecker for ApplyTypeChecker<'_, '_> {
             sig => (sig, false),
         };
 
-        if !is_partialize {
-            if let Some(ty) = sig.call(args, pol, self.base) {
-                self.resultant.push(ty);
-            }
-        }
+        if !is_partialize && let Some(ty) = sig.call(args, pol, self.base) {
+            self.resultant.push(ty);
+        }
 
         // todo: remove this after we implemented dependent types
         match sig {
             Sig::TypeCons { val, .. } => {
-                if *val == typst::foundations::Type::of::<typst::foundations::Type>() {
-                    if let Some(p0) = args.pos(0) {
-                        self.resultant
-                            .push(Ty::Unary(TypeUnary::new(UnaryOp::TypeOf, p0.clone())));
-                    }
-                }
+                if *val == typst::foundations::Type::of::<typst::foundations::Type>()
+                    && let Some(p0) = args.pos(0)
+                {
+                    self.resultant
+                        .push(Ty::Unary(TypeUnary::new(UnaryOp::TypeOf, p0.clone())));
+                }
             }
             Sig::Builtin(BuiltinSig::TupleMap(this)) => {

@@ -25,17 +25,17 @@ impl TypeChecker<'_> {
         };
         let mut vars = vars;
         for (_name, doc) in vars.iter_mut() {
-            if let Some(ty) = &mut doc.ty {
-                if let Some(mutated) = ty.mutate(true, &mut renamer) {
-                    *ty = mutated;
-                }
-            }
-        }
-        if let Some(ty) = res_ty.as_mut() {
-            if let Some(mutated) = ty.mutate(true, &mut renamer) {
-                *ty = mutated;
-            }
-        }
+            if let Some(ty) = &mut doc.ty
+                && let Some(mutated) = ty.mutate(true, &mut renamer)
+            {
+                *ty = mutated;
+            }
+        }
+        if let Some(ty) = res_ty.as_mut()
+            && let Some(mutated) = ty.mutate(true, &mut renamer)
+        {
+            *ty = mutated;
+        }
         DocString {
             docs,
             var_bounds,

@@ -34,12 +34,12 @@ pub(crate) fn convert_docs(
     }
 
     let mut imports = Vec::new();
-    if WorkspaceResolver::is_package_file(fid) {
-        if let Some(pkg) = fid.package() {
-            let pkg_spec = pkg.to_string();
-            imports.push(format!("#import {pkg_spec:?}"));
-            imports.push(format!("#import {pkg_spec:?}: *"));
-        }
-    }
+    if WorkspaceResolver::is_package_file(fid)
+        && let Some(pkg) = fid.package()
+    {
+        let pkg_spec = pkg.to_string();
+        imports.push(format!("#import {pkg_spec:?}"));
+        imports.push(format!("#import {pkg_spec:?}: *"));
+    }
     imports.push(format!(
         "#import {:?}: *",

@@ -229,12 +229,11 @@ impl ScanDefCtx<'_> {
         };
 
         if let Some((span, mod_fid)) = head.decl.as_ref().and_then(|decl| decl.file_id()).zip(site)
+            && span != *mod_fid
         {
-            if span != *mod_fid {
-                head.is_external = true;
-                head.oneliner = head.docs.map(|docs| oneliner(&docs).to_owned());
-                head.docs = None;
-            }
+            head.is_external = true;
+            head.oneliner = head.docs.map(|docs| oneliner(&docs).to_owned());
+            head.docs = None;
         }
 
         // Insert module that is not exported

@@ -130,31 +130,30 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Pac
                 eco_format!("symbol-{}-{}", child.kind, child.name)
             };
 
-            if child.is_external {
-                if let Some(fid) = child_fid {
-                    let lnk = if fid.package() == Some(for_spec) {
-                        let sub_aka = akas(fid);
-                        let sub_primary = sub_aka.first().cloned().unwrap_or_default();
-                        child.external_link = Some(format!(
-                            "#symbol-{}-{sub_primary}.{}",
-                            child.kind, child.name
-                        ));
-                        format!("#{}-{}-in-{sub_primary}", child.kind, child.name)
-                            .replace(".", "")
-                    } else if let Some(spec) = fid.package() {
-                        let lnk = format!(
-                            "https://typst.app/universe/package/{}/{}",
-                            spec.name, spec.version
-                        );
-                        child.external_link = Some(lnk.clone());
-                        lnk
-                    } else {
-                        let lnk: String = "https://typst.app/docs".into();
-                        child.external_link = Some(lnk.clone());
-                        lnk
-                    };
-                    child.symbol_link = Some(lnk);
-                }
-            }
+            if child.is_external
+                && let Some(fid) = child_fid
+            {
+                let lnk = if fid.package() == Some(for_spec) {
+                    let sub_aka = akas(fid);
+                    let sub_primary = sub_aka.first().cloned().unwrap_or_default();
+                    child.external_link = Some(format!(
+                        "#symbol-{}-{sub_primary}.{}",
+                        child.kind, child.name
+                    ));
+                    format!("#{}-{}-in-{sub_primary}", child.kind, child.name).replace(".", "")
+                } else if let Some(spec) = fid.package() {
+                    let lnk = format!(
+                        "https://typst.app/universe/package/{}/{}",
+                        spec.name, spec.version
+                    );
+                    child.external_link = Some(lnk.clone());
+                    lnk
+                } else {
+                    let lnk: String = "https://typst.app/docs".into();
+                    child.external_link = Some(lnk.clone());
+                    lnk
+                };
+                child.symbol_link = Some(lnk);
+            }
 
             let child_children = std::mem::take(&mut child.children);

@@ -327,10 +327,10 @@ impl ExternalDocLink {
     fn get(def: &Definition) -> Option<CommandLink> {
         let value = def.value();
 
-        if matches!(value, Some(Value::Func(..))) {
-            if let Some(builtin) = Self::builtin_func_tooltip("https://typst.app/docs/", def) {
-                return Some(builtin);
-            }
-        };
+        if matches!(value, Some(Value::Func(..)))
+            && let Some(builtin) = Self::builtin_func_tooltip("https://typst.app/docs/", def)
+        {
+            return Some(builtin);
+        };
 
         value.and_then(|value| Self::builtin_value_tooltip("https://typst.app/docs/", &value))

@@ -22,11 +22,11 @@ pub fn jump_from_click(
 ) -> Option<(SourceSpanOffset, SourceSpanOffset)> {
     // Try to find a link first.
     for (pos, item) in frame.items() {
-        if let FrameItem::Link(_dest, size) = item {
-            if is_in_rect(*pos, *size, click) {
-                // todo: url reaction
-                return None;
-            }
-        }
+        if let FrameItem::Link(_dest, size) = item
+            && is_in_rect(*pos, *size, click)
+        {
+            // todo: url reaction
+            return None;
+        }
     }
 

@@ -140,10 +140,10 @@ fn jump_from_cursor_(
         // In a page, we try to find a closer span than the existing found one.
         let mut p_dis = min_dis;
 
-        if let Some(point) = find_in_frame(&page.frame, span, &mut p_dis, &mut min_point) {
-            if let Some(page) = NonZeroUsize::new(idx + 1) {
-                positions.push(Position { page, point });
-            }
-        }
+        if let Some(point) = find_in_frame(&page.frame, span, &mut p_dis, &mut min_point)
+            && let Some(page) = NonZeroUsize::new(idx + 1)
+        {
+            positions.push(Position { page, point });
+        }
 
         // In this page, we found a closer span and update.

@@ -55,12 +55,11 @@ pub(crate) fn expr_of(
 
         // If there is a cycle, the expression will be stable as the source is
         // unchanged.
-        if let Some(exports) = ei {
-            if prev_exports.size() != exports.size()
-                || hash128(&prev_exports) != hash128(&exports)
-            {
-                return None;
-            }
-        }
+        if let Some(exports) = ei
+            && (prev_exports.size() != exports.size()
+                || hash128(&prev_exports) != hash128(&exports))
+        {
+            return None;
+        }
     }
 

@@ -384,10 +384,10 @@ impl LexicalHierarchyWorker {
             SyntaxKind::Closure => {
                 let first_child = node.children().next();
                 let current = self.stack.last_mut().unwrap().1.len();
-                if let Some(first_child) = first_child {
-                    if first_child.kind() == SyntaxKind::Ident {
-                        self.check_node_with(first_child, IdentContext::Func)?;
-                    }
-                }
+                if let Some(first_child) = first_child
+                    && first_child.kind() == SyntaxKind::Ident
+                {
+                    self.check_node_with(first_child, IdentContext::Func)?;
+                }
                 let body = node
                     .children()

@@ -251,20 +251,20 @@ fn match_by_pos(mut n: LinkedNode, prev: bool, ident: bool) -> usize {
         match n.kind() {
             SyntaxKind::Closure => {
                 let closure = n.cast::<ast::Closure>().unwrap();
-                if let Some(name) = closure.name() {
-                    if let Some(m) = n.find(name.span()) {
-                        n = m;
-                        break 'match_loop;
-                    }
-                }
+                if let Some(name) = closure.name()
+                    && let Some(m) = n.find(name.span())
+                {
+                    n = m;
+                    break 'match_loop;
+                }
             }
             SyntaxKind::LetBinding => {
                 let let_binding = n.cast::<ast::LetBinding>().unwrap();
-                if let Some(name) = let_binding.kind().bindings().first() {
-                    if let Some(m) = n.find(name.span()) {
-                        n = m;
-                        break 'match_loop;
-                    }
-                }
+                if let Some(name) = let_binding.kind().bindings().first()
+                    && let Some(m) = n.find(name.span())
+                {
+                    n = m;
+                    break 'match_loop;
+                }
             }
             _ => {}