feat: complete arguments enhanced by type inference (#186)

* auto complete code in param position

* dev: initial path completion in params

* fix: compile error

* remove two todo

* dev: improve get_deref_target

* check string during completion

* feat: complete path arguments

* feat: identify hash (`#`) before a function that follows a content parameter

* dev: complete text.size, text.dir, stack.dir, stroke dict

* dev: add record type

* dev: complete stroke dict

* fix: correct kind of language of code tooltip

* dev: add colon trigger character

* dev: let type selection complete

* dev: complete inset/outset/margin/radius dictionary types

* dev: complete raw theme/syntaxes, bib path types

* dev: complete all files at the same time

* dev: update snapshot
This commit is contained in:
Myriad-Dreamin 2024-04-15 00:17:54 +08:00 committed by GitHub
parent 76de22b676
commit 987a7da867
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
32 changed files with 1958 additions and 775 deletions

View file

@ -12,8 +12,8 @@ pub mod linked_def;
pub use linked_def::*;
pub mod signature;
pub use signature::*;
pub mod r#type;
pub(crate) use r#type::*;
mod ty;
pub(crate) use ty::*;
pub mod track_values;
pub use track_values::*;
mod prelude;
@ -28,7 +28,7 @@ mod type_check_tests {
use typst::syntax::Source;
use crate::analysis::type_check;
use crate::analysis::ty;
use crate::tests::*;
use super::TypeCheckInfo;
@ -38,7 +38,7 @@ mod type_check_tests {
snapshot_testing("type_check", &|ctx, path| {
let source = ctx.source_by_path(&path).unwrap();
let result = type_check(ctx, source.clone());
let result = ty::type_check(ctx, source.clone());
let result = result
.as_deref()
.map(|e| format!("{:#?}", TypeCheckSnapshot(&source, e)));

View file

@ -13,6 +13,7 @@ use reflexo::hash::hash128;
use reflexo::{cow_mut::CowMut, debug_loc::DataSource, ImmutPath};
use typst::eval::Eval;
use typst::foundations;
use typst::syntax::SyntaxNode;
use typst::{
diag::{eco_format, FileError, FileResult, PackageError},
syntax::{package::PackageSpec, Source, Span, VirtualPath},
@ -21,7 +22,9 @@ use typst::{
use typst::{foundations::Value, syntax::ast, text::Font};
use typst::{layout::Position, syntax::FileId as TypstFileId};
use super::{DefUseInfo, ImportInfo, Signature, SignatureTarget, TypeCheckInfo};
use super::{
DefUseInfo, FlowType, ImportInfo, PathPreference, Signature, SignatureTarget, TypeCheckInfo,
};
use crate::{
lsp_to_typst,
syntax::{
@ -246,7 +249,7 @@ impl AnalysisGlobalCaches {
pub fn signature(&self, source: Option<Source>, func: &SignatureTarget) -> Option<Signature> {
match func {
SignatureTarget::Syntax(node) => {
// todo: performance
// todo: check performance on peeking signature source frequently
let cache = self.modules.get(&node.span().id()?)?;
if cache
.signature_source
@ -276,7 +279,7 @@ impl AnalysisGlobalCaches {
match func {
SignatureTarget::Syntax(node) => {
let cache = self.modules.entry(node.span().id().unwrap()).or_default();
// todo: performance
// todo: check performance on peeking signature source frequently
if cache
.signature_source
.as_ref()
@ -306,6 +309,7 @@ impl AnalysisGlobalCaches {
#[derive(Default)]
pub struct AnalysisCaches {
modules: HashMap<TypstFileId, ModuleAnalysisCache>,
completion_files: OnceCell<Vec<PathBuf>>,
root_files: OnceCell<Vec<TypstFileId>>,
module_deps: OnceCell<HashMap<TypstFileId, ModuleDependency>>,
}
@ -374,15 +378,42 @@ impl<'w> AnalysisContext<'w> {
}
#[cfg(test)]
pub fn test_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) -> &Vec<TypstFileId> {
self.caches.root_files.get_or_init(f)
pub fn test_completion_files(&mut self, f: impl FnOnce() -> Vec<PathBuf>) {
self.caches.completion_files.get_or_init(f);
}
/// Get all the files in the workspace.
pub fn files(&mut self) -> &Vec<TypstFileId> {
#[cfg(test)]
pub fn test_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
self.caches.root_files.get_or_init(f);
}
/// Get all the source files in the workspace.
pub(crate) fn completion_files(&self, pref: &PathPreference) -> impl Iterator<Item = &PathBuf> {
let r = pref.ext_matcher();
self.caches
.root_files
.get_or_init(|| scan_workspace_files(&self.analysis.root))
.completion_files
.get_or_init(|| {
scan_workspace_files(
&self.analysis.root,
PathPreference::Special.ext_matcher(),
|relative_path| relative_path.to_owned(),
)
})
.iter()
.filter(move |p| {
p.extension()
.and_then(|p| p.to_str())
.is_some_and(|e| r.is_match(e))
})
}
/// Get all the source files in the workspace.
pub fn source_files(&self) -> &Vec<TypstFileId> {
self.caches.root_files.get_or_init(|| {
self.completion_files(&PathPreference::Source)
.map(|p| TypstFileId::new(None, VirtualPath::new(p.as_path())))
.collect()
})
}
/// Get the module dependencies of the workspace.
@ -492,7 +523,7 @@ impl<'w> AnalysisContext<'w> {
let tl = cache.type_check.clone();
let res = tl
.compute(source, |_before, after| {
let next = crate::analysis::type_check(self, after);
let next = crate::analysis::ty::type_check(self, after);
next.or_else(|| tl.output.read().clone())
})
.ok()
@ -586,7 +617,7 @@ impl<'w> AnalysisContext<'w> {
f(&mut vm)
}
pub(crate) fn mini_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
pub(crate) fn const_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
Some(match rr {
ast::Expr::None(_) => Value::None,
ast::Expr::Auto(_) => Value::Auto,
@ -595,9 +626,25 @@ impl<'w> AnalysisContext<'w> {
ast::Expr::Float(v) => Value::Float(v.get()),
ast::Expr::Numeric(v) => Value::numeric(v.get()),
ast::Expr::Str(v) => Value::Str(v.get().into()),
e => return self.with_vm(|vm| e.eval(vm).ok()),
_ => return None,
})
}
pub(crate) fn mini_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
self.const_eval(rr)
.or_else(|| self.with_vm(|vm| rr.eval(vm).ok()))
}
pub(crate) fn type_of(&mut self, rr: &SyntaxNode) -> Option<FlowType> {
self.type_of_span(rr.span())
}
pub(crate) fn type_of_span(&mut self, s: Span) -> Option<FlowType> {
let id = s.id()?;
let source = self.source_by_id(id).ok()?;
let ty_chk = self.type_check(source)?;
ty_chk.mapping.get(&s).cloned()
}
}
/// The context for searching in the workspace.

View file

@ -68,6 +68,10 @@ pub fn find_definition(
name_range: None,
});
}
// todo: label, reference
DerefTarget::Label(..) | DerefTarget::Ref(..) | DerefTarget::Normal(..) => {
return None;
}
};
// syntactic definition

View file

@ -1,16 +1,16 @@
//! Top-level evaluation of a source file.
use core::fmt;
use std::{
collections::{HashMap, HashSet},
sync::Arc,
};
use ecow::EcoString;
use ecow::{EcoString, EcoVec};
use once_cell::sync::Lazy;
use parking_lot::{Mutex, RwLock};
use reflexo::{hash::hash128, vector::ir::DefId};
use typst::{
foundations::{CastInfo, Element, Func, ParamInfo, Value},
foundations::{Func, Value},
syntax::{
ast::{self, AstNode},
LinkedNode, Source, Span, SyntaxKind,
@ -21,405 +21,10 @@ use crate::{analysis::analyze_dyn_signature, AnalysisContext};
use super::{resolve_global_value, DefUseInfo, IdentRef};
struct RefDebug<'a>(&'a FlowType);
impl<'a> fmt::Debug for RefDebug<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.0 {
FlowType::Var(v) => write!(f, "@{}", v.1),
_ => write!(f, "{:?}", self.0),
}
}
}
#[derive(Debug, Clone, Hash)]
pub(crate) enum FlowUnaryType {
Pos(Box<FlowType>),
Neg(Box<FlowType>),
Not(Box<FlowType>),
}
impl FlowUnaryType {
pub fn lhs(&self) -> &FlowType {
match self {
FlowUnaryType::Pos(e) => e,
FlowUnaryType::Neg(e) => e,
FlowUnaryType::Not(e) => e,
}
}
}
#[derive(Debug, Clone, Hash)]
pub(crate) enum FlowBinaryType {
Add(FlowBinaryRepr),
Sub(FlowBinaryRepr),
Mul(FlowBinaryRepr),
Div(FlowBinaryRepr),
And(FlowBinaryRepr),
Or(FlowBinaryRepr),
Eq(FlowBinaryRepr),
Neq(FlowBinaryRepr),
Lt(FlowBinaryRepr),
Leq(FlowBinaryRepr),
Gt(FlowBinaryRepr),
Geq(FlowBinaryRepr),
Assign(FlowBinaryRepr),
In(FlowBinaryRepr),
NotIn(FlowBinaryRepr),
AddAssign(FlowBinaryRepr),
SubAssign(FlowBinaryRepr),
MulAssign(FlowBinaryRepr),
DivAssign(FlowBinaryRepr),
}
impl FlowBinaryType {
pub fn repr(&self) -> &FlowBinaryRepr {
match self {
FlowBinaryType::Add(r)
| FlowBinaryType::Sub(r)
| FlowBinaryType::Mul(r)
| FlowBinaryType::Div(r)
| FlowBinaryType::And(r)
| FlowBinaryType::Or(r)
| FlowBinaryType::Eq(r)
| FlowBinaryType::Neq(r)
| FlowBinaryType::Lt(r)
| FlowBinaryType::Leq(r)
| FlowBinaryType::Gt(r)
| FlowBinaryType::Geq(r)
| FlowBinaryType::Assign(r)
| FlowBinaryType::In(r)
| FlowBinaryType::NotIn(r)
| FlowBinaryType::AddAssign(r)
| FlowBinaryType::SubAssign(r)
| FlowBinaryType::MulAssign(r)
| FlowBinaryType::DivAssign(r) => r,
}
}
}
#[derive(Clone, Hash)]
pub(crate) struct FlowBinaryRepr(Box<(FlowType, FlowType)>);
impl fmt::Debug for FlowBinaryRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// shorter
write!(f, "{:?}, {:?}", RefDebug(&self.0 .0), RefDebug(&self.0 .1))
}
}
#[derive(Clone, Hash)]
pub(crate) struct FlowVarStore {
pub lbs: Vec<FlowType>,
pub ubs: Vec<FlowType>,
}
impl fmt::Debug for FlowVarStore {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// write!(f, "{}", self.name)
// also where
if !self.lbs.is_empty() {
write!(f, " ⪰ {:?}", self.lbs[0])?;
for lb in &self.lbs[1..] {
write!(f, " | {lb:?}")?;
}
}
if !self.ubs.is_empty() {
write!(f, " ⪯ {:?}", self.ubs[0])?;
for ub in &self.ubs[1..] {
write!(f, " & {ub:?}")?;
}
}
Ok(())
}
}
#[derive(Clone)]
pub(crate) enum FlowVarKind {
Weak(Arc<RwLock<FlowVarStore>>),
}
#[derive(Clone)]
pub(crate) struct FlowVar {
pub name: EcoString,
pub id: DefId,
pub kind: FlowVarKind,
}
impl std::hash::Hash for FlowVar {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
0.hash(state);
self.id.hash(state);
}
}
impl fmt::Debug for FlowVar {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "@{}", self.name)?;
match &self.kind {
// FlowVarKind::Strong(t) => write!(f, " = {:?}", t),
FlowVarKind::Weak(w) => write!(f, "{w:?}"),
}
}
}
impl FlowVar {
pub fn name(&self) -> EcoString {
self.name.clone()
}
pub fn id(&self) -> DefId {
self.id
}
pub fn get_ref(&self) -> FlowType {
FlowType::Var(Box::new((self.id, self.name.clone())))
}
fn ever_be(&self, exp: FlowType) {
match &self.kind {
// FlowVarKind::Strong(_t) => {}
FlowVarKind::Weak(w) => {
let mut w = w.write();
w.lbs.push(exp.clone());
}
}
}
fn as_strong(&mut self, exp: FlowType) {
// self.kind = FlowVarKind::Strong(value);
match &self.kind {
// FlowVarKind::Strong(_t) => {}
FlowVarKind::Weak(w) => {
let mut w = w.write();
w.lbs.push(exp.clone());
}
}
}
}
#[derive(Hash, Clone)]
pub(crate) struct FlowAt(Box<(FlowType, EcoString)>);
impl fmt::Debug for FlowAt {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}.{}", RefDebug(&self.0 .0), self.0 .1)
}
}
#[derive(Clone, Hash)]
pub(crate) struct FlowArgs {
pub args: Vec<FlowType>,
pub named: Vec<(EcoString, FlowType)>,
}
impl FlowArgs {
fn start_match(&self) -> &[FlowType] {
&self.args
}
}
impl fmt::Debug for FlowArgs {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use std::fmt::Write;
f.write_str("&(")?;
if let Some((first, args)) = self.args.split_first() {
write!(f, "{first:?}")?;
for arg in args {
write!(f, "{arg:?}, ")?;
}
}
f.write_char(')')
}
}
#[derive(Clone, Hash)]
pub(crate) struct FlowSignature {
pub pos: Vec<FlowType>,
pub named: Vec<(EcoString, FlowType)>,
pub rest: Option<FlowType>,
pub ret: FlowType,
}
impl fmt::Debug for FlowSignature {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("(")?;
if let Some((first, pos)) = self.pos.split_first() {
write!(f, "{first:?}")?;
for p in pos {
write!(f, ", {p:?}")?;
}
}
for (name, ty) in &self.named {
write!(f, ", {name}: {ty:?}")?;
}
if let Some(rest) = &self.rest {
write!(f, ", ...: {rest:?}")?;
}
f.write_str(") -> ")?;
write!(f, "{:?}", self.ret)
}
}
#[derive(Debug, Clone, Hash)]
pub(crate) enum PathPreference {
None,
Image,
Json,
Yaml,
Xml,
Toml,
}
#[derive(Debug, Clone, Hash)]
pub(crate) enum FlowBuiltinType {
Args,
Path(PathPreference),
}
#[derive(Hash, Clone)]
#[allow(clippy::box_collection)]
pub(crate) enum FlowType {
Clause,
Undef,
Content,
Any,
Array,
Dict,
None,
Infer,
FlowNone,
Auto,
Builtin(FlowBuiltinType),
Args(Box<FlowArgs>),
Func(Box<FlowSignature>),
With(Box<(FlowType, Vec<FlowArgs>)>),
At(FlowAt),
Union(Box<Vec<FlowType>>),
Let(Arc<FlowVarStore>),
Var(Box<(DefId, EcoString)>),
Unary(FlowUnaryType),
Binary(FlowBinaryType),
Value(Box<Value>),
Element(Element),
}
impl fmt::Debug for FlowType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
FlowType::Clause => f.write_str("Clause"),
FlowType::Undef => f.write_str("Undef"),
FlowType::Content => f.write_str("Content"),
FlowType::Any => f.write_str("Any"),
FlowType::Array => f.write_str("Array"),
FlowType::Dict => f.write_str("Dict"),
FlowType::None => f.write_str("None"),
FlowType::Infer => f.write_str("Infer"),
FlowType::FlowNone => f.write_str("FlowNone"),
FlowType::Auto => f.write_str("Auto"),
FlowType::Builtin(t) => write!(f, "{t:?}"),
FlowType::Args(a) => write!(f, "&({a:?})"),
FlowType::Func(s) => write!(f, "{s:?}"),
FlowType::With(w) => write!(f, "({:?}).with(..{:?})", w.0, w.1),
FlowType::At(a) => write!(f, "{a:?}"),
FlowType::Union(u) => {
f.write_str("(")?;
if let Some((first, u)) = u.split_first() {
write!(f, "{first:?}")?;
for u in u {
write!(f, " | {u:?}")?;
}
}
f.write_str(")")
}
FlowType::Let(v) => write!(f, "{v:?}"),
FlowType::Var(v) => write!(f, "@{}", v.1),
FlowType::Unary(u) => write!(f, "{u:?}"),
FlowType::Binary(b) => write!(f, "{b:?}"),
FlowType::Value(v) => write!(f, "{v:?}"),
FlowType::Element(e) => write!(f, "{e:?}"),
}
}
}
impl FlowType {
pub fn from_return_site(f: &Func, c: &'_ CastInfo) -> Option<Self> {
use typst::foundations::func::Repr;
match f.inner() {
Repr::Element(e) => return Some(FlowType::Element(*e)),
Repr::Closure(_) => {}
Repr::With(w) => return FlowType::from_return_site(&w.0, c),
Repr::Native(_) => {}
};
let ty = match c {
CastInfo::Any => FlowType::Any,
CastInfo::Value(v, _) => FlowType::Value(Box::new(v.clone())),
CastInfo::Type(ty) => FlowType::Value(Box::new(Value::Type(*ty))),
CastInfo::Union(e) => FlowType::Union(Box::new(
e.iter()
.flat_map(|e| Self::from_return_site(f, e))
.collect(),
)),
};
Some(ty)
}
pub(crate) fn from_param_site(f: &Func, p: &ParamInfo, s: &CastInfo) -> Option<FlowType> {
use typst::foundations::func::Repr;
match f.inner() {
Repr::Element(..) | Repr::Native(..) => match (f.name().unwrap(), p.name) {
("image", "path") => {
return Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Image,
)))
}
("read", "path") => {
return Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::None,
)))
}
("json", "path") => {
return Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Json,
)))
}
("yaml", "path") => {
return Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Yaml,
)))
}
("xml", "path") => {
return Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Xml,
)))
}
("toml", "path") => {
return Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Toml,
)))
}
_ => {}
},
Repr::Closure(_) => {}
Repr::With(w) => return FlowType::from_param_site(&w.0, p, s),
};
let ty = match &s {
CastInfo::Any => FlowType::Any,
CastInfo::Value(v, _) => FlowType::Value(Box::new(v.clone())),
CastInfo::Type(ty) => FlowType::Value(Box::new(Value::Type(*ty))),
CastInfo::Union(e) => FlowType::Union(Box::new(
e.iter()
.flat_map(|e| Self::from_param_site(f, p, e))
.collect(),
)),
};
Some(ty)
}
}
mod def;
pub(crate) use def::*;
mod builtin;
pub(crate) use builtin::*;
pub(crate) struct TypeCheckInfo {
pub vars: HashMap<DefId, FlowVar>,
@ -472,7 +77,6 @@ pub(crate) fn type_check(ctx: &mut AnalysisContext, source: Source) -> Option<Ar
let elapsed = current.elapsed();
log::info!("Type checking on {:?} took {:?}", source.id(), elapsed);
let _ = type_checker.info.mapping;
let _ = type_checker.source;
Some(Arc::new(info))
@ -571,8 +175,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
return self
.ctx
.mini_eval(root.cast()?)
.map(Box::new)
.map(FlowType::Value)
.map(|v| (FlowType::Value(Box::new((v, root.span())))))
}
SyntaxKind::Parenthesized => return self.check_children(root),
SyntaxKind::Array => return self.check_array(root),
@ -676,8 +279,9 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
};
let Some(def_id) = self.def_use_info.get_ref(&ident_ref) else {
let s = root.span();
let v = resolve_global_value(self.ctx, root, mode == InterpretMode::Math)?;
return Some(FlowType::Value(Box::new(v)));
return Some(FlowType::Value(Box::new((v, s))));
};
let var = self.info.vars.get(&def_id)?.clone();
@ -691,16 +295,37 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
fn check_dict(&mut self, root: LinkedNode<'_>) -> Option<FlowType> {
let _dict: ast::Dict = root.cast()?;
let dict: ast::Dict = root.cast()?;
Some(FlowType::Dict)
let mut fields = EcoVec::new();
for field in dict.items() {
match field {
ast::DictItem::Named(n) => {
let name = n.name().get().clone();
let value = self.check_expr_in(n.expr().span(), root.clone());
fields.push((name, value, n.span()));
}
ast::DictItem::Keyed(k) => {
let key = self.ctx.const_eval(k.key());
if let Some(Value::Str(key)) = key {
let value = self.check_expr_in(k.expr().span(), root.clone());
fields.push((key.into(), value, k.span()));
}
}
// todo: var dict union
ast::DictItem::Spread(_s) => {}
}
}
Some(FlowType::Dict(FlowRecord { fields }))
}
fn check_unary(&mut self, root: LinkedNode<'_>) -> Option<FlowType> {
let unary: ast::Unary = root.cast()?;
if let Some(constant) = self.ctx.mini_eval(ast::Expr::Unary(unary)) {
return Some(FlowType::Value(Box::new(constant)));
return Some(FlowType::Value(Box::new((constant, root.span()))));
}
let op = unary.op();
@ -719,7 +344,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
let binary: ast::Binary = root.cast()?;
if let Some(constant) = self.ctx.mini_eval(ast::Expr::Binary(binary)) {
return Some(FlowType::Value(Box::new(constant)));
return Some(FlowType::Value(Box::new((constant, root.span()))));
}
let op = binary.op();
@ -1036,7 +661,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
syntax_args: &ast::Args,
candidates: &mut Vec<FlowType>,
) -> Option<()> {
// println!("check func callee {callee:?}");
// log::debug!("check func callee {callee:?}");
match &callee {
FlowType::Var(v) => {
@ -1071,23 +696,28 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
}
// println!("check applied {v:?}");
// log::debug!("check applied {v:?}");
candidates.push(f.ret.clone());
}
FlowType::Dict(_v) => {}
// todo: with
FlowType::With(_e) => {}
FlowType::Args(_e) => {}
FlowType::Union(_e) => {}
FlowType::Let(_) => {}
FlowType::Value(f) => {
if let Value::Func(f) = f.as_ref() {
if let Value::Func(f) = &f.0 {
self.check_apply_runtime(f, args, syntax_args, candidates);
}
}
FlowType::ValueDoc(f) => {
if let Value::Func(f) = &f.0 {
self.check_apply_runtime(f, args, syntax_args, candidates);
}
}
FlowType::Array => {}
FlowType::Dict => {}
FlowType::Clause => {}
FlowType::Undef => {}
FlowType::Content => {}
@ -1110,6 +740,17 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
fn constrain(&mut self, lhs: &FlowType, rhs: &FlowType) {
static FLOW_STROKE_DICT_TYPE: Lazy<FlowType> =
Lazy::new(|| FlowType::Dict(FLOW_STROKE_DICT.clone()));
static FLOW_MARGIN_DICT_TYPE: Lazy<FlowType> =
Lazy::new(|| FlowType::Dict(FLOW_MARGIN_DICT.clone()));
static FLOW_INSET_DICT_TYPE: Lazy<FlowType> =
Lazy::new(|| FlowType::Dict(FLOW_INSET_DICT.clone()));
static FLOW_OUTSET_DICT_TYPE: Lazy<FlowType> =
Lazy::new(|| FlowType::Dict(FLOW_OUTSET_DICT.clone()));
static FLOW_RADIUS_DICT_TYPE: Lazy<FlowType> =
Lazy::new(|| FlowType::Dict(FLOW_RADIUS_DICT.clone()));
match (lhs, rhs) {
(FlowType::Var(v), FlowType::Var(w)) => {
if v.0 .0 == w.0 .0 {
@ -1130,7 +771,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
}
}
(_, FlowType::Var(v)) => {
(lhs, FlowType::Var(v)) => {
let v = self.info.vars.get(&v.0).unwrap();
match &v.kind {
FlowVarKind::Weak(v) => {
@ -1139,7 +780,99 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
}
}
_ => {}
(FlowType::Union(v), rhs) => {
for e in v.iter() {
self.constrain(e, rhs);
}
}
(lhs, FlowType::Union(v)) => {
for e in v.iter() {
self.constrain(lhs, e);
}
}
(lhs, FlowType::Builtin(FlowBuiltinType::Stroke)) => {
// empty array is also a constructing dict but we can safely ignore it during
// type checking, since no fields are added yet.
if lhs.is_dict() {
self.constrain(lhs, &FLOW_STROKE_DICT_TYPE);
}
}
(FlowType::Builtin(FlowBuiltinType::Stroke), rhs) => {
if rhs.is_dict() {
self.constrain(&FLOW_STROKE_DICT_TYPE, rhs);
}
}
(lhs, FlowType::Builtin(FlowBuiltinType::Margin)) => {
if lhs.is_dict() {
self.constrain(lhs, &FLOW_MARGIN_DICT_TYPE);
}
}
(FlowType::Builtin(FlowBuiltinType::Margin), rhs) => {
if rhs.is_dict() {
self.constrain(&FLOW_MARGIN_DICT_TYPE, rhs);
}
}
(lhs, FlowType::Builtin(FlowBuiltinType::Inset)) => {
if lhs.is_dict() {
self.constrain(lhs, &FLOW_INSET_DICT_TYPE);
}
}
(FlowType::Builtin(FlowBuiltinType::Inset), rhs) => {
if rhs.is_dict() {
self.constrain(&FLOW_INSET_DICT_TYPE, rhs);
}
}
(lhs, FlowType::Builtin(FlowBuiltinType::Outset)) => {
if lhs.is_dict() {
self.constrain(lhs, &FLOW_OUTSET_DICT_TYPE);
}
}
(FlowType::Builtin(FlowBuiltinType::Outset), rhs) => {
if rhs.is_dict() {
self.constrain(&FLOW_OUTSET_DICT_TYPE, rhs);
}
}
(lhs, FlowType::Builtin(FlowBuiltinType::Radius)) => {
if lhs.is_dict() {
self.constrain(lhs, &FLOW_RADIUS_DICT_TYPE);
}
}
(FlowType::Builtin(FlowBuiltinType::Radius), rhs) => {
if rhs.is_dict() {
self.constrain(&FLOW_RADIUS_DICT_TYPE, rhs);
}
}
(FlowType::Dict(lhs), FlowType::Dict(rhs)) => {
for ((key, lhs, sl), (_, rhs, sr)) in lhs.intersect_keys(rhs) {
log::debug!("constrain record item {key} {lhs:?} ⪯ {rhs:?}");
self.constrain(lhs, rhs);
if !sl.is_detached() {
// todo: intersect/union
self.info.mapping.entry(*sl).or_insert(rhs.clone());
}
if !sr.is_detached() {
// todo: intersect/union
self.info.mapping.entry(*sr).or_insert(lhs.clone());
}
}
}
(FlowType::Value(lhs), rhs) => {
log::debug!("constrain value {lhs:?} ⪯ {rhs:?}");
if !lhs.1.is_detached() {
// todo: intersect/union
self.info.mapping.entry(lhs.1).or_insert(rhs.clone());
}
}
(lhs, FlowType::Value(rhs)) => {
log::debug!("constrain value {lhs:?} ⪯ {rhs:?}");
if !rhs.1.is_detached() {
// todo: intersect/union
self.info.mapping.entry(rhs.1).or_insert(lhs.clone());
}
}
_ => {
log::debug!("constrain {lhs:?} ⪯ {rhs:?}");
}
}
}
@ -1161,14 +894,15 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
}
FlowType::Func(..) => e,
FlowType::Dict(..) => e,
FlowType::With(..) => e,
FlowType::Args(..) => e,
FlowType::Union(..) => e,
FlowType::Let(_) => e,
FlowType::Value(..) => e,
FlowType::ValueDoc(..) => e,
FlowType::Array => e,
FlowType::Dict => e,
FlowType::Clause => e,
FlowType::Undef => e,
FlowType::Content => e,
@ -1196,7 +930,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
match primary_type {
FlowType::Func(v) => match method_name.as_str() {
"with" => {
// println!("check method at args: {v:?}.with({args:?})");
// log::debug!("check method at args: {v:?}.with({args:?})");
let f = v.as_ref();
let mut pos = f.pos.iter();
@ -1218,7 +952,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
_candidates.push(self.partial_apply(f, args));
}
"where" => {
// println!("where method at args: {args:?}");
// log::debug!("where method at args: {args:?}");
}
_ => {}
},
@ -1273,11 +1007,14 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
.find(|n| n.name().get() == name.as_ref());
if let Some(named_ty) = named_ty {
self.constrain(named_in, named_ty);
}
if let Some(syntax_named) = syntax_named {
self.info
.mapping
.insert(syntax_named.span(), named_in.clone());
.insert(syntax_named.span(), named_ty.clone());
self.info
.mapping
.insert(syntax_named.expr().span(), named_ty.clone());
}
}
}
@ -1361,6 +1098,11 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
}
self.analyze(&f.ret, pol);
}
FlowType::Dict(r) => {
for (_, p, _) in &r.fields {
self.analyze(p, pol);
}
}
FlowType::With(w) => {
self.analyze(&w.0, pol);
for m in &w.1 {
@ -1397,6 +1139,7 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
}
}
FlowType::Value(_v) => {}
FlowType::ValueDoc(_v) => {}
FlowType::Clause => {}
FlowType::Undef => {}
FlowType::Content => {}
@ -1408,7 +1151,6 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
FlowType::Builtin(_) => {}
// todo
FlowType::Array => {}
FlowType::Dict => {}
FlowType::Element(_) => {}
}
}
@ -1424,7 +1166,7 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
FlowVarKind::Weak(w) => {
let w = w.read();
// println!("transform var {:?} {pol}", v.0);
// log::debug!("transform var {:?} {pol}", v.0);
let mut lbs = Vec::with_capacity(w.lbs.len());
let mut ubs = Vec::with_capacity(w.ubs.len());
@ -1473,6 +1215,15 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
ret,
}))
}
FlowType::Dict(f) => {
let fields = f
.fields
.iter()
.map(|p| (p.0.clone(), self.transform(&p.1, !pol), p.2))
.collect();
FlowType::Dict(FlowRecord { fields })
}
FlowType::With(w) => {
let primary = self.transform(&w.0, pol);
FlowType::With(Box::new((primary, w.1.clone())))
@ -1513,8 +1264,8 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
// todo
FlowType::Let(_) => FlowType::Any,
FlowType::Array => FlowType::Array,
FlowType::Dict => FlowType::Dict,
FlowType::Value(v) => FlowType::Value(v.clone()),
FlowType::ValueDoc(v) => FlowType::ValueDoc(v.clone()),
FlowType::Element(v) => FlowType::Element(*v),
FlowType::Clause => FlowType::Clause,
FlowType::Undef => FlowType::Undef,

View file

@ -0,0 +1,321 @@
use ecow::EcoVec;
use once_cell::sync::Lazy;
use regex::RegexSet;
use typst::{
foundations::{Func, ParamInfo, Value},
syntax::Span,
};
use super::{FlowRecord, FlowType};
#[derive(Debug, Clone, Hash)]
pub(crate) enum PathPreference {
None,
Special,
Source,
Csv,
Image,
Json,
Yaml,
Xml,
Toml,
Bibliography,
RawTheme,
RawSyntax,
}
impl PathPreference {
pub fn ext_matcher(&self) -> &'static RegexSet {
static SOURCE_REGSET: Lazy<RegexSet> =
Lazy::new(|| RegexSet::new([r"^typ$", r"^typc$"]).unwrap());
static IMAGE_REGSET: Lazy<RegexSet> = Lazy::new(|| {
RegexSet::new([
r"^png$", r"^webp$", r"^jpg$", r"^jpeg$", r"^svg$", r"^svgz$",
])
.unwrap()
});
static JSON_REGSET: Lazy<RegexSet> =
Lazy::new(|| RegexSet::new([r"^json$", r"^jsonc$", r"^json5$"]).unwrap());
static YAML_REGSET: Lazy<RegexSet> =
Lazy::new(|| RegexSet::new([r"^yaml$", r"^yml$"]).unwrap());
static XML_REGSET: Lazy<RegexSet> = Lazy::new(|| RegexSet::new([r"^xml$"]).unwrap());
static TOML_REGSET: Lazy<RegexSet> = Lazy::new(|| RegexSet::new([r"^toml$"]).unwrap());
static CSV_REGSET: Lazy<RegexSet> = Lazy::new(|| RegexSet::new([r"^csv$"]).unwrap());
static BIB_REGSET: Lazy<RegexSet> =
Lazy::new(|| RegexSet::new([r"^yaml$", r"^yml$", r"^bib$"]).unwrap());
static RAW_THEME_REGSET: Lazy<RegexSet> =
Lazy::new(|| RegexSet::new([r"^tmTheme$", r"^xml$"]).unwrap());
static RAW_SYNTAX_REGSET: Lazy<RegexSet> =
Lazy::new(|| RegexSet::new([r"^tmLanguage$", r"^sublime-syntax$"]).unwrap());
static ALL_REGSET: Lazy<RegexSet> = Lazy::new(|| RegexSet::new([r".*"]).unwrap());
static ALL_SPECIAL_REGSET: Lazy<RegexSet> = Lazy::new(|| {
RegexSet::new({
let patterns = SOURCE_REGSET.patterns();
let patterns = patterns.iter().chain(IMAGE_REGSET.patterns());
let patterns = patterns.chain(JSON_REGSET.patterns());
let patterns = patterns.chain(YAML_REGSET.patterns());
let patterns = patterns.chain(XML_REGSET.patterns());
let patterns = patterns.chain(TOML_REGSET.patterns());
let patterns = patterns.chain(CSV_REGSET.patterns());
let patterns = patterns.chain(BIB_REGSET.patterns());
let patterns = patterns.chain(RAW_THEME_REGSET.patterns());
patterns.chain(RAW_SYNTAX_REGSET.patterns())
})
.unwrap()
});
match self {
PathPreference::None => &ALL_REGSET,
PathPreference::Special => &ALL_SPECIAL_REGSET,
PathPreference::Source => &SOURCE_REGSET,
PathPreference::Csv => &CSV_REGSET,
PathPreference::Image => &IMAGE_REGSET,
PathPreference::Json => &JSON_REGSET,
PathPreference::Yaml => &YAML_REGSET,
PathPreference::Xml => &XML_REGSET,
PathPreference::Toml => &TOML_REGSET,
PathPreference::Bibliography => &BIB_REGSET,
PathPreference::RawTheme => &RAW_THEME_REGSET,
PathPreference::RawSyntax => &RAW_SYNTAX_REGSET,
}
}
}
pub(in crate::analysis::ty) fn param_mapping(f: &Func, p: &ParamInfo) -> Option<FlowType> {
match (f.name().unwrap(), p.name) {
("cbor", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::None,
))),
("csv", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Csv,
))),
("image", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Image,
))),
("read", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::None,
))),
("json", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Json,
))),
("yaml", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Yaml,
))),
("xml", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Xml,
))),
("toml", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Toml,
))),
("raw", "theme") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::RawTheme,
))),
("raw", "syntaxes") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::RawSyntax,
))),
("bibliography", "path") => Some(FlowType::Builtin(FlowBuiltinType::Path(
PathPreference::Bibliography,
))),
("text", "size") => Some(FlowType::Builtin(FlowBuiltinType::TextSize)),
("text" | "stack", "dir") => Some(FlowType::Builtin(FlowBuiltinType::Dir)),
("text", "font") => Some(FlowType::Builtin(FlowBuiltinType::TextFont)),
(
// todo: polygon.regular
"page" | "highlight" | "text" | "path" | "rect" | "ellipse" | "circle" | "polygon"
| "box" | "block" | "table" | "regular",
"fill",
) => Some(FlowType::Builtin(FlowBuiltinType::Color)),
(
// todo: table.cell
"table" | "cell" | "block" | "box" | "circle" | "ellipse" | "rect" | "square",
"inset",
) => Some(FlowType::Builtin(FlowBuiltinType::Inset)),
("block" | "box" | "circle" | "ellipse" | "rect" | "square", "outset") => {
Some(FlowType::Builtin(FlowBuiltinType::Outset))
}
("block" | "box" | "rect" | "square", "radius") => {
Some(FlowType::Builtin(FlowBuiltinType::Radius))
}
(
//todo: table.cell, table.hline, table.vline, math.cancel, grid.cell, polygon.regular
"cancel" | "highlight" | "overline" | "strike" | "underline" | "text" | "path" | "rect"
| "ellipse" | "circle" | "polygon" | "box" | "block" | "table" | "line" | "cell"
| "hline" | "vline" | "regular",
"stroke",
) => Some(FlowType::Builtin(FlowBuiltinType::Stroke)),
("page", "margin") => Some(FlowType::Builtin(FlowBuiltinType::Margin)),
_ => None,
}
}
#[derive(Debug, Clone, Hash)]
pub(crate) enum FlowBuiltinType {
Args,
Color,
TextSize,
TextFont,
Dir,
Length,
Float,
Stroke,
Margin,
Inset,
Outset,
Radius,
Path(PathPreference),
}
fn literally(s: impl FlowBuiltinLiterally) -> FlowType {
s.literally()
}
trait FlowBuiltinLiterally {
fn literally(&self) -> FlowType;
}
impl FlowBuiltinLiterally for &str {
fn literally(&self) -> FlowType {
FlowType::Value(Box::new((Value::Str((*self).into()), Span::detached())))
}
}
impl FlowBuiltinLiterally for FlowBuiltinType {
fn literally(&self) -> FlowType {
FlowType::Builtin(self.clone())
}
}
// separate by middle
macro_rules! flow_builtin_union_inner {
($literal_kind:expr) => {
literally($literal_kind)
};
($($x:expr),+ $(,)?) => {
Vec::from_iter([
$(flow_builtin_union_inner!($x)),*
])
};
}
macro_rules! flow_union {
// the first one is string
($($b:tt)*) => {
FlowType::Union(Box::new(flow_builtin_union_inner!( $($b)* )))
};
}
macro_rules! flow_record {
($($name:expr => $ty:expr),* $(,)?) => {
FlowRecord {
fields: EcoVec::from_iter([
$(
(
$name.into(),
$ty,
Span::detached(),
),
)*
])
}
};
}
use FlowBuiltinType::*;
/// The record type of the dictionary form of a `stroke` value.
pub static FLOW_STROKE_DICT: Lazy<FlowRecord> = Lazy::new(|| {
    flow_record!(
        "paint" => literally(Color),
        "thickness" => literally(Length),
        "cap" => flow_union!("butt", "round", "square"),
        "join" => flow_union!("miter", "round", "bevel"),
        "dash" => flow_union!(
            "solid",
            "dotted",
            "densely-dotted",
            "loosely-dotted",
            "dashed",
            "densely-dashed",
            "loosely-dashed",
            "dash-dotted",
            "densely-dash-dotted",
            "loosely-dash-dotted",
        ),
        "miter-limit" => literally(Float),
    )
});
/// The record type of the dictionary form of `page.margin`.
pub static FLOW_MARGIN_DICT: Lazy<FlowRecord> = Lazy::new(|| {
    flow_record!(
        "top" => literally(Length),
        "right" => literally(Length),
        "bottom" => literally(Length),
        "left" => literally(Length),
        "inside" => literally(Length),
        "outside" => literally(Length),
        "x" => literally(Length),
        "y" => literally(Length),
        "rest" => literally(Length),
    )
});
/// The record type of the dictionary form of an `inset` value.
pub static FLOW_INSET_DICT: Lazy<FlowRecord> = Lazy::new(|| {
    flow_record!(
        "top" => literally(Length),
        "right" => literally(Length),
        "bottom" => literally(Length),
        "left" => literally(Length),
        "x" => literally(Length),
        "y" => literally(Length),
        "rest" => literally(Length),
    )
});
/// The record type of the dictionary form of an `outset` value.
pub static FLOW_OUTSET_DICT: Lazy<FlowRecord> = Lazy::new(|| {
    flow_record!(
        "top" => literally(Length),
        "right" => literally(Length),
        "bottom" => literally(Length),
        "left" => literally(Length),
        "x" => literally(Length),
        "y" => literally(Length),
        "rest" => literally(Length),
    )
});
/// The record type of the dictionary form of a corner `radius` value.
pub static FLOW_RADIUS_DICT: Lazy<FlowRecord> = Lazy::new(|| {
    flow_record!(
        "top" => literally(Length),
        "right" => literally(Length),
        "bottom" => literally(Length),
        "left" => literally(Length),
        "top-left" => literally(Length),
        "top-right" => literally(Length),
        "bottom-left" => literally(Length),
        "bottom-right" => literally(Length),
        "rest" => literally(Length),
    )
});
// todo bad case: function.with
// todo bad case: function.where
// todo bad case: array.fold
// todo bad case: datetime
// todo bad case: selector
// todo: function signatures, for example: `locate(loc => ...)`
// todo: numbering/supplement
// todo: grid/table.columns/rows/gutter/column-gutter/row-gutter array of length
// todo: pattern.size array of length
// todo: grid/table.fill/align/stroke/inset can be a function
// todo: math.cancel.angle can be a function
// todo: text.features array/dictionary
// todo: math.mat.augment
// todo: text.lang
// todo: text.region
// todo: text.font array
// todo: stroke.dash can be an array
// todo: csv.row-type can be an array or a dictionary

View file

@ -0,0 +1,435 @@
use core::fmt;
use std::sync::Arc;
use ecow::{EcoString, EcoVec};
use parking_lot::RwLock;
use reflexo::vector::ir::DefId;
use typst::{
foundations::{CastInfo, Element, Func, ParamInfo, Value},
syntax::Span,
};
use crate::analysis::ty::param_mapping;
use super::FlowBuiltinType;
/// Debug adaptor that prints a type variable by its name only, keeping nested
/// debug output short.
struct RefDebug<'a>(&'a FlowType);

impl<'a> fmt::Debug for RefDebug<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let FlowType::Var(v) = self.0 {
            write!(f, "@{}", v.1)
        } else {
            write!(f, "{:?}", self.0)
        }
    }
}
/// A type in the flow type system produced by type checking.
#[derive(Hash, Clone)]
#[allow(clippy::box_collection)]
pub(crate) enum FlowType {
    Clause,
    Undef,
    /// Some content value.
    Content,
    /// The top type: any value.
    Any,
    Array,
    None,
    /// A type yet to be inferred.
    Infer,
    FlowNone,
    Auto,
    /// A builtin type, see [`FlowBuiltinType`].
    Builtin(FlowBuiltinType),
    /// An exact value type, carrying the value and its source span.
    Value(Box<(Value, Span)>),
    /// An exact value type, carrying the value and its documentation.
    ValueDoc(Box<(Value, &'static str)>),
    /// The type of a Typst element (e.g. `text`, `rect`).
    Element(Element),
    /// A type variable, identified by a definition id and a display name.
    Var(Box<(DefId, EcoString)>),
    /// A function signature type.
    Func(Box<FlowSignature>),
    /// A dictionary/record type.
    Dict(FlowRecord),
    /// A type with arguments partially applied (`f.with(..)`).
    With(Box<(FlowType, Vec<FlowArgs>)>),
    /// An argument-pack type.
    Args(Box<FlowArgs>),
    /// A field/method access on another type (`ty.field`).
    At(FlowAt),
    /// The result type of a unary operation.
    Unary(FlowUnaryType),
    /// The result type of a binary operation.
    Binary(FlowBinaryType),
    /// A union of alternatives.
    Union(Box<Vec<FlowType>>),
    /// A bounded variable store (lower/upper bounds), see [`FlowVarStore`].
    Let(Arc<FlowVarStore>),
}
impl fmt::Debug for FlowType {
    /// Renders a compact, human-readable form used in type-check snapshots,
    /// e.g. unions as `(a | b)`, argument packs as `&(..)`, variables as
    /// `@name`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            FlowType::Clause => f.write_str("Clause"),
            FlowType::Undef => f.write_str("Undef"),
            FlowType::Content => f.write_str("Content"),
            FlowType::Any => f.write_str("Any"),
            FlowType::Array => f.write_str("Array"),
            FlowType::None => f.write_str("None"),
            FlowType::Infer => f.write_str("Infer"),
            FlowType::FlowNone => f.write_str("FlowNone"),
            FlowType::Auto => f.write_str("Auto"),
            FlowType::Builtin(t) => write!(f, "{t:?}"),
            FlowType::Args(a) => write!(f, "&({a:?})"),
            FlowType::Func(s) => write!(f, "{s:?}"),
            FlowType::Dict(r) => write!(f, "{r:?}"),
            FlowType::With(w) => write!(f, "({:?}).with(..{:?})", w.0, w.1),
            FlowType::At(a) => write!(f, "{a:?}"),
            FlowType::Union(u) => {
                f.write_str("(")?;
                // Join the alternatives with " | ".
                if let Some((first, u)) = u.split_first() {
                    write!(f, "{first:?}")?;
                    for u in u {
                        write!(f, " | {u:?}")?;
                    }
                }
                f.write_str(")")
            }
            FlowType::Let(v) => write!(f, "{v:?}"),
            FlowType::Var(v) => write!(f, "@{}", v.1),
            FlowType::Unary(u) => write!(f, "{u:?}"),
            FlowType::Binary(b) => write!(f, "{b:?}"),
            FlowType::Value(v) => write!(f, "{v:?}", v = v.0),
            FlowType::ValueDoc(v) => write!(f, "{v:?}"),
            FlowType::Element(e) => write!(f, "{e:?}"),
        }
    }
}
impl FlowType {
    /// Infers the flow type of the return value of `f` from the cast
    /// metadata `c`.
    ///
    /// Element functions yield their element type directly, and `with`-bound
    /// functions defer to the underlying function.
    pub fn from_return_site(f: &Func, c: &'_ CastInfo) -> Option<Self> {
        use typst::foundations::func::Repr;
        match f.inner() {
            Repr::Element(e) => return Some(FlowType::Element(*e)),
            Repr::Closure(_) => {}
            Repr::With(w) => return FlowType::from_return_site(&w.0, c),
            Repr::Native(_) => {}
        };

        let ty = match c {
            CastInfo::Any => FlowType::Any,
            CastInfo::Value(v, doc) => FlowType::ValueDoc(Box::new((v.clone(), *doc))),
            CastInfo::Type(ty) => FlowType::Value(Box::new((Value::Type(*ty), Span::detached()))),
            // A union keeps only the alternatives that could be resolved.
            CastInfo::Union(e) => FlowType::Union(Box::new(
                e.iter()
                    .flat_map(|e| Self::from_return_site(f, e))
                    .collect(),
            )),
        };

        Some(ty)
    }

    /// Infers the flow type of parameter `p` of `f` from the cast
    /// metadata `s`.
    ///
    /// Well-known parameters of native/element functions are overridden by
    /// `param_mapping` (e.g. `stroke` -> the stroke dictionary type).
    pub(crate) fn from_param_site(f: &Func, p: &ParamInfo, s: &CastInfo) -> Option<FlowType> {
        use typst::foundations::func::Repr;
        match f.inner() {
            Repr::Element(..) | Repr::Native(..) => {
                if let Some(ty) = param_mapping(f, p) {
                    return Some(ty);
                }
            }
            Repr::Closure(_) => {}
            Repr::With(w) => return FlowType::from_param_site(&w.0, p, s),
        };

        let ty = match &s {
            CastInfo::Any => FlowType::Any,
            CastInfo::Value(v, doc) => FlowType::ValueDoc(Box::new((v.clone(), *doc))),
            CastInfo::Type(ty) => FlowType::Value(Box::new((Value::Type(*ty), Span::detached()))),
            CastInfo::Union(e) => FlowType::Union(Box::new(
                e.iter()
                    .flat_map(|e| Self::from_param_site(f, p, e))
                    .collect(),
            )),
        };

        Some(ty)
    }

    /// Whether this type is a dictionary/record type.
    pub(crate) fn is_dict(&self) -> bool {
        matches!(self, FlowType::Dict(..))
    }
}
/// The result type of a unary operation, carrying the operand's type.
#[derive(Debug, Clone, Hash)]
pub(crate) enum FlowUnaryType {
    Pos(Box<FlowType>),
    Neg(Box<FlowType>),
    Not(Box<FlowType>),
}
impl FlowUnaryType {
    /// Returns the type of the (single) operand of the unary operation.
    pub fn lhs(&self) -> &FlowType {
        match self {
            FlowUnaryType::Pos(e) | FlowUnaryType::Neg(e) | FlowUnaryType::Not(e) => e,
        }
    }
}
/// The result type of a binary operation, carrying both operand types.
#[derive(Debug, Clone, Hash)]
pub(crate) enum FlowBinaryType {
    Add(FlowBinaryRepr),
    Sub(FlowBinaryRepr),
    Mul(FlowBinaryRepr),
    Div(FlowBinaryRepr),
    And(FlowBinaryRepr),
    Or(FlowBinaryRepr),
    Eq(FlowBinaryRepr),
    Neq(FlowBinaryRepr),
    Lt(FlowBinaryRepr),
    Leq(FlowBinaryRepr),
    Gt(FlowBinaryRepr),
    Geq(FlowBinaryRepr),
    Assign(FlowBinaryRepr),
    In(FlowBinaryRepr),
    NotIn(FlowBinaryRepr),
    AddAssign(FlowBinaryRepr),
    SubAssign(FlowBinaryRepr),
    MulAssign(FlowBinaryRepr),
    DivAssign(FlowBinaryRepr),
}
impl FlowBinaryType {
    /// Returns the operand-type pair, regardless of the operator.
    pub fn repr(&self) -> &FlowBinaryRepr {
        match self {
            FlowBinaryType::Add(r)
            | FlowBinaryType::Sub(r)
            | FlowBinaryType::Mul(r)
            | FlowBinaryType::Div(r)
            | FlowBinaryType::And(r)
            | FlowBinaryType::Or(r)
            | FlowBinaryType::Eq(r)
            | FlowBinaryType::Neq(r)
            | FlowBinaryType::Lt(r)
            | FlowBinaryType::Leq(r)
            | FlowBinaryType::Gt(r)
            | FlowBinaryType::Geq(r)
            | FlowBinaryType::Assign(r)
            | FlowBinaryType::In(r)
            | FlowBinaryType::NotIn(r)
            | FlowBinaryType::AddAssign(r)
            | FlowBinaryType::SubAssign(r)
            | FlowBinaryType::MulAssign(r)
            | FlowBinaryType::DivAssign(r) => r,
        }
    }
}
/// The pair of operand types of a binary operation (lhs, rhs).
#[derive(Clone, Hash)]
pub(crate) struct FlowBinaryRepr(pub Box<(FlowType, FlowType)>);

impl fmt::Debug for FlowBinaryRepr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // shorter
        write!(f, "{:?}, {:?}", RefDebug(&self.0 .0), RefDebug(&self.0 .1))
    }
}
/// The collected bounds of a type variable.
#[derive(Clone, Hash)]
pub(crate) struct FlowVarStore {
    // Lower bounds: types the variable was observed to be at least.
    pub lbs: Vec<FlowType>,
    // Upper bounds: types the variable must fit into.
    pub ubs: Vec<FlowType>,
}
impl fmt::Debug for FlowVarStore {
    /// Renders bounds as ` ⪰ lb1 | lb2 … ⪯ ub1 & ub2 …`, omitting an empty
    /// side entirely.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let Some((first, rest)) = self.lbs.split_first() {
            write!(f, " ⪰ {first:?}")?;
            for lb in rest {
                write!(f, " | {lb:?}")?;
            }
        }
        if let Some((first, rest)) = self.ubs.split_first() {
            write!(f, " ⪯ {first:?}")?;
            for ub in rest {
                write!(f, " & {ub:?}")?;
            }
        }
        Ok(())
    }
}
/// How a type variable is constrained.
#[derive(Clone)]
pub(crate) enum FlowVarKind {
    /// A weakly-constrained variable: a shared, mutable store of bounds.
    Weak(Arc<RwLock<FlowVarStore>>),
}
/// A type variable created during type checking.
#[derive(Clone)]
pub(crate) struct FlowVar {
    // The display name of the variable.
    pub name: EcoString,
    // The id of the definition this variable stands for; used as its identity.
    pub id: DefId,
    // The constraint store of the variable.
    pub kind: FlowVarKind,
}
impl std::hash::Hash for FlowVar {
    // Identity hash: only the definition id participates; the name is display
    // metadata and the bound store is mutable, so neither is hashed.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // The constant prefix acts as a fixed tag for this type.
        0.hash(state);
        self.id.hash(state);
    }
}
impl fmt::Debug for FlowVar {
    /// Renders the variable as `@name` followed by its recorded bounds.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "@{}", self.name)?;
        match &self.kind {
            // FlowVarKind::Strong(t) => write!(f, " = {:?}", t),
            FlowVarKind::Weak(w) => write!(f, "{w:?}"),
        }
    }
}
impl FlowVar {
    /// Returns the display name of the variable.
    pub fn name(&self) -> EcoString {
        self.name.clone()
    }

    /// Returns the definition id identifying the variable.
    pub fn id(&self) -> DefId {
        self.id
    }

    /// Creates a `FlowType::Var` referring to this variable.
    pub fn get_ref(&self) -> FlowType {
        FlowType::Var(Box::new((self.id, self.name.clone())))
    }

    /// Records that the variable was observed to be `exp`, i.e. pushes `exp`
    /// as a lower bound.
    pub fn ever_be(&self, exp: FlowType) {
        match &self.kind {
            // FlowVarKind::Strong(_t) => {}
            FlowVarKind::Weak(w) => {
                // `exp` is owned and unused afterwards: move it instead of
                // cloning (the clone was redundant).
                let mut w = w.write();
                w.lbs.push(exp);
            }
        }
    }

    /// Pins the variable to `exp`.
    ///
    /// NOTE(review): currently identical to `ever_be` — it only records a
    /// lower bound; a dedicated strong-binding kind appears planned (see the
    /// commented-out `Strong` code) but is not implemented yet.
    pub fn as_strong(&mut self, exp: FlowType) {
        // self.kind = FlowVarKind::Strong(value);
        match &self.kind {
            // FlowVarKind::Strong(_t) => {}
            FlowVarKind::Weak(w) => {
                let mut w = w.write();
                w.lbs.push(exp);
            }
        }
    }
}
/// A field/method access type: the `field` of the type `base`, printed as
/// `base.field`.
#[derive(Hash, Clone)]
pub(crate) struct FlowAt(pub Box<(FlowType, EcoString)>);

impl fmt::Debug for FlowAt {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}.{}", RefDebug(&self.0 .0), self.0 .1)
    }
}
/// The types of the arguments at a call site.
#[derive(Clone, Hash)]
pub(crate) struct FlowArgs {
    // Positional argument types, in order.
    pub args: Vec<FlowType>,
    // Named argument types as (name, type) pairs.
    pub named: Vec<(EcoString, FlowType)>,
}

impl FlowArgs {
    /// Returns the positional argument types, used to start matching against
    /// a signature.
    pub fn start_match(&self) -> &[FlowType] {
        &self.args
    }
}
impl fmt::Debug for FlowArgs {
    /// Formats the positional arguments as `&(a, b, c)`.
    ///
    /// NOTE(review): named arguments are not printed — presumably intentional
    /// for brevity; confirm against snapshot expectations.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use std::fmt::Write;
        f.write_str("&(")?;
        if let Some((first, args)) = self.args.split_first() {
            write!(f, "{first:?}")?;
            for arg in args {
                // The separator goes *before* each following element; the
                // previous code appended "{arg:?}, " after the element, which
                // glued the first two entries together and left a trailing
                // ", " (e.g. `&(ab, c, )` instead of `&(a, b, c)`).
                write!(f, ", {arg:?}")?;
            }
        }
        f.write_char(')')
    }
}
/// A function signature in the flow type system.
#[derive(Clone, Hash)]
pub(crate) struct FlowSignature {
    // Positional parameter types, in order.
    pub pos: Vec<FlowType>,
    // Named parameter types as (name, type) pairs.
    pub named: Vec<(EcoString, FlowType)>,
    // The type of the rest (variadic) parameter, if any.
    pub rest: Option<FlowType>,
    // The return type.
    pub ret: FlowType,
}
impl fmt::Debug for FlowSignature {
    /// Renders the signature as `(pos1, pos2, name: ty, ...: rest) -> ret`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("(")?;
        for (i, p) in self.pos.iter().enumerate() {
            if i > 0 {
                f.write_str(", ")?;
            }
            write!(f, "{p:?}")?;
        }
        for (name, ty) in &self.named {
            write!(f, ", {name}: {ty:?}")?;
        }
        if let Some(rest) = &self.rest {
            write!(f, ", ...: {rest:?}")?;
        }
        write!(f, ") -> {:?}", self.ret)
    }
}
/// A record (dictionary) type: an ordered list of (field name, field type,
/// source span) entries.
#[derive(Clone, Hash)]
pub(crate) struct FlowRecord {
    pub fields: EcoVec<(EcoString, FlowType, Span)>,
}
impl FlowRecord {
    /// Yields `(i, j)` index pairs for every field name that occurs in both
    /// records, where `i` indexes `self.fields` and `j` indexes `rhs.fields`.
    pub(crate) fn intersect_keys_enumerate<'a>(
        &'a self,
        rhs: &'a FlowRecord,
    ) -> impl Iterator<Item = (usize, usize)> + 'a {
        let mut lhs = self;
        let mut rhs = rhs;

        // size optimization: iterate the larger record and search the smaller
        // one, so the inner `position` scan runs over fewer fields.
        let mut swapped = false;
        if lhs.fields.len() < rhs.fields.len() {
            swapped = true;
            std::mem::swap(&mut lhs, &mut rhs);
        }

        lhs.fields
            .iter()
            .enumerate()
            .filter_map(move |(i, (name, _, _))| {
                rhs.fields
                    .iter()
                    .position(|(name2, _, _)| name == name2)
                    .map(|j| (i, j))
            })
            // Undo the swap so the pairs are always (self index, rhs index).
            .map(move |(i, j)| if swapped { (j, i) } else { (i, j) })
    }

    /// Yields the field entries themselves (from `self` and `rhs`) for every
    /// field name that occurs in both records.
    pub(crate) fn intersect_keys<'a>(
        &'a self,
        rhs: &'a FlowRecord,
    ) -> impl Iterator<Item = (&(EcoString, FlowType, Span), &(EcoString, FlowType, Span))> + 'a
    {
        self.intersect_keys_enumerate(rhs)
            .filter_map(move |(i, j)| {
                self.fields
                    .get(i)
                    .and_then(|lhs| rhs.fields.get(j).map(|rhs| (lhs, rhs)))
            })
    }
}
impl fmt::Debug for FlowRecord {
    /// Renders the record as `{"name": ty, "name2": ty2}`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("{")?;
        for (i, (name, ty, _)) in self.fields.iter().enumerate() {
            if i > 0 {
                f.write_str(", ")?;
            }
            write!(f, "{name:?}: {ty:?}")?;
        }
        f.write_str("}")
    }
}

View file

@ -1,13 +1,11 @@
use ecow::eco_format;
use lsp_types::{CompletionItem, CompletionList, CompletionTextEdit, InsertTextFormat, TextEdit};
use reflexo::path::{unix_slash, PathClean};
use lsp_types::CompletionList;
use crate::{
analysis::{FlowBuiltinType, FlowType},
prelude::*,
syntax::{get_deref_target, DerefTarget},
typst_to_lsp::completion_kind,
upstream::{autocomplete, Completion, CompletionContext, CompletionKind},
LspCompletion, StatefulRequest,
upstream::{autocomplete, complete_path, CompletionContext},
StatefulRequest,
};
use self::typst_to_lsp::completion;
@ -104,15 +102,47 @@ impl StatefulRequest for CompletionRequest {
}
Some(DerefTarget::ImportPath(v) | DerefTarget::IncludePath(v)) => {
if !v.text().starts_with(r#""@"#) {
completion_result = complete_path(ctx, v, &source, cursor);
completion_result = complete_path(
ctx,
Some(v),
&source,
cursor,
&crate::analysis::PathPreference::Source,
);
}
}
Some(DerefTarget::Normal(SyntaxKind::Str, cano_expr)) => {
let parent = cano_expr.parent()?;
if matches!(parent.kind(), SyntaxKind::Named | SyntaxKind::Args) {
let ty_chk = ctx.type_check(source.clone());
if let Some(ty_chk) = ty_chk {
let ty = ty_chk.mapping.get(&cano_expr.span());
log::info!("check string ty: {:?}", ty);
if let Some(FlowType::Builtin(FlowBuiltinType::Path(path_filter))) = ty {
completion_result =
complete_path(ctx, Some(cano_expr), &source, cursor, path_filter);
}
}
}
}
// todo: label, reference
Some(DerefTarget::Label(..) | DerefTarget::Ref(..) | DerefTarget::Normal(..)) => {}
None => {}
}
let items = completion_result.or_else(|| {
let mut completion_items_rest = None;
let is_incomplete = false;
let mut items = completion_result.or_else(|| {
let cc_ctx = CompletionContext::new(ctx, doc, &source, cursor, explicit)?;
let (offset, mut completions) = autocomplete(cc_ctx)?;
let (offset, ic, mut completions, completions_items2) = autocomplete(cc_ctx)?;
if !completions_items2.is_empty() {
completion_items_rest = Some(completions_items2);
}
// todo: define it well, we were needing it because we wanted to do interactive
// path completion, but now we've scanned all the paths at the same time.
// is_incomplete = ic;
let _ = ic;
let replace_range;
if match_ident.as_ref().is_some_and(|i| i.offset() == offset) {
@ -149,167 +179,24 @@ impl StatefulRequest for CompletionRequest {
)
})?;
if let Some(items_rest) = completion_items_rest.as_mut() {
items.append(items_rest);
}
// To response completions in fine-grained manner, we need to mark result as
// incomplete. This follows what rust-analyzer does.
// https://github.com/rust-lang/rust-analyzer/blob/f5a9250147f6569d8d89334dc9cca79c0322729f/crates/rust-analyzer/src/handlers/request.rs#L940C55-L940C75
Some(CompletionResponse::List(CompletionList {
is_incomplete: true,
is_incomplete,
items,
}))
}
}
fn complete_path(
ctx: &AnalysisContext,
v: LinkedNode,
source: &Source,
cursor: usize,
) -> Option<Vec<CompletionItem>> {
let id = source.id();
if id.package().is_some() {
return None;
}
let vp = v.cast::<ast::Str>()?;
// todo: path escape
let real_content = vp.get();
let text = v.text();
let unquoted = &text[1..text.len() - 1];
if unquoted != real_content {
return None;
}
let text = source.text();
let vr = v.range();
let offset = vr.start + 1;
if cursor < offset || vr.end <= cursor || vr.len() < 2 {
return None;
}
let path = Path::new(&text[offset..cursor]);
let is_abs = path.is_absolute();
let src_path = id.vpath();
let base = src_path.resolve(&ctx.analysis.root)?;
let dst_path = src_path.join(path);
let mut compl_path = dst_path.as_rootless_path();
if !compl_path.is_dir() {
compl_path = compl_path.parent().unwrap_or(Path::new(""));
}
log::debug!("compl_path: {src_path:?} + {path:?} -> {compl_path:?}");
if compl_path.is_absolute() {
log::warn!("absolute path completion is not supported for security consideration {path:?}");
return None;
}
let dirs = ctx.analysis.root.join(compl_path);
log::debug!("compl_dirs: {dirs:?}");
// find directory or files in the path
let mut folder_completions = vec![];
let mut module_completions = vec![];
// todo: test it correctly
for entry in dirs.read_dir().ok()? {
let Ok(entry) = entry else {
continue;
};
let path = entry.path();
log::trace!("compl_check_path: {path:?}");
if !path.is_dir() && !path.extension().is_some_and(|ext| ext == "typ") {
continue;
}
if path.is_dir()
&& path
.file_name()
.is_some_and(|name| name.to_string_lossy().starts_with('.'))
{
continue;
}
// diff with root
let path = dirs.join(path);
// Skip self smartly
if path.clean() == base.clean() {
continue;
}
let label = if is_abs {
// diff with root
let w = path.strip_prefix(&ctx.analysis.root).ok()?;
eco_format!("/{}", unix_slash(w))
} else {
let base = base.parent()?;
let w = pathdiff::diff_paths(&path, base)?;
unix_slash(&w).into()
};
log::debug!("compl_label: {label:?}");
if path.is_dir() {
folder_completions.push(Completion {
label,
kind: CompletionKind::Folder,
apply: None,
detail: None,
command: None,
});
} else {
module_completions.push(Completion {
label,
kind: CompletionKind::Module,
apply: None,
detail: None,
command: None,
});
}
}
let rng = offset..vr.end - 1;
let replace_range = ctx.to_lsp_range(rng, source);
module_completions.sort_by(|a, b| a.label.cmp(&b.label));
folder_completions.sort_by(|a, b| a.label.cmp(&b.label));
let mut sorter = 0;
let digits = (module_completions.len() + folder_completions.len())
.to_string()
.len();
let completions = module_completions.into_iter().chain(folder_completions);
Some(
completions
.map(|typst_completion| {
let lsp_snippet = typst_completion
.apply
.as_ref()
.unwrap_or(&typst_completion.label);
let text_edit =
CompletionTextEdit::Edit(TextEdit::new(replace_range, lsp_snippet.to_string()));
let sort_text = format!("{sorter:0>digits$}");
sorter += 1;
let res = LspCompletion {
label: typst_completion.label.to_string(),
kind: Some(completion_kind(typst_completion.kind.clone())),
detail: typst_completion.detail.as_ref().map(String::from),
text_edit: Some(text_edit),
// don't sort me
sort_text: Some(sort_text),
filter_text: Some("".to_owned()),
insert_text_format: Some(InsertTextFormat::PLAIN_TEXT),
..Default::default()
};
log::debug!("compl_res: {res:?}");
res
})
.collect_vec(),
)
}
#[cfg(test)]
mod tests {
use insta::with_settings;
use lsp_types::CompletionItem;
use super::*;
use crate::tests::*;

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/base.typ
---
[
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 7,
@ -249,7 +249,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/base.typ
]
},
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 3,

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/func_params.typ
---
[
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 7,
@ -249,7 +249,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/func_params.typ
]
},
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 6,

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/item_shadow.typ
---
[
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 7,
@ -215,7 +215,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/item_shadow.typ
]
},
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 3,

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/let.typ
---
[
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 7,
@ -249,7 +249,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/let.typ
]
},
{
"isIncomplete": true,
"isIncomplete": false,
"items": [
{
"kind": 6,

View file

@ -4,6 +4,6 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/builtin_var.typ
---
{
"contents": "```typst\nrgb(\"#ff4136\")\n```",
"contents": "```typc\nrgb(\"#ff4136\")\n```",
"range": "0:20:0:23"
}

View file

@ -0,0 +1,16 @@
#text(size: 1pt, font: (), stroke: 1pt, fill: red)[]
#path(fill: red, stroke: red)
#line(angle: 1deg, length: 1pt, stroke: red)
#rect(width: 1pt, height: 1pt, fill: red, stroke: red, radius: 1pt, inset: 1pt, outset: 1pt)
#ellipse(fill: red, stroke: red)
#circle(fill: red, stroke: red)
#box(fill: red, stroke: red)
#block(fill: red, stroke: red)
#table(
fill: red,
stroke: red,
table.hline(stroke: red),
table.vline(stroke: red),
)
#text(stroke: ())

View file

@ -0,0 +1,5 @@
#text(stroke: (
paint: black,
thickness: 1pt,
))[]

View file

@ -0,0 +1,73 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/type_check/infer2.typ
---
---
1..52 -> Element(text)
6..15 -> TextSize
12..15 -> TextSize
17..25 -> TextFont
23..25 -> TextFont
27..38 -> Stroke
35..38 -> Stroke
40..49 -> Color
46..49 -> Color
50..52 -> Type(content)
54..82 -> Element(path)
59..68 -> Color
65..68 -> Color
70..81 -> Stroke
78..81 -> Stroke
84..127 -> Element(line)
89..100 -> Type(angle)
96..100 -> Type(angle)
102..113 -> Type(relative length)
110..113 -> Type(relative length)
115..126 -> Stroke
123..126 -> Stroke
129..220 -> Element(rect)
134..144 -> (Type(relative length) | Type(auto))
141..144 -> (Type(relative length) | Type(auto))
146..157 -> (Type(relative length) | Type(auto))
154..157 -> (Type(relative length) | Type(auto))
159..168 -> Color
165..168 -> Color
170..181 -> Stroke
178..181 -> Stroke
183..194 -> Radius
191..194 -> Radius
196..206 -> Inset
203..206 -> Inset
208..219 -> Outset
216..219 -> Outset
222..253 -> Element(ellipse)
230..239 -> Color
236..239 -> Color
241..252 -> Stroke
249..252 -> Stroke
255..285 -> Element(circle)
262..271 -> Color
268..271 -> Color
273..284 -> Stroke
281..284 -> Stroke
287..314 -> Element(box)
291..300 -> Color
297..300 -> Color
302..313 -> Stroke
310..313 -> Stroke
316..345 -> Element(block)
322..331 -> Color
328..331 -> Color
333..344 -> Stroke
341..344 -> Stroke
347..439 -> Element(table)
356..365 -> Color
362..365 -> Color
369..380 -> Stroke
377..380 -> Stroke
384..408 -> Type(content)
412..436 -> Any
441..457 -> Element(text)
446..456 -> Stroke
454..456 -> Stroke

View file

@ -0,0 +1,14 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/type_check/infer_stroke_dict.typ
---
---
1..54 -> Element(text)
6..51 -> Stroke
14..51 -> Stroke
18..30 -> Color
25..30 -> Color
34..48 -> Length
45..48 -> Length
52..54 -> Type(content)

View file

@ -272,11 +272,13 @@ pub mod typst_to_lsp {
TypstCompletionKind::Syntax => LspCompletionKind::SNIPPET,
TypstCompletionKind::Func => LspCompletionKind::FUNCTION,
TypstCompletionKind::Param => LspCompletionKind::VARIABLE,
TypstCompletionKind::Field => LspCompletionKind::FIELD,
TypstCompletionKind::Variable => LspCompletionKind::VARIABLE,
TypstCompletionKind::Constant => LspCompletionKind::CONSTANT,
TypstCompletionKind::Symbol(_) => LspCompletionKind::FIELD,
TypstCompletionKind::Type => LspCompletionKind::CLASS,
TypstCompletionKind::Module => LspCompletionKind::MODULE,
TypstCompletionKind::File => LspCompletionKind::FILE,
TypstCompletionKind::Folder => LspCompletionKind::FOLDER,
}
}
@ -324,7 +326,7 @@ pub mod typst_to_lsp {
let lsp_marked_string = match typst_tooltip {
TypstTooltip::Text(text) => MarkedString::String(text.to_string()),
TypstTooltip::Code(code) => MarkedString::LanguageString(LanguageString {
language: "typst".to_owned(),
language: "typc".to_owned(),
value: code.to_string(),
}),
};

View file

@ -20,7 +20,7 @@ pub use lsp_types::{
pub use reflexo::vector::ir::DefId;
pub use serde_json::Value as JsonValue;
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::foundations::{Func, ParamInfo, Value};
pub use typst::foundations::{Func, Value};
pub use typst::syntax::FileId as TypstFileId;
pub use typst::syntax::{
ast::{self, AstNode},

View file

@ -51,6 +51,10 @@ pub(crate) fn find_references(
DerefTarget::ImportPath(..) | DerefTarget::IncludePath(..) => {
return None;
}
// todo: label, reference
DerefTarget::Label(..) | DerefTarget::Ref(..) | DerefTarget::Normal(..) => {
return None;
}
};
let mut may_ident = node.cast::<ast::Expr>()?;

View file

@ -73,7 +73,7 @@ impl SemanticRequest for RenameRequest {
});
}
// todo: conflict analysis
// todo: name conflict analysis
Some(WorkspaceEdit {
changes: Some(editions),
..Default::default()

View file

@ -1,4 +1,4 @@
use crate::{prelude::*, SemanticRequest};
use crate::{prelude::*, syntax::param_index_at_leaf, SemanticRequest};
/// The [`textDocument/signatureHelp`] request is sent from the client to the
/// server to request signature information at a given cursor position.
@ -95,65 +95,6 @@ fn surrounding_function_syntax<'b>(
Some((callee, grand.find(callee.span())?, args))
}
fn param_index_at_leaf(leaf: &LinkedNode, function: &Func, args: ast::Args) -> Option<usize> {
let deciding = deciding_syntax(leaf);
let params = function.params()?;
let param_index = find_param_index(&deciding, params, args)?;
trace!("got param index {param_index}");
Some(param_index)
}
/// Find the piece of syntax that decides what we're completing.
fn deciding_syntax<'b>(leaf: &'b LinkedNode) -> LinkedNode<'b> {
let mut deciding = leaf.clone();
while !matches!(
deciding.kind(),
SyntaxKind::LeftParen | SyntaxKind::Comma | SyntaxKind::Colon
) {
let Some(prev) = deciding.prev_leaf() else {
break;
};
deciding = prev;
}
deciding
}
fn find_param_index(deciding: &LinkedNode, params: &[ParamInfo], args: ast::Args) -> Option<usize> {
match deciding.kind() {
// After colon: "func(param:|)", "func(param: |)".
SyntaxKind::Colon => {
let prev = deciding.prev_leaf()?;
let param_ident = prev.cast::<ast::Ident>()?;
params
.iter()
.position(|param| param.name == param_ident.as_str())
}
// Before: "func(|)", "func(hi|)", "func(12,|)".
SyntaxKind::Comma | SyntaxKind::LeftParen => {
let next = deciding.next_leaf();
let following_param = next.as_ref().and_then(|next| next.cast::<ast::Ident>());
match following_param {
Some(next) => params
.iter()
.position(|param| param.named && param.name.starts_with(next.as_str())),
None => {
let positional_args_so_far = args
.items()
.filter(|arg| matches!(arg, ast::Arg::Pos(_)))
.count();
params
.iter()
.enumerate()
.filter(|(_, param)| param.positional)
.map(|(i, _)| i)
.nth(positional_args_so_far)
}
}
}
_ => None,
}
}
fn markdown_docs(docs: &str) -> Documentation {
Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown,

View file

@ -32,7 +32,7 @@ impl SemanticRequest for SymbolRequest {
type Response = Vec<SymbolInformation>;
fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response> {
// todo: expose source
// todo: let typst.ts expose source
let mut symbols = vec![];

View file

@ -1,7 +1,10 @@
use log::debug;
use typst::syntax::{
use typst::{
foundations::{Func, ParamInfo},
syntax::{
ast::{self, AstNode},
LinkedNode, SyntaxKind,
},
};
pub fn deref_lvalue(mut node: LinkedNode) -> Option<LinkedNode> {
@ -11,25 +14,6 @@ pub fn deref_lvalue(mut node: LinkedNode) -> Option<LinkedNode> {
Some(node)
}
#[derive(Debug, Clone)]
pub enum DerefTarget<'a> {
VarAccess(LinkedNode<'a>),
Callee(LinkedNode<'a>),
ImportPath(LinkedNode<'a>),
IncludePath(LinkedNode<'a>),
}
impl<'a> DerefTarget<'a> {
pub fn node(&self) -> &LinkedNode {
match self {
DerefTarget::VarAccess(node) => node,
DerefTarget::Callee(node) => node,
DerefTarget::ImportPath(node) => node,
DerefTarget::IncludePath(node) => node,
}
}
}
fn is_mark(sk: SyntaxKind) -> bool {
use SyntaxKind::*;
matches!(
@ -69,68 +53,95 @@ fn is_mark(sk: SyntaxKind) -> bool {
)
}
#[derive(Debug, Clone)]
pub enum DerefTarget<'a> {
Label(LinkedNode<'a>),
Ref(LinkedNode<'a>),
VarAccess(LinkedNode<'a>),
Callee(LinkedNode<'a>),
ImportPath(LinkedNode<'a>),
IncludePath(LinkedNode<'a>),
Normal(SyntaxKind, LinkedNode<'a>),
}
impl<'a> DerefTarget<'a> {
pub fn node(&self) -> &LinkedNode {
match self {
DerefTarget::Label(node) => node,
DerefTarget::Ref(node) => node,
DerefTarget::VarAccess(node) => node,
DerefTarget::Callee(node) => node,
DerefTarget::ImportPath(node) => node,
DerefTarget::IncludePath(node) => node,
DerefTarget::Normal(_, node) => node,
}
}
}
pub fn get_deref_target(node: LinkedNode, cursor: usize) -> Option<DerefTarget> {
fn same_line_skip(node: &LinkedNode, cursor: usize) -> bool {
// (ancestor.kind().is_trivia() && ancestor.text())
/// Skips trivia nodes that are on the same line as the cursor.
fn skippable_trivia(node: &LinkedNode, cursor: usize) -> bool {
// A non-trivia node is our target so we stop at it.
if !node.kind().is_trivia() {
return false;
}
// Get the trivia text before the cursor.
let pref = node.text();
// slice
let pref = if cursor < pref.len() {
&pref[..cursor]
let pref = if node.range().contains(&cursor) {
&pref[..cursor - node.offset()]
} else {
pref
};
// no newlines
// The deref target should be on the same line as the cursor.
// todo: if we are in markup mode, we should check if we are at start of node
!pref.contains('\n')
}
let mut ancestor = node;
if same_line_skip(&ancestor, cursor) || is_mark(ancestor.kind()) {
ancestor = ancestor.prev_sibling()?;
// Move to the first non-trivia node before the cursor.
let mut node = node;
if skippable_trivia(&node, cursor) || is_mark(node.kind()) {
node = node.prev_sibling()?;
}
// Move to the first ancestor that is an expression.
let mut ancestor = node;
while !ancestor.is::<ast::Expr>() {
ancestor = ancestor.parent()?.clone();
}
debug!("deref expr: {ancestor:?}");
let ancestor = deref_lvalue(ancestor)?;
debug!("deref lvalue: {ancestor:?}");
let may_ident = ancestor.cast::<ast::Expr>()?;
if !may_ident.hash() && !matches!(may_ident, ast::Expr::MathIdent(_)) {
return None;
}
// Unwrap all parentheses to get the actual expression.
let cano_expr = deref_lvalue(ancestor)?;
debug!("deref lvalue: {cano_expr:?}");
Some(match may_ident {
// todo: label, reference
// todo: import
// todo: include
ast::Expr::FuncCall(call) => DerefTarget::Callee(ancestor.find(call.callee().span())?),
ast::Expr::Set(set) => DerefTarget::Callee(ancestor.find(set.target().span())?),
// Identify convenient expression kinds.
let expr = cano_expr.cast::<ast::Expr>()?;
Some(match expr {
ast::Expr::Label(..) => DerefTarget::Label(cano_expr),
ast::Expr::Ref(..) => DerefTarget::Ref(cano_expr),
ast::Expr::FuncCall(call) => DerefTarget::Callee(cano_expr.find(call.callee().span())?),
ast::Expr::Set(set) => DerefTarget::Callee(cano_expr.find(set.target().span())?),
ast::Expr::Ident(..) | ast::Expr::MathIdent(..) | ast::Expr::FieldAccess(..) => {
DerefTarget::VarAccess(ancestor.find(may_ident.span())?)
DerefTarget::VarAccess(cano_expr)
}
ast::Expr::Str(..) => {
let parent = ancestor.parent()?;
let parent = cano_expr.parent()?;
if parent.kind() == SyntaxKind::ModuleImport {
return Some(DerefTarget::ImportPath(ancestor.find(may_ident.span())?));
DerefTarget::ImportPath(cano_expr)
} else if parent.kind() == SyntaxKind::ModuleInclude {
DerefTarget::IncludePath(cano_expr)
} else {
DerefTarget::Normal(cano_expr.kind(), cano_expr)
}
if parent.kind() == SyntaxKind::ModuleInclude {
return Some(DerefTarget::IncludePath(ancestor.find(may_ident.span())?));
}
return None;
}
ast::Expr::Import(..) => {
return None;
}
_ => {
debug!("unsupported kind {kind:?}", kind = ancestor.kind());
return None;
_ if expr.hash()
|| matches!(cano_expr.kind(), SyntaxKind::MathIdent | SyntaxKind::Error) =>
{
DerefTarget::Normal(cano_expr.kind(), cano_expr)
}
_ => return None,
})
}
@ -201,3 +212,62 @@ pub fn get_def_target(node: LinkedNode) -> Option<DefTarget<'_>> {
}
})
}
/// Resolves which parameter of `function` the cursor at `leaf` refers to,
/// returning its index into `function.params()`. Used by signature help.
pub fn param_index_at_leaf(leaf: &LinkedNode, function: &Func, args: ast::Args) -> Option<usize> {
    let deciding = deciding_syntax(leaf);
    let params = function.params()?;
    let param_index = find_param_index(&deciding, params, args)?;
    log::trace!("got param index {param_index}");
    Some(param_index)
}
/// Find the piece of syntax that decides what we're completing.
///
/// Walks backwards from `leaf` until hitting a `(`, `,`, or `:` token; the
/// kind of that token determines the completion/highlight position.
fn deciding_syntax<'b>(leaf: &'b LinkedNode) -> LinkedNode<'b> {
    let mut deciding = leaf.clone();
    while !matches!(
        deciding.kind(),
        SyntaxKind::LeftParen | SyntaxKind::Comma | SyntaxKind::Colon
    ) {
        let Some(prev) = deciding.prev_leaf() else {
            break;
        };
        deciding = prev;
    }
    deciding
}
/// Resolve the index of the parameter being completed, given the deciding
/// token (see `deciding_syntax`), the function's parameter infos, and the
/// call's argument list so far.
fn find_param_index(deciding: &LinkedNode, params: &[ParamInfo], args: ast::Args) -> Option<usize> {
    match deciding.kind() {
        // After colon: "func(param:|)", "func(param: |)".
        SyntaxKind::Colon => {
            let prev = deciding.prev_leaf()?;
            let param_ident = prev.cast::<ast::Ident>()?;
            params
                .iter()
                .position(|param| param.name == param_ident.as_str())
        }
        // Before: "func(|)", "func(hi|)", "func(12,|)".
        SyntaxKind::Comma | SyntaxKind::LeftParen => {
            let next = deciding.next_leaf();
            if let Some(following) = next.as_ref().and_then(|next| next.cast::<ast::Ident>()) {
                // A partially typed name follows the cursor: match named
                // parameters by prefix.
                params
                    .iter()
                    .position(|param| param.named && param.name.starts_with(following.as_str()))
            } else {
                // Otherwise, pick the next positional parameter after the
                // positional arguments already present in the call.
                let filled = args
                    .items()
                    .filter(|arg| matches!(arg, ast::Arg::Pos(_)))
                    .count();
                params
                    .iter()
                    .enumerate()
                    .filter_map(|(idx, param)| param.positional.then_some(idx))
                    .nth(filled)
            }
        }
        _ => None,
    }
}

View file

@ -1,5 +1,8 @@
use std::sync::Once;
use once_cell::sync::Lazy;
use regex::RegexSet;
use super::find_imports;
use crate::prelude::*;
@ -21,7 +24,7 @@ pub fn construct_module_dependencies(
let mut dependencies = HashMap::new();
let mut dependents = HashMap::new();
for file_id in ctx.files().clone() {
for file_id in ctx.source_files().clone() {
let source = match ctx.source_by_id(file_id) {
Ok(source) => source,
Err(err) => {
@ -58,22 +61,68 @@ pub fn construct_module_dependencies(
dependencies
}
/// Whether a directory entry is hidden, i.e. its file name starts with a dot.
///
/// Non-UTF-8 file names are treated as not hidden (the name cannot be
/// inspected, so we err on the side of including it).
fn is_hidden(entry: &walkdir::DirEntry) -> bool {
    // `is_some_and` matches the idiom used elsewhere in this module and
    // avoids the `map(..).unwrap_or(false)` two-step.
    entry
        .file_name()
        .to_str()
        .is_some_and(|name| name.starts_with('.'))
}
/// Scan the files in the workspace and return the file ids.
///
/// Note: this function will touch the physical file system.
pub fn scan_workspace_files(root: &Path) -> Vec<TypstFileId> {
pub(crate) fn scan_workspace_files<T>(
root: &Path,
ext: &RegexSet,
f: impl Fn(&Path) -> T,
) -> Vec<T> {
let mut res = vec![];
for path in walkdir::WalkDir::new(root).follow_links(false).into_iter() {
let Ok(de) = path else {
continue;
let mut it = walkdir::WalkDir::new(root).follow_links(false).into_iter();
loop {
let de = match it.next() {
None => break,
Some(Err(_err)) => continue,
Some(Ok(entry)) => entry,
};
if is_hidden(&de) {
if de.file_type().is_dir() {
it.skip_current_dir();
}
continue;
}
/// this is a temporary solution to ignore some common build directories
static IGNORE_REGEX: Lazy<RegexSet> = Lazy::new(|| {
RegexSet::new([
r#"^build$"#,
r#"^target$"#,
r#"^node_modules$"#,
r#"^out$"#,
r#"^dist$"#,
])
.unwrap()
});
if de
.path()
.file_name()
.and_then(|s| s.to_str())
.is_some_and(|s| IGNORE_REGEX.is_match(s))
{
if de.file_type().is_dir() {
it.skip_current_dir();
}
continue;
}
if !de.file_type().is_file() {
continue;
}
if !de
.path()
.extension()
.is_some_and(|e| e == "typ" || e == "typc")
.and_then(|e| e.to_str())
.is_some_and(|e| ext.is_match(e))
{
continue;
}
@ -87,7 +136,12 @@ pub fn scan_workspace_files(root: &Path) -> Vec<TypstFileId> {
}
};
res.push(TypstFileId::new(None, VirtualPath::new(relative_path)));
res.push(f(relative_path));
// two times of max number of typst file ids
if res.len() >= (u16::MAX as usize) {
break;
}
}
res

View file

@ -78,6 +78,7 @@ pub fn snapshot_testing(name: &str, f: &impl Fn(&mut AnalysisContext, PathBuf))
caches: Default::default(),
},
);
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p);
});

View file

@ -9,6 +9,7 @@ use typst::foundations::{
Repr, StyleChain, Styles, Type, Value,
};
use typst::model::Document;
use typst::syntax::ast::AstNode;
use typst::syntax::{ast, is_id_continue, is_id_start, is_ident, LinkedNode, Source, SyntaxKind};
use typst::text::RawElem;
use typst::visualize::Color;
@ -19,6 +20,7 @@ use crate::analysis::{analyze_expr, analyze_import, analyze_labels};
use crate::AnalysisContext;
mod ext;
pub use ext::complete_path;
use ext::*;
/// Autocomplete a cursor position in a source file.
@ -32,18 +34,23 @@ use ext::*;
/// Passing a `document` (from a previous compilation) is optional, but enhances
/// the autocompletions. Label completions, for instance, are only generated
/// when the document is available.
pub fn autocomplete(mut ctx: CompletionContext) -> Option<(usize, Vec<Completion>)> {
pub fn autocomplete(
mut ctx: CompletionContext,
) -> Option<(usize, bool, Vec<Completion>, Vec<lsp_types::CompletionItem>)> {
let _ = complete_comments(&mut ctx)
|| complete_field_accesses(&mut ctx)
|| complete_literal(&mut ctx).is_none() && {
log::info!("continue after completing literal");
complete_field_accesses(&mut ctx)
|| complete_open_labels(&mut ctx)
|| complete_imports(&mut ctx)
|| complete_rules(&mut ctx)
|| complete_params(&mut ctx)
|| complete_markup(&mut ctx)
|| complete_math(&mut ctx)
|| complete_code(&mut ctx);
|| complete_code(&mut ctx)
};
Some((ctx.from, ctx.completions))
Some((ctx.from, ctx.incomplete, ctx.completions, ctx.completions2))
}
/// An autocompletion option.
@ -76,6 +83,8 @@ pub enum CompletionKind {
Type,
/// A function parameter.
Param,
/// A field.
Field,
/// A constant.
Constant,
/// A symbol.
@ -84,6 +93,8 @@ pub enum CompletionKind {
Variable,
/// A module.
Module,
/// A file.
File,
/// A folder.
Folder,
}
@ -315,7 +326,7 @@ fn complete_math(ctx: &mut CompletionContext) -> bool {
/// Add completions for math snippets.
#[rustfmt::skip]
fn math_completions(ctx: &mut CompletionContext) {
ctx.scope_completions_(true, |_| true);
ctx.scope_completions(true, |_| true);
ctx.snippet_completion(
"subscript",
@ -600,7 +611,7 @@ fn complete_rules(ctx: &mut CompletionContext) -> bool {
/// Add completions for all functions from the global scope.
fn set_rule_completions(ctx: &mut CompletionContext) {
ctx.scope_completions_(true, |value| {
ctx.scope_completions(true, |value| {
matches!(
value,
Value::Func(func) if func.params()
@ -613,7 +624,7 @@ fn set_rule_completions(ctx: &mut CompletionContext) {
/// Add completions for selectors.
fn show_rule_selector_completions(ctx: &mut CompletionContext) {
ctx.scope_completions_(
ctx.scope_completions(
false,
|value| matches!(value, Value::Func(func) if func.element().is_some()),
);
@ -653,7 +664,7 @@ fn show_rule_recipe_completions(ctx: &mut CompletionContext) {
"Transform the element with a function.",
);
ctx.scope_completions_(false, |value| matches!(value, Value::Func(_)));
ctx.scope_completions(false, |value| matches!(value, Value::Func(_)));
}
/// Complete call and set rule parameters.
@ -702,8 +713,13 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
if let Some(next) = deciding.next_leaf() {
ctx.from = ctx.cursor.min(next.offset());
}
let parent = deciding.parent().unwrap();
log::info!("named param parent: {:?}", parent);
// get type of this param
let ty = ctx.ctx.type_of(param.to_untyped());
log::info!("named param type: {:?}", ty);
named_param_value_completions(ctx, callee, &param);
named_param_value_completions(ctx, callee, &param, ty.as_ref());
return true;
}
}
@ -772,7 +788,7 @@ fn complete_code(ctx: &mut CompletionContext) -> bool {
/// Add completions for expression snippets.
#[rustfmt::skip]
fn code_completions(ctx: &mut CompletionContext, hash: bool) {
ctx.scope_completions_(true, |value| !hash || {
ctx.scope_completions(true, |value| !hash || {
matches!(value, Value::Symbol(_) | Value::Func(_) | Value::Type(_) | Value::Module(_))
});
@ -942,6 +958,8 @@ pub struct CompletionContext<'a, 'w> {
pub explicit: bool,
pub from: usize,
pub completions: Vec<Completion>,
pub completions2: Vec<lsp_types::CompletionItem>,
pub incomplete: bool,
pub seen_casts: HashSet<u128>,
}
@ -968,13 +986,16 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
cursor,
explicit,
from: cursor,
incomplete: true,
completions: vec![],
completions2: vec![],
seen_casts: HashSet::new(),
})
}
/// A small window of context before the cursor.
fn before_window(&self, size: usize) -> &str {
// todo: bad slicing
&self.before[self.cursor.saturating_sub(size)..]
}
@ -1216,7 +1237,7 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
"color.hsl(${h}, ${s}, ${l}, ${a})",
"A custom HSLA color.",
);
self.scope_completions_(false, |value| value.ty() == *ty);
self.scope_completions(false, |value| value.ty() == *ty);
} else if *ty == Type::of::<Label>() {
self.label_completions()
} else if *ty == Type::of::<Func>() {
@ -1233,7 +1254,7 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
detail: Some(eco_format!("A value of type {ty}.")),
command: None,
});
self.scope_completions_(false, |value| value.ty() == *ty);
self.scope_completions(false, |value| value.ty() == *ty);
}
}
CastInfo::Union(union) => {

View file

@ -1,23 +1,42 @@
use super::{Completion, CompletionContext, CompletionKind};
use std::collections::BTreeMap;
use std::collections::{BTreeMap, HashSet};
use ecow::{eco_format, EcoString};
use typst::foundations::Value;
use lsp_types::{CompletionItem, CompletionTextEdit, InsertTextFormat, TextEdit};
use reflexo::path::{unix_slash, PathClean};
use typst::foundations::{AutoValue, Func, Label, NoneValue, Type, Value};
use typst::layout::Length;
use typst::syntax::ast::AstNode;
use typst::syntax::{ast, SyntaxKind};
use typst::syntax::{ast, Span, SyntaxKind};
use typst::visualize::Color;
use crate::analysis::{analyze_dyn_signature, analyze_import, resolve_callee};
use super::{Completion, CompletionContext, CompletionKind};
use crate::analysis::{
analyze_dyn_signature, analyze_import, resolve_callee, FlowBuiltinType, FlowType,
PathPreference, FLOW_INSET_DICT, FLOW_MARGIN_DICT, FLOW_OUTSET_DICT, FLOW_RADIUS_DICT,
FLOW_STROKE_DICT,
};
use crate::syntax::param_index_at_leaf;
use crate::upstream::plain_docs_sentence;
use crate::{prelude::*, typst_to_lsp::completion_kind, LspCompletion};
impl<'a, 'w> CompletionContext<'a, 'w> {
pub fn world(&self) -> &'w dyn typst::World {
self.ctx.world()
}
pub fn scope_completions(&mut self, parens: bool, filter: impl Fn(&Value) -> bool) {
self.scope_completions_(parens, |v| v.map_or(true, &filter));
}
pub fn strict_scope_completions(&mut self, parens: bool, filter: impl Fn(&Value) -> bool) {
self.scope_completions_(parens, |v| v.map_or(false, &filter));
}
/// Add completions for definitions that are available at the cursor.
///
/// Filters the global/math scope with the given filter.
pub fn scope_completions_(&mut self, parens: bool, filter: impl Fn(&Value) -> bool) {
pub fn scope_completions_(&mut self, parens: bool, filter: impl Fn(Option<&Value>) -> bool) {
let mut defined = BTreeMap::new();
let mut try_insert = |name: EcoString, kind: CompletionKind| {
if name.is_empty() {
@ -51,7 +70,7 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
.as_ref()
.and_then(|source| analyze_import(self.world(), source));
if analyzed.is_none() {
log::info!("failed to analyze import: {:?}", anaylyze);
log::debug!("failed to analyze import: {:?}", anaylyze);
}
if let Some(value) = analyzed {
if imports.is_none() {
@ -124,13 +143,13 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
.scope()
.clone();
for (name, value) in scope.iter() {
if filter(value) && !defined.contains_key(name) {
if filter(Some(value)) && !defined.contains_key(name) {
self.value_completion(Some(name.clone()), value, parens, None);
}
}
for (name, kind) in defined {
if !name.is_empty() {
if filter(None) && !name.is_empty() {
if kind == CompletionKind::Func {
// todo: check arguments, if empty, jump to after the parens
let apply = eco_format!("{}(${{}})", name);
@ -179,6 +198,8 @@ pub fn param_completions<'a>(
func = f.0.clone();
}
let pos_index = param_index_at_leaf(&ctx.leaf, &func, args);
let signature = analyze_dyn_signature(ctx.ctx, func.clone());
// Exclude named arguments which are already present.
@ -192,6 +213,30 @@ pub fn param_completions<'a>(
let primary_sig = signature.primary();
log::debug!("pos_param_completion: {:?}", pos_index);
if let Some(pos_index) = pos_index {
let pos = primary_sig.pos.get(pos_index);
log::debug!("pos_param_completion_to: {:?}", pos);
if let Some(pos) = pos {
if set && !pos.settable {
return;
}
if pos.positional
&& type_completion(
ctx,
pos.infer_type.as_ref(),
Some(&plain_docs_sentence(&pos.docs)),
)
.is_none()
{
ctx.cast_completions(&pos.input);
}
}
}
for (name, param) in &primary_sig.named {
if exclude.iter().any(|ident| ident.as_str() == name) {
continue;
@ -202,7 +247,7 @@ pub fn param_completions<'a>(
}
if param.named {
ctx.completions.push(Completion {
let compl = Completion {
kind: CompletionKind::Param,
label: param.name.clone().into(),
apply: Some(eco_format!("{}: ${{}}", param.name)),
@ -214,10 +259,44 @@ pub fn param_completions<'a>(
// editor.action.triggerSuggest as command on a suggestion to
// "manually" retrigger suggest after inserting one
command: Some("editor.action.triggerSuggest"),
};
match param.infer_type {
Some(FlowType::Builtin(FlowBuiltinType::TextSize)) => {
for size_template in &[
"10.5pt", "12pt", "9pt", "14pt", "8pt", "16pt", "18pt", "20pt", "22pt",
"24pt", "28pt",
] {
let compl = compl.clone();
ctx.completions.push(Completion {
label: eco_format!("{}: {}", param.name, size_template),
apply: None,
..compl
});
}
}
Some(FlowType::Builtin(FlowBuiltinType::Dir)) => {
for dir_template in &["ltr", "rtl", "ttb", "btt"] {
let compl = compl.clone();
ctx.completions.push(Completion {
label: eco_format!("{}: {}", param.name, dir_template),
apply: None,
..compl
});
}
}
_ => {}
}
ctx.completions.push(compl);
}
if param.positional {
if param.positional
&& type_completion(
ctx,
param.infer_type.as_ref(),
Some(&plain_docs_sentence(&param.docs)),
)
.is_none()
{
ctx.cast_completions(&param.input);
}
}
@ -227,17 +306,203 @@ pub fn param_completions<'a>(
}
}
fn type_completion(
ctx: &mut CompletionContext<'_, '_>,
infer_type: Option<&FlowType>,
docs: Option<&str>,
) -> Option<()> {
// Prevent duplicate completions from appearing.
if !ctx.seen_casts.insert(typst::util::hash128(&infer_type)) {
return Some(());
}
log::info!("type_completion: {:?}", infer_type);
match infer_type? {
FlowType::Clause => return None,
FlowType::Undef => return None,
FlowType::Content => return None,
FlowType::Any => return None,
FlowType::Array => {
ctx.snippet_completion("()", "(${})", "An array.");
}
FlowType::Dict(..) => {
ctx.snippet_completion("()", "(${})", "A dictionary.");
}
FlowType::None => ctx.snippet_completion("none", "none", "Nothing."),
FlowType::Infer => return None,
FlowType::FlowNone => return None,
FlowType::Auto => {
ctx.snippet_completion("auto", "auto", "A smart default.");
}
FlowType::Builtin(v) => match v {
FlowBuiltinType::Path(p) => {
let source = ctx.ctx.source_by_id(ctx.root.span().id()?).ok()?;
log::debug!(
"type_path_completion: {:?}",
&source.text()[ctx.cursor - 10..ctx.cursor]
);
ctx.completions2.extend(
complete_path(ctx.ctx, None, &source, ctx.cursor, p)
.into_iter()
.flatten(),
);
}
FlowBuiltinType::Args => return None,
FlowBuiltinType::Stroke => {
ctx.snippet_completion("stroke()", "stroke(${})", "Stroke type.");
ctx.snippet_completion("()", "(${})", "Stroke dictionary.");
type_completion(ctx, Some(&FlowType::Builtin(FlowBuiltinType::Color)), None);
type_completion(ctx, Some(&FlowType::Builtin(FlowBuiltinType::Length)), None);
}
FlowBuiltinType::Color => {
ctx.snippet_completion("luma()", "luma(${v})", "A custom grayscale color.");
ctx.snippet_completion(
"rgb()",
"rgb(${r}, ${g}, ${b}, ${a})",
"A custom RGBA color.",
);
ctx.snippet_completion(
"cmyk()",
"cmyk(${c}, ${m}, ${y}, ${k})",
"A custom CMYK color.",
);
ctx.snippet_completion(
"oklab()",
"oklab(${l}, ${a}, ${b}, ${alpha})",
"A custom Oklab color.",
);
ctx.snippet_completion(
"oklch()",
"oklch(${l}, ${chroma}, ${hue}, ${alpha})",
"A custom Oklch color.",
);
ctx.snippet_completion(
"color.linear-rgb()",
"color.linear-rgb(${r}, ${g}, ${b}, ${a})",
"A custom linear RGBA color.",
);
ctx.snippet_completion(
"color.hsv()",
"color.hsv(${h}, ${s}, ${v}, ${a})",
"A custom HSVA color.",
);
ctx.snippet_completion(
"color.hsl()",
"color.hsl(${h}, ${s}, ${l}, ${a})",
"A custom HSLA color.",
);
let color_ty = Type::of::<Color>();
ctx.strict_scope_completions(false, |value| value.ty() == color_ty);
}
FlowBuiltinType::TextSize => return None,
FlowBuiltinType::TextFont => return None,
FlowBuiltinType::Dir => return None,
FlowBuiltinType::Margin => {
ctx.snippet_completion("()", "(${})", "Margin dictionary.");
type_completion(ctx, Some(&FlowType::Builtin(FlowBuiltinType::Length)), None);
}
FlowBuiltinType::Inset => {
ctx.snippet_completion("()", "(${})", "Inset dictionary.");
type_completion(ctx, Some(&FlowType::Builtin(FlowBuiltinType::Length)), None);
}
FlowBuiltinType::Outset => {
ctx.snippet_completion("()", "(${})", "Outset dictionary.");
type_completion(ctx, Some(&FlowType::Builtin(FlowBuiltinType::Length)), None);
}
FlowBuiltinType::Radius => {
ctx.snippet_completion("()", "(${})", "Radius dictionary.");
type_completion(ctx, Some(&FlowType::Builtin(FlowBuiltinType::Length)), None);
}
FlowBuiltinType::Length => {
ctx.snippet_completion("pt", "${1}pt", "Point length unit.");
ctx.snippet_completion("mm", "${1}mm", "Millimeter length unit.");
ctx.snippet_completion("cm", "${1}cm", "Centimeter length unit.");
ctx.snippet_completion("in", "${1}in", "Inch length unit.");
ctx.snippet_completion("em", "${1}em", "Em length unit.");
let length_ty = Type::of::<Length>();
ctx.strict_scope_completions(false, |value| value.ty() == length_ty);
}
FlowBuiltinType::Float => {
ctx.snippet_completion("exponential notation", "${1}e${0}", "Exponential notation");
}
},
FlowType::Args(_) => return None,
FlowType::Func(_) => return None,
FlowType::With(_) => return None,
FlowType::At(_) => return None,
FlowType::Union(u) => {
for info in u.as_ref() {
type_completion(ctx, Some(info), docs);
}
}
FlowType::Let(_) => return None,
FlowType::Var(_) => return None,
FlowType::Unary(_) => return None,
FlowType::Binary(_) => return None,
FlowType::Value(v) => {
if let Value::Type(ty) = &v.0 {
if *ty == Type::of::<NoneValue>() {
ctx.snippet_completion("none", "none", "Nothing.")
} else if *ty == Type::of::<AutoValue>() {
ctx.snippet_completion("auto", "auto", "A smart default.");
} else if *ty == Type::of::<bool>() {
ctx.snippet_completion("false", "false", "No / Disabled.");
ctx.snippet_completion("true", "true", "Yes / Enabled.");
} else if *ty == Type::of::<Color>() {
type_completion(ctx, Some(&FlowType::Builtin(FlowBuiltinType::Color)), None);
} else if *ty == Type::of::<Label>() {
ctx.label_completions()
} else if *ty == Type::of::<Func>() {
ctx.snippet_completion(
"function",
"(${params}) => ${output}",
"A custom function.",
);
} else {
ctx.completions.push(Completion {
kind: CompletionKind::Syntax,
label: ty.long_name().into(),
apply: Some(eco_format!("${{{ty}}}")),
detail: Some(eco_format!("A value of type {ty}.")),
command: None,
});
ctx.strict_scope_completions(false, |value| value.ty() == *ty);
}
} else {
ctx.value_completion(None, &v.0, true, docs);
}
}
FlowType::ValueDoc(v) => {
let (value, docs) = v.as_ref();
ctx.value_completion(None, value, true, Some(docs));
}
FlowType::Element(e) => {
ctx.value_completion(Some(e.name().into()), &Value::Func((*e).into()), true, docs);
} // CastInfo::Any => {}
};
Some(())
}
/// Add completions for the values of a named function parameter.
pub fn named_param_value_completions<'a>(
ctx: &mut CompletionContext<'a, '_>,
callee: ast::Expr<'a>,
name: &str,
ty: Option<&FlowType>,
) {
let Some(func) = ctx
.root
.find(callee.span())
.and_then(|callee| resolve_callee(ctx.ctx, callee))
else {
// static analysis
if let Some(ty) = ty {
type_completion(ctx, Some(ty), None);
}
return;
};
@ -260,6 +525,11 @@ pub fn named_param_value_completions<'a>(
return;
}
// static analysis
if let Some(ty) = ty {
type_completion(ctx, Some(ty), Some(&plain_docs_sentence(&param.docs)));
}
if let Some(expr) = &param.expr {
ctx.completions.push(Completion {
kind: CompletionKind::Constant,
@ -270,7 +540,15 @@ pub fn named_param_value_completions<'a>(
});
}
if type_completion(
ctx,
param.infer_type.as_ref(),
Some(&plain_docs_sentence(&param.docs)),
)
.is_none()
{
ctx.cast_completions(&param.input);
}
if name == "font" {
ctx.font_completions();
}
@ -279,3 +557,260 @@ pub fn named_param_value_completions<'a>(
ctx.enrich(" ", "");
}
}
/// Complete keys inside a dictionary (or empty-array-as-dictionary) literal,
/// driven by the inferred type of the dictionary or of the named item under
/// the cursor.
///
/// Returns `Some(())` when completions were produced, `None` when the cursor
/// is not in a completable literal position.
pub fn complete_literal(ctx: &mut CompletionContext) -> Option<()> {
    let parent = ctx.leaf.clone();
    log::debug!("check complete_literal: {:?}", ctx.leaf);
    // Skip trivia (whitespace/comments) back to the preceding sibling.
    let parent = if parent.kind().is_trivia() {
        parent.prev_sibling()?
    } else {
        parent
    };
    // NOTE(review): logs `ctx.leaf` rather than the adjusted `parent` —
    // possibly intended to log `parent`; confirm.
    log::debug!("check complete_literal 2: {:?}", ctx.leaf);
    let parent = &parent;
    // Climb out of a `name:` pair if the cursor sits on the colon.
    let parent = match parent.kind() {
        SyntaxKind::Colon => parent.parent()?,
        _ => parent,
    };
    // Climb to the literal node, remembering the named item (if any) so its
    // value type can be queried below.
    let (named, parent) = match parent.kind() {
        SyntaxKind::Named => (parent.cast::<ast::Named>(), parent.parent()?),
        SyntaxKind::LeftParen | SyntaxKind::Comma => (None, parent.parent()?),
        _ => (None, parent),
    };
    log::debug!("check complete_literal 3: {:?}", ctx.leaf);
    // or empty array
    let dict_span;
    let dict_lit = match parent.kind() {
        SyntaxKind::Dict => {
            let dict_lit = parent.get().cast::<ast::Dict>()?;
            dict_span = dict_lit.span();
            dict_lit
        }
        SyntaxKind::Array => {
            // `()` parses as an empty array but may be a dictionary in the
            // making; a non-empty array is definitely not a dictionary.
            let w = parent.get().cast::<ast::Array>()?;
            if w.items().next().is_some() {
                return None;
            }
            dict_span = w.span();
            ast::Dict::default()
        }
        _ => return None,
    };
    // query type of the dict
    let named_span = named.map(|n| n.span()).unwrap_or_else(Span::detached);
    let named_ty = ctx.ctx.type_of_span(named_span);
    let dict_ty = ctx.ctx.type_of_span(dict_span);
    log::info!("complete_literal: {:?} {:?}", dict_ty, named_ty);
    // todo: check if the dict is named
    // If the type of the named item's value is known, complete the value by
    // type and stop — key completion below does not apply.
    if named_ty.is_some() {
        let res = type_completion(ctx, named_ty.as_ref(), None);
        if res.is_some() {
            ctx.incomplete = false;
        }
        return res;
    }
    // Collect keys already present so they are not offered again. Keyed
    // entries are only counted when their key const-evaluates to a string.
    let existing = dict_lit
        .items()
        .filter_map(|field| match field {
            ast::DictItem::Named(n) => Some(n.name().get().clone()),
            ast::DictItem::Keyed(k) => {
                let key = ctx.ctx.const_eval(k.key());
                if let Some(Value::Str(key)) = key {
                    return Some(key.into());
                }
                None
            }
            // todo: var dict union
            ast::DictItem::Spread(_s) => None,
        })
        .collect::<HashSet<_>>();
    // Map the inferred dictionary type to its known field interface.
    let dict_ty = dict_ty?;
    let dict_interface = match dict_ty {
        FlowType::Builtin(FlowBuiltinType::Stroke) => &FLOW_STROKE_DICT,
        FlowType::Builtin(FlowBuiltinType::Margin) => &FLOW_MARGIN_DICT,
        FlowType::Builtin(FlowBuiltinType::Inset) => &FLOW_INSET_DICT,
        FlowType::Builtin(FlowBuiltinType::Outset) => &FLOW_OUTSET_DICT,
        FlowType::Builtin(FlowBuiltinType::Radius) => &FLOW_RADIUS_DICT,
        _ => return None,
    };
    // Offer each field not yet present, re-triggering suggest so the value
    // can be completed right after the key is inserted.
    for (key, _, _) in dict_interface.fields.iter() {
        if existing.contains(key) {
            continue;
        }
        ctx.completions.push(Completion {
            kind: CompletionKind::Field,
            label: key.clone(),
            apply: Some(eco_format!("{}: ${{}}", key)),
            detail: None,
            // todo: only vscode and neovim (0.9.1) support this
            command: Some("editor.action.triggerSuggest"),
        });
    }
    // Insert a space after a comma for natural formatting.
    if ctx.before.ends_with(',') {
        ctx.enrich(" ", "");
    }
    ctx.incomplete = false;
    Some(())
}
/// Complete a path argument with files and folders known to the analysis
/// context, filtered by the path preference `p` (e.g. images, bib files).
///
/// `v` is the string-literal node under the cursor, if any; when absent, a
/// quoted path is inserted at `cursor`. Returns LSP completion items with
/// pre-computed text edits, or `None` when path completion does not apply
/// (inside a package, cursor outside the string, absolute completion path).
pub fn complete_path(
    ctx: &AnalysisContext,
    v: Option<LinkedNode>,
    source: &Source,
    cursor: usize,
    p: &PathPreference,
) -> Option<Vec<CompletionItem>> {
    let id = source.id();
    // Package files are read-only bundles; do not offer paths there.
    if id.package().is_some() {
        return None;
    }
    let is_in_text;
    let text;
    let rng;
    if let Some(v) = v {
        let vp = v.cast::<ast::Str>()?;
        // todo: path escape
        // Bail out when the literal contains escapes: the raw text between
        // the quotes would not match the evaluated string content.
        let real_content = vp.get();
        let str_content = v.text();
        let unquoted = &str_content[1..str_content.len() - 1];
        if unquoted != real_content {
            return None;
        }
        // The replace range covers the content between the quotes; the
        // cursor must lie strictly inside the literal.
        let vr = v.range();
        let offset = vr.start + 1;
        if cursor < offset || vr.end <= cursor || vr.len() < 2 {
            return None;
        }
        text = &source.text()[offset..cursor];
        rng = offset..vr.end - 1;
        is_in_text = true;
    } else {
        // No string literal yet: insert a fresh quoted path at the cursor.
        text = "";
        rng = cursor..cursor;
        is_in_text = false;
    }
    let path = Path::new(&text);
    let has_root = path.has_root();
    let src_path = id.vpath();
    let base = src_path.resolve(&ctx.analysis.root)?;
    let dst_path = src_path.join(path);
    let mut compl_path = dst_path.as_rootless_path();
    if !compl_path.is_dir() {
        compl_path = compl_path.parent().unwrap_or(Path::new(""));
    }
    log::debug!("compl_path: {src_path:?} + {path:?} -> {compl_path:?}");
    if compl_path.is_absolute() {
        log::warn!("absolute path completion is not supported for security consideration {path:?}");
        return None;
    }
    let dirs = ctx.analysis.root.clone();
    log::debug!("compl_dirs: {dirs:?}");
    // find directory or files in the path
    let mut folder_completions = vec![];
    let mut module_completions = vec![];
    // todo: test it correctly
    for path in ctx.completion_files(p) {
        log::debug!("compl_check_path: {path:?}");
        // diff with root
        let path = dirs.join(path);
        // Skip self smartly
        if path.clean() == base.clean() {
            continue;
        }
        // Label is either root-relative ("/…") when the typed path is
        // rooted, or relative to the current file's directory otherwise.
        let label = if has_root {
            // diff with root
            let w = path.strip_prefix(&ctx.analysis.root).ok()?;
            eco_format!("/{}", unix_slash(w))
        } else {
            let base = base.parent()?;
            let w = pathdiff::diff_paths(&path, base)?;
            unix_slash(&w).into()
        };
        log::debug!("compl_label: {label:?}");
        // NOTE(review): `is_dir` touches the file system per candidate —
        // confirm this is acceptable on large workspaces.
        if path.is_dir() {
            folder_completions.push((label, CompletionKind::Folder));
        } else {
            module_completions.push((label, CompletionKind::File));
        }
    }
    let replace_range = ctx.to_lsp_range(rng, source);
    // Order: plain paths before dot-prefixed ones; among dot-prefixed paths,
    // fewer leading "./"/"../" segments first; then lexicographic.
    let path_priority_cmp = |a: &str, b: &str| {
        // files are more important than dot started paths
        if a.starts_with('.') || b.starts_with('.') {
            // compare consecutive dots and slashes
            let a_prefix = a.chars().take_while(|c| *c == '.' || *c == '/').count();
            let b_prefix = b.chars().take_while(|c| *c == '.' || *c == '/').count();
            if a_prefix != b_prefix {
                return a_prefix.cmp(&b_prefix);
            }
        }
        a.cmp(b)
    };
    module_completions.sort_by(|a, b| path_priority_cmp(&a.0, &b.0));
    folder_completions.sort_by(|a, b| path_priority_cmp(&a.0, &b.0));
    // Zero-padded sort_text preserves our ordering in the client, since
    // clients sort lexicographically.
    let mut sorter = 0;
    let digits = (module_completions.len() + folder_completions.len())
        .to_string()
        .len();
    let completions = module_completions.into_iter().chain(folder_completions);
    Some(
        completions
            .map(|typst_completion| {
                let lsp_snippet = &typst_completion.0;
                // Outside a string literal the inserted path must be quoted.
                let text_edit = CompletionTextEdit::Edit(TextEdit::new(
                    replace_range,
                    if is_in_text {
                        lsp_snippet.to_string()
                    } else {
                        format!(r#""{lsp_snippet}""#)
                    },
                ));
                let sort_text = format!("{sorter:0>digits$}");
                sorter += 1;
                // todo: no all clients support label details
                let res = LspCompletion {
                    label: typst_completion.0.to_string(),
                    kind: Some(completion_kind(typst_completion.1.clone())),
                    detail: None,
                    text_edit: Some(text_edit),
                    // don't sort me
                    sort_text: Some(sort_text),
                    filter_text: Some("".to_owned()),
                    insert_text_format: Some(InsertTextFormat::PLAIN_TEXT),
                    ..Default::default()
                };
                log::debug!("compl_res: {res:?}");
                res
            })
            .collect_vec(),
    )
}

View file

@ -100,7 +100,7 @@ impl ExportActor {
};
info!("RenderActor: received request: {req:?}", req = req);
log::debug!("RenderActor: received request: {req:?}", req = req);
match req {
RenderActorRequest::ChangeConfig(cfg) => {
self.substitute_pattern = cfg.substitute_pattern;

View file

@ -430,6 +430,8 @@ impl Init {
String::from("#"),
String::from("("),
String::from("."),
String::from(","),
String::from(":"),
String::from("/"),
String::from("\""),
String::from("@"),

View file

@ -342,7 +342,7 @@ const markupEnterCode: textmate.Pattern = {
),
enterExpression(
"entity.name.function.hash.typst",
/(?=[\p{XID_Start}_][\p{XID_Continue}_\-]*\()/
/(?=[\p{XID_Start}_][\p{XID_Continue}_\-]*[\(\[])/
),
enterExpression(
"variable.other.readwrite.hash.typst",

View file

@ -63,7 +63,7 @@
# ^ source.typst variable.other.readwrite.typst
# ^^^^ source.typst
>#f[] []
#^ source.typst variable.other.readwrite.hash.typst
#^ source.typst entity.name.function.hash.typst
# ^ source.typst meta.expr.call.typst entity.name.function.typst
# ^ source.typst meta.expr.call.typst meta.brace.square.typst
# ^ source.typst meta.expr.call.typst meta.brace.square.typst
@ -152,7 +152,7 @@
# ^ source.typst meta.expr.call.typst meta.brace.square.typst
# ^ source.typst meta.expr.call.typst meta.brace.square.typst
>#list[]
#^ source.typst variable.other.readwrite.hash.typst
#^ source.typst entity.name.function.hash.typst
# ^^^^ source.typst meta.expr.call.typst entity.name.function.typst
# ^ source.typst meta.expr.call.typst meta.brace.square.typst
# ^ source.typst meta.expr.call.typst meta.brace.square.typst

View file

@ -374,7 +374,7 @@ fn e2e() {
});
let hash = replay_log(&tinymist_binary, &root.join("neovim"));
insta::assert_snapshot!(hash, @"siphash128_13:6906e16c65e7a8b7f24f235c28159069");
insta::assert_snapshot!(hash, @"siphash128_13:9480f1a9fe4bb5166bf916d610af1e37");
}
{
@ -385,7 +385,7 @@ fn e2e() {
});
let hash = replay_log(&tinymist_binary, &root.join("vscode"));
insta::assert_snapshot!(hash, @"siphash128_13:5eb5788d170020cdf29964d202039e33");
insta::assert_snapshot!(hash, @"siphash128_13:fe9362131962ba0be4a3e5bea9ccd8e8");
}
}