docs: add documents to type system (#295)

* docs: simplify a bit

* docs: add documents to type system code

* docs: update

* refactor: rename
Myriad-Dreamin 2024-05-16 11:52:41 +08:00 committed by GitHub
parent c182c5fadc
commit 9835ffe6ab
17 changed files with 969 additions and 819 deletions

View file

@@ -34,7 +34,7 @@ mod type_check_tests {
 use crate::analysis::ty;
 use crate::tests::*;
-use super::{Ty, TypeCheckInfo};
+use super::{Ty, TypeScheme};
 #[test]
 fn test() {
@@ -51,7 +51,7 @@ mod type_check_tests {
 });
 }
-struct TypeCheckSnapshot<'a>(&'a Source, &'a TypeCheckInfo);
+struct TypeCheckSnapshot<'a>(&'a Source, &'a TypeScheme);
 impl fmt::Debug for TypeCheckSnapshot<'_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {

View file

@@ -26,7 +26,7 @@ use typst::{layout::Position, syntax::FileId as TypstFileId};
 use super::{
 analyze_bib, post_type_check, BibInfo, DefUseInfo, DefinitionLink, IdentRef, ImportInfo,
-PathPreference, SigTy, Signature, SignatureTarget, Ty, TypeCheckInfo,
+PathPreference, SigTy, Signature, SignatureTarget, Ty, TypeScheme,
 };
 use crate::adt::interner::Interned;
 use crate::analysis::analyze_dyn_signature;
@@ -49,7 +49,7 @@ pub struct ModuleAnalysisCache {
 source: OnceCell<FileResult<Source>>,
 import_info: OnceCell<Option<Arc<ImportInfo>>>,
 def_use: OnceCell<Option<Arc<DefUseInfo>>>,
-type_check: OnceCell<Option<Arc<TypeCheckInfo>>>,
+type_check: OnceCell<Option<Arc<TypeScheme>>>,
 bibliography: OnceCell<Option<Arc<BibInfo>>>,
 }
@@ -93,15 +93,15 @@ impl ModuleAnalysisCache {
 }
 /// Try to get the type check information of a file.
-pub(crate) fn type_check(&self) -> Option<Arc<TypeCheckInfo>> {
+pub(crate) fn type_check(&self) -> Option<Arc<TypeScheme>> {
 self.type_check.get().cloned().flatten()
 }
 /// Compute the type check information of a file.
 pub(crate) fn compute_type_check(
 &self,
-f: impl FnOnce() -> Option<Arc<TypeCheckInfo>>,
-) -> Option<Arc<TypeCheckInfo>> {
+f: impl FnOnce() -> Option<Arc<TypeScheme>>,
+) -> Option<Arc<TypeScheme>> {
 self.type_check.get_or_init(f).clone()
 }
@@ -308,7 +308,7 @@ impl<Inputs, Output> ComputingNode<Inputs, Output> {
 #[allow(clippy::type_complexity)]
 pub struct ModuleAnalysisGlobalCache {
 def_use_lexical_hierarchy: ComputingNode<Source, EcoVec<LexicalHierarchy>>,
-type_check: Arc<ComputingNode<Source, Arc<TypeCheckInfo>>>,
+type_check: Arc<ComputingNode<Source, Arc<TypeScheme>>>,
 def_use: Arc<ComputingNode<(EcoVec<LexicalHierarchy>, Arc<ImportInfo>), Arc<DefUseInfo>>>,
 bibliography: Arc<ComputingNode<EcoVec<(TypstFileId, Bytes)>, Arc<BibInfo>>>,
@@ -670,7 +670,7 @@ impl<'w> AnalysisContext<'w> {
 }
 /// Get the type check information of a source file.
-pub(crate) fn type_check(&mut self, source: Source) -> Option<Arc<TypeCheckInfo>> {
+pub(crate) fn type_check(&mut self, source: Source) -> Option<Arc<TypeScheme>> {
 let fid = source.id();
 if let Some(res) = self.caches.modules.entry(fid).or_default().type_check() {
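
The cache accessors in this hunk follow a memoize-once pattern: a once-cell slot per module that is filled at most once and cloned on later reads. A minimal, self-contained sketch of that pattern, using std's OnceLock and a placeholder TypeScheme rather than the real tinymist types:

use std::sync::{Arc, OnceLock};

// Placeholder for the per-module analysis result (the real type is `TypeScheme`).
struct TypeScheme;

// A simplified per-module cache: the first computation fills the slot,
// later calls reuse the stored result.
#[derive(Default)]
struct ModuleCache {
    type_check: OnceLock<Option<Arc<TypeScheme>>>,
}

impl ModuleCache {
    /// Return the cached result if the slot has ever been filled.
    fn type_check(&self) -> Option<Arc<TypeScheme>> {
        self.type_check.get().cloned().flatten()
    }

    /// Run `f` at most once and cache its (possibly absent) result.
    fn compute_type_check(
        &self,
        f: impl FnOnce() -> Option<Arc<TypeScheme>>,
    ) -> Option<Arc<TypeScheme>> {
        self.type_check.get_or_init(f).clone()
    }
}

fn main() {
    let cache = ModuleCache::default();
    let first = cache.compute_type_check(|| Some(Arc::new(TypeScheme)));
    // The second closure never runs; the cached Arc is cloned instead.
    let second = cache.compute_type_check(|| None);
    assert!(first.is_some() && second.is_some());
}

Storing an Option inside the slot records that a computation was attempted even when it produced nothing, so a failed analysis is not retried on every later query.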

View file

@@ -13,7 +13,7 @@ use typst::{
 };
 use crate::analysis::{Ty, *};
-use crate::{analysis::TypeCheckInfo, ty::TypeInterace, AnalysisContext};
+use crate::{analysis::TypeScheme, ty::TypeInterface, AnalysisContext};
 use super::{
 resolve_global_value, BuiltinTy, DefUseInfo, FlowVarKind, IdentRef, TypeBounds, TypeVar,
@@ -28,8 +28,8 @@ pub(crate) use apply::*;
 pub(crate) use post_check::*;
 /// Type checking at the source unit level.
-pub(crate) fn type_check(ctx: &mut AnalysisContext, source: Source) -> Option<Arc<TypeCheckInfo>> {
-let mut info = TypeCheckInfo::default();
+pub(crate) fn type_check(ctx: &mut AnalysisContext, source: Source) -> Option<Arc<TypeScheme>> {
+let mut info = TypeScheme::default();
 // Retrieve def-use information for the source.
 let def_use_info = ctx.def_use(source.clone())?;
@@ -64,7 +64,7 @@ struct TypeChecker<'a, 'w> {
 source: Source,
 def_use_info: Arc<DefUseInfo>,
-info: &'a mut TypeCheckInfo,
+info: &'a mut TypeScheme,
 externals: HashMap<DefId, Option<Ty>>,
 mode: InterpretMode,
 }
@@ -98,7 +98,6 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
 TypeVar {
 name: r.name.as_str().into(),
 def: def_id,
-syntax: None,
 },
 init_expr,
 ),
@@ -106,7 +105,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
 }
 let var = self.info.vars.get_mut(&def_id).unwrap();
-TypeCheckInfo::witness_(s, var.as_type(), &mut self.info.mapping);
+TypeScheme::witness_(s, var.as_type(), &mut self.info.mapping);
 Some(var.as_type())
 }
@@ -250,7 +249,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
 }
 }
 (Ty::Dict(lhs), Ty::Dict(rhs)) => {
-for (key, lhs, rhs) in lhs.intersect_keys(rhs) {
+for (key, lhs, rhs) in lhs.common_iface_fields(rhs) {
 log::debug!("constrain record item {key} {lhs:?} ⪯ {rhs:?}");
 self.constrain(lhs, rhs);
 // if !sl.is_detached() {
@@ -280,7 +279,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
 }
 (Ty::Value(lhs), rhs) => {
 log::debug!("constrain value {lhs:?} ⪯ {rhs:?}");
-let _ = TypeCheckInfo::witness_at_most;
+let _ = TypeScheme::witness_at_most;
 // if !lhs.1.is_detached() {
 // self.info.witness_at_most(lhs.1, rhs.clone());
 // }
@@ -402,7 +401,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
 self.weaken(&v.lhs);
 }
 Ty::Binary(v) => {
-let (lhs, rhs) = v.repr();
+let [lhs, rhs] = v.operands();
 self.weaken(lhs);
 self.weaken(rhs);
 }

View file

@@ -29,7 +29,7 @@ impl<'a, 'b, 'w> ApplyChecker for ApplyTypeChecker<'a, 'b, 'w> {
 .map(|v| v.bounds.bounds().read().clone())
 }
-fn call(&mut self, sig: Sig, args: &Interned<ArgsTy>, pol: bool) {
+fn apply(&mut self, sig: Sig, args: &Interned<ArgsTy>, pol: bool) {
 let _ = self.args;
 let (sig, is_partialize) = match sig {

View file

@@ -15,13 +15,13 @@ use crate::{
 AnalysisContext,
 };
-use super::{FieldTy, SigShape, Ty, TypeCheckInfo};
+use super::{FieldTy, SigShape, Ty, TypeScheme};
 /// With given type information, check the type of a literal expression again by
 /// touching the possible related nodes.
 pub(crate) fn post_type_check(
 _ctx: &mut AnalysisContext,
-info: &TypeCheckInfo,
+info: &TypeScheme,
 node: LinkedNode,
 ) -> Option<Ty> {
 let mut worker = PostTypeCheckWorker {
@@ -108,7 +108,7 @@ fn check_signature<'a>(
 struct PostTypeCheckWorker<'a, 'w> {
 ctx: &'a mut AnalysisContext<'w>,
 checked: HashMap<Span, Option<Ty>>,
-info: &'a TypeCheckInfo,
+info: &'a TypeScheme,
 }
 impl<'a, 'w> PostTypeCheckWorker<'a, 'w> {

View file

@@ -143,7 +143,7 @@ impl SemanticRequest for SignatureHelpRequest {
 label.push(')');
 let ret = type_sig
 .as_ref()
-.and_then(|sig| sig.ret.as_ref())
+.and_then(|sig| sig.body.as_ref())
 .or_else(|| sig.primary().ret_ty.as_ref());
 if let Some(ret_ty) = ret {
 label.push_str(" -> ");
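
For context, the label edited in this hunk is the signature-help string shown to the user. A toy sketch of how such a label could be assembled (hypothetical helper, heavily simplified; the real request also falls back to the declared return type, as the hunk shows):

/// Build a display label like `text(body: content) -> content`.
fn build_label(name: &str, params: &[(&str, &str)], ret: Option<&str>) -> String {
    let mut label = String::new();
    label.push_str(name);
    label.push('(');
    let rendered: Vec<String> = params.iter().map(|(p, ty)| format!("{p}: {ty}")).collect();
    label.push_str(&rendered.join(", "));
    label.push(')');
    // Prefer the type-checked return type; fall back to the declared one.
    if let Some(ret_ty) = ret {
        label.push_str(" -> ");
        label.push_str(ret_ty);
    }
    label
}

fn main() {
    println!("{}", build_label("text", &[("body", "content")], Some("content")));
}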

View file

@@ -5,7 +5,7 @@ use crate::{adt::interner::Interned, ty::def::*};
 use super::{Sig, SigChecker, SigSurfaceKind};
 pub trait ApplyChecker {
-fn call(&mut self, sig: Sig, arguments: &Interned<ArgsTy>, pol: bool);
+fn apply(&mut self, sig: Sig, arguments: &Interned<ArgsTy>, pol: bool);
 fn bound_of_var(&mut self, _var: &Interned<TypeVar>, _pol: bool) -> Option<TypeBounds> {
 None
@@ -13,11 +13,12 @@ pub trait ApplyChecker {
 }
 impl Ty {
+/// Call the given type with the given arguments.
 pub fn call(&self, args: &Interned<ArgsTy>, pol: bool, checker: &mut impl ApplyChecker) {
 self.apply(SigSurfaceKind::Call, args, pol, checker)
 }
-#[allow(dead_code)]
+/// Get the element type of the given type.
 pub fn element_of(&self, pol: bool, checker: &mut impl ApplyChecker) {
 static EMPTY_ARGS: Lazy<Interned<ArgsTy>> = Lazy::new(|| ArgsTy::default().into());
@@ -46,18 +47,17 @@ impl<'a, T: ApplyChecker> ApplySigChecker<'a, T> {
 impl<'a, T: ApplyChecker> SigChecker for ApplySigChecker<'a, T> {
 fn check(&mut self, cano_sig: Sig, ctx: &mut super::SigCheckContext, pol: bool) -> Option<()> {
-let args = &ctx.args;
-let partial_sig = if args.is_empty() {
+// Bind the arguments to the canonical signature.
+let partial_sig = if ctx.args.is_empty() {
 cano_sig
 } else {
 Sig::With {
 sig: &cano_sig,
-withs: args,
+withs: &ctx.args,
 at: &ctx.at,
 }
 };
-self.0.call(partial_sig, self.1, pol);
+self.0.apply(partial_sig, self.1, pol);
 Some(())
 }
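
The rename from `call` to `apply` in this trait separates the checker callback from `Ty::call`, which drives it. A minimal sketch of that callback shape, with `Sig` and `ArgsTy` stubbed out as placeholders (they are interned types in the real crate):

// Placeholder stand-ins for the interned signature and argument types.
#[derive(Debug, Clone)]
struct Sig(&'static str);
#[derive(Debug, Clone)]
struct ArgsTy(Vec<&'static str>);

// The checker is driven by the type walker: for every callable signature
// reachable from a type, `apply` is invoked with the bound arguments.
trait ApplyChecker {
    fn apply(&mut self, sig: Sig, arguments: &ArgsTy, pol: bool);
}

// A collector in the spirit of the crate's `CallCollector` test:
// it records which signatures were applied.
#[derive(Default)]
struct CallCollector(Vec<String>);

impl ApplyChecker for CallCollector {
    fn apply(&mut self, sig: Sig, arguments: &ArgsTy, _pol: bool) {
        self.0.push(format!("{sig:?} applied to {arguments:?}"));
    }
}

fn main() {
    let mut checker = CallCollector::default();
    // In the real crate this would be driven by `Ty::call(&args, pol, &mut checker)`.
    checker.apply(Sig("text"), &ArgsTy(vec!["[hi]"]), true);
    println!("{:?}", checker.0);
}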

View file

@@ -17,10 +17,12 @@ where
 }
 impl Ty {
+/// Check if the given type has bounds (is combinated).
 pub fn has_bounds(&self) -> bool {
 matches!(self, Ty::Union(_) | Ty::Let(_) | Ty::Var(_))
 }
+/// Profile the bounds of the given type.
 pub fn bounds(&self, pol: bool, checker: &mut impl BoundChecker) {
 let mut worker = BoundCheckContext;
 worker.ty(self, pol, checker);

View file

@@ -11,7 +11,7 @@ use typst::{
 use crate::{adt::interner::Interned, ty::*};
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
-pub(crate) enum PathPreference {
+pub enum PathPreference {
 None,
 Special,
 Source,
@@ -84,7 +84,7 @@ impl PathPreference {
 }
 impl Ty {
-pub fn from_return_site(f: &Func, c: &'_ CastInfo) -> Option<Self> {
+pub(crate) fn from_return_site(f: &Func, c: &'_ CastInfo) -> Option<Self> {
 use typst::foundations::func::Repr;
 match f.inner() {
 Repr::Element(e) => return Some(Ty::Builtin(BuiltinTy::Element(*e))),
@@ -95,7 +95,7 @@ impl Ty {
 let ty = match c {
 CastInfo::Any => Ty::Any,
-CastInfo::Value(v, doc) => Ty::Value(InsTy::new_doc(v.clone(), doc)),
+CastInfo::Value(v, doc) => Ty::Value(InsTy::new_doc(v.clone(), *doc)),
 CastInfo::Type(ty) => Ty::Builtin(BuiltinTy::Type(*ty)),
 CastInfo::Union(e) => {
 // flat union
@@ -122,7 +122,7 @@ impl Ty {
 let ty = match &s {
 CastInfo::Any => Ty::Any,
-CastInfo::Value(v, doc) => Ty::Value(InsTy::new_doc(v.clone(), doc)),
+CastInfo::Value(v, doc) => Ty::Value(InsTy::new_doc(v.clone(), *doc)),
 CastInfo::Type(ty) => Ty::Builtin(BuiltinTy::Type(*ty)),
 CastInfo::Union(e) => {
 // flat union
@@ -159,7 +159,7 @@ impl<'a> Iterator for UnionIter<'a> {
 }
 #[derive(Clone, Hash, PartialEq, Eq)]
-pub(crate) enum BuiltinTy {
+pub enum BuiltinTy {
 Clause,
 Undef,
 Content,

File diff suppressed because it is too large.

View file

@@ -5,7 +5,8 @@ use typst::foundations::Repr;
 use crate::{adt::interner::Interned, analysis::*, ty::def::*};
-impl TypeCheckInfo {
+impl TypeScheme {
+/// Describe the given type with the given type scheme.
 pub fn describe(&self, ty: &Ty) -> Option<String> {
 let mut worker = TypeDescriber::default();
 worker.describe_root(ty)
@@ -13,6 +14,7 @@ impl TypeCheckInfo {
 }
 impl Ty {
+/// Describe the given type.
 pub fn describe(&self) -> Option<String> {
 let mut worker = TypeDescriber::default();
 worker.describe_root(self)
@@ -80,7 +82,7 @@ impl TypeDescriber {
 }
 res.push_str(") => ");
 res.push_str(
-f.ret
+f.body
 .as_ref()
 .and_then(|ret| self.describe_root(ret))
 .as_deref()

View file

@@ -13,7 +13,7 @@ mod subst;
 pub(crate) use apply::*;
 pub(crate) use bound::*;
 pub(crate) use builtin::*;
-pub(crate) use def::*;
+pub use def::*;
 pub(crate) use mutate::*;
 pub(crate) use sig::*;

View file

@@ -32,17 +32,21 @@ pub trait MutateDriver {
 }
 fn mutate_func(&mut self, ty: &Interned<SigTy>, pol: bool) -> Option<SigTy> {
-let types = self.mutate_vec(&ty.types, pol);
-let ret = self.mutate_option(ty.ret.as_ref(), pol);
+let types = self.mutate_vec(&ty.inputs, pol);
+let ret = self.mutate_option(ty.body.as_ref(), pol);
 if types.is_none() && ret.is_none() {
 return None;
 }
 let sig = ty.as_ref().clone();
-let types = types.unwrap_or_else(|| ty.types.clone());
-let ret = ret.unwrap_or_else(|| ty.ret.clone());
-Some(SigTy { types, ret, ..sig })
+let types = types.unwrap_or_else(|| ty.inputs.clone());
+let ret = ret.unwrap_or_else(|| ty.body.clone());
+Some(SigTy {
+inputs: types,
+body: ret,
+..sig
+})
 }
 fn mutate_record(&mut self, ty: &Interned<RecordTy>, pol: bool) -> Option<RecordTy> {
@@ -127,6 +131,7 @@ where
 }
 impl Ty {
+/// Mutate the given type.
 pub fn mutate(&self, pol: bool, checker: &mut impl MutateDriver) -> Option<Ty> {
 let mut worker = Mutator;
 worker.ty(self, pol, checker)
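
The `mutate_func` hunk above uses a copy-on-change idiom: return None when nothing changed so callers can keep sharing the original (interned) value, and otherwise rebuild it with struct-update syntax. A small sketch of the idiom with a placeholder `SigTy` carrying the renamed `inputs`/`body` fields:

#[derive(Clone, Debug)]
struct SigTy {
    inputs: Vec<String>,
    body: Option<String>,
    name: String,
}

/// Uppercase every type name; return `Some(new)` only if something changed.
fn mutate_sig(ty: &SigTy) -> Option<SigTy> {
    let inputs: Vec<String> = ty.inputs.iter().map(|s| s.to_uppercase()).collect();
    let body = ty.body.as_ref().map(|s| s.to_uppercase());
    if inputs == ty.inputs && body == ty.body {
        // Nothing changed: signal the caller to keep the old value.
        return None;
    }
    // Struct-update syntax copies the untouched fields from the old value.
    Some(SigTy {
        inputs,
        body,
        ..ty.clone()
    })
}

fn main() {
    let sig = SigTy {
        inputs: vec!["int".into(), "str".into()],
        body: Some("content".into()),
        name: "text".into(),
    };
    println!("{:?}", mutate_sig(&sig));
}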

View file

@@ -90,6 +90,7 @@ where
 }
 impl Ty {
+/// Iterate over the signatures of the given type.
 pub fn sig_surface(&self, pol: bool, sig_kind: SigSurfaceKind, checker: &mut impl SigChecker) {
 let context = SigCheckContext {
 sig_kind,
@@ -104,6 +105,7 @@ impl Ty {
 worker.ty(self, pol);
 }
+/// Get the signature representation of the given type.
 pub fn sig_repr(&self, pol: bool) -> Option<Interned<SigTy>> {
 // todo: union sig
 // let mut pos = vec![];
@@ -248,7 +250,7 @@ impl BoundChecker for SigCheckDriver<'_> {
 }
 }
-struct MethodDriver<'a, 'b>(&'a mut SigCheckDriver<'b>, &'a Interned<str>);
+struct MethodDriver<'a, 'b>(&'a mut SigCheckDriver<'b>, &'a StrRef);
 impl<'a, 'b> MethodDriver<'a, 'b> {
 fn is_binder(&self) -> bool {

View file

@@ -17,7 +17,8 @@ struct CompactTy {
 is_final: bool,
 }
-impl TypeCheckInfo {
+impl TypeScheme {
+/// Simplify (Canonicalize) the given type with the given type scheme.
 pub fn simplify(&self, ty: Ty, principal: bool) -> Ty {
 let mut c = self.cano_cache.lock();
 let c = &mut *c;
@@ -94,7 +95,7 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
 for p in f.inputs() {
 self.analyze(p, !pol);
 }
-if let Some(ret) = &f.ret {
+if let Some(ret) = &f.body {
 self.analyze(ret, pol);
 }
 }
@@ -124,7 +125,7 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
 }
 Ty::Unary(u) => self.analyze(&u.lhs, pol),
 Ty::Binary(b) => {
-let (lhs, rhs) = b.repr();
+let [lhs, rhs] = b.operands();
 self.analyze(lhs, pol);
 self.analyze(rhs, pol);
 }
@@ -203,7 +204,7 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
 Ty::Args(args) => Ty::Args(self.transform_sig(args, !pol)),
 Ty::Unary(u) => Ty::Unary(TypeUnary::new(u.op, self.transform(&u.lhs, pol).into())),
 Ty::Binary(b) => {
-let (lhs, rhs) = b.repr();
+let [lhs, rhs] = b.operands();
 let lhs = self.transform(lhs, pol);
 let rhs = self.transform(rhs, pol);
@@ -272,9 +273,9 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
 fn transform_sig(&mut self, sig: &SigTy, pol: bool) -> Interned<SigTy> {
 let mut sig = sig.clone();
-sig.types = self.transform_seq(&sig.types, !pol);
-if let Some(ret) = &sig.ret {
-sig.ret = Some(self.transform(ret, pol));
+sig.inputs = self.transform_seq(&sig.inputs, !pol);
+if let Some(ret) = &sig.body {
+sig.body = Some(self.transform(ret, pol));
 }
 // todo: we can reduce one clone by early compare on sig.types

View file

@@ -42,7 +42,7 @@ impl<'a> Sig<'a> {
 }
 }
-Some((arguments, sig.ret.clone()))
+Some((arguments, sig.body.clone()))
 }
 }
@@ -96,7 +96,7 @@ mod tests {
 struct CallCollector(Vec<Ty>);
 impl ApplyChecker for CallCollector {
-fn call(
+fn apply(
 &mut self,
 sig: super::Sig,
 arguments: &crate::adt::interner::Interned<super::ArgsTy>,

View file

@@ -23,7 +23,7 @@
 it.lines.at(0).body.children.slice(0, -2).join()
 }
-This document gives an overview of tinymist service, which provides a single integrated language service for Typst. This document doesn't dive in details but doesn't avoid showing code if necessary.
+This document gives an overview of tinymist service, which provides a single integrated language service for Typst. This document doesn't dive in details unless necessary.
 == Principles
@@ -61,13 +61,13 @@ Four principles are followed:
 A _Hover_ request is taken as example of that events.
-A global unique `LspActor` takes the event and mutates a global server state. If the event requires some additional code analysis, it is converted into an analysis request, #link("https://github.com/search?q=repo%3AMyriad-Dreamin/tinymist%20CompilerQueryRequest&type=code")[```rs struct CompilerQueryRequest```], and pushed to the actors owning compiler resources. Otherwise, `LspActor` responds to the event according to its state. Obviously, the _Hover_ on code request requires code analysis.
+A global unique `LspActor` takes the event and _mutates_ a global server state by the event. If the event requires some additional code analysis, it is converted into an analysis request, #link("https://github.com/search?q=repo%3AMyriad-Dreamin/tinymist%20CompilerQueryRequest&type=code")[```rs struct CompilerQueryRequest```], and pushed to the actors owning compiler resources. Otherwise, `LspActor` responds to the event directly. Obviously, the _Hover_ on code request requires code analysis.
-The `CompileServerActor`s are created for each workspace and main entries (files/documents) in workspaces. When a compiler query is coming, a subset of that actors will take it and give project-specific responses, combining into a final concluded LSP response. Some analysis requests even require rendering features, and those requests will be pushed to the actors owning rendering resources. If you enable the periscope feature, a `Hover` on content request requires rendering on documents.
+The `CompileServerActor`s are created for workspaces and main entries (files/documents) in workspaces. When a compiler query is coming, a subset of that actors will take it and give project-specific responses, combining into a final concluded LSP response. Some analysis requests even require rendering features, and those requests will be pushed to the actors owning rendering resources. If you enable the periscope feature, a `Hover` on content request requires rendering on documents.
-The `RenderActor`s don't do compilations, but own project-specific rendering cache. They are designed for rendering docuemnt in _low latency_. This is the last sink of `Hover` requests. A `RenderActor` will receive an additional compiled `Document` object, and render the compiled frames in needed. After finishing rendering, a response attached with the rendered picture is sent to the LSP response channel intermediately.
+The `RenderActor`s don't do compilations, but own project-specific rendering cache. They are designed for rendering documents in _low latency_. This is the last sink of `Hover` requests. A `RenderActor` will receive an additional compiled `Document` object, and render the compiled frames in needed. After finishing rendering, a response attached with the rendered picture is sent to the LSP response channel intermediately.
-/ Multi-level Analysis: The most critical features are lsp functions, built on the #link("https://github.com/Myriad-Dreamin/tinymist/tree/main/crates/tinymist-query")[tinymist-query] crate. To achieve low latency, functions are classified into different levels of analysis.
+/ Multi-level Analysis: The most critical features are lsp functions, built on the #link("https://github.com/Myriad-Dreamin/tinymist/tree/main/crates/tinymist-query")[tinymist-query] crate. To achieve higher concurrency, functions are classified into different levels of analysis.
 // + `query_token_cache` `TokenRequest` locks and accesses token cache.
 + `query_source` `SyntaxRequest` locks and accesses a single source unit.
 + `query_world` `SemanticRequest` locks and accesses multiple source units.
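
To illustrate the level split described in this last hunk: a syntax-level request only needs one parsed source, while a semantic-level request needs an analysis context over multiple sources. A rough sketch of that idea with simplified, hypothetical trait shapes (the real `SyntaxRequest`/`SemanticRequest` traits in tinymist-query differ in detail):

// Hypothetical, simplified stand-ins for the analysis inputs.
struct Source;          // a single parsed source unit
struct AnalysisContext; // access to multiple source units (the project world)

// Level: `query_source` reads only one source unit, cheap and highly concurrent.
trait SyntaxRequest {
    type Response;
    fn request(self, source: &Source) -> Option<Self::Response>;
}

// Level: `query_world` locks the project world to run cross-file analysis.
trait SemanticRequest {
    type Response;
    fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response>;
}

// A folding-range-like request can stay at the syntax level...
struct FoldingRangeLike;
impl SyntaxRequest for FoldingRangeLike {
    type Response = Vec<(usize, usize)>;
    fn request(self, _source: &Source) -> Option<Self::Response> {
        Some(vec![(0, 10)])
    }
}

// ...while signature help needs semantic (type) information.
struct SignatureHelpLike;
impl SemanticRequest for SignatureHelpLike {
    type Response = String;
    fn request(self, _ctx: &mut AnalysisContext) -> Option<Self::Response> {
        Some("(body: content) -> content".into())
    }
}

fn main() {
    println!("{:?}", FoldingRangeLike.request(&Source));
    println!("{:?}", SignatureHelpLike.request(&mut AnalysisContext));
}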