mirror of
https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-07-24 13:13:43 +00:00
refactor: rename local variables and some types (#1023)
* fix: naming * refactor: rename local variables and some types
This commit is contained in:
parent
ad0c1e8aca
commit
be1d659719
52 changed files with 805 additions and 787 deletions
|
@ -31,12 +31,12 @@ fn main() -> typlite::Result<()> {
|
|||
.as_ref()
|
||||
.ok_or("Missing required argument: INPUT")?;
|
||||
let output = match args.output {
|
||||
Some(e) if e == "-" => None,
|
||||
Some(e) => Some(PathBuf::from(e)),
|
||||
Some(stdout_path) if stdout_path == "-" => None,
|
||||
Some(output_path) => Some(PathBuf::from(output_path)),
|
||||
None => Some(Path::new(input).with_extension("md")),
|
||||
};
|
||||
|
||||
let universe = args.compile.resolve().map_err(|e| format!("{e:?}"))?;
|
||||
let universe = args.compile.resolve().map_err(|err| format!("{err:?}"))?;
|
||||
let world = universe.snapshot();
|
||||
|
||||
let converter = Typlite::new(Arc::new(world)).with_library(lib());
|
||||
|
@ -45,8 +45,8 @@ fn main() -> typlite::Result<()> {
|
|||
match (conv, output) {
|
||||
(Ok(conv), None) => println!("{}", conv),
|
||||
(Ok(conv), Some(output)) => std::fs::write(output, conv.as_str()).unwrap(),
|
||||
(Err(e), ..) => {
|
||||
eprintln!("{e}");
|
||||
(Err(err), ..) => {
|
||||
eprintln!("{err}");
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,10 +35,10 @@ pub fn resolve_id_by_path(
|
|||
pub fn find_source_by_expr(
|
||||
world: &dyn World,
|
||||
current: TypstFileId,
|
||||
e: ast::Expr,
|
||||
import_source: ast::Expr,
|
||||
) -> Option<Source> {
|
||||
// todo: this could be valid: import("path.typ"), where v is parenthesized
|
||||
match e {
|
||||
match import_source {
|
||||
ast::Expr::Str(s) => world
|
||||
.source(resolve_id_by_path(world, current, s.get().as_str())?)
|
||||
.ok(),
|
||||
|
|
|
@ -85,8 +85,8 @@ impl<T> RevisionManager<T> {
|
|||
let slot_base = self
|
||||
.slots
|
||||
.iter()
|
||||
.filter(|e| e.revision <= revision.get())
|
||||
.reduce(|a, b| if a.revision > b.revision { a } else { b });
|
||||
.filter(|slot| slot.revision <= revision.get())
|
||||
.reduce(|x, y| if x.revision > y.revision { x } else { y });
|
||||
|
||||
if let Some(slot) = slot_base {
|
||||
if slot.revision == revision.get() {
|
||||
|
@ -120,7 +120,7 @@ impl<T> RevisionManager<T> {
|
|||
// if there is no locked revision, we only keep the latest revision
|
||||
self.slots
|
||||
.iter()
|
||||
.map(|e| e.revision)
|
||||
.map(|slot| slot.revision)
|
||||
.max())
|
||||
}
|
||||
}
|
||||
|
|
|
@ -85,15 +85,15 @@ impl LspWorldExt for tinymist_world::LspWorld {
|
|||
Ok(TypstFileId::new(None, VirtualPath::new(relative_path)))
|
||||
}
|
||||
|
||||
fn source_by_path(&self, p: &Path) -> FileResult<Source> {
|
||||
fn source_by_path(&self, path: &Path) -> FileResult<Source> {
|
||||
// todo: source cache
|
||||
self.source(self.file_id_by_path(p)?)
|
||||
self.source(self.file_id_by_path(path)?)
|
||||
}
|
||||
|
||||
fn uri_for_id(&self, id: TypstFileId) -> Result<Url, FileError> {
|
||||
self.path_for_id(id).and_then(|e| {
|
||||
path_to_url(&e)
|
||||
.map_err(|e| FileError::Other(Some(eco_format!("convert to url: {e:?}"))))
|
||||
fn uri_for_id(&self, fid: TypstFileId) -> Result<Url, FileError> {
|
||||
self.path_for_id(fid).and_then(|path| {
|
||||
path_to_url(&path)
|
||||
.map_err(|err| FileError::Other(Some(eco_format!("convert to url: {err:?}"))))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -118,10 +118,10 @@ mod matcher_tests {
|
|||
let root = LinkedNode::new(source.root());
|
||||
let node = root.leaf_at_compat(pos).unwrap();
|
||||
|
||||
let result = classify_def(node).map(|e| format!("{:?}", e.node().range()));
|
||||
let result = result.as_deref().unwrap_or("<nil>");
|
||||
let snap = classify_def(node).map(|def| format!("{:?}", def.node().range()));
|
||||
let snap = snap.as_deref().unwrap_or("<nil>");
|
||||
|
||||
assert_snapshot!(result);
|
||||
assert_snapshot!(snap);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -201,17 +201,17 @@ mod expr_tests {
|
|||
decl: ident,
|
||||
step,
|
||||
root,
|
||||
val,
|
||||
term,
|
||||
} = expr.as_ref();
|
||||
|
||||
format!(
|
||||
"{} -> {}, root {}, val: {val:?}",
|
||||
"{} -> {}, root {}, val: {term:?}",
|
||||
source.show_expr(&Expr::Decl(ident.clone())),
|
||||
step.as_ref()
|
||||
.map(|e| source.show_expr(e))
|
||||
.map(|expr| source.show_expr(expr))
|
||||
.unwrap_or_default(),
|
||||
root.as_ref()
|
||||
.map(|e| source.show_expr(e))
|
||||
.map(|expr| source.show_expr(expr))
|
||||
.unwrap_or_default()
|
||||
)
|
||||
})
|
||||
|
@ -300,7 +300,7 @@ mod type_check_tests {
|
|||
|
||||
use crate::tests::*;
|
||||
|
||||
use super::{Ty, TypeScheme};
|
||||
use super::{Ty, TypeInfo};
|
||||
|
||||
#[test]
|
||||
fn test() {
|
||||
|
@ -314,7 +314,7 @@ mod type_check_tests {
|
|||
});
|
||||
}
|
||||
|
||||
struct TypeCheckSnapshot<'a>(&'a Source, &'a TypeScheme);
|
||||
struct TypeCheckSnapshot<'a>(&'a Source, &'a TypeInfo);
|
||||
|
||||
impl fmt::Debug for TypeCheckSnapshot<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
|
@ -322,21 +322,21 @@ mod type_check_tests {
|
|||
let info = self.1;
|
||||
let mut vars = info
|
||||
.vars
|
||||
.iter()
|
||||
.map(|e| (e.1.name(), e.1))
|
||||
.values()
|
||||
.map(|bounds| (bounds.name(), bounds))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
vars.sort_by(|x, y| x.1.var.cmp(&y.1.var));
|
||||
|
||||
for (name, var) in vars {
|
||||
writeln!(f, "{:?} = {:?}", name, info.simplify(var.as_type(), true))?;
|
||||
for (name, bounds) in vars {
|
||||
writeln!(f, "{name:?} = {:?}", info.simplify(bounds.as_type(), true))?;
|
||||
}
|
||||
|
||||
writeln!(f, "=====")?;
|
||||
let mut mapping = info
|
||||
.mapping
|
||||
.iter()
|
||||
.map(|e| (source.range(*e.0).unwrap_or_default(), e.1))
|
||||
.map(|pair| (source.range(*pair.0).unwrap_or_default(), pair.1))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
mapping.sort_by(|x, y| {
|
||||
|
@ -378,14 +378,14 @@ mod post_type_check_tests {
|
|||
let text = node.get().clone().into_text();
|
||||
|
||||
let result = ctx.type_check(&source);
|
||||
let literal_type = post_type_check(ctx.shared_(), &result, node);
|
||||
let post_ty = post_type_check(ctx.shared_(), &result, node);
|
||||
|
||||
with_settings!({
|
||||
description => format!("Check on {text:?} ({pos:?})"),
|
||||
}, {
|
||||
let literal_type = literal_type.map(|e| format!("{e:#?}"))
|
||||
let post_ty = post_ty.map(|ty| format!("{ty:#?}"))
|
||||
.unwrap_or_else(|| "<nil>".to_string());
|
||||
assert_snapshot!(literal_type);
|
||||
assert_snapshot!(post_ty);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
@ -413,15 +413,15 @@ mod type_describe_tests {
|
|||
let node = root.leaf_at_compat(pos + 1).unwrap();
|
||||
let text = node.get().clone().into_text();
|
||||
|
||||
let result = ctx.type_check(&source);
|
||||
let literal_type = post_type_check(ctx.shared_(), &result, node);
|
||||
let ti = ctx.type_check(&source);
|
||||
let post_ty = post_type_check(ctx.shared_(), &ti, node);
|
||||
|
||||
with_settings!({
|
||||
description => format!("Check on {text:?} ({pos:?})"),
|
||||
}, {
|
||||
let literal_type = literal_type.and_then(|e| e.describe())
|
||||
let post_ty = post_ty.and_then(|ty| ty.describe())
|
||||
.unwrap_or_else(|| "<nil>".into());
|
||||
assert_snapshot!(literal_type);
|
||||
assert_snapshot!(post_ty);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
|
|
@ -70,11 +70,11 @@ impl YamlBib {
|
|||
loader
|
||||
.content
|
||||
.iter()
|
||||
.flat_map(|(k, span)| [k.span.start, k.span.end, span.start, span.end])
|
||||
.map(|e| (e, None)),
|
||||
.flat_map(|(name, span)| [name.span.start, name.span.end, span.start, span.end])
|
||||
.map(|offset| (offset, None)),
|
||||
);
|
||||
span_mapper.sort_by_key(|e| e.0);
|
||||
span_mapper.dedup_by_key(|e| e.0);
|
||||
span_mapper.sort_by_key(|(offset, _)| *offset);
|
||||
span_mapper.dedup_by_key(|(offset, _)| *offset);
|
||||
let mut span_cursor = 0;
|
||||
let mut byte_offset = 0;
|
||||
for (off, ch) in content.chars().chain(Some('\0')).enumerate() {
|
||||
|
@ -170,14 +170,14 @@ impl BibWorker {
|
|||
}
|
||||
"bib" => {
|
||||
let bibliography = biblatex::RawBibliography::parse(content).ok()?;
|
||||
for e in bibliography.entries {
|
||||
let k = e.v.key;
|
||||
let span = e.span;
|
||||
for entry in bibliography.entries {
|
||||
let name = entry.v.key;
|
||||
let span = entry.span;
|
||||
self.info.entries.insert(
|
||||
k.v.to_owned(),
|
||||
name.v.to_owned(),
|
||||
BibEntry {
|
||||
file_id: path,
|
||||
name_span: k.span,
|
||||
name_span: name.span,
|
||||
span,
|
||||
},
|
||||
);
|
||||
|
|
|
@ -112,13 +112,13 @@ impl<'a> CodeActionWorker<'a> {
|
|||
}
|
||||
|
||||
fn heading_actions(&mut self, node: &LinkedNode) -> Option<()> {
|
||||
let h = node.cast::<ast::Heading>()?;
|
||||
let depth = h.depth().get();
|
||||
let heading = node.cast::<ast::Heading>()?;
|
||||
let depth = heading.depth().get();
|
||||
|
||||
// Only the marker is replaced, for minimal text change
|
||||
let marker = node
|
||||
.children()
|
||||
.find(|e| e.kind() == SyntaxKind::HeadingMarker)?;
|
||||
.find(|child| child.kind() == SyntaxKind::HeadingMarker)?;
|
||||
let marker_range = marker.range();
|
||||
|
||||
if depth > 1 {
|
||||
|
@ -161,8 +161,9 @@ impl<'a> CodeActionWorker<'a> {
|
|||
|
||||
let mut chs = node.children();
|
||||
let chs = chs.by_ref();
|
||||
let first_dollar = chs.take(1).find(|e| e.kind() == SyntaxKind::Dollar)?;
|
||||
let last_dollar = chs.rev().take(1).find(|e| e.kind() == SyntaxKind::Dollar)?;
|
||||
let is_dollar = |node: &LinkedNode| node.kind() == SyntaxKind::Dollar;
|
||||
let first_dollar = chs.take(1).find(is_dollar)?;
|
||||
let last_dollar = chs.rev().take(1).find(is_dollar)?;
|
||||
|
||||
// Erroneous equation is skipped.
|
||||
// For example, some unclosed equation.
|
||||
|
@ -242,15 +243,15 @@ impl<'a> CodeActionWorker<'a> {
|
|||
};
|
||||
|
||||
// Prepare actions
|
||||
let a1 = if is_block {
|
||||
let toggle_action = if is_block {
|
||||
rewrite_action("Convert to inline equation", "")?
|
||||
} else {
|
||||
rewrite_action("Convert to block equation", " ")?
|
||||
};
|
||||
let a2 = rewrite_action("Convert to multiple-line block equation", "\n");
|
||||
let block_action = rewrite_action("Convert to multiple-line block equation", "\n");
|
||||
|
||||
self.actions.push(a1);
|
||||
if let Some(a2) = a2 {
|
||||
self.actions.push(toggle_action);
|
||||
if let Some(a2) = block_action {
|
||||
self.actions.push(a2);
|
||||
}
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ impl Definition {
|
|||
|
||||
/// The location of the definition.
|
||||
// todo: cache
|
||||
pub(crate) fn def_at(&self, ctx: &SharedContext) -> Option<(TypstFileId, Range<usize>)> {
|
||||
pub(crate) fn location(&self, ctx: &SharedContext) -> Option<(TypstFileId, Range<usize>)> {
|
||||
let fid = self.decl.file_id()?;
|
||||
let span = self.decl.span();
|
||||
let range = (!span.is_detached()).then(|| ctx.source_by_id(fid).ok()?.range(span));
|
||||
|
@ -83,8 +83,8 @@ pub fn definition(
|
|||
};
|
||||
|
||||
let introspector = &document?.document.introspector;
|
||||
find_bib_definition(ctx, introspector, name)
|
||||
.or_else(|| find_ref_definition(introspector, name, ref_expr))
|
||||
bib_definition(ctx, introspector, name)
|
||||
.or_else(|| ref_definition(introspector, name, ref_expr))
|
||||
}
|
||||
SyntaxClass::Label {
|
||||
node: _,
|
||||
|
@ -102,9 +102,9 @@ fn find_ident_definition(
|
|||
// Lexical reference
|
||||
let ident_store = use_site.clone();
|
||||
let ident_ref = match ident_store.cast::<ast::Expr>()? {
|
||||
ast::Expr::Ident(e) => e.span(),
|
||||
ast::Expr::MathIdent(e) => e.span(),
|
||||
ast::Expr::FieldAccess(s) => return find_field_definition(ctx, s),
|
||||
ast::Expr::Ident(ident) => ident.span(),
|
||||
ast::Expr::MathIdent(ident) => ident.span(),
|
||||
ast::Expr::FieldAccess(field_access) => return field_definition(ctx, field_access),
|
||||
_ => {
|
||||
crate::log_debug_ct!("unsupported kind {kind:?}", kind = use_site.kind());
|
||||
Span::detached()
|
||||
|
@ -114,8 +114,8 @@ fn find_ident_definition(
|
|||
DefResolver::new(ctx, source)?.of_span(ident_ref)
|
||||
}
|
||||
|
||||
fn find_field_definition(ctx: &Arc<SharedContext>, fa: ast::FieldAccess<'_>) -> Option<Definition> {
|
||||
let span = fa.span();
|
||||
fn field_definition(ctx: &Arc<SharedContext>, node: ast::FieldAccess) -> Option<Definition> {
|
||||
let span = node.span();
|
||||
let ty = ctx.type_of_span(span)?;
|
||||
crate::log_debug_ct!("find_field_definition[{span:?}]: {ty:?}");
|
||||
|
||||
|
@ -134,22 +134,22 @@ fn find_field_definition(ctx: &Arc<SharedContext>, fa: ast::FieldAccess<'_>) ->
|
|||
let source = ctx.source_by_id(s.id()?).ok()?;
|
||||
DefResolver::new(ctx, &source)?.of_span(s)
|
||||
}
|
||||
DocSource::Ins(ins) => value_to_def(ins.val.clone(), || Some(fa.field().get().into())),
|
||||
DocSource::Ins(ins) => value_to_def(ins.val.clone(), || Some(node.field().get().into())),
|
||||
DocSource::Builtin(..) => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn find_bib_definition(
|
||||
fn bib_definition(
|
||||
ctx: &Arc<SharedContext>,
|
||||
introspector: &Introspector,
|
||||
key: &str,
|
||||
) -> Option<Definition> {
|
||||
let bib_elem = BibliographyElem::find(introspector.track()).ok()?;
|
||||
let Value::Array(arr) = bib_elem.path().clone().into_value() else {
|
||||
let Value::Array(paths) = bib_elem.path().clone().into_value() else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let bib_paths = arr.into_iter().map(Value::cast).flat_map(|e| e.ok());
|
||||
let bib_paths = paths.into_iter().flat_map(|path| path.cast().ok());
|
||||
let bib_info = ctx.analyze_bib(bib_elem.span(), bib_paths)?;
|
||||
|
||||
let entry = bib_info.entries.get(key)?;
|
||||
|
@ -160,7 +160,7 @@ fn find_bib_definition(
|
|||
Some(Definition::new(decl.into(), None))
|
||||
}
|
||||
|
||||
fn find_ref_definition(
|
||||
fn ref_definition(
|
||||
introspector: &Introspector,
|
||||
name: &str,
|
||||
ref_expr: ast::Expr,
|
||||
|
@ -307,7 +307,7 @@ fn value_to_def(value: Value, name: impl FnOnce() -> Option<Interned<str>>) -> O
|
|||
let val = Ty::Value(InsTy::new(value.clone()));
|
||||
Some(match value {
|
||||
Value::Func(func) => {
|
||||
let name = func.name().map(|e| e.into()).or_else(name)?;
|
||||
let name = func.name().map(|name| name.into()).or_else(name)?;
|
||||
let mut s = SyntaxNode::leaf(SyntaxKind::Ident, &name);
|
||||
s.synthesize(func.span());
|
||||
|
||||
|
@ -334,9 +334,9 @@ impl DefResolver {
|
|||
return None;
|
||||
}
|
||||
|
||||
let expr = self.ei.resolves.get(&span).cloned()?;
|
||||
match (&expr.root, &expr.val) {
|
||||
(Some(expr), ty) => self.of_expr(expr, ty.as_ref()),
|
||||
let resolved = self.ei.resolves.get(&span).cloned()?;
|
||||
match (&resolved.root, &resolved.term) {
|
||||
(Some(expr), term) => self.of_expr(expr, term.as_ref()),
|
||||
(None, Some(term)) => self.of_term(term),
|
||||
(None, None) => None,
|
||||
}
|
||||
|
@ -347,7 +347,9 @@ impl DefResolver {
|
|||
|
||||
match expr {
|
||||
Expr::Decl(decl) => self.of_decl(decl, term),
|
||||
Expr::Ref(r) => self.of_expr(r.root.as_ref()?, r.val.as_ref().or(term)),
|
||||
Expr::Ref(resolved) => {
|
||||
self.of_expr(resolved.root.as_ref()?, resolved.term.as_ref().or(term))
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ use crate::analysis::{
|
|||
analyze_bib, analyze_expr_, analyze_import_, analyze_signature, definition, post_type_check,
|
||||
AllocStats, AnalysisStats, BibInfo, Definition, PathPreference, QueryStatGuard,
|
||||
SemanticTokenCache, SemanticTokenContext, SemanticTokens, Signature, SignatureTarget, Ty,
|
||||
TypeScheme,
|
||||
TypeInfo,
|
||||
};
|
||||
use crate::docs::{DefDocs, TidyModuleDocs};
|
||||
use crate::syntax::{
|
||||
|
@ -303,7 +303,7 @@ impl LocalContext {
|
|||
&self,
|
||||
pref: &PathPreference,
|
||||
) -> impl Iterator<Item = &TypstFileId> {
|
||||
let r = pref.ext_matcher();
|
||||
let regexes = pref.ext_matcher();
|
||||
self.caches
|
||||
.completion_files
|
||||
.get_or_init(|| {
|
||||
|
@ -316,12 +316,12 @@ impl LocalContext {
|
|||
}
|
||||
})
|
||||
.iter()
|
||||
.filter(move |p| {
|
||||
p.vpath()
|
||||
.filter(move |fid| {
|
||||
fid.vpath()
|
||||
.as_rooted_path()
|
||||
.extension()
|
||||
.and_then(|p| p.to_str())
|
||||
.is_some_and(|e| r.is_match(e))
|
||||
.and_then(|path| path.to_str())
|
||||
.is_some_and(|path| regexes.is_match(path))
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -416,14 +416,14 @@ impl LocalContext {
|
|||
}
|
||||
|
||||
/// Get the type check information of a source file.
|
||||
pub(crate) fn type_check(&mut self, source: &Source) -> Arc<TypeScheme> {
|
||||
pub(crate) fn type_check(&mut self, source: &Source) -> Arc<TypeInfo> {
|
||||
let id = source.id();
|
||||
let cache = &self.caches.modules.entry(id).or_default().type_check;
|
||||
cache.get_or_init(|| self.shared.type_check(source)).clone()
|
||||
}
|
||||
|
||||
/// Get the type check information of a source file.
|
||||
pub(crate) fn type_check_by_id(&mut self, id: TypstFileId) -> Arc<TypeScheme> {
|
||||
pub(crate) fn type_check_by_id(&mut self, id: TypstFileId) -> Arc<TypeInfo> {
|
||||
let cache = &self.caches.modules.entry(id).or_default().type_check;
|
||||
cache
|
||||
.clone()
|
||||
|
@ -511,13 +511,13 @@ impl SharedContext {
|
|||
|
||||
/// Convert a Typst range to a LSP range.
|
||||
pub fn to_lsp_range_(&self, position: TypstRange, fid: TypstFileId) -> Option<LspRange> {
|
||||
let w = fid
|
||||
let ext = fid
|
||||
.vpath()
|
||||
.as_rootless_path()
|
||||
.extension()
|
||||
.and_then(|e| e.to_str());
|
||||
.and_then(|ext| ext.to_str());
|
||||
// yaml/yml/bib
|
||||
if matches!(w, Some("yaml" | "yml" | "bib")) {
|
||||
if matches!(ext, Some("yaml" | "yml" | "bib")) {
|
||||
let bytes = self.file_by_id(fid).ok()?;
|
||||
let bytes_len = bytes.len();
|
||||
let loc = loc_info(bytes)?;
|
||||
|
@ -578,8 +578,8 @@ impl SharedContext {
|
|||
position: LspPosition,
|
||||
shift: usize,
|
||||
) -> Option<SyntaxClass<'s>> {
|
||||
let (_, expr) = self.classify_pos_(source, position, shift)?;
|
||||
expr
|
||||
let (_, syntax) = self.classify_pos_(source, position, shift)?;
|
||||
syntax
|
||||
}
|
||||
|
||||
/// Classifies the syntax under position that can be operated on by IDE
|
||||
|
@ -714,7 +714,7 @@ impl SharedContext {
|
|||
}
|
||||
|
||||
/// Get the type check information of a source file.
|
||||
pub(crate) fn type_check(self: &Arc<Self>, source: &Source) -> Arc<TypeScheme> {
|
||||
pub(crate) fn type_check(self: &Arc<Self>, source: &Source) -> Arc<TypeInfo> {
|
||||
let mut route = TypeEnv::default();
|
||||
self.type_check_(source, &mut route)
|
||||
}
|
||||
|
@ -724,7 +724,7 @@ impl SharedContext {
|
|||
self: &Arc<Self>,
|
||||
source: &Source,
|
||||
route: &mut TypeEnv,
|
||||
) -> Arc<TypeScheme> {
|
||||
) -> Arc<TypeInfo> {
|
||||
use crate::analysis::type_check;
|
||||
|
||||
let ei = self.expr_stage(source);
|
||||
|
@ -787,8 +787,8 @@ impl SharedContext {
|
|||
doc: Option<&VersionedDocument>,
|
||||
span: Span,
|
||||
) -> Option<Definition> {
|
||||
let expr = self.classify_span(source, span)?;
|
||||
definition(self, source, doc, expr)
|
||||
let syntax = self.classify_span(source, span)?;
|
||||
definition(self, source, doc, syntax)
|
||||
}
|
||||
|
||||
pub(crate) fn def_of_decl(&self, decl: &Interned<Decl>) -> Option<Definition> {
|
||||
|
@ -832,7 +832,7 @@ impl SharedContext {
|
|||
analyze_signature(self, SignatureTarget::Def(source, def))
|
||||
}
|
||||
|
||||
pub(crate) fn sig_of_type(self: &Arc<Self>, ti: &TypeScheme, ty: Ty) -> Option<Signature> {
|
||||
pub(crate) fn sig_of_type(self: &Arc<Self>, ti: &TypeInfo, ty: Ty) -> Option<Signature> {
|
||||
super::sig_of_type(self, ti, ty)
|
||||
}
|
||||
|
||||
|
@ -980,9 +980,9 @@ impl SharedContext {
|
|||
fn work(&self, fid: TypstFileId) {
|
||||
crate::log_debug_ct!("preload package {fid:?}");
|
||||
let source = self.shared.source_by_id(fid).ok().unwrap();
|
||||
let expr = self.shared.expr_stage(&source);
|
||||
let exprs = self.shared.expr_stage(&source);
|
||||
self.shared.type_check(&source);
|
||||
expr.imports.iter().for_each(|(fid, _)| {
|
||||
exprs.imports.iter().for_each(|(fid, _)| {
|
||||
if !self.analyzed.lock().insert(*fid) {
|
||||
return;
|
||||
}
|
||||
|
@ -1044,14 +1044,14 @@ impl<K, V> IncrCacheMap<K, V> {
|
|||
let entry = global.entry(key.clone());
|
||||
use dashmap::mapref::entry::Entry;
|
||||
match entry {
|
||||
Entry::Occupied(mut e) => {
|
||||
let (revision, _) = e.get();
|
||||
Entry::Occupied(mut entry) => {
|
||||
let (revision, _) = entry.get();
|
||||
if *revision < self.revision {
|
||||
e.insert((self.revision, res.clone()));
|
||||
entry.insert((self.revision, res.clone()));
|
||||
}
|
||||
}
|
||||
Entry::Vacant(e) => {
|
||||
e.insert((self.revision, res.clone()));
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert((self.revision, res.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1146,7 +1146,7 @@ pub struct AnalysisLocalCaches {
|
|||
#[derive(Default)]
|
||||
pub struct ModuleAnalysisLocalCache {
|
||||
expr_stage: OnceCell<Arc<ExprInfo>>,
|
||||
type_check: OnceCell<Arc<TypeScheme>>,
|
||||
type_check: OnceCell<Arc<TypeInfo>>,
|
||||
}
|
||||
|
||||
/// A revision-managed (per input change) cache for all level of analysis
|
||||
|
@ -1199,10 +1199,10 @@ impl AnalysisRevCache {
|
|||
self.manager.find_revision(revision, |slot_base| {
|
||||
log::info!("analysis revision {} is created", revision.get());
|
||||
slot_base
|
||||
.map(|e| AnalysisRevSlot {
|
||||
revision: e.revision,
|
||||
expr_stage: e.data.expr_stage.crawl(revision.get()),
|
||||
type_check: e.data.type_check.crawl(revision.get()),
|
||||
.map(|slot| AnalysisRevSlot {
|
||||
revision: slot.revision,
|
||||
expr_stage: slot.data.expr_stage.crawl(revision.get()),
|
||||
type_check: slot.data.type_check.crawl(revision.get()),
|
||||
})
|
||||
.unwrap_or_else(|| self.default_slot.clone())
|
||||
})
|
||||
|
@ -1234,7 +1234,7 @@ impl Drop for AnalysisRevLock {
|
|||
struct AnalysisRevSlot {
|
||||
revision: usize,
|
||||
expr_stage: IncrCacheMap<u128, Arc<ExprInfo>>,
|
||||
type_check: IncrCacheMap<u128, Arc<TypeScheme>>,
|
||||
type_check: IncrCacheMap<u128, Arc<TypeInfo>>,
|
||||
}
|
||||
|
||||
impl Drop for AnalysisRevSlot {
|
||||
|
@ -1274,7 +1274,7 @@ fn bib_info(
|
|||
fn loc_info(bytes: Bytes) -> Option<EcoVec<(usize, String)>> {
|
||||
let mut loc = EcoVec::new();
|
||||
let mut offset = 0;
|
||||
for line in bytes.split(|e| *e == b'\n') {
|
||||
for line in bytes.split(|byte| *byte == b'\n') {
|
||||
loc.push((offset, String::from_utf8(line.to_owned()).ok()?));
|
||||
offset += line.len() + 1;
|
||||
}
|
||||
|
@ -1327,9 +1327,9 @@ pub struct SearchCtx<'a> {
|
|||
|
||||
impl SearchCtx<'_> {
|
||||
/// Push a file to the worklist.
|
||||
pub fn push(&mut self, id: TypstFileId) -> bool {
|
||||
if self.searched.insert(id) {
|
||||
self.worklist.push(id);
|
||||
pub fn push(&mut self, fid: TypstFileId) -> bool {
|
||||
if self.searched.insert(fid) {
|
||||
self.worklist.push(fid);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
|
@ -1337,9 +1337,9 @@ impl SearchCtx<'_> {
|
|||
}
|
||||
|
||||
/// Push the dependents of a file to the worklist.
|
||||
pub fn push_dependents(&mut self, id: TypstFileId) {
|
||||
let deps = self.ctx.module_dependencies().get(&id);
|
||||
let dependents = deps.map(|e| e.dependents.clone()).into_iter().flatten();
|
||||
pub fn push_dependents(&mut self, fid: TypstFileId) {
|
||||
let deps = self.ctx.module_dependencies().get(&fid);
|
||||
let dependents = deps.map(|dep| dep.dependents.clone()).into_iter().flatten();
|
||||
for dep in dependents {
|
||||
self.push(dep);
|
||||
}
|
||||
|
|
|
@ -82,7 +82,7 @@ impl LinkStrWorker {
|
|||
SyntaxKind::Include => {
|
||||
let inc = node.cast::<ast::ModuleInclude>()?;
|
||||
let path = inc.source();
|
||||
self.analyze_path_exp(node, path);
|
||||
self.analyze_path_expr(node, path);
|
||||
}
|
||||
// early exit
|
||||
k if k.is_trivia() || k.is_keyword() || k.is_error() => return Some(()),
|
||||
|
@ -143,14 +143,14 @@ impl LinkStrWorker {
|
|||
let arg = call.args().items().next()?;
|
||||
match arg {
|
||||
ast::Arg::Pos(s) if pos => {
|
||||
self.analyze_path_exp(node, s);
|
||||
self.analyze_path_expr(node, s);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
for item in call.args().items() {
|
||||
match item {
|
||||
ast::Arg::Named(named) if named.name().get().as_str() == key => {
|
||||
self.analyze_path_exp(node, named.expr());
|
||||
self.analyze_path_expr(node, named.expr());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -158,8 +158,8 @@ impl LinkStrWorker {
|
|||
Some(())
|
||||
}
|
||||
|
||||
fn analyze_path_exp(&mut self, node: &LinkedNode, expr: ast::Expr) -> Option<()> {
|
||||
match expr {
|
||||
fn analyze_path_expr(&mut self, node: &LinkedNode, path_expr: ast::Expr) -> Option<()> {
|
||||
match path_expr {
|
||||
ast::Expr::Str(s) => self.analyze_path_str(node, s),
|
||||
ast::Expr::Array(a) => {
|
||||
for item in a.items() {
|
||||
|
|
|
@ -6,7 +6,7 @@ use tinymist_derive::BindTyCtx;
|
|||
use super::{prelude::*, DynTypeBounds, ParamAttrs, ParamTy, SharedContext};
|
||||
use super::{
|
||||
ArgsTy, Sig, SigChecker, SigShape, SigSurfaceKind, SigTy, Ty, TyCtx, TyCtxMut, TypeBounds,
|
||||
TypeScheme, TypeVar,
|
||||
TypeInfo, TypeVar,
|
||||
};
|
||||
use crate::syntax::{classify_cursor, classify_cursor_by_context, ArgClass, CursorClass};
|
||||
use crate::ty::BuiltinTy;
|
||||
|
@ -15,10 +15,10 @@ use crate::ty::BuiltinTy;
|
|||
/// touching the possible related nodes.
|
||||
pub(crate) fn post_type_check(
|
||||
ctx: Arc<SharedContext>,
|
||||
info: &TypeScheme,
|
||||
ti: &TypeInfo,
|
||||
node: LinkedNode,
|
||||
) -> Option<Ty> {
|
||||
let mut checker = PostTypeChecker::new(ctx, info);
|
||||
let mut checker = PostTypeChecker::new(ctx, ti);
|
||||
let res = checker.check(&node);
|
||||
checker.simplify(&res?)
|
||||
}
|
||||
|
@ -101,9 +101,9 @@ fn check_signature<'a>(
|
|||
|
||||
pub(crate) struct PostTypeChecker<'a> {
|
||||
ctx: Arc<SharedContext>,
|
||||
pub info: &'a TypeScheme,
|
||||
pub info: &'a TypeInfo,
|
||||
checked: HashMap<Span, Option<Ty>>,
|
||||
locals: TypeScheme,
|
||||
locals: TypeInfo,
|
||||
}
|
||||
|
||||
impl TyCtx for PostTypeChecker<'_> {
|
||||
|
@ -117,7 +117,7 @@ impl TyCtx for PostTypeChecker<'_> {
|
|||
}
|
||||
|
||||
impl TyCtxMut for PostTypeChecker<'_> {
|
||||
type Snap = <TypeScheme as TyCtxMut>::Snap;
|
||||
type Snap = <TypeInfo as TyCtxMut>::Snap;
|
||||
|
||||
fn start_scope(&mut self) -> Self::Snap {
|
||||
self.locals.start_scope()
|
||||
|
@ -145,12 +145,12 @@ impl TyCtxMut for PostTypeChecker<'_> {
|
|||
}
|
||||
|
||||
impl<'a> PostTypeChecker<'a> {
|
||||
pub fn new(ctx: Arc<SharedContext>, info: &'a TypeScheme) -> Self {
|
||||
pub fn new(ctx: Arc<SharedContext>, info: &'a TypeInfo) -> Self {
|
||||
Self {
|
||||
ctx,
|
||||
info,
|
||||
checked: HashMap::new(),
|
||||
locals: TypeScheme::default(),
|
||||
locals: TypeInfo::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -316,13 +316,13 @@ impl<'a> PostTypeChecker<'a> {
|
|||
fn check_context(&mut self, context: &LinkedNode, node: &LinkedNode) -> Option<Ty> {
|
||||
match context.kind() {
|
||||
SyntaxKind::LetBinding => {
|
||||
let p = context.cast::<ast::LetBinding>()?;
|
||||
let exp = p.init()?;
|
||||
if exp.span() != node.span() {
|
||||
let let_binding = context.cast::<ast::LetBinding>()?;
|
||||
let let_init = let_binding.init()?;
|
||||
if let_init.span() != node.span() {
|
||||
return None;
|
||||
}
|
||||
|
||||
match p.kind() {
|
||||
match let_binding.kind() {
|
||||
ast::LetBindingKind::Closure(_c) => None,
|
||||
ast::LetBindingKind::Normal(pattern) => {
|
||||
self.destruct_let(pattern, node.clone())
|
||||
|
@ -413,8 +413,8 @@ fn sig_context_of(context: &LinkedNode) -> SigSurfaceKind {
|
|||
match context.kind() {
|
||||
SyntaxKind::Parenthesized => SigSurfaceKind::ArrayOrDict,
|
||||
SyntaxKind::Array => {
|
||||
let c = context.cast::<ast::Array>();
|
||||
if c.is_some_and(|e| e.items().next().is_some()) {
|
||||
let arr = context.cast::<ast::Array>();
|
||||
if arr.is_some_and(|arr| arr.items().next().is_some()) {
|
||||
SigSurfaceKind::Array
|
||||
} else {
|
||||
SigSurfaceKind::ArrayOrDict
|
||||
|
|
|
@ -601,9 +601,9 @@ fn token_from_node(
|
|||
fn token_from_ident(ei: &ExprInfo, ident: &LinkedNode, modifier: &mut ModifierSet) -> TokenType {
|
||||
let resolved = ei.resolves.get(&ident.span());
|
||||
let context = if let Some(resolved) = resolved {
|
||||
match (&resolved.root, &resolved.val) {
|
||||
(Some(e), t) => Some(token_from_decl_expr(e, t.as_ref(), modifier)),
|
||||
(_, Some(t)) => Some(token_from_term(t, modifier)),
|
||||
match (&resolved.root, &resolved.term) {
|
||||
(Some(root), term) => Some(token_from_decl_expr(root, term.as_ref(), modifier)),
|
||||
(_, Some(ty)) => Some(token_from_term(ty, modifier)),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
|
@ -710,7 +710,7 @@ fn token_from_hashtag(
|
|||
) -> Option<TokenType> {
|
||||
get_expr_following_hashtag(hashtag)
|
||||
.as_ref()
|
||||
.and_then(|e| token_from_node(ei, e, modifier))
|
||||
.and_then(|node| token_from_node(ei, node, modifier))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
|
@ -6,7 +6,7 @@ use typst::foundations::Closure;
|
|||
|
||||
use super::{
|
||||
prelude::*, BoundChecker, Definition, DocSource, ParamTy, SharedContext, SigTy, SigWithTy,
|
||||
TypeScheme, TypeVar,
|
||||
TypeInfo, TypeVar,
|
||||
};
|
||||
use crate::analysis::PostTypeChecker;
|
||||
use crate::docs::{UntypedDefDocs, UntypedSignatureDocs, UntypedVarDocs};
|
||||
|
@ -217,7 +217,7 @@ fn analyze_type_signature(
|
|||
|
||||
pub(crate) fn sig_of_type(
|
||||
ctx: &Arc<SharedContext>,
|
||||
type_info: &TypeScheme,
|
||||
type_info: &TypeInfo,
|
||||
ty: Ty,
|
||||
) -> Option<Signature> {
|
||||
// todo multiple sources
|
||||
|
@ -596,8 +596,8 @@ fn analyze_closure_signature(
|
|||
|
||||
for param in closure_ast.params().children() {
|
||||
match param {
|
||||
ast::Param::Pos(e) => {
|
||||
let name = format!("{}", PatternDisplay(&e));
|
||||
ast::Param::Pos(pos) => {
|
||||
let name = format!("{}", PatternDisplay(&pos));
|
||||
add_param(Interned::new(ParamTy {
|
||||
name: name.as_str().into(),
|
||||
docs: None,
|
||||
|
@ -607,20 +607,20 @@ fn analyze_closure_signature(
|
|||
}));
|
||||
}
|
||||
// todo: pattern
|
||||
ast::Param::Named(n) => {
|
||||
let expr = unwrap_expr(n.expr()).to_untyped().clone().into_text();
|
||||
ast::Param::Named(named) => {
|
||||
let default = unwrap_parens(named.expr()).to_untyped().clone().into_text();
|
||||
add_param(Interned::new(ParamTy {
|
||||
name: n.name().get().into(),
|
||||
docs: Some(eco_format!("Default value: {expr}")),
|
||||
default: Some(expr),
|
||||
name: named.name().get().into(),
|
||||
docs: Some(eco_format!("Default value: {default}")),
|
||||
default: Some(default),
|
||||
ty: Ty::Any,
|
||||
attrs: ParamAttrs::named(),
|
||||
}));
|
||||
}
|
||||
ast::Param::Spread(n) => {
|
||||
let ident = n.sink_ident().map(|e| e.as_str());
|
||||
ast::Param::Spread(spread) => {
|
||||
let sink = spread.sink_ident().map(|sink| sink.as_str());
|
||||
add_param(Interned::new(ParamTy {
|
||||
name: ident.unwrap_or_default().into(),
|
||||
name: sink.unwrap_or_default().into(),
|
||||
docs: None,
|
||||
default: None,
|
||||
ty: Ty::Any,
|
||||
|
@ -657,7 +657,7 @@ impl fmt::Display for PatternDisplay<'_> {
|
|||
f,
|
||||
"{}: {}",
|
||||
n.name().as_str(),
|
||||
unwrap_expr(n.expr()).to_untyped().text()
|
||||
unwrap_parens(n.expr()).to_untyped().text()
|
||||
)?,
|
||||
ast::DestructuringItem::Spread(s) => write!(
|
||||
f,
|
||||
|
@ -673,10 +673,10 @@ impl fmt::Display for PatternDisplay<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn unwrap_expr(mut e: ast::Expr) -> ast::Expr {
|
||||
while let ast::Expr::Parenthesized(p) = e {
|
||||
e = p.expr();
|
||||
fn unwrap_parens(mut expr: ast::Expr) -> ast::Expr {
|
||||
while let ast::Expr::Parenthesized(p) = expr {
|
||||
expr = p.expr();
|
||||
}
|
||||
|
||||
e
|
||||
expr
|
||||
}
|
||||
|
|
|
@ -6,8 +6,8 @@ use rustc_hash::{FxHashMap, FxHashSet};
|
|||
use tinymist_derive::BindTyCtx;
|
||||
|
||||
use super::{
|
||||
prelude::*, BuiltinTy, DynTypeBounds, FlowVarKind, SharedContext, TyCtxMut, TypeScheme,
|
||||
TypeVar, TypeVarBounds,
|
||||
prelude::*, BuiltinTy, DynTypeBounds, FlowVarKind, SharedContext, TyCtxMut, TypeInfo, TypeVar,
|
||||
TypeVarBounds,
|
||||
};
|
||||
use crate::{
|
||||
syntax::{Decl, DeclExpr, Expr, ExprInfo, UnaryOp},
|
||||
|
@ -26,7 +26,7 @@ pub(crate) use select::*;
|
|||
|
||||
#[derive(Default)]
|
||||
pub struct TypeEnv {
|
||||
visiting: FxHashMap<TypstFileId, Arc<TypeScheme>>,
|
||||
visiting: FxHashMap<TypstFileId, Arc<TypeInfo>>,
|
||||
exprs: FxHashMap<TypstFileId, Option<Arc<ExprInfo>>>,
|
||||
}
|
||||
|
||||
|
@ -35,13 +35,13 @@ pub(crate) fn type_check(
|
|||
ctx: Arc<SharedContext>,
|
||||
ei: Arc<ExprInfo>,
|
||||
env: &mut TypeEnv,
|
||||
) -> Arc<TypeScheme> {
|
||||
let mut info = TypeScheme::default();
|
||||
) -> Arc<TypeInfo> {
|
||||
let mut info = TypeInfo::default();
|
||||
info.valid = true;
|
||||
info.fid = Some(ei.fid);
|
||||
info.revision = ei.revision;
|
||||
|
||||
env.visiting.insert(ei.fid, Arc::new(TypeScheme::default()));
|
||||
env.visiting.insert(ei.fid, Arc::new(TypeInfo::default()));
|
||||
|
||||
// Retrieve expression information for the source.
|
||||
let root = ei.root.clone();
|
||||
|
@ -86,7 +86,7 @@ pub(crate) struct TypeChecker<'a> {
|
|||
ctx: Arc<SharedContext>,
|
||||
ei: Arc<ExprInfo>,
|
||||
|
||||
info: TypeScheme,
|
||||
info: TypeInfo,
|
||||
module_exports: FxHashMap<(TypstFileId, Interned<str>), OnceLock<Option<Ty>>>,
|
||||
|
||||
call_cache: FxHashSet<CallCacheDesc>,
|
||||
|
@ -105,7 +105,7 @@ impl TyCtx for TypeChecker<'_> {
|
|||
}
|
||||
|
||||
impl TyCtxMut for TypeChecker<'_> {
|
||||
type Snap = <TypeScheme as TyCtxMut>::Snap;
|
||||
type Snap = <TypeInfo as TyCtxMut>::Snap;
|
||||
|
||||
fn start_scope(&mut self) -> Self::Snap {
|
||||
self.info.start_scope()
|
||||
|
@ -147,8 +147,8 @@ impl TyCtxMut for TypeChecker<'_> {
|
|||
}
|
||||
|
||||
impl TypeChecker<'_> {
|
||||
fn check(&mut self, root: &Expr) -> Ty {
|
||||
self.check_syntax(root).unwrap_or(Ty::undef())
|
||||
fn check(&mut self, expr: &Expr) -> Ty {
|
||||
self.check_syntax(expr).unwrap_or(Ty::undef())
|
||||
}
|
||||
|
||||
fn copy_doc_vars(
|
||||
|
@ -221,7 +221,7 @@ impl TypeChecker<'_> {
|
|||
// self.info.witness_at_least(s, w.clone());
|
||||
// }
|
||||
|
||||
TypeScheme::witness_(s, Ty::Var(var.clone()), &mut self.info.mapping);
|
||||
TypeInfo::witness_(s, Ty::Var(var.clone()), &mut self.info.mapping);
|
||||
}
|
||||
var
|
||||
}
|
||||
|
@ -300,14 +300,14 @@ impl TypeChecker<'_> {
|
|||
}
|
||||
}
|
||||
}
|
||||
(Ty::Union(v), rhs) => {
|
||||
for e in v.iter() {
|
||||
self.constrain(e, rhs);
|
||||
(Ty::Union(types), rhs) => {
|
||||
for ty in types.iter() {
|
||||
self.constrain(ty, rhs);
|
||||
}
|
||||
}
|
||||
(lhs, Ty::Union(v)) => {
|
||||
for e in v.iter() {
|
||||
self.constrain(lhs, e);
|
||||
(lhs, Ty::Union(types)) => {
|
||||
for ty in types.iter() {
|
||||
self.constrain(lhs, ty);
|
||||
}
|
||||
}
|
||||
(lhs, Ty::Builtin(BuiltinTy::Stroke)) => {
|
||||
|
@ -393,7 +393,7 @@ impl TypeChecker<'_> {
|
|||
}
|
||||
(Ty::Value(lhs), rhs) => {
|
||||
crate::log_debug_ct!("constrain value {lhs:?} ⪯ {rhs:?}");
|
||||
let _ = TypeScheme::witness_at_most;
|
||||
let _ = TypeInfo::witness_at_most;
|
||||
// if !lhs.1.is_detached() {
|
||||
// self.info.witness_at_most(lhs.1, rhs.clone());
|
||||
// }
|
||||
|
@ -557,15 +557,15 @@ impl Joiner {
|
|||
(Ty::Builtin(BuiltinTy::Space | BuiltinTy::None), _) => {}
|
||||
(Ty::Builtin(BuiltinTy::Clause | BuiltinTy::FlowNone), _) => {}
|
||||
(Ty::Any, _) | (_, Ty::Any) => {}
|
||||
(Ty::Var(v), _) => self.possibles.push(Ty::Var(v)),
|
||||
(Ty::Var(var), _) => self.possibles.push(Ty::Var(var)),
|
||||
// todo: check possibles
|
||||
(Ty::Array(e), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Array(e),
|
||||
(Ty::Array(arr), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Array(arr),
|
||||
(Ty::Array(..), _) => self.definite = Ty::undef(),
|
||||
(Ty::Tuple(e), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Tuple(e),
|
||||
(Ty::Tuple(elems), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Tuple(elems),
|
||||
(Ty::Tuple(..), _) => self.definite = Ty::undef(),
|
||||
// todo: mystery flow none
|
||||
// todo: possible some style (auto)
|
||||
(Ty::Builtin(b), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Builtin(b),
|
||||
(Ty::Builtin(ty), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Builtin(ty),
|
||||
(Ty::Builtin(..), _) => self.definite = Ty::undef(),
|
||||
// todo: value join
|
||||
(Ty::Value(v), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Value(v),
|
||||
|
|
|
@ -10,11 +10,11 @@ static EMPTY_DOCSTRING: LazyLock<DocString> = LazyLock::new(DocString::default);
|
|||
static EMPTY_VAR_DOC: LazyLock<VarDoc> = LazyLock::new(VarDoc::default);
|
||||
|
||||
impl TypeChecker<'_> {
|
||||
pub(crate) fn check_syntax(&mut self, root: &Expr) -> Option<Ty> {
|
||||
Some(match root {
|
||||
Expr::Block(seq) => self.check_block(seq),
|
||||
Expr::Array(array) => self.check_array(array),
|
||||
Expr::Dict(dict) => self.check_dict(dict),
|
||||
pub(crate) fn check_syntax(&mut self, expr: &Expr) -> Option<Ty> {
|
||||
Some(match expr {
|
||||
Expr::Block(exprs) => self.check_block(exprs),
|
||||
Expr::Array(elems) => self.check_array(elems),
|
||||
Expr::Dict(elems) => self.check_dict(elems),
|
||||
Expr::Args(args) => self.check_args(args),
|
||||
// todo: check pattern correctly
|
||||
Expr::Pattern(pattern) => self.check_pattern_exp(pattern),
|
||||
|
@ -41,20 +41,20 @@ impl TypeChecker<'_> {
|
|||
})
|
||||
}
|
||||
|
||||
fn check_block(&mut self, seq: &Interned<Vec<Expr>>) -> Ty {
|
||||
fn check_block(&mut self, exprs: &Interned<Vec<Expr>>) -> Ty {
|
||||
let mut joiner = Joiner::default();
|
||||
|
||||
for child in seq.iter() {
|
||||
for child in exprs.iter() {
|
||||
joiner.join(self.check(child));
|
||||
}
|
||||
|
||||
joiner.finalize()
|
||||
}
|
||||
|
||||
fn check_array(&mut self, array: &Interned<Vec<ArgExpr>>) -> Ty {
|
||||
fn check_array(&mut self, elems: &Interned<Vec<ArgExpr>>) -> Ty {
|
||||
let mut elements = Vec::new();
|
||||
|
||||
for elem in array.iter() {
|
||||
for elem in elems.iter() {
|
||||
match elem {
|
||||
ArgExpr::Pos(p) => {
|
||||
elements.push(self.check(p));
|
||||
|
@ -69,10 +69,10 @@ impl TypeChecker<'_> {
|
|||
Ty::Tuple(elements.into())
|
||||
}
|
||||
|
||||
fn check_dict(&mut self, dict: &Interned<Vec<ArgExpr>>) -> Ty {
|
||||
fn check_dict(&mut self, elems: &Interned<Vec<ArgExpr>>) -> Ty {
|
||||
let mut fields = Vec::new();
|
||||
|
||||
for elem in dict.iter() {
|
||||
for elem in elems.iter() {
|
||||
match elem {
|
||||
ArgExpr::Named(n) => {
|
||||
let (name, value) = n.as_ref();
|
||||
|
@ -122,19 +122,19 @@ impl TypeChecker<'_> {
|
|||
Ty::Args(args.into())
|
||||
}
|
||||
|
||||
fn check_pattern_exp(&mut self, pattern: &Interned<Pattern>) -> Ty {
|
||||
self.check_pattern(None, pattern, &EMPTY_DOCSTRING)
|
||||
fn check_pattern_exp(&mut self, pat: &Interned<Pattern>) -> Ty {
|
||||
self.check_pattern(None, pat, &EMPTY_DOCSTRING)
|
||||
}
|
||||
|
||||
fn check_pattern(
|
||||
&mut self,
|
||||
base: Option<&Interned<Decl>>,
|
||||
pattern: &Interned<Pattern>,
|
||||
pat: &Interned<Pattern>,
|
||||
docstring: &DocString,
|
||||
) -> Ty {
|
||||
// todo: recursive doc constructing
|
||||
match pattern.as_ref() {
|
||||
Pattern::Expr(e) => self.check(e),
|
||||
match pat.as_ref() {
|
||||
Pattern::Expr(expr) => self.check(expr),
|
||||
Pattern::Simple(decl) => {
|
||||
let ret = self.check_decl(decl);
|
||||
let var_doc = docstring.as_var();
|
||||
|
@ -155,28 +155,28 @@ impl TypeChecker<'_> {
|
|||
fn check_pattern_sig(
|
||||
&mut self,
|
||||
base: Option<&Interned<Decl>>,
|
||||
pattern: &PatternSig,
|
||||
pat: &PatternSig,
|
||||
docstring: &DocString,
|
||||
) -> (PatternTy, BTreeMap<Interned<str>, Ty>) {
|
||||
let mut pos_docs = vec![];
|
||||
let mut named_docs = BTreeMap::new();
|
||||
let mut rest_docs = None;
|
||||
|
||||
let mut pos = vec![];
|
||||
let mut named = BTreeMap::new();
|
||||
let mut pos_all = vec![];
|
||||
let mut named_all = BTreeMap::new();
|
||||
let mut defaults = BTreeMap::new();
|
||||
let mut rest = None;
|
||||
let mut spread_right = None;
|
||||
|
||||
// todo: combine with check_pattern
|
||||
for exp in pattern.pos.iter() {
|
||||
for pos_expr in pat.pos.iter() {
|
||||
// pos.push(self.check_pattern(pattern, Ty::Any, docstring, root.clone()));
|
||||
let res = self.check_pattern_exp(exp);
|
||||
if let Pattern::Simple(ident) = exp.as_ref() {
|
||||
let pos_ty = self.check_pattern_exp(pos_expr);
|
||||
if let Pattern::Simple(ident) = pos_expr.as_ref() {
|
||||
let name = ident.name().clone();
|
||||
|
||||
let param_doc = docstring.get_var(&name).unwrap_or(&EMPTY_VAR_DOC);
|
||||
if let Some(annotated) = docstring.var_ty(&name) {
|
||||
self.constrain(&res, annotated);
|
||||
self.constrain(&pos_ty, annotated);
|
||||
}
|
||||
pos_docs.push(TypelessParamDocs {
|
||||
name,
|
||||
|
@ -194,22 +194,22 @@ impl TypeChecker<'_> {
|
|||
attrs: ParamAttrs::positional(),
|
||||
});
|
||||
}
|
||||
pos.push(res);
|
||||
pos_all.push(pos_ty);
|
||||
}
|
||||
|
||||
for (decl, exp) in pattern.named.iter() {
|
||||
for (decl, named_expr) in pat.named.iter() {
|
||||
let name = decl.name().clone();
|
||||
let res = self.check_pattern_exp(exp);
|
||||
let named_ty = self.check_pattern_exp(named_expr);
|
||||
let var = self.get_var(decl);
|
||||
let v = Ty::Var(var.clone());
|
||||
let var_ty = Ty::Var(var.clone());
|
||||
if let Some(annotated) = docstring.var_ty(&name) {
|
||||
self.constrain(&v, annotated);
|
||||
self.constrain(&var_ty, annotated);
|
||||
}
|
||||
// todo: this is less efficient than v.lbs.push(exp), we may have some idea to
|
||||
// optimize it, so I put a todo here.
|
||||
self.constrain(&res, &v);
|
||||
named.insert(name.clone(), v);
|
||||
defaults.insert(name.clone(), res);
|
||||
self.constrain(&named_ty, &var_ty);
|
||||
named_all.insert(name.clone(), var_ty);
|
||||
defaults.insert(name.clone(), named_ty);
|
||||
|
||||
let param_doc = docstring.get_var(&name).unwrap_or(&EMPTY_VAR_DOC);
|
||||
named_docs.insert(
|
||||
|
@ -218,7 +218,7 @@ impl TypeChecker<'_> {
|
|||
name: name.clone(),
|
||||
docs: param_doc.docs.clone(),
|
||||
cano_type: (),
|
||||
default: Some(exp.repr()),
|
||||
default: Some(named_expr.repr()),
|
||||
attrs: ParamAttrs::named(),
|
||||
},
|
||||
);
|
||||
|
@ -228,7 +228,7 @@ impl TypeChecker<'_> {
|
|||
}
|
||||
|
||||
// todo: spread left/right
|
||||
if let Some((decl, _exp)) = &pattern.spread_right {
|
||||
if let Some((decl, _spread_expr)) = &pat.spread_right {
|
||||
let var = self.get_var(decl);
|
||||
let name = var.name.clone();
|
||||
let param_doc = docstring
|
||||
|
@ -238,13 +238,13 @@ impl TypeChecker<'_> {
|
|||
.var_docs
|
||||
.insert(decl.clone(), param_doc.to_untyped());
|
||||
|
||||
let exp = Ty::Builtin(BuiltinTy::Args);
|
||||
let v = Ty::Var(var);
|
||||
let term = Ty::Builtin(BuiltinTy::Args);
|
||||
let var_ty = Ty::Var(var);
|
||||
if let Some(annotated) = docstring.var_ty(&name) {
|
||||
self.constrain(&v, annotated);
|
||||
self.constrain(&var_ty, annotated);
|
||||
}
|
||||
self.constrain(&exp, &v);
|
||||
rest = Some(v);
|
||||
self.constrain(&term, &var_ty);
|
||||
spread_right = Some(var_ty);
|
||||
|
||||
rest_docs = Some(TypelessParamDocs {
|
||||
name,
|
||||
|
@ -256,7 +256,7 @@ impl TypeChecker<'_> {
|
|||
// todo: ..(args)
|
||||
}
|
||||
|
||||
let named: Vec<(Interned<str>, Ty)> = named.into_iter().collect();
|
||||
let named: Vec<(Interned<str>, Ty)> = named_all.into_iter().collect();
|
||||
|
||||
if let Some(base) = base {
|
||||
self.info.var_docs.insert(
|
||||
|
@ -273,7 +273,7 @@ impl TypeChecker<'_> {
|
|||
}
|
||||
|
||||
(
|
||||
PatternTy::new(pos.into_iter(), named, None, rest, None),
|
||||
PatternTy::new(pos_all.into_iter(), named, None, spread_right, None),
|
||||
defaults,
|
||||
)
|
||||
}
|
||||
|
@ -469,7 +469,7 @@ impl TypeChecker<'_> {
|
|||
let s = r.decl.span();
|
||||
let s = (!s.is_detached()).then_some(s);
|
||||
let of = r.root.as_ref().map(|of| self.check(of));
|
||||
let of = of.or_else(|| r.val.clone());
|
||||
let of = of.or_else(|| r.term.clone());
|
||||
if let Some((s, of)) = s.zip(of.as_ref()) {
|
||||
self.info.witness_at_most(s, of.clone());
|
||||
}
|
||||
|
@ -536,7 +536,7 @@ impl TypeChecker<'_> {
|
|||
let ty = if decl.is_def() {
|
||||
Some(Ty::Builtin(BuiltinTy::Module(decl.clone())))
|
||||
} else {
|
||||
self.ei.get_def(decl).map(|e| self.check(&e))
|
||||
self.ei.get_def(decl).map(|expr| self.check(&expr))
|
||||
};
|
||||
if let Some(ty) = ty {
|
||||
self.constrain(&v, &ty);
|
||||
|
|
|
@ -137,8 +137,8 @@ impl StatefulRequest for CompletionRequest {
|
|||
|
||||
// Exclude it self from auto completion
|
||||
// e.g. `#let x = (1.);`
|
||||
let self_ty = cc_ctx.leaf.cast::<ast::Expr>().and_then(|exp| {
|
||||
let v = cc_ctx.ctx.mini_eval(exp)?;
|
||||
let self_ty = cc_ctx.leaf.cast::<ast::Expr>().and_then(|leaf| {
|
||||
let v = cc_ctx.ctx.mini_eval(leaf)?;
|
||||
Some(Ty::Value(InsTy::new(v)))
|
||||
});
|
||||
|
||||
|
@ -227,10 +227,10 @@ impl StatefulRequest for CompletionRequest {
|
|||
detail: typst_completion.detail.as_ref().map(String::from),
|
||||
sort_text: typst_completion.sort_text.as_ref().map(String::from),
|
||||
filter_text: typst_completion.filter_text.as_ref().map(String::from),
|
||||
label_details: typst_completion.label_detail.as_ref().map(|e| {
|
||||
label_details: typst_completion.label_detail.as_ref().map(|desc| {
|
||||
CompletionItemLabelDetails {
|
||||
detail: None,
|
||||
description: Some(e.to_string()),
|
||||
description: Some(desc.to_string()),
|
||||
}
|
||||
}),
|
||||
text_edit: Some(text_edit),
|
||||
|
|
|
@ -63,12 +63,12 @@ where
|
|||
.get("negative")
|
||||
.ok_or_else(|| serde::de::Error::custom("missing field `negative`"))?;
|
||||
let negative = serde_json::from_value(negative.clone())
|
||||
.map_err(|e| serde::de::Error::custom(e.to_string()))?;
|
||||
.map_err(|err| serde::de::Error::custom(err.to_string()))?;
|
||||
Ok(ContextSelector::Negative(negative))
|
||||
}
|
||||
_ => {
|
||||
let value = serde_json::from_value(value)
|
||||
.map_err(|e| serde::de::Error::custom(e.to_string()))?;
|
||||
.map_err(|err| serde::de::Error::custom(err.to_string()))?;
|
||||
Ok(ContextSelector::Positive(Some(value)))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,7 +45,7 @@ pub(crate) fn convert_docs(ctx: &SharedContext, content: &str) -> StrResult<EcoS
|
|||
..Default::default()
|
||||
})
|
||||
.convert()
|
||||
.map_err(|e| eco_format!("failed to convert to markdown: {e}"))?;
|
||||
.map_err(|err| eco_format!("failed to convert to markdown: {err}"))?;
|
||||
|
||||
Ok(conv.replace("```example", "```typ"))
|
||||
}
|
||||
|
|
|
@ -209,7 +209,7 @@ impl ScanDefCtx<'_> {
|
|||
let mut head = DefInfo {
|
||||
name: key.to_string().into(),
|
||||
kind: decl.kind(),
|
||||
constant: expr.map(|e| e.repr()),
|
||||
constant: expr.map(|expr| expr.repr()),
|
||||
docs,
|
||||
parsed_docs: def_docs,
|
||||
decl: Some(decl.clone()),
|
||||
|
@ -223,7 +223,7 @@ impl ScanDefCtx<'_> {
|
|||
if let Some((span, mod_fid)) = head.decl.as_ref().and_then(|d| d.file_id()).zip(site) {
|
||||
if span != *mod_fid {
|
||||
head.is_external = true;
|
||||
head.oneliner = head.docs.as_deref().map(oneliner).map(|e| e.to_owned());
|
||||
head.oneliner = head.docs.map(|docs| oneliner(&docs).to_owned());
|
||||
head.docs = None;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -110,11 +110,11 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Str
|
|||
for mut child in children {
|
||||
let span = child.decl.as_ref().map(|d| d.span());
|
||||
let fid_range = span.and_then(|v| {
|
||||
v.id().and_then(|e| {
|
||||
let fid = file_ids.insert_full(e).0;
|
||||
let src = ctx.source_by_id(e).ok()?;
|
||||
v.id().and_then(|fid| {
|
||||
let allocated = file_ids.insert_full(fid).0;
|
||||
let src = ctx.source_by_id(fid).ok()?;
|
||||
let rng = src.range(v)?;
|
||||
Some((fid, rng.start, rng.end))
|
||||
Some((allocated, rng.start, rng.end))
|
||||
})
|
||||
});
|
||||
let child_fid = child.decl.as_ref().and_then(|d| d.file_id());
|
||||
|
@ -258,8 +258,8 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Str
|
|||
let err = jbase64(&res);
|
||||
let _ = writeln!(md, "<!-- begin:errors {err} -->");
|
||||
let _ = writeln!(md, "## Errors");
|
||||
for e in res.errors {
|
||||
let _ = writeln!(md, "- {e}");
|
||||
for errs in res.errors {
|
||||
let _ = writeln!(md, "- {errs}");
|
||||
}
|
||||
let _ = writeln!(md, "<!-- end:errors -->");
|
||||
|
||||
|
@ -267,22 +267,24 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Str
|
|||
|
||||
let files = file_ids
|
||||
.into_iter()
|
||||
.map(|e| {
|
||||
let pkg = e.package().map(|e| packages.insert_full(e.clone()).0);
|
||||
.map(|fid| {
|
||||
let pkg = fid
|
||||
.package()
|
||||
.map(|spec| packages.insert_full(spec.clone()).0);
|
||||
|
||||
FileMeta {
|
||||
package: pkg,
|
||||
path: e.vpath().as_rootless_path().to_owned(),
|
||||
path: fid.vpath().as_rootless_path().to_owned(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let packages = packages
|
||||
.into_iter()
|
||||
.map(|e| PackageMeta {
|
||||
namespace: e.namespace.clone(),
|
||||
name: e.name.clone(),
|
||||
version: e.version.to_string(),
|
||||
.map(|spec| PackageMeta {
|
||||
namespace: spec.namespace.clone(),
|
||||
name: spec.name.clone(),
|
||||
version: spec.version.to_string(),
|
||||
manifest: None,
|
||||
})
|
||||
.collect();
|
||||
|
|
|
@ -246,7 +246,7 @@ impl DocumentMetricsWorker<'_> {
|
|||
full_name: font.find_name(FULL_NAME),
|
||||
family: font.find_name(FAMILY),
|
||||
fixed_family: Some(info.family.clone()),
|
||||
source: extra.map(|e| self.internal_source(e)),
|
||||
source: extra.map(|source| self.internal_source(source)),
|
||||
index: Some(font.index()),
|
||||
uses_scale: Some(font_info_value.uses),
|
||||
uses: None,
|
||||
|
|
|
@ -30,37 +30,36 @@ impl SyntaxRequest for DocumentSymbolRequest {
|
|||
source: &Source,
|
||||
position_encoding: PositionEncoding,
|
||||
) -> Option<Self::Response> {
|
||||
let symbols = get_lexical_hierarchy(source.clone(), LexicalScopeKind::Symbol)?;
|
||||
|
||||
let symbols = filter_document_symbols(&symbols, source, position_encoding);
|
||||
let hierarchy = get_lexical_hierarchy(source, LexicalScopeKind::Symbol)?;
|
||||
let symbols = symbols_in_hierarchy(&hierarchy, source, position_encoding);
|
||||
Some(DocumentSymbolResponse::Nested(symbols))
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(deprecated)]
|
||||
fn filter_document_symbols(
|
||||
symbols: &[LexicalHierarchy],
|
||||
fn symbols_in_hierarchy(
|
||||
hierarchy: &[LexicalHierarchy],
|
||||
source: &Source,
|
||||
position_encoding: PositionEncoding,
|
||||
) -> Vec<DocumentSymbol> {
|
||||
symbols
|
||||
hierarchy
|
||||
.iter()
|
||||
.map(|e| {
|
||||
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
|
||||
.map(|hierarchy| {
|
||||
let range =
|
||||
typst_to_lsp::range(hierarchy.info.range.clone(), source, position_encoding);
|
||||
|
||||
DocumentSymbol {
|
||||
name: e.info.name.to_string(),
|
||||
name: hierarchy.info.name.to_string(),
|
||||
detail: None,
|
||||
kind: e.info.kind.clone().try_into().unwrap(),
|
||||
kind: hierarchy.info.kind.clone().try_into().unwrap(),
|
||||
tags: None,
|
||||
deprecated: None,
|
||||
range: rng,
|
||||
selection_range: rng,
|
||||
// .raw_range,
|
||||
children: e
|
||||
range,
|
||||
selection_range: range,
|
||||
children: hierarchy
|
||||
.children
|
||||
.as_ref()
|
||||
.map(|ch| filter_document_symbols(ch, source, position_encoding)),
|
||||
.map(|ch| symbols_in_hierarchy(ch, source, position_encoding)),
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
|
|
|
@ -33,7 +33,7 @@ impl SyntaxRequest for FoldingRangeRequest {
|
|||
) -> Option<Self::Response> {
|
||||
let line_folding_only = self.line_folding_only;
|
||||
|
||||
let symbols = get_lexical_hierarchy(source.clone(), LexicalScopeKind::Braced)?;
|
||||
let hierarchy = get_lexical_hierarchy(source, LexicalScopeKind::Braced)?;
|
||||
|
||||
let mut results = vec![];
|
||||
let LspPosition { line, character } =
|
||||
|
@ -41,7 +41,7 @@ impl SyntaxRequest for FoldingRangeRequest {
|
|||
let loc = (line, Some(character));
|
||||
|
||||
calc_folding_range(
|
||||
&symbols,
|
||||
&hierarchy,
|
||||
source,
|
||||
position_encoding,
|
||||
loc,
|
||||
|
@ -83,9 +83,7 @@ impl SyntaxRequest for FoldingRangeRequest {
|
|||
results.reverse();
|
||||
}
|
||||
|
||||
if false {
|
||||
log::trace!("FoldingRangeRequest(line_folding_only={line_folding_only}) symbols: {symbols:#?} results: {results:#?}");
|
||||
}
|
||||
crate::log_debug_ct!("FoldingRangeRequest(line_folding_only={line_folding_only}) symbols: {hierarchy:#?} results: {results:#?}");
|
||||
|
||||
Some(results)
|
||||
}
|
||||
|
@ -94,30 +92,30 @@ impl SyntaxRequest for FoldingRangeRequest {
|
|||
type LoC = (u32, Option<u32>);
|
||||
|
||||
fn calc_folding_range(
|
||||
symbols: &[LexicalHierarchy],
|
||||
hierarchy: &[LexicalHierarchy],
|
||||
source: &Source,
|
||||
position_encoding: PositionEncoding,
|
||||
parent_last_loc: LoC,
|
||||
last_loc: LoC,
|
||||
is_last_range: bool,
|
||||
ranges: &mut Vec<FoldingRange>,
|
||||
folding_ranges: &mut Vec<FoldingRange>,
|
||||
) {
|
||||
for (i, e) in symbols.iter().enumerate() {
|
||||
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
|
||||
let is_not_last_range = i + 1 < symbols.len();
|
||||
for (idx, child) in hierarchy.iter().enumerate() {
|
||||
let range = typst_to_lsp::range(child.info.range.clone(), source, position_encoding);
|
||||
let is_not_last_range = idx + 1 < hierarchy.len();
|
||||
let is_not_final_last_range = !is_last_range || is_not_last_range;
|
||||
|
||||
let mut range = FoldingRange {
|
||||
start_line: rng.start.line,
|
||||
start_character: Some(rng.start.character),
|
||||
end_line: rng.end.line,
|
||||
end_character: Some(rng.end.character),
|
||||
let mut folding_range = FoldingRange {
|
||||
start_line: range.start.line,
|
||||
start_character: Some(range.start.character),
|
||||
end_line: range.end.line,
|
||||
end_character: Some(range.end.character),
|
||||
kind: None,
|
||||
collapsed_text: Some(e.info.name.to_string()),
|
||||
collapsed_text: Some(child.info.name.to_string()),
|
||||
};
|
||||
|
||||
let next_start = if is_not_last_range {
|
||||
let next = &symbols[i + 1];
|
||||
let next = &hierarchy[idx + 1];
|
||||
let next_rng = typst_to_lsp::range(next.info.range.clone(), source, position_encoding);
|
||||
(next_rng.start.line, Some(next_rng.start.character))
|
||||
} else if is_not_final_last_range {
|
||||
|
@ -126,17 +124,17 @@ fn calc_folding_range(
|
|||
last_loc
|
||||
};
|
||||
|
||||
if matches!(e.info.kind, LexicalKind::Heading(..)) {
|
||||
range.end_line = range.end_line.max(if is_not_last_range {
|
||||
if matches!(child.info.kind, LexicalKind::Heading(..)) {
|
||||
folding_range.end_line = folding_range.end_line.max(if is_not_last_range {
|
||||
next_start.0.saturating_sub(1)
|
||||
} else {
|
||||
next_start.0
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(ch) = &e.children {
|
||||
if let Some(ch) = &child.children {
|
||||
let parent_last_loc = if is_not_last_range {
|
||||
(rng.end.line, Some(rng.end.character))
|
||||
(range.end.line, Some(range.end.character))
|
||||
} else {
|
||||
parent_last_loc
|
||||
};
|
||||
|
@ -148,11 +146,11 @@ fn calc_folding_range(
|
|||
parent_last_loc,
|
||||
last_loc,
|
||||
!is_not_final_last_range,
|
||||
ranges,
|
||||
folding_ranges,
|
||||
);
|
||||
}
|
||||
|
||||
ranges.push(range);
|
||||
folding_ranges.push(folding_range);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ impl StatefulRequest for GotoDefinitionRequest {
|
|||
|
||||
let def = ctx.def_of_syntax(&source, doc.as_ref(), syntax)?;
|
||||
|
||||
let (fid, def_range) = def.def_at(ctx.shared())?;
|
||||
let (fid, def_range) = def.location(ctx.shared())?;
|
||||
let uri = ctx.uri_for_id(fid).ok()?;
|
||||
let range = ctx.to_lsp_range_(def_range, fid)?;
|
||||
|
||||
|
|
|
@ -51,22 +51,22 @@ impl StatefulRequest for HoverRequest {
|
|||
let mut contents = match contents {
|
||||
HoverContents::Array(contents) => contents
|
||||
.into_iter()
|
||||
.map(|e| match e {
|
||||
MarkedString::LanguageString(e) => {
|
||||
format!("```{}\n{}\n```", e.language, e.value)
|
||||
.map(|content| match content {
|
||||
MarkedString::LanguageString(content) => {
|
||||
format!("```{}\n{}\n```", content.language, content.value)
|
||||
}
|
||||
MarkedString::String(e) => e,
|
||||
MarkedString::String(content) => content,
|
||||
})
|
||||
.join("\n\n---\n"),
|
||||
HoverContents::Scalar(MarkedString::String(contents)) => contents,
|
||||
HoverContents::Scalar(MarkedString::LanguageString(contents)) => {
|
||||
format!("```{}\n{}\n```", contents.language, contents.value)
|
||||
}
|
||||
lsp_types::HoverContents::Markup(e) => {
|
||||
match e.kind {
|
||||
MarkupKind::Markdown => e.value,
|
||||
lsp_types::HoverContents::Markup(content) => {
|
||||
match content.kind {
|
||||
MarkupKind::Markdown => content.value,
|
||||
// todo: escape
|
||||
MarkupKind::PlainText => e.value,
|
||||
MarkupKind::PlainText => content.value,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -70,8 +70,8 @@ pub fn path_to_url(path: &Path) -> anyhow::Result<Url> {
|
|||
return Ok(Url::parse(&format!("untitled:{}", untitled.display()))?);
|
||||
}
|
||||
|
||||
Url::from_file_path(path).or_else(|e| {
|
||||
let _: () = e;
|
||||
Url::from_file_path(path).or_else(|never| {
|
||||
let _: () = never;
|
||||
|
||||
anyhow::bail!("could not convert path to URI: path: {path:?}",)
|
||||
})
|
||||
|
|
|
@ -64,7 +64,7 @@ struct OnEnterWorker<'a> {
|
|||
impl OnEnterWorker<'_> {
|
||||
fn indent_of(&self, of: usize) -> String {
|
||||
let all_text = self.source.text();
|
||||
let start = all_text[..of].rfind('\n').map(|e| e + 1);
|
||||
let start = all_text[..of].rfind('\n').map(|lf_offset| lf_offset + 1);
|
||||
let indent_size = all_text[start.unwrap_or_default()..of].chars().count();
|
||||
" ".repeat(indent_size)
|
||||
}
|
||||
|
@ -87,7 +87,7 @@ impl OnEnterWorker<'_> {
|
|||
.children()
|
||||
.skip(leaf.index().saturating_sub(first_index))
|
||||
.take_while(skipper)
|
||||
.filter(|e| matches!(e.kind(), SyntaxKind::LineComment))
|
||||
.filter(|child| matches!(child.kind(), SyntaxKind::LineComment))
|
||||
.count();
|
||||
|
||||
let comment_prefix = {
|
||||
|
|
|
@ -63,7 +63,7 @@ pub(crate) fn prepare_renaming(
|
|||
def: &Definition,
|
||||
) -> Option<(String, Option<LspRange>)> {
|
||||
let name = def.name().clone();
|
||||
let (def_fid, _def_range) = def.def_at(ctx.shared()).clone()?;
|
||||
let (def_fid, _def_range) = def.location(ctx.shared()).clone()?;
|
||||
|
||||
if def_fid.package().is_some() {
|
||||
crate::log_debug_ct!(
|
||||
|
|
|
@ -139,19 +139,19 @@ impl ReferencesWorker<'_> {
|
|||
|
||||
fn push_idents<'b>(
|
||||
&mut self,
|
||||
s: &Source,
|
||||
u: &Url,
|
||||
src: &Source,
|
||||
url: &Url,
|
||||
idents: impl Iterator<Item = (&'b Span, &'b Interned<RefExpr>)>,
|
||||
) {
|
||||
self.push_ranges(s, u, idents.map(|e| e.0));
|
||||
self.push_ranges(src, url, idents.map(|(span, _)| span));
|
||||
}
|
||||
|
||||
fn push_ranges<'b>(&mut self, s: &Source, u: &Url, rs: impl Iterator<Item = &'b Span>) {
|
||||
self.references.extend(rs.filter_map(|span| {
|
||||
fn push_ranges<'b>(&mut self, src: &Source, url: &Url, spans: impl Iterator<Item = &'b Span>) {
|
||||
self.references.extend(spans.filter_map(|span| {
|
||||
// todo: this is not necessarily a name span
|
||||
let range = self.ctx.ctx.to_lsp_range(s.range(*span)?, s);
|
||||
let range = self.ctx.ctx.to_lsp_range(src.range(*span)?, src);
|
||||
Some(LspLocation {
|
||||
uri: u.clone(),
|
||||
uri: url.clone(),
|
||||
range,
|
||||
})
|
||||
}));
|
||||
|
|
|
@ -58,7 +58,7 @@ impl StatefulRequest for RenameRequest {
|
|||
self.new_name
|
||||
};
|
||||
|
||||
let def_fid = def.def_at(ctx.shared())?.0;
|
||||
let def_fid = def.location(ctx.shared())?.0;
|
||||
let old_path = ctx.path_for_id(def_fid).ok()?;
|
||||
|
||||
let rename_loc = Path::new(ref_path_str.as_str());
|
||||
|
|
|
@ -43,16 +43,15 @@ impl SemanticRequest for SymbolRequest {
|
|||
continue;
|
||||
};
|
||||
let uri = path_to_url(&path).unwrap();
|
||||
let res =
|
||||
get_lexical_hierarchy(source.clone(), LexicalScopeKind::Symbol).map(|symbols| {
|
||||
filter_document_symbols(
|
||||
&symbols,
|
||||
self.pattern.as_deref(),
|
||||
&source,
|
||||
&uri,
|
||||
ctx.position_encoding(),
|
||||
)
|
||||
});
|
||||
let res = get_lexical_hierarchy(&source, LexicalScopeKind::Symbol).map(|symbols| {
|
||||
filter_document_symbols(
|
||||
&symbols,
|
||||
self.pattern.as_deref(),
|
||||
&source,
|
||||
&uri,
|
||||
ctx.position_encoding(),
|
||||
)
|
||||
});
|
||||
|
||||
if let Some(mut res) = res {
|
||||
symbols.append(&mut res)
|
||||
|
@ -65,28 +64,29 @@ impl SemanticRequest for SymbolRequest {
|
|||
|
||||
#[allow(deprecated)]
|
||||
fn filter_document_symbols(
|
||||
symbols: &[LexicalHierarchy],
|
||||
hierarchy: &[LexicalHierarchy],
|
||||
query_string: Option<&str>,
|
||||
source: &Source,
|
||||
uri: &Url,
|
||||
position_encoding: PositionEncoding,
|
||||
) -> Vec<SymbolInformation> {
|
||||
symbols
|
||||
hierarchy
|
||||
.iter()
|
||||
.flat_map(|e| {
|
||||
[e].into_iter()
|
||||
.chain(e.children.as_deref().into_iter().flatten())
|
||||
.flat_map(|hierarchy| {
|
||||
[hierarchy]
|
||||
.into_iter()
|
||||
.chain(hierarchy.children.as_deref().into_iter().flatten())
|
||||
})
|
||||
.flat_map(|e| {
|
||||
if query_string.is_some_and(|s| !e.info.name.contains(s)) {
|
||||
.flat_map(|hierarchy| {
|
||||
if query_string.is_some_and(|s| !hierarchy.info.name.contains(s)) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
|
||||
let rng = typst_to_lsp::range(hierarchy.info.range.clone(), source, position_encoding);
|
||||
|
||||
Some(SymbolInformation {
|
||||
name: e.info.name.to_string(),
|
||||
kind: e.info.kind.clone().try_into().unwrap(),
|
||||
name: hierarchy.info.name.to_string(),
|
||||
kind: hierarchy.info.kind.clone().try_into().unwrap(),
|
||||
tags: None,
|
||||
deprecated: None,
|
||||
location: LspLocation {
|
||||
|
|
|
@ -449,7 +449,7 @@ impl Decl {
|
|||
decl: this.clone(),
|
||||
step: Some(def.clone()),
|
||||
root: Some(def),
|
||||
val,
|
||||
term: val,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -667,7 +667,7 @@ pub struct RefExpr {
|
|||
pub decl: DeclExpr,
|
||||
pub step: Option<Expr>,
|
||||
pub root: Option<Expr>,
|
||||
pub val: Option<Ty>,
|
||||
pub term: Option<Ty>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
|
|
|
@ -82,7 +82,7 @@ pub(crate) fn compute_docstring(
|
|||
let checker = DocsChecker {
|
||||
fid,
|
||||
ctx,
|
||||
vars: HashMap::new(),
|
||||
var_bounds: HashMap::new(),
|
||||
globals: HashMap::default(),
|
||||
locals: SnapshotMap::default(),
|
||||
next_id: 0,
|
||||
|
@ -98,10 +98,13 @@ pub(crate) fn compute_docstring(
|
|||
struct DocsChecker<'a> {
|
||||
fid: TypstFileId,
|
||||
ctx: &'a Arc<SharedContext>,
|
||||
/// The typing on definitions
|
||||
vars: HashMap<DeclExpr, TypeVarBounds>,
|
||||
/// The bounds of type variables
|
||||
var_bounds: HashMap<DeclExpr, TypeVarBounds>,
|
||||
/// Global name bindings
|
||||
globals: HashMap<EcoString, Option<Ty>>,
|
||||
/// Local name bindings
|
||||
locals: SnapshotMap<EcoString, Ty>,
|
||||
/// Next generated variable id
|
||||
next_id: u32,
|
||||
}
|
||||
|
||||
|
@ -114,8 +117,8 @@ impl DocsChecker<'_> {
|
|||
convert_docs(self.ctx, &docs).and_then(|converted| identify_pat_docs(&converted));
|
||||
|
||||
let converted = match Self::fallback_docs(converted, &docs) {
|
||||
Ok(c) => c,
|
||||
Err(e) => return Some(e),
|
||||
Ok(docs) => docs,
|
||||
Err(err) => return Some(err),
|
||||
};
|
||||
|
||||
let module = self.ctx.module_by_str(docs);
|
||||
|
@ -138,7 +141,7 @@ impl DocsChecker<'_> {
|
|||
|
||||
Some(DocString {
|
||||
docs: Some(self.ctx.remove_html(converted.docs)),
|
||||
var_bounds: self.vars,
|
||||
var_bounds: self.var_bounds,
|
||||
vars: params,
|
||||
res_ty,
|
||||
})
|
||||
|
@ -148,13 +151,13 @@ impl DocsChecker<'_> {
|
|||
let converted = convert_docs(self.ctx, &docs).and_then(identify_tidy_module_docs);
|
||||
|
||||
let converted = match Self::fallback_docs(converted, &docs) {
|
||||
Ok(c) => c,
|
||||
Err(e) => return Some(e),
|
||||
Ok(docs) => docs,
|
||||
Err(err) => return Some(err),
|
||||
};
|
||||
|
||||
Some(DocString {
|
||||
docs: Some(self.ctx.remove_html(converted.docs)),
|
||||
var_bounds: self.vars,
|
||||
var_bounds: self.var_bounds,
|
||||
vars: BTreeMap::new(),
|
||||
res_ty: None,
|
||||
})
|
||||
|
@ -162,13 +165,13 @@ impl DocsChecker<'_> {
|
|||
|
||||
fn fallback_docs<T>(converted: Result<T, EcoString>, docs: &str) -> Result<T, DocString> {
|
||||
match converted {
|
||||
Ok(c) => Ok(c),
|
||||
Err(e) => {
|
||||
let e = e.replace("`", "\\`");
|
||||
Ok(converted) => Ok(converted),
|
||||
Err(err) => {
|
||||
let err = err.replace("`", "\\`");
|
||||
let max_consecutive_backticks = docs
|
||||
.chars()
|
||||
.fold((0, 0), |(max, count), c| {
|
||||
if c == '`' {
|
||||
.fold((0, 0), |(max, count), ch| {
|
||||
if ch == '`' {
|
||||
(max.max(count + 1), count + 1)
|
||||
} else {
|
||||
(max, 0)
|
||||
|
@ -177,7 +180,7 @@ impl DocsChecker<'_> {
|
|||
.0;
|
||||
let backticks = "`".repeat((max_consecutive_backticks + 1).max(3));
|
||||
let fallback_docs = eco_format!(
|
||||
"```\nfailed to parse docs: {e}\n```\n\n{backticks}typ\n{docs}\n{backticks}\n"
|
||||
"```\nfailed to parse docs: {err}\n```\n\n{backticks}typ\n{docs}\n{backticks}\n"
|
||||
);
|
||||
Err(DocString {
|
||||
docs: Some(fallback_docs),
|
||||
|
@ -199,20 +202,20 @@ impl DocsChecker<'_> {
|
|||
};
|
||||
let bounds = TypeVarBounds::new(var, DynTypeBounds::default());
|
||||
let var = bounds.as_type();
|
||||
self.vars.insert(encoded, bounds);
|
||||
self.var_bounds.insert(encoded, bounds);
|
||||
var
|
||||
}
|
||||
|
||||
fn check_type_strings(&mut self, m: &Module, strs: &str) -> Option<Ty> {
|
||||
let mut types = vec![];
|
||||
for name in strs.split(",").map(|e| e.trim()) {
|
||||
fn check_type_strings(&mut self, m: &Module, inputs: &str) -> Option<Ty> {
|
||||
let mut terms = vec![];
|
||||
for name in inputs.split(",").map(|ty| ty.trim()) {
|
||||
let Some(ty) = self.check_type_ident(m, name) else {
|
||||
continue;
|
||||
};
|
||||
types.push(ty);
|
||||
terms.push(ty);
|
||||
}
|
||||
|
||||
Some(Ty::from_types(types.into_iter()))
|
||||
Some(Ty::from_types(terms.into_iter()))
|
||||
}
|
||||
|
||||
fn check_type_ident(&mut self, m: &Module, name: &str) -> Option<Ty> {
|
||||
|
@ -308,21 +311,21 @@ impl DocsChecker<'_> {
|
|||
| ast::Expr::Float(..)
|
||||
| ast::Expr::Numeric(..)
|
||||
| ast::Expr::Str(..) => SharedContext::const_eval(s).map(|v| Ty::Value(InsTy::new(v))),
|
||||
ast::Expr::Binary(b) => {
|
||||
ast::Expr::Binary(binary) => {
|
||||
let mut components = Vec::with_capacity(2);
|
||||
components.push(self.check_type_expr(m, b.lhs())?);
|
||||
components.push(self.check_type_expr(m, binary.lhs())?);
|
||||
|
||||
let mut expr = b.rhs();
|
||||
while let ast::Expr::Binary(b) = expr {
|
||||
if b.op() != ast::BinOp::Or {
|
||||
let mut rhs = binary.rhs();
|
||||
while let ast::Expr::Binary(binary) = rhs {
|
||||
if binary.op() != ast::BinOp::Or {
|
||||
break;
|
||||
}
|
||||
|
||||
components.push(self.check_type_expr(m, b.lhs())?);
|
||||
expr = b.rhs();
|
||||
components.push(self.check_type_expr(m, binary.lhs())?);
|
||||
rhs = binary.rhs();
|
||||
}
|
||||
|
||||
components.push(self.check_type_expr(m, expr)?);
|
||||
components.push(self.check_type_expr(m, rhs)?);
|
||||
Some(Ty::from_types(components.into_iter()))
|
||||
}
|
||||
ast::Expr::FuncCall(c) => match c.callee() {
|
||||
|
@ -353,42 +356,49 @@ impl DocsChecker<'_> {
|
|||
},
|
||||
ast::Expr::Closure(c) => {
|
||||
crate::log_debug_ct!("check doc closure annotation: {c:?}");
|
||||
let mut pos = vec![];
|
||||
let mut named = BTreeMap::new();
|
||||
let mut rest = None;
|
||||
let mut pos_all = vec![];
|
||||
let mut named_all = BTreeMap::new();
|
||||
let mut spread_right = None;
|
||||
let snap = self.locals.snapshot();
|
||||
|
||||
let sig = None.or_else(|| {
|
||||
for param in c.params().children() {
|
||||
match param {
|
||||
ast::Param::Pos(ast::Pattern::Normal(ast::Expr::Ident(i))) => {
|
||||
let name = i.get().clone();
|
||||
let base_ty = self.generate_var(name.as_str().into());
|
||||
self.locals.insert(name, base_ty.clone());
|
||||
pos.push(base_ty);
|
||||
ast::Param::Pos(ast::Pattern::Normal(ast::Expr::Ident(pos))) => {
|
||||
let name = pos.get().clone();
|
||||
let term = self.generate_var(name.as_str().into());
|
||||
self.locals.insert(name, term.clone());
|
||||
pos_all.push(term);
|
||||
}
|
||||
ast::Param::Pos(_) => {
|
||||
pos.push(Ty::Any);
|
||||
ast::Param::Pos(_pos) => {
|
||||
pos_all.push(Ty::Any);
|
||||
}
|
||||
ast::Param::Named(e) => {
|
||||
let exp = self.check_type_expr(m, e.expr()).unwrap_or(Ty::Any);
|
||||
named.insert(e.name().into(), exp);
|
||||
ast::Param::Named(named) => {
|
||||
let term = self.check_type_expr(m, named.expr()).unwrap_or(Ty::Any);
|
||||
named_all.insert(named.name().into(), term);
|
||||
}
|
||||
// todo: spread left/right
|
||||
ast::Param::Spread(s) => {
|
||||
let Some(i) = s.sink_ident() else {
|
||||
ast::Param::Spread(spread) => {
|
||||
let Some(sink) = spread.sink_ident() else {
|
||||
continue;
|
||||
};
|
||||
let name = i.get().clone();
|
||||
let rest_ty = self.generate_var(name.as_str().into());
|
||||
self.locals.insert(name, rest_ty.clone());
|
||||
rest = Some(rest_ty);
|
||||
let sink_name = sink.get().clone();
|
||||
let rest_term = self.generate_var(sink_name.as_str().into());
|
||||
self.locals.insert(sink_name, rest_term.clone());
|
||||
spread_right = Some(rest_term);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let body = self.check_type_expr(m, c.body())?;
|
||||
let sig = SigTy::new(pos.into_iter(), named, None, rest, Some(body)).into();
|
||||
let sig = SigTy::new(
|
||||
pos_all.into_iter(),
|
||||
named_all,
|
||||
None,
|
||||
spread_right,
|
||||
Some(body),
|
||||
)
|
||||
.into();
|
||||
|
||||
Some(Ty::Func(sig))
|
||||
});
|
||||
|
|
|
@ -186,10 +186,12 @@ impl ExprInfo {
|
|||
|
||||
pub fn is_exported(&self, decl: &Interned<Decl>) -> bool {
|
||||
let of = Expr::Decl(decl.clone());
|
||||
self.exports.get(decl.name()).map_or(false, |e| match e {
|
||||
Expr::Ref(r) => r.root == Some(of),
|
||||
e => *e == of,
|
||||
})
|
||||
self.exports
|
||||
.get(decl.name())
|
||||
.map_or(false, |export| match export {
|
||||
Expr::Ref(ref_expr) => ref_expr.root == Some(of),
|
||||
exprt => *exprt == of,
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
@ -207,8 +209,8 @@ impl ExprInfo {
|
|||
std::fs::create_dir_all(scopes.parent().unwrap()).unwrap();
|
||||
{
|
||||
let mut scopes = std::fs::File::create(scopes).unwrap();
|
||||
for (s, e) in self.exprs.iter() {
|
||||
writeln!(scopes, "{s:?} -> {e}").unwrap();
|
||||
for (span, expr) in self.exprs.iter() {
|
||||
writeln!(scopes, "{span:?} -> {expr}").unwrap();
|
||||
}
|
||||
}
|
||||
let imports = vpath.with_extension("imports.expr");
|
||||
|
@ -376,29 +378,29 @@ impl ExprWorker<'_> {
|
|||
Shorthand(..) => Expr::Type(Ty::Builtin(BuiltinTy::Content)),
|
||||
SmartQuote(..) => Expr::Type(Ty::Builtin(BuiltinTy::Content)),
|
||||
|
||||
Strong(e) => {
|
||||
let body = self.check_inline_markup(e.body());
|
||||
Strong(strong) => {
|
||||
let body = self.check_inline_markup(strong.body());
|
||||
self.check_element::<StrongElem>(eco_vec![body])
|
||||
}
|
||||
Emph(e) => {
|
||||
let body = self.check_inline_markup(e.body());
|
||||
Emph(emph) => {
|
||||
let body = self.check_inline_markup(emph.body());
|
||||
self.check_element::<EmphElem>(eco_vec![body])
|
||||
}
|
||||
Heading(e) => {
|
||||
let body = self.check_markup(e.body());
|
||||
Heading(heading) => {
|
||||
let body = self.check_markup(heading.body());
|
||||
self.check_element::<HeadingElem>(eco_vec![body])
|
||||
}
|
||||
List(e) => {
|
||||
let body = self.check_markup(e.body());
|
||||
List(item) => {
|
||||
let body = self.check_markup(item.body());
|
||||
self.check_element::<ListElem>(eco_vec![body])
|
||||
}
|
||||
Enum(e) => {
|
||||
let body = self.check_markup(e.body());
|
||||
Enum(item) => {
|
||||
let body = self.check_markup(item.body());
|
||||
self.check_element::<EnumElem>(eco_vec![body])
|
||||
}
|
||||
Term(t) => {
|
||||
let term = self.check_markup(t.term());
|
||||
let description = self.check_markup(t.description());
|
||||
Term(item) => {
|
||||
let term = self.check_markup(item.term());
|
||||
let description = self.check_markup(item.description());
|
||||
self.check_element::<TermsElem>(eco_vec![term, description])
|
||||
}
|
||||
|
||||
|
@ -407,19 +409,19 @@ impl ExprWorker<'_> {
|
|||
MathDelimited(math_delimited) => {
|
||||
self.check_math(math_delimited.body().to_untyped().children())
|
||||
}
|
||||
MathAttach(ma) => {
|
||||
let base = ma.base().to_untyped().clone();
|
||||
let bottom = ma.bottom().unwrap_or_default().to_untyped().clone();
|
||||
let top = ma.top().unwrap_or_default().to_untyped().clone();
|
||||
MathAttach(attach) => {
|
||||
let base = attach.base().to_untyped().clone();
|
||||
let bottom = attach.bottom().unwrap_or_default().to_untyped().clone();
|
||||
let top = attach.top().unwrap_or_default().to_untyped().clone();
|
||||
self.check_math([base, bottom, top].iter())
|
||||
}
|
||||
MathPrimes(..) => Expr::Type(Ty::Builtin(BuiltinTy::None)),
|
||||
MathFrac(mf) => {
|
||||
let num = mf.num().to_untyped().clone();
|
||||
let denom = mf.denom().to_untyped().clone();
|
||||
MathFrac(frac) => {
|
||||
let num = frac.num().to_untyped().clone();
|
||||
let denom = frac.denom().to_untyped().clone();
|
||||
self.check_math([num, denom].iter())
|
||||
}
|
||||
MathRoot(mr) => self.check(mr.radicand()),
|
||||
MathRoot(root) => self.check(root.radicand()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -437,14 +439,14 @@ impl ExprWorker<'_> {
|
|||
ast::LetBindingKind::Closure(..) => {
|
||||
typed.init().map_or_else(none_expr, |expr| self.check(expr))
|
||||
}
|
||||
ast::LetBindingKind::Normal(p) => {
|
||||
ast::LetBindingKind::Normal(pat) => {
|
||||
// Check init expression before pattern checking
|
||||
let body = typed.init().map(|e| self.defer(e));
|
||||
let body = typed.init().map(|init| self.defer(init));
|
||||
|
||||
let span = p.span();
|
||||
let span = pat.span();
|
||||
let decl = Decl::pattern(span).into();
|
||||
self.check_docstring(&decl, DefKind::Variable);
|
||||
let pattern = self.check_pattern(p);
|
||||
let pattern = self.check_pattern(pat);
|
||||
Expr::Let(Interned::new(LetExpr {
|
||||
span,
|
||||
pattern,
|
||||
|
@ -613,7 +615,7 @@ impl ExprWorker<'_> {
|
|||
decl: mod_var.clone(),
|
||||
step: mod_expr.clone(),
|
||||
root: mod_expr.clone(),
|
||||
val: None,
|
||||
term: None,
|
||||
};
|
||||
crate::log_debug_ct!("create import variable: {mod_ref:?}");
|
||||
let mod_ref = Interned::new(mod_ref);
|
||||
|
@ -778,7 +780,7 @@ impl ExprWorker<'_> {
|
|||
decl: import_path.into(),
|
||||
step: Some(module.clone()),
|
||||
root: Some(module.clone()),
|
||||
val: None,
|
||||
term: None,
|
||||
};
|
||||
self.resolve_as(ref_expr.into());
|
||||
Some(module)
|
||||
|
@ -834,7 +836,7 @@ impl ExprWorker<'_> {
|
|||
decl: old.clone(),
|
||||
root,
|
||||
step,
|
||||
val,
|
||||
term: val,
|
||||
});
|
||||
self.resolve_as(ref_expr.clone());
|
||||
|
||||
|
@ -843,7 +845,7 @@ impl ExprWorker<'_> {
|
|||
decl: new.clone(),
|
||||
root: ref_expr.root.clone(),
|
||||
step: Some(ref_expr.decl.clone().into()),
|
||||
val: ref_expr.val.clone(),
|
||||
term: ref_expr.term.clone(),
|
||||
});
|
||||
self.resolve_as(ref_expr.clone());
|
||||
}
|
||||
|
@ -1012,39 +1014,41 @@ impl ExprWorker<'_> {
|
|||
})
|
||||
}
|
||||
|
||||
fn check_inline_markup(&mut self, m: ast::Markup) -> Expr {
|
||||
self.check_in_mode(m.to_untyped().children(), InterpretMode::Markup)
|
||||
fn check_inline_markup(&mut self, markup: ast::Markup) -> Expr {
|
||||
self.check_in_mode(markup.to_untyped().children(), InterpretMode::Markup)
|
||||
}
|
||||
|
||||
fn check_markup(&mut self, m: ast::Markup) -> Expr {
|
||||
self.with_scope(|this| this.check_inline_markup(m))
|
||||
fn check_markup(&mut self, markup: ast::Markup) -> Expr {
|
||||
self.with_scope(|this| this.check_inline_markup(markup))
|
||||
}
|
||||
|
||||
fn check_code(&mut self, m: ast::Code) -> Expr {
|
||||
self.with_scope(|this| this.check_in_mode(m.to_untyped().children(), InterpretMode::Code))
|
||||
fn check_code(&mut self, code: ast::Code) -> Expr {
|
||||
self.with_scope(|this| {
|
||||
this.check_in_mode(code.to_untyped().children(), InterpretMode::Code)
|
||||
})
|
||||
}
|
||||
|
||||
fn check_math(&mut self, root: SyntaxNodeChildren) -> Expr {
|
||||
self.check_in_mode(root, InterpretMode::Math)
|
||||
fn check_math(&mut self, children: SyntaxNodeChildren) -> Expr {
|
||||
self.check_in_mode(children, InterpretMode::Math)
|
||||
}
|
||||
|
||||
fn check_root_scope(&mut self, root: SyntaxNodeChildren) {
|
||||
fn check_root_scope(&mut self, children: SyntaxNodeChildren) {
|
||||
self.init_stage = true;
|
||||
self.check_in_mode(root, InterpretMode::Markup);
|
||||
self.check_in_mode(children, InterpretMode::Markup);
|
||||
self.init_stage = false;
|
||||
}
|
||||
|
||||
fn check_in_mode(&mut self, root: SyntaxNodeChildren, mode: InterpretMode) -> Expr {
|
||||
fn check_in_mode(&mut self, children: SyntaxNodeChildren, mode: InterpretMode) -> Expr {
|
||||
let old_mode = self.lexical.mode;
|
||||
self.lexical.mode = mode;
|
||||
|
||||
// collect all comments before the definition
|
||||
self.comment_matcher.reset();
|
||||
|
||||
let mut children = Vec::with_capacity(4);
|
||||
for n in root {
|
||||
let mut items = Vec::with_capacity(4);
|
||||
for n in children {
|
||||
if let Some(expr) = n.cast::<ast::Expr>() {
|
||||
children.push(self.check(expr));
|
||||
items.push(self.check(expr));
|
||||
self.comment_matcher.reset();
|
||||
continue;
|
||||
}
|
||||
|
@ -1054,12 +1058,14 @@ impl ExprWorker<'_> {
|
|||
}
|
||||
|
||||
self.lexical.mode = old_mode;
|
||||
Expr::Block(children.into())
|
||||
Expr::Block(items.into())
|
||||
}
|
||||
|
||||
fn check_ref(&mut self, r: ast::Ref) -> Expr {
|
||||
let ident = Interned::new(Decl::ref_(r));
|
||||
let body = r.supplement().map(|s| self.check(ast::Expr::Content(s)));
|
||||
fn check_ref(&mut self, ref_node: ast::Ref) -> Expr {
|
||||
let ident = Interned::new(Decl::ref_(ref_node));
|
||||
let body = ref_node
|
||||
.supplement()
|
||||
.map(|block| self.check(ast::Expr::Content(block)));
|
||||
let ref_expr = ContentRefExpr {
|
||||
ident: ident.clone(),
|
||||
of: None,
|
||||
|
@ -1070,7 +1076,7 @@ impl ExprWorker<'_> {
|
|||
decl: ident,
|
||||
step: None,
|
||||
root: None,
|
||||
val: None,
|
||||
term: None,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
|
@ -1108,7 +1114,7 @@ impl ExprWorker<'_> {
|
|||
decl,
|
||||
root,
|
||||
step,
|
||||
val,
|
||||
term: val,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1133,8 +1139,8 @@ impl ExprWorker<'_> {
|
|||
}
|
||||
|
||||
fn eval_expr(&mut self, expr: ast::Expr, mode: InterpretMode) -> ConcolicExpr {
|
||||
if let Some(s) = self.const_eval_expr(expr) {
|
||||
return (None, Some(Ty::Value(InsTy::new(s))));
|
||||
if let Some(term) = self.const_eval_expr(expr) {
|
||||
return (None, Some(Ty::Value(InsTy::new(term))));
|
||||
}
|
||||
crate::log_debug_ct!("checking expr: {expr:?}");
|
||||
|
||||
|
@ -1142,8 +1148,8 @@ impl ExprWorker<'_> {
|
|||
ast::Expr::FieldAccess(f) => {
|
||||
let field = Decl::ident_ref(f.field());
|
||||
|
||||
let (expr, val) = self.eval_expr(f.target(), mode);
|
||||
let val = val.and_then(|v| {
|
||||
let (expr, term) = self.eval_expr(f.target(), mode);
|
||||
let term = term.and_then(|v| {
|
||||
// todo: use type select
|
||||
// v.select(field.name()).ok()
|
||||
match v {
|
||||
|
@ -1153,13 +1159,13 @@ impl ExprWorker<'_> {
|
|||
_ => None,
|
||||
}
|
||||
});
|
||||
let expr = expr.map(|e| Expr::Select(SelectExpr::new(field.into(), e)));
|
||||
(expr, val)
|
||||
let sel = expr.map(|expr| Expr::Select(SelectExpr::new(field.into(), expr)));
|
||||
(sel, term)
|
||||
}
|
||||
ast::Expr::Ident(ident) => {
|
||||
let res = self.eval_ident(&ident.get().into(), mode);
|
||||
crate::log_debug_ct!("checking expr: {expr:?} -> res: {res:?}");
|
||||
res
|
||||
let expr_term = self.eval_ident(&ident.get().into(), mode);
|
||||
crate::log_debug_ct!("checking expr: {expr:?} -> res: {expr_term:?}");
|
||||
expr_term
|
||||
}
|
||||
_ => (None, None),
|
||||
}
|
||||
|
@ -1200,9 +1206,9 @@ impl ExprWorker<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn fold_expr(&mut self, src: Option<Expr>) -> Option<Expr> {
|
||||
crate::log_debug_ct!("folding cc: {src:?}");
|
||||
match src {
|
||||
fn fold_expr(&mut self, expr: Option<Expr>) -> Option<Expr> {
|
||||
crate::log_debug_ct!("folding cc: {expr:?}");
|
||||
match expr {
|
||||
Some(Expr::Decl(decl)) if !decl.is_def() => {
|
||||
crate::log_debug_ct!("folding decl: {decl:?}");
|
||||
let (x, y) = self.eval_ident(decl.name(), InterpretMode::Code);
|
||||
|
@ -1210,7 +1216,7 @@ impl ExprWorker<'_> {
|
|||
}
|
||||
Some(Expr::Ref(r)) => {
|
||||
crate::log_debug_ct!("folding ref: {r:?}");
|
||||
self.fold_expr_and_val((r.root.clone(), r.val.clone()))
|
||||
self.fold_expr_and_val((r.root.clone(), r.term.clone()))
|
||||
}
|
||||
Some(Expr::Select(r)) => {
|
||||
let lhs = self.fold_expr(Some(r.lhs.clone()));
|
||||
|
@ -1235,7 +1241,7 @@ impl ExprWorker<'_> {
|
|||
decl: key.clone(),
|
||||
root: Some(lhs.clone()),
|
||||
step: Some(selected.clone()),
|
||||
val: None,
|
||||
term: None,
|
||||
});
|
||||
self.resolve_as(select_ref.clone());
|
||||
self.resolve_as_(span, select_ref);
|
||||
|
|
|
@ -11,7 +11,7 @@ use typst::syntax::{
|
|||
use typst_shim::utils::LazyHash;
|
||||
|
||||
pub(crate) fn get_lexical_hierarchy(
|
||||
source: Source,
|
||||
source: &Source,
|
||||
g: LexicalScopeKind,
|
||||
) -> Option<EcoVec<LexicalHierarchy>> {
|
||||
let b = std::time::Instant::now();
|
||||
|
@ -31,8 +31,8 @@ pub(crate) fn get_lexical_hierarchy(
|
|||
));
|
||||
let res = match worker.get_symbols(root) {
|
||||
Ok(()) => Some(()),
|
||||
Err(e) => {
|
||||
log::error!("lexical hierarchy analysis failed: {:?}", e);
|
||||
Err(err) => {
|
||||
log::error!("lexical hierarchy analysis failed: {err:?}");
|
||||
None
|
||||
}
|
||||
};
|
||||
|
@ -301,34 +301,34 @@ impl LexicalHierarchyWorker {
|
|||
}
|
||||
|
||||
// reverse order for correct symbol affection
|
||||
let name_offset = pattern.as_ref().map(|e| e.offset());
|
||||
let name_offset = pattern.as_ref().map(|node| node.offset());
|
||||
self.get_symbols_in_opt_with(pattern, IdentContext::Var)?;
|
||||
self.get_symbols_in_first_expr(node.children().rev(), name_offset)?;
|
||||
}
|
||||
SyntaxKind::ForLoop => {
|
||||
let pattern = node.children().find(|n| n.is::<ast::Pattern>());
|
||||
let pattern = node.children().find(|child| child.is::<ast::Pattern>());
|
||||
let iterable = node
|
||||
.children()
|
||||
.skip_while(|n| n.kind() != SyntaxKind::In)
|
||||
.find(|e| e.is::<ast::Expr>());
|
||||
.skip_while(|child| child.kind() != SyntaxKind::In)
|
||||
.find(|child| child.is::<ast::Expr>());
|
||||
|
||||
let iterable_offset = iterable.as_ref().map(|e| e.offset());
|
||||
let iterable_offset = iterable.as_ref().map(|node| node.offset());
|
||||
self.get_symbols_in_opt_with(iterable, IdentContext::Ref)?;
|
||||
self.get_symbols_in_opt_with(pattern, IdentContext::Var)?;
|
||||
self.get_symbols_in_first_expr(node.children().rev(), iterable_offset)?;
|
||||
}
|
||||
SyntaxKind::Closure => {
|
||||
let n = node.children().next();
|
||||
let first_child = node.children().next();
|
||||
let current = self.stack.last_mut().unwrap().1.len();
|
||||
if let Some(n) = n {
|
||||
if n.kind() == SyntaxKind::Ident {
|
||||
self.get_symbols_with(n, IdentContext::Func)?;
|
||||
if let Some(first_child) = first_child {
|
||||
if first_child.kind() == SyntaxKind::Ident {
|
||||
self.get_symbols_with(first_child, IdentContext::Func)?;
|
||||
}
|
||||
}
|
||||
let body = node
|
||||
.children()
|
||||
.rev()
|
||||
.find(|n| n.cast::<ast::Expr>().is_some());
|
||||
.find(|child| child.cast::<ast::Expr>().is_some());
|
||||
if let Some(body) = body {
|
||||
let symbol = if current == self.stack.last().unwrap().1.len() {
|
||||
// Closure has no updated symbol stack
|
||||
|
@ -398,7 +398,7 @@ impl LexicalHierarchyWorker {
|
|||
) -> anyhow::Result<()> {
|
||||
let body = nodes.find(|n| n.is::<ast::Expr>());
|
||||
if let Some(body) = body {
|
||||
if iterable_offset.is_some_and(|e| e >= body.offset()) {
|
||||
if iterable_offset.is_some_and(|offset| offset >= body.offset()) {
|
||||
return Ok(());
|
||||
}
|
||||
self.get_symbols_with(body, IdentContext::Ref)?;
|
||||
|
|
|
@ -90,8 +90,8 @@ pub fn descent_decls<T>(
|
|||
return Some(t);
|
||||
}
|
||||
}
|
||||
Some(ast::Imports::Items(e)) => {
|
||||
for item in e.iter() {
|
||||
Some(ast::Imports::Items(items)) => {
|
||||
for item in items.iter() {
|
||||
if let Some(t) = recv(DescentDecl::Ident(item.bound_name())) {
|
||||
return Some(t);
|
||||
}
|
||||
|
@ -389,13 +389,13 @@ fn possible_in_code_trivia(sk: SyntaxKind) -> bool {
|
|||
/// - Parenthesized expression.
|
||||
/// - Identifier on the right side of a dot operator (field access).
|
||||
fn classify_lvalue(mut node: LinkedNode) -> Option<LinkedNode> {
|
||||
while let Some(e) = node.cast::<ast::Parenthesized>() {
|
||||
node = node.find(e.expr().span())?;
|
||||
while let Some(paren_expr) = node.cast::<ast::Parenthesized>() {
|
||||
node = node.find(paren_expr.expr().span())?;
|
||||
}
|
||||
if let Some(e) = node.parent() {
|
||||
if let Some(f) = e.cast::<ast::FieldAccess>() {
|
||||
if node.span() == f.field().span() {
|
||||
return Some(e.clone());
|
||||
if let Some(parent) = node.parent() {
|
||||
if let Some(field_access) = parent.cast::<ast::FieldAccess>() {
|
||||
if node.span() == field_access.field().span() {
|
||||
return Some(parent.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -541,7 +541,7 @@ impl ArgClass<'_> {
|
|||
/// A cursor class is either an [`SyntaxClass`] or other things under cursor.
|
||||
/// One thing does not necessarily refer to some exact node. For example, a cursor
|
||||
/// moving after some comma in a function call is identified as a
|
||||
/// [`CursorClass::Param`].
|
||||
/// [`CursorClass::Arg`].
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CursorClass<'a> {
|
||||
/// A cursor on an argument.
|
||||
|
@ -608,12 +608,12 @@ pub fn classify_cursor_by_context<'a>(
|
|||
) -> Option<CursorClass<'a>> {
|
||||
use SyntaxClass::*;
|
||||
let context_syntax = classify_syntax(context.clone(), node.offset())?;
|
||||
let inner_syntax = classify_syntax(node.clone(), node.offset())?;
|
||||
let node_syntax = classify_syntax(node.clone(), node.offset())?;
|
||||
|
||||
match context_syntax {
|
||||
Callee(callee)
|
||||
if matches!(inner_syntax, Normal(..) | Label { .. } | Ref(..))
|
||||
&& !matches!(inner_syntax, Callee(..)) =>
|
||||
if matches!(node_syntax, Normal(..) | Label { .. } | Ref(..))
|
||||
&& !matches!(node_syntax, Callee(..)) =>
|
||||
{
|
||||
let parent = callee.parent()?;
|
||||
let args = match parent.cast::<ast::Expr>() {
|
||||
|
@ -829,7 +829,7 @@ mod tests {
|
|||
use typst::syntax::{is_newline, Source};
|
||||
use typst_shim::syntax::LinkedNodeExt;
|
||||
|
||||
fn map_base(source: &str, mapper: impl Fn(&LinkedNode, usize) -> char) -> String {
|
||||
fn map_node(source: &str, mapper: impl Fn(&LinkedNode, usize) -> char) -> String {
|
||||
let source = Source::detached(source.to_owned());
|
||||
let root = LinkedNode::new(source.root());
|
||||
let mut output_mapping = String::new();
|
||||
|
@ -853,8 +853,8 @@ mod tests {
|
|||
.collect::<String>()
|
||||
}
|
||||
|
||||
fn map_deref(source: &str) -> String {
|
||||
map_base(source, |root, cursor| {
|
||||
fn map_syntax(source: &str) -> String {
|
||||
map_node(source, |root, cursor| {
|
||||
let node = root.leaf_at_compat(cursor);
|
||||
let kind = node.and_then(|node| classify_syntax(node, cursor));
|
||||
match kind {
|
||||
|
@ -870,8 +870,8 @@ mod tests {
|
|||
})
|
||||
}
|
||||
|
||||
fn map_check(source: &str) -> String {
|
||||
map_base(source, |root, cursor| {
|
||||
fn map_cursor(source: &str) -> String {
|
||||
map_node(source, |root, cursor| {
|
||||
let node = root.leaf_at_compat(cursor);
|
||||
let kind = node.and_then(|node| classify_cursor(node));
|
||||
match kind {
|
||||
|
@ -889,7 +889,7 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn test_get_deref_target() {
|
||||
assert_snapshot!(map_deref(r#"#let x = 1
|
||||
assert_snapshot!(map_syntax(r#"#let x = 1
|
||||
Text
|
||||
= Heading #let y = 2;
|
||||
== Heading"#).trim(), @r"
|
||||
|
@ -901,11 +901,11 @@ Text
|
|||
nnnnvvnnn
|
||||
== Heading
|
||||
");
|
||||
assert_snapshot!(map_deref(r#"#let f(x);"#).trim(), @r"
|
||||
assert_snapshot!(map_syntax(r#"#let f(x);"#).trim(), @r"
|
||||
#let f(x);
|
||||
nnnnv v
|
||||
");
|
||||
assert_snapshot!(map_deref(r#"#{
|
||||
assert_snapshot!(map_syntax(r#"#{
|
||||
calc.
|
||||
}"#).trim(), @r"
|
||||
#{
|
||||
|
@ -919,7 +919,7 @@ Text
|
|||
|
||||
#[test]
|
||||
fn test_get_check_target() {
|
||||
assert_snapshot!(map_check(r#"#let x = 1
|
||||
assert_snapshot!(map_cursor(r#"#let x = 1
|
||||
Text
|
||||
= Heading #let y = 2;
|
||||
== Heading"#).trim(), @r"
|
||||
|
@ -931,31 +931,31 @@ Text
|
|||
nnnnnnnnn
|
||||
== Heading
|
||||
");
|
||||
assert_snapshot!(map_check(r#"#let f(x);"#).trim(), @r"
|
||||
assert_snapshot!(map_cursor(r#"#let f(x);"#).trim(), @r"
|
||||
#let f(x);
|
||||
nnnnn n
|
||||
");
|
||||
assert_snapshot!(map_check(r#"#f(1, 2) Test"#).trim(), @r"
|
||||
assert_snapshot!(map_cursor(r#"#f(1, 2) Test"#).trim(), @r"
|
||||
#f(1, 2) Test
|
||||
npppppp
|
||||
");
|
||||
assert_snapshot!(map_check(r#"#() Test"#).trim(), @r"
|
||||
assert_snapshot!(map_cursor(r#"#() Test"#).trim(), @r"
|
||||
#() Test
|
||||
ee
|
||||
");
|
||||
assert_snapshot!(map_check(r#"#(1) Test"#).trim(), @r"
|
||||
assert_snapshot!(map_cursor(r#"#(1) Test"#).trim(), @r"
|
||||
#(1) Test
|
||||
PPP
|
||||
");
|
||||
assert_snapshot!(map_check(r#"#(a: 1) Test"#).trim(), @r"
|
||||
assert_snapshot!(map_cursor(r#"#(a: 1) Test"#).trim(), @r"
|
||||
#(a: 1) Test
|
||||
eeeeee
|
||||
");
|
||||
assert_snapshot!(map_check(r#"#(1, 2) Test"#).trim(), @r"
|
||||
assert_snapshot!(map_cursor(r#"#(1, 2) Test"#).trim(), @r"
|
||||
#(1, 2) Test
|
||||
eeeeee
|
||||
");
|
||||
assert_snapshot!(map_check(r#"#(1, 2)
|
||||
assert_snapshot!(map_cursor(r#"#(1, 2)
|
||||
Test"#).trim(), @r"
|
||||
#(1, 2)
|
||||
eeeeee
|
||||
|
|
|
@ -123,8 +123,8 @@ pub(crate) fn scan_workspace_files<T>(
|
|||
if !de
|
||||
.path()
|
||||
.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.is_some_and(|e| ext.is_match(e))
|
||||
.and_then(|err| err.to_str())
|
||||
.is_some_and(|err| ext.is_match(err))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -19,30 +19,30 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
|
||||
pub fn write_expr(&mut self, expr: &Expr) -> fmt::Result {
|
||||
match expr {
|
||||
Expr::Block(s) => self.write_seq(s),
|
||||
Expr::Array(a) => self.write_array(a),
|
||||
Expr::Dict(d) => self.write_dict(d),
|
||||
Expr::Args(a) => self.write_args(a),
|
||||
Expr::Pattern(p) => self.write_pattern(p),
|
||||
Expr::Element(e) => self.write_element(e),
|
||||
Expr::Unary(u) => self.write_unary(u),
|
||||
Expr::Binary(b) => self.write_binary(b),
|
||||
Expr::Apply(a) => self.write_apply(a),
|
||||
Expr::Block(exprs) => self.write_seq(exprs),
|
||||
Expr::Array(elems) => self.write_array(elems),
|
||||
Expr::Dict(elems) => self.write_dict(elems),
|
||||
Expr::Args(args) => self.write_args(args),
|
||||
Expr::Pattern(pat) => self.write_pattern(pat),
|
||||
Expr::Element(elem) => self.write_element(elem),
|
||||
Expr::Unary(unary) => self.write_unary(unary),
|
||||
Expr::Binary(binary) => self.write_binary(binary),
|
||||
Expr::Apply(apply) => self.write_apply(apply),
|
||||
Expr::Func(func) => self.write_func(func),
|
||||
Expr::Let(l) => self.write_let(l),
|
||||
Expr::Show(s) => self.write_show(s),
|
||||
Expr::Set(s) => self.write_set(s),
|
||||
Expr::Ref(r) => self.write_ref(r),
|
||||
Expr::ContentRef(r) => self.write_content_ref(r),
|
||||
Expr::Select(s) => self.write_select(s),
|
||||
Expr::Import(i) => self.write_import(i),
|
||||
Expr::Include(i) => self.write_include(i),
|
||||
Expr::Contextual(c) => self.write_contextual(c),
|
||||
Expr::Conditional(c) => self.write_conditional(c),
|
||||
Expr::WhileLoop(w) => self.write_while_loop(w),
|
||||
Expr::ForLoop(f) => self.write_for_loop(f),
|
||||
Expr::Type(t) => self.write_type(t),
|
||||
Expr::Decl(d) => self.write_decl(d),
|
||||
Expr::Let(let_expr) => self.write_let(let_expr),
|
||||
Expr::Show(show) => self.write_show(show),
|
||||
Expr::Set(set) => self.write_set(set),
|
||||
Expr::Ref(reference) => self.write_ref(reference),
|
||||
Expr::ContentRef(content_ref) => self.write_content_ref(content_ref),
|
||||
Expr::Select(sel) => self.write_select(sel),
|
||||
Expr::Import(import) => self.write_import(import),
|
||||
Expr::Include(include) => self.write_include(include),
|
||||
Expr::Contextual(contextual) => self.write_contextual(contextual),
|
||||
Expr::Conditional(if_expr) => self.write_conditional(if_expr),
|
||||
Expr::WhileLoop(while_expr) => self.write_while_loop(while_expr),
|
||||
Expr::ForLoop(for_expr) => self.write_for_loop(for_expr),
|
||||
Expr::Type(ty) => self.write_type(ty),
|
||||
Expr::Decl(decl) => self.write_decl(decl),
|
||||
Expr::Star => self.write_star(),
|
||||
}
|
||||
}
|
||||
|
@ -51,10 +51,10 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
write!(self.f, "{:indent$}", "", indent = self.indent)
|
||||
}
|
||||
|
||||
fn write_seq(&mut self, s: &Interned<Vec<Expr>>) -> fmt::Result {
|
||||
fn write_seq(&mut self, exprs: &Interned<Vec<Expr>>) -> fmt::Result {
|
||||
writeln!(self.f, "[")?;
|
||||
self.indent += 1;
|
||||
for expr in s.iter() {
|
||||
for expr in exprs.iter() {
|
||||
self.write_indent()?;
|
||||
self.write_expr(expr)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -64,10 +64,10 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
write!(self.f, "]")
|
||||
}
|
||||
|
||||
fn write_array(&mut self, a: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
fn write_array(&mut self, elems: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
writeln!(self.f, "(")?;
|
||||
self.indent += 1;
|
||||
for arg in a.iter() {
|
||||
for arg in elems.iter() {
|
||||
self.write_indent()?;
|
||||
self.write_arg(arg)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -77,10 +77,10 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_dict(&mut self, d: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
fn write_dict(&mut self, elems: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
writeln!(self.f, "(:")?;
|
||||
self.indent += 1;
|
||||
for arg in d.iter() {
|
||||
for arg in elems.iter() {
|
||||
self.write_indent()?;
|
||||
self.write_arg(arg)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -90,9 +90,9 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_args(&mut self, a: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
fn write_args(&mut self, args: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
writeln!(self.f, "(")?;
|
||||
for arg in a.iter() {
|
||||
for arg in args.iter() {
|
||||
self.write_indent()?;
|
||||
self.write_arg(arg)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -101,32 +101,32 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_arg(&mut self, a: &ArgExpr) -> fmt::Result {
|
||||
match a {
|
||||
ArgExpr::Pos(e) => self.write_expr(e),
|
||||
ArgExpr::Named(n) => {
|
||||
let (k, v) = n.as_ref();
|
||||
fn write_arg(&mut self, arg: &ArgExpr) -> fmt::Result {
|
||||
match arg {
|
||||
ArgExpr::Pos(pos) => self.write_expr(pos),
|
||||
ArgExpr::Named(named) => {
|
||||
let (k, v) = named.as_ref();
|
||||
write!(self.f, "{k:?}: ")?;
|
||||
self.write_expr(v)
|
||||
}
|
||||
ArgExpr::NamedRt(n) => {
|
||||
let n = n.as_ref();
|
||||
self.write_expr(&n.0)?;
|
||||
ArgExpr::NamedRt(named) => {
|
||||
let (key, val) = named.as_ref();
|
||||
self.write_expr(key)?;
|
||||
write!(self.f, ": ")?;
|
||||
self.write_expr(&n.1)
|
||||
self.write_expr(val)
|
||||
}
|
||||
ArgExpr::Spread(e) => {
|
||||
ArgExpr::Spread(spread) => {
|
||||
write!(self.f, "..")?;
|
||||
self.write_expr(e)
|
||||
self.write_expr(spread)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn write_pattern(&mut self, p: &Pattern) -> fmt::Result {
|
||||
match p {
|
||||
Pattern::Expr(e) => self.write_expr(e),
|
||||
Pattern::Simple(s) => self.write_decl(s),
|
||||
Pattern::Sig(p) => self.write_pattern_sig(p),
|
||||
Pattern::Expr(expr) => self.write_expr(expr),
|
||||
Pattern::Simple(decl) => self.write_decl(decl),
|
||||
Pattern::Sig(sig) => self.write_pattern_sig(sig),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -161,10 +161,10 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_element(&mut self, e: &Interned<ElementExpr>) -> fmt::Result {
|
||||
fn write_element(&mut self, elem: &Interned<ElementExpr>) -> fmt::Result {
|
||||
self.f.write_str("elem(\n")?;
|
||||
self.indent += 1;
|
||||
for v in &e.content {
|
||||
for v in &elem.content {
|
||||
self.write_indent()?;
|
||||
self.write_expr(v)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -174,26 +174,26 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_unary(&mut self, u: &Interned<UnExpr>) -> fmt::Result {
|
||||
write!(self.f, "un({:?})(", u.op)?;
|
||||
self.write_expr(&u.lhs)?;
|
||||
fn write_unary(&mut self, unary: &Interned<UnExpr>) -> fmt::Result {
|
||||
write!(self.f, "un({:?})(", unary.op)?;
|
||||
self.write_expr(&unary.lhs)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_binary(&mut self, b: &Interned<BinExpr>) -> fmt::Result {
|
||||
let [lhs, rhs] = b.operands();
|
||||
write!(self.f, "bin({:?})(", b.op)?;
|
||||
fn write_binary(&mut self, binary: &Interned<BinExpr>) -> fmt::Result {
|
||||
let [lhs, rhs] = binary.operands();
|
||||
write!(self.f, "bin({:?})(", binary.op)?;
|
||||
self.write_expr(lhs)?;
|
||||
self.f.write_str(", ")?;
|
||||
self.write_expr(rhs)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_apply(&mut self, a: &Interned<ApplyExpr>) -> fmt::Result {
|
||||
fn write_apply(&mut self, apply: &Interned<ApplyExpr>) -> fmt::Result {
|
||||
write!(self.f, "apply(")?;
|
||||
self.write_expr(&a.callee)?;
|
||||
self.write_expr(&apply.callee)?;
|
||||
self.f.write_str(", ")?;
|
||||
self.write_expr(&a.args)?;
|
||||
self.write_expr(&apply.args)?;
|
||||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
|
@ -205,121 +205,121 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_let(&mut self, l: &Interned<LetExpr>) -> fmt::Result {
|
||||
fn write_let(&mut self, let_expr: &Interned<LetExpr>) -> fmt::Result {
|
||||
write!(self.f, "let(")?;
|
||||
self.write_pattern(&l.pattern)?;
|
||||
if let Some(body) = &l.body {
|
||||
self.write_pattern(&let_expr.pattern)?;
|
||||
if let Some(body) = &let_expr.body {
|
||||
write!(self.f, " = ")?;
|
||||
self.write_expr(body)?;
|
||||
}
|
||||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_show(&mut self, s: &Interned<ShowExpr>) -> fmt::Result {
|
||||
fn write_show(&mut self, show: &Interned<ShowExpr>) -> fmt::Result {
|
||||
write!(self.f, "show(")?;
|
||||
if let Some(selector) = &s.selector {
|
||||
if let Some(selector) = &show.selector {
|
||||
self.write_expr(selector)?;
|
||||
self.f.write_str(", ")?;
|
||||
}
|
||||
self.write_expr(&s.edit)?;
|
||||
self.write_expr(&show.edit)?;
|
||||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_set(&mut self, s: &Interned<SetExpr>) -> fmt::Result {
|
||||
fn write_set(&mut self, set: &Interned<SetExpr>) -> fmt::Result {
|
||||
write!(self.f, "set(")?;
|
||||
self.write_expr(&s.target)?;
|
||||
self.write_expr(&set.target)?;
|
||||
self.f.write_str(", ")?;
|
||||
self.write_expr(&s.args)?;
|
||||
if let Some(cond) = &s.cond {
|
||||
self.write_expr(&set.args)?;
|
||||
if let Some(cond) = &set.cond {
|
||||
self.f.write_str(", ")?;
|
||||
self.write_expr(cond)?;
|
||||
}
|
||||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_ref(&mut self, r: &Interned<RefExpr>) -> fmt::Result {
|
||||
write!(self.f, "ref({:?}", r.decl)?;
|
||||
if let Some(step) = &r.step {
|
||||
fn write_ref(&mut self, reference: &Interned<RefExpr>) -> fmt::Result {
|
||||
write!(self.f, "ref({:?}", reference.decl)?;
|
||||
if let Some(step) = &reference.step {
|
||||
self.f.write_str(", step = ")?;
|
||||
self.write_expr(step)?;
|
||||
}
|
||||
if let Some(of) = &r.root {
|
||||
if let Some(of) = &reference.root {
|
||||
self.f.write_str(", root = ")?;
|
||||
self.write_expr(of)?;
|
||||
}
|
||||
if let Some(val) = &r.val {
|
||||
if let Some(val) = &reference.term {
|
||||
write!(self.f, ", val = {val:?}")?;
|
||||
}
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_content_ref(&mut self, r: &Interned<ContentRefExpr>) -> fmt::Result {
|
||||
write!(self.f, "content_ref({:?}", r.ident)?;
|
||||
if let Some(of) = &r.of {
|
||||
fn write_content_ref(&mut self, content_ref: &Interned<ContentRefExpr>) -> fmt::Result {
|
||||
write!(self.f, "content_ref({:?}", content_ref.ident)?;
|
||||
if let Some(of) = &content_ref.of {
|
||||
self.f.write_str(", ")?;
|
||||
self.write_decl(of)?;
|
||||
}
|
||||
if let Some(val) = &r.body {
|
||||
if let Some(val) = &content_ref.body {
|
||||
self.write_expr(val)?;
|
||||
}
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_select(&mut self, s: &Interned<SelectExpr>) -> fmt::Result {
|
||||
fn write_select(&mut self, sel: &Interned<SelectExpr>) -> fmt::Result {
|
||||
write!(self.f, "(")?;
|
||||
self.write_expr(&s.lhs)?;
|
||||
self.write_expr(&sel.lhs)?;
|
||||
self.f.write_str(").")?;
|
||||
self.write_decl(&s.key)
|
||||
self.write_decl(&sel.key)
|
||||
}
|
||||
|
||||
fn write_import(&mut self, i: &Interned<ImportExpr>) -> fmt::Result {
|
||||
fn write_import(&mut self, import: &Interned<ImportExpr>) -> fmt::Result {
|
||||
self.f.write_str("import(")?;
|
||||
self.write_decl(&i.decl.decl)?;
|
||||
self.write_decl(&import.decl.decl)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_include(&mut self, i: &Interned<IncludeExpr>) -> fmt::Result {
|
||||
fn write_include(&mut self, include: &Interned<IncludeExpr>) -> fmt::Result {
|
||||
self.f.write_str("include(")?;
|
||||
self.write_expr(&i.source)?;
|
||||
self.write_expr(&include.source)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_contextual(&mut self, c: &Interned<Expr>) -> fmt::Result {
|
||||
fn write_contextual(&mut self, contextual: &Interned<Expr>) -> fmt::Result {
|
||||
self.f.write_str("contextual(")?;
|
||||
self.write_expr(c)?;
|
||||
self.write_expr(contextual)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_conditional(&mut self, c: &Interned<IfExpr>) -> fmt::Result {
|
||||
fn write_conditional(&mut self, if_expr: &Interned<IfExpr>) -> fmt::Result {
|
||||
self.f.write_str("if(")?;
|
||||
self.write_expr(&c.cond)?;
|
||||
self.write_expr(&if_expr.cond)?;
|
||||
self.f.write_str(", then = ")?;
|
||||
self.write_expr(&c.then)?;
|
||||
self.write_expr(&if_expr.then)?;
|
||||
self.f.write_str(", else = ")?;
|
||||
self.write_expr(&c.else_)?;
|
||||
self.write_expr(&if_expr.else_)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_while_loop(&mut self, w: &Interned<WhileExpr>) -> fmt::Result {
|
||||
fn write_while_loop(&mut self, while_expr: &Interned<WhileExpr>) -> fmt::Result {
|
||||
self.f.write_str("while(")?;
|
||||
self.write_expr(&w.cond)?;
|
||||
self.write_expr(&while_expr.cond)?;
|
||||
self.f.write_str(", ")?;
|
||||
self.write_expr(&w.body)?;
|
||||
self.write_expr(&while_expr.body)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_for_loop(&mut self, f: &Interned<ForExpr>) -> fmt::Result {
|
||||
fn write_for_loop(&mut self, for_expr: &Interned<ForExpr>) -> fmt::Result {
|
||||
self.f.write_str("for(")?;
|
||||
self.write_pattern(&f.pattern)?;
|
||||
self.write_pattern(&for_expr.pattern)?;
|
||||
self.f.write_str(", ")?;
|
||||
self.write_expr(&f.iter)?;
|
||||
self.write_expr(&for_expr.iter)?;
|
||||
self.f.write_str(", ")?;
|
||||
self.write_expr(&f.body)?;
|
||||
self.write_expr(&for_expr.body)?;
|
||||
self.f.write_str(")")
|
||||
}
|
||||
|
||||
fn write_type(&mut self, t: &Ty) -> fmt::Result {
|
||||
let formatted = t.describe();
|
||||
fn write_type(&mut self, ty: &Ty) -> fmt::Result {
|
||||
let formatted = ty.describe();
|
||||
let formatted = formatted.as_deref().unwrap_or("any");
|
||||
self.f.write_str(formatted)
|
||||
}
|
||||
|
@ -352,27 +352,27 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
|
|||
pub fn write_expr(&mut self, expr: &Expr) -> fmt::Result {
|
||||
match expr {
|
||||
Expr::Block(..) => self.f.write_str("Expr(..)"),
|
||||
Expr::Array(a) => self.write_array(a),
|
||||
Expr::Dict(d) => self.write_dict(d),
|
||||
Expr::Args(a) => self.write_args(a),
|
||||
Expr::Pattern(p) => self.write_pattern(p),
|
||||
Expr::Element(e) => self.write_element(e),
|
||||
Expr::Unary(u) => self.write_unary(u),
|
||||
Expr::Binary(b) => self.write_binary(b),
|
||||
Expr::Apply(a) => self.write_apply(a),
|
||||
Expr::Array(elems) => self.write_array(elems),
|
||||
Expr::Dict(elems) => self.write_dict(elems),
|
||||
Expr::Args(args) => self.write_args(args),
|
||||
Expr::Pattern(pat) => self.write_pattern(pat),
|
||||
Expr::Element(elem) => self.write_element(elem),
|
||||
Expr::Unary(unary) => self.write_unary(unary),
|
||||
Expr::Binary(binary) => self.write_binary(binary),
|
||||
Expr::Apply(apply) => self.write_apply(apply),
|
||||
Expr::Func(func) => self.write_func(func),
|
||||
Expr::Ref(r) => self.write_ref(r),
|
||||
Expr::ContentRef(r) => self.write_content_ref(r),
|
||||
Expr::Select(s) => self.write_select(s),
|
||||
Expr::Import(i) => self.write_import(i),
|
||||
Expr::Include(i) => self.write_include(i),
|
||||
Expr::Ref(ref_expr) => self.write_ref(ref_expr),
|
||||
Expr::ContentRef(content_ref) => self.write_content_ref(content_ref),
|
||||
Expr::Select(sel) => self.write_select(sel),
|
||||
Expr::Import(import) => self.write_import(import),
|
||||
Expr::Include(include) => self.write_include(include),
|
||||
Expr::Contextual(..) => self.f.write_str("content"),
|
||||
Expr::Let(..) | Expr::Show(..) | Expr::Set(..) => self.f.write_str("Expr(..)"),
|
||||
Expr::Conditional(..) | Expr::WhileLoop(..) | Expr::ForLoop(..) => {
|
||||
self.f.write_str("Expr(..)")
|
||||
}
|
||||
Expr::Type(t) => self.write_type(t),
|
||||
Expr::Decl(d) => self.write_decl(d),
|
||||
Expr::Type(ty) => self.write_type(ty),
|
||||
Expr::Decl(decl) => self.write_decl(decl),
|
||||
Expr::Star => self.f.write_str("*"),
|
||||
}
|
||||
}
|
||||
|
@ -381,10 +381,10 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
|
|||
write!(self.f, "{:indent$}", "", indent = self.indent)
|
||||
}
|
||||
|
||||
fn write_array(&mut self, a: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
if a.len() <= 1 {
|
||||
fn write_array(&mut self, elems: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
if elems.len() <= 1 {
|
||||
self.f.write_char('(')?;
|
||||
if let Some(arg) = a.first() {
|
||||
if let Some(arg) = elems.first() {
|
||||
self.write_arg(arg)?;
|
||||
self.f.write_str(",")?
|
||||
}
|
||||
|
@ -393,7 +393,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
|
|||
|
||||
writeln!(self.f, "(")?;
|
||||
self.indent += 1;
|
||||
for arg in a.iter() {
|
||||
for arg in elems.iter() {
|
||||
self.write_indent()?;
|
||||
self.write_arg(arg)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -403,10 +403,10 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
|
|||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_dict(&mut self, d: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
if d.len() <= 1 {
|
||||
fn write_dict(&mut self, elems: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
if elems.len() <= 1 {
|
||||
self.f.write_char('(')?;
|
||||
if let Some(arg) = d.first() {
|
||||
if let Some(arg) = elems.first() {
|
||||
self.write_arg(arg)?;
|
||||
} else {
|
||||
self.f.write_str(":")?
|
||||
|
@ -416,7 +416,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
|
|||
|
||||
writeln!(self.f, "(:")?;
|
||||
self.indent += 1;
|
||||
for arg in d.iter() {
|
||||
for arg in elems.iter() {
|
||||
self.write_indent()?;
|
||||
self.write_arg(arg)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -426,9 +426,9 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
|
|||
write!(self.f, ")")
|
||||
}
|
||||
|
||||
fn write_args(&mut self, a: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
fn write_args(&mut self, args: &Interned<Vec<ArgExpr>>) -> fmt::Result {
|
||||
writeln!(self.f, "(")?;
|
||||
for arg in a.iter() {
|
||||
for arg in args.iter() {
|
||||
self.write_indent()?;
|
||||
self.write_arg(arg)?;
|
||||
self.f.write_str(",\n")?;
|
||||
|
@ -437,57 +437,57 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
write!(self.f, ")")
}

fn write_arg(&mut self, a: &ArgExpr) -> fmt::Result {
match a {
ArgExpr::Pos(e) => self.write_expr(e),
ArgExpr::Named(n) => {
let (k, v) = n.as_ref();
fn write_arg(&mut self, arg: &ArgExpr) -> fmt::Result {
match arg {
ArgExpr::Pos(pos) => self.write_expr(pos),
ArgExpr::Named(named) => {
let (k, v) = named.as_ref();
self.write_decl(k)?;
write!(self.f, ": ")?;
self.write_expr(v)
}
ArgExpr::NamedRt(n) => {
let n = n.as_ref();
ArgExpr::NamedRt(named) => {
let n = named.as_ref();
self.write_expr(&n.0)?;
write!(self.f, ": ")?;
self.write_expr(&n.1)
}
ArgExpr::Spread(e) => {
ArgExpr::Spread(spread) => {
write!(self.f, "..")?;
self.write_expr(e)
self.write_expr(spread)
}
}
}

pub fn write_pattern(&mut self, p: &Pattern) -> fmt::Result {
match p {
Pattern::Expr(e) => self.write_expr(e),
Pattern::Simple(s) => self.write_decl(s),
Pattern::Sig(p) => self.write_pattern_sig(p),
pub fn write_pattern(&mut self, pat: &Pattern) -> fmt::Result {
match pat {
Pattern::Expr(expr) => self.write_expr(expr),
Pattern::Simple(decl) => self.write_decl(decl),
Pattern::Sig(sig) => self.write_pattern_sig(sig),
}
}

fn write_pattern_sig(&mut self, p: &PatternSig) -> fmt::Result {
fn write_pattern_sig(&mut self, sig: &PatternSig) -> fmt::Result {
self.f.write_str("pat(\n")?;
self.indent += 1;
for pos in &p.pos {
for pos in &sig.pos {
self.write_indent()?;
self.write_pattern(pos)?;
self.f.write_str(",\n")?;
}
for (name, pat) in &p.named {
for (name, pat) in &sig.named {
self.write_indent()?;
write!(self.f, "{name:?} = ")?;
self.write_pattern(pat)?;
self.f.write_str(",\n")?;
}
if let Some((k, rest)) = &p.spread_left {
if let Some((k, rest)) = &sig.spread_left {
self.write_indent()?;
write!(self.f, "..{k:?}: ")?;
self.write_pattern(rest)?;
self.f.write_str(",\n")?;
}
if let Some((k, rest)) = &p.spread_right {
if let Some((k, rest)) = &sig.spread_right {
self.write_indent()?;
write!(self.f, "..{k:?}: ")?;
self.write_pattern(rest)?;

@ -498,66 +498,66 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
self.f.write_str(")")
}

fn write_element(&mut self, e: &Interned<ElementExpr>) -> fmt::Result {
write!(self.f, "{:?}", e.elem.name())
fn write_element(&mut self, elem: &Interned<ElementExpr>) -> fmt::Result {
write!(self.f, "{:?}", elem.elem.name())
}

fn write_unary(&mut self, u: &Interned<UnExpr>) -> fmt::Result {
fn write_unary(&mut self, unary: &Interned<UnExpr>) -> fmt::Result {
use UnaryOp::*;
match u.op {
match unary.op {
Pos => {
self.f.write_str("+")?;
self.write_expr(&u.lhs)
self.write_expr(&unary.lhs)
}
Neg => {
self.f.write_str("-")?;
self.write_expr(&u.lhs)
self.write_expr(&unary.lhs)
}
Not => {
self.f.write_str("not ")?;
self.write_expr(&u.lhs)
self.write_expr(&unary.lhs)
}
Return => {
self.f.write_str("return ")?;
self.write_expr(&u.lhs)
self.write_expr(&unary.lhs)
}
Context => {
self.f.write_str("context ")?;
self.write_expr(&u.lhs)
self.write_expr(&unary.lhs)
}
Spread => {
self.f.write_str("..")?;
self.write_expr(&u.lhs)
self.write_expr(&unary.lhs)
}
NotElementOf => {
self.f.write_str("not elementOf(")?;
self.write_expr(&u.lhs)?;
self.write_expr(&unary.lhs)?;
self.f.write_str(")")
}
ElementOf => {
self.f.write_str("elementOf(")?;
self.write_expr(&u.lhs)?;
self.write_expr(&unary.lhs)?;
self.f.write_str(")")
}
TypeOf => {
self.f.write_str("typeOf(")?;
self.write_expr(&u.lhs)?;
self.write_expr(&unary.lhs)?;
self.f.write_str(")")
}
}
}

fn write_binary(&mut self, b: &Interned<BinExpr>) -> fmt::Result {
let [lhs, rhs] = b.operands();
fn write_binary(&mut self, binary: &Interned<BinExpr>) -> fmt::Result {
let [lhs, rhs] = binary.operands();
self.write_expr(lhs)?;
write!(self.f, " {} ", b.op.as_str())?;
write!(self.f, " {} ", binary.op.as_str())?;
self.write_expr(rhs)
}

fn write_apply(&mut self, a: &Interned<ApplyExpr>) -> fmt::Result {
self.write_expr(&a.callee)?;
fn write_apply(&mut self, apply: &Interned<ApplyExpr>) -> fmt::Result {
self.write_expr(&apply.callee)?;
write!(self.f, "(")?;
self.write_expr(&a.args)?;
self.write_expr(&apply.args)?;
write!(self.f, ")")
}

@ -565,42 +565,42 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
self.write_decl(&func.decl)
}

fn write_ref(&mut self, r: &Interned<RefExpr>) -> fmt::Result {
if let Some(r) = &r.root {
return self.write_expr(r);
fn write_ref(&mut self, resolved: &Interned<RefExpr>) -> fmt::Result {
if let Some(root) = &resolved.root {
return self.write_expr(root);
}
if let Some(r) = &r.val {
return self.write_type(r);
if let Some(term) = &resolved.term {
return self.write_type(term);
}

write!(self.f, "undefined({:?})", r.decl)
write!(self.f, "undefined({:?})", resolved.decl)
}

fn write_content_ref(&mut self, r: &Interned<ContentRefExpr>) -> fmt::Result {
write!(self.f, "@{:?}", r.ident)
fn write_content_ref(&mut self, content_ref: &Interned<ContentRefExpr>) -> fmt::Result {
write!(self.f, "@{:?}", content_ref.ident)
}

fn write_select(&mut self, s: &Interned<SelectExpr>) -> fmt::Result {
fn write_select(&mut self, sel: &Interned<SelectExpr>) -> fmt::Result {
write!(self.f, "")?;
self.write_expr(&s.lhs)?;
self.write_expr(&sel.lhs)?;
self.f.write_str(".")?;
self.write_decl(&s.key)
self.write_decl(&sel.key)
}

fn write_import(&mut self, i: &Interned<ImportExpr>) -> fmt::Result {
fn write_import(&mut self, import: &Interned<ImportExpr>) -> fmt::Result {
self.f.write_str("import(")?;
self.write_decl(&i.decl.decl)?;
self.write_decl(&import.decl.decl)?;
self.f.write_str(")")
}

fn write_include(&mut self, i: &Interned<IncludeExpr>) -> fmt::Result {
fn write_include(&mut self, include: &Interned<IncludeExpr>) -> fmt::Result {
self.f.write_str("include(")?;
self.write_expr(&i.source)?;
self.write_expr(&include.source)?;
self.f.write_str(")")
}

fn write_type(&mut self, t: &Ty) -> fmt::Result {
let formatted = t.describe();
fn write_type(&mut self, ty: &Ty) -> fmt::Result {
let formatted = ty.describe();
let formatted = formatted.as_deref().unwrap_or("any");
self.f.write_str(formatted)
}

@ -97,42 +97,42 @@ impl PathPreference {
}

impl Ty {
pub(crate) fn from_cast_info(s: &CastInfo) -> Ty {
match &s {
pub(crate) fn from_cast_info(ty: &CastInfo) -> Ty {
match &ty {
CastInfo::Any => Ty::Any,
CastInfo::Value(v, doc) => Ty::Value(InsTy::new_doc(v.clone(), *doc)),
CastInfo::Value(val, doc) => Ty::Value(InsTy::new_doc(val.clone(), *doc)),
CastInfo::Type(ty) => Ty::Builtin(BuiltinTy::Type(*ty)),
CastInfo::Union(e) => {
Ty::iter_union(UnionIter(vec![e.as_slice().iter()]).map(Self::from_cast_info))
CastInfo::Union(types) => {
Ty::iter_union(UnionIter(vec![types.as_slice().iter()]).map(Self::from_cast_info))
}
}
}

pub(crate) fn from_param_site(f: &Func, p: &ParamInfo) -> Ty {
pub(crate) fn from_param_site(func: &Func, param: &ParamInfo) -> Ty {
use typst::foundations::func::Repr;
match f.inner() {
match func.inner() {
Repr::Element(..) | Repr::Native(..) => {
if let Some(ty) = param_mapping(f, p) {
if let Some(ty) = param_mapping(func, param) {
return ty;
}
}
Repr::Closure(_) => {}
Repr::With(w) => return Ty::from_param_site(&w.0, p),
Repr::With(w) => return Ty::from_param_site(&w.0, param),
};

Self::from_cast_info(&p.input)
Self::from_cast_info(&param.input)
}

pub(crate) fn from_return_site(f: &Func, c: &'_ CastInfo) -> Self {
pub(crate) fn from_return_site(func: &Func, ty: &'_ CastInfo) -> Self {
use typst::foundations::func::Repr;
match f.inner() {
Repr::Element(e) => return Ty::Builtin(BuiltinTy::Element(*e)),
match func.inner() {
Repr::Element(elem) => return Ty::Builtin(BuiltinTy::Element(*elem)),
Repr::Closure(_) => {}
Repr::With(w) => return Ty::from_return_site(&w.0, c),
Repr::With(w) => return Ty::from_return_site(&w.0, ty),
Repr::Native(_) => {}
};

Self::from_cast_info(c)
Self::from_cast_info(ty)
}
}

@ -144,12 +144,12 @@ impl<'a> Iterator for UnionIter<'a> {
fn next(&mut self) -> Option<Self::Item> {
loop {
let iter = self.0.last_mut()?;
if let Some(e) = iter.next() {
match e {
CastInfo::Union(e) => {
self.0.push(e.as_slice().iter());
if let Some(ty) = iter.next() {
match ty {
CastInfo::Union(types) => {
self.0.push(types.as_slice().iter());
}
_ => return Some(e),
_ => return Some(ty),
}
} else {
self.0.pop();

@ -268,7 +268,7 @@ impl fmt::Debug for BuiltinTy {
BuiltinTy::Radius => write!(f, "Radius"),
BuiltinTy::TypeType(ty) => write!(f, "TypeType({})", ty.short_name()),
BuiltinTy::Type(ty) => write!(f, "Type({})", ty.short_name()),
BuiltinTy::Element(e) => e.fmt(f),
BuiltinTy::Element(elem) => elem.fmt(f),
BuiltinTy::Tag(tag) => {
let (name, id) = tag.as_ref();
if let Some(id) = id {

@ -153,20 +153,20 @@ impl Ty {
}

/// Create a union type from an iterator of types
pub fn from_types(e: impl ExactSizeIterator<Item = Ty>) -> Self {
if e.len() == 0 {
pub fn from_types(iter: impl ExactSizeIterator<Item = Ty>) -> Self {
if iter.len() == 0 {
Ty::Any
} else if e.len() == 1 {
let mut e = e;
e.next().unwrap()
} else if iter.len() == 1 {
let mut iter = iter;
iter.next().unwrap()
} else {
Self::iter_union(e)
Self::iter_union(iter)
}
}

/// Create a union type from an iterator of types
pub fn iter_union(e: impl IntoIterator<Item = Ty>) -> Self {
let mut v: Vec<Ty> = e.into_iter().collect();
pub fn iter_union(iter: impl IntoIterator<Item = Ty>) -> Self {
let mut v: Vec<Ty> = iter.into_iter().collect();
v.sort();
Ty::Union(Interned::new(v))
}
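
A minimal usage sketch of the two constructors above (illustrative only, not part of the diff; it relies solely on the Ty::Any, Ty::from_types, and Ty::iter_union items shown in this hunk):

// An empty iterator collapses to `Ty::Any`, a single element is returned
// as-is, and anything longer becomes a sorted `Ty::Union`.
fn union_of(tys: Vec<Ty>) -> Ty {
    Ty::from_types(tys.into_iter())
}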
@ -681,7 +681,7 @@ impl RecordTy {
pub fn shape_fields(mut fields: Vec<(StrRef, Ty)>) -> (NameBone, Vec<Ty>) {
fields.sort_by(|a, b| a.0.cmp(&b.0));
let names = NameBone {
names: fields.iter().map(|e| e.0.clone()).collect(),
names: fields.iter().map(|(name, _)| name.clone()).collect(),
};
let types = fields.into_iter().map(|(_, ty)| ty).collect::<Vec<_>>();

@ -979,10 +979,10 @@ impl SigTy {

let pos = sig_stream.zip(arg_stream);
let common_ifaces = withs
.map(|e| e.iter().rev())
.map(|args_all| args_all.iter().rev())
.into_iter()
.flatten()
.flat_map(|w| self.common_iface_fields(w))
.flat_map(|args| self.common_iface_fields(args))
.chain(self.common_iface_fields(args));
let named = common_ifaces.map(|(_, l, r)| (l, r));

@ -1158,9 +1158,9 @@ impl IfTy {
}
}

/// A type scheme on a group of syntax structures (typing)
/// The type information on a group of syntax structures (typing)
#[derive(Default)]
pub struct TypeScheme {
pub struct TypeInfo {
/// Whether the typing is valid
pub valid: bool,
/// The belonging file id

@ -1181,7 +1181,7 @@ pub struct TypeScheme {
pub(super) cano_cache: Mutex<TypeCanoStore>,
}

impl TyCtx for TypeScheme {
impl TyCtx for TypeInfo {
fn global_bounds(&self, var: &Interned<TypeVar>, _pol: bool) -> Option<DynTypeBounds> {
let v = self.vars.get(&var.def)?;
Some(v.bounds.bounds().read().clone())

@ -1192,18 +1192,13 @@ impl TyCtx for TypeScheme {
}
}

impl TypeScheme {
// Get the type of a definition
// pub fn type_of_def(&self, def: DefId) -> Option<Ty> {
// Some(self.simplify(self.vars.get(&def).map(|e| e.as_type())?, false))
// }

impl TypeInfo {
/// Gets the type of a syntax structure
pub fn type_of_span(&self, site: Span) -> Option<Ty> {
self.mapping
.get(&site)
.cloned()
.map(|e| Ty::from_types(e.into_iter()))
.map(|types| Ty::from_types(types.into_iter()))
}

// todo: distinguish at least, at most
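
For orientation, a small sketch of how the renamed TypeInfo is queried elsewhere in this commit (based only on the type_of_span and simplify signatures visible in these hunks; the helper name is hypothetical):

// Look up the type recorded for a syntax span, then canonicalize it.
fn type_at_span(types: &TypeInfo, site: Span) -> Option<Ty> {
    let ty = types.type_of_span(site)?;
    Some(types.simplify(ty, false))
}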
@ -1261,7 +1256,7 @@ impl TypeScheme {
}
}

impl TyCtxMut for TypeScheme {
impl TyCtxMut for TypeInfo {
type Snap = ena::undo_log::Snapshot;

fn start_scope(&mut self) -> Self::Snap {

@ -164,17 +164,17 @@ impl IfaceCheckDriver<'_> {
}
}
}
Ty::Builtin(BuiltinTy::Type(e)) if self.value_as_iface() => {
Ty::Builtin(BuiltinTy::Type(b_ty)) if self.value_as_iface() => {
// todo: distinguish between element and function
self.checker
.check(Iface::Type { val: e, at: ty }, &mut self.ctx, pol);
.check(Iface::Type { val: b_ty, at: ty }, &mut self.ctx, pol);
}
Ty::Builtin(BuiltinTy::Element(e)) if self.value_as_iface() => {
Ty::Builtin(BuiltinTy::Element(elem)) if self.value_as_iface() => {
self.checker
.check(Iface::Element { val: e, at: ty }, &mut self.ctx, pol);
.check(Iface::Element { val: elem, at: ty }, &mut self.ctx, pol);
}
Ty::Builtin(BuiltinTy::Module(e)) => {
if let Decl::Module(m) = e.as_ref() {
Ty::Builtin(BuiltinTy::Module(module)) => {
if let Decl::Module(m) = module.as_ref() {
self.checker
.check(Iface::Module { val: m.fid, at: ty }, &mut self.ctx, pol);
}

@ -10,18 +10,18 @@ pub trait TyMutator {
Value(..) | Any | Boolean(..) | Builtin(..) => None,
Union(v) => Some(Union(self.mutate_vec(v, pol)?)),
Var(..) | Let(..) => None,
Array(e) => Some(Array(self.mutate(e, pol)?.into())),
Dict(r) => Some(Dict(self.mutate_record(r, pol)?.into())),
Tuple(e) => Some(Tuple(self.mutate_vec(e, pol)?)),
Func(f) => Some(Func(self.mutate_func(f, pol)?.into())),
Array(arr) => Some(Array(self.mutate(arr, pol)?.into())),
Dict(dict) => Some(Dict(self.mutate_record(dict, pol)?.into())),
Tuple(tup) => Some(Tuple(self.mutate_vec(tup, pol)?)),
Func(func) => Some(Func(self.mutate_func(func, pol)?.into())),
Args(args) => Some(Args(self.mutate_func(args, pol)?.into())),
Pattern(args) => Some(Pattern(self.mutate_func(args, pol)?.into())),
Param(f) => Some(Param(self.mutate_param(f, pol)?.into())),
Select(s) => Some(Select(self.mutate_select(s, pol)?.into())),
With(w) => Some(With(self.mutate_with_sig(w, pol)?.into())),
Unary(u) => Some(Unary(self.mutate_unary(u, pol)?.into())),
Binary(b) => Some(Binary(self.mutate_binary(b, pol)?.into())),
If(i) => Some(If(self.mutate_if(i, pol)?.into())),
Pattern(pat) => Some(Pattern(self.mutate_func(pat, pol)?.into())),
Param(param) => Some(Param(self.mutate_param(param, pol)?.into())),
Select(sel) => Some(Select(self.mutate_select(sel, pol)?.into())),
With(sig) => Some(With(self.mutate_with_sig(sig, pol)?.into())),
Unary(unary) => Some(Unary(self.mutate_unary(unary, pol)?.into())),
Binary(binary) => Some(Binary(self.mutate_binary(binary, pol)?.into())),
If(if_expr) => Some(If(self.mutate_if(if_expr, pol)?.into())),
}
}

@ -208,33 +208,34 @@ impl SigCheckDriver<'_> {
}
}
}
Ty::Builtin(BuiltinTy::Type(e)) if self.func_as_sig() => {
Ty::Builtin(BuiltinTy::Type(b_ty)) if self.func_as_sig() => {
// todo: distinguish between element and function
self.checker
.check(Sig::TypeCons { val: e, at: ty }, &mut self.ctx, pol);
.check(Sig::TypeCons { val: b_ty, at: ty }, &mut self.ctx, pol);
}
Ty::Builtin(BuiltinTy::Element(e)) if self.func_as_sig() => {
Ty::Builtin(BuiltinTy::Element(elem)) if self.func_as_sig() => {
// todo: distinguish between element and function
let f = (*e).into();
let f = (*elem).into();
self.checker
.check(Sig::Value { val: &f, at: ty }, &mut self.ctx, pol);
}
Ty::Func(sig) if self.func_as_sig() => {
self.checker.check(Sig::Type(sig), &mut self.ctx, pol);
}
Ty::Array(sig) if self.array_as_sig() => {
self.checker.check(Sig::ArrayCons(sig), &mut self.ctx, pol);
Ty::Array(arr) if self.array_as_sig() => {
self.checker.check(Sig::ArrayCons(arr), &mut self.ctx, pol);
}
Ty::Tuple(tup) if self.array_as_sig() => {
self.checker.check(Sig::TupleCons(tup), &mut self.ctx, pol);
Ty::Tuple(elems) if self.array_as_sig() => {
self.checker
.check(Sig::TupleCons(elems), &mut self.ctx, pol);
}
Ty::Dict(sig) if self.dict_as_sig() => {
// self.check_dict_signature(sig, pol, self.checker);
self.checker.check(Sig::DictCons(sig), &mut self.ctx, pol);
}
Ty::With(w) if self.func_as_sig() => {
self.ctx.args.push(w.with.clone());
self.ty(&w.sig, pol);
Ty::With(sig) if self.func_as_sig() => {
self.ctx.args.push(sig.with.clone());
self.ty(&sig.sig, pol);
self.ctx.args.pop();
}
Ty::Select(sel) => sel.ty.bounds(pol, &mut MethodDriver(self, &sel.select)),

@ -242,9 +243,9 @@ impl SigCheckDriver<'_> {
Ty::Unary(_) => {}
Ty::Binary(_) => {}
Ty::If(_) => {}
Ty::Param(p) => {
Ty::Param(param) => {
// todo: keep type information
self.ty(&p.ty, pol);
self.ty(&param.ty, pol);
}
_ if ty.has_bounds() => ty.bounds(pol, self),
_ => {}

@ -302,10 +303,10 @@ impl BoundChecker for MethodDriver<'_, '_> {
_ => {}
}
}
Ty::Builtin(BuiltinTy::Element(e)) => {
Ty::Builtin(BuiltinTy::Element(elem)) => {
// todo: distinguish between element and function
if self.is_binder() {
let f = (*e).into();
let f = (*elem).into();
self.0.checker.check(
Sig::Partialize(&Sig::Value { val: &f, at: ty }),
&mut self.0.ctx,

@ -14,7 +14,7 @@ struct CompactTy {
is_final: bool,
}

impl TypeScheme {
impl TypeInfo {
/// Simplify (Canonicalize) the given type with the given type scheme.
pub fn simplify(&self, ty: Ty, principal: bool) -> Ty {
let mut c = self.cano_cache.lock();

@ -101,13 +101,13 @@ impl TypeSimplifier<'_, '_> {
self.analyze(p, pol);
}
}
Ty::Tuple(e) => {
for ty in e.iter() {
Ty::Tuple(tup) => {
for ty in tup.iter() {
self.analyze(ty, pol);
}
}
Ty::Array(e) => {
self.analyze(e, pol);
Ty::Array(arr) => {
self.analyze(arr, pol);
}
Ty::With(w) => {
self.analyze(&w.sig, pol);

@ -193,8 +193,8 @@ impl TypeSimplifier<'_, '_> {

Ty::Dict(f.into())
}
Ty::Tuple(e) => Ty::Tuple(self.transform_seq(e, pol)),
Ty::Array(e) => Ty::Array(self.transform(e, pol).into()),
Ty::Tuple(tup) => Ty::Tuple(self.transform_seq(tup, pol)),
Ty::Array(arr) => Ty::Array(self.transform(arr, pol).into()),
Ty::With(w) => {
let sig = self.transform(&w.sig, pol).into();
// Negate the pol to make correct covariance

@ -58,7 +58,7 @@ mod tests {
use insta::{assert_debug_snapshot, assert_snapshot};
use tinymist_derive::BindTyCtx;

use super::{DynTypeBounds, Interned, Ty, TyCtx, TypeScheme, TypeVar};
use super::{DynTypeBounds, Interned, Ty, TyCtx, TypeInfo, TypeVar};
use crate::ty::tests::*;
use crate::ty::ApplyChecker;
#[test]

@ -71,7 +71,7 @@ mod tests {

#[derive(Default, BindTyCtx)]
#[bind(0)]
struct CallCollector(TypeScheme, Vec<Ty>);
struct CallCollector(TypeInfo, Vec<Ty>);

impl ApplyChecker for CallCollector {
fn apply(

@ -721,7 +721,11 @@ impl<'a> CompletionContext<'a> {
/// Add completions for all available packages.
fn package_completions(&mut self, all_versions: bool) {
let w = self.world().clone();
let mut packages: Vec<_> = w.packages().iter().map(|e| (&e.0, e.1.clone())).collect();
let mut packages: Vec<_> = w
.packages()
.iter()
.map(|(spec, desc)| (spec, desc.clone()))
.collect();
// local_packages to references and add them to the packages
let local_packages_refs = self.ctx.local_packages();
packages.extend(

@ -23,7 +23,7 @@ use crate::snippet::{
use crate::syntax::{
descent_decls, interpret_mode_at, is_ident_like, CursorClass, DescentDecl, InterpretMode,
};
use crate::ty::{DynTypeBounds, Iface, IfaceChecker, InsTy, SigTy, TyCtx, TypeScheme, TypeVar};
use crate::ty::{DynTypeBounds, Iface, IfaceChecker, InsTy, SigTy, TyCtx, TypeInfo, TypeVar};
use crate::upstream::complete::complete_code;

use crate::{completion_kind, prelude::*, LspCompletion};

@ -629,7 +629,7 @@ fn check_previous_syntax(leaf: &LinkedNode) -> Option<SurroundingSyntax> {
#[derive(BindTyCtx)]
#[bind(types)]
struct Defines {
types: Arc<TypeScheme>,
types: Arc<TypeInfo>,
defines: BTreeMap<EcoString, Ty>,
}

@ -659,7 +659,7 @@ impl Defines {
}
}

fn analyze_import_source(ctx: &LocalContext, types: &TypeScheme, s: ast::Expr) -> Option<Ty> {
fn analyze_import_source(ctx: &LocalContext, types: &TypeInfo, s: ast::Expr) -> Option<Ty> {
if let Some(res) = types.type_of_span(s.span()) {
if !matches!(res.value(), Some(Value::Str(..))) {
return Some(types.simplify(res, false));

@ -1077,11 +1077,11 @@ impl TypeCompletionContext<'_, '_> {
self.type_completion(info, docs);
}
}
Ty::Let(e) => {
for ut in e.ubs.iter() {
Ty::Let(bounds) => {
for ut in bounds.ubs.iter() {
self.type_completion(ut, docs);
}
for lt in e.lbs.iter() {
for lt in bounds.lbs.iter() {
self.type_completion(lt, docs);
}
}

@ -1350,10 +1350,10 @@ impl TypeCompletionContext<'_, '_> {
});
}
}
BuiltinTy::Element(e) => {
BuiltinTy::Element(elem) => {
self.ctx.value_completion(
Some(e.name().into()),
&Value::Func((*e).into()),
Some(elem.name().into()),
&Value::Func((*elem).into()),
true,
docs,
);

@ -21,18 +21,17 @@ impl SemanticRequest for WorkspaceLabelRequest {

let mut symbols = vec![];

for id in ctx.source_files().clone() {
let Ok(source) = ctx.source_by_id(id) else {
for fid in ctx.source_files().clone() {
let Ok(source) = ctx.source_by_id(fid) else {
continue;
};
let Ok(path) = ctx.path_for_id(id) else {
let Ok(path) = ctx.path_for_id(fid) else {
continue;
};
let uri = path_to_url(&path).unwrap();
let res =
get_lexical_hierarchy(source.clone(), LexicalScopeKind::Symbol).map(|symbols| {
filter_document_labels(&symbols, &source, &uri, ctx.position_encoding())
});
let res = get_lexical_hierarchy(&source, LexicalScopeKind::Symbol).map(|hierarchy| {
filter_document_labels(&hierarchy, &source, &uri, ctx.position_encoding())
});

if let Some(mut res) = res {
symbols.append(&mut res)

@ -45,27 +44,28 @@ impl SemanticRequest for WorkspaceLabelRequest {

#[allow(deprecated)]
fn filter_document_labels(
symbols: &[LexicalHierarchy],
hierarchy: &[LexicalHierarchy],
source: &Source,
uri: &Url,
position_encoding: PositionEncoding,
) -> Vec<SymbolInformation> {
symbols
hierarchy
.iter()
.flat_map(|e| {
[e].into_iter()
.chain(e.children.as_deref().into_iter().flatten())
.flat_map(|hierarchy| {
[hierarchy]
.into_iter()
.chain(hierarchy.children.as_deref().into_iter().flatten())
})
.flat_map(|e| {
if !matches!(e.info.kind, LexicalKind::Var(LexicalVarKind::Label)) {
.flat_map(|hierarchy| {
if !matches!(hierarchy.info.kind, LexicalKind::Var(LexicalVarKind::Label)) {
return None;
}

let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding);
let rng = typst_to_lsp::range(hierarchy.info.range.clone(), source, position_encoding);

Some(SymbolInformation {
name: e.info.name.to_string(),
kind: e.info.kind.clone().try_into().unwrap(),
name: hierarchy.info.name.to_string(),
kind: hierarchy.info.kind.clone().try_into().unwrap(),
tags: None,
deprecated: None,
location: LspLocation {

@ -178,7 +178,7 @@ impl CompileOnceArgs {
}

let relative_entry = match entry.strip_prefix(&root) {
Ok(e) => e,
Ok(relative_entry) => relative_entry,
Err(_) => {
log::error!("entry path must be inside the root: {}", entry.display());
std::process::exit(1);

@ -245,7 +245,7 @@ impl PackageStorage {
description: Option<EcoString>,
}

let index: Vec<RemotePackageIndex> = match serde_json::from_reader(reader) {
let indices: Vec<RemotePackageIndex> = match serde_json::from_reader(reader) {
Ok(index) => index,
Err(err) => {
log::error!("Failed to parse package index: {err} from {url}");

@ -253,16 +253,16 @@ impl PackageStorage {
}
};

index
indices
.into_iter()
.map(|e| {
.map(|index| {
(
PackageSpec {
namespace: "preview".into(),
name: e.name,
version: e.version,
name: index.name,
version: index.version,
},
e.description,
index.description,
)
})
.collect::<Vec<_>>()

@ -106,7 +106,7 @@ impl Typlite {

let main = world
.source(current)
.map_err(|e| format!("getting source for main file: {e:?}"))?;
.map_err(|err| format!("getting source for main file: {err:?}"))?;

let worker = TypliteWorker {
current,

@ -456,7 +456,7 @@ impl TypliteWorker {
world.map_shadow_by_id(main_id, main).unwrap();

let document = typst::compile(&world).output;
let document = document.map_err(|e| {
let document = document.map_err(|diagnostics| {
let mut err = String::new();
let _ = write!(err, "compiling node: ");
let write_span = |span: typst_syntax::Span, err: &mut String| {

@ -483,7 +483,7 @@ impl TypliteWorker {
}
};

for s in e.iter() {
for s in diagnostics.iter() {
match s.severity {
typst::diag::Severity::Error => {
let _ = write!(err, "error: ");

@ -585,8 +585,8 @@ impl TypliteWorker {
return Ok(Value::Content(s));
}
s.push('`');
for e in raw.lines() {
s.push_str(&Self::value(Self::str(e.to_untyped())?));
for line in raw.lines() {
s.push_str(&Self::value(Self::str(line.to_untyped())?));
}
s.push('`');
Ok(Value::Content(s))

@ -72,8 +72,8 @@ impl RenderActor {
info!("RenderActor: resolving span: {:?}", element_path);
let spans = match self.renderer.resolve_span_by_element_path(&element_path) {
Ok(spans) => spans,
Err(e) => {
info!("RenderActor: failed to resolve span: {}", e);
Err(err) => {
info!("RenderActor: failed to resolve span: {err}");
return false;
}
};

@ -201,7 +201,7 @@ impl<T: SourceFileServer + EditorServer> TypstActor<T> {
Some(ed)
}
}
(Some(e), None) | (None, Some(e)) => Some(e),
(Some(info), None) | (None, Some(info)) => Some(info),
(None, None) => None,
};

@ -241,7 +241,7 @@ impl<T: SourceFileServer + EditorServer> TypstActor<T> {
}
Some(rng)
}
(.., Some(e)) | (Some(e), None) => Some(e),
(.., Some(info)) | (Some(info), None) => Some(info),
(None, None) => None,
}
}
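
The final two arms above are a plain option merge; a generic sketch of just those fall-through arms (illustrative only, the richer cases are handled by the arms shown above them):

fn merge_info<T>(primary: Option<T>, fallback: Option<T>) -> Option<T> {
    match (primary, fallback) {
        (.., Some(info)) | (Some(info), None) => Some(info),
        (None, None) => None,
    }
}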