mirror of https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-08-03 17:58:17 +00:00

refactor: rename local variables (cont.) (#1026)

parent 9017e05afa
commit de8e45a539

50 changed files with 837 additions and 814 deletions
@@ -60,9 +60,9 @@ impl<T> RevisionManager<T> {
     /// Lock the revision in *main thread*.
     #[must_use]
     pub fn lock(&mut self, used: NonZeroUsize) -> RevisionLock {
-        let l = self.lock_estimated();
-        l.access(used);
-        l
+        let lock = self.lock_estimated();
+        lock.access(used);
+        lock
     }

     /// Lock the revision in *main thread*.

@@ -51,8 +51,8 @@ pub(crate) trait ToFunc {
 impl ToFunc for Value {
     fn to_func(&self) -> Option<Func> {
         match self {
-            Value::Func(f) => Some(f.clone()),
-            Value::Type(t) => t.constructor().ok(),
+            Value::Func(func) => Some(func.clone()),
+            Value::Type(ty) => ty.constructor().ok(),
             _ => None,
         }
     }

@@ -61,24 +61,24 @@ impl ToFunc for Value {
 /// Extension trait for `typst::World`.
 pub trait LspWorldExt {
     /// Get file's id by its path
-    fn file_id_by_path(&self, p: &Path) -> FileResult<TypstFileId>;
+    fn file_id_by_path(&self, path: &Path) -> FileResult<TypstFileId>;

     /// Get the source of a file by file path.
-    fn source_by_path(&self, p: &Path) -> FileResult<Source>;
+    fn source_by_path(&self, path: &Path) -> FileResult<Source>;

     /// Resolve the uri for a file id.
-    fn uri_for_id(&self, id: TypstFileId) -> FileResult<Url>;
+    fn uri_for_id(&self, fid: TypstFileId) -> FileResult<Url>;
 }

 impl LspWorldExt for tinymist_world::LspWorld {
-    fn file_id_by_path(&self, p: &Path) -> FileResult<TypstFileId> {
+    fn file_id_by_path(&self, path: &Path) -> FileResult<TypstFileId> {
         // todo: source in packages
         let root = self.workspace_root().ok_or_else(|| {
             let reason = eco_format!("workspace root not found");
             FileError::Other(Some(reason))
         })?;
-        let relative_path = p.strip_prefix(&root).map_err(|_| {
-            let reason = eco_format!("access denied, path: {p:?}, root: {root:?}");
+        let relative_path = path.strip_prefix(&root).map_err(|_| {
+            let reason = eco_format!("access denied, path: {path:?}, root: {root:?}");
             FileError::Other(Some(reason))
         })?;

@@ -273,7 +273,7 @@ mod module_tests {

         dependencies.sort();
         // remove /main.typ
-        dependencies.retain(|(p, _, _)| p != "/main.typ");
+        dependencies.retain(|(path, _, _)| path != "/main.typ");

         let dependencies = dependencies
             .into_iter()

@@ -102,15 +102,15 @@ impl YamlBib {
         let entries = loader
             .content
             .into_iter()
-            .filter_map(|(k, span)| {
-                let k_span = map_span(k.span)?;
+            .filter_map(|(name, span)| {
+                let name_span = map_span(name.span)?;
                 let span = map_span(span)?;
                 let entry = BibEntry {
                     file_id,
-                    name_span: k_span.clone(),
+                    name_span: name_span.clone(),
                     span: span.clone(),
                 };
-                Some((k.value, entry))
+                Some((name.value, entry))
             })
             .collect();

@@ -44,9 +44,9 @@ pub fn analyze_call(
     node: LinkedNode,
 ) -> Option<Arc<CallInfo>> {
     log::trace!("func call found: {:?}", node);
-    let f = node.cast::<ast::FuncCall>()?;
+    let call = node.cast::<ast::FuncCall>()?;

-    let callee = f.callee();
+    let callee = call.callee();
     // todo: reduce many such patterns
     if !callee.hash() && !matches!(callee, ast::Expr::MathIdent(_)) {
         return None;

@@ -57,7 +57,7 @@ pub fn analyze_call(
         ctx,
         source,
         callee_node,
-        f.args(),
+        call.args(),
     )?))
 }

@@ -107,16 +107,16 @@ pub fn analyze_call_no_cache(

                 return;
             }
-            PosState::Pos(i) => {
-                if i + 1 < self.signature.pos_size() {
-                    self.state = PosState::Pos(i + 1);
+            PosState::Pos(pos) => {
+                if pos + 1 < self.signature.pos_size() {
+                    self.state = PosState::Pos(pos + 1);
                 } else if self.signature.has_spread_right() {
                     self.state = PosState::Variadic;
                 } else {
                     self.state = PosState::Final;
                 }

-                (ParamKind::Positional, self.signature.get_pos(i).unwrap())
+                (ParamKind::Positional, self.signature.get_pos(pos).unwrap())
             }
             PosState::Variadic => (ParamKind::Rest, self.signature.rest().unwrap()),
             PosState::Final => return,
@@ -35,7 +35,7 @@ impl<'a> ColorExprWorker<'a> {
                 }
             }
             SyntaxKind::Named => {}
-            k if k.is_trivia() || k.is_keyword() || k.is_error() => return Some(()),
+            kind if kind.is_trivia() || kind.is_keyword() || kind.is_error() => return Some(()),
             _ => {}
         };

@@ -81,8 +81,8 @@ impl<'a> ColorExprWorker<'a> {
     fn on_rgb(&mut self, node: &LinkedNode, call: ast::FuncCall) -> Option<()> {
         let mut args = call.args().items();
         let hex_or_color_or_r = args.next()?;
-        let g = args.next();
-        match (g.is_some(), hex_or_color_or_r) {
+        let arg = args.next();
+        match (arg.is_some(), hex_or_color_or_r) {
             (true, _) => self.on_const_call(node, call)?,
             (false, ast::Arg::Pos(ast::Expr::Str(s))) => {
                 // parse hex
@@ -215,10 +215,10 @@ impl CallConvention {
     /// Get the function pointer of the call.
     pub fn callee(self) -> Func {
         match self {
-            CallConvention::Static(f) => f,
-            CallConvention::Method(_, f) => f,
-            CallConvention::With(f) => f,
-            CallConvention::Where(f) => f,
+            CallConvention::Static(func) => func,
+            CallConvention::Method(_, func) => func,
+            CallConvention::With(func) => func,
+            CallConvention::Where(func) => func,
         }
     }
 }

@@ -229,7 +229,7 @@ pub fn resolve_call_target(ctx: &Arc<SharedContext>, node: &SyntaxNode) -> Optio
     let source = ctx.source_by_id(node.span().id()?).ok()?;
     let def = ctx.def_of_span(&source, None, node.span())?;
     let func_ptr = match def.term.and_then(|val| val.value()) {
-        Some(Value::Func(f)) => Some(f),
+        Some(Value::Func(func)) => Some(func),
         Some(Value::Type(ty)) => ty.constructor().ok(),
         _ => None,
     }?;

@@ -244,8 +244,8 @@ pub fn resolve_call_target(ctx: &Arc<SharedContext>, node: &SyntaxNode) -> Optio
         let field = access.field().get();
         let values = ctx.analyze_expr(target.to_untyped());
         if let Some((this, func_ptr)) = values.into_iter().find_map(|(this, _styles)| {
-            if let Some(Value::Func(f)) = this.ty().scope().get(field) {
-                return Some((this, f.clone()));
+            if let Some(Value::Func(func)) = this.ty().scope().get(field) {
+                return Some((this, func.clone()));
             }

             None

@@ -289,18 +289,18 @@ fn is_same_native_func(x: Option<&Func>, y: &Func) -> bool {

 static WITH_FUNC: LazyLock<Option<&'static Func>> = LazyLock::new(|| {
     let fn_ty = Type::of::<Func>();
-    let Some(Value::Func(f)) = fn_ty.scope().get("with") else {
+    let Some(Value::Func(func)) = fn_ty.scope().get("with") else {
         return None;
     };
-    Some(f)
+    Some(func)
 });

 static WHERE_FUNC: LazyLock<Option<&'static Func>> = LazyLock::new(|| {
     let fn_ty = Type::of::<Func>();
-    let Some(Value::Func(f)) = fn_ty.scope().get("where") else {
+    let Some(Value::Func(func)) = fn_ty.scope().get("where") else {
         return None;
     };
-    Some(f)
+    Some(func)
 });

 fn value_to_def(value: Value, name: impl FnOnce() -> Option<Interned<str>>) -> Option<Definition> {
@@ -103,19 +103,19 @@ impl Analysis {

     /// Lock the revision in *main thread*.
     #[must_use]
-    pub fn lock_revision(&self, q: Option<&CompilerQueryRequest>) -> AnalysisRevLock {
+    pub fn lock_revision(&self, req: Option<&CompilerQueryRequest>) -> AnalysisRevLock {
         let mut grid = self.analysis_rev_cache.lock();

         AnalysisRevLock {
-            tokens: match q {
-                Some(CompilerQueryRequest::SemanticTokensFull(f)) => Some(
-                    SemanticTokenCache::acquire(self.tokens_caches.clone(), &f.path, None),
+            tokens: match req {
+                Some(CompilerQueryRequest::SemanticTokensFull(req)) => Some(
+                    SemanticTokenCache::acquire(self.tokens_caches.clone(), &req.path, None),
                 ),
-                Some(CompilerQueryRequest::SemanticTokensDelta(f)) => {
+                Some(CompilerQueryRequest::SemanticTokensDelta(req)) => {
                     Some(SemanticTokenCache::acquire(
                         self.tokens_caches.clone(),
-                        &f.path,
-                        Some(&f.previous_result_id),
+                        &req.path,
+                        Some(&req.previous_result_id),
                     ))
                 }
                 _ => None,

@@ -308,8 +308,8 @@ impl LocalContext {
             .completion_files
             .get_or_init(|| {
                 if let Some(root) = self.world.workspace_root() {
-                    scan_workspace_files(&root, PathPreference::Special.ext_matcher(), |p| {
-                        TypstFileId::new(None, VirtualPath::new(p))
+                    scan_workspace_files(&root, PathPreference::Special.ext_matcher(), |path| {
+                        TypstFileId::new(None, VirtualPath::new(path))
                     })
                 } else {
                     vec![]
@@ -538,28 +538,28 @@ impl SharedContext {
     }

     /// Resolve the uri for a file id.
-    pub fn uri_for_id(&self, id: TypstFileId) -> Result<Url, FileError> {
-        self.world.uri_for_id(id)
+    pub fn uri_for_id(&self, fid: TypstFileId) -> Result<Url, FileError> {
+        self.world.uri_for_id(fid)
     }

     /// Get file's id by its path
-    pub fn file_id_by_path(&self, p: &Path) -> FileResult<TypstFileId> {
-        self.world.file_id_by_path(p)
+    pub fn file_id_by_path(&self, path: &Path) -> FileResult<TypstFileId> {
+        self.world.file_id_by_path(path)
     }

     /// Get the content of a file by file id.
-    pub fn file_by_id(&self, id: TypstFileId) -> FileResult<Bytes> {
-        self.world.file(id)
+    pub fn file_by_id(&self, fid: TypstFileId) -> FileResult<Bytes> {
+        self.world.file(fid)
     }

     /// Get the source of a file by file id.
-    pub fn source_by_id(&self, id: TypstFileId) -> FileResult<Source> {
-        self.world.source(id)
+    pub fn source_by_id(&self, fid: TypstFileId) -> FileResult<Source> {
+        self.world.source(fid)
     }

     /// Get the source of a file by file path.
-    pub fn source_by_path(&self, p: &Path) -> FileResult<Source> {
-        self.source_by_id(self.file_id_by_path(p)?)
+    pub fn source_by_path(&self, path: &Path) -> FileResult<Source> {
+        self.source_by_id(self.file_id_by_path(path)?)
     }

     /// Classifies the syntax under span that can be operated on by IDE

@@ -600,12 +600,12 @@ impl SharedContext {
     /// Get the real definition of a compilation.
     /// Note: must be called after compilation.
     pub(crate) fn dependencies(&self) -> EcoVec<reflexo::ImmutPath> {
-        let mut v = EcoVec::new();
-        self.world.iter_dependencies(&mut |p| {
-            v.push(p);
+        let mut deps = EcoVec::new();
+        self.world.iter_dependencies(&mut |path| {
+            deps.push(path);
         });

-        v
+        deps
     }

     /// Resolve extra font information.
@@ -808,27 +808,27 @@ impl SharedContext {
         definition(self, source, doc, syntax)
     }

-    pub(crate) fn type_of_span(self: &Arc<Self>, s: Span) -> Option<Ty> {
-        self.type_of_span_(&self.source_by_id(s.id()?).ok()?, s)
+    pub(crate) fn type_of_span(self: &Arc<Self>, span: Span) -> Option<Ty> {
+        self.type_of_span_(&self.source_by_id(span.id()?).ok()?, span)
     }

-    pub(crate) fn type_of_span_(self: &Arc<Self>, source: &Source, s: Span) -> Option<Ty> {
-        self.type_check(source).type_of_span(s)
+    pub(crate) fn type_of_span_(self: &Arc<Self>, source: &Source, span: Span) -> Option<Ty> {
+        self.type_check(source).type_of_span(span)
     }

-    pub(crate) fn literal_type_of_node(self: &Arc<Self>, k: LinkedNode) -> Option<Ty> {
-        let id = k.span().id()?;
+    pub(crate) fn post_type_of_node(self: &Arc<Self>, node: LinkedNode) -> Option<Ty> {
+        let id = node.span().id()?;
         let source = self.source_by_id(id).ok()?;
         let ty_chk = self.type_check(&source);

-        let ty = post_type_check(self.clone(), &ty_chk, k.clone())
-            .or_else(|| ty_chk.type_of_span(k.span()))?;
+        let ty = post_type_check(self.clone(), &ty_chk, node.clone())
+            .or_else(|| ty_chk.type_of_span(node.span()))?;
         Some(ty_chk.simplify(ty, false))
     }

     pub(crate) fn sig_of_def(self: &Arc<Self>, def: Definition) -> Option<Signature> {
         crate::log_debug_ct!("check definition func {def:?}");
-        let source = def.decl.file_id().and_then(|f| self.source_by_id(f).ok());
+        let source = def.decl.file_id().and_then(|id| self.source_by_id(id).ok());
         analyze_signature(self, SignatureTarget::Def(source, def))
     }

@@ -896,11 +896,11 @@ impl SharedContext {
         compute: impl FnOnce(&Arc<Self>) -> Option<Signature> + Send + Sync + 'static,
     ) -> Option<Signature> {
         let res = match func {
-            SignatureTarget::Def(src, d) => self
+            SignatureTarget::Def(src, def) => self
                 .analysis
                 .caches
                 .def_signatures
-                .entry(hash128(&(src, d.clone())), self.lifetime),
+                .entry(hash128(&(src, def.clone())), self.lifetime),
             SignatureTarget::SyntaxFast(source, span) => {
                 let cache_key = (source, span, true);
                 self.analysis
@@ -1032,7 +1032,7 @@ impl<K, V> IncrCacheMap<K, V> {

         next.get_or_init(|| {
             let prev = self.prev.lock().get(&key).cloned();
-            let prev = prev.and_then(|p| p.get().cloned());
+            let prev = prev.and_then(|prev| prev.get().cloned());
             let prev = prev.or_else(|| {
                 let global = self.global.lock();
                 global.get(&key).map(|global| global.1.clone())

@@ -1096,8 +1096,8 @@ impl<T> CacheMap<T> {
 }

 impl<T: Default + Clone> CacheMap<T> {
-    fn entry(&self, k: u128, lifetime: u64) -> T {
-        let entry = self.m.entry(k);
+    fn entry(&self, key: u128, lifetime: u64) -> T {
+        let entry = self.m.entry(key);
         let entry = entry.or_insert_with(|| (lifetime, T::default()));
         entry.1.clone()
     }

@@ -1309,7 +1309,7 @@ fn find_loc(
     let line = r as u32;
     let character = match encoding {
         PositionEncoding::Utf8 => column_prefix.chars().count(),
-        PositionEncoding::Utf16 => column_prefix.chars().map(|c| c.len_utf16()).sum(),
+        PositionEncoding::Utf16 => column_prefix.chars().map(|ch| ch.len_utf16()).sum(),
     } as u32;

     Some(LspPosition { line, character })
@@ -85,7 +85,7 @@ impl LinkStrWorker {
                 self.analyze_path_expr(node, path);
             }
             // early exit
-            k if k.is_trivia() || k.is_keyword() || k.is_error() => return Some(()),
+            kind if kind.is_trivia() || kind.is_keyword() || kind.is_error() => return Some(()),
             _ => {}
         };

@@ -236,10 +236,10 @@ impl<'a> PostTypeChecker<'a> {
         }

         // truncate args
-        let c = sig.param_shift();
+        let shift = sig.param_shift();
         let nth = sig
             .primary()
-            .get_pos(c + positional)
+            .get_pos(shift + positional)
             .or_else(|| sig.primary().rest());
         if let Some(nth) = nth {
             resp.insert(Ty::Param(nth.clone()), false);

@@ -349,8 +349,8 @@ impl<'a> PostTypeChecker<'a> {
            };
                self.info.type_of_span(ident.span())
            }
-            ast::Pattern::Parenthesized(p) => {
-                self.destruct_let(p.expr().to_untyped().cast()?, node)
+            ast::Pattern::Parenthesized(paren_expr) => {
+                self.destruct_let(paren_expr.expr().to_untyped().cast()?, node)
            }
            // todo: pattern matching
            ast::Pattern::Destructuring(_d) => {
@@ -54,7 +54,7 @@ impl SemanticTokenCache {
     /// Lock the token cache with an optional previous id in *main thread*.
     pub(crate) fn acquire(
         cache: Arc<Mutex<Self>>,
-        p: &Path,
+        path: &Path,
         prev: Option<&str>,
     ) -> SemanticTokenContext {
         let that = cache.clone();

@@ -70,7 +70,7 @@ impl SemanticTokenCache {
         });
         let next = NonZeroUsize::new(that.next_id).expect("id overflow");

-        let path = ImmutPath::from(p);
+        let path = ImmutPath::from(path);
         let manager = that.manager.entry(path.clone()).or_default();
         let _rev_lock = manager.lock(prev.unwrap_or(next));
         let prev = prev.and_then(|prev| {

@@ -126,7 +126,7 @@ impl PrimarySignature {
         let pos = pos
             .iter()
             .enumerate()
-            .map(|(i, pos)| (pos, type_sig.pos(i)));
+            .map(|(idx, pos)| (pos, type_sig.pos(idx)));
         let named = named.iter().map(|x| (x, type_sig.named(&x.name)));
         let rest = rest.into_iter().map(|x| (x, type_sig.rest_param()));

@@ -202,9 +202,9 @@ fn analyze_type_signature(
             let ty = type_info.type_of_span(span)?;
             Some((type_info, ty))
         }
-        SignatureTarget::Runtime(f) => {
-            let source = ctx.source_by_id(f.span().id()?).ok()?;
-            let node = source.find(f.span())?;
+        SignatureTarget::Runtime(func) => {
+            let source = ctx.source_by_id(func.span().id()?).ok()?;
+            let node = source.find(func.span())?;
             let def = classify_def_loosely(node.parent()?.clone())?;
             let type_info = ctx.type_check(&source);
             let ty = type_info.type_of_span(def.name()?.span())?;

@@ -242,13 +242,13 @@ pub(crate) fn sig_of_type(
     // todo: this will affect inlay hint: _var_with
     let (var_with, docstring) = match type_info.var_docs.get(&v.def).map(|x| x.as_ref()) {
         Some(UntypedDefDocs::Function(sig)) => (vec![], Either::Left(sig.as_ref())),
-        Some(UntypedDefDocs::Variable(d)) => find_alias_stack(&mut ty_ctx, &v, d)?,
+        Some(UntypedDefDocs::Variable(docs)) => find_alias_stack(&mut ty_ctx, &v, docs)?,
         _ => return None,
     };

     let docstring = match docstring {
         Either::Left(docstring) => docstring,
-        Either::Right(f) => return Some(wind_stack(var_with, ctx.type_of_func(f))),
+        Either::Right(func) => return Some(wind_stack(var_with, ctx.type_of_func(func))),
     };

     let mut param_specs = Vec::new();

@@ -373,16 +373,16 @@ type WithElem<'a> = (&'a UntypedVarDocs, Option<Interned<SigWithTy>>);

 fn find_alias_stack<'a>(
     ctx: &'a mut PostTypeChecker,
-    v: &Interned<TypeVar>,
-    d: &'a UntypedVarDocs,
+    var: &Interned<TypeVar>,
+    docs: &'a UntypedVarDocs,
 ) -> Option<(Vec<WithElem<'a>>, Either<&'a UntypedSignatureDocs, Func>)> {
     let mut checker = AliasStackChecker {
         ctx,
-        stack: vec![(d, None)],
+        stack: vec![(docs, None)],
         res: None,
         checking_with: true,
     };
-    Ty::Var(v.clone()).bounds(true, &mut checker);
+    Ty::Var(var.clone()).bounds(true, &mut checker);

     checker.res.map(|res| (checker.stack, res))
 }
@@ -416,9 +416,9 @@ impl BoundChecker for AliasStackChecker<'_, '_> {
                 Some(UntypedDefDocs::Function(sig)) => {
                     self.res = Some(Either::Left(sig));
                 }
-                Some(UntypedDefDocs::Variable(d)) => {
+                Some(UntypedDefDocs::Variable(docs)) => {
                     self.checking_with = true;
-                    self.stack.push((d, None));
+                    self.stack.push((docs, None));
                     self.check_var_rec(u, pol);
                     self.stack.pop();
                     self.checking_with = false;

@@ -447,8 +447,8 @@ impl BoundChecker for AliasStackChecker<'_, '_> {
                 self.check_var(&u, pol);
             }
             src @ (DocSource::Builtin(..) | DocSource::Ins(..)) => {
-                if let Some(f) = src.as_func() {
-                    self.res = Some(Either::Right(f));
+                if let Some(func) = src.as_func() {
+                    self.res = Some(Either::Right(func));
                 }
             }
         }

@@ -482,10 +482,10 @@ pub fn func_signature(func: Func) -> Signature {
     use typst::foundations::func::Repr;
     let mut with_stack = eco_vec![];
     let mut func = func;
-    while let Repr::With(f) = func.inner() {
+    while let Repr::With(with) = func.inner() {
+        let (inner, args) = with.as_ref();
         with_stack.push(ArgsInfo {
-            items: f
-                .1
+            items: args
                 .items
                 .iter()
                 .map(|arg| ArgInfo {
@@ -494,7 +494,7 @@ pub fn func_signature(func: Func) -> Signature {
                 })
                 .collect(),
         });
-        func = f.0.clone();
+        func = inner.clone();
     }

     let mut pos_tys = vec![];

@@ -534,18 +534,18 @@ pub fn func_signature(func: Func) -> Signature {

     let ret_ty = match func.inner() {
         Repr::With(..) => unreachable!(),
-        Repr::Closure(c) => {
-            analyze_closure_signature(c.clone(), &mut add_param);
+        Repr::Closure(closure) => {
+            analyze_closure_signature(closure.clone(), &mut add_param);
             None
         }
         Repr::Element(..) | Repr::Native(..) => {
-            for p in func.params().unwrap() {
+            for param in func.params().unwrap() {
                 add_param(Interned::new(ParamTy {
-                    name: p.name.into(),
-                    docs: Some(p.docs.into()),
-                    default: p.default.map(|d| truncated_repr(&d())),
-                    ty: Ty::from_param_site(&func, p),
-                    attrs: p.into(),
+                    name: param.name.into(),
+                    docs: Some(param.docs.into()),
+                    default: param.default.map(|default| truncated_repr(&default())),
+                    ty: Ty::from_param_site(&func, param),
+                    attrs: param.into(),
                 }));
             }

@@ -583,12 +583,12 @@ pub fn func_signature(func: Func) -> Signature {
 }

 fn analyze_closure_signature(
-    c: Arc<LazyHash<Closure>>,
+    closure: Arc<LazyHash<Closure>>,
     add_param: &mut impl FnMut(Interned<ParamTy>),
 ) {
-    log::trace!("closure signature for: {:?}", c.node.kind());
+    log::trace!("closure signature for: {:?}", closure.node.kind());

-    let closure = &c.node;
+    let closure = &closure.node;
     let closure_ast = match closure.kind() {
         SyntaxKind::Closure => closure.cast::<ast::Closure>().unwrap(),
         _ => return,
@@ -639,30 +639,35 @@ impl fmt::Display for PatternDisplay<'_> {
             ast::Pattern::Normal(ast::Expr::Ident(ident)) => f.write_str(ident.as_str()),
             ast::Pattern::Normal(_) => f.write_str("?"), // unreachable?
             ast::Pattern::Placeholder(_) => f.write_str("_"),
-            ast::Pattern::Parenthesized(p) => {
-                write!(f, "{}", PatternDisplay(&p.pattern()))
+            ast::Pattern::Parenthesized(paren_expr) => {
+                write!(f, "{}", PatternDisplay(&paren_expr.pattern()))
             }
-            ast::Pattern::Destructuring(d) => {
+            ast::Pattern::Destructuring(destructing) => {
                 write!(f, "(")?;
                 let mut first = true;
-                for item in d.items() {
+                for item in destructing.items() {
                     if first {
                         first = false;
                     } else {
                         write!(f, ", ")?;
                     }
                     match item {
-                        ast::DestructuringItem::Pattern(p) => write!(f, "{}", PatternDisplay(&p))?,
-                        ast::DestructuringItem::Named(n) => write!(
+                        ast::DestructuringItem::Pattern(pos) => {
+                            write!(f, "{}", PatternDisplay(&pos))?
+                        }
+                        ast::DestructuringItem::Named(named) => write!(
                             f,
                             "{}: {}",
-                            n.name().as_str(),
-                            unwrap_parens(n.expr()).to_untyped().text()
+                            named.name().as_str(),
+                            unwrap_parens(named.expr()).to_untyped().text()
                         )?,
-                        ast::DestructuringItem::Spread(s) => write!(
+                        ast::DestructuringItem::Spread(spread) => write!(
                             f,
                             "..{}",
-                            s.sink_ident().map(|i| i.as_str()).unwrap_or_default()
+                            spread
+                                .sink_ident()
+                                .map(|sink| sink.as_str())
+                                .unwrap_or_default()
                         )?,
                     }
                 }

@@ -674,8 +679,8 @@ impl fmt::Display for PatternDisplay<'_> {
 }

 fn unwrap_parens(mut expr: ast::Expr) -> ast::Expr {
-    while let ast::Expr::Parenthesized(p) = expr {
-        expr = p.expr();
+    while let ast::Expr::Parenthesized(paren_expr) = expr {
+        expr = paren_expr.expr();
     }

     expr
@@ -127,9 +127,9 @@ impl TyCtxMut for TypeChecker<'_> {
         self.ctx.type_of_value(val)
     }

-    fn check_module_item(&mut self, fid: TypstFileId, k: &StrRef) -> Option<Ty> {
+    fn check_module_item(&mut self, fid: TypstFileId, name: &StrRef) -> Option<Ty> {
         self.module_exports
-            .entry((fid, k.clone()))
+            .entry((fid, name.clone()))
             .or_default()
             .clone()
             .get_or_init(|| {

@@ -140,7 +140,7 @@ impl TyCtxMut for TypeChecker<'_> {
                     .or_insert_with(|| self.ctx.expr_stage_by_id(fid))
                     .clone()?;

-                Some(self.check(ei.exports.get(k)?))
+                Some(self.check(ei.exports.get(name)?))
             })
             .clone()
     }

@@ -510,16 +510,16 @@ impl TypeChecker<'_> {
         }
     }

-    fn weaken_constraint(&self, c: &Ty, kind: &FlowVarKind) -> Ty {
+    fn weaken_constraint(&self, term: &Ty, kind: &FlowVarKind) -> Ty {
         if matches!(kind, FlowVarKind::Strong(_)) {
-            return c.clone();
+            return term.clone();
         }

-        if let Ty::Value(v) = c {
-            return BuiltinTy::from_value(&v.val);
+        if let Ty::Value(ins_ty) = term {
+            return BuiltinTy::from_value(&ins_ty.val);
         }

-        c.clone()
+        term.clone()
     }
 }

@@ -568,31 +568,33 @@ impl Joiner {
             (Ty::Builtin(ty), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Builtin(ty),
             (Ty::Builtin(..), _) => self.definite = Ty::undef(),
             // todo: value join
-            (Ty::Value(v), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Value(v),
+            (Ty::Value(ins_ty), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Value(ins_ty),
             (Ty::Value(..), _) => self.definite = Ty::undef(),
-            (Ty::Func(f), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Func(f),
+            (Ty::Func(func), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Func(func),
             (Ty::Func(..), _) => self.definite = Ty::undef(),
-            (Ty::Dict(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Dict(w),
+            (Ty::Dict(dict), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Dict(dict),
            (Ty::Dict(..), _) => self.definite = Ty::undef(),
-            (Ty::With(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::With(w),
+            (Ty::With(with), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::With(with),
             (Ty::With(..), _) => self.definite = Ty::undef(),
-            (Ty::Args(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Args(w),
+            (Ty::Args(args), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Args(args),
             (Ty::Args(..), _) => self.definite = Ty::undef(),
-            (Ty::Pattern(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Pattern(w),
+            (Ty::Pattern(pat), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Pattern(pat),
             (Ty::Pattern(..), _) => self.definite = Ty::undef(),
-            (Ty::Select(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Select(w),
+            (Ty::Select(sel), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Select(sel),
             (Ty::Select(..), _) => self.definite = Ty::undef(),
-            (Ty::Unary(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Unary(w),
+            (Ty::Unary(unary), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Unary(unary),
             (Ty::Unary(..), _) => self.definite = Ty::undef(),
-            (Ty::Binary(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Binary(w),
+            (Ty::Binary(binary), Ty::Builtin(BuiltinTy::None)) => {
+                self.definite = Ty::Binary(binary)
+            }
             (Ty::Binary(..), _) => self.definite = Ty::undef(),
-            (Ty::If(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::If(w),
+            (Ty::If(if_ty), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::If(if_ty),
             (Ty::If(..), _) => self.definite = Ty::undef(),
-            (Ty::Union(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Union(w),
+            (Ty::Union(types), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Union(types),
             (Ty::Union(..), _) => self.definite = Ty::undef(),
-            (Ty::Let(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Let(w),
+            (Ty::Let(bounds), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Let(bounds),
             (Ty::Let(..), _) => self.definite = Ty::undef(),
-            (Ty::Param(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Param(w),
+            (Ty::Param(param), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Param(param),
             (Ty::Param(..), _) => self.definite = Ty::undef(),
             (Ty::Boolean(b), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Boolean(b),
             (Ty::Boolean(..), _) => self.definite = Ty::undef(),
@@ -95,10 +95,10 @@ impl ApplyChecker for ApplyTypeChecker<'_, '_> {
         crate::log_debug_ct!("syntax check tuple at {this:?} {p0:?}");

         // todo: caster
-        let selector = match p0 {
+        let arg_offset = match p0 {
             Ty::Value(v) => match v.val {
-                Value::Int(i) => Ok(i as usize),
-                Value::Float(i) => Ok(i as usize),
+                Value::Int(arg_offset) => Ok(arg_offset as usize),
+                Value::Float(arg_offset) => Ok(arg_offset as usize),
                 _ => Err(p0),
             },
             ty => Err(ty),

@@ -112,8 +112,8 @@ impl ApplyChecker for ApplyTypeChecker<'_, '_> {
                 crate::log_debug_ct!(
                     "tuple at check on tuple elem: {cons:?} {p0:?}"
                 );
-                let sel = match selector {
-                    Ok(i) => cons.get(i).cloned(),
+                let sel = match arg_offset {
+                    Ok(arg_offset) => cons.get(arg_offset).cloned(),
                     Err(_) => None,
                 };

@@ -44,23 +44,23 @@ pub fn term_value(value: &Value) -> Ty {
         }
         // todo: term arguments
         Value::Args(..) => Ty::Builtin(BuiltinTy::Args),
-        Value::Plugin(p) => {
+        Value::Plugin(plugin) => {
             // todo: create infer variables for plugin functions
-            let values = p
+            let values = plugin
                 .iter()
-                .map(|k| (k.as_str().into(), Ty::Func(SigTy::any())))
+                .map(|method| (method.as_str().into(), Ty::Func(SigTy::any())))
                 .collect();
             Ty::Dict(RecordTy::new(values))
         }
-        Value::Dict(d) => {
-            let values = d
+        Value::Dict(dict) => {
+            let values = dict
                 .iter()
                 .map(|(k, v)| (k.as_str().into(), term_value_rec(v, Span::detached())))
                 .collect();
             Ty::Dict(RecordTy::new(values))
         }
-        Value::Module(m) => {
-            let values = m
+        Value::Module(module) => {
+            let values = module
                 .scope()
                 .iter()
                 .map(|(k, v, s)| (k.into(), term_value_rec(v, s)))
@@ -68,7 +68,7 @@ pub fn term_value(value: &Value) -> Ty {
             Ty::Dict(RecordTy::new(values))
         }
         Value::Type(ty) => Ty::Builtin(BuiltinTy::TypeType(*ty)),
-        Value::Dyn(v) => Ty::Builtin(BuiltinTy::Type(v.ty())),
+        Value::Dyn(dyn_val) => Ty::Builtin(BuiltinTy::Type(dyn_val.ty())),
         Value::Func(func) => Ty::Func(func_signature(func.clone()).type_sig()),
         _ if is_plain_value(value) => Ty::Value(InsTy::new(value.clone())),
         _ => Ty::Any,

@@ -56,8 +56,8 @@ impl TypeChecker<'_> {

         for elem in elems.iter() {
             match elem {
-                ArgExpr::Pos(p) => {
-                    elements.push(self.check(p));
+                ArgExpr::Pos(pos) => {
+                    elements.push(self.check(pos));
                 }
                 ArgExpr::Spread(..) => {
                     // todo: handle spread args

@@ -99,8 +99,8 @@ impl TypeChecker<'_> {

         for arg in args.iter() {
             match arg {
-                ArgExpr::Pos(p) => {
-                    args_res.push(self.check(p));
+                ArgExpr::Pos(pos) => {
+                    args_res.push(self.check(pos));
                 }
                 ArgExpr::Named(n) => {
                     let (name, value) = n.as_ref();

@@ -499,10 +499,10 @@ impl TypeChecker<'_> {
         Ty::Unary(TypeUnary::new(UnaryOp::Context, body))
     }

-    fn check_conditional(&mut self, i: &Interned<IfExpr>) -> Ty {
-        let cond = self.check(&i.cond);
-        let then = self.check(&i.then);
-        let else_ = self.check(&i.else_);
+    fn check_conditional(&mut self, if_expr: &Interned<IfExpr>) -> Ty {
+        let cond = self.check(&if_expr.cond);
+        let then = self.check(&if_expr.then);
+        let else_ = self.check(&if_expr.else_);

         Ty::If(IfTy::new(cond.into(), then.into(), else_.into()))
     }
@@ -115,12 +115,12 @@ impl StatefulRequest for CompletionRequest {
     ) {
         let node = LinkedNode::new(source.root()).leaf_at_compat(cursor)?;
         if node.erroneous() {
-            let mut n = node.text().chars();
+            let mut chars = node.text().chars();

-            match n.next() {
-                Some(c) if c.is_numeric() => return None,
+            match chars.next() {
+                Some(ch) if ch.is_numeric() => return None,
                 Some('.') => {
-                    if matches!(n.next(), Some(c) if c.is_numeric()) {
+                    if matches!(chars.next(), Some(ch) if ch.is_numeric()) {
                         return None;
                     }
                 }

@@ -171,12 +171,11 @@ impl StatefulRequest for CompletionRequest {
         let mut rng = from_ident.range();
         let ident_prefix = source.text()[rng.start..cursor].to_string();

-        completions.retain(|c| {
-            // c.label
-            let mut prefix_matcher = c.label.chars();
+        completions.retain(|item| {
+            let mut prefix_matcher = item.label.chars();
             'ident_matching: for ch in ident_prefix.chars() {
-                for c in prefix_matcher.by_ref() {
-                    if c == ch {
+                for item in prefix_matcher.by_ref() {
+                    if item == ch {
                         continue 'ident_matching;
                     }
                 }

@@ -190,12 +189,12 @@ impl StatefulRequest for CompletionRequest {
         // if modifying some arguments, we need to truncate and add a comma
         if !is_callee && cursor != rng.end && is_arg_like_context(&from_ident) {
             // extend comma
-            for c in completions.iter_mut() {
-                let apply = match &mut c.apply {
+            for item in completions.iter_mut() {
+                let apply = match &mut item.apply {
                     Some(w) => w,
                     None => {
-                        c.apply = Some(c.label.clone());
-                        c.apply.as_mut().unwrap()
+                        item.apply = Some(item.label.clone());
+                        item.apply.as_mut().unwrap()
                     }
                 };
                 if apply.trim_end().ends_with(',') {

@@ -240,8 +239,8 @@ impl StatefulRequest for CompletionRequest {
                     .commit_char
                     .as_ref()
                     .map(|v| vec![v.to_string()]),
-                command: typst_completion.command.as_ref().map(|c| Command {
-                    command: c.to_string(),
+                command: typst_completion.command.as_ref().map(|cmd| Command {
+                    command: cmd.to_string(),
                     ..Default::default()
                 }),
                 ..Default::default()
@@ -326,7 +325,7 @@ mod tests {
         pkg_mode: bool,
     }

-    fn run(c: TestConfig) -> impl Fn(&mut LocalContext, PathBuf) {
+    fn run(config: TestConfig) -> impl Fn(&mut LocalContext, PathBuf) {
         fn test(ctx: &mut LocalContext, id: TypstFileId) {
             let source = ctx.source_by_id(id).unwrap();
             let rng = find_test_range(&source);

@@ -395,9 +394,9 @@ mod tests {
                 trigger_character,
             };
             results.push(request.request(ctx, doc.clone()).map(|resp| match resp {
-                CompletionResponse::List(l) => CompletionResponse::List(CompletionList {
-                    is_incomplete: l.is_incomplete,
-                    items: get_items(l.items),
+                CompletionResponse::List(list) => CompletionResponse::List(CompletionList {
+                    is_incomplete: list.is_incomplete,
+                    items: get_items(list.items),
                 }),
                 CompletionResponse::Array(items) => CompletionResponse::Array(get_items(items)),
             }));

@@ -410,7 +409,7 @@ mod tests {
         }

         move |ctx, path| {
-            if c.pkg_mode {
+            if config.pkg_mode {
                 let files = ctx
                     .source_files()
                     .iter()
@@ -106,23 +106,23 @@ impl fmt::Display for SigHoverDocs<'_> {

         fn write_param_docs(
             f: &mut fmt::Formatter<'_>,
-            p: &ParamDocsT<TypeRepr>,
+            docs: &ParamDocsT<TypeRepr>,
             kind: &str,
             is_first: &mut bool,
         ) -> fmt::Result {
             if *is_first {
                 *is_first = false;
-                write!(f, "\n\n## {}\n\n", p.name)?;
+                write!(f, "\n\n## {}\n\n", docs.name)?;
             } else {
-                write!(f, "\n\n## {} ({kind})\n\n", p.name)?;
+                write!(f, "\n\n## {} ({kind})\n\n", docs.name)?;
             }

             // p.cano_type.0
-            if let Some(t) = &p.cano_type {
+            if let Some(t) = &docs.cano_type {
                 write!(f, "```typc\ntype: {}\n```\n\n", t.2)?;
             }

-            f.write_str(p.docs.trim())?;
+            f.write_str(docs.docs.trim())?;

             Ok(())
         }

@@ -131,8 +131,8 @@ impl fmt::Display for SigHoverDocs<'_> {
             f.write_str("\n\n# Positional Parameters")?;

             let mut is_first = true;
-            for p in &docs.pos {
-                write_param_docs(f, p, "positional", &mut is_first)?;
+            for pos_docs in &docs.pos {
+                write_param_docs(f, pos_docs, "positional", &mut is_first)?;
             }
         }

@@ -149,8 +149,8 @@ impl fmt::Display for SigHoverDocs<'_> {
             f.write_str("\n\n# Named Parameters")?;

             let mut is_first = true;
-            for p in docs.named.values() {
-                write_param_docs(f, p, "named", &mut is_first)?;
+            for named_docs in docs.named.values() {
+                write_param_docs(f, named_docs, "named", &mut is_first)?;
             }
         }

@@ -176,10 +176,10 @@ impl SignatureDocs {
         };

         f.write_char('(')?;
-        for p in &self.pos {
+        for pos_docs in &self.pos {
             write_sep(f)?;
-            f.write_str(&p.name)?;
-            if let Some(t) = &p.cano_type {
+            f.write_str(&pos_docs.name)?;
+            if let Some(t) = &pos_docs.cano_type {
                 write!(f, ": {}", t.0)?;
             }
         }
@@ -199,27 +199,27 @@ impl SignatureDocs {
             name_prints.push((v.name.clone(), ty, v.default.clone()))
         }
         name_prints.sort();
-        for (k, t, v) in name_prints {
+        for (name, ty, val) in name_prints {
             write_sep(f)?;
-            let v = v.as_deref().unwrap_or("any");
-            let mut v = v.trim();
-            if v.starts_with('{') && v.ends_with('}') && v.len() > 30 {
-                v = "{ .. }"
+            let val = val.as_deref().unwrap_or("any");
+            let mut default = val.trim();
+            if default.starts_with('{') && default.ends_with('}') && default.len() > 30 {
+                default = "{ .. }"
             }
-            if v.starts_with('`') && v.ends_with('`') && v.len() > 30 {
-                v = "raw"
+            if default.starts_with('`') && default.ends_with('`') && default.len() > 30 {
+                default = "raw"
             }
-            if v.starts_with('[') && v.ends_with(']') && v.len() > 30 {
-                v = "content"
+            if default.starts_with('[') && default.ends_with(']') && default.len() > 30 {
+                default = "content"
             }
-            f.write_str(&k)?;
-            if let Some(t) = t {
-                write!(f, ": {t}")?;
+            f.write_str(&name)?;
+            if let Some(ty) = ty {
+                write!(f, ": {ty}")?;
             }
-            if v.contains('\n') {
-                write!(f, " = {}", v.replace("\n", "\n "))?;
+            if default.contains('\n') {
+                write!(f, " = {}", default.replace("\n", "\n "))?;
             } else {
-                write!(f, " = {v}")?;
+                write!(f, " = {default}")?;
            }
        }
    }

@@ -349,12 +349,12 @@ pub(crate) fn sig_docs(sig: &Signature) -> Option<SignatureDocs> {
         .pos()
         .iter()
         .enumerate()
-        .map(|(i, pos)| (pos, type_sig.pos(i)));
+        .map(|(idx, pos)| (pos, type_sig.pos(idx)));
     let named_in = sig
         .primary()
         .named()
         .iter()
-        .map(|x| (x, type_sig.named(&x.name)));
+        .map(|param| (param, type_sig.named(&param.name)));
     let rest_in = sig.primary().rest().map(|x| (x, type_sig.rest_param()));

     let ret_in = type_sig.body.as_ref();
@@ -15,10 +15,10 @@ pub use module::*;
 pub use package::*;
 pub(crate) use tidy::*;

-fn file_id_repr(k: FileId) -> String {
-    if let Some(p) = k.package() {
-        format!("{p}{}", unix_slash(k.vpath().as_rooted_path()))
+fn file_id_repr(fid: FileId) -> String {
+    if let Some(spec) = fid.package() {
+        format!("{spec}{}", unix_slash(fid.vpath().as_rooted_path()))
     } else {
-        unix_slash(k.vpath().as_rooted_path())
+        unix_slash(fid.vpath().as_rooted_path())
     }
 }

@@ -48,9 +48,9 @@ pub fn module_docs(ctx: &mut LocalContext, entry_point: FileId) -> StrResult<Pac

     let module_uses = aliases
         .into_iter()
-        .map(|(k, mut v)| {
+        .map(|(fid, mut v)| {
             v.sort_by(|a, b| a.len().cmp(&b.len()).then(a.cmp(b)));
-            (file_id_repr(k), v.into())
+            (file_id_repr(fid), v.into())
         })
         .collect();

@@ -113,7 +113,7 @@ struct ScanDefCtx<'a> {
 }

 impl ScanDefCtx<'_> {
-    fn defs(&mut self, path: EcoVec<&str>, ei: Arc<ExprInfo>) -> DefInfo {
+    fn defs(&mut self, paths: EcoVec<&str>, ei: Arc<ExprInfo>) -> DefInfo {
         let name = {
             let stem = ei.fid.vpath().as_rooted_path().file_stem();
             stem.and_then(|s| Some(Interned::new_str(s.to_str()?)))
@@ -121,8 +121,8 @@ impl ScanDefCtx<'_> {
         };
         let module_decl = Decl::module(name.clone(), ei.fid).into();
         let site = Some(self.root);
-        let p = path.clone();
-        self.def(&name, p, site.as_ref(), &module_decl, None)
+        let paths = paths.clone();
+        self.def(&name, paths, site.as_ref(), &module_decl, None)
     }

     fn expr(

@@ -133,7 +133,7 @@ impl ScanDefCtx<'_> {
         val: &Expr,
     ) -> DefInfo {
         match val {
-            Expr::Decl(d) => self.def(key, path, site, d, Some(val)),
+            Expr::Decl(decl) => self.def(key, path, site, decl, Some(val)),
             Expr::Ref(r) if r.root.is_some() => {
                 self.expr(key, path, site, r.root.as_ref().unwrap())
             }

@@ -170,7 +170,7 @@ impl ScanDefCtx<'_> {
     ) -> DefInfo {
         let def = self.ctx.def_of_decl(decl);
         let def_docs = def.and_then(|def| self.ctx.def_docs(&def));
-        let docs = def_docs.as_ref().map(|d| d.docs().clone());
+        let docs = def_docs.as_ref().map(|docs| docs.docs().clone());
         let children = match decl.as_ref() {
             Decl::Module(..) => decl.file_id().and_then(|fid| {
                 // only generate docs for the same package

@@ -195,10 +195,10 @@ impl ScanDefCtx<'_> {
                 let symbols = ei
                     .exports
                     .iter()
-                    .map(|(k, v)| {
+                    .map(|(name, val)| {
                         let mut path = path.clone();
-                        path.push(k);
-                        self.expr(k, path.clone(), Some(&fid), v)
+                        path.push(name);
+                        self.expr(name, path.clone(), Some(&fid), val)
                     })
                     .collect();
                 Some(symbols)
@@ -220,7 +220,8 @@ impl ScanDefCtx<'_> {
             oneliner: None,
         };

-        if let Some((span, mod_fid)) = head.decl.as_ref().and_then(|d| d.file_id()).zip(site) {
+        if let Some((span, mod_fid)) = head.decl.as_ref().and_then(|decl| decl.file_id()).zip(site)
+        {
             if span != *mod_fid {
                 head.is_external = true;
                 head.oneliner = head.docs.map(|docs| oneliner(&docs).to_owned());

@@ -229,7 +230,7 @@ impl ScanDefCtx<'_> {
         }

         // Insert module that is not exported
-        if let Some(fid) = head.decl.as_ref().and_then(|d| d.file_id()) {
+        if let Some(fid) = head.decl.as_ref().and_then(|del| del.file_id()) {
             // only generate docs for the same package
             if fid.package() == self.for_spec {
                 let av = self.aliases.entry(fid).or_default();

@@ -88,7 +88,7 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Str

     crate::log_debug_ct!("module: {primary} -- {parent_ident}");

-    let persist_fid = fid.map(|f| file_ids.insert_full(f).0);
+    let persist_fid = fid.map(|fid| file_ids.insert_full(fid).0);

     #[derive(Serialize)]
     struct ModuleInfo {
@@ -98,17 +98,17 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Str
         parent_ident: EcoString,
         aka: EcoVec<String>,
     }
-    let m = jbase64(&ModuleInfo {
+    let module_info = jbase64(&ModuleInfo {
         prefix: primary.as_str().into(),
         name: def.name.clone(),
         loc: persist_fid,
         parent_ident: parent_ident.clone(),
         aka,
     });
-    let _ = writeln!(md, "<!-- begin:module {primary} {m} -->");
+    let _ = writeln!(md, "<!-- begin:module {primary} {module_info} -->");

     for mut child in children {
-        let span = child.decl.as_ref().map(|d| d.span());
+        let span = child.decl.as_ref().map(|decl| decl.span());
         let fid_range = span.and_then(|v| {
             v.id().and_then(|fid| {
                 let allocated = file_ids.insert_full(fid).0;

@@ -117,7 +117,7 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Str
                 Some((allocated, rng.start, rng.end))
             })
         });
-        let child_fid = child.decl.as_ref().and_then(|d| d.file_id());
+        let child_fid = child.decl.as_ref().and_then(|decl| decl.file_id());
         let child_fid = child_fid.or_else(|| span.and_then(Span::id)).or(fid);
         let span = fid_range.or_else(|| {
             let fid = child_fid?;

@@ -191,8 +191,12 @@ pub fn package_docs(ctx: &mut LocalContext, spec: &PackageInfo) -> StrResult<Str
             (Some(docs), _) if !child.is_external => {
                 let _ = writeln!(md, "{}", remove_list_annotations(docs.docs()));
                 printed_docs = true;
-                if let DefDocs::Function(f) = docs {
-                    for param in f.pos.iter().chain(f.named.values()).chain(f.rest.as_ref())
+                if let DefDocs::Function(docs) = docs {
+                    for param in docs
+                        .pos
+                        .iter()
+                        .chain(docs.named.values())
+                        .chain(docs.rest.as_ref())
                     {
                         let _ = writeln!(md, "<!-- begin:param {} -->", param.name);
                         let ty = match &param.cano_type {
@@ -350,21 +354,21 @@ mod tests {
     use crate::tests::*;

     fn test(pkg: PackageSpec) {
-        run_with_sources("", |verse: &mut LspUniverse, p| {
-            let path = verse.registry.resolve(&pkg).unwrap();
+        run_with_sources("", |verse: &mut LspUniverse, path| {
+            let pkg_root = verse.registry.resolve(&pkg).unwrap();
             let pi = PackageInfo {
-                path: path.as_ref().to_owned(),
+                path: pkg_root.as_ref().to_owned(),
                 namespace: pkg.namespace,
                 name: pkg.name,
                 version: pkg.version.to_string(),
             };
-            run_with_ctx(verse, p, &|a, _p| {
-                let d = package_docs(a, &pi).unwrap();
+            run_with_ctx(verse, path, &|a, _p| {
+                let docs = package_docs(a, &pi).unwrap();
                 let dest = format!(
                     "../../target/{}-{}-{}.md",
                     pi.namespace, pi.name, pi.version
                 );
-                std::fs::write(dest, d).unwrap();
+                std::fs::write(dest, docs).unwrap();
             })
         })
     }

@@ -32,14 +32,14 @@ pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
     let mut return_ty = None;
     let mut break_line = None;

-    let mut i = lines.len();
+    let mut line_width = lines.len();
     'search: loop {
-        if i == 0 {
+        if line_width == 0 {
             break;
         }
-        i -= 1;
+        line_width -= 1;

-        let line = lines[i];
+        let line = lines[line_width];
         if line.is_empty() {
             continue;
         }
@@ -52,7 +52,7 @@ pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
             continue;
         };

-        break_line = Some(i);
+        break_line = Some(line_width);
         return_ty = Some(w.trim().into());
         break;
     }

@@ -61,10 +61,10 @@ pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
             .trim_end()
             .strip_suffix("<!-- typlite:end:list-item 0 -->")
         else {
-            break_line = Some(i + 1);
+            break_line = Some(line_width + 1);
             break 'search;
         };
-        let mut current_line_no = i;
+        let mut current_line_no = line_width;

         loop {
             // <!-- typlite:begin:list-item -->

@@ -84,7 +84,7 @@ pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
             buf.push(line_content);

             if current_line_no == 0 {
-                break_line = Some(i + 1);
+                break_line = Some(line_width + 1);
                 break 'search;
             }
             current_line_no -= 1;

@@ -95,7 +95,7 @@ pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
         buf.reverse();

         let Some(first_line) = buf.first_mut() else {
-            break_line = Some(i + 1);
+            break_line = Some(line_width + 1);
             break 'search;
         };
         *first_line = first_line.trim();

@@ -107,11 +107,11 @@ pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
             *first_line = rest.trim();
             Some((param_name.into(), type_content.into()))
         }) else {
-            break_line = Some(i + 1);
+            break_line = Some(line_width + 1);
            break 'search;
        };

-        i = current_line_no;
+        line_width = current_line_no;
         params.push(TidyParamDocs {
             name: param_line.0,
             types: param_line.1,
@@ -143,15 +143,15 @@ pub fn identify_tidy_module_docs(docs: EcoString) -> StrResult<TidyModuleDocs> {
 fn match_brace(trim_start: &str) -> Option<(&str, &str)> {
     let mut brace_count = 1;
     let mut end = 0;
-    for (i, c) in trim_start.char_indices() {
-        match c {
+    for (idx, ch) in trim_start.char_indices() {
+        match ch {
             '(' => brace_count += 1,
             ')' => brace_count -= 1,
             _ => {}
         }

         if brace_count == 0 {
-            end = i;
+            end = idx;
             break;
         }
     }

@@ -171,9 +171,9 @@ mod tests {
     use super::TidyParamDocs;

     fn func(s: &str) -> String {
-        let f = super::identify_pat_docs(s).unwrap();
-        let mut res = format!(">> docs:\n{}\n<< docs", f.docs);
-        if let Some(t) = f.return_ty {
+        let docs = super::identify_pat_docs(s).unwrap();
+        let mut res = format!(">> docs:\n{}\n<< docs", docs.docs);
+        if let Some(t) = docs.return_ty {
             res.push_str(&format!("\n>>return\n{t}\n<<return"));
         }
         for TidyParamDocs {

@@ -181,7 +181,7 @@ mod tests {
             types,
             docs,
             default: _,
-        } in f.params
+        } in docs.params
         {
             let _ = write!(res, "\n>>arg {name}: {types}\n{docs}\n<< arg");
         }

@@ -189,9 +189,9 @@ mod tests {
     }

     fn var(s: &str) -> String {
-        let f = super::identify_pat_docs(s).unwrap();
-        let mut res = format!(">> docs:\n{}\n<< docs", f.docs);
-        if let Some(t) = f.return_ty {
+        let docs = super::identify_pat_docs(s).unwrap();
+        let mut res = format!(">> docs:\n{}\n<< docs", docs.docs);
+        if let Some(t) = docs.return_ty {
             res.push_str(&format!("\n>>return\n{t}\n<<return"));
         }
         res
@@ -71,19 +71,19 @@ impl StatefulRequest for HoverRequest {
             }
         };

-        if let Some(p) = ctx.analysis.periscope.clone() {
+        if let Some(provider) = ctx.analysis.periscope.clone() {
             if let Some(doc) = doc.clone() {
                 let position = jump_from_cursor(&doc.document, &source, cursor);
                 let position = position.or_else(|| {
-                    for i in 1..100 {
-                        let next_cursor = cursor + i;
+                    for idx in 1..100 {
+                        let next_cursor = cursor + idx;
                         if next_cursor < source.text().len() {
                             let position = jump_from_cursor(&doc.document, &source, next_cursor);
                             if position.is_some() {
                                 return position;
                             }
                         }
-                        let prev_cursor = cursor.checked_sub(i);
+                        let prev_cursor = cursor.checked_sub(idx);
                         if let Some(prev_cursor) = prev_cursor {
                             let position = jump_from_cursor(&doc.document, &source, prev_cursor);
                             if position.is_some() {

@@ -96,7 +96,7 @@ impl StatefulRequest for HoverRequest {
                 });

                 log::info!("telescope position: {:?}", position);
-                let content = position.and_then(|pos| p.periscope_at(ctx, doc, pos));
+                let content = position.and_then(|pos| provider.periscope_at(ctx, doc, pos));
                 if let Some(preview_content) = content {
                     contents = format!("{preview_content}\n---\n{contents}");
                 }

@@ -128,9 +128,9 @@ fn def_tooltip(
         Label(..) => {
             results.push(MarkedString::String(format!("Label: {}\n", def.name())));
             // todo: type repr
-            if let Some(c) = def.term.as_ref().and_then(|v| v.value()) {
-                let c = truncated_repr(&c);
-                results.push(MarkedString::String(format!("{c}")));
+            if let Some(val) = def.term.as_ref().and_then(|v| v.value()) {
+                let repr = truncated_repr(&val);
+                results.push(MarkedString::String(format!("{repr}")));
             }
             Some(HoverContents::Array(results))
         }

@@ -125,8 +125,8 @@ impl InlayHintWorker<'_> {
         let call_info = analyze_call(self.ctx, self.source.clone(), node.clone())?;
         crate::log_debug_ct!("got call_info {call_info:?}");

-        let f = node.cast::<ast::FuncCall>().unwrap();
-        let args = f.args();
+        let call = node.cast::<ast::FuncCall>().unwrap();
+        let args = call.args();
         let args_node = node.find(args.span())?;

         let check_single_pos_arg = || {
@@ -19,20 +19,20 @@ pub fn jump_from_cursor(document: &Document, source: &Source, cursor: usize) ->
     }

     let mut min_dis = u64::MAX;
-    let mut p = Point::default();
+    let mut point = Point::default();
     let mut ppage = 0usize;

     let span = node.span();
-    for (i, page) in document.pages.iter().enumerate() {
+    for (idx, page) in document.pages.iter().enumerate() {
         let t_dis = min_dis;
-        if let Some(pos) = find_in_frame(&page.frame, span, &mut min_dis, &mut p) {
+        if let Some(point) = find_in_frame(&page.frame, span, &mut min_dis, &mut point) {
             return Some(Position {
-                page: NonZeroUsize::new(i + 1)?,
-                point: pos,
+                page: NonZeroUsize::new(idx + 1)?,
+                point,
             });
         }
         if t_dis != min_dis {
-            ppage = i;
+            ppage = idx;
         }
     }

@@ -42,16 +42,16 @@ pub fn jump_from_cursor(document: &Document, source: &Source, cursor: usize) ->

     Some(Position {
         page: NonZeroUsize::new(ppage + 1)?,
-        point: p,
+        point,
     })
 }

 /// Find the position of a span in a frame.
-fn find_in_frame(frame: &Frame, span: Span, min_dis: &mut u64, p: &mut Point) -> Option<Point> {
+fn find_in_frame(frame: &Frame, span: Span, min_dis: &mut u64, res: &mut Point) -> Option<Point> {
     for (mut pos, item) in frame.items() {
         if let FrameItem::Group(group) = item {
             // TODO: Handle transformation.
-            if let Some(point) = find_in_frame(&group.frame, span, min_dis, p) {
+            if let Some(point) = find_in_frame(&group.frame, span, min_dis, res) {
                 return Some(point + pos);
             }
         }

@@ -65,7 +65,7 @@ fn find_in_frame(frame: &Frame, span: Span, min_dis: &mut u64, p: &mut Point) ->
                 let dis = glyph.span.0.number().abs_diff(span.number());
                 if dis < *min_dis {
                     *min_dis = dis;
-                    *p = pos;
+                    *res = pos;
                 }
             }
             pos.x += glyph.x_advance.at(text.size);
@ -91,10 +91,10 @@ impl OnEnterWorker<'_> {
|
|||
.count();
|
||||
|
||||
let comment_prefix = {
|
||||
let mut p = unscanny::Scanner::new(leaf.text());
|
||||
p.eat_while('/');
|
||||
p.eat_if('!');
|
||||
p.before()
|
||||
let mut scanner = unscanny::Scanner::new(leaf.text());
|
||||
scanner.eat_while('/');
|
||||
scanner.eat_if('!');
|
||||
scanner.before()
|
||||
};
|
||||
|
||||
// Continuing single-line non-doc comments (like this one :) ) is annoying
|
||||
|
|
|
@@ -105,9 +105,9 @@ pub(crate) fn prepare_renaming(
fn validate_fn_renaming(def: &Definition) -> Option<()> {
use typst::foundations::func::Repr;
let value = def.value();
let mut f = match &value {
let mut func = match &value {
None => return Some(()),
Some(Value::Func(f)) => f,
Some(Value::Func(func)) => func,
Some(..) => {
log::info!(
"prepare_rename: not a function on function definition site: {:?}",
@@ -117,9 +117,9 @@ fn validate_fn_renaming(def: &Definition) -> Option<()> {
}
};
loop {
match f.inner() {
match func.inner() {
// todo: rename with site
Repr::With(w) => f = &w.0,
Repr::With(w) => func = &w.0,
Repr::Closure(..) => return Some(()),
// native functions can't be renamed
Repr::Native(..) | Repr::Element(..) => return None,
@@ -200,15 +200,15 @@ mod tests {
let result = request.request(ctx, doc);
let mut result = result.map(|v| {
v.into_iter()
.map(|l| {
let fp = unix_slash(&url_to_path(l.uri));
.map(|loc| {
let fp = unix_slash(&url_to_path(loc.uri));
let fp = fp.strip_prefix("C:").unwrap_or(&fp);
format!(
"{fp}@{}:{}:{}:{}",
l.range.start.line,
l.range.start.character,
l.range.end.line,
l.range.end.character
loc.range.start.line,
loc.range.start.character,
loc.range.end.line,
loc.range.end.character
)
})
.collect::<Vec<_>>()
@@ -98,9 +98,9 @@ impl StatefulRequest for RenameRequest {

let mut edits = HashMap::new();

for i in references {
let uri = i.uri;
let range = i.range;
for loc in references {
let uri = loc.uri;
let range = loc.range;
let edits = edits.entry(uri).or_insert_with(Vec::new);
edits.push(TextEdit {
range,
@@ -236,7 +236,7 @@ impl RenameFileWorker<'_> {
fn rename_module_path(&mut self, span: Span, r: &RefExpr, src: &Source) -> Option<TextEdit> {
let importing = r.root.as_ref()?.file_id();

if importing.map_or(true, |i| i != self.def_fid) {
if importing.map_or(true, |fid| fid != self.def_fid) {
return None;
}
crate::log_debug_ct!("import: {span:?} -> {importing:?} v.s. {:?}", self.def_fid);
@@ -246,10 +246,11 @@ impl RenameFileWorker<'_> {
let import_node = root.find(span).and_then(deref_expr)?;
let (import_path, has_path_var) = node_ancestors(&import_node).find_map(|import_node| {
match import_node.cast::<ast::Expr>()? {
ast::Expr::Import(i) => {
Some((i.source(), i.new_name().is_none() && i.imports().is_none()))
}
ast::Expr::Include(i) => Some((i.source(), false)),
ast::Expr::Import(import) => Some((
import.source(),
import.new_name().is_none() && import.imports().is_none(),
)),
ast::Expr::Include(include) => Some((include.source(), false)),
_ => None,
}
})?;
@@ -53,7 +53,7 @@ impl SemanticRequest for SignatureHelpRequest {

let mut real_offset = 0;
let focus_name = OnceCell::new();
for (i, (param, ty)) in sig.params().enumerate() {
for (idx, (param, ty)) in sig.params().enumerate() {
if is_set && !param.attrs.settable {
continue;
}
@@ -61,7 +61,7 @@ impl SemanticRequest for SignatureHelpRequest {
match &target {
ArgClass::Positional { .. } if is_set => {}
ArgClass::Positional { positional, .. } => {
if (*positional) + param_shift == i {
if (*positional) + param_shift == idx {
active_parameter = Some(real_offset);
}
}
@@ -104,13 +104,13 @@ impl DocCommentMatcher {
return None;
}

let comments = comments.iter().map(|c| match c {
RawComment::Line(c) => {
let comments = comments.iter().map(|comment| match comment {
RawComment::Line(line) => {
// strip all slash prefix
let text = c.trim_start_matches('/');
let text = line.trim_start_matches('/');
text
}
RawComment::Block(c) => {
RawComment::Block(block) => {
fn remove_comment(text: &str) -> Option<&str> {
let mut text = text.strip_prefix("/*")?.strip_suffix("*/")?.trim();
// trip start star
@@ -120,27 +120,27 @@ impl DocCommentMatcher {
Some(text)
}

remove_comment(c).unwrap_or(c.as_str())
remove_comment(block).unwrap_or(block.as_str())
}
});
let comments = comments.collect::<Vec<_>>();

let dedent = comments.iter().fold(usize::MAX, |acc, c| {
let indent = c.chars().take_while(|c| c.is_whitespace()).count();
let dedent = comments.iter().fold(usize::MAX, |acc, content| {
let indent = content.chars().take_while(|ch| ch.is_whitespace()).count();
acc.min(indent)
});

let size_hint = comments.iter().map(|c| c.len()).sum::<usize>();
let size_hint = comments.iter().map(|comment| comment.len()).sum::<usize>();
let mut comments = comments
.iter()
.map(|c| c.chars().skip(dedent).collect::<String>());
.map(|comment| comment.chars().skip(dedent).collect::<String>());

let res = comments.try_fold(String::with_capacity(size_hint), |mut acc, c| {
let res = comments.try_fold(String::with_capacity(size_hint), |mut acc, comment| {
if !acc.is_empty() {
acc.push('\n');
}

acc.push_str(&c);
acc.push_str(&comment);
Some(acc)
});
@@ -81,16 +81,16 @@ impl Expr {

pub(crate) fn span(&self) -> Span {
match self {
Expr::Decl(d) => d.span(),
Expr::Select(a) => a.span,
Expr::Apply(a) => a.span,
Expr::Decl(decl) => decl.span(),
Expr::Select(select) => select.span,
Expr::Apply(apply) => apply.span,
_ => Span::detached(),
}
}

pub(crate) fn file_id(&self) -> Option<TypstFileId> {
match self {
Expr::Decl(d) => d.file_id(),
Expr::Decl(decl) => decl.file_id(),
_ => self.span().id(),
}
}
@@ -332,10 +332,10 @@ impl Decl {
use std::str::FromStr;
let name = if s.starts_with('@') {
let spec = PackageSpec::from_str(s).ok();
spec.map(|p| Interned::new_str(p.name.as_str()))
spec.map(|spec| Interned::new_str(spec.name.as_str()))
} else {
let stem = Path::new(s).file_stem();
stem.and_then(|s| Some(Interned::new_str(s.to_str()?)))
stem.and_then(|stem| Some(Interned::new_str(stem.to_str()?)))
};
name.unwrap_or_default()
}
@@ -280,40 +280,42 @@ impl DocsChecker<'_> {
.or_else(|| self.check_type_annotation(m, name))
}

fn check_type_annotation(&mut self, m: &Module, name: &str) -> Option<Ty> {
if let Some(v) = self.globals.get(name) {
return v.clone();
fn check_type_annotation(&mut self, module: &Module, name: &str) -> Option<Ty> {
if let Some(term) = self.globals.get(name) {
return term.clone();
}

let v = m.scope().get(name)?;
let val = module.scope().get(name)?;
crate::log_debug_ct!("check doc type annotation: {name:?}");
if let Value::Content(c) = v {
let annotated = c.clone().unpack::<typst::text::RawElem>().ok()?;
let text = annotated.text().clone().into_value().cast::<Str>().ok()?;
let code = typst::syntax::parse_code(&text.as_str().replace('\'', "θ"));
if let Value::Content(raw) = val {
let annotated = raw.clone().unpack::<typst::text::RawElem>().ok()?;
let annotated = annotated.text().clone().into_value().cast::<Str>().ok()?;
let code = typst::syntax::parse_code(&annotated.as_str().replace('\'', "θ"));
let mut exprs = code.cast::<ast::Code>()?.exprs();
let ret = self.check_type_expr(m, exprs.next()?);
self.globals.insert(name.into(), ret.clone());
ret
let term = self.check_type_expr(module, exprs.next()?);
self.globals.insert(name.into(), term.clone());
term
} else {
None
}
}

fn check_type_expr(&mut self, m: &Module, s: ast::Expr) -> Option<Ty> {
crate::log_debug_ct!("check doc type expr: {s:?}");
match s {
ast::Expr::Ident(i) => self.check_type_ident(m, i.get().as_str()),
fn check_type_expr(&mut self, module: &Module, expr: ast::Expr) -> Option<Ty> {
crate::log_debug_ct!("check doc type expr: {expr:?}");
match expr {
ast::Expr::Ident(ident) => self.check_type_ident(module, ident.get().as_str()),
ast::Expr::None(_)
| ast::Expr::Auto(_)
| ast::Expr::Bool(..)
| ast::Expr::Int(..)
| ast::Expr::Float(..)
| ast::Expr::Numeric(..)
| ast::Expr::Str(..) => SharedContext::const_eval(s).map(|v| Ty::Value(InsTy::new(v))),
| ast::Expr::Str(..) => {
SharedContext::const_eval(expr).map(|v| Ty::Value(InsTy::new(v)))
}
ast::Expr::Binary(binary) => {
let mut components = Vec::with_capacity(2);
components.push(self.check_type_expr(m, binary.lhs())?);
components.push(self.check_type_expr(module, binary.lhs())?);

let mut rhs = binary.rhs();
while let ast::Expr::Binary(binary) = rhs {
@@ -321,26 +323,27 @@ impl DocsChecker<'_> {
break;
}

components.push(self.check_type_expr(m, binary.lhs())?);
components.push(self.check_type_expr(module, binary.lhs())?);
rhs = binary.rhs();
}

components.push(self.check_type_expr(m, rhs)?);
components.push(self.check_type_expr(module, rhs)?);
Some(Ty::from_types(components.into_iter()))
}
ast::Expr::FuncCall(c) => match c.callee() {
ast::Expr::Ident(i) => {
let name = i.get().as_str();
ast::Expr::FuncCall(call) => match call.callee() {
ast::Expr::Ident(callee) => {
let name = callee.get().as_str();
match name {
"array" => Some({
let ast::Arg::Pos(pos) = c.args().items().next()? else {
let ast::Arg::Pos(pos) = call.args().items().next()? else {
return None;
};

Ty::Array(self.check_type_expr(m, pos)?.into())
Ty::Array(self.check_type_expr(module, pos)?.into())
}),
"tag" => Some({
let ast::Arg::Pos(ast::Expr::Str(s)) = c.args().items().next()? else {
let ast::Arg::Pos(ast::Expr::Str(s)) = call.args().items().next()?
else {
return None;
};
let pkg_id = PackageId::try_from(self.fid).ok();
@@ -354,15 +357,15 @@ impl DocsChecker<'_> {
}
_ => None,
},
ast::Expr::Closure(c) => {
crate::log_debug_ct!("check doc closure annotation: {c:?}");
ast::Expr::Closure(closure) => {
crate::log_debug_ct!("check doc closure annotation: {closure:?}");
let mut pos_all = vec![];
let mut named_all = BTreeMap::new();
let mut spread_right = None;
let snap = self.locals.snapshot();

let sig = None.or_else(|| {
for param in c.params().children() {
for param in closure.params().children() {
match param {
ast::Param::Pos(ast::Pattern::Normal(ast::Expr::Ident(pos))) => {
let name = pos.get().clone();
@@ -374,7 +377,9 @@ impl DocsChecker<'_> {
pos_all.push(Ty::Any);
}
ast::Param::Named(named) => {
let term = self.check_type_expr(m, named.expr()).unwrap_or(Ty::Any);
let term = self
.check_type_expr(module, named.expr())
.unwrap_or(Ty::Any);
named_all.insert(named.name().into(), term);
}
// todo: spread left/right
@@ -390,7 +395,7 @@ impl DocsChecker<'_> {
}
}

let body = self.check_type_expr(m, c.body())?;
let body = self.check_type_expr(module, closure.body())?;
let sig = SigTy::new(
pos_all.into_iter(),
named_all,
@@ -406,8 +411,8 @@ impl DocsChecker<'_> {
self.locals.rollback_to(snap);
sig
}
ast::Expr::Dict(d) => {
crate::log_debug_ct!("check doc dict annotation: {d:?}");
ast::Expr::Dict(decl) => {
crate::log_debug_ct!("check doc dict annotation: {decl:?}");
None
}
_ => None,
@@ -41,8 +41,8 @@ pub(crate) fn expr_of(
{
return None;
}
for (i, prev_exports) in &prev.imports {
let ei = ctx.exports_of(&ctx.source_by_id(*i).ok()?, route);
for (fid, prev_exports) in &prev.imports {
let ei = ctx.exports_of(&ctx.source_by_id(*fid).ok()?, route);

// If there is a cycle, the expression will be stable as the source is
// unchanged.
@@ -155,7 +155,7 @@ impl std::hash::Hash for ExprInfo {
self.exports.hash(state);
self.root.hash(state);
let mut imports = self.imports.iter().collect::<Vec<_>>();
imports.sort_by_key(|(k, _)| *k);
imports.sort_by_key(|(fid, _)| *fid);
imports.hash(state);
}
}
@ -333,19 +333,19 @@ impl ExprWorker<'_> {
|
|||
Equation(equation) => self.check_math(equation.body().to_untyped().children()),
|
||||
Math(math) => self.check_math(math.to_untyped().children()),
|
||||
Code(code_block) => self.check_code(code_block.body()),
|
||||
Content(c) => self.check_markup(c.body()),
|
||||
Content(content_block) => self.check_markup(content_block.body()),
|
||||
|
||||
Ident(ident) => self.check_ident(ident),
|
||||
MathIdent(math_ident) => self.check_math_ident(math_ident),
|
||||
Label(label) => self.check_label(label),
|
||||
Ref(r) => self.check_ref(r),
|
||||
Ref(ref_node) => self.check_ref(ref_node),
|
||||
|
||||
Let(let_binding) => self.check_let(let_binding),
|
||||
Closure(closure) => self.check_closure(closure),
|
||||
Import(module_import) => self.check_module_import(module_import),
|
||||
Include(module_include) => self.check_module_include(module_include),
|
||||
|
||||
Parenthesized(p) => self.check(p.expr()),
|
||||
Parenthesized(paren_expr) => self.check(paren_expr.expr()),
|
||||
Array(array) => self.check_array(array),
|
||||
Dict(dict) => self.check_dict(dict),
|
||||
Unary(unary) => self.check_unary(unary),
|
||||
|
@ -355,7 +355,9 @@ impl ExprWorker<'_> {
|
|||
DestructAssign(destruct_assignment) => self.check_destruct_assign(destruct_assignment),
|
||||
Set(set_rule) => self.check_set(set_rule),
|
||||
Show(show_rule) => self.check_show(show_rule),
|
||||
Contextual(c) => Expr::Unary(UnInst::new(UnaryOp::Context, self.defer(c.body()))),
|
||||
Contextual(contextual) => {
|
||||
Expr::Unary(UnInst::new(UnaryOp::Context, self.defer(contextual.body())))
|
||||
}
|
||||
Conditional(conditional) => self.check_conditional(conditional),
|
||||
While(while_loop) => self.check_while_loop(while_loop),
|
||||
For(for_loop) => self.check_for_loop(for_loop),
|
||||
|
@ -425,8 +427,8 @@ impl ExprWorker<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn check_label(&mut self, ident: ast::Label) -> Expr {
|
||||
Expr::Decl(Decl::label(ident.get(), ident.span()).into())
|
||||
fn check_label(&mut self, label: ast::Label) -> Expr {
|
||||
Expr::Decl(Decl::label(label.get(), label.span()).into())
|
||||
}
|
||||
|
||||
fn check_element<T: NativeElement>(&mut self, content: EcoVec<Expr>) -> Expr {
|
||||
|
@ -528,34 +530,36 @@ impl ExprWorker<'_> {
|
|||
match typed {
|
||||
ast::Pattern::Normal(expr) => self.check_pattern_expr(expr),
|
||||
ast::Pattern::Placeholder(..) => Pattern::Expr(Expr::Star).into(),
|
||||
ast::Pattern::Parenthesized(p) => self.check_pattern(p.pattern()),
|
||||
ast::Pattern::Destructuring(d) => {
|
||||
ast::Pattern::Parenthesized(paren_expr) => self.check_pattern(paren_expr.pattern()),
|
||||
ast::Pattern::Destructuring(destructing) => {
|
||||
let mut inputs = eco_vec![];
|
||||
let mut names = eco_vec![];
|
||||
let mut spread_left = None;
|
||||
let mut spread_right = None;
|
||||
|
||||
for item in d.items() {
|
||||
for item in destructing.items() {
|
||||
match item {
|
||||
ast::DestructuringItem::Pattern(p) => {
|
||||
inputs.push(self.check_pattern(p));
|
||||
ast::DestructuringItem::Pattern(pos) => {
|
||||
inputs.push(self.check_pattern(pos));
|
||||
}
|
||||
ast::DestructuringItem::Named(n) => {
|
||||
let key = Decl::var(n.name()).into();
|
||||
let val = self.check_pattern_expr(n.expr());
|
||||
ast::DestructuringItem::Named(named) => {
|
||||
let key = Decl::var(named.name()).into();
|
||||
let val = self.check_pattern_expr(named.expr());
|
||||
names.push((key, val));
|
||||
}
|
||||
ast::DestructuringItem::Spread(s) => {
|
||||
let decl: DeclExpr = if let Some(ident) = s.sink_ident() {
|
||||
ast::DestructuringItem::Spread(spreading) => {
|
||||
let decl: DeclExpr = if let Some(ident) = spreading.sink_ident() {
|
||||
Decl::var(ident).into()
|
||||
} else {
|
||||
Decl::spread(s.span()).into()
|
||||
Decl::spread(spreading.span()).into()
|
||||
};
|
||||
|
||||
if inputs.is_empty() {
|
||||
spread_left = Some((decl, self.check_pattern_expr(s.expr())));
|
||||
spread_left =
|
||||
Some((decl, self.check_pattern_expr(spreading.expr())));
|
||||
} else {
|
||||
spread_right = Some((decl, self.check_pattern_expr(s.expr())));
|
||||
spread_right =
|
||||
Some((decl, self.check_pattern_expr(spreading.expr())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -601,7 +605,9 @@ impl ExprWorker<'_> {
|
|||
let mod_var = typed.new_name().map(Decl::module_alias).or_else(|| {
|
||||
typed.imports().is_none().then(|| {
|
||||
let name = match mod_expr.as_ref()? {
|
||||
Expr::Decl(d) if matches!(d.as_ref(), Decl::Module { .. }) => d.name().clone(),
|
||||
Expr::Decl(decl) if matches!(decl.as_ref(), Decl::Module { .. }) => {
|
||||
decl.name().clone()
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
// todo: package stem
|
||||
|
@ -631,9 +637,9 @@ impl ExprWorker<'_> {
|
|||
Value::Module(m) => m.file_id(),
|
||||
_ => None,
|
||||
},
|
||||
Expr::Decl(d) => {
|
||||
if matches!(d.as_ref(), Decl::Module { .. }) {
|
||||
d.file_id()
|
||||
Expr::Decl(decl) => {
|
||||
if matches!(decl.as_ref(), Decl::Module { .. }) {
|
||||
decl.file_id()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -642,9 +648,9 @@ impl ExprWorker<'_> {
|
|||
});
|
||||
|
||||
// Prefetch Type Check Information
|
||||
if let Some(f) = fid {
|
||||
crate::log_debug_ct!("prefetch type check: {f:?}");
|
||||
self.ctx.prefetch_type_check(f);
|
||||
if let Some(fid) = fid {
|
||||
crate::log_debug_ct!("prefetch type check: {fid:?}");
|
||||
self.ctx.prefetch_type_check(fid);
|
||||
}
|
||||
|
||||
let scope = if let Some(fid) = &fid {
|
||||
|
@ -653,9 +659,9 @@ impl ExprWorker<'_> {
|
|||
match &mod_expr {
|
||||
Some(Expr::Type(Ty::Value(v))) => match &v.val {
|
||||
Value::Module(m) => Some(ExprScope::Module(m.clone())),
|
||||
Value::Func(f) => {
|
||||
if f.scope().is_some() {
|
||||
Some(ExprScope::Func(f.clone()))
|
||||
Value::Func(func) => {
|
||||
if func.scope().is_some() {
|
||||
Some(ExprScope::Func(func.clone()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -715,7 +721,7 @@ impl ExprWorker<'_> {
|
|||
Expr::Type(Ty::Value(val)) => {
|
||||
self.check_import_source_val(source, &val.val, Some(&src_expr), is_import)
|
||||
}
|
||||
Expr::Decl(d) if matches!(d.as_ref(), Decl::Module { .. }) => {
|
||||
Expr::Decl(decl) if matches!(decl.as_ref(), Decl::Module { .. }) => {
|
||||
return Some(src_expr.clone())
|
||||
}
|
||||
|
||||
|
@ -817,7 +823,7 @@ impl ExprWorker<'_> {
|
|||
path.push(seg);
|
||||
}
|
||||
// todo: import path
|
||||
let (mut root, val) = match path.last().map(|d| d.name()) {
|
||||
let (mut root, val) = match path.last().map(|decl| decl.name()) {
|
||||
Some(name) => scope.get(name),
|
||||
None => (None, None),
|
||||
};
|
||||
|
@ -973,12 +979,12 @@ impl ExprWorker<'_> {
|
|||
fn check_set(&mut self, typed: ast::SetRule) -> Expr {
|
||||
let target = self.check(typed.target());
|
||||
let args = self.check_args(typed.args());
|
||||
let cond = typed.condition().map(|c| self.check(c));
|
||||
let cond = typed.condition().map(|cond| self.check(cond));
|
||||
Expr::Set(SetExpr { target, args, cond }.into())
|
||||
}
|
||||
|
||||
fn check_show(&mut self, typed: ast::ShowRule) -> Expr {
|
||||
let selector = typed.selector().map(|s| self.check(s));
|
||||
let selector = typed.selector().map(|selector| self.check(selector));
|
||||
let edit = self.defer(typed.transform());
|
||||
Expr::Show(ShowExpr { selector, edit }.into())
|
||||
}
|
||||
|
@ -1145,10 +1151,10 @@ impl ExprWorker<'_> {
|
|||
crate::log_debug_ct!("checking expr: {expr:?}");
|
||||
|
||||
match expr {
|
||||
ast::Expr::FieldAccess(f) => {
|
||||
let field = Decl::ident_ref(f.field());
|
||||
ast::Expr::FieldAccess(field_access) => {
|
||||
let field = Decl::ident_ref(field_access.field());
|
||||
|
||||
let (expr, term) = self.eval_expr(f.target(), mode);
|
||||
let (expr, term) = self.eval_expr(field_access.target(), mode);
|
||||
let term = term.and_then(|v| {
|
||||
// todo: use type select
|
||||
// v.select(field.name()).ok()
|
||||
|
@ -1233,10 +1239,10 @@ impl ExprWorker<'_> {
|
|||
|
||||
fn syntax_level_select(&mut self, lhs: Expr, key: &Interned<Decl>, span: Span) -> Option<Expr> {
|
||||
match &lhs {
|
||||
Expr::Decl(d) => match d.as_ref() {
|
||||
Decl::Module(m) => {
|
||||
let f = self.exports_of(m.fid);
|
||||
let selected = f.get(key.name())?;
|
||||
Expr::Decl(decl) => match decl.as_ref() {
|
||||
Decl::Module(module) => {
|
||||
let exports = self.exports_of(module.fid);
|
||||
let selected = exports.get(key.name())?;
|
||||
let select_ref = Interned::new(RefExpr {
|
||||
decl: key.clone(),
|
||||
root: Some(lhs.clone()),
|
||||
|
|
|
@ -29,32 +29,32 @@ struct IndexWorker {
|
|||
impl IndexWorker {
|
||||
fn visit(&mut self, node: &SyntaxNode) {
|
||||
match node.cast::<ast::Expr>() {
|
||||
Some(ast::Expr::Str(s)) => {
|
||||
if s.to_untyped().text().len() > 65536 {
|
||||
Some(ast::Expr::Str(path_str)) => {
|
||||
if path_str.to_untyped().text().len() > 65536 {
|
||||
// skip long strings
|
||||
return;
|
||||
}
|
||||
let s = s.get();
|
||||
let path_str = path_str.get();
|
||||
|
||||
if s.starts_with('@') {
|
||||
let pkg_spec = PackageSpec::from_str(&s).ok();
|
||||
if path_str.starts_with('@') {
|
||||
let pkg_spec = PackageSpec::from_str(&path_str).ok();
|
||||
if let Some(pkg_spec) = pkg_spec {
|
||||
self.info.identifiers.insert(pkg_spec.name.clone().into());
|
||||
self.info.packages.insert(pkg_spec);
|
||||
}
|
||||
return;
|
||||
}
|
||||
let p = Path::new(s.as_str());
|
||||
let name = p.file_name().unwrap_or_default().to_str();
|
||||
let path = Path::new(path_str.as_str());
|
||||
let name = path.file_name().unwrap_or_default().to_str();
|
||||
if let Some(name) = name {
|
||||
self.info.paths.insert(name.into());
|
||||
}
|
||||
}
|
||||
Some(ast::Expr::MathIdent(i)) => {
|
||||
self.info.identifiers.insert(i.get().into());
|
||||
Some(ast::Expr::MathIdent(ident)) => {
|
||||
self.info.identifiers.insert(ident.get().into());
|
||||
}
|
||||
Some(ast::Expr::Ident(i)) => {
|
||||
self.info.identifiers.insert(i.get().into());
|
||||
Some(ast::Expr::Ident(ident)) => {
|
||||
self.info.identifiers.insert(ident.get().into());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
|
|
@ -12,13 +12,13 @@ use typst_shim::utils::LazyHash;
|
|||
|
||||
pub(crate) fn get_lexical_hierarchy(
|
||||
source: &Source,
|
||||
g: LexicalScopeKind,
|
||||
scope_kind: LexicalScopeKind,
|
||||
) -> Option<EcoVec<LexicalHierarchy>> {
|
||||
let b = std::time::Instant::now();
|
||||
let start = std::time::Instant::now();
|
||||
let root = LinkedNode::new(source.root());
|
||||
|
||||
let mut worker = LexicalHierarchyWorker {
|
||||
g,
|
||||
sk: scope_kind,
|
||||
..LexicalHierarchyWorker::default()
|
||||
};
|
||||
worker.stack.push((
|
||||
|
@ -41,7 +41,7 @@ pub(crate) fn get_lexical_hierarchy(
|
|||
worker.symbreak();
|
||||
}
|
||||
|
||||
crate::log_debug_ct!("lexical hierarchy analysis took {:?}", b.elapsed());
|
||||
crate::log_debug_ct!("lexical hierarchy analysis took {:?}", start.elapsed());
|
||||
res.map(|_| worker.stack.pop().unwrap().1)
|
||||
}
|
||||
|
||||
|
@ -212,7 +212,7 @@ enum IdentContext {
|
|||
|
||||
#[derive(Default)]
|
||||
struct LexicalHierarchyWorker {
|
||||
g: LexicalScopeKind,
|
||||
sk: LexicalScopeKind,
|
||||
stack: Vec<(LexicalInfo, EcoVec<LexicalHierarchy>)>,
|
||||
ident_context: IdentContext,
|
||||
}
|
||||
|
@ -253,7 +253,7 @@ impl LexicalHierarchyWorker {
|
|||
if let LexicalKind::Heading(level) = symbol.kind {
|
||||
'heading_break: while let Some((w, _)) = self.stack.last() {
|
||||
match w.kind {
|
||||
LexicalKind::Heading(l) if l < level => break 'heading_break,
|
||||
LexicalKind::Heading(lvl) if lvl < level => break 'heading_break,
|
||||
LexicalKind::Block => break 'heading_break,
|
||||
_ if self.stack.len() <= 1 => break 'heading_break,
|
||||
_ => {}
|
||||
|
@ -289,11 +289,11 @@ impl LexicalHierarchyWorker {
|
|||
let pattern = node.children().find(|n| n.cast::<ast::Pattern>().is_some());
|
||||
|
||||
if let Some(name) = &pattern {
|
||||
let p = name.cast::<ast::Pattern>().unwrap();
|
||||
let pat = name.cast::<ast::Pattern>().unwrap();
|
||||
|
||||
// special case: it will then match SyntaxKind::Closure in the inner looking
|
||||
// up.
|
||||
if matches!(p, ast::Pattern::Normal(ast::Expr::Closure(..))) {
|
||||
if matches!(pat, ast::Pattern::Normal(ast::Expr::Closure(..))) {
|
||||
let closure = name.clone();
|
||||
self.get_symbols_with(closure, IdentContext::Ref)?;
|
||||
break 'let_binding;
|
||||
|
@ -364,7 +364,7 @@ impl LexicalHierarchyWorker {
|
|||
self.get_symbols_in_opt_with(ident, IdentContext::Var)?;
|
||||
}
|
||||
}
|
||||
k if k.is_trivia() || k.is_keyword() || k.is_error() => {}
|
||||
kind if kind.is_trivia() || kind.is_keyword() || kind.is_error() => {}
|
||||
_ => {
|
||||
for child in node.children() {
|
||||
self.get_symbols(child)?;
|
||||
|
@ -408,12 +408,12 @@ impl LexicalHierarchyWorker {
|
|||
}
|
||||
|
||||
fn get_symbols_with(&mut self, node: LinkedNode, context: IdentContext) -> anyhow::Result<()> {
|
||||
let c = self.ident_context;
|
||||
let parent_context = self.ident_context;
|
||||
self.ident_context = context;
|
||||
|
||||
let res = self.get_symbols(node);
|
||||
|
||||
self.ident_context = c;
|
||||
self.ident_context = parent_context;
|
||||
res
|
||||
}
|
||||
|
||||
|
@ -422,18 +422,18 @@ impl LexicalHierarchyWorker {
|
|||
#[allow(deprecated)]
|
||||
fn get_ident(&self, node: &LinkedNode) -> anyhow::Result<Option<LexicalInfo>> {
|
||||
let (name, kind) = match node.kind() {
|
||||
SyntaxKind::Label if self.g.affect_symbol() => {
|
||||
SyntaxKind::Label if self.sk.affect_symbol() => {
|
||||
// filter out label in code context.
|
||||
let p = node.prev_sibling_kind();
|
||||
if p.is_some_and(|p| {
|
||||
let prev_kind = node.prev_sibling_kind();
|
||||
if prev_kind.is_some_and(|prev_kind| {
|
||||
matches!(
|
||||
p,
|
||||
prev_kind,
|
||||
SyntaxKind::LeftBracket
|
||||
| SyntaxKind::LeftBrace
|
||||
| SyntaxKind::LeftParen
|
||||
| SyntaxKind::Comma
|
||||
| SyntaxKind::Colon
|
||||
) || p.is_keyword()
|
||||
) || prev_kind.is_keyword()
|
||||
}) {
|
||||
return Ok(None);
|
||||
}
|
||||
|
@ -444,7 +444,7 @@ impl LexicalHierarchyWorker {
|
|||
|
||||
(name, LexicalKind::label())
|
||||
}
|
||||
SyntaxKind::Ident if self.g.affect_symbol() => {
|
||||
SyntaxKind::Ident if self.sk.affect_symbol() => {
|
||||
let ast_node = node
|
||||
.cast::<ast::Ident>()
|
||||
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
|
||||
|
@ -458,11 +458,11 @@ impl LexicalHierarchyWorker {
|
|||
(name, kind)
|
||||
}
|
||||
SyntaxKind::Equation | SyntaxKind::Raw | SyntaxKind::BlockComment
|
||||
if self.g.affect_markup() =>
|
||||
if self.sk.affect_markup() =>
|
||||
{
|
||||
(EcoString::new(), LexicalKind::Block)
|
||||
}
|
||||
SyntaxKind::CodeBlock | SyntaxKind::ContentBlock if self.g.affect_block() => {
|
||||
SyntaxKind::CodeBlock | SyntaxKind::ContentBlock if self.sk.affect_block() => {
|
||||
(EcoString::new(), LexicalKind::Block)
|
||||
}
|
||||
SyntaxKind::Parenthesized
|
||||
|
@ -470,7 +470,7 @@ impl LexicalHierarchyWorker {
|
|||
| SyntaxKind::Args
|
||||
| SyntaxKind::Array
|
||||
| SyntaxKind::Dict
|
||||
if self.g.affect_expr() =>
|
||||
if self.sk.affect_expr() =>
|
||||
{
|
||||
(EcoString::new(), LexicalKind::Block)
|
||||
}
|
||||
|
@ -483,7 +483,7 @@ impl LexicalHierarchyWorker {
|
|||
return Ok(None);
|
||||
};
|
||||
let kind = match parent.kind() {
|
||||
SyntaxKind::Heading if self.g.affect_heading() => LexicalKind::Heading(
|
||||
SyntaxKind::Heading if self.sk.affect_heading() => LexicalKind::Heading(
|
||||
parent.cast::<ast::Heading>().unwrap().depth().get() as i16,
|
||||
),
|
||||
_ => return Ok(None),
|
||||
|
|
|
@ -111,27 +111,27 @@ pub fn descent_decls<T>(
|
|||
}
|
||||
}
|
||||
}
|
||||
(DescentItem::Parent(node, child), ast::Expr::For(f)) => {
|
||||
let body = node.find(f.body().span());
|
||||
(DescentItem::Parent(node, child), ast::Expr::For(for_expr)) => {
|
||||
let body = node.find(for_expr.body().span());
|
||||
let in_body = body.is_some_and(|n| n.find(child.span()).is_some());
|
||||
if !in_body {
|
||||
return None;
|
||||
}
|
||||
|
||||
for ident in f.pattern().bindings() {
|
||||
for ident in for_expr.pattern().bindings() {
|
||||
if let Some(t) = recv(DescentDecl::Ident(ident)) {
|
||||
return Some(t);
|
||||
}
|
||||
}
|
||||
}
|
||||
(DescentItem::Parent(node, child), ast::Expr::Closure(c)) => {
|
||||
let body = node.find(c.body().span());
|
||||
(DescentItem::Parent(node, child), ast::Expr::Closure(closure)) => {
|
||||
let body = node.find(closure.body().span());
|
||||
let in_body = body.is_some_and(|n| n.find(child.span()).is_some());
|
||||
if !in_body {
|
||||
return None;
|
||||
}
|
||||
|
||||
for param in c.params().children() {
|
||||
for param in closure.params().children() {
|
||||
match param {
|
||||
ast::Param::Pos(pattern) => {
|
||||
for ident in pattern.bindings() {
|
||||
|
@ -181,10 +181,10 @@ pub fn is_ident_like(node: &SyntaxNode) -> bool {
|
|||
}
|
||||
|
||||
use SyntaxKind::*;
|
||||
let k = node.kind();
|
||||
matches!(k, Ident | MathIdent | Underscore)
|
||||
|| (matches!(k, Error) && can_be_ident(node))
|
||||
|| k.is_keyword()
|
||||
let kind = node.kind();
|
||||
matches!(kind, Ident | MathIdent | Underscore)
|
||||
|| (matches!(kind, Error) && can_be_ident(node))
|
||||
|| kind.is_keyword()
|
||||
}
|
||||
|
||||
/// A mode in which a text document is interpreted.
|
||||
|
@ -222,9 +222,9 @@ pub(crate) fn interpret_mode_at(mut leaf: Option<&LinkedNode>) -> InterpretMode
|
|||
}
|
||||
|
||||
/// Determine the interpretation mode at the given kind (context-free).
|
||||
pub(crate) fn interpret_mode_at_kind(k: SyntaxKind) -> Option<InterpretMode> {
|
||||
pub(crate) fn interpret_mode_at_kind(kind: SyntaxKind) -> Option<InterpretMode> {
|
||||
use SyntaxKind::*;
|
||||
Some(match k {
|
||||
Some(match kind {
|
||||
LineComment | BlockComment => InterpretMode::Comment,
|
||||
Raw => InterpretMode::Raw,
|
||||
Str => InterpretMode::String,
|
||||
|
@ -672,21 +672,21 @@ pub fn classify_cursor(node: LinkedNode) -> Option<CursorClass<'_>> {
|
|||
};
|
||||
|
||||
while let SyntaxKind::Named | SyntaxKind::Colon = node_parent.kind() {
|
||||
let Some(p) = node_parent.parent() else {
|
||||
let Some(parent) = node_parent.parent() else {
|
||||
return Some(CursorClass::Normal(node));
|
||||
};
|
||||
node_parent = p.clone();
|
||||
node_parent = parent.clone();
|
||||
}
|
||||
|
||||
match node_parent.kind() {
|
||||
SyntaxKind::Args => {
|
||||
let callee = node_ancestors(&node_parent).find_map(|p| {
|
||||
let s = match p.cast::<ast::Expr>()? {
|
||||
let callee = node_ancestors(&node_parent).find_map(|ancestor| {
|
||||
let span = match ancestor.cast::<ast::Expr>()? {
|
||||
ast::Expr::FuncCall(call) => call.callee().span(),
|
||||
ast::Expr::Set(set) => set.target().span(),
|
||||
_ => return None,
|
||||
};
|
||||
p.find(s)
|
||||
ancestor.find(span)
|
||||
})?;
|
||||
|
||||
let param_node = match node.kind() {
|
||||
|
|
|
@ -131,7 +131,7 @@ pub(crate) fn scan_workspace_files<T>(
|
|||
|
||||
let path = de.path();
|
||||
let relative_path = match path.strip_prefix(root) {
|
||||
Ok(p) => p,
|
||||
Ok(path) => path,
|
||||
Err(err) => {
|
||||
log::warn!("failed to strip prefix, path: {path:?}, root: {root:?}: {err}");
|
||||
continue;
|
||||
|
|
|
@ -13,8 +13,8 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
Self { f, indent: 0 }
|
||||
}
|
||||
|
||||
pub fn write_decl(&mut self, d: &Decl) -> fmt::Result {
|
||||
write!(self.f, "{d:?}")
|
||||
pub fn write_decl(&mut self, decl: &Decl) -> fmt::Result {
|
||||
write!(self.f, "{decl:?}")
|
||||
}
|
||||
|
||||
pub fn write_expr(&mut self, expr: &Expr) -> fmt::Result {
|
||||
|
@ -105,9 +105,9 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
match arg {
|
||||
ArgExpr::Pos(pos) => self.write_expr(pos),
|
||||
ArgExpr::Named(named) => {
|
||||
let (k, v) = named.as_ref();
|
||||
write!(self.f, "{k:?}: ")?;
|
||||
self.write_expr(v)
|
||||
let (name, val) = named.as_ref();
|
||||
write!(self.f, "{name:?}: ")?;
|
||||
self.write_expr(val)
|
||||
}
|
||||
ArgExpr::NamedRt(named) => {
|
||||
let (key, val) = named.as_ref();
|
||||
|
@ -122,38 +122,38 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn write_pattern(&mut self, p: &Pattern) -> fmt::Result {
|
||||
match p {
|
||||
pub fn write_pattern(&mut self, pat: &Pattern) -> fmt::Result {
|
||||
match pat {
|
||||
Pattern::Expr(expr) => self.write_expr(expr),
|
||||
Pattern::Simple(decl) => self.write_decl(decl),
|
||||
Pattern::Sig(sig) => self.write_pattern_sig(sig),
|
||||
}
|
||||
}
|
||||
|
||||
fn write_pattern_sig(&mut self, p: &PatternSig) -> fmt::Result {
|
||||
fn write_pattern_sig(&mut self, sig: &PatternSig) -> fmt::Result {
|
||||
self.f.write_str("pat(\n")?;
|
||||
self.indent += 1;
|
||||
for pos in &p.pos {
|
||||
for pos in &sig.pos {
|
||||
self.write_indent()?;
|
||||
self.write_pattern(pos)?;
|
||||
self.f.write_str(",\n")?;
|
||||
}
|
||||
for (name, pat) in &p.named {
|
||||
for (name, named) in &sig.named {
|
||||
self.write_indent()?;
|
||||
write!(self.f, "{name:?} = ")?;
|
||||
self.write_pattern(pat)?;
|
||||
self.write_pattern(named)?;
|
||||
self.f.write_str(",\n")?;
|
||||
}
|
||||
if let Some((k, rest)) = &p.spread_left {
|
||||
if let Some((name, spread_left)) = &sig.spread_left {
|
||||
self.write_indent()?;
|
||||
write!(self.f, "..{k:?}: ")?;
|
||||
self.write_pattern(rest)?;
|
||||
write!(self.f, "..{name:?}: ")?;
|
||||
self.write_pattern(spread_left)?;
|
||||
self.f.write_str(",\n")?;
|
||||
}
|
||||
if let Some((k, rest)) = &p.spread_right {
|
||||
if let Some((name, spread_right)) = &sig.spread_right {
|
||||
self.write_indent()?;
|
||||
write!(self.f, "..{k:?}: ")?;
|
||||
self.write_pattern(rest)?;
|
||||
write!(self.f, "..{name:?}: ")?;
|
||||
self.write_pattern(spread_right)?;
|
||||
self.f.write_str(",\n")?;
|
||||
}
|
||||
self.indent -= 1;
|
||||
|
@ -339,14 +339,14 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
|
|||
Self { f, indent: 0 }
|
||||
}
|
||||
|
||||
pub fn write_decl(&mut self, d: &Decl) -> fmt::Result {
|
||||
pub fn write_decl(&mut self, decl: &Decl) -> fmt::Result {
|
||||
use DefKind::*;
|
||||
let shorter = matches!(d.kind(), Function | Variable | Module);
|
||||
if shorter && !d.name().is_empty() {
|
||||
return write!(self.f, "{}", d.name());
|
||||
let shorter = matches!(decl.kind(), Function | Variable | Module);
|
||||
if shorter && !decl.name().is_empty() {
|
||||
return write!(self.f, "{}", decl.name());
|
||||
}
|
||||
|
||||
write!(self.f, "{d:?}")
|
||||
write!(self.f, "{decl:?}")
|
||||
}
|
||||
|
||||
pub fn write_expr(&mut self, expr: &Expr) -> fmt::Result {
|
||||
|
|
|
@ -47,28 +47,28 @@ pub fn snapshot_testing(name: &str, f: &impl Fn(&mut LocalContext, PathBuf)) {
|
|||
#[cfg(windows)]
|
||||
let contents = contents.replace("\r\n", "\n");
|
||||
|
||||
run_with_sources(&contents, |w, p| {
|
||||
run_with_ctx(w, p, f);
|
||||
run_with_sources(&contents, |world, path| {
|
||||
run_with_ctx(world, path, f);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
pub fn run_with_ctx<T>(
|
||||
w: &mut LspUniverse,
|
||||
p: PathBuf,
|
||||
verse: &mut LspUniverse,
|
||||
path: PathBuf,
|
||||
f: &impl Fn(&mut LocalContext, PathBuf) -> T,
|
||||
) -> T {
|
||||
let root = w.workspace_root().unwrap();
|
||||
let paths = w
|
||||
let root = verse.workspace_root().unwrap();
|
||||
let paths = verse
|
||||
.shadow_paths()
|
||||
.into_iter()
|
||||
.map(|p| TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap())))
|
||||
.map(|path| TypstFileId::new(None, VirtualPath::new(path.strip_prefix(&root).unwrap())))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let w = w.snapshot();
|
||||
let world = verse.snapshot();
|
||||
|
||||
let source = w.source_by_path(&p).ok().unwrap();
|
||||
let source = world.source_by_path(&path).ok().unwrap();
|
||||
let docs = find_module_level_docs(&source).unwrap_or_default();
|
||||
let properties = get_test_properties(&docs);
|
||||
let supports_html = properties
|
||||
|
@ -87,7 +87,7 @@ pub fn run_with_ctx<T>(
|
|||
},
|
||||
..Analysis::default()
|
||||
})
|
||||
.snapshot(w);
|
||||
.snapshot(world);
|
||||
|
||||
ctx.test_package_list(|| {
|
||||
vec![(
|
||||
|
@ -97,7 +97,7 @@ pub fn run_with_ctx<T>(
|
|||
});
|
||||
ctx.test_completion_files(|| paths.clone());
|
||||
ctx.test_files(|| paths);
|
||||
f(&mut ctx, p)
|
||||
f(&mut ctx, path)
|
||||
}
|
||||
|
||||
pub fn get_test_properties(s: &str) -> HashMap<&'_ str, &'_ str> {
|
||||
|
@ -164,7 +164,7 @@ pub fn run_with_sources<T>(source: &str, f: impl FnOnce(&mut LspUniverse, PathBu
|
|||
let sources = source.split("-----");
|
||||
|
||||
let mut last_pw = None;
|
||||
for (i, source) in sources.enumerate() {
|
||||
for (idx, source) in sources.enumerate() {
|
||||
// find prelude
|
||||
let mut source = source.trim_start();
|
||||
let mut path = None;
|
||||
|
@ -179,7 +179,7 @@ pub fn run_with_sources<T>(source: &str, f: impl FnOnce(&mut LspUniverse, PathBu
|
|||
}
|
||||
};
|
||||
|
||||
let path = path.unwrap_or_else(|| format!("/s{i}.typ"));
|
||||
let path = path.unwrap_or_else(|| format!("/s{idx}.typ"));
|
||||
|
||||
let pw = root.join(Path::new(&path));
|
||||
world
|
||||
|
@ -307,8 +307,8 @@ pub fn find_test_position_(s: &Source, offset: usize) -> LspPosition {
|
|||
if match_ident {
|
||||
match n.kind() {
|
||||
SyntaxKind::Closure => {
|
||||
let c = n.cast::<ast::Closure>().unwrap();
|
||||
if let Some(name) = c.name() {
|
||||
let closure = n.cast::<ast::Closure>().unwrap();
|
||||
if let Some(name) = closure.name() {
|
||||
if let Some(m) = n.find(name.span()) {
|
||||
n = m;
|
||||
break 'match_loop;
|
||||
|
@ -316,8 +316,8 @@ pub fn find_test_position_(s: &Source, offset: usize) -> LspPosition {
|
|||
}
|
||||
}
|
||||
SyntaxKind::LetBinding => {
|
||||
let c = n.cast::<ast::LetBinding>().unwrap();
|
||||
if let Some(name) = c.kind().bindings().first() {
|
||||
let let_binding = n.cast::<ast::LetBinding>().unwrap();
|
||||
if let Some(name) = let_binding.kind().bindings().first() {
|
||||
if let Some(m) = n.find(name.span()) {
|
||||
n = m;
|
||||
break 'match_loop;
|
||||
|
@ -397,37 +397,37 @@ fn pos(v: &Value) -> String {
|
|||
}
|
||||
|
||||
impl Redact for RedactFields {
|
||||
fn redact(&self, v: Value) -> Value {
|
||||
match v {
|
||||
Value::Object(mut m) => {
|
||||
for (_, v) in m.iter_mut() {
|
||||
*v = self.redact(v.clone());
|
||||
fn redact(&self, json_val: Value) -> Value {
|
||||
match json_val {
|
||||
Value::Object(mut map) => {
|
||||
for (_, val) in map.iter_mut() {
|
||||
*val = self.redact(val.clone());
|
||||
}
|
||||
for k in self.0.iter().copied() {
|
||||
let Some(t) = m.remove(k) else {
|
||||
for key in self.0.iter().copied() {
|
||||
let Some(t) = map.remove(key) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
match k {
|
||||
match key {
|
||||
"changes" => {
|
||||
let obj = t.as_object().unwrap();
|
||||
m.insert(
|
||||
k.to_owned(),
|
||||
map.insert(
|
||||
key.to_owned(),
|
||||
Value::Object(
|
||||
obj.iter().map(|(k, v)| (file_name(k), v.clone())).collect(),
|
||||
),
|
||||
);
|
||||
}
|
||||
"uri" | "oldUri" | "newUri" | "targetUri" => {
|
||||
m.insert(k.to_owned(), file_name(t.as_str().unwrap()).into());
|
||||
map.insert(key.to_owned(), file_name(t.as_str().unwrap()).into());
|
||||
}
|
||||
"range"
|
||||
| "selectionRange"
|
||||
| "originSelectionRange"
|
||||
| "targetRange"
|
||||
| "targetSelectionRange" => {
|
||||
m.insert(
|
||||
k.to_owned(),
|
||||
map.insert(
|
||||
key.to_owned(),
|
||||
format!("{}:{}", pos(&t["start"]), pos(&t["end"])).into(),
|
||||
);
|
||||
}
|
||||
|
@ -441,27 +441,27 @@ impl Redact for RedactFields {
|
|||
"data:image-hash/svg+xml;base64,redacted"
|
||||
});
|
||||
|
||||
m.insert(k.to_owned(), res.into());
|
||||
map.insert(key.to_owned(), res.into());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Value::Object(m)
|
||||
Value::Object(map)
|
||||
}
|
||||
Value::Array(mut a) => {
|
||||
for v in a.iter_mut() {
|
||||
*v = self.redact(v.clone());
|
||||
Value::Array(mut arr) => {
|
||||
for elem in arr.iter_mut() {
|
||||
*elem = self.redact(elem.clone());
|
||||
}
|
||||
Value::Array(a)
|
||||
Value::Array(arr)
|
||||
}
|
||||
Value::String(s) => Value::String(s),
|
||||
v => v,
|
||||
Value::String(content) => Value::String(content),
|
||||
json_val => json_val,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn file_name(k: &str) -> String {
|
||||
let name = Path::new(k).file_name().unwrap();
|
||||
fn file_name(path: &str) -> String {
|
||||
let name = Path::new(path).file_name().unwrap();
|
||||
name.to_str().unwrap().to_owned()
|
||||
}
|
||||
|
||||
|
|
|
@ -55,9 +55,9 @@ impl DocSource {
|
|||
Self::Builtin(BuiltinTy::Type(ty)) => Some(ty.constructor().ok()?),
|
||||
Self::Builtin(BuiltinTy::Element(ty)) => Some((*ty).into()),
|
||||
Self::Builtin(..) => None,
|
||||
Self::Ins(i) => match &i.val {
|
||||
foundations::Value::Func(f) => Some(f.clone()),
|
||||
foundations::Value::Type(f) => Some(f.constructor().ok()?),
|
||||
Self::Ins(ins_ty) => match &ins_ty.val {
|
||||
foundations::Value::Func(func) => Some(func.clone()),
|
||||
foundations::Value::Type(ty) => Some(ty.constructor().ok()?),
|
||||
_ => None,
|
||||
},
|
||||
}
|
||||
|
|
|
@ -92,7 +92,7 @@ impl PathPreference {
|
|||
|
||||
pub fn from_ext(path: &str) -> Option<Self> {
|
||||
let path = std::path::Path::new(path).extension()?.to_str()?;
|
||||
PathPreference::iter().find(|p| p.ext_matcher().is_match(path))
|
||||
PathPreference::iter().find(|preference| preference.ext_matcher().is_match(path))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -277,8 +277,8 @@ impl fmt::Debug for BuiltinTy {
|
|||
write!(f, "Tag({name:?})")
|
||||
}
|
||||
}
|
||||
BuiltinTy::Module(m) => write!(f, "{m:?}"),
|
||||
BuiltinTy::Path(p) => write!(f, "Path({p:?})"),
|
||||
BuiltinTy::Module(decl) => write!(f, "{decl:?}"),
|
||||
BuiltinTy::Path(preference) => write!(f, "Path({preference:?})"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -442,8 +442,8 @@ macro_rules! flow_record {
|
|||
};
|
||||
}
|
||||
|
||||
pub(super) fn param_mapping(f: &Func, p: &ParamInfo) -> Option<Ty> {
|
||||
match (f.name()?, p.name) {
|
||||
pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
|
||||
match (func.name()?, param.name) {
|
||||
("cbor", "path") => Some(literally(Path(PathPreference::None))),
|
||||
("csv", "path") => Some(literally(Path(PathPreference::Csv))),
|
||||
("image", "path") => Some(literally(Path(PathPreference::Image))),
|
||||
|
@ -456,13 +456,13 @@ pub(super) fn param_mapping(f: &Func, p: &ParamInfo) -> Option<Ty> {
|
|||
("raw", "syntaxes") => Some(literally(Path(PathPreference::RawSyntax))),
|
||||
("bibliography" | "cite", "style") => Some(Ty::iter_union([
|
||||
literally(Path(PathPreference::Csl)),
|
||||
Ty::from_cast_info(&p.input),
|
||||
Ty::from_cast_info(¶m.input),
|
||||
])),
|
||||
("cite", "key") => Some(Ty::iter_union([literally(CiteLabel)])),
|
||||
("ref", "target") => Some(Ty::iter_union([literally(RefLabel)])),
|
||||
("link", "dest") | ("footnote", "body") => Some(Ty::iter_union([
|
||||
literally(RefLabel),
|
||||
Ty::from_cast_info(&p.input),
|
||||
Ty::from_cast_info(¶m.input),
|
||||
])),
|
||||
("bibliography", "path") => Some(literally(Path(PathPreference::Bibliography))),
|
||||
("text", "size") => Some(literally(TextSize)),
|
||||
|
|
|
@ -95,24 +95,24 @@ impl fmt::Debug for Ty {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Ty::Any => f.write_str("Any"),
|
||||
Ty::Builtin(t) => write!(f, "{t:?}"),
|
||||
Ty::Args(a) => write!(f, "&({a:?})"),
|
||||
Ty::Func(s) => write!(f, "{s:?}"),
|
||||
Ty::Pattern(s) => write!(f, "{s:?}"),
|
||||
Ty::Dict(r) => write!(f, "{r:?}"),
|
||||
Ty::Array(a) => write!(f, "Array<{a:?}>"),
|
||||
Ty::Tuple(t) => {
|
||||
Ty::Builtin(ty) => write!(f, "{ty:?}"),
|
||||
Ty::Args(args) => write!(f, "&({args:?})"),
|
||||
Ty::Func(func) => write!(f, "{func:?}"),
|
||||
Ty::Pattern(pat) => write!(f, "{pat:?}"),
|
||||
Ty::Dict(record) => write!(f, "{record:?}"),
|
||||
Ty::Array(arr) => write!(f, "Array<{arr:?}>"),
|
||||
Ty::Tuple(elems) => {
|
||||
f.write_str("(")?;
|
||||
for t in t.iter() {
|
||||
for t in elems.iter() {
|
||||
write!(f, "{t:?}, ")?;
|
||||
}
|
||||
f.write_str(")")
|
||||
}
|
||||
Ty::With(w) => write!(f, "({:?}).with(..{:?})", w.sig, w.with),
|
||||
Ty::Select(a) => write!(f, "{a:?}"),
|
||||
Ty::Union(u) => {
|
||||
Ty::With(with) => write!(f, "({:?}).with(..{:?})", with.sig, with.with),
|
||||
Ty::Select(sel) => write!(f, "{sel:?}"),
|
||||
Ty::Union(types) => {
|
||||
f.write_str("(")?;
|
||||
if let Some((first, u)) = u.split_first() {
|
||||
if let Some((first, u)) = types.split_first() {
|
||||
write!(f, "{first:?}")?;
|
||||
for u in u {
|
||||
write!(f, " | {u:?}")?;
|
||||
|
@ -120,16 +120,16 @@ impl fmt::Debug for Ty {
|
|||
}
|
||||
f.write_str(")")
|
||||
}
|
||||
Ty::Let(v) => write!(f, "({v:?})"),
|
||||
Ty::Param(ff) => write!(f, "{:?}: {:?}", ff.name, ff.ty),
|
||||
Ty::Var(v) => v.fmt(f),
|
||||
Ty::Unary(u) => write!(f, "{u:?}"),
|
||||
Ty::Binary(b) => write!(f, "{b:?}"),
|
||||
Ty::If(i) => write!(f, "{i:?}"),
|
||||
Ty::Value(v) => write!(f, "{v:?}", v = v.val),
|
||||
Ty::Boolean(b) => {
|
||||
if let Some(b) = b {
|
||||
write!(f, "{b}")
|
||||
Ty::Let(bounds) => write!(f, "({bounds:?})"),
|
||||
Ty::Param(param) => write!(f, "{:?}: {:?}", param.name, param.ty),
|
||||
Ty::Var(var) => var.fmt(f),
|
||||
Ty::Unary(unary) => write!(f, "{unary:?}"),
|
||||
Ty::Binary(binary) => write!(f, "{binary:?}"),
|
||||
Ty::If(if_expr) => write!(f, "{if_expr:?}"),
|
||||
Ty::Value(ins_ty) => write!(f, "{:?}", ins_ty.val),
|
||||
Ty::Boolean(truthiness) => {
|
||||
if let Some(truthiness) = truthiness {
|
||||
write!(f, "{truthiness}")
|
||||
} else {
|
||||
f.write_str("Boolean")
|
||||
}
|
||||
|
@ -224,8 +224,8 @@ impl Ty {
|
|||
/// Get the type of the type
|
||||
pub fn element(&self) -> Option<Element> {
|
||||
match self {
|
||||
Ty::Value(v) => match &v.val {
|
||||
Value::Func(f) => f.element(),
|
||||
Ty::Value(ins_ty) => match &ins_ty.val {
|
||||
Value::Func(func) => func.element(),
|
||||
_ => None,
|
||||
},
|
||||
Ty::Builtin(BuiltinTy::Element(v)) => Some(*v),
|
||||
|
@ -344,7 +344,7 @@ impl NameBone {
|
|||
let mut rhs = rhs_iter.next();
|
||||
|
||||
std::iter::from_fn(move || 'name_scanning: loop {
|
||||
if let (Some((i, lhs_key)), Some((j, rhs_key))) = (lhs, rhs) {
|
||||
if let (Some((idx, lhs_key)), Some((j, rhs_key))) = (lhs, rhs) {
|
||||
match lhs_key.cmp(rhs_key) {
|
||||
std::cmp::Ordering::Less => {
|
||||
lhs = lhs_iter.next();
|
||||
|
@ -357,7 +357,7 @@ impl NameBone {
|
|||
std::cmp::Ordering::Equal => {
|
||||
lhs = lhs_iter.next();
|
||||
rhs = rhs_iter.next();
|
||||
return Some((i, j));
|
||||
return Some((idx, j));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -436,7 +436,7 @@ pub trait TypeInterface {
|
|||
/// Iterate over the fields of a record.
|
||||
fn interface(&self) -> impl Iterator<Item = (&StrRef, &Ty)>;
|
||||
/// Get the field by bone offset.
|
||||
fn field_by_bone_offset(&self, i: usize) -> Option<&Ty>;
|
||||
fn field_by_bone_offset(&self, idx: usize) -> Option<&Ty>;
|
||||
/// Get the field by name.
|
||||
fn field_by_name(&self, name: &StrRef) -> Option<&Ty> {
|
||||
self.field_by_bone_offset(self.bone().find(name)?)
|
||||
|
@ -499,13 +499,13 @@ impl InsTy {
|
|||
}
|
||||
|
||||
/// Create a instance with a sapn
|
||||
pub fn new_at(val: Value, s: Span) -> Interned<Self> {
|
||||
let mut l = SyntaxNode::leaf(SyntaxKind::Ident, "");
|
||||
l.synthesize(s);
|
||||
pub fn new_at(val: Value, span: Span) -> Interned<Self> {
|
||||
let mut name = SyntaxNode::leaf(SyntaxKind::Ident, "");
|
||||
name.synthesize(span);
|
||||
Interned::new(Self {
|
||||
val,
|
||||
syntax: Some(Interned::new(TypeSource {
|
||||
name_node: l,
|
||||
name_node: name,
|
||||
name_repr: OnceCell::new(),
|
||||
doc: "".into(),
|
||||
})),
|
||||
|
@ -527,12 +527,12 @@ impl InsTy {
|
|||
pub fn span(&self) -> Span {
|
||||
self.syntax
|
||||
.as_ref()
|
||||
.map(|s| s.name_node.span())
|
||||
.map(|source| source.name_node.span())
|
||||
.or_else(|| {
|
||||
Some(match &self.val {
|
||||
Value::Func(f) => f.span(),
|
||||
Value::Args(a) => a.span,
|
||||
Value::Content(c) => c.span(),
|
||||
Value::Func(func) => func.span(),
|
||||
Value::Args(args) => args.span,
|
||||
Value::Content(content) => content.span(),
|
||||
// todo: module might have file id
|
||||
_ => return None,
|
||||
})
|
||||
|
@ -703,8 +703,8 @@ impl TypeInterface for RecordTy {
|
|||
&self.names
|
||||
}
|
||||
|
||||
fn field_by_bone_offset(&self, i: usize) -> Option<&Ty> {
|
||||
self.types.get(i)
|
||||
fn field_by_bone_offset(&self, idx: usize) -> Option<&Ty> {
|
||||
self.types.get(idx)
|
||||
}
|
||||
|
||||
fn interface(&self) -> impl Iterator<Item = (&StrRef, &Ty)> {
|
||||
|
@ -884,8 +884,8 @@ impl TypeInterface for SigTy {
|
|||
names.zip(types)
|
||||
}
|
||||
|
||||
fn field_by_bone_offset(&self, i: usize) -> Option<&Ty> {
|
||||
self.inputs.get(i + self.name_started as usize)
|
||||
fn field_by_bone_offset(&self, offset: usize) -> Option<&Ty> {
|
||||
self.inputs.get(offset + self.name_started as usize)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1228,23 +1228,23 @@ impl TypeInfo {
|
|||
Ty::Var(v) => {
|
||||
let w = self.vars.get(&v.def).unwrap();
|
||||
match &w.bounds {
|
||||
FlowVarKind::Strong(w) | FlowVarKind::Weak(w) => {
|
||||
let w = w.read();
|
||||
for l in w.lbs.iter() {
|
||||
store.lbs.insert_mut(l.clone());
|
||||
FlowVarKind::Strong(bounds) | FlowVarKind::Weak(bounds) => {
|
||||
let w = bounds.read();
|
||||
for bound in w.lbs.iter() {
|
||||
store.lbs.insert_mut(bound.clone());
|
||||
}
|
||||
for l in w.ubs.iter() {
|
||||
store.ubs.insert_mut(l.clone());
|
||||
for bound in w.ubs.iter() {
|
||||
store.ubs.insert_mut(bound.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ty::Let(v) => {
|
||||
for l in v.lbs.iter() {
|
||||
store.lbs.insert_mut(l.clone());
|
||||
Ty::Let(bounds) => {
|
||||
for bound in bounds.lbs.iter() {
|
||||
store.lbs.insert_mut(bound.clone());
|
||||
}
|
||||
for l in v.ubs.iter() {
|
||||
store.ubs.insert_mut(l.clone());
|
||||
for bound in bounds.ubs.iter() {
|
||||
store.ubs.insert_mut(bound.clone());
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
|
|
|
@ -80,12 +80,12 @@ impl TypeDescriber {
|
|||
if !functions.is_empty() {
|
||||
// todo: union signature
|
||||
// only first function is described
|
||||
let f = functions[0].clone();
|
||||
let func = functions[0].clone();
|
||||
|
||||
let mut res = EcoString::new();
|
||||
res.push('(');
|
||||
let mut not_first = false;
|
||||
for ty in f.positional_params() {
|
||||
for ty in func.positional_params() {
|
||||
if not_first {
|
||||
res.push_str(", ");
|
||||
} else {
|
||||
|
@ -93,26 +93,26 @@ impl TypeDescriber {
|
|||
}
|
||||
res.push_str(self.describe_root(ty).as_deref().unwrap_or("any"));
|
||||
}
|
||||
for (k, ty) in f.named_params() {
|
||||
for (name, ty) in func.named_params() {
|
||||
if not_first {
|
||||
res.push_str(", ");
|
||||
} else {
|
||||
not_first = true;
|
||||
}
|
||||
res.push_str(k);
|
||||
res.push_str(name);
|
||||
res.push_str(": ");
|
||||
res.push_str(self.describe_root(ty).as_deref().unwrap_or("any"));
|
||||
}
|
||||
if let Some(r) = f.rest_param() {
|
||||
if let Some(spread_right) = func.rest_param() {
|
||||
if not_first {
|
||||
res.push_str(", ");
|
||||
}
|
||||
res.push_str("..: ");
|
||||
res.push_str(self.describe_root(r).as_deref().unwrap_or("any"));
|
||||
res.push_str(self.describe_root(spread_right).as_deref().unwrap_or("any"));
|
||||
}
|
||||
res.push_str(") => ");
|
||||
res.push_str(
|
||||
f.body
|
||||
func.body
|
||||
.as_ref()
|
||||
.and_then(|ret| self.describe_root(ret))
|
||||
.as_deref()
|
||||
|
@ -146,15 +146,15 @@ impl TypeDescriber {
|
|||
fn describe(&mut self, ty: &Ty) -> EcoString {
match ty {
Ty::Var(..) => {}
Ty::Union(tys) => {
self.describe_iter(tys);
Ty::Union(types) => {
self.describe_iter(types);
}
Ty::Let(lb) => {
self.describe_iter(&lb.lbs);
self.describe_iter(&lb.ubs);
Ty::Let(bounds) => {
self.describe_iter(&bounds.lbs);
self.describe_iter(&bounds.ubs);
}
Ty::Func(f) => {
self.functions.push(f.clone());
Ty::Func(func) => {
self.functions.push(func.clone());
}
Ty::Dict(..) => {
return "dictionary".into();

@ -42,7 +42,7 @@ impl Iface<'_> {
crate::log_debug_ct!("iface shape: {self:?}");

match self {
Iface::Dict(d) => d.field_by_name(key).cloned(),
Iface::Dict(dict) => dict.field_by_name(key).cloned(),
Iface::Element { val, .. } => select_scope(Some(val.scope()), key),
Iface::Type { val, .. } => select_scope(Some(val.scope()), key),
Iface::Func { val, .. } => select_scope(val.scope(), key),

@ -113,10 +113,10 @@ impl IfaceCheckDriver<'_> {
true
}

fn ty(&mut self, ty: &Ty, pol: bool) {
crate::log_debug_ct!("check iface ty: {ty:?}");
fn ty(&mut self, at: &Ty, pol: bool) {
crate::log_debug_ct!("check iface ty: {at:?}");

match ty {
match at {
Ty::Builtin(BuiltinTy::Stroke) if self.dict_as_iface() => {
self.checker
.check(Iface::Dict(&FLOW_STROKE_DICT), &mut self.ctx, pol);

@ -138,45 +138,42 @@ impl IfaceCheckDriver<'_> {
.check(Iface::Dict(&FLOW_RADIUS_DICT), &mut self.ctx, pol);
}
// // todo: deduplicate checking early
Ty::Value(v) => {
Ty::Value(ins_ty) => {
if self.value_as_iface() {
match &v.val {
Value::Module(t) => {
self.checker.check(
Iface::ModuleVal { val: t, at: ty },
&mut self.ctx,
pol,
);
}
Value::Dict(d) => {
match &ins_ty.val {
Value::Module(val) => {
self.checker
.check(Iface::Value { val: d, at: ty }, &mut self.ctx, pol);
.check(Iface::ModuleVal { val, at }, &mut self.ctx, pol);
}
Value::Type(t) => {
Value::Dict(dict) => {
self.checker
.check(Iface::Type { val: t, at: ty }, &mut self.ctx, pol);
.check(Iface::Value { val: dict, at }, &mut self.ctx, pol);
}
Value::Func(t) => {
Value::Type(ty) => {
self.checker
.check(Iface::Func { val: t, at: ty }, &mut self.ctx, pol);
.check(Iface::Type { val: ty, at }, &mut self.ctx, pol);
}
Value::Func(func) => {
self.checker
.check(Iface::Func { val: func, at }, &mut self.ctx, pol);
}
_ => {}
}
}
}
Ty::Builtin(BuiltinTy::Type(b_ty)) if self.value_as_iface() => {
Ty::Builtin(BuiltinTy::Type(ty)) if self.value_as_iface() => {
// todo: distinguish between element and function
self.checker
.check(Iface::Type { val: b_ty, at: ty }, &mut self.ctx, pol);
.check(Iface::Type { val: ty, at }, &mut self.ctx, pol);
}
Ty::Builtin(BuiltinTy::Element(elem)) if self.value_as_iface() => {
self.checker
.check(Iface::Element { val: elem, at: ty }, &mut self.ctx, pol);
.check(Iface::Element { val: elem, at }, &mut self.ctx, pol);
}
Ty::Builtin(BuiltinTy::Module(module)) => {
if let Decl::Module(m) = module.as_ref() {
self.checker
.check(Iface::Module { val: m.fid, at: ty }, &mut self.ctx, pol);
.check(Iface::Module { val: m.fid, at }, &mut self.ctx, pol);
}
}
// Ty::Func(sig) if self.value_as_iface() => {

@ -192,8 +189,8 @@ impl IfaceCheckDriver<'_> {
// self.check_dict_signature(sig, pol, self.checker);
self.checker.check(Iface::Dict(sig), &mut self.ctx, pol);
}
Ty::Var(..) => ty.bounds(pol, self),
_ if ty.has_bounds() => ty.bounds(pol, self),
Ty::Var(..) => at.bounds(pol, self),
_ if at.has_bounds() => at.bounds(pol, self),
_ => {}
}
// Ty::Select(sel) => sel.ty.bounds(pol, &mut MethodDriver(self,

@ -71,17 +71,17 @@ pub trait TyMutator {
})
}

fn mutate_param(&mut self, f: &Interned<ParamTy>, pol: bool) -> Option<ParamTy> {
let ty = self.mutate(&f.ty, pol)?;
let mut f = f.as_ref().clone();
f.ty = ty;
Some(f)
fn mutate_param(&mut self, param: &Interned<ParamTy>, pol: bool) -> Option<ParamTy> {
let ty = self.mutate(&param.ty, pol)?;
let mut param = param.as_ref().clone();
param.ty = ty;
Some(param)
}

fn mutate_record(&mut self, ty: &Interned<RecordTy>, pol: bool) -> Option<RecordTy> {
let types = self.mutate_vec(&ty.types, pol)?;
fn mutate_record(&mut self, record: &Interned<RecordTy>, pol: bool) -> Option<RecordTy> {
let types = self.mutate_vec(&record.types, pol)?;

let rec = ty.as_ref().clone();
let rec = record.as_ref().clone();
Some(RecordTy { types, ..rec })
}

@ -57,23 +57,20 @@ impl<'a> Sig<'a> {
sig => (sig, None),
};

let sig_ins = match cano_sig {
let sig = match cano_sig {
Sig::Builtin(_) => return None,
Sig::ArrayCons(a) => SigTy::array_cons(a.as_ref().clone(), false),
Sig::TupleCons(t) => SigTy::tuple_cons(t.clone(), false),
Sig::DictCons(d) => SigTy::dict_cons(d, false),
Sig::ArrayCons(arr) => SigTy::array_cons(arr.as_ref().clone(), false),
Sig::TupleCons(tup) => SigTy::tuple_cons(tup.clone(), false),
Sig::DictCons(dict) => SigTy::dict_cons(dict, false),
Sig::TypeCons { val, .. } => ctx.type_of_func(&val.constructor().ok()?)?,
Sig::Value { val, .. } => ctx.type_of_func(val)?,
// todo
Sig::Partialize(..) => return None,
Sig::With { .. } => return None,
Sig::Type(t) => t.clone(),
Sig::Type(ty) => ty.clone(),
};

Some(SigShape {
sig: sig_ins,
withs,
})
Some(SigShape { sig, withs })
}
}

@ -166,9 +163,9 @@ impl SigCheckDriver<'_> {
)
}

fn ty(&mut self, ty: &Ty, pol: bool) {
crate::log_debug_ct!("check sig: {ty:?}");
match ty {
fn ty(&mut self, at: &Ty, pol: bool) {
crate::log_debug_ct!("check sig: {at:?}");
match at {
Ty::Builtin(BuiltinTy::Stroke) if self.dict_as_sig() => {
self.checker
.check(Sig::DictCons(&FLOW_STROKE_DICT), &mut self.ctx, pol);

@ -190,19 +187,16 @@ impl SigCheckDriver<'_> {
.check(Sig::DictCons(&FLOW_RADIUS_DICT), &mut self.ctx, pol);
}
// todo: deduplicate checking early
Ty::Value(v) => {
Ty::Value(ins_ty) => {
if self.func_as_sig() {
match &v.val {
Value::Func(f) => {
match &ins_ty.val {
Value::Func(func) => {
self.checker
.check(Sig::Value { val: f, at: ty }, &mut self.ctx, pol);
.check(Sig::Value { val: func, at }, &mut self.ctx, pol);
}
Value::Type(t) => {
self.checker.check(
Sig::TypeCons { val: t, at: ty },
&mut self.ctx,
pol,
);
Value::Type(ty) => {
self.checker
.check(Sig::TypeCons { val: ty, at }, &mut self.ctx, pol);
}
_ => {}
}

@ -211,13 +205,13 @@ impl SigCheckDriver<'_> {
Ty::Builtin(BuiltinTy::Type(b_ty)) if self.func_as_sig() => {
// todo: distinguish between element and function
self.checker
.check(Sig::TypeCons { val: b_ty, at: ty }, &mut self.ctx, pol);
.check(Sig::TypeCons { val: b_ty, at }, &mut self.ctx, pol);
}
Ty::Builtin(BuiltinTy::Element(elem)) if self.func_as_sig() => {
// todo: distinguish between element and function
let f = (*elem).into();
let func = (*elem).into();
self.checker
.check(Sig::Value { val: &f, at: ty }, &mut self.ctx, pol);
.check(Sig::Value { val: &func, at }, &mut self.ctx, pol);
}
Ty::Func(sig) if self.func_as_sig() => {
self.checker.check(Sig::Type(sig), &mut self.ctx, pol);

@ -247,7 +241,7 @@ impl SigCheckDriver<'_> {
// todo: keep type information
self.ty(&param.ty, pol);
}
_ if ty.has_bounds() => ty.bounds(pol, self),
_ if at.has_bounds() => at.bounds(pol, self),
_ => {}
}
}

@ -288,10 +282,10 @@ impl BoundChecker for MethodDriver<'_, '_> {
// todo: deduplicate checking early
Ty::Value(v) => {
match &v.val {
Value::Func(f) => {
Value::Func(func) => {
if self.is_binder() {
self.0.checker.check(
Sig::Partialize(&Sig::Value { val: f, at: ty }),
Sig::Partialize(&Sig::Value { val: func, at: ty }),
&mut self.0.ctx,
pol,
);

@ -306,9 +300,9 @@ impl BoundChecker for MethodDriver<'_, '_> {
Ty::Builtin(BuiltinTy::Element(elem)) => {
// todo: distinguish between element and function
if self.is_binder() {
let f = (*elem).into();
let func = (*elem).into();
self.0.checker.check(
Sig::Partialize(&Sig::Value { val: &f, at: ty }),
Sig::Partialize(&Sig::Value { val: &func, at: ty }),
&mut self.0.ctx,
pol,
);

@ -17,21 +17,21 @@ struct CompactTy {
impl TypeInfo {
/// Simplify (Canonicalize) the given type with the given type scheme.
pub fn simplify(&self, ty: Ty, principal: bool) -> Ty {
let mut c = self.cano_cache.lock();
let c = &mut *c;
let mut cache = self.cano_cache.lock();
let cache = &mut *cache;

c.cano_local_cache.clear();
c.positives.clear();
c.negatives.clear();
cache.cano_local_cache.clear();
cache.positives.clear();
cache.negatives.clear();

let mut worker = TypeSimplifier {
principal,
vars: &self.vars,
cano_cache: &mut c.cano_cache,
cano_local_cache: &mut c.cano_local_cache,
cano_cache: &mut cache.cano_cache,
cano_local_cache: &mut cache.cano_local_cache,

positives: &mut c.positives,
negatives: &mut c.negatives,
positives: &mut cache.positives,
negatives: &mut cache.negatives,
};

worker.simplify(ty, principal)

@ -62,98 +62,98 @@ impl TypeSimplifier<'_, '_> {

fn analyze(&mut self, ty: &Ty, pol: bool) {
match ty {
Ty::Var(v) => {
let w = self.vars.get(&v.def).unwrap();
Ty::Var(var) => {
let w = self.vars.get(&var.def).unwrap();
match &w.bounds {
FlowVarKind::Strong(w) | FlowVarKind::Weak(w) => {
let w = w.read();
let bounds = w.read();
let inserted = if pol {
self.positives.insert(v.def.clone())
self.positives.insert(var.def.clone())
} else {
self.negatives.insert(v.def.clone())
self.negatives.insert(var.def.clone())
};
if !inserted {
return;
}

if pol {
for lb in w.lbs.iter() {
for lb in bounds.lbs.iter() {
self.analyze(lb, pol);
}
} else {
for ub in w.ubs.iter() {
for ub in bounds.ubs.iter() {
self.analyze(ub, pol);
}
}
}
}
}
Ty::Func(f) => {
for p in f.inputs() {
self.analyze(p, !pol);
Ty::Func(func) => {
for input_ty in func.inputs() {
self.analyze(input_ty, !pol);
}
if let Some(ret) = &f.body {
self.analyze(ret, pol);
if let Some(ret_ty) = &func.body {
self.analyze(ret_ty, pol);
}
}
Ty::Dict(r) => {
for p in r.types.iter() {
self.analyze(p, pol);
Ty::Dict(record) => {
for member in record.types.iter() {
self.analyze(member, pol);
}
}
Ty::Tuple(tup) => {
for ty in tup.iter() {
self.analyze(ty, pol);
Ty::Tuple(elems) => {
for elem in elems.iter() {
self.analyze(elem, pol);
}
}
Ty::Array(arr) => {
self.analyze(arr, pol);
}
Ty::With(w) => {
self.analyze(&w.sig, pol);
for p in w.with.inputs() {
self.analyze(p, pol);
Ty::With(with) => {
self.analyze(&with.sig, pol);
for input in with.with.inputs() {
self.analyze(input, pol);
}
}
Ty::Args(args) => {
for p in args.inputs() {
self.analyze(p, pol);
for input in args.inputs() {
self.analyze(input, pol);
}
}
Ty::Pattern(args) => {
for p in args.inputs() {
self.analyze(p, pol);
Ty::Pattern(pat) => {
for input in pat.inputs() {
self.analyze(input, pol);
}
}
Ty::Unary(u) => self.analyze(&u.lhs, pol),
Ty::Binary(b) => {
let [lhs, rhs] = b.operands();
Ty::Unary(unary) => self.analyze(&unary.lhs, pol),
Ty::Binary(binary) => {
let [lhs, rhs] = binary.operands();
self.analyze(lhs, pol);
self.analyze(rhs, pol);
}
Ty::If(i) => {
self.analyze(&i.cond, pol);
self.analyze(&i.then, pol);
self.analyze(&i.else_, pol);
Ty::If(if_expr) => {
self.analyze(&if_expr.cond, pol);
self.analyze(&if_expr.then, pol);
self.analyze(&if_expr.else_, pol);
}
Ty::Union(v) => {
for ty in v.iter() {
Ty::Union(types) => {
for ty in types.iter() {
self.analyze(ty, pol);
}
}
Ty::Select(a) => {
self.analyze(&a.ty, pol);
Ty::Select(select) => {
self.analyze(&select.ty, pol);
}
Ty::Let(v) => {
for lb in v.lbs.iter() {
Ty::Let(bounds) => {
for lb in bounds.lbs.iter() {
self.analyze(lb, !pol);
}
for ub in v.ubs.iter() {
for ub in bounds.ubs.iter() {
self.analyze(ub, pol);
}
}
Ty::Param(v) => {
self.analyze(&v.ty, pol);
Ty::Param(param) => {
self.analyze(&param.ty, pol);
}
Ty::Value(_v) => {}
Ty::Any => {}

@ -164,72 +164,77 @@ impl TypeSimplifier<'_, '_> {

fn transform(&mut self, ty: &Ty, pol: bool) -> Ty {
match ty {
Ty::Let(w) => self.transform_let(w.lbs.iter(), w.ubs.iter(), None, pol),
Ty::Var(v) => {
if let Some(cano) = self.cano_local_cache.get(&(v.def.clone(), self.principal)) {
Ty::Let(bounds) => self.transform_let(bounds.lbs.iter(), bounds.ubs.iter(), None, pol),
Ty::Var(var) => {
if let Some(cano) = self
.cano_local_cache
.get(&(var.def.clone(), self.principal))
{
return cano.clone();
}
// todo: avoid cycle
self.cano_local_cache
.insert((v.def.clone(), self.principal), Ty::Any);
.insert((var.def.clone(), self.principal), Ty::Any);

let res = match &self.vars.get(&v.def).unwrap().bounds {
let res = match &self.vars.get(&var.def).unwrap().bounds {
FlowVarKind::Strong(w) | FlowVarKind::Weak(w) => {
let w = w.read();

self.transform_let(w.lbs.iter(), w.ubs.iter(), Some(&v.def), pol)
self.transform_let(w.lbs.iter(), w.ubs.iter(), Some(&var.def), pol)
}
};

self.cano_local_cache
.insert((v.def.clone(), self.principal), res.clone());
.insert((var.def.clone(), self.principal), res.clone());

res
}
Ty::Func(f) => Ty::Func(self.transform_sig(f, pol)),
Ty::Dict(f) => {
let mut f = f.as_ref().clone();
f.types = self.transform_seq(&f.types, pol);
Ty::Func(func) => Ty::Func(self.transform_sig(func, pol)),
Ty::Dict(record) => {
let mut mutated = record.as_ref().clone();
mutated.types = self.transform_seq(&mutated.types, pol);

Ty::Dict(f.into())
Ty::Dict(mutated.into())
}
Ty::Tuple(tup) => Ty::Tuple(self.transform_seq(tup, pol)),
Ty::Array(arr) => Ty::Array(self.transform(arr, pol).into()),
Ty::With(w) => {
let sig = self.transform(&w.sig, pol).into();
Ty::With(with) => {
let sig = self.transform(&with.sig, pol).into();
// Negate the pol to make correct covariance
let with = self.transform_sig(&w.with, !pol);
let mutated = self.transform_sig(&with.with, !pol);

Ty::With(SigWithTy::new(sig, with))
Ty::With(SigWithTy::new(sig, mutated))
}
// Negate the pol to make correct covariance
// todo: negate?
Ty::Args(args) => Ty::Args(self.transform_sig(args, !pol)),
Ty::Pattern(args) => Ty::Pattern(self.transform_sig(args, !pol)),
Ty::Unary(u) => Ty::Unary(TypeUnary::new(u.op, self.transform(&u.lhs, pol))),
Ty::Binary(b) => {
let [lhs, rhs] = b.operands();
Ty::Pattern(pat) => Ty::Pattern(self.transform_sig(pat, !pol)),
Ty::Unary(unary) => {
Ty::Unary(TypeUnary::new(unary.op, self.transform(&unary.lhs, pol)))
}
Ty::Binary(binary) => {
let [lhs, rhs] = binary.operands();
let lhs = self.transform(lhs, pol);
let rhs = self.transform(rhs, pol);

Ty::Binary(TypeBinary::new(b.op, lhs, rhs))
Ty::Binary(TypeBinary::new(binary.op, lhs, rhs))
}
Ty::If(i) => Ty::If(IfTy::new(
self.transform(&i.cond, pol).into(),
self.transform(&i.then, pol).into(),
self.transform(&i.else_, pol).into(),
Ty::If(if_ty) => Ty::If(IfTy::new(
self.transform(&if_ty.cond, pol).into(),
self.transform(&if_ty.then, pol).into(),
self.transform(&if_ty.else_, pol).into(),
)),
Ty::Union(seq) => {
let seq = seq.iter().map(|ty| self.transform(ty, pol));
Ty::Union(types) => {
let seq = types.iter().map(|ty| self.transform(ty, pol));
let seq_no_any = seq.filter(|ty| !matches!(ty, Ty::Any));
let seq = seq_no_any.collect::<Vec<_>>();
Ty::from_types(seq.into_iter())
}
Ty::Param(ty) => {
let mut ty = ty.as_ref().clone();
ty.ty = self.transform(&ty.ty, pol);
Ty::Param(param) => {
let mut param = param.as_ref().clone();
param.ty = self.transform(&param.ty, pol);

Ty::Param(ty.into())
Ty::Param(param.into())
}
Ty::Select(sel) => {
let mut sel = sel.as_ref().clone();

@ -238,15 +243,15 @@ impl TypeSimplifier<'_, '_> {
Ty::Select(sel.into())
}

Ty::Value(v) => Ty::Value(v.clone()),
Ty::Value(ins_ty) => Ty::Value(ins_ty.clone()),
Ty::Any => Ty::Any,
Ty::Boolean(b) => Ty::Boolean(*b),
Ty::Builtin(b) => Ty::Builtin(b.clone()),
Ty::Boolean(truthiness) => Ty::Boolean(*truthiness),
Ty::Builtin(ty) => Ty::Builtin(ty.clone()),
}
}

fn transform_seq(&mut self, seq: &[Ty], pol: bool) -> Interned<Vec<Ty>> {
let seq = seq.iter().map(|ty| self.transform(ty, pol));
fn transform_seq(&mut self, types: &[Ty], pol: bool) -> Interned<Vec<Ty>> {
let seq = types.iter().map(|ty| self.transform(ty, pol));
seq.collect::<Vec<_>>().into()
}

@ -255,7 +260,7 @@ impl TypeSimplifier<'_, '_> {
&mut self,
lbs_iter: impl ExactSizeIterator<Item = &'a Ty>,
ubs_iter: impl ExactSizeIterator<Item = &'a Ty>,
def_id: Option<&DeclExpr>,
decl: Option<&DeclExpr>,
pol: bool,
) -> Ty {
let mut lbs = HashSet::with_capacity(lbs_iter.len());

@ -263,12 +268,12 @@ impl TypeSimplifier<'_, '_> {

crate::log_debug_ct!("transform let [principal={}]", self.principal);

if !self.principal || ((pol) && !def_id.is_some_and(|i| self.negatives.contains(i))) {
if !self.principal || ((pol) && !decl.is_some_and(|decl| self.negatives.contains(decl))) {
for lb in lbs_iter {
lbs.insert(self.transform(lb, pol));
}
}
if !self.principal || ((!pol) && !def_id.is_some_and(|i| self.positives.contains(i))) {
if !self.principal || ((!pol) && !decl.is_some_and(|decl| self.positives.contains(decl))) {
for ub in ubs_iter {
ubs.insert(self.transform(ub, !pol));
}

@ -883,7 +883,7 @@ impl<'a> CompletionContext<'a> {
let label = label.unwrap_or_else(|| value.repr());

let detail = docs.map(Into::into).or_else(|| match value {
Value::Symbol(c) => Some(symbol_detail(c.get())),
Value::Symbol(symbol) => Some(symbol_detail(symbol.get())),
Value::Func(func) => func.docs().map(plain_docs_sentence),
Value::Type(ty) => Some(plain_docs_sentence(ty.docs())),
v => {

@ -956,5 +956,5 @@ fn slice_at(s: &str, mut rng: Range<usize>) -> &str {
}

fn is_triggered_by_punc(trigger_character: Option<char>) -> bool {
trigger_character.is_some_and(|c| c.is_ascii_punctuation())
trigger_character.is_some_and(|ch| ch.is_ascii_punctuation())
}

@ -389,12 +389,12 @@ impl CompletionContext<'_> {
functions: HashSet::default(),
};

let filter = |c: &CompletionKindChecker| {
let filter = |checker: &CompletionKindChecker| {
match surrounding_syntax {
SurroundingSyntax::Regular => true,
SurroundingSyntax::StringContent | SurroundingSyntax::ImportList => false,
SurroundingSyntax::Selector => 'selector: {
for func in &c.functions {
for func in &checker.functions {
if func.element().is_some() {
break 'selector true;
}

@ -402,10 +402,10 @@ impl CompletionContext<'_> {

false
}
SurroundingSyntax::ShowTransform => !c.functions.is_empty(),
SurroundingSyntax::ShowTransform => !checker.functions.is_empty(),
SurroundingSyntax::SetRule => 'set_rule: {
// todo: user defined elements
for func in &c.functions {
for func in &checker.functions {
if let Some(elem) = func.element() {
if elem.params().iter().any(|param| param.settable) {
break 'set_rule true;

@ -749,9 +749,9 @@ impl CompletionKindChecker {
Ty::Builtin(BuiltinTy::Element(..)) => {
self.functions.insert(ty.clone());
}
Ty::Let(l) => {
for ty in l.ubs.iter().chain(l.lbs.iter()) {
self.check(ty);
Ty::Let(bounds) => {
for bound in bounds.ubs.iter().chain(bounds.lbs.iter()) {
self.check(bound);
}
}
Ty::Any

@ -933,7 +933,7 @@ fn enclosed_by(parent: &LinkedNode, s: Option<Span>, leaf: &LinkedNode) -> bool

pub fn ty_to_completion_kind(ty: &Ty) -> CompletionKind {
match ty {
Ty::Value(ty) => value_to_completion_kind(&ty.val),
Ty::Value(ins_ty) => value_to_completion_kind(&ins_ty.val),
Ty::Func(..) | Ty::With(..) => CompletionKind::Func,
Ty::Any => CompletionKind::Variable,
Ty::Builtin(b) => match b {

@ -941,8 +941,8 @@ pub fn ty_to_completion_kind(ty: &Ty) -> CompletionKind {
BuiltinTy::Type(..) | BuiltinTy::TypeType(..) => CompletionKind::Type,
_ => CompletionKind::Variable,
},
Ty::Let(l) => fold_ty_kind(l.ubs.iter().chain(l.lbs.iter())),
Ty::Union(u) => fold_ty_kind(u.iter()),
Ty::Let(bounds) => fold_ty_kind(bounds.ubs.iter().chain(bounds.lbs.iter())),
Ty::Union(types) => fold_ty_kind(types.iter()),
Ty::Boolean(..)
| Ty::Param(..)
| Ty::Var(..)

@ -1126,19 +1126,19 @@ impl TypeCompletionContext<'_, '_> {
self.ctx.value_completion(None, &v.val, true, docs);
}
}
Ty::Param(p) => {
Ty::Param(param) => {
// todo: variadic

let docs = docs.or_else(|| p.docs.as_deref());
if p.attrs.positional {
self.type_completion(&p.ty, docs);
let docs = docs.or_else(|| param.docs.as_deref());
if param.attrs.positional {
self.type_completion(&param.ty, docs);
}
if !p.attrs.named {
if !param.attrs.named {
return Some(());
}

let f = &p.name;
if self.ctx.seen_field(f.clone()) {
let field = &param.name;
if self.ctx.seen_field(field.clone()) {
return Some(());
}
if !(self.filter)(infer_type) {

@ -1146,7 +1146,7 @@ impl TypeCompletionContext<'_, '_> {
}

let mut rev_stream = self.ctx.before.chars().rev();
let ch = rev_stream.find(|c| !typst::syntax::is_id_continue(*c));
let ch = rev_stream.find(|ch| !typst::syntax::is_id_continue(*ch));
// skip label/ref completion.
// todo: more elegant way
if matches!(ch, Some('<' | '@')) {

@ -1155,9 +1155,9 @@ impl TypeCompletionContext<'_, '_> {

self.ctx.completions.push(Completion {
kind: CompletionKind::Field,
label: f.into(),
apply: Some(eco_format!("{}: ${{}}", f)),
label_detail: p.ty.describe(),
label: field.into(),
apply: Some(eco_format!("{}: ${{}}", field)),
label_detail: param.ty.describe(),
detail: docs.map(Into::into),
command: self
.ctx

@ -1189,7 +1189,7 @@ impl TypeCompletionContext<'_, '_> {
BuiltinTy::Tag(..) => return None,
BuiltinTy::Module(..) => return None,

BuiltinTy::Path(p) => {
BuiltinTy::Path(preference) => {
let source = self.ctx.ctx.source_by_id(self.ctx.root.span().id()?).ok()?;

self.ctx.completions2.extend(

@ -1198,7 +1198,7 @@ impl TypeCompletionContext<'_, '_> {
Some(self.ctx.leaf.clone()),
&source,
self.ctx.cursor,
p,
preference,
)
.into_iter()
.flatten(),

@ -1441,7 +1441,7 @@ pub(crate) fn complete_type_and_syntax(ctx: &mut CompletionContext) -> Option<()

let ty = ctx
.ctx
.literal_type_of_node(ctx.leaf.clone())
.post_type_of_node(ctx.leaf.clone())
.filter(|ty| !matches!(ty, Ty::Any));

let scope = ctx.surrounding_syntax();

@ -1551,12 +1551,12 @@ pub(crate) fn complete_type_and_syntax(ctx: &mut CompletionContext) -> Option<()
// currently, there are only path completions in ctx.completions2
// and type/named param/positional param completions in completions
// and all rest less relevant completions inctx.completions
for (i, compl) in ctx.completions2.iter_mut().enumerate() {
compl.sort_text = Some(format!("{i:03}"));
for (idx, compl) in ctx.completions2.iter_mut().enumerate() {
compl.sort_text = Some(format!("{idx:03}"));
}
let sort_base = ctx.completions2.len();
for (i, compl) in (completions.iter_mut().chain(ctx.completions.iter_mut())).enumerate() {
compl.sort_text = Some(eco_format!("{i:03}", i = i + sort_base));
for (idx, compl) in (completions.iter_mut().chain(ctx.completions.iter_mut())).enumerate() {
compl.sort_text = Some(eco_format!("{:03}", idx + sort_base));
}

crate::log_debug_ct!(

@ -1566,10 +1566,10 @@ pub(crate) fn complete_type_and_syntax(ctx: &mut CompletionContext) -> Option<()

ctx.completions.append(&mut completions);

if let Some(c) = args_node {
crate::log_debug_ct!("content block compl: args {c:?}");
let is_unclosed = matches!(c.kind(), SyntaxKind::Args)
&& c.children().fold(0i32, |acc, node| match node.kind() {
if let Some(node) = args_node {
crate::log_debug_ct!("content block compl: args {node:?}");
let is_unclosed = matches!(node.kind(), SyntaxKind::Args)
&& node.children().fold(0i32, |acc, node| match node.kind() {
SyntaxKind::LeftParen => acc + 1,
SyntaxKind::RightParen => acc - 1,
SyntaxKind::Error if node.text() == "(" => acc + 1,

@ -1598,10 +1598,10 @@ pub(crate) fn complete_type_and_syntax(ctx: &mut CompletionContext) -> Option<()

fn complete_path(
ctx: &LocalContext,
v: Option<LinkedNode>,
node: Option<LinkedNode>,
source: &Source,
cursor: usize,
p: &PathPreference,
preference: &PathPreference,
) -> Option<Vec<CompletionItem>> {
let id = source.id();
if id.package().is_some() {

@ -1611,12 +1611,12 @@ fn complete_path(
let is_in_text;
let text;
let rng;
let v = v.filter(|v| v.kind() == SyntaxKind::Str);
if let Some(v) = v {
let node = node.filter(|v| v.kind() == SyntaxKind::Str);
if let Some(str_node) = node {
// todo: the non-str case
v.cast::<ast::Str>()?;
str_node.cast::<ast::Str>()?;

let vr = v.range();
let vr = str_node.range();
rng = vr.start + 1..vr.end - 1;
crate::log_debug_ct!("path_of: {rng:?} {cursor}");
if rng.start > rng.end || (cursor != rng.end && !rng.contains(&cursor)) {

@ -1659,7 +1659,7 @@ fn complete_path(
let folder_completions = vec![];
let mut module_completions = vec![];
// todo: test it correctly
for path in ctx.completion_files(p) {
for path in ctx.completion_files(preference) {
crate::log_debug_ct!("compl_check_path: {path:?}");

// Skip self smartly

@ -1692,17 +1692,21 @@ fn complete_path(

let replace_range = ctx.to_lsp_range(rng, source);

let path_priority_cmp = |a: &str, b: &str| {
fn is_dot_or_slash(ch: &char) -> bool {
matches!(*ch, '.' | '/')
}

let path_priority_cmp = |lhs: &str, rhs: &str| {
// files are more important than dot started paths
if a.starts_with('.') || b.starts_with('.') {
if lhs.starts_with('.') || rhs.starts_with('.') {
// compare consecutive dots and slashes
let a_prefix = a.chars().take_while(|c| *c == '.' || *c == '/').count();
let b_prefix = b.chars().take_while(|c| *c == '.' || *c == '/').count();
let a_prefix = lhs.chars().take_while(is_dot_or_slash).count();
let b_prefix = rhs.chars().take_while(is_dot_or_slash).count();
if a_prefix != b_prefix {
return a_prefix.cmp(&b_prefix);
}
}
a.cmp(b)
lhs.cmp(rhs)
};

module_completions.sort_by(|a, b| path_priority_cmp(&a.0, &b.0));

@ -25,20 +25,20 @@ pub use complete::*;
pub fn plain_docs_sentence(docs: &str) -> EcoString {
crate::log_debug_ct!("plain docs {docs:?}");
let docs = docs.replace("```example", "```typ");
let mut s = unscanny::Scanner::new(&docs);
let mut scanner = unscanny::Scanner::new(&docs);
let mut output = EcoString::new();
let mut link = false;
while let Some(c) = s.eat() {
match c {
while let Some(ch) = scanner.eat() {
match ch {
'`' => {
let mut raw = s.eat_until('`');
let mut raw = scanner.eat_until('`');
if (raw.starts_with('{') && raw.ends_with('}'))
|| (raw.starts_with('[') && raw.ends_with(']'))
{
raw = &raw[1..raw.len() - 1];
}

s.eat();
scanner.eat();
output.push('`');
output.push_str(raw);
output.push('`');

@ -49,11 +49,11 @@ pub fn plain_docs_sentence(docs: &str) -> EcoString {
}
']' if link => {
output.push(']');
let c = s.cursor();
if s.eat_if('(') {
s.eat_until(')');
let link_content = s.from(c + 1);
s.eat();
let cursor = scanner.cursor();
if scanner.eat_if('(') {
scanner.eat_until(')');
let link_content = scanner.from(cursor + 1);
scanner.eat();

crate::log_debug_ct!("Intra Link: {link_content}");
let link = resolve(link_content, "https://typst.app/docs/").ok();

@ -65,10 +65,10 @@ pub fn plain_docs_sentence(docs: &str) -> EcoString {
output.push('(');
output.push_str(&link);
output.push(')');
} else if s.eat_if('[') {
s.eat_until(']');
s.eat();
output.push_str(s.from(c));
} else if scanner.eat_if('[') {
scanner.eat_until(']');
scanner.eat();
output.push_str(scanner.from(cursor));
}
link = false
}

@ -77,7 +77,7 @@ pub fn plain_docs_sentence(docs: &str) -> EcoString {
// output.push('.');
// break;
// }
_ => output.push(c),
_ => output.push(ch),
}
}

@ -289,9 +289,9 @@ static ROUTE_MAPS: Lazy<HashMap<CatKey, String>> = Lazy::new(|| {
let cat = cat.or_else(|| scope.get_category(name));
let name = urlify(name);
match value {
Value::Func(f) => {
Value::Func(func) => {
if let Some(cat) = cat {
let Some(name) = f.name() else {
let Some(name) = func.name() else {
continue;
};

@ -304,11 +304,11 @@ static ROUTE_MAPS: Lazy<HashMap<CatKey, String>> = Lazy::new(|| {
"reference/{}/{}/#functions-{name}",
group.category, group.name
);
map.insert(CatKey::Func(f.clone()), route);
map.insert(CatKey::Func(func.clone()), route);
continue;
}

crate::log_debug_ct!("func: {f:?} -> {cat:?}");
crate::log_debug_ct!("func: {func:?} -> {cat:?}");

let route = if let Some(parent_name) = &parent_name {
format!("reference/{}/{parent_name}/#definitions-{name}", cat.name())

@ -316,9 +316,9 @@ static ROUTE_MAPS: Lazy<HashMap<CatKey, String>> = Lazy::new(|| {
format!("reference/{}/{name}/", cat.name())
};

map.insert(CatKey::Func(f.clone()), route);
map.insert(CatKey::Func(func.clone()), route);
}
if let Some(s) = f.scope() {
if let Some(s) = func.scope() {
scope_to_finds.push((s, Some(name), cat));
}
}

@ -349,19 +349,19 @@ static ROUTE_MAPS: Lazy<HashMap<CatKey, String>> = Lazy::new(|| {
pub(crate) fn urlify(title: &str) -> EcoString {
title
.chars()
.map(|c| c.to_ascii_lowercase())
.map(|c| match c {
'a'..='z' | '0'..='9' => c,
.map(|ch| ch.to_ascii_lowercase())
.map(|ch| match ch {
'a'..='z' | '0'..='9' => ch,
_ => '-',
})
.collect()
}

pub fn route_of_value(k: &Value) -> Option<&'static String> {
pub fn route_of_value(val: &Value) -> Option<&'static String> {
// ROUTE_MAPS.get(&CatKey::Func(k.clone()))
let key = match k {
Value::Func(f) => CatKey::Func(f.clone()),
Value::Type(t) => CatKey::Type(*t),
let key = match val {
Value::Func(func) => CatKey::Func(func.clone()),
Value::Type(ty) => CatKey::Type(*ty),
_ => return None,
};