feat: support syntax analysis on packages (#47)

* feat: support syntax analysis on packages

* dev: add more tests
Author: Myriad-Dreamin
Date: 2024-03-16 10:21:59 +08:00 (committed by GitHub)
Parent: f683426753
Commit: d5c7bcdd18
11 changed files with 121 additions and 35 deletions
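The gist of the change: import paths starting with `@` are no longer skipped. They are parsed as a package spec, the package's typst.toml manifest is read through the compiler World and validated, and the import resolves to the manifest's entrypoint. A standalone sketch of that flow, mirroring the code in the diff below (the helper name resolve_package_entrypoint is mine; `world` is any typst World implementation):

    use typst::diag::FileError;
    use typst::syntax::package::{PackageManifest, PackageSpec};
    use typst::syntax::{FileId, VirtualPath};
    use typst::World;

    // Resolve "@namespace/name:version" to the package's entrypoint file id.
    fn resolve_package_entrypoint(world: &dyn World, import_path: &str) -> Option<FileId> {
        let spec: PackageSpec = import_path.parse().ok()?;
        // The manifest lives at the package root as typst.toml.
        let manifest_id = FileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
        let bytes = world.file(manifest_id).ok()?;
        let text = std::str::from_utf8(&bytes).map_err(FileError::from).ok()?;
        let manifest: PackageManifest = toml::from_str(text).ok()?;
        manifest.validate(&spec).ok()?;
        // The entrypoint is given relative to the package root.
        Some(manifest_id.join(&manifest.package.entrypoint))
    }

With an id in hand, goto definition and module dependency analysis treat package files like any other source.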

Cargo.lock (generated)

@@ -3654,6 +3654,7 @@ dependencies = [
  "serde",
  "serde_json",
  "strum 0.25.0",
+ "toml",
  "typst",
  "typst-ide",
  "typst-ts-compiler",


@@ -27,6 +27,10 @@ once_cell.workspace = true
 fxhash.workspace = true
 walkdir = "2"
 indexmap = "2.1.0"
+toml = { version = "0.8", default-features = false, features = [
+    "parse",
+    "display",
+] }
 typst.workspace = true
 typst-ide.workspace = true
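The new toml dependency is compiled with default features off; the "parse" feature is what the manifest reading below needs, and "display" additionally brings TOML serialization. A minimal sketch of what "parse" enables, using typst's PackageManifest type as in the later hunks:

    use typst::syntax::package::PackageManifest;

    // "parse" gives us toml::from_str; a malformed manifest simply yields None.
    fn parse_manifest(text: &str) -> Option<PackageManifest> {
        toml::from_str(text).ok()
    }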


@@ -0,0 +1,3 @@
+// path: base.typ
+#import "@preview/example:0.1.0";
+#(/* ident after */ example.add(1, 1))


@@ -0,0 +1,3 @@
+// path: base.typ
+#import "@preview/example:0.1.0";
+#(/* ident after */ example)


@@ -0,0 +1,12 @@
+// path: base.typ
+#let f() = 1;
+-----
+.
+#import "/base.typ": *;
+#let conf() = {
+  import "@preview/example:0.1.0";
+  set text(size: /* ident after */ f());
+}


@@ -0,0 +1,12 @@
+---
+source: crates/tinymist-query/src/goto_definition.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/goto_definition/import_package.typ
+---
+[
+  {
+    "originSelectionRange": "1:20:1:27",
+    "targetRange": "0:8:0:32",
+    "targetSelectionRange": "0:8:0:32"
+  }
+]


@@ -0,0 +1,12 @@
+---
+source: crates/tinymist-query/src/goto_definition.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/goto_definition/import_package_self.typ
+---
+[
+  {
+    "originSelectionRange": "1:20:1:27",
+    "targetRange": "0:8:0:32",
+    "targetSelectionRange": "0:8:0:32"
+  }
+]


@@ -0,0 +1,12 @@
+---
+source: crates/tinymist-query/src/goto_definition.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/goto_definition/inside_block.typ
+---
+[
+  {
+    "originSelectionRange": "7:35:7:36",
+    "targetRange": "0:5:0:6",
+    "targetSelectionRange": "0:5:0:6"
+  }
+]
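A note for reading these snapshots: the redacted locations appear to be zero-based start and end positions packed as line:column:line:column (an inference from the fixtures, not something stated in the diff). A hypothetical parser for that shape:

    // Hypothetical helper, not part of the codebase: "1:20:1:27" ->
    // (start_line, start_col, end_line, end_col), all zero-based.
    fn parse_loc(s: &str) -> Option<(u32, u32, u32, u32)> {
        let mut it = s.split(':').map(|p| p.parse::<u32>().ok());
        Some((it.next()??, it.next()??, it.next()??, it.next()??))
    }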


@@ -1,20 +1,26 @@
 use std::path::Path;
 
-use log::debug;
-use typst::syntax::{ast, LinkedNode, Source, SyntaxKind, VirtualPath};
-use typst_ts_core::{typst::prelude::EcoVec, TypstFileId};
+use typst::syntax::{ast, package::PackageManifest, LinkedNode, Source, SyntaxKind, VirtualPath};
+use typst_ts_core::{package::PackageSpec, typst::prelude::EcoVec, TypstFileId};
 
 use crate::prelude::*;
 
-/// Find a source instance by its import path.
-pub fn find_source_by_import_path(
+fn resolve_id_by_path(
     world: &dyn World,
     current: TypstFileId,
     import_path: &str,
-) -> Option<Source> {
+) -> Option<TypstFileId> {
     if import_path.starts_with('@') {
-        // todo: import from package
-        return None;
+        let spec = import_path.parse::<PackageSpec>().ok()?;
+        // Evaluate the manifest.
+        let manifest_id = FileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
+        let bytes = world.file(manifest_id).ok()?;
+        let string = std::str::from_utf8(&bytes).map_err(FileError::from).ok()?;
+        let manifest: PackageManifest = toml::from_str(string).ok()?;
+        manifest.validate(&spec).ok()?;
+
+        // Evaluate the entry point.
+        return Some(manifest_id.join(&manifest.package.entrypoint));
     }
 
     let path = Path::new(import_path);
@@ -24,8 +30,18 @@ pub fn find_source_by_import_path(
         VirtualPath::new(path)
     };
 
-    let id = TypstFileId::new(current.package().cloned(), vpath);
-    world.source(id).ok()
+    Some(TypstFileId::new(current.package().cloned(), vpath))
+}
+
+/// Find a source instance by its import path.
+pub fn find_source_by_import_path(
+    world: &dyn World,
+    current: TypstFileId,
+    import_path: &str,
+) -> Option<Source> {
+    world
+        .source(resolve_id_by_path(world, current, import_path)?)
+        .ok()
 }
 
 /// Find a source instance by its import node.
@@ -43,11 +59,11 @@ pub fn find_source_by_import(
 }
 
 /// Find all static imports in a source.
-#[comemo::memoize]
-pub fn find_imports(source: &Source) -> EcoVec<TypstFileId> {
+pub fn find_imports(world: &dyn World, source: &Source) -> EcoVec<TypstFileId> {
     let root = LinkedNode::new(source.root());
     let mut worker = ImportWorker {
+        world,
         current: source.id(),
         imports: EcoVec::new(),
     };
 
@@ -55,18 +71,16 @@ pub fn find_imports(source: &Source) -> EcoVec<TypstFileId> {
     worker.analyze(root);
     let res = worker.imports;
 
-    let mut res: Vec<TypstFileId> = res
-        .into_iter()
-        .map(|(vpath, _)| TypstFileId::new(None, vpath))
-        .collect();
+    let mut res: Vec<TypstFileId> = res.into_iter().map(|(id, _)| id).collect();
     res.sort();
     res.dedup();
     res.into_iter().collect()
 }
 
 struct ImportWorker<'a> {
+    world: &'a dyn World,
     current: TypstFileId,
-    imports: EcoVec<(VirtualPath, LinkedNode<'a>)>,
+    imports: EcoVec<(FileId, LinkedNode<'a>)>,
 }
 
 impl<'a> ImportWorker<'a> {
@@ -79,15 +93,9 @@ impl<'a> ImportWorker<'a> {
             ast::Expr::Str(s) => {
                 // todo: source in packages
                 let s = s.get();
-                let path = Path::new(s.as_str());
-                let vpath = if path.is_relative() {
-                    self.current.vpath().join(path)
-                } else {
-                    VirtualPath::new(path)
-                };
-                debug!("found import {vpath:?}");
-                self.imports.push((vpath, node));
+                let id = resolve_id_by_path(self.world, self.current, s.as_str())?;
+                self.imports.push((id, node));
             }
             // todo: handle dynamic import
             ast::Expr::FieldAccess(..) | ast::Expr::Ident(..) => {}
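Two observations on the hunks above (inferences, not stated in the diff): find_imports loses its #[comemo::memoize] attribute, plausibly because the new &dyn World argument is not something comemo can track as-is, and the worker now records resolved FileIds instead of raw VirtualPaths, so repeated imports of the same package entrypoint deduplicate by id. A minimal caller sketch under those assumptions (`world` and `source` are placeholders):

    use typst::syntax::Source;
    use typst::World;
    use typst_ts_core::TypstFileId;

    // Collect every statically imported file id, package entrypoints included.
    fn imported_ids(world: &dyn World, source: &Source) -> Vec<TypstFileId> {
        find_imports(world, source).into_iter().collect()
    }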


@@ -14,7 +14,11 @@ use typst::{
     },
     util::LazyHash,
 };
-use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
+use typst_ts_core::{
+    error::prelude::WithContext,
+    package::PackageSpec,
+    typst::prelude::{eco_vec, EcoVec},
+};
 
 use super::IdentRef;
 
@@ -37,7 +41,13 @@ pub(crate) fn get_lexical_hierarchy(
         },
         eco_vec![],
     ));
-    let res = worker.get_symbols(root).ok();
+    let res = match worker.get_symbols(root) {
+        Ok(()) => Some(()),
+        Err(e) => {
+            log::error!("lexical hierarchy analysis failed: {:?}", e);
+            None
+        }
+    };
 
     while worker.stack.len() > 1 {
         worker.symbreak();
@@ -641,15 +651,24 @@ impl LexicalHierarchyWorker {
             match v {
                 ast::Expr::Str(e) => {
                     let e = e.get();
-                    let e = Path::new(e.as_ref())
-                        .file_name()
-                        .context("no file name")?
-                        .to_string_lossy();
-                    let e = e.as_ref();
-                    let e = e.strip_suffix(".typ").context("no suffix")?;
+
+                    let name = if e.starts_with('@') {
+                        let spec = e
+                            .parse::<PackageSpec>()
+                            .context("parse package spec failed for name")?;
+                        spec.name.to_string()
+                    } else {
+                        let e = Path::new(e.as_ref())
+                            .file_name()
+                            .context("no file name")?
+                            .to_string_lossy();
+                        let e = e.as_ref();
+                        e.strip_suffix(".typ").context("no suffix")?.to_owned()
+                    };
                     // return (e == name).then_some(ImportRef::Path(v));
                     self.push_leaf(LexicalInfo {
-                        name: e.to_string(),
+                        name,
                         kind: LexicalKind::module_path(),
                         range: v_linked.range(),
                     });
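The naming rule added above, pulled out as a standalone sketch (the helper name is mine): a package import is labeled with the package name from its spec, while a file import keeps its `.typ` file stem as before.

    use std::path::Path;
    use typst::syntax::package::PackageSpec;

    // Label an import for the symbol outline:
    // "@preview/example:0.1.0" -> "example", "/base.typ" -> "base".
    fn import_label(import_path: &str) -> Option<String> {
        if import_path.starts_with('@') {
            let spec: PackageSpec = import_path.parse().ok()?;
            Some(spec.name.to_string())
        } else {
            let stem = Path::new(import_path).file_name()?.to_string_lossy();
            Some(stem.strip_suffix(".typ")?.to_owned())
        }
    }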


@@ -31,7 +31,7 @@ pub fn construct_module_dependencies(
         };
         let file_id = source.id();
 
-        let deps = find_imports(&source);
+        let deps = find_imports(ctx.world, &source);
         dependencies
             .entry(file_id)
             .or_insert_with(|| ModuleDependency {