refactor: remove once_cell use from tinymist (#1632)

* refactor: remove `once_cell` use from tinymist

* feat: remove more
Myriad-Dreamin, 2025-04-08 02:11:44 +08:00 (committed by GitHub)
parent 769fc93df9
commit 72e33e461d
20 changed files with 68 additions and 82 deletions
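
The whole change is mechanical: every `once_cell::sync::Lazy` becomes `std::sync::LazyLock` and every `once_cell::sync::OnceCell` becomes `std::sync::OnceLock`, both of which are now in the standard library (stable since Rust 1.80 and 1.70 respectively). A minimal sketch of the before/after pattern, illustrative only and not copied from any file in this commit:

// Before: lazy statics via the once_cell crate.
//     use once_cell::sync::Lazy;
//     static NUMBER_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\d+").unwrap());

// After: the same semantics using only the standard library.
use std::sync::LazyLock;
use regex::Regex;

static NUMBER_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\d+").unwrap());

fn main() {
    // The closure runs on first access; later reads reuse the cached value.
    assert!(NUMBER_RE.is_match("tinymist 1632"));
}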

Cargo.lock (generated)

@@ -4038,7 +4038,6 @@ dependencies = [
 "itertools 0.13.0",
 "log",
 "lsp-types",
-"once_cell",
 "open",
 "parking_lot",
 "paste",
@@ -4235,7 +4234,6 @@ dependencies = [
 "itertools 0.13.0",
 "log",
 "lsp-types",
-"once_cell",
 "parking_lot",
 "percent-encoding",
 "rayon",
@@ -4889,7 +4887,6 @@ dependencies = [
 "futures",
 "indexmap 2.8.0",
 "log",
-"once_cell",
 "parking_lot",
 "reflexo-typst",
 "reflexo-vec2svg",


@@ -20,7 +20,6 @@ members = ["benches/*", "crates/*", "tests"]
 anyhow = "1"
 if_chain = "1"
 itertools = "0.13"
-once_cell = "1"
 paste = "1.0"
 cfg-if = "1.0"
 strum = { version = "0.26.2", features = ["derive"] }


@@ -27,7 +27,6 @@ serde.workspace = true
 serde_json.workspace = true
 parking_lot.workspace = true
 ena.workspace = true
-once_cell.workspace = true
 toml.workspace = true
 walkdir.workspace = true
 indexmap.workspace = true
@@ -61,7 +60,6 @@ tinymist-std.workspace = true
 tinymist-l10n.workspace = true
 [dev-dependencies]
-once_cell.workspace = true
 insta.workspace = true
 serde.workspace = true
 serde_json.workspace = true


@@ -7,7 +7,6 @@ use std::ops::Range;
 use ecow::{eco_format, EcoString};
 use if_chain::if_chain;
 use lsp_types::InsertTextFormat;
-use once_cell::sync::Lazy;
 use regex::{Captures, Regex};
 use serde::{Deserialize, Serialize};
 use tinymist_analysis::syntax::{bad_completion_cursor, BadCompletionCursor};
@@ -890,8 +889,8 @@ fn slice_at(s: &str, mut rng: Range<usize>) -> &str {
 &s[rng]
 }
-static TYPST_SNIPPET_PLACEHOLDER_RE: Lazy<Regex> =
-Lazy::new(|| Regex::new(r"\$\{(.*?)\}").unwrap());
+static TYPST_SNIPPET_PLACEHOLDER_RE: LazyLock<Regex> =
+LazyLock::new(|| Regex::new(r"\$\{(.*?)\}").unwrap());
 /// Adds numbering to placeholders in snippets
 fn to_lsp_snippet(typst_snippet: &str) -> EcoString {


@@ -1,11 +1,11 @@
 use std::num::NonZeroUsize;
 use std::ops::DerefMut;
 use std::sync::atomic::{AtomicU64, Ordering};
+use std::sync::OnceLock;
 use std::{collections::HashSet, ops::Deref};
 use comemo::{Track, Tracked};
 use lsp_types::Url;
-use once_cell::sync::OnceCell;
 use parking_lot::Mutex;
 use rustc_hash::FxHashMap;
 use tinymist_project::LspWorld;
@@ -1009,7 +1009,7 @@ impl SharedContext {
 }
 // Needed by recursive computation
-type DeferredCompute<T> = Arc<OnceCell<T>>;
+type DeferredCompute<T> = Arc<OnceLock<T>>;
 #[derive(Clone)]
 struct IncrCacheMap<K, V> {
@@ -1142,9 +1142,9 @@ pub struct AnalysisGlobalCaches {
 #[derive(Default)]
 pub struct AnalysisLocalCaches {
 modules: HashMap<TypstFileId, ModuleAnalysisLocalCache>,
-completion_files: OnceCell<Vec<TypstFileId>>,
-root_files: OnceCell<Vec<TypstFileId>>,
-module_deps: OnceCell<HashMap<TypstFileId, ModuleDependency>>,
+completion_files: OnceLock<Vec<TypstFileId>>,
+root_files: OnceLock<Vec<TypstFileId>>,
+module_deps: OnceLock<HashMap<TypstFileId, ModuleDependency>>,
 }
 /// A local cache for module-level analysis results of a module.
@@ -1153,8 +1153,8 @@ pub struct AnalysisLocalCaches {
 /// change.
 #[derive(Default)]
 pub struct ModuleAnalysisLocalCache {
-expr_stage: OnceCell<Arc<ExprInfo>>,
-type_check: OnceCell<Arc<TypeInfo>>,
+expr_stage: OnceLock<Arc<ExprInfo>>,
+type_check: OnceLock<Arc<TypeInfo>>,
 }
 /// A revision-managed (per input change) cache for all level of analysis


@@ -5,7 +5,6 @@ use typst::syntax::Span;
 use crate::{prelude::*, LspWorldExt};
-use once_cell::sync::Lazy;
 use regex::RegexSet;
 /// Stores diagnostics for files.
@@ -198,7 +197,7 @@ trait DiagnosticRefiner {
 struct DeprecationRefiner<const MINOR: usize>();
-static DEPRECATION_PATTERNS: Lazy<RegexSet> = Lazy::new(|| {
+static DEPRECATION_PATTERNS: LazyLock<RegexSet> = LazyLock::new(|| {
 RegexSet::new([
 r"unknown variable: style",
 r"unexpected argument: fill",
View file

@@ -1,4 +1,3 @@
-use once_cell::sync::OnceCell;
 use typst_shim::syntax::LinkedNodeExt;
 use crate::{
@@ -52,7 +51,7 @@ impl SemanticRequest for SignatureHelpRequest {
 label.push('(');
 let mut real_offset = 0;
-let focus_name = OnceCell::new();
+let focus_name = OnceLock::new();
 for (idx, (param, ty)) in sig.params().enumerate() {
 if is_set && !param.attrs.settable {
 continue;


@@ -1,6 +1,5 @@
 use std::sync::Once;
-use once_cell::sync::Lazy;
 use regex::RegexSet;
 use crate::prelude::*;
@@ -95,7 +94,7 @@ pub(crate) fn scan_workspace_files<T>(
 }
 /// this is a temporary solution to ignore some common build directories
-static IGNORE_REGEX: Lazy<RegexSet> = Lazy::new(|| {
+static IGNORE_REGEX: LazyLock<RegexSet> = LazyLock::new(|| {
 RegexSet::new([
 r#"^build$"#,
 r#"^target$"#,


@@ -8,7 +8,6 @@ use std::{
 path::{Path, PathBuf},
 };
-use once_cell::sync::Lazy;
 use serde_json::{ser::PrettyFormatter, Serializer, Value};
 use tinymist_project::{CompileFontArgs, ExportTarget, LspCompileSnapshot, LspComputeGraph};
 use tinymist_std::path::unix_slash;
@@ -353,7 +352,7 @@ pub fn make_range_annoation(source: &Source) -> String {
 // pub static REDACT_URI: Lazy<RedactFields> = Lazy::new(||
 // RedactFields::from_iter(["uri"]));
-pub static REDACT_LOC: Lazy<RedactFields> = Lazy::new(|| {
+pub static REDACT_LOC: LazyLock<RedactFields> = LazyLock::new(|| {
 RedactFields::from_iter([
 "location",
 "contents",


@@ -1,8 +1,8 @@
 use core::fmt;
 use std::path::Path;
+use std::sync::LazyLock;
 use ecow::{eco_format, EcoString};
-use once_cell::sync::Lazy;
 use regex::RegexSet;
 use strum::{EnumIter, IntoEnumIterator};
 use typst::foundations::{CastInfo, Regex};
@@ -36,31 +36,33 @@ pub enum PathPreference {
 impl PathPreference {
 pub fn ext_matcher(&self) -> &'static RegexSet {
+type RegSet = LazyLock<RegexSet>;
 fn make_regex(patterns: &[&str]) -> RegexSet {
 let patterns = patterns.iter().map(|pattern| format!("(?i)^{pattern}$"));
 RegexSet::new(patterns).unwrap()
 }
-static SOURCE_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["typ", "typc"]));
+static SOURCE_REGSET: RegSet = RegSet::new(|| make_regex(&["typ", "typc"]));
-static WASM_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["wasm"]));
+static WASM_REGSET: RegSet = RegSet::new(|| make_regex(&["wasm"]));
-static IMAGE_REGSET: Lazy<RegexSet> = Lazy::new(|| {
+static IMAGE_REGSET: RegSet = RegSet::new(|| {
 make_regex(&[
 "ico", "bmp", "png", "webp", "jpg", "jpeg", "jfif", "tiff", "gif", "svg", "svgz",
 ])
 });
-static JSON_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["json", "jsonc", "json5"]));
+static JSON_REGSET: RegSet = RegSet::new(|| make_regex(&["json", "jsonc", "json5"]));
-static YAML_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["yaml", "yml"]));
+static YAML_REGSET: RegSet = RegSet::new(|| make_regex(&["yaml", "yml"]));
-static XML_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["xml"]));
+static XML_REGSET: RegSet = RegSet::new(|| make_regex(&["xml"]));
-static TOML_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["toml"]));
+static TOML_REGSET: RegSet = RegSet::new(|| make_regex(&["toml"]));
-static CSV_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["csv"]));
+static CSV_REGSET: RegSet = RegSet::new(|| make_regex(&["csv"]));
-static BIB_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["yaml", "yml", "bib"]));
+static BIB_REGSET: RegSet = RegSet::new(|| make_regex(&["yaml", "yml", "bib"]));
-static CSL_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["csl"]));
+static CSL_REGSET: RegSet = RegSet::new(|| make_regex(&["csl"]));
-static RAW_THEME_REGSET: Lazy<RegexSet> = Lazy::new(|| make_regex(&["tmTheme", "xml"]));
+static RAW_THEME_REGSET: RegSet = RegSet::new(|| make_regex(&["tmTheme", "xml"]));
-static RAW_SYNTAX_REGSET: Lazy<RegexSet> =
-Lazy::new(|| make_regex(&["tmLanguage", "sublime-syntax"]));
+static RAW_SYNTAX_REGSET: RegSet =
+RegSet::new(|| make_regex(&["tmLanguage", "sublime-syntax"]));
-static ALL_REGSET: Lazy<RegexSet> = Lazy::new(|| RegexSet::new([r".*"]).unwrap());
+static ALL_REGSET: RegSet = RegSet::new(|| RegexSet::new([r".*"]).unwrap());
-static ALL_SPECIAL_REGSET: Lazy<RegexSet> = Lazy::new(|| {
+static ALL_SPECIAL_REGSET: RegSet = RegSet::new(|| {
 RegexSet::new({
 let patterns = SOURCE_REGSET.patterns();
 let patterns = patterns.iter().chain(WASM_REGSET.patterns());
@@ -501,7 +503,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 Ty::from_cast_info(&param.input),
 ])),
 ("link", "dest") => {
-static LINK_DEST_TYPE: Lazy<Ty> = Lazy::new(|| {
+static LINK_DEST_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 flow_union!(
 literally(RefLabel),
 Ty::Builtin(BuiltinTy::Type(Type::of::<foundations::Str>())),
@@ -515,7 +517,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 Some(LINK_DEST_TYPE.clone())
 }
 ("bibliography", "path" | "sources") => {
-static BIB_PATH_TYPE: Lazy<Ty> = Lazy::new(|| {
+static BIB_PATH_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 let bib_path_ty = literally(Path(PathPreference::Bibliography));
 Ty::iter_union([bib_path_ty.clone(), Ty::Array(bib_path_ty.into())])
 });
@@ -524,13 +526,13 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 ("text", "size") => Some(literally(TextSize)),
 ("text", "font") => {
 // todo: the dict can be completed, but we have bugs...
-static FONT_TYPE: Lazy<Ty> = Lazy::new(|| {
+static FONT_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 Ty::iter_union([literally(TextFont), Ty::Array(literally(TextFont).into())])
 });
 Some(FONT_TYPE.clone())
 }
 ("text", "feature") => {
-static FONT_TYPE: Lazy<Ty> = Lazy::new(|| {
+static FONT_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 Ty::iter_union([
 // todo: the key can only be the text feature
 Ty::Builtin(BuiltinTy::Type(Type::of::<foundations::Dict>())),
@@ -540,7 +542,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 Some(FONT_TYPE.clone())
 }
 ("text", "costs") => {
-static FONT_TYPE: Lazy<Ty> = Lazy::new(|| {
+static FONT_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 Ty::Dict(flow_record!(
 "hyphenation" => literally(BuiltinTy::Type(Type::of::<Ratio>())),
 "runt" => literally(BuiltinTy::Type(Type::of::<Ratio>())),
@@ -554,7 +556,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 ("text", "region") => Some(literally(TextRegion)),
 ("text" | "stack", "dir") => Some(literally(Dir)),
 ("par", "first-line-indent") => {
-static FIRST_LINE_INDENT: Lazy<Ty> = Lazy::new(|| {
+static FIRST_LINE_INDENT: LazyLock<Ty> = LazyLock::new(|| {
 Ty::iter_union([
 literally(Length),
 Ty::Dict(RecordTy::new(vec![
@@ -581,7 +583,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 }
 ("block" | "box" | "rect" | "square" | "highlight", "radius") => Some(literally(Radius)),
 ("grid" | "table", "columns" | "rows" | "gutter" | "column-gutter" | "row-gutter") => {
-static COLUMN_TYPE: Lazy<Ty> = Lazy::new(|| {
+static COLUMN_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 flow_union!(
 Ty::Value(InsTy::new(Value::Auto)),
 Ty::Value(InsTy::new(Value::Type(Type::of::<i64>()))),
@@ -592,7 +594,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 Some(COLUMN_TYPE.clone())
 }
 ("pattern" | "tiling", "size") => {
-static PATTERN_SIZE_TYPE: Lazy<Ty> = Lazy::new(|| {
+static PATTERN_SIZE_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 flow_union!(
 Ty::Value(InsTy::new(Value::Auto)),
 Ty::Array(Ty::Builtin(Length).into()),
@@ -613,7 +615,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
 }
 }
-static FLOW_STROKE_DASH_TYPE: Lazy<Ty> = Lazy::new(|| {
+static FLOW_STROKE_DASH_TYPE: LazyLock<Ty> = LazyLock::new(|| {
 flow_union!(
 "solid",
 "dotted",
@@ -633,7 +635,7 @@ static FLOW_STROKE_DASH_TYPE: Lazy<Ty> = Lazy::new(|| {
 )
 });
-pub static FLOW_STROKE_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
+pub static FLOW_STROKE_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
 flow_record!(
 "paint" => literally(Color),
 "thickness" => literally(Length),
@@ -644,7 +646,7 @@ pub static FLOW_STROKE_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
 )
 });
-pub static FLOW_MARGIN_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
+pub static FLOW_MARGIN_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
 flow_record!(
 "top" => literally(Length),
 "right" => literally(Length),
@@ -658,7 +660,7 @@ pub static FLOW_MARGIN_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
 )
 });
-pub static FLOW_INSET_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
+pub static FLOW_INSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
 flow_record!(
 "top" => literally(Length),
 "right" => literally(Length),
@@ -670,7 +672,7 @@ pub static FLOW_INSET_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
 )
 });
-pub static FLOW_OUTSET_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
+pub static FLOW_OUTSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
 flow_record!(
 "top" => literally(Length),
 "right" => literally(Length),
@@ -682,7 +684,7 @@ pub static FLOW_OUTSET_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
 )
 });
-pub static FLOW_RADIUS_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
+pub static FLOW_RADIUS_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
 flow_record!(
 "top" => literally(Length),
 "right" => literally(Length),
@@ -696,7 +698,7 @@ pub static FLOW_RADIUS_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
 )
 });
-pub static FLOW_TEXT_FONT_DICT: Lazy<Interned<RecordTy>> = Lazy::new(|| {
+pub static FLOW_TEXT_FONT_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
 flow_record!(
 "name" => literally(TextFont),
 "covers" => flow_union!("latin-in-cjk", BuiltinTy::Type(Type::of::<Regex>())),


@@ -5,11 +5,10 @@
 use core::fmt;
 use std::{
 hash::{Hash, Hasher},
-sync::Arc,
+sync::{Arc, OnceLock},
 };
 use ecow::EcoString;
-use once_cell::sync::OnceCell;
 use parking_lot::{Mutex, RwLock};
 use rustc_hash::{FxHashMap, FxHashSet};
 use serde::{Deserialize, Serialize};
@@ -283,7 +282,7 @@ pub struct TypeSource {
 /// A name node with span
 pub name_node: SyntaxNode,
 /// A lazy evaluated name
-pub name_repr: OnceCell<StrRef>,
+pub name_repr: OnceLock<StrRef>,
 /// Attached documentation
 pub doc: StrRef,
 }
@@ -509,7 +508,7 @@ impl InsTy {
 val,
 syntax: Some(Interned::new(TypeSource {
 name_node: name,
-name_repr: OnceCell::new(),
+name_repr: OnceLock::new(),
 doc: "".into(),
 })),
 })
@@ -520,7 +519,7 @@
 val,
 syntax: Some(Interned::new(TypeSource {
 name_node: SyntaxNode::default(),
-name_repr: OnceCell::new(),
+name_repr: OnceLock::new(),
 doc: doc.into(),
 })),
 })


@@ -1,8 +1,7 @@
-use std::{collections::HashMap, fmt::Write};
+use std::{collections::HashMap, fmt::Write, sync::LazyLock};
 use comemo::Tracked;
 use ecow::{eco_format, EcoString};
-use once_cell::sync::Lazy;
 use serde::Deserialize;
 use serde_yaml as yaml;
 use typst::{
@@ -105,7 +104,7 @@ impl GroupData {
 }
 }
-static GROUPS: Lazy<Vec<GroupData>> = Lazy::new(|| {
+static GROUPS: LazyLock<Vec<GroupData>> = LazyLock::new(|| {
 let mut groups: Vec<GroupData> = yaml::from_str(include_str!("groups.yml")).unwrap();
 for group in &mut groups {
 if group.filter.is_empty() {
@@ -170,7 +169,7 @@ fn resolve_known(head: &str, base: &str) -> Option<String> {
 })
 }
-static LIBRARY: Lazy<Library> = Lazy::new(Library::default);
+static LIBRARY: LazyLock<Library> = LazyLock::new(Library::default);
 /// Extract a module from another module.
 #[track_caller]
@@ -274,7 +273,7 @@ impl PartialEq for CatKey {
 impl Eq for CatKey {}
 // todo: category of types
-static ROUTE_MAPS: Lazy<HashMap<CatKey, String>> = Lazy::new(|| {
+static ROUTE_MAPS: LazyLock<HashMap<CatKey, String>> = LazyLock::new(|| {
 // todo: this is a false positive for clippy on LazyHash
 #[allow(clippy::mutable_key_type)]
 let mut map = HashMap::new();


@@ -42,7 +42,6 @@ hyper-tungstenite = { workspace = true, optional = true }
 itertools.workspace = true
 lsp-types.workspace = true
 log.workspace = true
-once_cell.workspace = true
 open.workspace = true
 parking_lot.workspace = true
 paste.workspace = true


@@ -1,11 +1,10 @@
 use core::fmt;
 use std::path::{Path, PathBuf};
-use std::sync::Arc;
+use std::sync::{Arc, LazyLock, OnceLock};
 use clap::Parser;
 use itertools::Itertools;
 use lsp_types::*;
-use once_cell::sync::{Lazy, OnceCell};
 use reflexo::error::IgnoreLogging;
 use reflexo::CowStr;
 use reflexo_typst::{ImmutPath, TypstDict};
@@ -97,7 +96,7 @@ pub struct Config {
 /// Specifies the font paths
 pub font_paths: Vec<PathBuf>,
 /// Computed fonts based on configuration.
-pub fonts: OnceCell<Derived<Arc<TinymistFontResolver>>>,
+pub fonts: OnceLock<Derived<Arc<TinymistFontResolver>>>,
 /// Whether to use system fonts.
 pub system_fonts: Option<bool>,
@@ -533,7 +532,7 @@ impl Config {
 opts.font_paths.clone_from(paths);
 }
-let root = OnceCell::new();
+let root = OnceLock::new();
 for path in opts.font_paths.iter_mut() {
 if path.is_relative() {
 if let Some(root) = root.get_or_init(|| self.entry_resolver.root(None)) {
@@ -586,7 +585,7 @@
 }
 fn user_inputs(&self) -> ImmutDict {
-static EMPTY: Lazy<ImmutDict> = Lazy::new(ImmutDict::default);
+static EMPTY: LazyLock<ImmutDict> = LazyLock::new(ImmutDict::default);
 if let Some(extras) = &self.typst_extra_args {
 return extras.inputs.clone();
@@ -852,7 +851,6 @@ pub(crate) fn get_semantic_tokens_options() -> SemanticTokensOptions {
 mod tests {
 use super::*;
 use serde_json::json;
-use tinymist_project::PathPattern;
 fn update_config(config: &mut Config, update: &JsonValue) -> Result<()> {
 temp_env::with_vars_unset(Vec::<String>::new(), || config.update(update))


@@ -1,6 +1,7 @@
+use std::sync::OnceLock;
 use lsp_types::request::WorkspaceConfiguration;
 use lsp_types::*;
-use once_cell::sync::OnceCell;
 use reflexo::ImmutPath;
 use request::{RegisterCapability, UnregisterCapability};
 use serde_json::{Map, Value as JsonValue};
@@ -149,7 +150,7 @@ impl ServerState {
 }
 if old_config.primary_opts() != self.config.primary_opts() {
-self.config.fonts = OnceCell::new(); // todo: don't reload fonts if not changed
+self.config.fonts = OnceLock::new(); // todo: don't reload fonts if not changed
 self.reload_projects()
 .log_error("could not restart primary");
 }


@@ -5,12 +5,12 @@ mod args;
 use std::io;
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
+use std::sync::LazyLock;
 use clap::Parser;
 use clap_builder::CommandFactory;
 use clap_complete::generate;
 use futures::future::MaybeDone;
-use once_cell::sync::Lazy;
 use reflexo::ImmutPath;
 use reflexo_typst::package::PackageSpec;
 use sync_ls::transport::{with_stdio_transport, MirrorArgs};
@@ -53,7 +53,7 @@ impl Default for Runtimes {
 }
 }
-static RUNTIMES: Lazy<Runtimes> = Lazy::new(Runtimes::default);
+static RUNTIMES: LazyLock<Runtimes> = LazyLock::new(Runtimes::default);
 /// The main entry point.
 fn main() -> Result<()> {


@@ -5,7 +5,6 @@ mod prelude {
 pub use std::collections::HashMap;
-pub use once_cell::sync::Lazy;
 pub use reflexo_vec2svg::ir::{GlyphItem, GlyphRef};
 pub use reflexo_vec2svg::{DefaultExportFeature, SvgTask, SvgText};
 pub use serde::{Deserialize, Serialize};


@@ -1,3 +1,4 @@
+use std::sync::LazyLock;
 use std::{collections::BTreeMap, path::Path, sync::Arc};
 use reflexo_typst::TypstPagedDocument;
@@ -85,7 +86,7 @@ struct FontItem {
 type ResourceSymbolMap = BTreeMap<String, ResourceSymbolItem>;
-static CAT_MAP: Lazy<HashMap<&str, SymCategory>> = Lazy::new(|| {
+static CAT_MAP: LazyLock<HashMap<&str, SymCategory>> = LazyLock::new(|| {
 use SymCategory::*;
 HashMap::from_iter([


@@ -19,7 +19,6 @@ comemo.workspace = true
 reflexo-vec2svg.workspace = true
 reflexo-typst.workspace = true
-once_cell.workspace = true
 tokio.workspace = true
 env_logger.workspace = true
 log.workspace = true


@@ -10,10 +10,10 @@ pub use actor::editor::{
 pub use args::*;
 pub use outline::Outline;
+use std::sync::OnceLock;
 use std::{collections::HashMap, future::Future, path::PathBuf, pin::Pin, sync::Arc};
 use futures::sink::SinkExt;
-use once_cell::sync::OnceCell;
 use reflexo_typst::debug_loc::{DocumentPosition, SourceSpanOffset};
 use reflexo_typst::Error;
 use serde::{Deserialize, Serialize};
@@ -197,7 +197,7 @@ pub struct PreviewBuilder {
 webview_conn: BroadcastChannel<WebviewActorRequest>,
 doc_sender: Arc<parking_lot::RwLock<Option<Arc<dyn CompileView>>>>,
-compile_watcher: OnceCell<Arc<CompileWatcher>>,
+compile_watcher: OnceLock<Arc<CompileWatcher>>,
 }
 impl PreviewBuilder {
@@ -209,7 +209,7 @@ impl PreviewBuilder {
 editor_conn: mpsc::unbounded_channel(),
 webview_conn: broadcast::channel(32),
 doc_sender: Arc::new(parking_lot::RwLock::new(None)),
-compile_watcher: OnceCell::new(),
+compile_watcher: OnceLock::new(),
 }
 }