dev: collect server information for summary (#162)

* dev: collect server information for summary

* dev: humanize the displayed font variant

* fix: correct the focus state
Myriad-Dreamin 2024-04-05 13:18:36 +08:00 committed by GitHub
parent 6722b2501f
commit 703c8b4c1d
GPG key ID: B5690EEEBB952194
13 changed files with 467 additions and 46 deletions

View file

@@ -15,8 +15,7 @@ use typst::syntax::Source;
use super::SearchCtx;
use crate::syntax::{
find_source_by_import_path, get_lexical_hierarchy, IdentRef, LexicalHierarchy, LexicalKind,
LexicalScopeKind, LexicalVarKind, ModSrc,
find_source_by_import_path, IdentRef, LexicalHierarchy, LexicalKind, LexicalVarKind, ModSrc,
};
use crate::{adt::snapshot_map::SnapshotMap, syntax::LexicalModKind};
@@ -57,6 +56,23 @@ pub struct DefUseInfo {
}
impl DefUseInfo {
/// Get the estimated memory usage of the def-use information.
pub fn estimated_memory(&self) -> usize {
std::mem::size_of::<Self>()
+ self.ident_defs.capacity()
* (std::mem::size_of::<IdentDef>() + std::mem::size_of::<IdentRef>() + 32)
+ self.external_refs.capacity()
* (std::mem::size_of::<(TypstFileId, Option<String>)>()
+ std::mem::size_of::<Vec<(Option<DefId>, IdentRef)>>()
+ 32)
+ self.ident_refs.capacity()
* (std::mem::size_of::<IdentRef>() + std::mem::size_of::<DefId>() + 32)
+ (self.undefined_refs.capacity() * std::mem::size_of::<IdentRef>() + 32)
+ (self.exports_refs.capacity() * std::mem::size_of::<DefId>() + 32)
+ self.exports_defs.capacity()
* (std::mem::size_of::<String>() + std::mem::size_of::<DefId>() + 32)
}
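The figure above is a capacity-based heuristic rather than an exact measurement: each map is charged its capacity times the size of one key-value pair, plus roughly 32 bytes of assumed per-slot bookkeeping. A minimal self-contained sketch of the same idea (the 32-byte overhead constant is carried over from the code above as an assumption, not a measured value):

use std::collections::HashMap;
use std::mem::size_of;

/// Rough, capacity-based estimate of a HashMap's footprint: per-slot key and
/// value sizes plus ~32 bytes of assumed hashing/bucket overhead.
fn estimate_map_memory<K, V>(map: &HashMap<K, V>) -> usize {
    map.capacity() * (size_of::<K>() + size_of::<V>() + 32)
}

fn main() {
    let mut defs: HashMap<String, u64> = HashMap::new();
    defs.insert("main".to_owned(), 1);
    println!("~{} bytes", estimate_map_memory(&defs));
}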
/// Get the definition id of a symbol by its name reference.
pub fn get_ref(&self, ident: &IdentRef) -> Option<DefId> {
self.ident_refs.get(ident).copied()
@@ -112,7 +128,7 @@ pub(super) fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<A
return None;
}
let e = get_lexical_hierarchy(source, LexicalScopeKind::DefUse)?;
let e = ctx.ctx.def_use_lexical_hierarchy(source)?;
let mut collector = DefUseCollector {
ctx,

View file

@@ -1,9 +1,11 @@
use std::{
collections::{HashMap, HashSet},
hash::Hash,
path::{Path, PathBuf},
sync::Arc,
};
use ecow::EcoVec;
use once_cell::sync::OnceCell;
use reflexo::{cow_mut::CowMut, debug_loc::DataSource, ImmutPath};
use typst::syntax::FileId as TypstFileId;
@@ -17,7 +19,9 @@ use typst::{
use super::{get_def_use_inner, DefUseInfo};
use crate::{
lsp_to_typst,
syntax::{construct_module_dependencies, scan_workspace_files, ModuleDependency},
syntax::{
construct_module_dependencies, scan_workspace_files, LexicalHierarchy, ModuleDependency,
},
typst_to_lsp, LspPosition, LspRange, PositionEncoding, TypstRange,
};
@@ -59,6 +63,102 @@ pub struct Analysis {
pub root: ImmutPath,
/// The position encoding for the workspace.
pub position_encoding: PositionEncoding,
/// The global caches for analysis.
pub caches: AnalysisGlobalCaches,
}
impl Analysis {
/// Get the estimated memory usage of the analysis data.
pub fn estimated_memory(&self) -> usize {
self.caches.modules.capacity() * 32
+ self
.caches
.modules
.values()
.map(|v| {
v.def_use_lexical_heirarchy
.output
.as_ref()
.map_or(0, |e| e.iter().map(|e| e.estimated_memory()).sum())
})
.sum::<usize>()
}
}
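/// A single-slot cache for a named computation: stores the last inputs and
/// output, and recomputes only when the inputs change.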
struct ComputingNode<Inputs, Output> {
name: &'static str,
inputs: Option<Inputs>,
output: Option<Output>,
}
pub(crate) trait ComputeDebug {
fn compute_debug_repr(&self) -> impl std::fmt::Debug;
}
impl ComputeDebug for Source {
fn compute_debug_repr(&self) -> impl std::fmt::Debug {
self.id()
}
}
impl<Inputs, Output> ComputingNode<Inputs, Output> {
fn new(name: &'static str) -> Self {
Self {
name,
inputs: None,
output: None,
}
}
fn compute(
&mut self,
inputs: Inputs,
compute: impl FnOnce(Option<Inputs>, Inputs) -> Option<Output>,
) -> Option<Output>
where
Inputs: ComputeDebug + Hash + Clone,
Output: Clone,
{
match &self.inputs {
Some(s) if reflexo::hash::hash128(&inputs) == reflexo::hash::hash128(&s) => {
log::debug!(
"{}({:?}): hit cache",
self.name,
inputs.compute_debug_repr()
);
self.output.clone()
}
_ => {
log::info!("{}({:?}): compute", self.name, inputs.compute_debug_repr());
let output = compute(self.inputs.clone(), inputs.clone());
self.output = output.clone();
self.inputs = Some(inputs);
output
}
}
}
}
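ComputingNode keeps one (inputs, output) pair and skips recomputation when the 128-bit hash of the new inputs matches the stored ones. A minimal self-contained sketch of the same pattern, using std's DefaultHasher in place of reflexo::hash::hash128 (an assumption made purely for illustration):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Recompute `f` only when the input's hash differs from the cached one.
struct MemoNode<O> {
    input_hash: Option<u64>,
    output: Option<O>,
}

impl<O: Clone> MemoNode<O> {
    fn new() -> Self {
        Self { input_hash: None, output: None }
    }

    fn compute<I: Hash>(&mut self, input: I, f: impl FnOnce(I) -> O) -> O {
        let mut hasher = DefaultHasher::new();
        input.hash(&mut hasher);
        let hash = hasher.finish();
        if self.input_hash == Some(hash) {
            // Cache hit: the input is unchanged, reuse the stored output.
            return self.output.clone().expect("output cached with hash");
        }
        let output = f(input);
        self.input_hash = Some(hash);
        self.output = Some(output.clone());
        output
    }
}

fn main() {
    let mut node = MemoNode::new();
    let a = node.compute("x = 1".to_string(), |src| src.len()); // computed
    let b = node.compute("x = 1".to_string(), |src| src.len()); // cache hit
    assert_eq!(a, b);
}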
/// A cache for module-level analysis results of a module.
///
/// You should not hold it across requests, because the source code may change.
pub struct ModuleAnalysisGlobalCache {
def_use_lexical_heirarchy: ComputingNode<Source, EcoVec<LexicalHierarchy>>,
}
impl Default for ModuleAnalysisGlobalCache {
fn default() -> Self {
Self {
def_use_lexical_heirarchy: ComputingNode::new("def_use_lexical_heirarchy"),
}
}
}
/// A global cache (spanning the compiler server) for all levels of analysis
/// results of a module.
#[derive(Default)]
pub struct AnalysisGlobalCaches {
modules: HashMap<TypstFileId, ModuleAnalysisGlobalCache>,
}
/// A cache for all levels of analysis results of a module.
@@ -233,6 +333,21 @@ impl<'w> AnalysisContext<'w> {
pub fn to_lsp_range(&self, position: TypstRange, src: &Source) -> LspRange {
typst_to_lsp::range(position, src, self.analysis.position_encoding)
}
pub(crate) fn def_use_lexical_hierarchy(
&mut self,
source: Source,
) -> Option<EcoVec<LexicalHierarchy>> {
self.analysis
.caches
.modules
.entry(source.id())
.or_default()
.def_use_lexical_heirarchy
.compute(source, |_before, after| {
crate::syntax::get_lexical_hierarchy(after, crate::syntax::LexicalScopeKind::DefUse)
})
}
}
/// The context for searching in the workspace.

View file

@@ -3,7 +3,7 @@ use std::{collections::HashMap, path::PathBuf};
use reflexo::debug_loc::DataSource;
use serde::{Deserialize, Serialize};
use typst::text::Font;
use typst::text::{Font, FontStretch, FontStyle, FontWeight};
use typst::{
layout::{Frame, FrameItem},
model::Document,
@@ -39,6 +39,12 @@ pub struct DocumentFontInfo {
/// The display name of the font, which is computed by this crate and
/// unnecessary from any fields of the font file.
pub name: String,
/// The style of the font.
pub style: FontStyle,
/// The weight of the font.
pub weight: FontWeight,
/// The stretch of the font.
pub stretch: FontStretch,
/// The PostScript name of the font.
pub postscript_name: Option<String>,
/// The Family in font file.
@@ -165,12 +171,16 @@ impl<'a, 'w> DocumentMetricsWorker<'a, 'w> {
.into_iter()
.map(|(font, uses)| {
let extra = self.ctx.resources.font_info(font.clone());
let info = &font.info();
DocumentFontInfo {
name: format!("{} ({:?})", font.info().family, font.info().variant),
name: info.family.clone(),
style: info.variant.style,
weight: info.variant.weight,
stretch: info.variant.stretch,
postscript_name: font.find_name(POST_SCRIPT_NAME),
full_name: font.find_name(FULL_NAME),
family: font.find_name(FAMILY),
fixed_family: Some(font.info().family.clone()),
fixed_family: Some(info.family.clone()),
source: extra.map(|e| self.internal_source(e)),
index: Some(font.index()),
uses_scale: Some(uses),

View file

@@ -113,6 +113,7 @@ pub trait StatefulRequest {
mod polymorphic {
use lsp_types::TextEdit;
use serde::{Deserialize, Serialize};
use typst::foundations::Dict;
use super::prelude::*;
use super::*;
@@ -161,6 +162,19 @@ mod polymorphic {
pub path: PathBuf,
}
#[derive(Debug, Clone)]
pub struct ServerInfoRequest {}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServerInfoReponse {
pub root: Option<PathBuf>,
#[serde(rename = "fontPaths")]
pub font_paths: Vec<PathBuf>,
pub inputs: Dict,
#[serde(rename = "estimatedMemoryUsage")]
pub estimated_memory_usage: HashMap<String, usize>,
}
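For reference, a sketch of the JSON shape this response serializes to, using a simplified stand-in struct that mirrors the serde renames above (typst's Dict is replaced by a plain string map, and all values are illustrative assumptions):

use std::collections::HashMap;
use std::path::PathBuf;

use serde::Serialize;

// Simplified stand-in for ServerInfoReponse, mirroring its serde renames.
#[derive(Serialize)]
struct ServerInfoSketch {
    root: Option<PathBuf>,
    #[serde(rename = "fontPaths")]
    font_paths: Vec<PathBuf>,
    inputs: HashMap<String, String>,
    #[serde(rename = "estimatedMemoryUsage")]
    estimated_memory_usage: HashMap<String, usize>,
}

fn main() {
    let info = ServerInfoSketch {
        root: Some(PathBuf::from("/workspace/project")),
        font_paths: vec![PathBuf::from("/workspace/fonts")],
        inputs: HashMap::from([("theme".to_owned(), "dark".to_owned())]),
        estimated_memory_usage: HashMap::from([("analysis".to_owned(), 4096)]),
    };
    // Prints e.g. {"root":"/workspace/project","fontPaths":["/workspace/fonts"],
    //              "inputs":{"theme":"dark"},"estimatedMemoryUsage":{"analysis":4096}}
    println!("{}", serde_json::to_string(&info).unwrap());
}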
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum FoldRequestFeature {
PinnedFirst,
@@ -192,6 +206,7 @@ mod polymorphic {
SelectionRange(SelectionRangeRequest),
DocumentMetrics(DocumentMetricsRequest),
ServerInfo(ServerInfoRequest),
}
impl CompilerQueryRequest {
@@ -219,6 +234,7 @@ mod polymorphic {
CompilerQueryRequest::SelectionRange(..) => ContextFreeUnique,
CompilerQueryRequest::DocumentMetrics(..) => PinnedFirst,
CompilerQueryRequest::ServerInfo(..) => Mergable,
}
}
@@ -245,6 +261,7 @@ mod polymorphic {
CompilerQueryRequest::SelectionRange(req) => &req.path,
CompilerQueryRequest::DocumentMetrics(req) => &req.path,
CompilerQueryRequest::ServerInfo(..) => return None,
})
}
}
@@ -272,6 +289,7 @@ mod polymorphic {
SelectionRange(Option<Vec<SelectionRange>>),
DocumentMetrics(Option<DocumentMetricsResponse>),
ServerInfo(Option<HashMap<String, ServerInfoReponse>>),
}
}

View file

@@ -233,6 +233,18 @@ pub(crate) struct LexicalHierarchy {
pub children: Option<LazyHash<EcoVec<LexicalHierarchy>>>,
}
impl LexicalHierarchy {
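/// Get the estimated memory usage of this hierarchy entry, including its children.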
pub fn estimated_memory(&self) -> usize {
std::mem::size_of::<Self>()
+ std::mem::size_of::<LexicalInfo>()
+ self.info.name.len()
+ self
.children
.as_ref()
.map_or(0, |c| c.iter().map(|e| e.estimated_memory()).sum())
}
}
impl Serialize for LexicalHierarchy {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeStruct;

View file

@@ -74,6 +74,7 @@ pub fn snapshot_testing(name: &str, f: &impl Fn(&mut AnalysisContext, PathBuf))
Analysis {
root,
position_encoding: PositionEncoding::Utf16,
caches: Default::default(),
},
);
ctx.test_files(|| paths);

View file

@@ -6,6 +6,9 @@ pub mod render;
pub mod typ_client;
pub mod typ_server;
use std::path::Path;
use tinymist_query::analysis::Analysis;
use tinymist_query::ExportKind;
use tokio::sync::{broadcast, watch};
use typst::util::Deferred;
@@ -107,7 +110,11 @@ impl CompileServer {
let driver = CompileDriver {
inner: driver,
handler,
position_encoding,
analysis: Analysis {
position_encoding,
root: Path::new("").into(),
caches: Default::default(),
},
};
// Create the actor

View file

@@ -26,6 +26,8 @@
//! information to other actors.
use std::{
collections::HashMap,
ops::Deref,
path::{Path, PathBuf},
sync::Arc,
};
@@ -35,7 +37,7 @@ use log::{error, info, trace};
use parking_lot::Mutex;
use tinymist_query::{
analysis::{Analysis, AnalysisContext, AnaylsisResources},
DiagnosticsMap, ExportKind, PositionEncoding, VersionedDocument,
DiagnosticsMap, ExportKind, ServerInfoReponse, VersionedDocument,
};
use tokio::sync::{broadcast, mpsc, oneshot, watch};
use typst::{
@@ -143,7 +145,7 @@ pub struct CompileDriver {
pub(super) inner: CompileDriverInner,
#[allow(unused)]
pub(super) handler: CompileHandler,
pub(super) position_encoding: PositionEncoding,
pub(super) analysis: Analysis,
}
impl CompileMiddleware for CompileDriver {
@@ -208,7 +210,6 @@ impl CompileDriver {
&mut self,
f: impl FnOnce(&mut AnalysisContext<'_>) -> T,
) -> anyhow::Result<T> {
let enc = self.position_encoding;
let w = self.inner.world_mut();
let Some(main) = w.main_id() else {
@@ -252,19 +253,15 @@ impl CompileDriver {
}
let w = WrapWorld(w);
Ok(f(&mut AnalysisContext::new(
&w,
Analysis {
root,
position_encoding: enc,
},
)))
self.analysis.root = root;
Ok(f(&mut AnalysisContext::new_borrow(&w, &mut self.analysis)))
}
}
pub struct CompileClientActor {
diag_group: String,
config: CompileConfig,
pub diag_group: String,
pub config: CompileConfig,
entry: Arc<Mutex<EntryState>>,
inner: Deferred<CompileClient>,
render_tx: broadcast::Sender<RenderActorRequest>,
@@ -419,6 +416,29 @@ impl CompileClientActor {
}))
.unwrap();
}
pub fn collect_server_info(&self) -> anyhow::Result<HashMap<String, ServerInfoReponse>> {
let dg = self.diag_group.clone();
let res = self.steal(move |c| {
let cc = &c.compiler.compiler;
let info = ServerInfoReponse {
root: cc.world().entry.root().map(|e| e.as_ref().to_owned()),
// todo: font paths
// font_paths: cc.world().font_resolver.inner,
font_paths: vec![],
inputs: cc.world().inputs.as_ref().deref().clone(),
estimated_memory_usage: HashMap::from_iter([
("vfs".to_owned(), { cc.world().vfs.memory_usage() }),
("analysis".to_owned(), cc.analysis.estimated_memory()),
]),
};
HashMap::from_iter([(dg, info)])
})?;
Ok(res)
}
}
impl CompileClientActor {

View file

@@ -602,6 +602,7 @@ impl TypstLanguageServer {
redirected_command!("tinymist.doInitTemplate", Self::init_template),
redirected_command!("tinymist.doGetTemplateEntry", Self::do_get_template_entry),
redirected_command!("tinymist.getDocumentMetrics", Self::get_document_metrics),
redirected_command!("tinymist.getServerInfo", Self::get_server_info),
// For Documentations
redirected_command!("tinymist.getResources", Self::get_resources),
])
@@ -650,8 +651,7 @@ impl TypstLanguageServer {
Ok(res)
}
/// Export the current document as some format. The client is responsible
/// for passing the correct absolute path of typst document.
/// Get the metrics of the document.
pub fn get_document_metrics(&self, arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
let path = parse_path(arguments.first())?.as_ref().to_owned();
@@ -662,6 +662,16 @@ impl TypstLanguageServer {
Ok(res)
}
/// Get the server info.
pub fn get_server_info(&self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
let res = run_query!(self.ServerInfo())?;
let res = serde_json::to_value(res)
.map_err(|e| internal_error(format!("Cannot serialize response {e}")))?;
Ok(res)
}
/// Clear all cached resources.
///
/// # Errors

View file

@@ -151,9 +151,9 @@ impl TypstLanguageServer {
#[macro_export]
macro_rules! run_query {
($self: ident.$query: ident ($($arg_key:ident),+ $(,)?)) => {{
($self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
use tinymist_query::*;
let req = paste! { [<$query Request>] { $($arg_key),+ } };
let req = paste! { [<$query Request>] { $($arg_key),* } };
$self
.query(CompilerQueryRequest::$query(req.clone()))
.map_err(|err| {
@@ -268,6 +268,10 @@ impl TypstLanguageServer {
Symbol(req) => query_world!(client, Symbol, req),
DocumentMetrics(req) => query_state!(client, DocumentMetrics, req),
ServerInfo(_) => {
let res = client.collect_server_info()?;
Ok(CompilerQueryResponse::ServerInfo(Some(res)))
}
FoldingRange(..)
| SelectionRange(..)

View file

@@ -1,6 +1,7 @@
import * as vscode from "vscode";
import * as path from "path";
import { readFile } from "fs/promises";
import { getFocusingFile } from "./extension";
async function loadHTMLFile(context: vscode.ExtensionContext, relativePath: string) {
const filePath = path.resolve(context.extensionPath, relativePath);
@@ -59,7 +60,10 @@ export async function activateEditorTool(context: vscode.ExtensionContext, tool:
const panel = vscode.window.createWebviewPanel(
`tinymist-${tool}`,
title,
vscode.ViewColumn.Beside, // Which sides
{
viewColumn: vscode.ViewColumn.Beside,
preserveFocus: tool == "summary",
}, // Which sides
{
enableScripts: true,
retainContextWhenHidden: true,
@@ -126,19 +130,22 @@ export async function activateEditorTool(context: vscode.ExtensionContext, tool:
case "tracing":
break;
case "summary": {
// tinymist.getCurrentDocumentMetrics
const result = await vscode.commands.executeCommand(
"tinymist.getCurrentDocumentMetrics"
);
const [docMetrics, serverInfo] = await fetchSummaryInfo();
if (!docMetrics || !serverInfo) {
if (!docMetrics) {
vscode.window.showErrorMessage("No document metrics available");
}
if (!serverInfo) {
vscode.window.showErrorMessage("No server info");
}
if (!result) {
vscode.window.showErrorMessage("No document metrics available");
panel.dispose();
return;
}
const docMetrics = JSON.stringify(result);
html = html.replace(":[[preview:DocumentMetrics]]:", btoa(docMetrics));
html = html.replace(":[[preview:ServerInfo]]:", btoa(serverInfo));
break;
}
case "symbol-picker": {
@@ -149,7 +156,7 @@ export async function activateEditorTool(context: vscode.ExtensionContext, tool:
);
if (!result) {
vscode.window.showErrorMessage("No document metrics available");
vscode.window.showErrorMessage("No resource");
panel.dispose();
return;
}
@@ -162,3 +169,48 @@ export async function activateEditorTool(context: vscode.ExtensionContext, tool:
panel.webview.html = html;
}
const waitTimeList = [100, 200, 400, 1000, 1200, 1500, 1800, 2000];
async function fetchSummaryInfo(): Promise<[any | undefined, any | undefined]> {
let res: [any | undefined, any | undefined] = [undefined, undefined];
for (const to of waitTimeList) {
const focusingFile = getFocusingFile();
if (focusingFile === undefined) {
await vscode.window.showErrorMessage("No focusing typst file");
return res;
}
await work(focusingFile, res);
if (res[0] && res[1]) {
break;
}
// wait for a bit
await new Promise((resolve) => setTimeout(resolve, to));
}
return res;
async function work(focusingFile: string, res: [any | undefined, any | undefined]) {
if (!res[0]) {
const result = await vscode.commands.executeCommand(
"tinymist.getDocumentMetrics",
focusingFile
);
if (!result) {
return;
}
const docMetrics = JSON.stringify(result);
res[0] = docMetrics;
}
if (!res[1]) {
const result2 = await vscode.commands.executeCommand("tinymist.getServerInfo");
if (!result2) {
return;
}
const serverInfo = JSON.stringify(result2);
res[1] = serverInfo;
}
}
}

View file

@@ -108,6 +108,7 @@ async function startClient(context: ExtensionContext): Promise<void> {
// console.log("plaintext", langId, editor?.document.uri.fsPath);
// }
if (langId !== "typst") {
// console.log("not typst", langId, editor?.document.uri.fsPath);
return commandActivateDoc(undefined);
}
return commandActivateDoc(editor?.document.uri.fsPath);
@@ -535,7 +536,6 @@ async function commandActivateDoc(fsPath: string | undefined): Promise<void> {
}
async function commandRunCodeLens(...args: string[]): Promise<void> {
console.log("run code lens", args);
if (args.length === 0) {
return;
}

View file

@@ -2,14 +2,16 @@ import van, { ChildDom } from "vanjs-core";
import { requestRevealPath } from "../vscode";
const { div, a, span, code, br } = van.tags;
interface CompileArgs {
interface ServerInfo {
root: string;
fontPaths: string[];
inputs: Record<string, string>;
estimatedMemoryUsage: Record<string, number>;
}
type ServerInfoMap = Record<string, ServerInfo>;
export const Summary = () => {
const compileArgs = van.state(ARGS_MOCK);
const documentMetricsData = `:[[preview:DocumentMetrics]]:`;
const docMetrics = van.state<DocumentMetrics>(
documentMetricsData.startsWith(":")
@@ -17,6 +19,13 @@ export const Summary = () => {
: JSON.parse(atob(documentMetricsData))
);
console.log("docMetrics", docMetrics);
const serverInfoData = `:[[preview:ServerInfo]]:`;
const serverInfos = van.state<ServerInfoMap>(
serverInfoData.startsWith(":")
? SERVER_INFO_MOCK
: JSON.parse(atob(serverInfoData))
);
console.log("serverInfos", serverInfos);
const FontSlot = (font: FontInfo) => {
let fontName;
@@ -59,6 +68,23 @@ export const Summary = () => {
font.usesScale,
" use(s).",
br(),
code("Variant"),
": ",
code(
font.style === "normal" || !font.style
? ""
: `${humanStyle(font.style)}, `,
span(
{ title: `Weight ${font.weight || 400}` },
`${humanWeight(font.weight)} Weight`
),
", ",
span(
{ title: `Stretch ${(font.stretch || 1000) / 10}%` },
`${humanStretch(font.stretch)} Stretch`
)
),
br(),
code("PostScriptName"),
": ",
code(font.postscriptName),
@@ -79,7 +105,7 @@ export const Summary = () => {
const ArgSlots = () => {
const res: ChildDom[] = [];
let val = compileArgs.val;
let val = serverInfos.val["primary"];
if (val.root) {
res.push(
div(
@ -117,6 +143,12 @@ export const Summary = () => {
code(...codeList)
)
);
for (const [key, usage] of Object.entries(val.estimatedMemoryUsage)) {
res.push(
div(a(code(`memoryUsage (${key})`)), ": ", code(humanSize(usage)))
);
}
}
return res;
@@ -261,6 +293,9 @@ interface AnnotatedContent {
interface FontInfo {
name: string;
style?: string;
weight?: number;
stretch?: number;
postscriptName: string;
family?: string;
fullName?: string;
@@ -315,14 +350,135 @@ const DOC_MOCK: DocumentMetrics = {
],
};
const ARGS_MOCK: CompileArgs = {
root: "C:\\Users\\OvO\\work\\rust\\tinymist",
fontPaths: [
"C:\\Users\\OvO\\work\\rust\\tinymist\\assets\\fonts",
"C:\\Users\\OvO\\work\\assets\\fonts",
],
inputs: {
theme: "dark",
context: '{"preview":true}',
const SERVER_INFO_MOCK: ServerInfoMap = {
primary: {
root: "C:\\Users\\OvO\\work\\rust\\tinymist",
fontPaths: [
"C:\\Users\\OvO\\work\\rust\\tinymist\\assets\\fonts",
"C:\\Users\\OvO\\work\\assets\\fonts",
],
inputs: {
theme: "dark",
context: '{"preview":true}',
},
estimatedMemoryUsage: {},
},
};
function humanSize(size: number) {
const units = ["B", "KB", "MB", "GB", "TB"];
let unit = 0;
while (size >= 768 && unit < units.length - 1) {
size /= 1024;
unit++;
}
return `${size.toFixed(2)} ${units[unit]}`;
}
function almost(value: number, target: number, threshold = 0.01) {
return Math.abs(value - target) < threshold;
}
function humanStyle(style?: string) {
if (!style) {
return "Regular";
}
if (style === "italic") {
return "Italic";
}
if (style === "oblique") {
return "Oblique";
}
return `Style ${style}`;
}
function humanWeight(weight?: number) {
if (!weight) {
return "Regular";
}
if (almost(weight, 100)) {
return "Thin";
}
if (almost(weight, 200)) {
return "Extra Light";
}
if (almost(weight, 300)) {
return "Light";
}
if (almost(weight, 400)) {
return "Regular";
}
if (almost(weight, 500)) {
return "Medium";
}
if (almost(weight, 600)) {
return "Semibold";
}
if (almost(weight, 700)) {
return "Bold";
}
if (almost(weight, 800)) {
return "Extra Bold";
}
if (almost(weight, 900)) {
return "Black";
}
return `Weight ${weight}`;
}
function humanStretch(stretch?: number) {
if (!stretch) {
return "Normal";
}
if (almost(stretch, 500)) {
return "Ultra-condensed";
}
if (almost(stretch, 625)) {
return "Extra-condensed";
}
if (almost(stretch, 750)) {
return "Condensed";
}
if (almost(stretch, 875)) {
return "Semi-condensed";
}
if (almost(stretch, 1000)) {
return "Normal";
}
if (almost(stretch, 1125)) {
return "Semi-expanded";
}
if (almost(stretch, 1250)) {
return "Expanded";
}
if (almost(stretch, 1500)) {
return "Extra-expanded";
}
if (almost(stretch, 2000)) {
return "Ultra-expanded";
}
return `${stretch}`;
}