Update Tauri to v2 and execute only the node graph in native (#2362)

* Migrate tauri app to v2

* Move flake files to sub directory

* Remove unused plugins

* Backport some of the tauri code

* Implement async node graph execution

Only move node runtime to native code

* Always use gpu feature for tauri

* Fix serialization

* Add logging filters

* Enable native window rendering with vello

* Cleanup

* Remove unused editor instance

* Remove changes from vite config

* Remove warnings

* Remove unused files

* Fix most tests

* Cleanup

* Apply frontend lint

* Readd flake.nix

* Fix tests using --all-features

* Code review

* Enable all backends

* Fix monitor node downcast types

* Change debug log to a warning

* Disable shader passthrough

* Cleanup unused imports

* Remove warning

* Update project setup instructions

---------

Co-authored-by: Keavon Chambers <keavon@keavon.com>
Dennis Kobert 2025-04-14 13:43:15 +02:00 committed by GitHub
parent 29479f6e3e
commit 9b23c7e2db
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
39 changed files with 10429 additions and 2831 deletions
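The heart of this change is splitting graph evaluation out of the editor: the editor-side `NodeGraphExecutor` now talks to the `NodeRuntime` only through a new `NodeRuntimeIO` layer, sending `GraphRuntimeRequest` messages and polling `NodeGraphUpdate` responses, so the runtime can run natively under Tauri. The sketch below models that request/response flow with simplified stand-in types over plain channels; it is not the project's code (the real types appear in the `runtime.rs` and `runtime_io.rs` diffs further down).

```rust
use std::sync::mpsc;

// Simplified stand-ins for GraphRuntimeRequest and NodeGraphUpdate (hypothetical fields).
#[derive(Debug)]
enum RuntimeRequest {
	GraphUpdate { network_hash: u64 },
	ExecutionRequest { execution_id: u64 },
}

#[derive(Debug)]
enum RuntimeUpdate {
	CompilationResponse { ok: bool },
	ExecutionResponse { execution_id: u64 },
}

fn main() {
	// Editor -> runtime and runtime -> editor channels, mirroring NodeRuntimeIO's in-process mode.
	let (request_tx, request_rx) = mpsc::channel();
	let (update_tx, update_rx) = mpsc::channel();

	// Editor side: queue a graph update followed by an execution request.
	request_tx.send(RuntimeRequest::GraphUpdate { network_hash: 0xABCD }).unwrap();
	request_tx.send(RuntimeRequest::ExecutionRequest { execution_id: 1 }).unwrap();

	// Runtime side: drain pending requests and answer each one.
	for request in request_rx.try_iter() {
		let update = match request {
			RuntimeRequest::GraphUpdate { .. } => RuntimeUpdate::CompilationResponse { ok: true },
			RuntimeRequest::ExecutionRequest { execution_id } => RuntimeUpdate::ExecutionResponse { execution_id },
		};
		update_tx.send(update).unwrap();
	}

	// Editor side: poll responses, as the editor's poll_node_graph_evaluation does.
	for update in update_rx.try_iter() {
		println!("editor received {update:?}");
	}
}
```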

1
.envrc Normal file

@ -0,0 +1 @@
use flake .nix


@ -89,7 +89,7 @@ jobs:
- name: 🧪 Run Rust tests
run: |
mold -run cargo test --all-features
mold -run cargo test --all-features --workspace
# miri:
# runs-on: self-hosted

99
.nix/flake.lock generated Normal file

@ -0,0 +1,99 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1743583204,
"narHash": "sha256-F7n4+KOIfWrwoQjXrL2wD9RhFYLs2/GGe/MQY1sSdlE=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "5135c59491985879812717f4c9fea69604e7f26f",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1739214665,
"narHash": "sha256-26L8VAu3/1YRxS8MHgBOyOM8xALdo6N0I04PgorE7UM=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "2c8d3f48d33929642c1c12cd243df4cc7d2ce434",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"nixpkgs-unstable": "nixpkgs-unstable",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1743682350,
"narHash": "sha256-S/MyKOFajCiBm5H5laoE59wB6w0NJ4wJG53iAPfYW3k=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "c4a8327b0f25d1d81edecbb6105f74d7cf9d7382",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

114
.nix/flake.nix Normal file

@ -0,0 +1,114 @@
# This is a helper file for people using NixOS as their operating system.
# If you don't know what this file does, you can safely ignore it.
# This file defines the development environment for the project.
#
# Development Environment:
# - Provides all necessary tools for Rust/WASM development
# - Includes Tauri dependencies for desktop app development
# - Sets up profiling and debugging tools
# - Configures mold as the default linker for faster builds
#
#
# Usage:
# - Development shell: `nix develop`
# - Run in dev shell with direnv: add `use flake` to .envrc
{
description = "Development environment and build configuration";
inputs = {
# This URL should be changed to match your system packages if you work on Tauri, because you need to use the same graphics library versions as the ones used by your system
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
nixpkgs-unstable.url = "github:nixos/nixpkgs/nixos-unstable";
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
flake-utils.url = "github:numtide/flake-utils";
};
outputs = { nixpkgs, nixpkgs-unstable, rust-overlay, flake-utils, ... }:
flake-utils.lib.eachDefaultSystem (system:
let
overlays = [ (import rust-overlay) ];
pkgs = import nixpkgs {
inherit system overlays;
};
pkgs-unstable = import nixpkgs-unstable {
inherit system overlays;
};
rustc-wasm = pkgs.rust-bin.stable.latest.default.override {
targets = [ "wasm32-unknown-unknown" ];
extensions = [ "rust-src" "rust-analyzer" "clippy" "cargo" ];
};
# Shared build inputs - system libraries that need to be in LD_LIBRARY_PATH
buildInputs = with pkgs; [
# System libraries
openssl
vulkan-loader
mesa
libraw
# Tauri dependencies: keep in sync with https://v2.tauri.app/start/prerequisites/
at-spi2-atk
atkmm
cairo
gdk-pixbuf
glib
gtk3
harfbuzz
librsvg
libsoup_3
pango
webkitgtk_4_1
openssl
];
# Build tools that don't need to be in LD_LIBRARY_PATH
buildTools = [
rustc-wasm
pkgs.nodejs
pkgs.nodePackages.npm
pkgs.binaryen
pkgs.wasm-bindgen-cli
pkgs-unstable.wasm-pack
pkgs.pkg-config
pkgs.git
pkgs.gobject-introspection
pkgs-unstable.cargo-tauri
# Linker
pkgs.mold
];
# Development tools that don't need to be in LD_LIBRARY_PATH
devTools = with pkgs; [
cargo-watch
cargo-nextest
cargo-expand
# Profiling tools
gnuplot
samply
cargo-flamegraph
];
in
{
# Development shell configuration
devShells.default = pkgs.mkShell {
packages = buildInputs ++ buildTools ++ devTools;
LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath buildInputs;
GIO_MODULE_DIR="${pkgs.glib-networking}/lib/gio/modules/";
XDG_DATA_DIRS="${pkgs.gsettings-desktop-schemas}/share/gsettings-schemas/${pkgs.gsettings-desktop-schemas.name}:${pkgs.gtk3}/share/gsettings-schemas/${pkgs.gtk3.name}:$XDG_DATA_DIRS";
shellHook = ''
alias cargo='mold --run cargo'
'';
};
}
);
}

3264
Cargo.lock generated

File diff suppressed because it is too large


@ -22,6 +22,16 @@ members = [
"website/other/bezier-rs-demos/wasm",
]
exclude = ["node-graph/gpu-compiler"]
default-members = [
"editor",
"frontend/wasm",
"node-graph/gcore",
"node-graph/gstd",
"node-graph/graph-craft",
"node-graph/graphene-cli",
"node-graph/interpreted-executor",
"node-graph/node-macro",
]
resolver = "2"
[workspace.dependencies]


@ -20,14 +20,17 @@ gpu = [
"wgpu-executor",
"gpu-executor",
]
tauri = ["ron", "decouple-execution"]
decouple-execution = []
resvg = ["graphene-std/resvg"]
vello = ["graphene-std/vello", "resvg", "graphene-core/vello"]
ron = ["dep:ron"]
[dependencies]
# Local dependencies
graphite-proc-macros = { path = "../proc-macros" }
graph-craft = { path = "../node-graph/graph-craft" }
interpreted-executor = { path = "../node-graph/interpreted-executor" }
interpreted-executor = { path = "../node-graph/interpreted-executor", features = ["serde"] }
graphene-core = { path = "../node-graph/gcore" }
graphene-std = { path = "../node-graph/gstd", features = ["serde"] }
@ -69,6 +72,7 @@ gpu-executor = { path = "../node-graph/gpu-executor", optional = true }
# Optional workspace dependencies
wasm-bindgen = { workspace = true, optional = true }
wasm-bindgen-futures = { workspace = true, optional = true }
ron = { workspace = true, optional = true }
[dev-dependencies]
# Workspace dependencies


@ -10,7 +10,9 @@ pub struct OverlaysMessageData<'a> {
#[derive(Debug, Clone, Default)]
pub struct OverlaysMessageHandler {
pub overlay_providers: HashSet<OverlayProvider>,
#[cfg(target_arch = "wasm32")]
canvas: Option<web_sys::HtmlCanvasElement>,
#[cfg(target_arch = "wasm32")]
context: Option<web_sys::CanvasRenderingContext2d>,
}
@ -65,10 +67,7 @@ impl MessageHandler<OverlaysMessage, OverlaysMessageData<'_>> for OverlaysMessag
}
#[cfg(not(target_arch = "wasm32"))]
OverlaysMessage::Draw => {
warn!(
"Cannot render overlays on non-Wasm targets.\n{responses:?} {overlays_visible} {ipp:?} {:?} {:?}",
self.canvas, self.context
);
warn!("Cannot render overlays on non-Wasm targets.\n{responses:?} {overlays_visible} {ipp:?}",);
}
OverlaysMessage::AddProvider(message) => {
self.overlay_providers.insert(message);


@ -24,8 +24,6 @@ use graph_craft::document::{DocumentNodeImplementation, NodeId, NodeInput};
use graphene_core::text::{Font, TypesettingConfig};
use graphene_std::vector::style::{Fill, FillType, Gradient};
use graphene_std::vector::{VectorData, VectorDataTable};
use interpreted_executor::dynamic_executor::IntrospectError;
use std::sync::Arc;
use std::vec;
pub struct PortfolioMessageData<'a> {
@ -1202,10 +1200,6 @@ impl PortfolioMessageHandler {
Self { executor, ..Default::default() }
}
pub async fn introspect_node(&self, node_path: &[NodeId]) -> Result<Arc<dyn std::any::Any + Send + Sync>, IntrospectError> {
self.executor.introspect_node(node_path).await
}
pub fn document(&self, document_id: DocumentId) -> Option<&DocumentMessageHandler> {
self.documents.get(&document_id)
}


@ -37,9 +37,9 @@ impl MessageHandler<SpreadsheetMessage, ()> for SpreadsheetMessageHandler {
self.update_layout(responses);
}
SpreadsheetMessage::UpdateLayout { inspect_result } => {
SpreadsheetMessage::UpdateLayout { mut inspect_result } => {
self.inspect_node = Some(inspect_result.inspect_node);
self.introspected_data = inspect_result.introspected_data;
self.introspected_data = inspect_result.take_data();
self.update_layout(responses)
}
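The switch from reading `inspect_result.introspected_data` directly to calling `take_data()` exists because `InspectResult` (defined later in this diff, in `runtime.rs`) stores its payload differently depending on the `decouple-execution` feature. A type-level sketch of that cfg-gated pattern, using a plain `String` as a stand-in for the serializable `TaggedValue`:

```rust
use std::sync::Arc;

// Stand-in for the editor's InspectResult: with "decouple-execution" the data must cross a
// serialization boundary, so it is stored as a serializable value (a String here, in place of
// the real TaggedValue) and converted back to `Any` only when taken.
pub struct InspectResult {
	#[cfg(not(feature = "decouple-execution"))]
	introspected_data: Option<Arc<dyn std::any::Any + Send + Sync>>,
	#[cfg(feature = "decouple-execution")]
	introspected_data: Option<String>,
	pub inspect_node: u64,
}

impl InspectResult {
	pub fn take_data(&mut self) -> Option<Arc<dyn std::any::Any + Send + Sync>> {
		// In-process builds hand out the shared data as-is.
		#[cfg(not(feature = "decouple-execution"))]
		return self.introspected_data.clone();
		// Decoupled builds rebuild an `Any` from the serializable value.
		#[cfg(feature = "decouple-execution")]
		return self.introspected_data.take().map(|value| Arc::new(value) as Arc<dyn std::any::Any + Send + Sync>);
	}
}
```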


@ -713,15 +713,10 @@ impl MessageHandler<TransformLayerMessage, TransformData<'_>> for TransformLayer
#[cfg(test)]
mod test_transform_layer {
use crate::messages::portfolio::document::graph_operation::{transform_utils, utility_types::ModifyInputsContext};
use crate::messages::portfolio::document::utility_types::misc::GroupFolderType;
use crate::messages::{
portfolio::document::graph_operation::{
transform_utils,
utility_types::{ModifyInputsContext, TransformIn},
},
prelude::Message,
tool::transform_layer::transform_layer_message_handler::VectorModificationType,
};
use crate::messages::prelude::Message;
use crate::messages::tool::transform_layer::transform_layer_message_handler::VectorModificationType;
use crate::test_utils::test_prelude::*;
use glam::DAffine2;
use graphene_core::vector::PointId;
@ -1232,7 +1227,7 @@ mod test_transform_layer {
// Test 4: Transform layers inside transformed group
let child_layer_id = {
let mut document = editor.active_document_mut();
let document = editor.active_document_mut();
let group_children = document.network_interface.downstream_layers(&group_layer.to_node(), &[]);
if !group_children.is_empty() {
Some(LayerNodeIdentifier::new(group_children[0], &document.network_interface, &[]))


@ -1,88 +1,35 @@
use crate::consts::FILE_SAVE_SUFFIX;
use crate::messages::animation::TimingInformation;
use crate::messages::frontend::utility_types::{ExportBounds, FileType};
use crate::messages::portfolio::document::utility_types::document_metadata::LayerNodeIdentifier;
use crate::messages::portfolio::document::utility_types::network_interface::NodeNetworkInterface;
use crate::messages::prelude::*;
use crate::messages::tool::common_functionality::graph_modification_utils::NodeGraphLayer;
use glam::{DAffine2, DVec2, UVec2};
use graph_craft::concrete;
use graph_craft::document::value::{RenderOutput, TaggedValue};
use graph_craft::document::{DocumentNode, DocumentNodeImplementation, NodeId, NodeInput, NodeNetwork, generate_uuid};
use graph_craft::graphene_compiler::Compiler;
use graph_craft::document::{DocumentNode, DocumentNodeImplementation, NodeId, NodeInput, generate_uuid};
use graph_craft::proto::GraphErrors;
use graph_craft::wasm_application_io::EditorPreferences;
use graphene_core::Context;
use graphene_core::application_io::{NodeGraphUpdateMessage, NodeGraphUpdateSender, RenderConfig};
use graphene_core::memo::IORecord;
use graphene_core::application_io::{NodeGraphUpdateMessage, RenderConfig};
use graphene_core::renderer::RenderSvgSegmentList;
use graphene_core::renderer::{GraphicElementRendered, RenderParams, SvgRender};
use graphene_core::renderer::{RenderSvgSegmentList, SvgSegment};
use graphene_core::text::FontCache;
use graphene_core::transform::Footprint;
use graphene_core::vector::style::ViewMode;
use graphene_std::application_io::TimingInformation;
use graphene_std::renderer::{RenderMetadata, format_transform_matrix};
use graphene_std::vector::{VectorData, VectorDataTable};
use graphene_std::wasm_application_io::{WasmApplicationIo, WasmEditorApi};
use interpreted_executor::dynamic_executor::{DynamicExecutor, IntrospectError, ResolvedDocumentNodeTypesDelta};
use interpreted_executor::util::wrap_network_in_scope;
use once_cell::sync::Lazy;
use spin::Mutex;
use std::sync::Arc;
use std::sync::mpsc::{Receiver, Sender};
use graphene_std::vector::VectorData;
use interpreted_executor::dynamic_executor::ResolvedDocumentNodeTypesDelta;
/// Persistent data between graph executions. It's updated via message passing from the editor thread with [`NodeRuntimeMessage`]`.
/// Some of these fields are put into a [`WasmEditorApi`] which is passed to the final compiled graph network upon each execution.
/// Once the implementation is finished, this will live in a separate thread. Right now it's part of the main JS thread, but its own separate JS stack frame independent from the editor.
pub struct NodeRuntime {
executor: DynamicExecutor,
receiver: Receiver<NodeRuntimeMessage>,
sender: InternalNodeGraphUpdateSender,
editor_preferences: EditorPreferences,
old_graph: Option<NodeNetwork>,
update_thumbnails: bool,
mod runtime_io;
pub use runtime_io::NodeRuntimeIO;
editor_api: Arc<WasmEditorApi>,
node_graph_errors: GraphErrors,
monitor_nodes: Vec<Vec<NodeId>>,
/// Which node is inspected and which monitor node is used (if any) for the current execution
inspect_state: Option<InspectState>,
// TODO: Remove, it doesn't need to be persisted anymore
/// The current renders of the thumbnails for layer nodes.
thumbnail_renders: HashMap<NodeId, Vec<SvgSegment>>,
vector_modify: HashMap<NodeId, VectorData>,
}
/// Messages passed from the editor thread to the node runtime thread.
pub enum NodeRuntimeMessage {
GraphUpdate(GraphUpdate),
ExecutionRequest(ExecutionRequest),
FontCacheUpdate(FontCache),
EditorPreferencesUpdate(EditorPreferences),
}
#[derive(Default, Debug, Clone)]
pub struct ExportConfig {
pub file_name: String,
pub file_type: FileType,
pub scale_factor: f64,
pub bounds: ExportBounds,
pub transparent_background: bool,
pub size: DVec2,
}
pub struct GraphUpdate {
network: NodeNetwork,
/// The node that should be temporary inspected during execution
inspect_node: Option<NodeId>,
}
mod runtime;
pub use runtime::*;
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct ExecutionRequest {
execution_id: u64,
render_config: RenderConfig,
}
#[cfg_attr(feature = "decouple-execution", derive(serde::Serialize, serde::Deserialize))]
pub struct ExecutionResponse {
execution_id: u64,
result: Result<TaggedValue, String>,
@ -93,378 +40,27 @@ pub struct ExecutionResponse {
inspect_result: Option<InspectResult>,
}
#[derive(serde::Serialize, serde::Deserialize)]
pub struct CompilationResponse {
result: Result<ResolvedDocumentNodeTypesDelta, String>,
node_graph_errors: GraphErrors,
}
#[cfg_attr(feature = "decouple-execution", derive(serde::Serialize, serde::Deserialize))]
pub enum NodeGraphUpdate {
ExecutionResponse(ExecutionResponse),
CompilationResponse(CompilationResponse),
NodeGraphUpdateMessage(NodeGraphUpdateMessage),
}
#[derive(Clone)]
struct InternalNodeGraphUpdateSender(Sender<NodeGraphUpdate>);
impl InternalNodeGraphUpdateSender {
fn send_generation_response(&self, response: CompilationResponse) {
self.0.send(NodeGraphUpdate::CompilationResponse(response)).expect("Failed to send response")
}
fn send_execution_response(&self, response: ExecutionResponse) {
self.0.send(NodeGraphUpdate::ExecutionResponse(response)).expect("Failed to send response")
}
}
impl NodeGraphUpdateSender for InternalNodeGraphUpdateSender {
fn send(&self, message: NodeGraphUpdateMessage) {
self.0.send(NodeGraphUpdate::NodeGraphUpdateMessage(message)).expect("Failed to send response")
}
}
pub static NODE_RUNTIME: Lazy<Mutex<Option<NodeRuntime>>> = Lazy::new(|| Mutex::new(None));
impl NodeRuntime {
pub fn new(receiver: Receiver<NodeRuntimeMessage>, sender: Sender<NodeGraphUpdate>) -> Self {
Self {
executor: DynamicExecutor::default(),
receiver,
sender: InternalNodeGraphUpdateSender(sender.clone()),
editor_preferences: EditorPreferences::default(),
old_graph: None,
update_thumbnails: true,
editor_api: WasmEditorApi {
font_cache: FontCache::default(),
editor_preferences: Box::new(EditorPreferences::default()),
node_graph_message_sender: Box::new(InternalNodeGraphUpdateSender(sender)),
application_io: None,
}
.into(),
node_graph_errors: Vec::new(),
monitor_nodes: Vec::new(),
inspect_state: None,
thumbnail_renders: Default::default(),
vector_modify: Default::default(),
}
}
pub async fn run(&mut self) {
if self.editor_api.application_io.is_none() {
self.editor_api = WasmEditorApi {
application_io: Some(WasmApplicationIo::new().await.into()),
font_cache: self.editor_api.font_cache.clone(),
node_graph_message_sender: Box::new(self.sender.clone()),
editor_preferences: Box::new(self.editor_preferences.clone()),
}
.into();
}
let mut font = None;
let mut preferences = None;
let mut graph = None;
let mut execution = None;
for request in self.receiver.try_iter() {
match request {
NodeRuntimeMessage::GraphUpdate(_) => graph = Some(request),
NodeRuntimeMessage::ExecutionRequest(_) => execution = Some(request),
NodeRuntimeMessage::FontCacheUpdate(_) => font = Some(request),
NodeRuntimeMessage::EditorPreferencesUpdate(_) => preferences = Some(request),
}
}
let requests = [font, preferences, graph, execution].into_iter().flatten();
for request in requests {
match request {
NodeRuntimeMessage::FontCacheUpdate(font_cache) => {
self.editor_api = WasmEditorApi {
font_cache,
application_io: self.editor_api.application_io.clone(),
node_graph_message_sender: Box::new(self.sender.clone()),
editor_preferences: Box::new(self.editor_preferences.clone()),
}
.into();
if let Some(graph) = self.old_graph.clone() {
// We ignore this result as compilation errors should have been reported in an earlier iteration
let _ = self.update_network(graph).await;
}
}
NodeRuntimeMessage::EditorPreferencesUpdate(preferences) => {
self.editor_preferences = preferences.clone();
self.editor_api = WasmEditorApi {
font_cache: self.editor_api.font_cache.clone(),
application_io: self.editor_api.application_io.clone(),
node_graph_message_sender: Box::new(self.sender.clone()),
editor_preferences: Box::new(preferences),
}
.into();
if let Some(graph) = self.old_graph.clone() {
// We ignore this result as compilation errors should have been reported in an earlier iteration
let _ = self.update_network(graph).await;
}
}
NodeRuntimeMessage::GraphUpdate(GraphUpdate { mut network, inspect_node }) => {
// Insert the monitor node to manage the inspection
self.inspect_state = inspect_node.map(|inspect| InspectState::monitor_inspect_node(&mut network, inspect));
self.old_graph = Some(network.clone());
self.node_graph_errors.clear();
let result = self.update_network(network).await;
self.update_thumbnails = true;
self.sender.send_generation_response(CompilationResponse {
result,
node_graph_errors: self.node_graph_errors.clone(),
});
}
NodeRuntimeMessage::ExecutionRequest(ExecutionRequest { execution_id, render_config, .. }) => {
let transform = render_config.viewport.transform;
let result = self.execute_network(render_config).await;
let mut responses = VecDeque::new();
// TODO: Only process monitor nodes if the graph has changed, not when only the Footprint changes
self.process_monitor_nodes(&mut responses, self.update_thumbnails);
self.update_thumbnails = false;
// Resolve the result from the inspection by accessing the monitor node
let inspect_result = self.inspect_state.and_then(|state| state.access(&self.executor));
self.sender.send_execution_response(ExecutionResponse {
execution_id,
result,
responses,
transform,
vector_modify: self.vector_modify.clone(),
inspect_result,
});
}
}
}
}
async fn update_network(&mut self, graph: NodeNetwork) -> Result<ResolvedDocumentNodeTypesDelta, String> {
let scoped_network = wrap_network_in_scope(graph, self.editor_api.clone());
// We assume only one output
assert_eq!(scoped_network.exports.len(), 1, "Graph with multiple outputs not yet handled");
let c = Compiler {};
let proto_network = match c.compile_single(scoped_network) {
Ok(network) => network,
Err(e) => return Err(e),
};
self.monitor_nodes = proto_network
.nodes
.iter()
.filter(|(_, node)| node.identifier == "graphene_core::memo::MonitorNode".into())
.map(|(_, node)| node.original_location.path.clone().unwrap_or_default())
.collect::<Vec<_>>();
assert_ne!(proto_network.nodes.len(), 0, "No proto nodes exist?");
self.executor.update(proto_network).await.map_err(|e| {
self.node_graph_errors.clone_from(&e);
format!("{e:?}")
})
}
async fn execute_network(&mut self, render_config: RenderConfig) -> Result<TaggedValue, String> {
use graph_craft::graphene_compiler::Executor;
let result = match self.executor.input_type() {
Some(t) if t == concrete!(RenderConfig) => (&self.executor).execute(render_config).await.map_err(|e| e.to_string()),
Some(t) if t == concrete!(()) => (&self.executor).execute(()).await.map_err(|e| e.to_string()),
Some(t) => Err(format!("Invalid input type {t:?}")),
_ => Err(format!("No input type:\n{:?}", self.node_graph_errors)),
};
let result = match result {
Ok(value) => value,
Err(e) => return Err(e),
};
Ok(result)
}
/// Updates state data
pub fn process_monitor_nodes(&mut self, responses: &mut VecDeque<FrontendMessage>, update_thumbnails: bool) {
// TODO: Consider optimizing this since it's currently O(m*n^2), with a sort it could be made O(m * n*log(n))
self.thumbnail_renders.retain(|id, _| self.monitor_nodes.iter().any(|monitor_node_path| monitor_node_path.contains(id)));
for monitor_node_path in &self.monitor_nodes {
// Skip the inspect monitor node
if self.inspect_state.is_some_and(|inspect_state| monitor_node_path.last().copied() == Some(inspect_state.monitor_node)) {
continue;
}
// The monitor nodes are located within a document node, and are thus children in that network, so this gets the parent document node's ID
let Some(parent_network_node_id) = monitor_node_path.len().checked_sub(2).and_then(|index| monitor_node_path.get(index)).copied() else {
warn!("Monitor node has invalid node id");
continue;
};
// Extract the monitor node's stored `GraphicElement` data.
let Ok(introspected_data) = self.executor.introspect(monitor_node_path) else {
// TODO: Fix the root of the issue causing the spam of this warning (this at least temporarily disables it in release builds)
#[cfg(debug_assertions)]
warn!("Failed to introspect monitor node {}", self.executor.introspect(monitor_node_path).unwrap_err());
continue;
};
if let Some(io) = introspected_data.downcast_ref::<IORecord<Context, graphene_core::GraphicElement>>() {
Self::process_graphic_element(&mut self.thumbnail_renders, parent_network_node_id, &io.output, responses, update_thumbnails)
} else if let Some(io) = introspected_data.downcast_ref::<IORecord<(), graphene_core::GraphicElement>>() {
Self::process_graphic_element(&mut self.thumbnail_renders, parent_network_node_id, &io.output, responses, update_thumbnails)
} else if let Some(io) = introspected_data.downcast_ref::<IORecord<Context, graphene_core::Artboard>>() {
Self::process_graphic_element(&mut self.thumbnail_renders, parent_network_node_id, &io.output, responses, update_thumbnails)
} else if let Some(io) = introspected_data.downcast_ref::<IORecord<(), graphene_core::Artboard>>() {
Self::process_graphic_element(&mut self.thumbnail_renders, parent_network_node_id, &io.output, responses, update_thumbnails)
}
// Insert the vector modify if we are dealing with vector data
else if let Some(record) = introspected_data.downcast_ref::<IORecord<Context, VectorDataTable>>() {
self.vector_modify.insert(parent_network_node_id, record.output.one_instance().instance.clone());
} else if let Some(record) = introspected_data.downcast_ref::<IORecord<(), VectorDataTable>>() {
self.vector_modify.insert(parent_network_node_id, record.output.one_instance().instance.clone());
}
}
}
// If this is `GraphicElement` data:
// Regenerate click targets and thumbnails for the layers in the graph, modifying the state and updating the UI.
fn process_graphic_element(
thumbnail_renders: &mut HashMap<NodeId, Vec<SvgSegment>>,
parent_network_node_id: NodeId,
graphic_element: &impl GraphicElementRendered,
responses: &mut VecDeque<FrontendMessage>,
update_thumbnails: bool,
) {
// RENDER THUMBNAIL
if !update_thumbnails {
return;
}
let bounds = graphic_element.bounding_box(DAffine2::IDENTITY);
// Render the thumbnail from a `GraphicElement` into an SVG string
let render_params = RenderParams::new(ViewMode::Normal, bounds, true, false, false);
let mut render = SvgRender::new();
graphic_element.render_svg(&mut render, &render_params);
// And give the SVG a viewbox and outer <svg>...</svg> wrapper tag
let [min, max] = bounds.unwrap_or_default();
render.format_svg(min, max);
// UPDATE FRONTEND THUMBNAIL
let new_thumbnail_svg = render.svg;
let old_thumbnail_svg = thumbnail_renders.entry(parent_network_node_id).or_default();
if old_thumbnail_svg != &new_thumbnail_svg {
responses.push_back(FrontendMessage::UpdateNodeThumbnail {
id: parent_network_node_id,
value: new_thumbnail_svg.to_svg_string(),
});
*old_thumbnail_svg = new_thumbnail_svg;
}
}
}
pub async fn introspect_node(path: &[NodeId]) -> Result<Arc<dyn std::any::Any + Send + Sync + 'static>, IntrospectError> {
let runtime = NODE_RUNTIME.lock();
if let Some(ref mut runtime) = runtime.as_ref() {
return runtime.executor.introspect(path);
}
Err(IntrospectError::RuntimeNotReady)
}
pub async fn run_node_graph() -> bool {
let Some(mut runtime) = NODE_RUNTIME.try_lock() else { return false };
if let Some(ref mut runtime) = runtime.as_mut() {
runtime.run().await;
}
true
}
pub async fn replace_node_runtime(runtime: NodeRuntime) -> Option<NodeRuntime> {
let mut node_runtime = NODE_RUNTIME.lock();
node_runtime.replace(runtime)
}
#[derive(Debug)]
pub struct NodeGraphExecutor {
sender: Sender<NodeRuntimeMessage>,
receiver: Receiver<NodeGraphUpdate>,
runtime_io: NodeRuntimeIO,
futures: HashMap<u64, ExecutionContext>,
node_graph_hash: u64,
old_inspect_node: Option<NodeId>,
}
/// Which node is inspected and which monitor node is used (if any) for the current execution
#[derive(Debug, Clone, Copy)]
struct InspectState {
inspect_node: NodeId,
monitor_node: NodeId,
}
/// The resulting value from the temporary inspected during execution
#[derive(Clone, Debug, Default)]
pub struct InspectResult {
pub introspected_data: Option<Arc<dyn std::any::Any + Send + Sync + 'static>>,
pub inspect_node: NodeId,
}
// This is very ugly but is required to be inside a message
impl PartialEq for InspectResult {
fn eq(&self, other: &Self) -> bool {
self.inspect_node == other.inspect_node
}
}
impl InspectState {
/// Insert the monitor node to manage the inspection
pub fn monitor_inspect_node(network: &mut NodeNetwork, inspect_node: NodeId) -> Self {
let monitor_id = NodeId::new();
// It is necessary to replace the inputs before inserting the monitor node to avoid changing the input of the new monitor node
for input in network.nodes.values_mut().flat_map(|node| node.inputs.iter_mut()).chain(&mut network.exports) {
let NodeInput::Node { node_id, output_index, .. } = input else { continue };
// We only care about the primary output of our inspect node
if *output_index != 0 || *node_id != inspect_node {
continue;
}
*node_id = monitor_id;
}
let monitor_node = DocumentNode {
inputs: vec![NodeInput::node(inspect_node, 0)], // Connect to the primary output of the inspect node
implementation: DocumentNodeImplementation::proto("graphene_core::memo::MonitorNode"),
manual_composition: Some(graph_craft::generic!(T)),
skip_deduplication: true,
..Default::default()
};
network.nodes.insert(monitor_id, monitor_node);
Self {
inspect_node,
monitor_node: monitor_id,
}
}
/// Resolve the result from the inspection by accessing the monitor node
fn access(&self, executor: &DynamicExecutor) -> Option<InspectResult> {
let introspected_data = executor.introspect(&[self.monitor_node]).inspect_err(|e| warn!("Failed to introspect monitor node {e}")).ok();
Some(InspectResult {
inspect_node: self.inspect_node,
introspected_data,
})
}
}
#[derive(Debug, Clone)]
struct ExecutionContext {
export_config: Option<ExportConfig>,
@ -472,14 +68,9 @@ struct ExecutionContext {
impl Default for NodeGraphExecutor {
fn default() -> Self {
let (request_sender, request_receiver) = std::sync::mpsc::channel();
let (response_sender, response_receiver) = std::sync::mpsc::channel();
futures::executor::block_on(replace_node_runtime(NodeRuntime::new(request_receiver, response_sender)));
Self {
futures: Default::default(),
sender: request_sender,
receiver: response_receiver,
runtime_io: NodeRuntimeIO::new(),
node_graph_hash: 0,
old_inspect_node: None,
}
@ -496,57 +87,31 @@ impl NodeGraphExecutor {
let node_executor = Self {
futures: Default::default(),
sender: request_sender,
receiver: response_receiver,
runtime_io: NodeRuntimeIO::with_channels(request_sender, response_receiver),
node_graph_hash: 0,
old_inspect_node: None,
};
(node_runtime, node_executor)
}
/// Execute the network by flattening it and creating a borrow stack.
fn queue_execution(&self, render_config: RenderConfig) -> u64 {
let execution_id = generate_uuid();
let request = ExecutionRequest { execution_id, render_config };
self.sender.send(NodeRuntimeMessage::ExecutionRequest(request)).expect("Failed to send generation request");
self.runtime_io.send(GraphRuntimeRequest::ExecutionRequest(request)).expect("Failed to send generation request");
execution_id
}
pub async fn introspect_node(&self, path: &[NodeId]) -> Result<Arc<dyn std::any::Any + Send + Sync + 'static>, IntrospectError> {
introspect_node(path).await
}
pub fn update_font_cache(&self, font_cache: FontCache) {
self.sender.send(NodeRuntimeMessage::FontCacheUpdate(font_cache)).expect("Failed to send font cache update");
self.runtime_io.send(GraphRuntimeRequest::FontCacheUpdate(font_cache)).expect("Failed to send font cache update");
}
pub fn update_editor_preferences(&self, editor_preferences: EditorPreferences) {
self.sender
.send(NodeRuntimeMessage::EditorPreferencesUpdate(editor_preferences))
self.runtime_io
.send(GraphRuntimeRequest::EditorPreferencesUpdate(editor_preferences))
.expect("Failed to send editor preferences");
}
pub fn introspect_node_in_network<T: std::any::Any + core::fmt::Debug, U, F1: FnOnce(&NodeNetwork) -> Option<NodeId>, F2: FnOnce(&T) -> U>(
&mut self,
network: &NodeNetwork,
node_path: &[NodeId],
find_node: F1,
extract_data: F2,
) -> Option<U> {
let wrapping_document_node = network.nodes.get(node_path.last()?)?;
let DocumentNodeImplementation::Network(wrapped_network) = &wrapping_document_node.implementation else {
return None;
};
let introspection_node = find_node(wrapped_network)?;
let introspection = futures::executor::block_on(self.introspect_node(&[node_path, &[introspection_node]].concat())).ok()?;
let Some(downcasted): Option<&T> = <dyn std::any::Any>::downcast_ref(introspection.as_ref()) else {
log::warn!("Failed to downcast type for introspection");
return None;
};
Some(extract_data(downcasted))
}
/// Updates the network to monitor all inputs. Useful for the testing.
#[cfg(test)]
pub(crate) fn update_node_graph_instrumented(&mut self, document: &mut DocumentMessageHandler) -> Result<Instrumented, String> {
@ -555,8 +120,8 @@ impl NodeGraphExecutor {
let mut network = document.network_interface.document_network().clone();
let instrumented = Instrumented::new(&mut network);
self.sender
.send(NodeRuntimeMessage::GraphUpdate(GraphUpdate { network, inspect_node: None }))
self.runtime_io
.send(GraphRuntimeRequest::GraphUpdate(GraphUpdate { network, inspect_node: None }))
.map_err(|e| e.to_string())?;
Ok(instrumented)
}
@ -570,7 +135,9 @@ impl NodeGraphExecutor {
self.old_inspect_node = inspect_node;
self.node_graph_hash = network_hash;
self.sender.send(NodeRuntimeMessage::GraphUpdate(GraphUpdate { network, inspect_node })).map_err(|e| e.to_string())?;
self.runtime_io
.send(GraphRuntimeRequest::GraphUpdate(GraphUpdate { network, inspect_node }))
.map_err(|e| e.to_string())?;
}
Ok(())
}
@ -597,6 +164,7 @@ impl NodeGraphExecutor {
let execution_id = self.queue_execution(render_config);
self.futures.insert(execution_id, ExecutionContext { export_config: None });
Ok(())
}
@ -644,8 +212,8 @@ impl NodeGraphExecutor {
export_config.size = size;
// Execute the node graph
self.sender
.send(NodeRuntimeMessage::GraphUpdate(GraphUpdate { network, inspect_node: None }))
self.runtime_io
.send(GraphRuntimeRequest::GraphUpdate(GraphUpdate { network, inspect_node: None }))
.map_err(|e| e.to_string())?;
let execution_id = self.queue_execution(render_config);
let execution_context = ExecutionContext { export_config: Some(export_config) };
@ -688,7 +256,7 @@ impl NodeGraphExecutor {
}
pub fn poll_node_graph_evaluation(&mut self, document: &mut DocumentMessageHandler, responses: &mut VecDeque<Message>) -> Result<(), String> {
let results = self.receiver.try_iter().collect::<Vec<_>>();
let results = self.runtime_io.receive().collect::<Vec<_>>();
for response in results {
match response {
NodeGraphUpdate::ExecutionResponse(execution_response) => {
@ -829,6 +397,23 @@ impl NodeGraphExecutor {
}
}
// Re-export for usage by tests in other modules
#[cfg(test)]
pub use test::Instrumented;
#[cfg(test)]
mod test {
use std::sync::Arc;
use super::*;
use crate::messages::portfolio::document::utility_types::network_interface::NodeNetworkInterface;
use crate::test_utils::test_prelude::{self, NodeGraphLayer};
use graph_craft::document::NodeNetwork;
use graphene_std::Context;
use graphene_std::NodeInputDecleration;
use graphene_std::memo::IORecord;
use test_prelude::LayerNodeIdentifier;
/// Stores all of the monitor nodes that have been attached to a graph
#[derive(Default)]
pub struct Instrumented {
@ -883,7 +468,7 @@ impl Instrumented {
instrumented
}
fn downcast<Input: graphene_std::NodeInputDecleration>(dynamic: Arc<dyn std::any::Any + Send + Sync>) -> Option<Input::Result>
fn downcast<Input: NodeInputDecleration>(dynamic: Arc<dyn std::any::Any + Send + Sync>) -> Option<Input::Result>
where
Input::Result: Send + Sync + Clone + 'static,
{
@ -900,7 +485,7 @@ impl Instrumented {
}
/// Grab all of the values of the input every time it occurs in the graph.
pub fn grab_all_input<'a, Input: graphene_std::NodeInputDecleration + 'a>(&'a self, runtime: &'a NodeRuntime) -> impl Iterator<Item = Input::Result> + 'a
pub fn grab_all_input<'a, Input: NodeInputDecleration + 'a>(&'a self, runtime: &'a NodeRuntime) -> impl Iterator<Item = Input::Result> + 'a
where
Input::Result: Send + Sync + Clone + 'static,
{
@ -913,7 +498,7 @@ impl Instrumented {
.filter_map(Instrumented::downcast::<Input>)
}
pub fn grab_protonode_input<Input: graphene_std::NodeInputDecleration>(&self, path: &Vec<NodeId>, runtime: &NodeRuntime) -> Option<Input::Result>
pub fn grab_protonode_input<Input: NodeInputDecleration>(&self, path: &Vec<NodeId>, runtime: &NodeRuntime) -> Option<Input::Result>
where
Input::Result: Send + Sync + Clone + 'static,
{
@ -924,7 +509,7 @@ impl Instrumented {
Self::downcast::<Input>(dynamic)
}
pub fn grab_input_from_layer<Input: graphene_std::NodeInputDecleration>(&self, layer: LayerNodeIdentifier, network_interface: &NodeNetworkInterface, runtime: &NodeRuntime) -> Option<Input::Result>
pub fn grab_input_from_layer<Input: NodeInputDecleration>(&self, layer: LayerNodeIdentifier, network_interface: &NodeNetworkInterface, runtime: &NodeRuntime) -> Option<Input::Result>
where
Input::Result: Send + Sync + Clone + 'static,
{
@ -933,3 +518,4 @@ impl Instrumented {
self.grab_protonode_input::<Input>(&vec![node], runtime)
}
}
}


@ -0,0 +1,441 @@
use super::*;
use crate::messages::frontend::utility_types::{ExportBounds, FileType};
use glam::{DAffine2, DVec2};
use graph_craft::concrete;
use graph_craft::document::value::TaggedValue;
use graph_craft::document::{NodeId, NodeNetwork};
use graph_craft::graphene_compiler::Compiler;
use graph_craft::proto::GraphErrors;
use graph_craft::wasm_application_io::EditorPreferences;
use graphene_core::application_io::{NodeGraphUpdateMessage, NodeGraphUpdateSender, RenderConfig};
use graphene_core::memo::IORecord;
use graphene_core::renderer::{GraphicElementRendered, RenderParams, SvgRender};
use graphene_core::renderer::{RenderSvgSegmentList, SvgSegment};
use graphene_core::text::FontCache;
use graphene_core::vector::style::ViewMode;
use graphene_std::Context;
use graphene_std::vector::{VectorData, VectorDataTable};
use graphene_std::wasm_application_io::{WasmApplicationIo, WasmEditorApi};
use interpreted_executor::dynamic_executor::{DynamicExecutor, IntrospectError, ResolvedDocumentNodeTypesDelta};
use interpreted_executor::util::wrap_network_in_scope;
use once_cell::sync::Lazy;
use spin::Mutex;
use std::sync::Arc;
use std::sync::mpsc::{Receiver, Sender};
/// Persistent data between graph executions. It's updated via message passing from the editor thread with [`GraphRuntimeRequest`].
/// Some of these fields are put into a [`WasmEditorApi`] which is passed to the final compiled graph network upon each execution.
/// Once the implementation is finished, this will live in a separate thread. Right now it's part of the main JS thread, but runs in its own separate JS stack frame, independent from the editor.
pub struct NodeRuntime {
#[cfg(test)]
pub(super) executor: DynamicExecutor,
#[cfg(not(test))]
executor: DynamicExecutor,
receiver: Receiver<GraphRuntimeRequest>,
sender: InternalNodeGraphUpdateSender,
editor_preferences: EditorPreferences,
old_graph: Option<NodeNetwork>,
update_thumbnails: bool,
editor_api: Arc<WasmEditorApi>,
node_graph_errors: GraphErrors,
monitor_nodes: Vec<Vec<NodeId>>,
/// Which node is inspected and which monitor node is used (if any) for the current execution
inspect_state: Option<InspectState>,
// TODO: Remove, it doesn't need to be persisted anymore
/// The current renders of the thumbnails for layer nodes.
thumbnail_renders: HashMap<NodeId, Vec<SvgSegment>>,
vector_modify: HashMap<NodeId, VectorData>,
}
/// Messages passed from the editor thread to the node runtime thread.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub enum GraphRuntimeRequest {
GraphUpdate(GraphUpdate),
ExecutionRequest(ExecutionRequest),
FontCacheUpdate(FontCache),
EditorPreferencesUpdate(EditorPreferences),
}
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct GraphUpdate {
pub(super) network: NodeNetwork,
/// The node that should be temporarily inspected during execution
pub(super) inspect_node: Option<NodeId>,
}
#[derive(Default, Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ExportConfig {
pub file_name: String,
pub file_type: FileType,
pub scale_factor: f64,
pub bounds: ExportBounds,
pub transparent_background: bool,
pub size: DVec2,
}
#[derive(Clone)]
struct InternalNodeGraphUpdateSender(Sender<NodeGraphUpdate>);
impl InternalNodeGraphUpdateSender {
fn send_generation_response(&self, response: CompilationResponse) {
self.0.send(NodeGraphUpdate::CompilationResponse(response)).expect("Failed to send response")
}
fn send_execution_response(&self, response: ExecutionResponse) {
self.0.send(NodeGraphUpdate::ExecutionResponse(response)).expect("Failed to send response")
}
}
impl NodeGraphUpdateSender for InternalNodeGraphUpdateSender {
fn send(&self, message: NodeGraphUpdateMessage) {
self.0.send(NodeGraphUpdate::NodeGraphUpdateMessage(message)).expect("Failed to send response")
}
}
pub static NODE_RUNTIME: Lazy<Mutex<Option<NodeRuntime>>> = Lazy::new(|| Mutex::new(None));
impl NodeRuntime {
pub fn new(receiver: Receiver<GraphRuntimeRequest>, sender: Sender<NodeGraphUpdate>) -> Self {
Self {
executor: DynamicExecutor::default(),
receiver,
sender: InternalNodeGraphUpdateSender(sender.clone()),
editor_preferences: EditorPreferences::default(),
old_graph: None,
update_thumbnails: true,
editor_api: WasmEditorApi {
font_cache: FontCache::default(),
editor_preferences: Box::new(EditorPreferences::default()),
node_graph_message_sender: Box::new(InternalNodeGraphUpdateSender(sender)),
application_io: None,
}
.into(),
node_graph_errors: Vec::new(),
monitor_nodes: Vec::new(),
thumbnail_renders: Default::default(),
vector_modify: Default::default(),
inspect_state: None,
}
}
pub async fn run(&mut self) {
if self.editor_api.application_io.is_none() {
self.editor_api = WasmEditorApi {
#[cfg(not(test))]
application_io: Some(WasmApplicationIo::new().await.into()),
#[cfg(test)]
application_io: Some(WasmApplicationIo::new_offscreen().await.into()),
font_cache: self.editor_api.font_cache.clone(),
node_graph_message_sender: Box::new(self.sender.clone()),
editor_preferences: Box::new(self.editor_preferences.clone()),
}
.into();
}
let mut font = None;
let mut preferences = None;
let mut graph = None;
let mut execution = None;
for request in self.receiver.try_iter() {
match request {
GraphRuntimeRequest::GraphUpdate(_) => graph = Some(request),
GraphRuntimeRequest::ExecutionRequest(_) => execution = Some(request),
GraphRuntimeRequest::FontCacheUpdate(_) => font = Some(request),
GraphRuntimeRequest::EditorPreferencesUpdate(_) => preferences = Some(request),
}
}
let requests = [font, preferences, graph, execution].into_iter().flatten();
for request in requests {
match request {
GraphRuntimeRequest::FontCacheUpdate(font_cache) => {
self.editor_api = WasmEditorApi {
font_cache,
application_io: self.editor_api.application_io.clone(),
node_graph_message_sender: Box::new(self.sender.clone()),
editor_preferences: Box::new(self.editor_preferences.clone()),
}
.into();
if let Some(graph) = self.old_graph.clone() {
// We ignore this result as compilation errors should have been reported in an earlier iteration
let _ = self.update_network(graph).await;
}
}
GraphRuntimeRequest::EditorPreferencesUpdate(preferences) => {
self.editor_preferences = preferences.clone();
self.editor_api = WasmEditorApi {
font_cache: self.editor_api.font_cache.clone(),
application_io: self.editor_api.application_io.clone(),
node_graph_message_sender: Box::new(self.sender.clone()),
editor_preferences: Box::new(preferences),
}
.into();
if let Some(graph) = self.old_graph.clone() {
// We ignore this result as compilation errors should have been reported in an earlier iteration
let _ = self.update_network(graph).await;
}
}
GraphRuntimeRequest::GraphUpdate(GraphUpdate { mut network, inspect_node }) => {
// Insert the monitor node to manage the inspection
self.inspect_state = inspect_node.map(|inspect| InspectState::monitor_inspect_node(&mut network, inspect));
self.old_graph = Some(network.clone());
self.node_graph_errors.clear();
let result = self.update_network(network).await;
self.update_thumbnails = true;
self.sender.send_generation_response(CompilationResponse {
result,
node_graph_errors: self.node_graph_errors.clone(),
});
}
GraphRuntimeRequest::ExecutionRequest(ExecutionRequest { execution_id, render_config, .. }) => {
let transform = render_config.viewport.transform;
let result = self.execute_network(render_config).await;
let mut responses = VecDeque::new();
// TODO: Only process monitor nodes if the graph has changed, not when only the Footprint changes
self.process_monitor_nodes(&mut responses, self.update_thumbnails);
self.update_thumbnails = false;
// Resolve the result from the inspection by accessing the monitor node
let inspect_result = self.inspect_state.and_then(|state| state.access(&self.executor));
self.sender.send_execution_response(ExecutionResponse {
execution_id,
result,
responses,
transform,
vector_modify: self.vector_modify.clone(),
inspect_result,
});
}
}
}
}
async fn update_network(&mut self, graph: NodeNetwork) -> Result<ResolvedDocumentNodeTypesDelta, String> {
let scoped_network = wrap_network_in_scope(graph, self.editor_api.clone());
// We assume only one output
assert_eq!(scoped_network.exports.len(), 1, "Graph with multiple outputs not yet handled");
let c = Compiler {};
let proto_network = match c.compile_single(scoped_network) {
Ok(network) => network,
Err(e) => return Err(e),
};
self.monitor_nodes = proto_network
.nodes
.iter()
.filter(|(_, node)| node.identifier == "graphene_core::memo::MonitorNode".into())
.map(|(_, node)| node.original_location.path.clone().unwrap_or_default())
.collect::<Vec<_>>();
assert_ne!(proto_network.nodes.len(), 0, "No proto nodes exist?");
self.executor.update(proto_network).await.map_err(|e| {
self.node_graph_errors.clone_from(&e);
format!("{e:?}")
})
}
async fn execute_network(&mut self, render_config: RenderConfig) -> Result<TaggedValue, String> {
use graph_craft::graphene_compiler::Executor;
let result = match self.executor.input_type() {
Some(t) if t == concrete!(RenderConfig) => (&self.executor).execute(render_config).await.map_err(|e| e.to_string()),
Some(t) if t == concrete!(()) => (&self.executor).execute(()).await.map_err(|e| e.to_string()),
Some(t) => Err(format!("Invalid input type {t:?}")),
_ => Err(format!("No input type:\n{:?}", self.node_graph_errors)),
};
let result = match result {
Ok(value) => value,
Err(e) => return Err(e),
};
Ok(result)
}
/// Updates state data
pub fn process_monitor_nodes(&mut self, responses: &mut VecDeque<FrontendMessage>, update_thumbnails: bool) {
// TODO: Consider optimizing this since it's currently O(m*n^2), with a sort it could be made O(m * n*log(n))
self.thumbnail_renders.retain(|id, _| self.monitor_nodes.iter().any(|monitor_node_path| monitor_node_path.contains(id)));
for monitor_node_path in &self.monitor_nodes {
// Skip the inspect monitor node
if self.inspect_state.is_some_and(|inspect_state| monitor_node_path.last().copied() == Some(inspect_state.monitor_node)) {
continue;
}
// The monitor nodes are located within a document node, and are thus children in that network, so this gets the parent document node's ID
let Some(parent_network_node_id) = monitor_node_path.len().checked_sub(2).and_then(|index| monitor_node_path.get(index)).copied() else {
warn!("Monitor node has invalid node id");
continue;
};
// Extract the monitor node's stored `GraphicElement` data.
let Ok(introspected_data) = self.executor.introspect(monitor_node_path) else {
// TODO: Fix the root of the issue causing the spam of this warning (this at least temporarily disables it in release builds)
#[cfg(debug_assertions)]
warn!("Failed to introspect monitor node {}", self.executor.introspect(monitor_node_path).unwrap_err());
continue;
};
if let Some(io) = introspected_data.downcast_ref::<IORecord<Context, graphene_core::GraphicElement>>() {
Self::process_graphic_element(&mut self.thumbnail_renders, parent_network_node_id, &io.output, responses, update_thumbnails)
} else if let Some(io) = introspected_data.downcast_ref::<IORecord<Context, graphene_core::Artboard>>() {
Self::process_graphic_element(&mut self.thumbnail_renders, parent_network_node_id, &io.output, responses, update_thumbnails)
// Insert the vector modify if we are dealing with vector data
} else if let Some(record) = introspected_data.downcast_ref::<IORecord<Context, VectorDataTable>>() {
self.vector_modify.insert(parent_network_node_id, record.output.one_instance().instance.clone());
} else {
log::warn!("failed to downcast monitor node output");
}
}
}
// If this is `GraphicElement` data:
// Regenerate click targets and thumbnails for the layers in the graph, modifying the state and updating the UI.
fn process_graphic_element(
thumbnail_renders: &mut HashMap<NodeId, Vec<SvgSegment>>,
parent_network_node_id: NodeId,
graphic_element: &impl GraphicElementRendered,
responses: &mut VecDeque<FrontendMessage>,
update_thumbnails: bool,
) {
// RENDER THUMBNAIL
if !update_thumbnails {
return;
}
let bounds = graphic_element.bounding_box(DAffine2::IDENTITY);
// Render the thumbnail from a `GraphicElement` into an SVG string
let render_params = RenderParams::new(ViewMode::Normal, bounds, true, false, false);
let mut render = SvgRender::new();
graphic_element.render_svg(&mut render, &render_params);
// And give the SVG a viewbox and outer <svg>...</svg> wrapper tag
let [min, max] = bounds.unwrap_or_default();
render.format_svg(min, max);
// UPDATE FRONTEND THUMBNAIL
let new_thumbnail_svg = render.svg;
let old_thumbnail_svg = thumbnail_renders.entry(parent_network_node_id).or_default();
if old_thumbnail_svg != &new_thumbnail_svg {
responses.push_back(FrontendMessage::UpdateNodeThumbnail {
id: parent_network_node_id,
value: new_thumbnail_svg.to_svg_string(),
});
*old_thumbnail_svg = new_thumbnail_svg;
}
}
}
pub async fn introspect_node(path: &[NodeId]) -> Result<Arc<dyn std::any::Any + Send + Sync + 'static>, IntrospectError> {
let runtime = NODE_RUNTIME.lock();
if let Some(ref mut runtime) = runtime.as_ref() {
return runtime.executor.introspect(path);
}
Err(IntrospectError::RuntimeNotReady)
}
pub async fn run_node_graph() -> bool {
let Some(mut runtime) = NODE_RUNTIME.try_lock() else { return false };
if let Some(ref mut runtime) = runtime.as_mut() {
runtime.run().await;
}
true
}
pub async fn replace_node_runtime(runtime: NodeRuntime) -> Option<NodeRuntime> {
let mut node_runtime = NODE_RUNTIME.lock();
node_runtime.replace(runtime)
}
/// Which node is inspected and which monitor node is used (if any) for the current execution
#[derive(Debug, Clone, Copy)]
struct InspectState {
inspect_node: NodeId,
monitor_node: NodeId,
}
/// The resulting value from the node temporarily inspected during execution
#[derive(Clone, Debug, Default)]
#[cfg_attr(feature = "decouple-execution", derive(serde::Serialize, serde::Deserialize))]
pub struct InspectResult {
#[cfg(not(feature = "decouple-execution"))]
introspected_data: Option<Arc<dyn std::any::Any + Send + Sync + 'static>>,
#[cfg(feature = "decouple-execution")]
introspected_data: Option<TaggedValue>,
pub inspect_node: NodeId,
}
impl InspectResult {
pub fn take_data(&mut self) -> Option<Arc<dyn std::any::Any + Send + Sync + 'static>> {
#[cfg(not(feature = "decouple-execution"))]
return self.introspected_data.clone();
#[cfg(feature = "decouple-execution")]
return self.introspected_data.take().map(|value| value.to_any());
}
}
// This is very ugly but is required to be inside a message
impl PartialEq for InspectResult {
fn eq(&self, other: &Self) -> bool {
self.inspect_node == other.inspect_node
}
}
impl InspectState {
/// Insert the monitor node to manage the inspection
pub fn monitor_inspect_node(network: &mut NodeNetwork, inspect_node: NodeId) -> Self {
let monitor_id = NodeId::new();
// It is necessary to replace the inputs before inserting the monitor node to avoid changing the input of the new monitor node
for input in network.nodes.values_mut().flat_map(|node| node.inputs.iter_mut()).chain(&mut network.exports) {
let NodeInput::Node { node_id, output_index, .. } = input else { continue };
// We only care about the primary output of our inspect node
if *output_index != 0 || *node_id != inspect_node {
continue;
}
*node_id = monitor_id;
}
let monitor_node = DocumentNode {
inputs: vec![NodeInput::node(inspect_node, 0)], // Connect to the primary output of the inspect node
implementation: DocumentNodeImplementation::proto("graphene_core::memo::MonitorNode"),
manual_composition: Some(graph_craft::generic!(T)),
skip_deduplication: true,
..Default::default()
};
network.nodes.insert(monitor_id, monitor_node);
Self {
inspect_node,
monitor_node: monitor_id,
}
}
/// Resolve the result from the inspection by accessing the monitor node
fn access(&self, executor: &DynamicExecutor) -> Option<InspectResult> {
let introspected_data = executor.introspect(&[self.monitor_node]).inspect_err(|e| warn!("Failed to introspect monitor node {e}")).ok();
// TODO: Consider displaying the error instead of ignoring it
#[cfg(feature = "decouple-execution")]
let introspected_data = introspected_data.as_ref().and_then(|data| TaggedValue::try_from_std_any_ref(data).ok());
Some(InspectResult {
inspect_node: self.inspect_node,
introspected_data,
})
}
}
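One detail of `NodeRuntime::run` above worth isolating: on each tick it drains every queued request but keeps only the newest of each kind, then applies them in a fixed order (font cache, preferences, graph, execution), so a burst of messages collapses into at most one compile and one execution. A self-contained sketch of that coalescing pattern with stand-in request types:

```rust
use std::sync::mpsc;

// Stand-in request kinds; the real enum is GraphRuntimeRequest above.
#[derive(Debug)]
enum Request {
	FontCacheUpdate(&'static str),
	GraphUpdate(u32),
	ExecutionRequest(u64),
}

fn main() {
	let (tx, rx) = mpsc::channel();
	for msg in [
		Request::GraphUpdate(1),
		Request::ExecutionRequest(10),
		Request::GraphUpdate(2), // Supersedes GraphUpdate(1): only the newest graph is compiled
		Request::FontCacheUpdate("sans-serif"),
	] {
		tx.send(msg).unwrap();
	}

	// Drain everything queued since the last tick, keeping only the newest request of each kind.
	let (mut font, mut graph, mut execution) = (None, None, None);
	for request in rx.try_iter() {
		match request {
			Request::FontCacheUpdate(_) => font = Some(request),
			Request::GraphUpdate(_) => graph = Some(request),
			Request::ExecutionRequest(_) => execution = Some(request),
		}
	}

	// Apply in a fixed order: environment updates first, then the graph, then execution.
	for request in [font, graph, execution].into_iter().flatten() {
		println!("processing {request:?}");
	}
}
```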


@ -0,0 +1,102 @@
use super::*;
use std::sync::mpsc::{Receiver, Sender};
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
extern "C" {
// Invoke with arguments (default)
#[wasm_bindgen(js_namespace = ["window", "__TAURI__", "core"])]
async fn invoke(cmd: &str, args: JsValue) -> JsValue;
#[wasm_bindgen(js_namespace = ["window", "__TAURI__", "core"], js_name="invoke")]
async fn invoke_without_arg(cmd: &str) -> JsValue;
}
/// Handles communication with the NodeRuntime, either locally or via Tauri
#[derive(Debug)]
pub struct NodeRuntimeIO {
// Sends requests to the local runtime, or (with the `tauri` feature) feeds polled responses into `receiver`
#[cfg(any(not(feature = "tauri"), test))]
sender: Sender<GraphRuntimeRequest>,
#[cfg(all(feature = "tauri", not(test)))]
sender: Sender<NodeGraphUpdate>,
receiver: Receiver<NodeGraphUpdate>,
}
impl Default for NodeRuntimeIO {
fn default() -> Self {
Self::new()
}
}
impl NodeRuntimeIO {
/// Creates a new NodeRuntimeIO instance
pub fn new() -> Self {
#[cfg(any(not(feature = "tauri"), test))]
{
let (response_sender, response_receiver) = std::sync::mpsc::channel();
let (request_sender, request_receiver) = std::sync::mpsc::channel();
futures::executor::block_on(replace_node_runtime(NodeRuntime::new(request_receiver, response_sender)));
Self {
sender: request_sender,
receiver: response_receiver,
}
}
#[cfg(all(feature = "tauri", not(test)))]
{
let (response_sender, response_receiver) = std::sync::mpsc::channel();
Self {
sender: response_sender,
receiver: response_receiver,
}
}
}
#[cfg(test)]
pub fn with_channels(sender: Sender<GraphRuntimeRequest>, receiver: Receiver<NodeGraphUpdate>) -> Self {
Self { sender, receiver }
}
/// Sends a message to the NodeRuntime
pub fn send(&self, message: GraphRuntimeRequest) -> Result<(), String> {
#[cfg(any(not(feature = "tauri"), test))]
{
self.sender.send(message).map_err(|e| e.to_string())
}
#[cfg(all(feature = "tauri", not(test)))]
{
let serialized = ron::to_string(&message).map_err(|e| e.to_string()).unwrap();
wasm_bindgen_futures::spawn_local(async move {
let js_message = create_message_object(&serialized);
invoke("runtime_message", js_message).await;
});
Ok(())
}
}
/// Receives any pending updates from the NodeRuntime
pub fn receive(&self) -> impl Iterator<Item = NodeGraphUpdate> + use<'_> {
// TODO: This introduces extra latency
#[cfg(all(feature = "tauri", not(test)))]
{
let sender = self.sender.clone();
// In the Tauri case, responses are handled separately via poll_node_runtime_updates
wasm_bindgen_futures::spawn_local(async move {
let messages = invoke_without_arg("poll_node_graph").await;
let vec: Vec<_> = ron::from_str(&messages.as_string().unwrap()).unwrap();
for message in vec {
sender.send(message).unwrap();
}
});
}
self.receiver.try_iter()
}
}
#[cfg(all(feature = "tauri", not(test)))]
pub fn create_message_object(message: &str) -> JsValue {
let obj = js_sys::Object::new();
js_sys::Reflect::set(&obj, &JsValue::from_str("message"), &JsValue::from_str(message)).unwrap();
obj.into()
}


@ -7,7 +7,7 @@
"name": "graphite-web-frontend",
"license": "Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^1.6.0",
"@tauri-apps/api": "^2.2.0",
"class-transformer": "^0.5.1",
"idb-keyval": "^6.2.1",
"reflect-metadata": "^0.2.2"
@ -1051,15 +1051,10 @@
}
},
"node_modules/@tauri-apps/api": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@tauri-apps/api/-/api-1.6.0.tgz",
"integrity": "sha512-rqI++FWClU5I2UBp4HXFvl+sBWkdigBkxnpJDQUWttNyG7IZP4FwQGhTNL5EOw0vI8i6eSAJ5frLqO7n7jbJdg==",
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@tauri-apps/api/-/api-2.2.0.tgz",
"integrity": "sha512-R8epOeZl1eJEl603aUMIGb4RXlhPjpgxbGVEaqY+0G5JG9vzV/clNlzTeqc+NLYXVqXcn8mb4c5b9pJIUDEyAg==",
"license": "Apache-2.0 OR MIT",
"engines": {
"node": ">= 14.6.0",
"npm": ">= 6.6.0",
"yarn": ">= 1.19.1"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/tauri"


@ -19,6 +19,8 @@
"lint-fix": "eslint . --fix && tsc --noEmit",
"---------- INTERNAL ----------": "",
"setup": "node package-installer.js",
"tauri:dev": "vite",
"tauri:build": "wasm-pack build ./wasm --target=web --features=tauri",
"wasm:build-dev": "wasm-pack build ./wasm --dev --target=web",
"wasm:build-profiling": "wasm-pack build ./wasm --profiling --target=web",
"wasm:build-production": "wasm-pack build ./wasm --release --target=web",
@ -27,7 +29,6 @@
"wasm:watch-production": "cargo watch --postpone --watch-when-idle --workdir=wasm --shell \"wasm-pack build . --release --target=web -- --color=always\""
},
"dependencies": {
"@tauri-apps/api": "^1.6.0",
"class-transformer": "^0.5.1",
"idb-keyval": "^6.2.1",
"reflect-metadata": "^0.2.2"

View file

@ -10,37 +10,38 @@ edition = "2021"
rust-version = "1.79"
[features]
# by default Tauri runs in production mode
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is an URL
default = ["custom-protocol"]
# this feature is used for production builds where `devPath` points to the filesystem
# By default Tauri runs in production mode. When `tauri dev` runs, it is executed with `cargo run --no-default-features` if `devPath` is a URL
default = ["custom-protocol", "gpu"]
# This feature is used for production builds where `devPath` points to the filesystem
# DO NOT remove this
custom-protocol = ["tauri/custom-protocol"]
gpu = ["graphite-editor/gpu"]
[dependencies]
# Local dependencies
graphite-editor = { path = "../../editor" }
graphite-editor = { path = "../../editor", features = [
"gpu",
"ron",
"vello",
"decouple-execution",
] }
# Workspace dependencies
serde_json = { workspace = true }
serde = { workspace = true }
axum = { workspace = true }
chrono = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }
tokio = { workspace = true, features = ["macros", "rt"] }
ron = { workspace = true }
log = { workspace = true }
fern = { workspace = true }
futures = { workspace = true }
# Required dependencies
tauri = { version = "1.5", features = [
"api-all",
"devtools",
"linux-protocol-headers",
"wry",
] }
tauri = { version = "2", features = ["devtools", "wry"] }
tauri-plugin-shell = "2"
tauri-plugin-http = "2"
[build-dependencies]
# Required dependencies
tauri-build = { version = "1.2", features = [] }
tauri-build = { version = "2", features = [] }

View file

@ -0,0 +1,6 @@
{
"identifier": "desktop-capability",
"platforms": ["macOS", "windows", "linux"],
"windows": ["main"],
"permissions": ["http:default"]
}

View file

@ -0,0 +1,49 @@
{
"identifier": "migrated",
"description": "permissions that were migrated from v1",
"local": true,
"windows": ["main"],
"permissions": [
"core:default",
"core:window:allow-create",
"core:window:allow-center",
"core:window:allow-request-user-attention",
"core:window:allow-set-resizable",
"core:window:allow-set-maximizable",
"core:window:allow-set-minimizable",
"core:window:allow-set-closable",
"core:window:allow-set-title",
"core:window:allow-maximize",
"core:window:allow-unmaximize",
"core:window:allow-minimize",
"core:window:allow-unminimize",
"core:window:allow-show",
"core:window:allow-hide",
"core:window:allow-close",
"core:window:allow-set-decorations",
"core:window:allow-set-always-on-top",
"core:window:allow-set-content-protected",
"core:window:allow-set-size",
"core:window:allow-set-min-size",
"core:window:allow-set-max-size",
"core:window:allow-set-position",
"core:window:allow-set-fullscreen",
"core:window:allow-set-focus",
"core:window:allow-set-icon",
"core:window:allow-set-skip-taskbar",
"core:window:allow-set-cursor-grab",
"core:window:allow-set-cursor-visible",
"core:window:allow-set-cursor-icon",
"core:window:allow-set-cursor-position",
"core:window:allow-set-ignore-cursor-events",
"core:window:allow-start-dragging",
"core:webview:allow-print",
"shell:allow-execute",
"shell:allow-open",
"http:default",
"core:app:allow-app-show",
"core:app:allow-app-hide",
"shell:default",
"http:default"
]
}

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1 @@
{"desktop-capability":{"identifier":"desktop-capability","description":"","local":true,"windows":["main"],"permissions":["http:default"],"platforms":["macOS","windows","linux"]},"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main"],"permissions":["core:default","core:window:allow-create","core:window:allow-center","core:window:allow-request-user-attention","core:window:allow-set-resizable","core:window:allow-set-maximizable","core:window:allow-set-minimizable","core:window:allow-set-closable","core:window:allow-set-title","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-minimize","core:window:allow-unminimize","core:window:allow-show","core:window:allow-hide","core:window:allow-close","core:window:allow-set-decorations","core:window:allow-set-always-on-top","core:window:allow-set-content-protected","core:window:allow-set-size","core:window:allow-set-min-size","core:window:allow-set-max-size","core:window:allow-set-position","core:window:allow-set-fullscreen","core:window:allow-set-focus","core:window:allow-set-icon","core:window:allow-set-skip-taskbar","core:window:allow-set-cursor-grab","core:window:allow-set-cursor-visible","core:window:allow-set-cursor-icon","core:window:allow-set-cursor-position","core:window:allow-set-ignore-cursor-events","core:window:allow-start-dragging","core:webview:allow-print","shell:allow-execute","shell:allow-open","http:default","core:app:allow-app-show","core:app:allow-app-hide","shell:default","http:default"]}}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -1,37 +1,16 @@
#![cfg_attr(all(not(debug_assertions), target_os = "windows"), windows_subsystem = "windows")]
use graphite_editor::application::Editor;
use graphite_editor::messages::prelude::*;
// use axum::body::StreamBody;
// use axum::extract::Path;
// use axum::http;
// use axum::response::IntoResponse;
use axum::routing::get;
use axum::Router;
use fern::colors::{Color, ColoredLevelConfig};
// use http::{Response, StatusCode};
use std::cell::RefCell;
// use std::collections::HashMap;
// use std::sync::Arc;
// use std::sync::Mutex;
use graphite_editor::application::Editor;
use graphite_editor::messages::prelude::*;
use graphite_editor::node_graph_executor::GraphRuntimeRequest;
use graphite_editor::node_graph_executor::NODE_RUNTIME;
use graphite_editor::node_graph_executor::*;
use std::sync::Mutex;
thread_local! {
static EDITOR: RefCell<Option<Editor>> = const { RefCell::new(None) };
}
// async fn respond_to(id: Path<String>) -> impl IntoResponse {
// let builder = Response::builder().header("Access-Control-Allow-Origin", "*").status(StatusCode::OK);
// let guard = IMAGES.lock().unwrap();
// let images = guard;
// let image = images.as_ref().unwrap().get(&id.0).unwrap();
// println!("image: {:#?}", image.path);
// let result: Result<Vec<u8>, &str> = Ok((*image.image_data).clone());
// let stream = futures::stream::once(async move { result });
// builder.body(StreamBody::new(stream)).unwrap()
// }
static NODE_RUNTIME_IO: Mutex<Option<NodeRuntimeIO>> = const { Mutex::new(None) };
#[tokio::main]
async fn main() {
@ -42,63 +21,68 @@ async fn main() {
fern::Dispatch::new()
.chain(std::io::stdout())
.level(log::LevelFilter::Trace)
.level_for("naga", log::LevelFilter::Error)
.level_for("wgpu-hal", log::LevelFilter::Error)
.level_for("wgpu_hal", log::LevelFilter::Error)
.level_for("wgpu_core", log::LevelFilter::Error)
.format(move |out, message, record| {
out.finish(format_args!(
"[{}]{} {}",
"[{}]{} {} {}",
// This will color the log level only, not the whole line. Just a touch.
colors.color(record.level()),
chrono::Utc::now().format("[%Y-%m-%d %H:%M:%S]"),
message
message,
record.module_path().unwrap_or("")
))
})
.apply()
.unwrap();
// *(IMAGES.lock().unwrap()) = Some(HashMap::new());
std::thread::spawn(|| loop {
futures::executor::block_on(graphite_editor::node_graph_executor::run_node_graph());
std::thread::sleep(std::time::Duration::from_millis(16))
});
graphite_editor::application::set_uuid_seed(0);
EDITOR.with(|editor| editor.borrow_mut().replace(Editor::new()));
let app = Router::new().route("/", get(|| async { "Hello, World!" }))/*.route("/image/:id", get(respond_to))*/;
// run it with hyper on localhost:3000
let mut runtime_lock = NODE_RUNTIME_IO.lock().unwrap();
*runtime_lock = Some(NodeRuntimeIO::new());
drop(runtime_lock);
let app = Router::new().route("/", get(|| async { "Hello, World!" }));
// Run it with hyper on localhost:3000
tauri::async_runtime::spawn(async {
let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
axum::serve(listener, app).await.unwrap();
});
tauri::Builder::default()
.invoke_handler(tauri::generate_handler![set_random_seed, handle_message])
.plugin(tauri_plugin_http::init())
.plugin(tauri_plugin_shell::init())
.invoke_handler(tauri::generate_handler![poll_node_graph, runtime_message])
.setup(|_app| {
use tauri::Manager;
_app.get_window("main").unwrap().open_devtools();
_app.get_webview_window("main").unwrap().open_devtools();
Ok(())
})
.run(tauri::generate_context!())
.expect("error while running tauri application");
}
#[tauri::command]
fn set_random_seed(seed: f64) {
graphite_editor::application::set_uuid_seed(seed as u64);
fn poll_node_graph() -> String {
let vec: Vec<_> = NODE_RUNTIME_IO.lock().as_mut().unwrap().as_mut().unwrap().receive().collect();
ron::to_string(&vec).unwrap()
}
#[tauri::command]
fn handle_message(message: String) -> String {
let Ok(message) = ron::from_str::<graphite_editor::messages::message::Message>(&message) else {
panic!("Error parsing message: {message}")
fn runtime_message(message: String) -> Result<(), String> {
let message = match ron::from_str(&message) {
Ok(message) => message,
Err(e) => {
log::error!("Failed to deserialize message: {}\nwith error: {}", message, e);
return Err("Failed to deserialize message".into());
}
};
let responses = EDITOR.with(|editor| {
let mut editor = editor.borrow_mut();
editor.as_mut().unwrap().handle_message(message)
});
for response in &responses {
let serialized = ron::to_string(&response.clone()).unwrap();
if let Err(error) = ron::from_str::<FrontendMessage>(&serialized) {
log::error!("Error deserializing message: {error}");
}
}
// Process any `FrontendMessage` responses resulting from the backend processing the dispatched message
let result: Vec<_> = responses.into_iter().collect();
ron::to_string(&result).expect("Failed to serialize FrontendMessage")
let response = NODE_RUNTIME_IO.lock().as_ref().unwrap().as_ref().unwrap().send(message);
response
}
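
For context, the two commands above exchange RON strings rather than JSON. Below is a minimal standalone sketch of that round trip; the `Update` enum is a stand-in for the editor's real message types, not part of the codebase:

```rust
// Standalone model of the RON round trip behind `runtime_message` and
// `poll_node_graph`: updates are serialized as one RON-encoded Vec on the
// native side and deserialized in one batch on the wasm side.
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum Update {
	Log(String),
	Progress(u32),
}

fn main() {
	// Native side: collect pending updates and serialize them as a batch.
	let pending = vec![Update::Log("graph compiled".into()), Update::Progress(100)];
	let payload = ron::to_string(&pending).unwrap();

	// Wasm side: deserialize the batch and handle each update individually.
	let received: Vec<Update> = ron::from_str(&payload).unwrap();
	assert_eq!(received, pending);
}
```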

View file

@ -1,29 +1,23 @@
{
"$schema": "../node_modules/@tauri-apps/cli/schema.json",
"build": {
"beforeBuildCommand": "npm run tauri:build-wasm",
"beforeBuildCommand": "npm run tauri:build",
"beforeDevCommand": "npm run tauri:dev",
"distDir": "../dist",
"devPath": "http://127.0.0.1:8080/"
},
"package": {
"productName": "Graphite",
"version": "0.1.0"
},
"tauri": {
"allowlist": {
"all": true
"frontendDist": "../dist",
"devUrl": "http://127.0.0.1:8080/"
},
"bundle": {
"active": true,
"category": "DeveloperTool",
"copyright": "",
"deb": {
"depends": ["librustc_codegen_spirv"]
},
"targets": "all",
"externalBin": [],
"icon": ["icons/32x32.png", "icons/128x128.png", "icons/128x128@2x.png", "icons/icon.icns", "icons/icon.ico"],
"identifier": "rs.graphite.editor",
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
},
"longDescription": "",
"macOS": {
"entitlements": null,
@ -34,27 +28,32 @@
},
"resources": [],
"shortDescription": "",
"targets": "all",
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
"linux": {
"deb": {
"depends": ["librustc_codegen_spirv"]
}
}
},
"security": {
"csp": null
},
"updater": {
"active": false
},
"productName": "Graphite",
"mainBinaryName": "Graphite",
"version": "0.1.0",
"identifier": "rs.graphite.editor",
"plugins": {},
"app": {
"withGlobalTauri": true,
"windows": [
{
"decorations": false,
"fullscreen": false,
"height": 600,
"height": 1080,
"resizable": true,
"title": "Graphite",
"width": 800
"width": 1920,
"useHttpsScheme": true
}
],
"security": {
"csp": null
}
]
}
}

View file

@ -13,7 +13,7 @@ license = "Apache-2.0"
[features]
default = ["gpu"]
gpu = ["editor/gpu"]
tauri = ["ron"]
tauri = ["ron", "editor/tauri"]
[lib]
crate-type = ["cdylib", "rlib"]
@ -37,6 +37,7 @@ js-sys = { workspace = true }
wasm-bindgen-futures = { workspace = true }
bezier-rs = { workspace = true }
glam = { workspace = true }
futures = { workspace = true }
math-parser = { workspace = true }
wgpu = { workspace = true, features = [
"fragile-send-sync-non-atomic-wasm",
@ -77,4 +78,6 @@ demangle-name-section = true
dwarf-debug-info = true
[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(wasm_bindgen_unstable_test_coverage)'] }
unexpected_cfgs = { level = "warn", check-cfg = [
'cfg(wasm_bindgen_unstable_test_coverage)',
] }

View file

@ -24,14 +24,6 @@ use std::sync::atomic::Ordering;
use std::time::Duration;
use wasm_bindgen::prelude::*;
// /// We directly interface with the updateImage JS function for massively increased performance over serializing and deserializing.
// /// This avoids creating a json with a list millions of numbers long.
// #[wasm_bindgen(module = "/../src/editor.ts")]
// extern "C" {
// // fn dispatchTauri(message: String) -> String;
// fn dispatchTauri(message: String);
// }
/// Set the random seed used by the editor by calling this from JS upon initialization.
/// This is necessary because WASM doesn't have a random number generator.
#[wasm_bindgen(js_name = setRandomSeed)]
@ -184,21 +176,6 @@ impl EditorHandle {
}
}
// #[wasm_bindgen(js_name = tauriResponse)]
// pub fn tauri_response(&self, _message: JsValue) {
// #[cfg(feature = "tauri")]
// match ron::from_str::<Vec<FrontendMessage>>(&_message.as_string().unwrap()) {
// Ok(response) => {
// for message in response {
// self.send_frontend_message_to_js(message);
// }
// }
// Err(error) => {
// log::error!("tauri response: {error:?}\n{_message:?}");
// }
// }
// }
/// Displays a dialog with an error message
#[wasm_bindgen(js_name = errorDialog)]
pub fn error_dialog(&self, title: String, description: String) {

View file

@ -220,7 +220,7 @@ pub enum ApplicationError {
InvalidUrl,
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub enum NodeGraphUpdateMessage {
// ImaginateStatusUpdate,
}
@ -240,7 +240,7 @@ pub trait GetEditorPreferences {
fn use_vello(&self) -> bool;
}
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum ExportFormat {
#[default]
Svg,
@ -251,13 +251,13 @@ pub enum ExportFormat {
Canvas,
}
#[derive(Clone, Copy, Debug, PartialEq, Default)]
#[derive(Debug, Default, Clone, Copy, PartialEq, DynAny, serde::Serialize, serde::Deserialize)]
pub struct TimingInformation {
pub time: f64,
pub animation_time: Duration,
}
#[derive(Debug, Default, Clone, Copy, PartialEq, DynAny)]
#[derive(Debug, Default, Clone, Copy, PartialEq, DynAny, serde::Serialize, serde::Deserialize)]
pub struct RenderConfig {
pub viewport: Footprint,
pub export_format: ExportFormat,

View file

@ -78,7 +78,7 @@ macro_rules! fn_type_fut {
};
}
#[derive(Clone, PartialEq, Eq, Hash, Default)]
#[derive(Clone, PartialEq, Eq, Hash, Default, serde::Serialize, serde::Deserialize)]
pub struct NodeIOTypes {
pub call_argument: Type,
pub return_value: Type,

View file

@ -30,61 +30,6 @@ fn return_true() -> bool {
true
}
// TODO: Eventually remove this document upgrade code
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
#[cfg_attr(feature = "serde", serde(untagged))]
enum NodeInputVersions {
OldNodeInput(OldNodeInput),
NodeInput(NodeInput),
}
// TODO: Eventually remove this document upgrade code
#[derive(Debug)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
pub enum OldNodeInput {
/// A reference to another node in the same network from which this node can receive its input.
Node { node_id: NodeId, output_index: usize, lambda: bool },
/// A hardcoded value that can't change after the graph is compiled. Gets converted into a value node during graph compilation.
Value { tagged_value: TaggedValue, exposed: bool },
/// Input that is provided by the parent network to this document node, instead of from a hardcoded value or another node within the same network.
Network(Type),
/// A Rust source code string. Allows us to insert literal Rust code. Only used for GPU compilation.
/// We can use this whenever we spin up Rustc. Sort of like inline assembly, but because our language is Rust, it acts as inline Rust.
Inline(InlineRust),
}
// TODO: Eventually remove this document upgrade code
#[cfg(feature = "serde")]
fn deserialize_inputs<'de, D>(deserializer: D) -> Result<Vec<NodeInput>, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde::Deserialize;
let input_versions = Vec::<NodeInputVersions>::deserialize(deserializer)?;
let inputs = input_versions
.into_iter()
.map(|old_input| {
let old_input = match old_input {
NodeInputVersions::OldNodeInput(old_input) => old_input,
NodeInputVersions::NodeInput(node_input) => return node_input,
};
match old_input {
OldNodeInput::Node { node_id, output_index, .. } => NodeInput::node(node_id, output_index),
OldNodeInput::Value { tagged_value, exposed } => NodeInput::value(tagged_value, exposed),
OldNodeInput::Network(network_type) => NodeInput::network(network_type, 0),
OldNodeInput::Inline(inline) => NodeInput::Inline(inline),
}
})
.collect();
Ok(inputs)
}
/// An instance of a [`DocumentNodeDefinition`] that has been instantiated in a [`NodeNetwork`].
/// Currently, when an instance is made, it lives all on its own without any lasting connection to the definition.
/// But we will want to change it in the future so it merely references its definition.
@ -99,7 +44,7 @@ pub struct DocumentNode {
/// In the root network, it is resolved when evaluating the borrow tree.
/// Ensure the click target in the encapsulating network is updated when the inputs cause the node shape to change (currently only when exposing/hiding an input)
/// by using network.update_click_target(node_id).
#[cfg_attr(feature = "serde", serde(deserialize_with = "deserialize_inputs"))]
#[cfg_attr(all(feature = "serde", target_arch = "wasm32"), serde(alias = "outputs"))]
pub inputs: Vec<NodeInput>,
/// Manual composition is the methodology by which most nodes are implemented, involving a call argument and upstream inputs.
/// By contrast, automatic composition is an alternative way to handle the composition of nodes as they execute in the graph.
@ -635,7 +580,7 @@ pub struct OldDocumentNode {
///
/// In the root network, it is resolved when evaluating the borrow tree.
/// Ensure the click target in the encapsulating network is updated when the inputs cause the node shape to change (currently only when exposing/hiding an input) by using network.update_click_target(node_id).
#[cfg_attr(feature = "serde", serde(deserialize_with = "deserialize_inputs"))]
#[cfg_attr(all(feature = "serde", target_arch = "wasm32"), serde(alias = "outputs"))]
pub inputs: Vec<NodeInput>,
pub manual_composition: Option<Type>,
// TODO: Remove once this references its definition instead (see above TODO).
@ -745,7 +690,8 @@ fn default_export_metadata() -> (NodeId, IVec2) {
pub struct NodeNetwork {
/// The list of data outputs that are exported from this network to the parent network.
/// Each export is a reference to a node within this network, paired with its output index, that is the source of the network's exported data.
#[cfg_attr(feature = "serde", serde(alias = "outputs", deserialize_with = "deserialize_exports"))] // TODO: Eventually remove this alias document upgrade code
// TODO: Eventually remove this alias document upgrade code
#[cfg_attr(all(feature = "serde", target_arch = "wasm32"), serde(alias = "outputs", deserialize_with = "deserialize_exports"))]
pub exports: Vec<NodeInput>,
// TODO: Instead of storing import types in each NodeInput::Network connection, the types are stored here. This is similar to how types need to be defined for parameters when creating a function in Rust.
// pub import_types: Vec<Type>,
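
The `serde(alias = "outputs")` attributes above keep documents saved under the old field name deserializing correctly. A minimal sketch of that alias-based upgrade path, using a stand-in `Node` struct rather than the real `DocumentNode`:

```rust
// Minimal model of the serde alias migration used above: a document that was
// serialized with the old `outputs` field name still deserializes into the
// renamed `inputs` field. `Node` is a stand-in for the real DocumentNode.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Node {
	#[serde(alias = "outputs")]
	inputs: Vec<u64>,
}

fn main() {
	let old: Node = ron::from_str("(outputs: [1, 2])").unwrap();
	let new: Node = ron::from_str("(inputs: [1, 2])").unwrap();
	assert_eq!(old.inputs, new.inputs);
}
```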

View file

@ -49,8 +49,8 @@ macro_rules! tagged_value {
}
}
impl<'a> TaggedValue {
/// Converts to a Box<dyn DynAny> - this isn't very neat but I'm not sure of a better approach
pub fn to_any(self) -> DAny<'a> {
/// Converts to a Box<dyn DynAny>
pub fn to_dynany(self) -> DAny<'a> {
match self {
Self::None => Box::new(()),
$( Self::$identifier(x) => Box::new(x), )*
@ -59,6 +59,16 @@ macro_rules! tagged_value {
Self::EditorApi(x) => Box::new(x),
}
}
/// Converts to an Arc<dyn Any + Send + Sync + 'static>
pub fn to_any(self) -> Arc<dyn std::any::Any + Send + Sync + 'static> {
match self {
Self::None => Arc::new(()),
$( Self::$identifier(x) => Arc::new(x), )*
Self::RenderOutput(x) => Arc::new(x),
Self::SurfaceFrame(x) => Arc::new(x),
Self::EditorApi(x) => Arc::new(x),
}
}
/// Creates a graphene_core::Type::Concrete(TypeDescriptor { .. }) with the type of the value inside the tagged value
pub fn ty(&self) -> Type {
match self {
@ -84,6 +94,18 @@ macro_rules! tagged_value {
_ => Err(format!("Cannot convert {:?} to TaggedValue", DynAny::type_name(input.as_ref()))),
}
}
/// Attempts to downcast the dynamic type to a tagged value
pub fn try_from_std_any_ref(input: &(dyn std::any::Any)) -> Result<Self, String> {
use std::any::TypeId;
match input.type_id() {
x if x == TypeId::of::<()>() => Ok(TaggedValue::None),
$( x if x == TypeId::of::<$ty>() => Ok(TaggedValue::$identifier(<$ty as Clone>::clone(input.downcast_ref().unwrap()))), )*
x if x == TypeId::of::<RenderOutput>() => Ok(TaggedValue::RenderOutput(RenderOutput::clone(input.downcast_ref().unwrap()))),
x if x == TypeId::of::<graphene_core::SurfaceFrame>() => Ok(TaggedValue::SurfaceFrame(graphene_core::SurfaceFrame::clone(input.downcast_ref().unwrap()))),
_ => Err(format!("Cannot convert {:?} to TaggedValue", std::any::type_name_of_val(input))),
}
}
pub fn from_type(input: &Type) -> Option<Self> {
match input {
Type::Generic(_) => {
@ -135,16 +157,16 @@ macro_rules! tagged_value {
tagged_value! {
// TODO: Eventually remove this migration document upgrade code
#[cfg_attr(feature = "serde", serde(deserialize_with = "graphene_core::raster::image::migrate_image_frame"))]
#[cfg_attr(all(feature = "serde", target_arch = "wasm32"), serde(deserialize_with = "graphene_core::raster::image::migrate_image_frame"))]
ImageFrame(graphene_core::raster::image::ImageFrameTable<Color>),
// TODO: Eventually remove this migration document upgrade code
#[cfg_attr(feature = "serde", serde(deserialize_with = "graphene_core::vector::migrate_vector_data"))]
#[cfg_attr(all(feature = "serde", target_arch = "wasm32"), serde(deserialize_with = "graphene_core::vector::migrate_vector_data"))]
VectorData(graphene_core::vector::VectorDataTable),
// TODO: Eventually remove this migration document upgrade code
#[cfg_attr(feature = "serde", serde(deserialize_with = "graphene_core::migrate_graphic_group"))]
#[cfg_attr(all(feature = "serde", target_arch = "wasm32"), serde(deserialize_with = "graphene_core::migrate_graphic_group"))]
GraphicGroup(graphene_core::GraphicGroupTable),
// TODO: Eventually remove this migration document upgrade code
#[cfg_attr(feature = "serde", serde(deserialize_with = "graphene_core::migrate_artboard_group"))]
#[cfg_attr(all(feature = "serde", target_arch = "wasm32"), serde(deserialize_with = "graphene_core::migrate_artboard_group"))]
ArtboardGroup(graphene_core::ArtboardGroupTable),
GraphicElement(graphene_core::GraphicElement),
Artboard(graphene_core::Artboard),
@ -332,7 +354,7 @@ impl<'input> Node<'input, DAny<'input>> for UpcastNode {
type Output = FutureAny<'input>;
fn eval(&'input self, _: DAny<'input>) -> Self::Output {
Box::pin(async move { self.value.clone().into_inner().to_any() })
Box::pin(async move { self.value.clone().into_inner().to_dynany() })
}
}
impl UpcastNode {
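
A self-contained sketch of the `TypeId` dispatch that `try_from_std_any_ref` relies on, using a small stand-in enum since the real match arms are generated by the `tagged_value!` macro:

```rust
// Stand-in model of try_from_std_any_ref: recover a concrete value from a
// type-erased &dyn Any by comparing TypeIds, then clone the downcast reference.
use std::any::{Any, TypeId};

#[derive(Debug, PartialEq)]
enum Tagged {
	None,
	F64(f64),
	String(String),
}

fn try_from_any_ref(input: &dyn Any) -> Result<Tagged, String> {
	match input.type_id() {
		t if t == TypeId::of::<()>() => Ok(Tagged::None),
		t if t == TypeId::of::<f64>() => Ok(Tagged::F64(*input.downcast_ref::<f64>().unwrap())),
		t if t == TypeId::of::<String>() => Ok(Tagged::String(input.downcast_ref::<String>().unwrap().clone())),
		_ => Err(format!("Cannot convert {} to Tagged", std::any::type_name_of_val(input))),
	}
}

fn main() {
	let erased: Box<dyn Any> = Box::new(2.5_f64);
	assert_eq!(try_from_any_ref(erased.as_ref()), Ok(Tagged::F64(2.5)));
}
```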

View file

@ -532,7 +532,7 @@ impl ProtoNetwork {
Ok(())
}
}
#[derive(Clone, PartialEq)]
#[derive(Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub enum GraphErrorType {
NodeNotFound(NodeId),
InputNodeNotFound(NodeId),
@ -571,7 +571,7 @@ impl core::fmt::Debug for GraphErrorType {
}
}
}
#[derive(Clone, PartialEq)]
#[derive(Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct GraphError {
pub node_path: Vec<NodeId>,
pub identifier: Cow<'static, str>,

View file

@ -68,6 +68,14 @@ pub struct WasmApplicationIo {
static WGPU_AVAILABLE: std::sync::atomic::AtomicI8 = std::sync::atomic::AtomicI8::new(-1);
pub fn wgpu_available() -> Option<bool> {
// Always enable wgpu when running with Tauri
#[cfg(target_arch = "wasm32")]
if let Some(window) = web_sys::window() {
if js_sys::Reflect::get(&window, &wasm_bindgen::JsValue::from_str("__TAURI__")).is_ok() {
return Some(true);
}
}
match WGPU_AVAILABLE.load(::std::sync::atomic::Ordering::SeqCst) {
-1 => None,
0 => Some(false),
@ -92,9 +100,11 @@ impl WasmApplicationIo {
} else {
None
};
#[cfg(not(target_arch = "wasm32"))]
let executor = WgpuExecutor::new().await;
WGPU_AVAILABLE.store(executor.is_some() as i8, ::std::sync::atomic::Ordering::SeqCst);
let mut io = Self {
#[cfg(target_arch = "wasm32")]
ids: AtomicU64::new(0),
@ -103,12 +113,30 @@ impl WasmApplicationIo {
windows: Vec::new(),
resources: HashMap::new(),
};
if cfg!(target_arch = "wasm32") {
let window = io.create_window();
io.windows.push(WindowWrapper { window });
io.resources.insert("null".to_string(), Arc::from(include_bytes!("null.png").to_vec()));
io
}
pub async fn new_offscreen() -> Self {
let executor = WgpuExecutor::new().await;
WGPU_AVAILABLE.store(executor.is_some() as i8, ::std::sync::atomic::Ordering::SeqCst);
// Always enable wgpu when running with Tauri
let mut io = Self {
#[cfg(target_arch = "wasm32")]
ids: AtomicU64::new(0),
#[cfg(feature = "wgpu")]
gpu_executor: executor,
windows: Vec::new(),
resources: HashMap::new(),
};
io.resources.insert("null".to_string(), Arc::from(include_bytes!("null.png").to_vec()));
io
}
}
@ -178,19 +206,22 @@ impl ApplicationIo for WasmApplicationIo {
}
#[cfg(not(target_arch = "wasm32"))]
fn create_window(&self) -> SurfaceHandle<Self::Surface> {
#[cfg(feature = "wayland")]
log::trace!("Spawning window");
#[cfg(not(test))]
use winit::platform::wayland::EventLoopBuilderExtWayland;
#[cfg(feature = "wayland")]
#[cfg(not(test))]
let event_loop = winit::event_loop::EventLoopBuilder::new().with_any_thread(true).build().unwrap();
#[cfg(not(feature = "wayland"))]
#[cfg(test)]
let event_loop = winit::event_loop::EventLoop::new().unwrap();
let window = winit::window::WindowBuilder::new()
.with_title("Graphite")
.with_inner_size(winit::dpi::PhysicalSize::new(800, 600))
.build(&event_loop)
.unwrap();
// self.windows.lock().as_mut().unwrap().push(window.clone());
SurfaceHandle {
window_id: SurfaceId(window.id().into()),
surface: Arc::new(window),
@ -271,7 +302,7 @@ impl ApplicationIo for WasmApplicationIo {
pub type WasmSurfaceHandle = SurfaceHandle<wgpu_executor::Window>;
pub type WasmSurfaceHandleFrame = SurfaceHandleFrame<wgpu_executor::Window>;
#[derive(Clone, Debug, Default, PartialEq, Hash, specta::Type)]
#[derive(Clone, Debug, PartialEq, Hash, specta::Type)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EditorPreferences {
// pub imaginate_hostname: String,
@ -287,6 +318,18 @@ impl graphene_core::application_io::GetEditorPreferences for EditorPreferences {
}
}
impl Default for EditorPreferences {
fn default() -> Self {
Self {
// imaginate_hostname: "http://localhost:7860/".into(),
#[cfg(target_arch = "wasm32")]
use_vello: false,
#[cfg(not(target_arch = "wasm32"))]
use_vello: true,
}
}
}
unsafe impl dyn_any::StaticType for EditorPreferences {
type Static = EditorPreferences;
}
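
The `WGPU_AVAILABLE` flag above acts as a tri-state cache: -1 means the GPU has not been probed yet, 0 means unavailable, and 1 means available. A minimal standalone sketch of that pattern:

```rust
// Tri-state cache mirroring WGPU_AVAILABLE: -1 means "not probed yet",
// 0 means "probed, unavailable", 1 means "probed, available".
use std::sync::atomic::{AtomicI8, Ordering};

static GPU_AVAILABLE: AtomicI8 = AtomicI8::new(-1);

fn gpu_available() -> Option<bool> {
	match GPU_AVAILABLE.load(Ordering::SeqCst) {
		-1 => None,
		0 => Some(false),
		_ => Some(true),
	}
}

fn record_probe(result: bool) {
	GPU_AVAILABLE.store(result as i8, Ordering::SeqCst);
}

fn main() {
	assert_eq!(gpu_available(), None);
	record_probe(true);
	assert_eq!(gpu_available(), Some(true));
}
```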

View file

@ -30,7 +30,7 @@ use web_sys::{CanvasRenderingContext2d, HtmlCanvasElement};
#[node_macro::node(category("Debug: GPU"))]
async fn create_surface<'a: 'n>(_: impl Ctx, editor: &'a WasmEditorApi) -> Arc<WasmSurfaceHandle> {
Arc::new(editor.application_io.as_ref().unwrap().create_window())
return Arc::new(editor.application_io.as_ref().unwrap().create_window());
}
// #[cfg(target_arch = "wasm32")]
@ -254,11 +254,11 @@ async fn render<'a: 'n, T: 'n + GraphicElementRendered + WasmNotSend>(
let data = data.eval(ctx.clone()).await;
let editor_api = editor_api.eval(None).await;
#[cfg(all(feature = "vello", target_arch = "wasm32"))]
#[cfg(all(feature = "vello", not(test)))]
let surface_handle = _surface_handle.eval(None).await;
let use_vello = editor_api.editor_preferences.use_vello();
#[cfg(all(feature = "vello", target_arch = "wasm32"))]
#[cfg(all(feature = "vello", not(test)))]
let use_vello = use_vello && surface_handle.is_some();
let mut metadata = RenderMetadata {
@ -274,12 +274,12 @@ async fn render<'a: 'n, T: 'n + GraphicElementRendered + WasmNotSend>(
ExportFormat::Svg => render_svg(data, SvgRender::new(), render_params, footprint),
ExportFormat::Canvas => {
if use_vello && editor_api.application_io.as_ref().unwrap().gpu_executor().is_some() {
#[cfg(all(feature = "vello", target_arch = "wasm32"))]
#[cfg(all(feature = "vello", not(test)))]
return RenderOutput {
data: render_canvas(render_config, data, editor_api, surface_handle.unwrap(), render_params).await,
metadata,
};
#[cfg(not(all(feature = "vello", target_arch = "wasm32")))]
#[cfg(any(not(feature = "vello"), test))]
render_svg(data, SvgRender::new(), render_params, footprint)
} else {
render_svg(data, SvgRender::new(), render_params, footprint)

View file

@ -13,7 +13,7 @@ impl Context {
pub async fn new() -> Option<Self> {
// Instantiates instance of WebGPU
let instance_descriptor = wgpu::InstanceDescriptor {
backends: wgpu::Backends::VULKAN | wgpu::Backends::BROWSER_WEBGPU,
backends: wgpu::Backends::all(),
..Default::default()
};
let instance = wgpu::Instance::new(instance_descriptor);
@ -33,10 +33,12 @@ impl Context {
.request_device(
&wgpu::DeviceDescriptor {
label: None,
#[cfg(not(feature = "passthrough"))]
// #[cfg(not(feature = "passthrough"))]
required_features: wgpu::Features::empty(),
#[cfg(feature = "passthrough")]
required_features: wgpu::Features::SPIRV_SHADER_PASSTHROUGH,
// Currently disabled because not all backends support passthrough.
// TODO: Re-enable this only when a Vulkan adapter is available
// #[cfg(feature = "passthrough")]
// required_features: wgpu::Features::SPIRV_SHADER_PASSTHROUGH,
required_limits,
memory_hints: Default::default(),
},

View file

@ -2,6 +2,16 @@
"description": "A convenience package for calling the real package.json in ./frontend",
"private": true,
"scripts": {
"start": "cd frontend && npm start"
"---------- DEV SERVER ----------": "",
"start": "cd frontend && npm start",
"profiling": "cd frontend && npm run profiling",
"production": "cd frontend && npm run production",
"---------- BUILDS ----------": "",
"build-dev": "cd frontend && npm run build-dev",
"build-profiling": "cd frontend && npm run build-profiling",
"build": "cd frontend && npm run build",
"---------- UTILITIES ----------": "",
"lint": "cd frontend && npm run lint",
"lint-fix": "cd frontend && npm run lint-fix"
}
}

View file

@ -49,9 +49,6 @@ in
wasm-bindgen-cli
vulkan-loader
libxkbcommon
llvm
gcc-unwrapped.lib
llvmPackages.libcxxStdenv
pkg-config
# used for profiling
gnuplot
@ -59,11 +56,18 @@ in
cargo-flamegraph
# For Tauri
openssl
at-spi2-atk
atkmm
cairo
gdk-pixbuf
glib
gtk3
libsoup
webkitgtk
harfbuzz
librsvg
libsoup_3
pango
webkitgtk_4_1
openssl
# For Rawkit tests
libraw

View file

@ -26,26 +26,7 @@ cargo install -f wasm-bindgen-cli@0.2.100
Regarding the last one: you'll likely get faster build times if you manually install that specific version of `wasm-bindgen-cli`. It is supposed to be installed automatically, but a version mismatch causes it to reinstall on every recompilation. It may need to be manually updated periodically to match the version of the `wasm-bindgen` dependency in [`Cargo.toml`](https://github.com/GraphiteEditor/Graphite/blob/master/Cargo.toml).
<details>
<summary>Linux users: click here</summary>
On Linux, you likely need to install this set of additional packages which are required by Tauri, even if you're just building the web app:
```sh
# On Debian-based (Ubuntu, Mint, etc.) distributions:
sudo apt install libgtk-3-dev libsoup2.4-dev libjavascriptcoregtk-4.0-dev libwebkit2gtk-4.0-dev
# On Fedora-based (RHEL, CentOS, etc.) distributions:
sudo dnf install gtk3-devel libsoup-devel javascriptcoregtk4.0-devel webkit2gtk4.0-devel
# On OpenSUSE-based distributions:
sudo zypper install gtk3-devel libsoup-devel webkit2gtk3-soup2-devel
# On NixOS or when using the Nix package manager:
nix-shell
```
</details>
Lastly, if you intend to develop using the Tauri desktop app build target, obtain [Tauri's dependencies](https://v2.tauri.app/start/prerequisites/). This is not the usual setup for most contributors, so you will know if you need it.
## Repository
@ -66,7 +47,17 @@ npm start
This spins up the dev server at <http://localhost:8080> with a file watcher that performs hot reloading of the web page. You should be able to start the server, edit and save web and Rust code, and shut it down by double pressing <kbd>Ctrl</kbd><kbd>C</kbd>. You may sometimes need to reload the browser's page if hot reloading didn't behave right; always refresh when Rust recompiles.
This method compiles Graphite code in debug mode which includes debug symbols for viewing function names in stack traces. But be aware, it runs slower and the Wasm binary is much larger. Having your browser's developer tools open will also significantly impact performance in both debug and release builds, so it's best to close that when not in use.
This method compiles Graphite code in debug mode which includes debug symbols for viewing function names in stack traces. But be aware, it runs slower and the Wasm binary is much larger. (Having your browser's developer tools open will also significantly impact performance in both debug and release builds, so it's best to close that when not in use.)
To run the dev server in optimized mode, which is faster and produces a smaller Wasm binary:
```sh
# Includes debug symbols
npm run profiling
# Excludes (most) debug symbols, used in release builds
npm run production
```
<details>
<summary>Production build instructions: click here</summary>