Replace the image layer type with an Image node (#948)

* Use builder pattern for widgets

* Arguments to new function

* Add node graph when dragging in image

* Fix duplicate import

* Skip processing under node graph frame if unused

* Reduce node graph rerenders

* DUPLICATE ALL frontend changes into other frontend

* DUPLICATE more changes to another frontend

* Code review

* Allow importing SVG files as bitmaps

Co-authored-by: Keavon Chambers <keavon@keavon.com>
0HyperCube 2023-01-27 10:01:09 +00:00 committed by Keavon Chambers
parent 66e8325362
commit 64e62699fc
32 changed files with 444 additions and 261 deletions

Cargo.lock (generated), 2 lines changed
View file

@ -1744,6 +1744,7 @@ version = "0.0.0"
dependencies = [
"bezier-rs",
"graph-craft",
"graphene-core",
"graphite-document-legacy",
"graphite-editor",
"js-sys",
@ -1762,6 +1763,7 @@ version = "0.0.0"
dependencies = [
"bezier-rs",
"graph-craft",
"graphene-core",
"graphite-document-legacy",
"graphite-editor",
"js-sys",

View file

@ -1045,7 +1045,7 @@ impl Document {
match &mut layer.data {
LayerDataType::Shape(s) => s.style = style,
LayerDataType::Text(text) => text.path_style = style,
_ => return Err(DocumentError::NotAShape),
_ => return Err(DocumentError::NotShape),
}
self.mark_as_dirty(&path)?;
Some([vec![DocumentChanged, LayerChanged { path: path.clone() }], update_thumbnails_upstream(&path)].concat())

View file

@ -9,10 +9,9 @@ pub enum DocumentError {
IndexOutOfBounds,
NotAFolder,
NonReorderableSelection,
NotAShape,
NotShape,
NotText,
NotAnImage,
NotAnImaginate,
NotNodeGraph,
InvalidFile(String),
BooleanOperationError(BooleanOperationError),
}

View file

@ -488,21 +488,21 @@ impl Layer {
}
}
/// Get a mutable reference to the Image element wrapped by the layer.
/// This operation will fail if the [Layer type](Layer::data) is not `LayerDataType::Image`.
pub fn as_image_mut(&mut self) -> Result<&mut ImageLayer, DocumentError> {
/// Get a mutable reference to the NodeNetwork
/// This operation will fail if the [Layer type](Layer::data) is not `LayerDataType::NodeGraphFrame`.
pub fn as_node_graph_mut(&mut self) -> Result<&mut graph_craft::document::NodeNetwork, DocumentError> {
match &mut self.data {
LayerDataType::Image(img) => Ok(img),
_ => Err(DocumentError::NotAnImage),
LayerDataType::NodeGraphFrame(frame) => Ok(&mut frame.network),
_ => Err(DocumentError::NotNodeGraph),
}
}
/// Get a reference to the Image element wrapped by the layer.
/// This operation will fail if the [Layer type](Layer::data) is not `LayerDataType::Image`.
pub fn as_image(&self) -> Result<&ImageLayer, DocumentError> {
/// Get a reference to the NodeNetwork
/// This operation will fail if the [Layer type](Layer::data) is not `LayerDataType::NodeGraphFrame`.
pub fn as_node_graph(&self) -> Result<&graph_craft::document::NodeNetwork, DocumentError> {
match &self.data {
LayerDataType::Image(img) => Ok(img),
_ => Err(DocumentError::NotAnImage),
LayerDataType::NodeGraphFrame(frame) => Ok(&frame.network),
_ => Err(DocumentError::NotNodeGraph),
}
}
@ -510,7 +510,7 @@ impl Layer {
match &self.data {
LayerDataType::Shape(s) => Ok(&s.style),
LayerDataType::Text(t) => Ok(&t.path_style),
_ => Err(DocumentError::NotAShape),
_ => Err(DocumentError::NotShape),
}
}
@ -518,7 +518,7 @@ impl Layer {
match &mut self.data {
LayerDataType::Shape(s) => Ok(&mut s.style),
LayerDataType::Text(t) => Ok(&mut t.path_style),
_ => Err(DocumentError::NotAShape),
_ => Err(DocumentError::NotShape),
}
}
}
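
For orientation, here is a minimal sketch of how a caller might use these new accessors. It assumes a Layer value obtained elsewhere and only the crate-local items visible in this diff; the helper function names are illustrative, not part of this commit.

// Hedged sketch: reading and editing the NodeNetwork stored in a NodeGraphFrame layer.
fn count_nodes(layer: &Layer) -> Result<usize, DocumentError> {
    // Returns Err(DocumentError::NotNodeGraph) for Shape, Text, and other layer types.
    let network = layer.as_node_graph()?;
    Ok(network.nodes.len())
}

fn rename_output_node(layer: &mut Layer, new_name: &str) -> Result<(), DocumentError> {
    let network = layer.as_node_graph_mut()?;
    if let Some(output_node) = network.nodes.get_mut(&network.output) {
        output_node.name = new_name.to_string();
    }
    Ok(())
}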

View file

@ -10,6 +10,7 @@ use document_legacy::layers::style::ViewMode;
use document_legacy::LayerId;
use document_legacy::Operation as DocumentOperation;
use graph_craft::document::NodeId;
use graphene_core::raster::Image;
use serde::{Deserialize, Serialize};
#[remain::sorted]
@ -115,8 +116,7 @@ pub enum DocumentMessage {
delta_y: f64,
},
PasteImage {
mime: String,
image_data: Vec<u8>,
image: Image,
mouse: Option<(f64, f64)>,
},
Redo,

View file

@ -546,25 +546,36 @@ impl MessageHandler<DocumentMessage, (u64, &InputPreprocessorMessageHandler, &Pe
}
responses.push_back(BroadcastEvent::DocumentIsDirty.into());
}
PasteImage { mime, image_data, mouse } => {
PasteImage { image, mouse } => {
let image_size = DVec2::new(image.width as f64, image.height as f64);
responses.push_back(DocumentMessage::StartTransaction.into());
let path = vec![generate_uuid()];
responses.push_back(
DocumentOperation::AddImage {
path: path.clone(),
transform: DAffine2::ZERO.to_cols_array(),
insert_index: -1,
image_data: image_data.clone(),
mime: mime.clone(),
}
.into(),
let image_node_id = 2;
let mut network = graph_craft::document::NodeNetwork::new_network(32, image_node_id);
let Some(image_node_type) = crate::messages::portfolio::document::node_graph::resolve_document_node_type("Image") else {
warn!("Image node should be in registry");
return;
};
network.nodes.insert(
image_node_id,
graph_craft::document::DocumentNode {
name: image_node_type.name.to_string(),
inputs: vec![graph_craft::document::NodeInput::value(graph_craft::document::value::TaggedValue::Image(image), false)],
implementation: image_node_type.generate_implementation(),
metadata: graph_craft::document::DocumentNodeMetadata { position: (20, 4).into() },
},
);
let image_data = std::sync::Arc::new(image_data);
responses.push_back(
FrontendMessage::UpdateImageData {
document_id,
image_data: vec![FrontendImageData { path: path.clone(), image_data, mime }],
DocumentOperation::AddNodeGraphFrame {
path: path.clone(),
insert_index: -1,
transform: DAffine2::ZERO.to_cols_array(),
network,
}
.into(),
);
@ -575,9 +586,21 @@ impl MessageHandler<DocumentMessage, (u64, &InputPreprocessorMessageHandler, &Pe
.into(),
);
let mouse = mouse.map_or(ipp.viewport_bounds.center(), |pos| pos.into());
let transform = DAffine2::from_translation(mouse - ipp.viewport_bounds.top_left).to_cols_array();
responses.push_back(DocumentOperation::SetLayerTransformInViewport { path, transform }.into());
// Transform of parent folder
let to_parent_folder = self.document_legacy.generate_transform_across_scope(&path[..path.len() - 1], None).unwrap_or_default();
// Align the layer with the mouse or center of viewport
let viewport_location = mouse.map_or(ipp.viewport_bounds.center(), |pos| pos.into());
let center_in_viewport = DAffine2::from_translation(viewport_location - ipp.viewport_bounds.top_left);
let center_in_viewport_layerspace = to_parent_folder.inverse() * center_in_viewport;
// Make layer the size of the image
let fit_image_size = DAffine2::from_scale_angle_translation(image_size, 0., image_size / -2.);
let transform = (center_in_viewport_layerspace * fit_image_size).to_cols_array();
responses.push_back(DocumentOperation::SetLayerTransform { path, transform }.into());
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
}
Redo => {
responses.push_back(SelectToolMessage::Abort.into());
@ -949,6 +972,24 @@ impl DocumentMessageHandler {
// Prepare the node graph input image
let Some(node_network) = self.document_legacy.layer(&layer_path).ok().and_then(|layer|layer.as_node_graph().ok()) else {
return None;
};
// Skip processing under node graph frame input if not connected
if !node_network.connected_to_output(node_network.inputs[0]) {
return Some(
PortfolioMessage::ProcessNodeGraphFrame {
document_id,
layer_path,
image_data: Default::default(),
size: (0, 0),
imaginate_node,
}
.into(),
);
}
// Calculate the size of the region to be exported
let old_transforms = self.remove_document_transform();
@ -1366,7 +1407,7 @@ impl DocumentMessageHandler {
responses.push_back(DocumentMessage::LayerChanged { affected_layer_path: layer.clone() }.into())
}
responses.push_back(NodeGraphMessage::SendGraph.into());
responses.push_back(NodeGraphMessage::SendGraph { should_rerender: true }.into());
Ok(())
}
@ -1400,7 +1441,7 @@ impl DocumentMessageHandler {
responses.push_back(DocumentMessage::LayerChanged { affected_layer_path: layer.clone() }.into())
}
responses.push_back(NodeGraphMessage::SendGraph.into());
responses.push_back(NodeGraphMessage::SendGraph { should_rerender: true }.into());
Ok(())
}
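
To make the transform arithmetic in the PasteImage arm above easier to follow, here is a self-contained sketch of the same computation using glam. The function name and parameters are illustrative; in the handler the parent-folder transform comes from generate_transform_across_scope rather than being passed in.

use glam::{DAffine2, DVec2};

// Hedged sketch: the layer transform for a pasted image, sized to the image and centered on the drop point.
fn pasted_image_transform(image_size: DVec2, drop_point: DVec2, viewport_top_left: DVec2, to_parent_folder: DAffine2) -> [f64; 6] {
    // Translation that centers the layer on the drop point, converted into the parent folder's space
    let center_in_viewport = DAffine2::from_translation(drop_point - viewport_top_left);
    let center_in_layerspace = to_parent_folder.inverse() * center_in_viewport;
    // Scale the layer to the image's pixel dimensions, keeping it centered on its own midpoint
    let fit_image_size = DAffine2::from_scale_angle_translation(image_size, 0., image_size / -2.);
    (center_in_layerspace * fit_image_size).to_cols_array()
}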

View file

@ -61,7 +61,9 @@ pub enum NodeGraphMessage {
SelectNodes {
nodes: Vec<NodeId>,
},
SendGraph,
SendGraph {
should_rerender: bool,
},
SetDrawing {
new_drawing: bool,
},

View file

@ -1,18 +1,17 @@
pub use self::document_node_types::*;
use crate::messages::input_mapper::utility_types::macros::action_keys;
use crate::messages::layout::utility_types::layout_widget::{Layout, LayoutGroup, Widget, WidgetCallback, WidgetHolder, WidgetLayout};
use crate::messages::layout::utility_types::widgets::button_widgets::{BreadcrumbTrailButtons, TextButton};
use crate::messages::prelude::*;
use document_legacy::document::Document;
use document_legacy::layers::layer_info::{LayerDataType, LayerDataTypeDiscriminant};
use document_legacy::layers::layer_info::LayerDataTypeDiscriminant;
use document_legacy::layers::nodegraph_layer::NodeGraphFrameLayer;
use document_legacy::LayerId;
use graph_craft::document::value::TaggedValue;
use graph_craft::document::{DocumentNode, DocumentNodeImplementation, DocumentNodeMetadata, NodeId, NodeInput, NodeNetwork};
use graph_craft::document::{DocumentNode, DocumentNodeImplementation, NodeId, NodeInput, NodeNetwork};
mod document_node_types;
mod node_properties;
pub use self::document_node_types::*;
use glam::IVec2;
@ -112,37 +111,21 @@ pub struct NodeGraphMessageHandler {
impl NodeGraphMessageHandler {
fn get_root_network<'a>(&self, document: &'a Document) -> Option<&'a graph_craft::document::NodeNetwork> {
self.layer_path.as_ref().and_then(|path| document.layer(path).ok()).and_then(|layer| match &layer.data {
LayerDataType::NodeGraphFrame(n) => Some(&n.network),
_ => None,
})
self.layer_path.as_ref().and_then(|path| document.layer(path).ok()).and_then(|layer| layer.as_node_graph().ok())
}
fn get_root_network_mut<'a>(&self, document: &'a mut Document) -> Option<&'a mut graph_craft::document::NodeNetwork> {
self.layer_path.as_ref().and_then(|path| document.layer_mut(path).ok()).and_then(|layer| match &mut layer.data {
LayerDataType::NodeGraphFrame(n) => Some(&mut n.network),
_ => None,
})
self.layer_path.as_ref().and_then(|path| document.layer_mut(path).ok()).and_then(|layer| layer.as_node_graph_mut().ok())
}
/// Get the active graph_craft NodeNetwork struct
fn get_active_network<'a>(&self, document: &'a Document) -> Option<&'a graph_craft::document::NodeNetwork> {
let mut network = self.get_root_network(document);
for segement in &self.nested_path {
network = network.and_then(|network| network.nodes.get(segement)).and_then(|node| node.implementation.get_network());
}
network
self.get_root_network(document).and_then(|network| network.nested_network(&self.nested_path))
}
/// Get the active graph_craft NodeNetwork struct
fn get_active_network_mut<'a>(&self, document: &'a mut Document) -> Option<&'a mut graph_craft::document::NodeNetwork> {
let mut network = self.get_root_network_mut(document);
for segement in &self.nested_path {
network = network.and_then(|network| network.nodes.get_mut(segement)).and_then(|node| node.implementation.get_network_mut());
}
network
self.get_root_network_mut(document).and_then(|network| network.nested_network_mut(&self.nested_path))
}
/// Send the cached layout for the bar at the top of the node panel to the frontend
@ -239,8 +222,8 @@ impl NodeGraphMessageHandler {
pub fn collate_properties(&self, node_graph_frame: &NodeGraphFrameLayer, context: &mut NodePropertiesContext, sections: &mut Vec<LayoutGroup>) {
let mut network = &node_graph_frame.network;
for segement in &self.nested_path {
network = network.nodes.get(segement).and_then(|node| node.implementation.get_network()).unwrap();
for segment in &self.nested_path {
network = network.nodes.get(segment).and_then(|node| node.implementation.get_network()).unwrap();
}
// If empty, show all nodes in the network starting with the output
@ -293,7 +276,7 @@ impl NodeGraphMessageHandler {
for (id, node) in &network.nodes {
let Some(node_type) = document_node_types::resolve_document_node_type(&node.name) else {
warn!("Node '{}' does not exist in library", node.name);
continue
continue;
};
nodes.push(FrontendNode {
id: *id,
@ -442,7 +425,8 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
let input = NodeInput::Node(output_node);
responses.push_back(NodeGraphMessage::SetNodeInput { node_id, input_index, input }.into());
responses.push_back(NodeGraphMessage::SendGraph.into());
let should_rerender = network.connected_to_output(node_id);
responses.push_back(NodeGraphMessage::SendGraph { should_rerender }.into());
}
NodeGraphMessage::Copy => {
let Some(network) = self.get_active_network(document) else {
@ -468,38 +452,17 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
return;
};
let num_inputs = document_node_type.inputs.len();
let inner_network = NodeNetwork {
inputs: (0..num_inputs).map(|_| 0).collect(),
output: 0,
nodes: [(
0,
DocumentNode {
name: format!("{}_impl", document_node_type.name),
// TODO: Allow inserting nodes that contain other nodes.
implementation: DocumentNodeImplementation::Unresolved(document_node_type.identifier.clone()),
inputs: (0..num_inputs).map(|_| NodeInput::Network).collect(),
metadata: DocumentNodeMetadata::default(),
},
)]
.into_iter()
.collect(),
..Default::default()
};
responses.push_back(DocumentMessage::StartTransaction.into());
let document_node = DocumentNode {
name: node_type.clone(),
inputs: document_node_type.inputs.iter().map(|input| input.default.clone()).collect(),
// TODO: Allow inserting nodes that contain other nodes.
implementation: DocumentNodeImplementation::Network(inner_network),
implementation: document_node_type.generate_implementation(),
metadata: graph_craft::document::DocumentNodeMetadata { position: (x, y).into() },
};
responses.push_back(NodeGraphMessage::InsertNode { node_id, document_node }.into());
responses.push_back(NodeGraphMessage::SendGraph.into());
responses.push_back(NodeGraphMessage::SendGraph { should_rerender: false }.into());
}
NodeGraphMessage::Cut => {
responses.push_back(NodeGraphMessage::Copy.into());
@ -518,8 +481,14 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
responses.push_back(NodeGraphMessage::DeleteNode { node_id }.into());
}
responses.push_back(NodeGraphMessage::SendGraph.into());
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
responses.push_back(NodeGraphMessage::SendGraph { should_rerender: false }.into());
if let Some(network) = self.get_active_network(document) {
// Only generate node graph if one of the selected nodes is connected to the output
if self.selected_nodes.iter().any(|&node_id| network.connected_to_output(node_id)) {
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
}
}
}
NodeGraphMessage::DisconnectNodes { node_id, input_index } => {
let Some(network) = self.get_active_network(document) else {
@ -540,7 +509,8 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
let input = node_type.inputs[input_index].default.clone();
responses.push_back(NodeGraphMessage::SetNodeInput { node_id, input_index, input }.into());
responses.push_back(NodeGraphMessage::SendGraph.into());
let should_rerender = network.connected_to_output(node_id);
responses.push_back(NodeGraphMessage::SendGraph { should_rerender }.into());
}
NodeGraphMessage::DoubleClickNode { node } => {
if let Some(network) = self.get_active_network(document) {
@ -615,7 +585,8 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
}
responses.push_back(NodeGraphMessage::SetNodeInput { node_id, input_index, input }.into());
responses.push_back(NodeGraphMessage::SendGraph.into());
let should_rerender = network.connected_to_output(node_id);
responses.push_back(NodeGraphMessage::SendGraph { should_rerender }.into());
responses.push_back(PropertiesPanelMessage::ResendActiveProperties.into());
}
NodeGraphMessage::InsertNode { node_id, document_node } => {
@ -697,7 +668,7 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
let nodes = new_ids.values().copied().collect();
responses.push_back(NodeGraphMessage::SelectNodes { nodes }.into());
responses.push_back(NodeGraphMessage::SendGraph.into());
responses.push_back(NodeGraphMessage::SendGraph { should_rerender: false }.into());
}
NodeGraphMessage::SelectNodes { nodes } => {
self.selected_nodes = nodes;
@ -705,10 +676,12 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
self.update_selected(document, responses);
responses.push_back(PropertiesPanelMessage::ResendActiveProperties.into());
}
NodeGraphMessage::SendGraph => {
NodeGraphMessage::SendGraph { should_rerender } => {
if let Some(network) = self.get_active_network(document) {
Self::send_graph(network, responses);
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
if should_rerender {
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
}
}
}
NodeGraphMessage::SetDrawing { new_drawing } => {
@ -738,7 +711,7 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
let input = NodeInput::Value { tagged_value: value, exposed: false };
responses.push_back(NodeGraphMessage::SetNodeInput { node_id, input_index, input }.into());
responses.push_back(PropertiesPanelMessage::ResendActiveProperties.into());
if node.name != "Imaginate" || input_index == 0 {
if (node.name != "Imaginate" || input_index == 0) && network.connected_to_output(node_id) {
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
}
}
@ -757,18 +730,16 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
input_index,
value,
} => {
let mut network = document.layer_mut(&layer_path).ok().and_then(|layer| match &mut layer.data {
LayerDataType::NodeGraphFrame(n) => Some(&mut n.network),
_ => None,
});
let Some((node_id, node_path)) = node_path.split_last() else {
error!("Node path is empty");
return
return;
};
for segement in node_path {
network = network.and_then(|network| network.nodes.get_mut(segement)).and_then(|node| node.implementation.get_network_mut());
}
let network = document
.layer_mut(&layer_path)
.ok()
.and_then(|layer| layer.as_node_graph_mut().ok())
.and_then(|network| network.nested_network_mut(node_path));
if let Some(network) = network {
if let Some(node) = network.nodes.get_mut(node_id) {
@ -777,7 +748,9 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
node.inputs.extend(((node.inputs.len() - 1)..input_index).map(|_| NodeInput::Network));
}
node.inputs[input_index] = NodeInput::Value { tagged_value: value, exposed: false };
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
if network.connected_to_output(*node_id) {
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
}
}
}
}
@ -826,7 +799,7 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
stack.extend(outwards_links.get(&id).unwrap_or(&Vec::new()).iter().copied())
}
}
responses.push_back(NodeGraphMessage::SendGraph.into());
responses.push_back(NodeGraphMessage::SendGraph { should_rerender: false }.into());
}
NodeGraphMessage::ToggleHidden => {
responses.push_back(DocumentMessage::StartTransaction.into());
@ -844,9 +817,13 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
network.disabled.extend(self.selected_nodes.iter().filter(|&id| !network.inputs.contains(id) && original_output != *id));
}
Self::send_graph(network, responses);
// Only generate node graph if one of the selected nodes is connected to the output
if self.selected_nodes.iter().any(|&node_id| network.connected_to_output(node_id)) {
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
}
}
self.update_selection_action_buttons(document, responses);
responses.push_back(DocumentMessage::NodeGraphFrameGenerate.into());
}
NodeGraphMessage::TogglePreview { node_id } => {
responses.push_back(DocumentMessage::StartTransaction.into());
@ -860,6 +837,8 @@ impl MessageHandler<NodeGraphMessage, (&mut Document, &mut dyn Iterator<Item = &
network.output = node_id;
} else if let Some(output) = network.previous_output.take() {
network.output = output
} else {
return;
}
Self::send_graph(network, responses);
}

View file

@ -3,7 +3,7 @@ use crate::messages::layout::utility_types::layout_widget::LayoutGroup;
use graph_craft::concrete;
use graph_craft::document::value::*;
use graph_craft::document::{DocumentNode, NodeId, NodeInput};
use graph_craft::document::*;
use graph_craft::imaginate_input::ImaginateSamplingMethod;
use graph_craft::proto::{NodeIdentifier, Type};
use graphene_core::raster::Image;
@ -61,11 +61,19 @@ static DOCUMENT_NODE_TYPES: &[DocumentNodeType] = &[
default: NodeInput::Node(0),
}],
outputs: &[FrontendGraphDataType::General],
properties: |_document_node, _node_id, _context| node_properties::string_properties("The identity node simply returns the input".to_string()),
properties: |_document_node, _node_id, _context| node_properties::string_properties("The identity node simply returns the input"),
},
DocumentNodeType {
name: "Image",
category: "Ignore",
identifier: NodeIdentifier::new("graphene_core::ops::IdNode", &[concrete!("Any<'_>")]),
inputs: &[DocumentInputType::new("Image", TaggedValue::Image(Image::empty()), false)],
outputs: &[FrontendGraphDataType::Raster],
properties: |_document_node, _node_id, _context| node_properties::string_properties("A bitmap image embedded in this node"),
},
DocumentNodeType {
name: "Input",
category: "Meta",
category: "Ignore",
identifier: NodeIdentifier::new("graphene_core::ops::IdNode", &[concrete!("Any<'_>")]),
inputs: &[DocumentInputType {
name: "In",
@ -77,7 +85,7 @@ static DOCUMENT_NODE_TYPES: &[DocumentNodeType] = &[
},
DocumentNodeType {
name: "Output",
category: "Meta",
category: "Ignore",
identifier: NodeIdentifier::new("graphene_core::ops::IdNode", &[concrete!("Any<'_>")]),
inputs: &[DocumentInputType {
name: "In",
@ -337,7 +345,32 @@ pub fn resolve_document_node_type(name: &str) -> Option<&DocumentNodeType> {
pub fn collect_node_types() -> Vec<FrontendNodeType> {
DOCUMENT_NODE_TYPES
.iter()
.filter(|node_type| !matches!(node_type.name, "Input" | "Output"))
.filter(|node_type| !node_type.category.eq_ignore_ascii_case("ignore"))
.map(|node_type| FrontendNodeType::new(node_type.name, node_type.category))
.collect()
}
impl DocumentNodeType {
/// Generate a [`DocumentNodeImplementation`] from this node type, using a nested network.
pub fn generate_implementation(&self) -> DocumentNodeImplementation {
let number_of_inputs = self.inputs.len();
let network = NodeNetwork {
inputs: (0..number_of_inputs).map(|_| 0).collect(),
output: 0,
nodes: [(
0,
DocumentNode {
name: format!("{}_impl", self.name),
// TODO: Allow inserting nodes that contain other nodes.
implementation: DocumentNodeImplementation::Unresolved(self.identifier.clone()),
inputs: (0..number_of_inputs).map(|_| NodeInput::Network).collect(),
metadata: DocumentNodeMetadata::default(),
},
)]
.into_iter()
.collect(),
..Default::default()
};
DocumentNodeImplementation::Network(network)
}
}
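
As an illustration of the new generate_implementation helper, the sketch below builds a DocumentNode for the registered "Image" node type, mirroring what the PasteImage handler does. It assumes it lives alongside resolve_document_node_type in this module; the empty image input is a placeholder value.

use graph_craft::document::value::TaggedValue;
use graph_craft::document::{DocumentNode, DocumentNodeMetadata, NodeInput};
use graphene_core::raster::Image;

// Hedged sketch: resolve the "Image" node type from the registry and turn it into a DocumentNode.
fn make_image_node() -> Option<DocumentNode> {
    let image_node_type = resolve_document_node_type("Image")?;
    Some(DocumentNode {
        name: image_node_type.name.to_string(),
        inputs: vec![NodeInput::value(TaggedValue::Image(Image::empty()), false)],
        implementation: image_node_type.generate_implementation(),
        metadata: DocumentNodeMetadata { position: (20, 4).into() },
    })
}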

View file

@ -633,7 +633,7 @@ pub fn imaginate_properties(document_node: &DocumentNode, node_id: NodeId, conte
LayoutGroup::Row { widgets }.with_tooltip(
"Amplification of the text prompt's influence over the outcome. At 0, the prompt is entirely ignored.\n\
\n\
Lower values are more creative and exploratory. Higher values are more literal and uninspired, but may be lower quality.\n\
Lower values are more creative and exploratory. Higher values are more literal and uninspired.\n\
\n\
This parameter is otherwise known as CFG (classifier-free guidance).",
)

View file

@ -276,14 +276,13 @@ impl MessageHandler<PortfolioMessage, (&InputPreprocessorMessageHandler, &Prefer
} => {
let get = |name: &str| IMAGINATE_NODE.inputs.iter().position(|input| input.name == name).unwrap_or_else(|| panic!("Input {name} not found"));
let data = image_data.chunks_exact(4).map(|v| graphene_core::raster::color::Color::from_rgba8(v[0], v[1], v[2], v[3])).collect();
let image = Image { width, height, data };
let image = Image::from_image_data(&image_data, width, height);
responses.push_back(
PortfolioMessage::DocumentPassMessage {
document_id,
message: NodeGraphMessage::SetQualifiedInputValue {
layer_path: layer_path.clone(),
node_path: node_path.clone(),
layer_path,
node_path,
input_index: get("Cached Data"),
value: TaggedValue::RcImage(Some(std::sync::Arc::new(image))),
}
@ -769,15 +768,13 @@ impl PortfolioMessageHandler {
&mut self,
document_id: u64,
layer_path: Vec<LayerId>,
(image_data, size): (Vec<u8>, (u32, u32)),
(image_data, (width, height)): (Vec<u8>, (u32, u32)),
imaginate_node: Option<Vec<NodeId>>,
preferences: &PreferencesMessageHandler,
responses: &mut VecDeque<Message>,
) -> Result<(), String> {
// Reformat the input image data into an f32 image
let data = image_data.chunks_exact(4).map(|v| graphene_core::raster::color::Color::from_rgba8(v[0], v[1], v[2], v[3])).collect();
let (width, height) = size;
let image = graphene_core::raster::Image { width, height, data };
let image = graphene_core::raster::Image::from_image_data(&image_data, width, height);
// Get the node graph layer
let document = self.documents.get_mut(&document_id).ok_or_else(|| "Invalid document".to_string())?;
@ -885,8 +882,7 @@ impl PortfolioMessageHandler {
// If no image was generated, use the input image
if image.width == 0 || image.height == 0 {
let data = image_data.chunks_exact(4).map(|v| graphene_core::raster::color::Color::from_rgba8(v[0], v[1], v[2], v[3])).collect();
image = graphene_core::raster::Image { width, height, data };
image = graphene_core::raster::Image::from_image_data(&image_data, width, height);
}
let (image_data, _size) = Self::encode_img(image, None, image::ImageOutputFormat::Bmp)?;

View file

@ -62,7 +62,7 @@ impl OverlayRenderer {
// Only view in and out handles if they are not on top of the anchor
let [in_handle, out_handle] = {
let Some(anchor) = manipulator_group.points[ManipulatorType::Anchor].as_ref() else{
let Some(anchor) = manipulator_group.points[ManipulatorType::Anchor].as_ref() else {
continue;
};

View file

@ -81,7 +81,7 @@ impl SelectedEdges {
let mut pivot = self.pivot_from_bounds(min, max);
if center {
// The below ratio is: `dragging edge / being centred`.
// The below ratio is: `dragging edge / being centered`.
// The `is_finite()` checks are in case the user is dragging the edge where the pivot is located (in which case the centering mode is ignored).
if self.top {
let ratio = (center_around.y - min.y) / (center_around.y - self.bounds[0].y);

View file

@ -139,68 +139,24 @@ impl Fsm for ImaginateToolFsmState {
shape_data.path = Some(document.get_path_for_new_layer());
responses.push_back(DocumentMessage::DeselectAllLayers.into());
use graph_craft::{document::*, generic, proto::*};
use graph_craft::document::*;
let imaginate_node_type = IMAGINATE_NODE;
let num_inputs = imaginate_node_type.inputs.len();
let imaginate_inner_network = NodeNetwork {
inputs: (0..num_inputs).map(|_| 0).collect(),
output: 0,
nodes: [(
0,
DocumentNode {
name: format!("{}_impl", imaginate_node_type.name),
// TODO: Allow inserting nodes that contain other nodes.
implementation: DocumentNodeImplementation::Unresolved(imaginate_node_type.identifier.clone()),
inputs: (0..num_inputs).map(|_| NodeInput::Network).collect(),
metadata: DocumentNodeMetadata::default(),
},
)]
.into_iter()
.collect(),
..Default::default()
};
let mut imaginate_inputs: Vec<NodeInput> = imaginate_node_type.inputs.iter().map(|input| input.default.clone()).collect();
imaginate_inputs[0] = NodeInput::Node(0);
let network = NodeNetwork {
inputs: vec![0],
output: 1,
nodes: [
(
0,
DocumentNode {
name: "Input".into(),
inputs: vec![NodeInput::Network],
implementation: DocumentNodeImplementation::Unresolved(NodeIdentifier::new("graphene_core::ops::IdNode", &[generic!("T")])),
metadata: DocumentNodeMetadata { position: (8, 4).into() },
},
),
(
1,
DocumentNode {
name: "Output".into(),
inputs: vec![NodeInput::Node(2)],
implementation: DocumentNodeImplementation::Unresolved(NodeIdentifier::new("graphene_core::ops::IdNode", &[generic!("T")])),
metadata: DocumentNodeMetadata { position: (32, 4).into() },
},
),
(
2,
DocumentNode {
name: imaginate_node_type.name.to_string(),
inputs: imaginate_inputs,
// TODO: Allow inserting nodes that contain other nodes.
implementation: DocumentNodeImplementation::Network(imaginate_inner_network),
metadata: graph_craft::document::DocumentNodeMetadata { position: (20, 4).into() },
},
),
]
.into_iter()
.collect(),
..Default::default()
};
let imaginate_node_id = 2;
let mut network = NodeNetwork::new_network(32, imaginate_node_id);
network.nodes.insert(
imaginate_node_id,
DocumentNode {
name: imaginate_node_type.name.to_string(),
inputs: imaginate_inputs,
implementation: imaginate_node_type.generate_implementation(),
metadata: graph_craft::document::DocumentNodeMetadata { position: (20, 4).into() },
},
);
responses.push_back(
Operation::AddNodeGraphFrame {

View file

@ -138,35 +138,7 @@ impl Fsm for NodeGraphToolFsmState {
shape_data.path = Some(document.get_path_for_new_layer());
responses.push_back(DocumentMessage::DeselectAllLayers.into());
use graph_craft::{document::*, generic, proto::*};
let network = NodeNetwork {
inputs: vec![0],
output: 1,
nodes: [
(
0,
DocumentNode {
name: "Input".into(),
inputs: vec![NodeInput::Network],
implementation: DocumentNodeImplementation::Unresolved(NodeIdentifier::new("graphene_core::ops::IdNode", &[generic!("T")])),
metadata: DocumentNodeMetadata { position: (8, 4).into() },
},
),
(
1,
DocumentNode {
name: "Output".into(),
inputs: vec![NodeInput::Node(0)],
implementation: DocumentNodeImplementation::Unresolved(NodeIdentifier::new("graphene_core::ops::IdNode", &[generic!("T")])),
metadata: DocumentNodeMetadata { position: (20, 4).into() },
},
),
]
.into_iter()
.collect(),
..Default::default()
};
let network = graph_craft::document::NodeNetwork::new_network(20, 0);
responses.push_back(
Operation::AddNodeGraphFrame {

View file

@ -2,7 +2,7 @@
import { getContext, onMount, tick } from "svelte";
import { textInputCleanup } from "@/utility-functions/keyboard-entry";
import { rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import { extractPixelData, rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import {
type MouseCursorIcon,
type XY,
@ -82,10 +82,9 @@
Array.from(dataTransfer.items).forEach(async (item) => {
const file = item.getAsFile();
if (file?.type.startsWith("image")) {
const buffer = await file.arrayBuffer();
const u8Array = new Uint8Array(buffer);
const imageData = await extractPixelData(file);
editor.instance.pasteImage(file.type, u8Array, e.clientX, e.clientY);
editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height, e.clientX, e.clientY);
}
});
}

View file

@ -5,6 +5,7 @@ import { type FullscreenState } from "@/state-providers/fullscreen";
import { type PortfolioState } from "@/state-providers/portfolio";
import { makeKeyboardModifiersBitfield, textInputCleanup, getLocalizedScanCode } from "@/utility-functions/keyboard-entry";
import { platformIsMac } from "@/utility-functions/platform";
import { extractPixelData } from "@/utility-functions/rasterization";
import { stripIndents } from "@/utility-functions/strip-indents";
import { type Editor } from "@/wasm-communication/editor";
import { TriggerPaste } from "@/wasm-communication/messages";
@ -271,10 +272,8 @@ export function createInputManager(editor: Editor, dialog: DialogState, document
const file = item.getAsFile();
if (file?.type.startsWith("image")) {
file.arrayBuffer().then((buffer): void => {
const u8Array = new Uint8Array(buffer);
editor.instance.pasteImage(file.type, u8Array);
extractPixelData(file).then((imageData): void => {
editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height);
});
}
});
@ -318,10 +317,11 @@ export function createInputManager(editor: Editor, dialog: DialogState, document
if (imageType) {
const blob = await item.getType(imageType);
const reader = new FileReader();
reader.onload = (): void => {
const u8Array = new Uint8Array(reader.result as ArrayBuffer);
editor.instance.pasteImage(imageType, u8Array);
reader.onload = async (): Promise<void> => {
if (reader.result instanceof ArrayBuffer) {
const imageData = await extractPixelData(new Blob([reader.result], { type: imageType }));
editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height);
}
};
reader.readAsArrayBuffer(blob);
}

View file

@ -4,7 +4,7 @@ import {writable} from "svelte/store";
import { downloadFileText, downloadFileBlob, upload } from "@/utility-functions/files";
import { imaginateGenerate, imaginateCheckConnection, imaginateTerminate, updateBackendImage } from "@/utility-functions/imaginate";
import { rasterizeSVG, rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import { extractPixelData, rasterizeSVG, rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import { type Editor } from "@/wasm-communication/editor";
import {
type FrontendDocumentDetails,
@ -52,7 +52,8 @@ export function createPortfolioState(editor: Editor) {
});
editor.subscriptions.subscribeJsMessage(TriggerImport, async () => {
const data = await upload("image/*", "data");
editor.instance.pasteImage(data.type, Uint8Array.from(data.content));
const imageData = await extractPixelData(new Blob([data.content], { type: data.type }));
editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height);
});
editor.subscriptions.subscribeJsMessage(TriggerFileDownload, (triggerFileDownload) => {
downloadFileText(triggerFileDownload.name, triggerFileDownload.document);

View file

@ -1,6 +1,6 @@
import { replaceBlobURLsWithBase64 } from "@/utility-functions/files";
// Rasterize the string of an SVG document at a given width and height and turn it into the blob data of an image file matching the given MIME type
// Rasterize the string of an SVG document at a given width and height and return the canvas it was drawn onto during the rasterization process
export async function rasterizeSVGCanvas(svg: string, width: number, height: number, backgroundColor?: string): Promise<HTMLCanvasElement> {
// A canvas to render our SVG to in order to get a raster image
const canvas = document.createElement("canvas");
@ -22,6 +22,7 @@ export async function rasterizeSVGCanvas(svg: string, width: number, height: num
const svgBlob = new Blob([svgWithBase64Images], { type: "image/svg+xml;charset=utf-8" });
const url = URL.createObjectURL(svgBlob);
// Load the Image from the URL and wait until it's done
const image = new Image();
image.src = url;
await new Promise<void>((resolve) => {
@ -37,6 +38,7 @@ export async function rasterizeSVGCanvas(svg: string, width: number, height: num
return canvas;
}
// Rasterize the string of an SVG document at a given width and height and turn it into the blob data of an image file matching the given MIME type
export async function rasterizeSVG(svg: string, width: number, height: number, mime: string, backgroundColor?: string): Promise<Blob> {
const canvas = await rasterizeSVGCanvas(svg, width, height, backgroundColor);
@ -51,3 +53,49 @@ export async function rasterizeSVG(svg: string, width: number, height: number, m
return blob;
}
/// Convert an image source (e.g. PNG document) into pixel data, a width and a height
export async function extractPixelData(imageData: ImageBitmapSource): Promise<ImageData> {
// Special handling to rasterize an SVG file
let svgImageData;
if (imageData instanceof File && imageData.type === "image/svg+xml") {
const svgSource = await imageData.text();
const svgElement = new DOMParser().parseFromString(svgSource, "image/svg+xml").querySelector("svg");
if (!svgElement) throw new Error("Error reading SVG file");
let bounds = svgElement.viewBox.baseVal;
// If the bounds are zero (which will happen if the `viewBox` is not provided), set bounds to the artwork's bounding box
if (bounds.width === 0 || bounds.height === 0) {
// It's necessary to measure while the element is in the DOM, otherwise the dimensions are zero
const toRemove = document.body.insertAdjacentElement("beforeend", svgElement);
bounds = svgElement.getBBox();
toRemove?.remove();
}
svgImageData = await rasterizeSVGCanvas(svgSource, bounds.width, bounds.height);
}
// Decode the image file binary data
const image = await createImageBitmap(svgImageData || imageData);
// Halve the image size until the editor lag is somewhat usable
// TODO: Fix lag so this can be removed
const MAX_IMAGE_SIZE = 512;
let { width, height } = image;
while (width > MAX_IMAGE_SIZE || height > MAX_IMAGE_SIZE) {
width /= 2;
height /= 2;
}
width = Math.floor(width);
height = Math.floor(height);
// Render image to canvas
const canvas = document.createElement("canvas");
canvas.width = width;
canvas.height = height;
const context = canvas.getContext("2d");
if (!context) throw new Error("Could not create canvas context");
context.drawImage(image, 0, 0, image.width, image.height, 0, 0, width, height);
return context.getImageData(0, 0, width, height);
}

View file

@ -23,6 +23,7 @@ editor = { path = "../../editor", package = "graphite-editor" }
document-legacy = { path = "../../document-legacy", package = "graphite-document-legacy" }
graph-craft = { path = "../../node-graph/graph-craft" }
log = "0.4"
graphene-core = { path = "../../node-graph/gcore", features = ["async", "std", "alloc"] }
serde = { version = "1.0", features = ["derive"] }
wasm-bindgen = { version = "0.2.73" }
serde-wasm-bindgen = "0.4.1"

View file

@ -660,9 +660,10 @@ impl JsEditorHandle {
/// Pastes an image
#[wasm_bindgen(js_name = pasteImage)]
pub fn paste_image(&self, mime: String, image_data: Vec<u8>, mouse_x: Option<f64>, mouse_y: Option<f64>) {
pub fn paste_image(&self, image_data: Vec<u8>, width: u32, height: u32, mouse_x: Option<f64>, mouse_y: Option<f64>) {
let mouse = mouse_x.and_then(|x| mouse_y.map(|y| (x, y)));
let message = DocumentMessage::PasteImage { mime, image_data, mouse };
let image = graphene_core::raster::Image::from_image_data(&image_data, width, height);
let message = DocumentMessage::PasteImage { image, mouse };
self.dispatch(message);
}

View file

@ -228,7 +228,7 @@
import { defineComponent, nextTick } from "vue";
import { textInputCleanup } from "@/utility-functions/keyboard-entry";
import { rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import { extractPixelData, rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import { type DisplayEditableTextbox, type MouseCursorIcon, type XY } from "@/wasm-communication/messages";
import EyedropperPreview, { ZOOM_WINDOW_DIMENSIONS } from "@/components/floating-menus/EyedropperPreview.vue";
@ -300,10 +300,9 @@ export default defineComponent({
Array.from(dataTransfer.items).forEach(async (item) => {
const file = item.getAsFile();
if (file?.type.startsWith("image")) {
const buffer = await file.arrayBuffer();
const u8Array = new Uint8Array(buffer);
const imageData = await extractPixelData(file);
this.editor.instance.pasteImage(file.type, u8Array, e.clientX, e.clientY);
this.editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height, e.clientX, e.clientY);
}
});
},

View file

@ -3,6 +3,7 @@ import { type FullscreenState } from "@/state-providers/fullscreen";
import { type PortfolioState } from "@/state-providers/portfolio";
import { makeKeyboardModifiersBitfield, textInputCleanup, getLocalizedScanCode } from "@/utility-functions/keyboard-entry";
import { platformIsMac } from "@/utility-functions/platform";
import { extractPixelData } from "@/utility-functions/rasterization";
import { stripIndents } from "@/utility-functions/strip-indents";
import { type Editor } from "@/wasm-communication/editor";
import { TriggerPaste } from "@/wasm-communication/messages";
@ -270,10 +271,8 @@ export function createInputManager(editor: Editor, container: HTMLElement, dialo
const file = item.getAsFile();
if (file?.type.startsWith("image")) {
file.arrayBuffer().then((buffer): void => {
const u8Array = new Uint8Array(buffer);
editor.instance.pasteImage(file.type, u8Array);
extractPixelData(file).then((imageData): void => {
editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height);
});
}
});
@ -317,10 +316,11 @@ export function createInputManager(editor: Editor, container: HTMLElement, dialo
if (imageType) {
const blob = await item.getType(imageType);
const reader = new FileReader();
reader.onload = (): void => {
const u8Array = new Uint8Array(reader.result as ArrayBuffer);
editor.instance.pasteImage(imageType, u8Array);
reader.onload = async (): Promise<void> => {
if (reader.result instanceof ArrayBuffer) {
const imageData = await extractPixelData(new Blob([reader.result], { type: imageType }));
editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height);
}
};
reader.readAsArrayBuffer(blob);
}

View file

@ -3,7 +3,7 @@ import { reactive, readonly } from "vue";
import { downloadFileText, downloadFileBlob, upload } from "@/utility-functions/files";
import { imaginateGenerate, imaginateCheckConnection, imaginateTerminate, updateBackendImage } from "@/utility-functions/imaginate";
import { rasterizeSVG, rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import { extractPixelData, rasterizeSVG, rasterizeSVGCanvas } from "@/utility-functions/rasterization";
import { type Editor } from "@/wasm-communication/editor";
import {
type FrontendDocumentDetails,
@ -45,7 +45,8 @@ export function createPortfolioState(editor: Editor) {
});
editor.subscriptions.subscribeJsMessage(TriggerImport, async () => {
const data = await upload("image/*", "data");
editor.instance.pasteImage(data.type, Uint8Array.from(data.content));
const imageData = await extractPixelData(new Blob([data.content], { type: data.type }));
editor.instance.pasteImage(new Uint8Array(imageData.data), imageData.width, imageData.height);
});
editor.subscriptions.subscribeJsMessage(TriggerFileDownload, (triggerFileDownload) => {
downloadFileText(triggerFileDownload.name, triggerFileDownload.document);

View file

@ -1,6 +1,6 @@
import { replaceBlobURLsWithBase64 } from "@/utility-functions/files";
// Rasterize the string of an SVG document at a given width and height and turn it into the blob data of an image file matching the given MIME type
// Rasterize the string of an SVG document at a given width and height and return the canvas it was drawn onto during the rasterization process
export async function rasterizeSVGCanvas(svg: string, width: number, height: number, backgroundColor?: string): Promise<HTMLCanvasElement> {
// A canvas to render our SVG to in order to get a raster image
const canvas = document.createElement("canvas");
@ -22,6 +22,7 @@ export async function rasterizeSVGCanvas(svg: string, width: number, height: num
const svgBlob = new Blob([svgWithBase64Images], { type: "image/svg+xml;charset=utf-8" });
const url = URL.createObjectURL(svgBlob);
// Load the Image from the URL and wait until it's done
const image = new Image();
image.src = url;
await new Promise<void>((resolve) => {
@ -37,6 +38,7 @@ export async function rasterizeSVGCanvas(svg: string, width: number, height: num
return canvas;
}
// Rasterize the string of an SVG document at a given width and height and turn it into the blob data of an image file matching the given MIME type
export async function rasterizeSVG(svg: string, width: number, height: number, mime: string, backgroundColor?: string): Promise<Blob> {
const canvas = await rasterizeSVGCanvas(svg, width, height, backgroundColor);
@ -51,3 +53,49 @@ export async function rasterizeSVG(svg: string, width: number, height: number, m
return blob;
}
/// Convert an image source (e.g. PNG document) into pixel data, a width and a height
export async function extractPixelData(imageData: ImageBitmapSource): Promise<ImageData> {
// Special handling to rasterize an SVG file
let svgImageData;
if (imageData instanceof File && imageData.type === "image/svg+xml") {
const svgSource = await imageData.text();
const svgElement = new DOMParser().parseFromString(svgSource, "image/svg+xml").querySelector("svg");
if (!svgElement) throw new Error("Error reading SVG file");
let bounds = svgElement.viewBox.baseVal;
// If the bounds are zero (which will happen if the `viewBox` is not provided), set bounds to the artwork's bounding box
if (bounds.width === 0 || bounds.height === 0) {
// It's necessary to measure while the element is in the DOM, otherwise the dimensions are zero
const toRemove = document.body.insertAdjacentElement("beforeend", svgElement);
bounds = svgElement.getBBox();
toRemove?.remove();
}
svgImageData = await rasterizeSVGCanvas(svgSource, bounds.width, bounds.height);
}
// Decode the image file binary data
const image = await createImageBitmap(svgImageData || imageData);
// Halve the image size until the editor lag is somewhat usable
// TODO: Fix lag so this can be removed
const MAX_IMAGE_SIZE = 512;
let { width, height } = image;
while (width > MAX_IMAGE_SIZE || height > MAX_IMAGE_SIZE) {
width /= 2;
height /= 2;
}
width = Math.floor(width);
height = Math.floor(height);
// Render image to canvas
const canvas = document.createElement("canvas");
canvas.width = width;
canvas.height = height;
const context = canvas.getContext("2d");
if (!context) throw new Error("Could not create canvas context");
context.drawImage(image, 0, 0, image.width, image.height, 0, 0, width, height);
return context.getImageData(0, 0, width, height);
}

View file

@ -22,6 +22,7 @@ crate-type = ["cdylib", "rlib"]
editor = { path = "../../editor", package = "graphite-editor" }
document-legacy = { path = "../../document-legacy", package = "graphite-document-legacy" }
graph-craft = { path = "../../node-graph/graph-craft" }
graphene-core = { path = "../../node-graph/gcore", features = ["async", "std", "alloc"] }
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
wasm-bindgen = { version = "0.2.73" }

View file

@ -660,9 +660,10 @@ impl JsEditorHandle {
/// Pastes an image
#[wasm_bindgen(js_name = pasteImage)]
pub fn paste_image(&self, mime: String, image_data: Vec<u8>, mouse_x: Option<f64>, mouse_y: Option<f64>) {
pub fn paste_image(&self, image_data: Vec<u8>, width: u32, height: u32, mouse_x: Option<f64>, mouse_y: Option<f64>) {
let mouse = mouse_x.and_then(|x| mouse_y.map(|y| (x, y)));
let message = DocumentMessage::PasteImage { mime, image_data, mouse };
let image = graphene_core::raster::Image::from_image_data(&image_data, width, height);
let message = DocumentMessage::PasteImage { image, mouse };
self.dispatch(message);
}

View file

@ -459,6 +459,11 @@ mod image {
data: self.data.as_slice(),
}
}
/// Generate Image from some frontend image data (the canvas pixels as u8s in a flat array)
pub fn from_image_data(image_data: &[u8], width: u32, height: u32) -> Self {
let data = image_data.chunks_exact(4).map(|v| Color::from_rgba8(v[0], v[1], v[2], v[3])).collect();
Image { width, height, data }
}
}
impl IntoIterator for Image {
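
For reference, a small sketch of the new constructor in use: it converts a flat RGBA8 byte buffer (the format the frontend's canvas APIs produce) into the editor's Image. The 2x2 pixel values are purely illustrative.

use graphene_core::raster::Image;

// Hedged sketch: building an Image from canvas-style RGBA8 bytes (4 bytes per pixel, row-major).
fn tiny_checker_image() -> Image {
    let rgba8: Vec<u8> = vec![
        255, 255, 255, 255,   0,   0,   0, 255, // white, black
          0,   0,   0, 255, 255, 255, 255, 255, // black, white
    ];
    let image = Image::from_image_data(&rgba8, 2, 2);
    assert_eq!(image.data.len(), (image.width * image.height) as usize);
    image
}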

View file

@ -47,8 +47,8 @@ fn bilt_subpath(base_image: Image, path_data: Subpath) -> Image {
let composition = Composition::new();
let mut renderer = cpu::Renderer::new();
let mut path_builder = PathBuilder::new();
for path_segement in path_data.bezier_iter() {
let points = path_segement.internal.get_points().collect::<Vec<_>>();
for path_segment in path_data.bezier_iter() {
let points = path_segment.internal.get_points().collect::<Vec<_>>();
match points.len() {
2 => path_builder.line_to(points[1].into()),
3 => path_builder.quad_to(points[1].into(), points[2].into()),

View file

@ -11,7 +11,7 @@ serde = ["dep:serde", "graphene-core/serde", "glam/serde"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
graphene-core = { path = "../gcore", features = ["async", "std", "alloc"] }
graphene-core = { path = "../gcore", features = ["alloc"] }
dyn-any = { path = "../../libraries/dyn-any", features = ["log-bad-types", "rc", "glam"] }
num-traits = "0.2"
dyn-clone = "1.0"

View file

@ -1,9 +1,6 @@
use crate::document::value::TaggedValue;
use crate::generic;
use crate::proto::{ConstructionArgs, NodeIdentifier, ProtoNetwork, ProtoNode, ProtoNodeInput, Type};
use std::collections::HashMap;
use std::sync::Mutex;
pub mod value;
use dyn_any::{DynAny, StaticType};
use glam::IVec2;
@ -11,6 +8,10 @@ use rand_chacha::{
rand_core::{RngCore, SeedableRng},
ChaCha20Rng,
};
use std::collections::{HashMap, HashSet};
use std::sync::Mutex;
pub mod value;
pub type NodeId = u64;
static RNG: Mutex<Option<ChaCha20Rng>> = Mutex::new(None);
@ -226,7 +227,7 @@ impl NodeNetwork {
self.flatten_with_fns(node, merge_ids, generate_uuid)
}
/// Recursively dissolve non primitive document nodes and return a single flattened network of nodes.
/// Recursively dissolve non-primitive document nodes and return a single flattened network of nodes.
pub fn flatten_with_fns(&mut self, node: NodeId, map_ids: impl Fn(NodeId, NodeId) -> NodeId + Copy, gen_id: impl Fn() -> NodeId + Copy) {
let (id, mut node) = self
.nodes
@ -258,10 +259,18 @@ impl NodeNetwork {
network_input.populate_first_network_input(node, *offset);
}
NodeInput::Value { tagged_value, exposed } => {
let name = format!("Value: {:?}", tagged_value.clone().to_value());
// Skip formatting very large values for seconds in performance speedup
let name = if matches!(
tagged_value,
TaggedValue::Image(_) | TaggedValue::RcImage(_) | TaggedValue::Color(_) | TaggedValue::Subpath(_) | TaggedValue::RcSubpath(_)
) {
"Value".to_string()
} else {
format!("Value: {:?}", tagged_value.clone().to_value())
};
let new_id = map_ids(id, gen_id());
let value_node = DocumentNode {
name: name.clone(),
name,
inputs: vec![NodeInput::Value { tagged_value, exposed }],
implementation: DocumentNodeImplementation::Unresolved(NodeIdentifier::new("graphene_core::value::ValueNode", &[generic!("T")])),
metadata: DocumentNodeMetadata::default(),
@ -305,6 +314,95 @@ impl NodeNetwork {
pub fn original_output(&self) -> NodeId {
self.previous_output.unwrap_or(self.output)
}
/// A graph with just an input and output node
pub fn new_network(output_offset: i32, output_node_id: NodeId) -> Self {
Self {
inputs: vec![0],
output: 1,
nodes: [
(
0,
DocumentNode {
name: "Input".into(),
inputs: vec![NodeInput::Network],
implementation: DocumentNodeImplementation::Unresolved(NodeIdentifier::new("graphene_core::ops::IdNode", &[generic!("T")])),
metadata: DocumentNodeMetadata { position: (8, 4).into() },
},
),
(
1,
DocumentNode {
name: "Output".into(),
inputs: vec![NodeInput::Node(output_node_id)],
implementation: DocumentNodeImplementation::Unresolved(NodeIdentifier::new("graphene_core::ops::IdNode", &[generic!("T")])),
metadata: DocumentNodeMetadata { position: (output_offset, 4).into() },
},
),
]
.into_iter()
.collect(),
..Default::default()
}
}
/// Get the nested network given by the path of node ids
pub fn nested_network(&self, nested_path: &[NodeId]) -> Option<&Self> {
let mut network = Some(self);
for segment in nested_path {
network = network.and_then(|network| network.nodes.get(segment)).and_then(|node| node.implementation.get_network());
}
network
}
/// Get the mutable nested network given by the path of node ids
pub fn nested_network_mut(&mut self, nested_path: &[NodeId]) -> Option<&mut Self> {
let mut network = Some(self);
for segment in nested_path {
network = network.and_then(|network| network.nodes.get_mut(segment)).and_then(|node| node.implementation.get_network_mut());
}
network
}
/// Check if the specified node id is connected to the output
pub fn connected_to_output(&self, node_id: NodeId) -> bool {
// If the node is the output then return true
if self.output == node_id {
return true;
}
// Get the output
let Some(output_node) = self.nodes.get(&self.output) else {
return false;
};
let mut stack = vec![output_node];
let mut already_visited = HashSet::new();
already_visited.insert(self.output);
while let Some(node) = stack.pop() {
for input in &node.inputs {
if let &NodeInput::Node(ref_id) = input {
// Skip if already viewed
if already_visited.contains(&ref_id) {
continue;
}
// If the target node is used as input then return true
if ref_id == node_id {
return true;
}
// Add the referenced node to the stack
let Some(ref_node) = self.nodes.get(&ref_id) else {
continue;
};
already_visited.insert(ref_id);
stack.push(ref_node);
}
}
}
false
}
}
#[cfg(test)]
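
A hedged sketch of how the new helpers fit together, written in the style of a test that could sit in the module below. The node ids are the ones new_network generates; everything else is illustrative.

#[test]
fn reachability_and_nesting_sketch() {
    // Input node (id 0) and Output node (id 1), with the output wired to read node 0
    let network = NodeNetwork::new_network(20, 0);

    // The output node itself and anything it transitively reads are connected to the output
    assert!(network.connected_to_output(1));
    assert!(network.connected_to_output(0));
    // A node id that was never inserted is not
    assert!(!network.connected_to_output(42));

    // An empty path yields the network itself; descending into node 0 fails because its
    // implementation is Unresolved rather than a nested NodeNetwork
    assert!(network.nested_network(&[]).is_some());
    assert!(network.nested_network(&[0]).is_none());
}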

View file

@ -16,10 +16,10 @@ fn has_attribute(attrs: &[Attribute], target: &str) -> bool {
/// Returns the new input type and a conversion to the origional.
fn easier_string_assignment(field_ty: &Type, field_ident: &Ident) -> (TokenStream2, TokenStream2) {
if let Type::Path(type_path) = field_ty {
if let Some(last_segement) = type_path.path.segments.last() {
if let Some(last_segment) = type_path.path.segments.last() {
// Check if this type is a `String`
// Based on https://stackoverflow.com/questions/66906261/rust-proc-macro-derive-how-do-i-check-if-a-field-is-of-a-primitive-type-like-b
if last_segement.ident == Ident::new("String", last_segement.ident.span()) {
if last_segment.ident == Ident::new("String", last_segment.ident.span()) {
return (
quote::quote_spanned!(type_path.span() => impl Into<String>),
quote::quote_spanned!(field_ident.span() => #field_ident.into()),
@ -49,10 +49,10 @@ fn find_type_and_assignment(field: &Field) -> syn::Result<(TokenStream2, TokenSt
// Check if type is `WidgetCallback`
if let Type::Path(type_path) = field_ty {
if let Some(last_segement) = type_path.path.segments.last() {
if let PathArguments::AngleBracketed(generic_args) = &last_segement.arguments {
if let Some(last_segment) = type_path.path.segments.last() {
if let PathArguments::AngleBracketed(generic_args) = &last_segment.arguments {
if let Some(first_generic) = generic_args.args.first() {
if last_segement.ident == Ident::new("WidgetCallback", last_segement.ident.span()) {
if last_segment.ident == Ident::new("WidgetCallback", last_segment.ident.span()) {
// Assign builder pattern to assign the closure directly
function_input_ty = quote::quote_spanned!(field_ty.span() => impl Fn(&#first_generic) -> crate::messages::message::Message + 'static + Send + Sync);
assignment = quote::quote_spanned!(field_ident.span() => crate::messages::layout::utility_types::layout_widget::WidgetCallback::new(#field_ident));