Insert pasted images as layers in document graph (#1418)

Changes:

Graph is evaluated on every viewport change
Move all navigation logic into the viewport
Reduce the number of JS roundtrips
Add canvas rendering
Enable image pasting
Various cleanups
Fix cache nodes being reset every evaluation
This commit is contained in:
Dennis Kobert 2023-09-13 17:02:35 +02:00 committed by Keavon Chambers
parent d82f133514
commit 833f41bccb
20 changed files with 202 additions and 226 deletions

View file

@ -294,6 +294,9 @@ impl GraphicElementRendered for ImageFrame<Color> {
}
ImageRenderMode::Base64 => {
let image = &self.image;
if image.data.is_empty() {
return;
}
let (flat_data, _, _) = image.clone().into_flat_u8();
let mut output = Vec::new();
let encoder = image::codecs::png::PngEncoder::new(&mut output);
@ -303,13 +306,12 @@ impl GraphicElementRendered for ImageFrame<Color> {
let preamble = "data:image/png;base64,";
let mut base64_string = String::with_capacity(preamble.len() + output.len() * 4);
base64_string.push_str(preamble);
log::debug!("len: {}", image.data.len());
base64::engine::general_purpose::STANDARD.encode_string(output, &mut base64_string);
render.leaf_tag("image", |attributes| {
attributes.push("width", image.width.to_string());
attributes.push("width", 1.to_string());
attributes.push("height", image.height.to_string());
attributes.push("height", 1.to_string());
attributes.push("preserveAspectRatio", "none");
attributes.push("transform", transform);
attributes.push("href", base64_string)

View file

@ -94,7 +94,8 @@ impl PartialEq for TypeDescriptor {
match (self.id, other.id) {
(Some(id), Some(other_id)) => id == other_id,
_ => {
warn!("TypeDescriptor::eq: comparing types without ids based on name");
// TODO: Add a flag to disable this warning
//warn!("TypeDescriptor::eq: comparing types without ids based on name");
self.name == other.name
}
}

View file

@ -90,5 +90,6 @@ features = [
"Navigator",
"Gpu",
"HtmlCanvasElement",
"HtmlImageElement",
"ImageBitmapRenderingContext",
]

View file

@ -58,41 +58,49 @@ fn buffer_node<R: std::io::Read>(reader: R) -> Result<Vec<u8>, Error> {
Ok(std::io::Read::bytes(reader).collect::<Result<Vec<_>, _>>()?)
}
pub struct DownresNode<ImageFrame> {
pub struct SampleNode<ImageFrame> {
image_frame: ImageFrame,
}
#[node_macro::node_fn(DownresNode)]
fn downres(footprint: Footprint, image_frame: ImageFrame<Color>) -> ImageFrame<Color> {
#[node_macro::node_fn(SampleNode)]
fn sample(footprint: Footprint, image_frame: ImageFrame<Color>) -> ImageFrame<Color> {
// resize the image using the image crate
let image = image_frame.image;
let data = bytemuck::cast_vec(image.data);
let viewport_bounds = footprint.viewport_bounds_in_local_space();
log::debug!("viewport_bounds: {viewport_bounds:?}");
let bbox = Bbox::from_transform(image_frame.transform * DAffine2::from_scale(DVec2::new(image.width as f64, image.height as f64)));
log::debug!("local_bounds: {bbox:?}");
let bounds = viewport_bounds.intersect(&bbox.to_axis_aligned_bbox());
log::debug!("intersection: {bounds:?}");
let union = viewport_bounds.union(&bbox.to_axis_aligned_bbox());
log::debug!("union: {union:?}");
let size = bounds.size();
let image_bounds = Bbox::from_transform(image_frame.transform).to_axis_aligned_bbox();
let intersection = viewport_bounds.intersect(&image_bounds);
let image_size = DAffine2::from_scale(DVec2::new(image.width as f64, image.height as f64));
let size = intersection.size();
let size_px = image_size.transform_vector2(size).as_uvec2();
// If the image would not be visible, return an empty image
if size.x <= 0. || size.y <= 0. {
return ImageFrame::empty();
}
let image_buffer = image::Rgba32FImage::from_raw(image.width, image.height, data).expect("Failed to convert internal ImageFrame into image-rs data type.");
let dynamic_image: image::DynamicImage = image_buffer.into();
let offset = (bounds.start - viewport_bounds.start).as_uvec2();
let cropped = dynamic_image.crop_imm(offset.x, offset.y, size.x as u32, size.y as u32);
let offset = (intersection.start - image_bounds.start).max(DVec2::ZERO);
let offset_px = image_size.transform_vector2(offset).as_uvec2();
let cropped = dynamic_image.crop_imm(offset_px.x, offset_px.y, size_px.x, size_px.y);
log::debug!("transform: {:?}", footprint.transform);
log::debug!("size: {size:?}");
let viewport_resolution_x = footprint.transform.transform_vector2(DVec2::X * size.x).length();
let viewport_resolution_y = footprint.transform.transform_vector2(DVec2::Y * size.y).length();
let nwidth = viewport_resolution_x as u32;
let nheight = viewport_resolution_y as u32;
log::debug!("x: {viewport_resolution_x}, y: {viewport_resolution_y}");
let mut nwidth = size_px.x;
let mut nheight = size_px.y;
let resized = cropped.resize_exact(nwidth, nheight, image::imageops::Lanczos3);
// Only downscale the image for now
let resized = if nwidth < image.width || nheight < image.height {
nwidth = viewport_resolution_x as u32;
nheight = viewport_resolution_y as u32;
// TODO: choose filter based on quality requirements
cropped.resize_exact(nwidth, nheight, image::imageops::Triangle)
} else {
cropped
};
let buffer = resized.to_rgba32f();
let buffer = buffer.into_raw();
let vec = bytemuck::cast_vec(buffer);
@ -101,11 +109,10 @@ fn downres(footprint: Footprint, image_frame: ImageFrame<Color>) -> ImageFrame<C
height: nheight,
data: vec,
};
// we need to adjust the offset if we truncate the offset calculation
ImageFrame {
image,
transform: image_frame.transform,
}
let new_transform = image_frame.transform * DAffine2::from_translation(offset) * DAffine2::from_scale(DVec2::new(size.x, size.y));
ImageFrame { image, transform: new_transform }
}
#[derive(Debug, Clone, Copy)]

View file

@ -28,6 +28,9 @@ use web_sys::{CanvasRenderingContext2d, HtmlCanvasElement};
#[cfg(feature = "wgpu")]
use wgpu_executor::WgpuExecutor;
use base64::Engine;
use glam::DAffine2;
pub struct Canvas(CanvasRenderingContext2d);
#[derive(Debug, Default)]
@ -301,14 +304,45 @@ async fn render_node<'a: 'input, F: Future<Output = GraphicGroup>>(
let mut render = SvgRender::new();
let render_params = RenderParams::new(ViewMode::Normal, graphene_core::renderer::ImageRenderMode::Base64, None, false);
let output_format = editor.render_config.export_format;
let resolution = footprint.resolution;
match output_format {
ExportFormat::Svg => {
data.render_svg(&mut render, &render_params);
// TODO: reenable once we switch to full node graph
//render.format_svg((0., 0.).into(), (1., 1.).into());
let min = footprint.transform.inverse().transform_point2((0., 0.).into());
let max = footprint.transform.inverse().transform_point2(resolution.as_dvec2());
render.format_svg(min, max);
RenderOutput::Svg(render.svg.to_string())
}
ExportFormat::Canvas => {
data.render_svg(&mut render, &render_params);
// TODO: reenable once we switch to full node graph
let min = footprint.transform.inverse().transform_point2((0., 0.).into());
let max = footprint.transform.inverse().transform_point2(resolution.as_dvec2());
render.format_svg(min, max);
let string = render.svg.to_string();
let array = string.as_bytes();
let canvas = &surface_handle.surface;
canvas.set_width(resolution.x);
canvas.set_height(resolution.y);
let preamble = "data:image/svg+xml;base64,";
let mut base64_string = String::with_capacity(preamble.len() + array.len() * 4);
base64_string.push_str(preamble);
base64::engine::general_purpose::STANDARD.encode_string(array, &mut base64_string);
let image_data = web_sys::HtmlImageElement::new().unwrap();
image_data.set_src(base64_string.as_str());
let context = canvas.get_context("2d").unwrap().unwrap().dyn_into::<CanvasRenderingContext2d>().unwrap();
wasm_bindgen_futures::JsFuture::from(image_data.decode()).await.unwrap();
context.draw_image_with_html_image_element(&image_data, 0.0, 0.0).unwrap();
let frame = SurfaceHandleFrame {
surface_handle,
transform: DAffine2::IDENTITY,
};
RenderOutput::CanvasFrame(frame.into())
}
_ => todo!("Non svg render output"),
}
}

View file

@ -98,9 +98,6 @@ impl BorrowTree {
for (id, node) in proto_network.nodes {
if !self.nodes.contains_key(&id) {
self.push_node(id, node, typing_context).await?;
} else {
let Some(node_container) = self.nodes.get_mut(&id) else { continue };
node_container.reset();
}
old_nodes.remove(&id);
}

View file

@ -732,7 +732,7 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
register_node!(graphene_core::transform::CullNode<_>, input: Footprint, params: [VectorData]),
register_node!(graphene_core::transform::CullNode<_>, input: Footprint, params: [graphene_core::Artboard]),
register_node!(graphene_core::transform::CullNode<_>, input: Footprint, params: [graphene_core::GraphicGroup]),
register_node!(graphene_std::raster::DownresNode<_>, input: Footprint, params: [ImageFrame<Color>]),
register_node!(graphene_std::raster::SampleNode<_>, input: Footprint, params: [ImageFrame<Color>]),
register_node!(graphene_core::vector::ResamplePoints<_>, input: VectorData, params: [f64]),
register_node!(graphene_core::vector::SplineFromPointsNode, input: VectorData, params: []),
register_node!(graphene_core::vector::generator_nodes::CircleGenerator<_>, input: (), params: [f32]),