Color system based on traits, and conversion to linear color in the graph (#1123)

* Migrate Nodes to use RasterMut + Samplable

* Add Pixel trait to include serialization

* Implement traits for Color and propagate new generics

* Always convert to linear color when loading images
Authored by Dennis Kobert on 2023-04-16 02:57:05 +02:00, committed by Keavon Chambers
parent e21c2fb67b
commit 37b892a516
16 changed files with 638 additions and 265 deletions
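
The last bullet is the key behavioral change: 8-bit image data coming from the frontend is now decoded from sRGB gamma into linear light as soon as it is loaded. A standalone sketch of that decode step (the constants match the SRGBGammaFloat implementation added below; this is an illustration, not code from the commit):

/// Decode one 8-bit sRGB channel into linear light; same transfer function as the
/// SRGBGammaFloat implementation added in this commit.
fn srgb_to_linear(byte: u8) -> f32 {
	let c = byte as f32 / 255.0;
	if c <= 0.04045 {
		c / 12.92
	} else {
		((c + 0.055) / 1.055).powf(2.4)
	}
}

/// Decode a flat RGBA byte buffer into linear-light pixels; alpha is left as-is.
fn decode_rgba8(data: &[u8]) -> Vec<[f32; 4]> {
	data.chunks_exact(4)
		.map(|px| [srgb_to_linear(px[0]), srgb_to_linear(px[1]), srgb_to_linear(px[2]), px[3] as f32 / 255.0])
		.collect()
}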

@ -13,7 +13,7 @@ std = ["dyn-any", "dyn-any/std", "alloc", "glam/std", "specta"]
default = ["async", "serde", "kurbo", "log", "std"]
log = ["dep:log"]
serde = ["dep:serde", "glam/serde", "bezier-rs/serde", "base64"]
gpu = ["spirv-std", "bytemuck", "glam/bytemuck", "dyn-any", "glam/libm"]
gpu = ["spirv-std", "glam/bytemuck", "dyn-any", "glam/libm"]
async = ["async-trait", "alloc"]
nightly = []
alloc = ["dyn-any", "bezier-rs", "once_cell"]
@ -26,7 +26,7 @@ dyn-any = { path = "../../libraries/dyn-any", features = [
], optional = true, default-features = false }
spirv-std = { version = "0.5", features = ["glam"], optional = true }
bytemuck = { version = "1.8", features = ["derive"], optional = true }
bytemuck = { version = "1.8", features = ["derive"] }
async-trait = { version = "0.1", optional = true }
serde = { version = "1.0", features = [
"derive",
@ -47,3 +47,6 @@ base64 = { version = "0.13", optional = true }
specta.workspace = true
specta.optional = true
once_cell = { version = "1.17.0", default-features = false, optional = true }
num = "0.4.0"
num-derive = "0.3.3"
num-traits = "0.2.15"

@ -2,6 +2,9 @@ use core::{fmt::Debug, marker::PhantomData};
use crate::Node;
use bytemuck::{Pod, Zeroable};
use glam::DVec2;
use num::Num;
#[cfg(target_arch = "spirv")]
use spirv_std::num_traits::float::Float;
@ -12,6 +15,183 @@ pub mod brightness_contrast;
pub mod color;
pub use adjustments::*;
pub trait Channel: Copy + Debug + num::Num + num::NumCast {
fn to_linear<Out: Linear>(self) -> Out;
fn from_linear<In: Linear>(linear: In) -> Self;
fn to_f32(self) -> f32 {
num::cast(self).expect("Failed to convert channel to f32")
}
fn from_f32(value: f32) -> Self {
num::cast(value).expect("Failed to convert f32 to channel")
}
fn to_f64(self) -> f64 {
num::cast(self).expect("Failed to convert channel to f64")
}
fn from_f64(value: f64) -> Self {
num::cast(value).expect("Failed to convert f64 to channel")
}
fn to_channel<Out: Channel>(self) -> Out {
num::cast(self).expect("Failed to convert channel to channel")
}
}
pub trait Linear: num::NumCast + Num {}
impl Linear for f32 {}
impl Linear for f64 {}
impl<T: Linear + Debug + Copy> Channel for T {
#[inline(always)]
fn to_linear<Out: Linear>(self) -> Out {
num::cast(self).expect("Failed to convert channel to linear")
}
#[inline(always)]
fn from_linear<In: Linear>(linear: In) -> Self {
num::cast(linear).expect("Failed to convert linear to channel")
}
}
use num_derive::*;
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Num, NumCast, NumOps, One, Zero, ToPrimitive, FromPrimitive)]
struct SRGBGammaFloat(f32);
impl Channel for SRGBGammaFloat {
#[inline(always)]
fn to_linear<Out: Linear>(self) -> Out {
let channel = num::cast::<_, f32>(self).expect("Failed to convert srgb to linear");
let out = if channel <= 0.04045 { channel / 12.92 } else { ((channel + 0.055) / 1.055).powf(2.4) };
num::cast(out).expect("Failed to convert srgb to linear")
}
#[inline(always)]
fn from_linear<In: Linear>(linear: In) -> Self {
let linear = num::cast::<_, f32>(linear).expect("Failed to convert linear to srgb");
let out = if linear <= 0.0031308 { linear * 12.92 } else { 1.055 * linear.powf(1. / 2.4) - 0.055 };
num::cast(out).expect("Failed to convert linear to srgb")
}
}
pub trait RGBPrimaries {
const RED: DVec2;
const GREEN: DVec2;
const BLUE: DVec2;
const WHITE: DVec2;
}
pub trait Rec709Primaries {}
impl<T: Rec709Primaries> RGBPrimaries for T {
const RED: DVec2 = DVec2::new(0.64, 0.33);
const GREEN: DVec2 = DVec2::new(0.3, 0.6);
const BLUE: DVec2 = DVec2::new(0.15, 0.06);
const WHITE: DVec2 = DVec2::new(0.3127, 0.329);
}
pub trait SRGB: Rec709Primaries {}
#[cfg(feature = "serde")]
pub trait Serde: serde::Serialize + for<'a> serde::Deserialize<'a> {}
#[cfg(not(feature = "serde"))]
pub trait Serde {}
#[cfg(feature = "serde")]
impl<T: serde::Serialize + for<'a> serde::Deserialize<'a>> Serde for T {}
#[cfg(not(feature = "serde"))]
impl<T> Serde for T {}
// TODO: Come up with a better name for this trait
pub trait Pixel: Clone + Pod + Zeroable {
fn to_bytes(&self) -> Vec<u8> {
bytemuck::bytes_of(self).to_vec()
}
// TODO: use u8 for Color
fn from_bytes(bytes: &[u8]) -> &Self {
bytemuck::try_from_bytes(bytes).expect("Failed to convert bytes to pixel")
}
}
impl<T: Serde + Clone + Pod + Zeroable> Pixel for T {}
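
The default to_bytes/from_bytes above are thin wrappers over bytemuck, so any #[repr(C)] Pod type can act as a pixel. A standalone sketch of that byte round-trip with a hypothetical pixel struct (not a type from this commit; the commit's Color derives the same traits):

use bytemuck::{Pod, Zeroable};

// Hypothetical pixel layout for illustration only.
#[repr(C)]
#[derive(Clone, Copy, Debug, PartialEq, Pod, Zeroable)]
struct Rgba32F {
	r: f32,
	g: f32,
	b: f32,
	a: f32,
}

fn main() {
	let pixel = Rgba32F { r: 0.25, g: 0.5, b: 0.75, a: 1.0 };
	// What the default Pixel::to_bytes does: view the Pod value as raw bytes.
	let bytes = bytemuck::bytes_of(&pixel);
	assert_eq!(bytes.len(), core::mem::size_of::<Rgba32F>());
	// What the default Pixel::from_bytes does: reinterpret those bytes (length and alignment must match).
	let round_tripped: &Rgba32F = bytemuck::try_from_bytes(bytes).expect("length and alignment match");
	assert_eq!(*round_tripped, pixel);
}
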
pub trait RGB: Pixel {
type ColorChannel: Channel;
fn red(&self) -> Self::ColorChannel;
fn r(&self) -> Self::ColorChannel {
self.red()
}
fn green(&self) -> Self::ColorChannel;
fn g(&self) -> Self::ColorChannel {
self.green()
}
fn blue(&self) -> Self::ColorChannel;
fn b(&self) -> Self::ColorChannel {
self.blue()
}
}
pub trait AssociatedAlpha: RGB + Alpha {
fn to_unassociated<Out: UnassociatedAlpha>(&self) -> Out;
}
pub trait UnassociatedAlpha: RGB + Alpha {
fn to_associated<Out: AssociatedAlpha>(&self) -> Out;
}
pub trait Alpha {
type AlphaChannel: Channel;
fn alpha(&self) -> Self::AlphaChannel;
fn a(&self) -> Self::AlphaChannel {
self.alpha()
}
fn multiply_alpha(&self, alpha: Self::AlphaChannel) -> Self;
}
pub trait Depth {
type DepthChannel: Channel;
fn depth(&self) -> Self::DepthChannel;
fn d(&self) -> Self::DepthChannel {
self.depth()
}
}
pub trait ExtraChannels<const NUM: usize> {
type ChannelType: Channel;
fn extra_channels(&self) -> [Self::ChannelType; NUM];
}
pub trait Luminance {
type LuminanceChannel: Channel;
fn luminance(&self) -> Self::LuminanceChannel;
fn l(&self) -> Self::LuminanceChannel {
self.luminance()
}
}
// TODO: We might rename this to Raster at some point
pub trait Sample {
type Pixel: Pixel;
// TODO: Add an area parameter
fn sample(&self, pos: DVec2) -> Option<Self::Pixel>;
}
// TODO: We might rename this to Bitmap at some point
pub trait Raster {
type Pixel: Pixel;
fn width(&self) -> u32;
fn height(&self) -> u32;
fn get_pixel(&self, x: u32, y: u32) -> Option<Self::Pixel>;
}
pub trait RasterMut: Raster {
fn get_pixel_mut(&mut self, x: u32, y: u32) -> Option<&mut Self::Pixel>;
fn set_pixel(&mut self, x: u32, y: u32, pixel: Self::Pixel) {
*self.get_pixel_mut(x, y).unwrap() = pixel;
}
fn map_pixels<F: Fn(Self::Pixel) -> Self::Pixel>(&mut self, map_fn: F) {
for y in 0..self.height() {
for x in 0..self.width() {
let pixel = self.get_pixel(x, y).unwrap();
self.set_pixel(x, y, map_fn(pixel));
}
}
}
}
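
Sample, Raster and RasterMut give every bitmap-like type one shared access surface; map_pixels in particular is written once here in terms of get_pixel/set_pixel. A usage sketch against the Image<Color> implementation that appears later in this diff (assuming graphene_core from this commit is available as a dependency):

use graphene_core::raster::{Alpha, Color, Image, Raster, RasterMut};

fn main() {
	let mut image = Image::new(4, 4, Color::BLACK);
	// map_pixels visits every (x, y) via get_pixel/set_pixel, so any RasterMut type gets it for free.
	image.map_pixels(|pixel| pixel.multiply_alpha(0.5));
	assert_eq!(image.width(), 4);
	assert!(image.get_pixel(0, 4).is_none()); // One row past the end.
}
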
#[derive(Debug, Default)]
pub struct MapNode<MapFn> {
map_fn: MapFn,
@ -113,25 +293,28 @@ fn distance_node(input: (i32, i32)) -> f32 {
}
#[derive(Debug, Clone, Copy)]
pub struct ImageIndexIterNode;
pub struct ImageIndexIterNode<P> {
_p: core::marker::PhantomData<P>,
}
#[node_macro::node_fn(ImageIndexIterNode)]
fn image_index_iter_node(input: ImageSlice<'input>) -> core::ops::Range<u32> {
#[node_macro::node_fn(ImageIndexIterNode<_P>)]
fn image_index_iter_node<_P>(input: ImageSlice<'input, _P>) -> core::ops::Range<u32> {
0..(input.width * input.height)
}
#[derive(Debug)]
pub struct WindowNode<Radius: for<'i> Node<'i, (), Output = u32>, Image: for<'i> Node<'i, (), Output = ImageSlice<'i>>> {
pub struct WindowNode<P, Radius: for<'i> Node<'i, (), Output = u32>, Image: for<'i> Node<'i, (), Output = ImageSlice<'i, P>>> {
radius: Radius,
image: Image,
_pixel: core::marker::PhantomData<P>,
}
impl<'input, S0: 'input, S1: 'input> Node<'input, u32> for WindowNode<S0, S1>
impl<'input, P: 'input, S0: 'input, S1: 'input> Node<'input, u32> for WindowNode<P, S0, S1>
where
S0: for<'any_input> Node<'any_input, (), Output = u32>,
S1: for<'any_input> Node<'any_input, (), Output = ImageSlice<'any_input>>,
S1: for<'any_input> Node<'any_input, (), Output = ImageSlice<'any_input, P>>,
{
type Output = ImageWindowIterator<'input>;
type Output = ImageWindowIterator<'input, P>;
#[inline]
fn eval<'node: 'input>(&'node self, input: u32) -> Self::Output {
let radius = self.radius.eval(());
@ -142,13 +325,17 @@ where
}
}
}
impl<S0, S1> WindowNode<S0, S1>
impl<P, S0, S1> WindowNode<P, S0, S1>
where
S0: for<'any_input> Node<'any_input, (), Output = u32>,
S1: for<'any_input> Node<'any_input, (), Output = ImageSlice<'any_input>>,
S1: for<'any_input> Node<'any_input, (), Output = ImageSlice<'any_input, P>>,
{
pub const fn new(radius: S0, image: S1) -> Self {
Self { radius, image }
Self {
radius,
image,
_pixel: core::marker::PhantomData,
}
}
}
/*
@ -159,16 +346,16 @@ fn window_node(input: u32, radius: u32, image: ImageSlice<'input>) -> ImageWindo
}*/
#[derive(Debug, Clone, Copy)]
pub struct ImageWindowIterator<'a> {
image: ImageSlice<'a>,
pub struct ImageWindowIterator<'a, P> {
image: ImageSlice<'a, P>,
radius: u32,
index: u32,
x: u32,
y: u32,
}
impl<'a> ImageWindowIterator<'a> {
fn new(image: ImageSlice<'a>, radius: u32, index: u32) -> Self {
impl<'a, P> ImageWindowIterator<'a, P> {
fn new(image: ImageSlice<'a, P>, radius: u32, index: u32) -> Self {
let start_x = index as i32 % image.width as i32;
let start_y = index as i32 / image.width as i32;
let min_x = (start_x - radius as i32).max(0) as u32;
@ -185,8 +372,8 @@ impl<'a> ImageWindowIterator<'a> {
}
#[cfg(not(target_arch = "spirv"))]
impl<'a> Iterator for ImageWindowIterator<'a> {
type Item = (Color, (i32, i32));
impl<'a, P: Copy> Iterator for ImageWindowIterator<'a, P> {
type Item = (P, (i32, i32));
#[inline]
fn next(&mut self) -> Option<Self::Item> {
let start_x = self.index as i32 % self.image.width as i32;
@ -255,20 +442,24 @@ where
#[cfg(target_arch = "spirv")]
const NOTHING: () = ();
use dyn_any::{DynAny, StaticType};
#[derive(Clone, Debug, PartialEq, DynAny, Copy)]
use dyn_any::{DynAny, StaticType, StaticTypeSized};
#[derive(Clone, Debug, PartialEq, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct ImageSlice<'a> {
pub struct ImageSlice<'a, Pixel> {
pub width: u32,
pub height: u32,
#[cfg(not(target_arch = "spirv"))]
pub data: &'a [Color],
pub data: &'a [Pixel],
#[cfg(target_arch = "spirv")]
pub data: &'a (),
}
impl<P: StaticTypeSized> StaticType for ImageSlice<'_, P> {
type Static = ImageSlice<'static, P::Static>;
}
#[allow(clippy::derivable_impls)]
impl<'a> Default for ImageSlice<'a> {
impl<'a, P> Default for ImageSlice<'a, P> {
#[cfg(not(target_arch = "spirv"))]
fn default() -> Self {
Self {
@ -287,7 +478,25 @@ impl<'a> Default for ImageSlice<'a> {
}
}
impl ImageSlice<'_> {
impl<P: Copy + Debug + Pixel> Raster for ImageSlice<'_, P> {
type Pixel = P;
#[cfg(not(target_arch = "spirv"))]
fn get_pixel(&self, x: u32, y: u32) -> Option<P> {
self.data.get((x + y * self.width) as usize).copied()
}
#[cfg(target_arch = "spirv")]
fn get_pixel(&self, _x: u32, _y: u32) -> P {
Color::default()
}
fn width(&self) -> u32 {
self.width
}
fn height(&self) -> u32 {
self.height
}
}
impl<P> ImageSlice<'_, P> {
#[cfg(not(target_arch = "spirv"))]
pub const fn empty() -> Self {
Self { width: 0, height: 0, data: &[] }
@ -295,28 +504,30 @@ impl ImageSlice<'_> {
}
#[cfg(not(target_arch = "spirv"))]
impl<'a> IntoIterator for ImageSlice<'a> {
type Item = &'a Color;
type IntoIter = core::slice::Iter<'a, Color>;
impl<'a, P: 'a> IntoIterator for ImageSlice<'a, P> {
type Item = &'a P;
type IntoIter = core::slice::Iter<'a, P>;
fn into_iter(self) -> Self::IntoIter {
self.data.iter()
}
}
#[cfg(not(target_arch = "spirv"))]
impl<'a> IntoIterator for &'a ImageSlice<'a> {
type Item = &'a Color;
type IntoIter = core::slice::Iter<'a, Color>;
impl<'a, P: 'a> IntoIterator for &'a ImageSlice<'a, P> {
type Item = &'a P;
type IntoIter = core::slice::Iter<'a, P>;
fn into_iter(self) -> Self::IntoIter {
self.data.iter()
}
}
#[derive(Debug)]
pub struct ImageDimensionsNode;
pub struct ImageDimensionsNode<P> {
_p: PhantomData<P>,
}
#[node_macro::node_fn(ImageDimensionsNode)]
fn dimensions_node(input: ImageSlice<'input>) -> (u32, u32) {
#[node_macro::node_fn(ImageDimensionsNode<_P>)]
fn dimensions_node<_P>(input: ImageSlice<'input, _P>) -> (u32, u32) {
(input.width, input.height)
}
@ -335,27 +546,24 @@ mod image {
mod base64_serde {
//! Basic wrapper for [`serde`] for [`base64`] encoding
use crate::Color;
use super::super::Pixel;
use serde::{Deserialize, Deserializer, Serializer};
pub fn as_base64<S>(key: &[Color], serializer: S) -> Result<S::Ok, S::Error>
pub fn as_base64<S, P: Pixel>(key: &Vec<P>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let u8_data = key
.iter()
.flat_map(|color| [color.r(), color.g(), color.b(), color.a()].into_iter().map(|channel| (channel * 255.).clamp(0., 255.) as u8))
.collect::<Vec<_>>();
let u8_data = key.iter().flat_map(|color| color.to_bytes()).collect::<Vec<_>>();
serializer.serialize_str(&base64::encode(u8_data))
}
pub fn from_base64<'a, D>(deserializer: D) -> Result<Vec<Color>, D::Error>
pub fn from_base64<'a, D, P: Pixel>(deserializer: D) -> Result<Vec<P>, D::Error>
where
D: Deserializer<'a>,
{
use serde::de::Error;
let color_from_chunk = |chunk: &[u8]| Color::from_rgba8(chunk[0], chunk[1], chunk[2], chunk[3]);
let color_from_chunk = |chunk: &[u8]| P::from_bytes(chunk.try_into().unwrap()).clone();
let colors_from_bytes = |bytes: Vec<u8>| bytes.chunks_exact(4).map(color_from_chunk).collect();
@ -366,16 +574,42 @@ mod image {
}
}
#[derive(Clone, Debug, PartialEq, DynAny, Default, specta::Type)]
#[derive(Clone, Debug, PartialEq, Default, specta::Type)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Image {
pub struct Image<P: Pixel> {
pub width: u32,
pub height: u32,
#[cfg_attr(feature = "serde", serde(serialize_with = "base64_serde::as_base64", deserialize_with = "base64_serde::from_base64"))]
pub data: Vec<Color>,
pub data: Vec<P>,
}
impl Hash for Image {
impl<P: StaticTypeSized + Pixel> StaticType for Image<P>
where
P::Static: Pixel,
{
type Static = Image<P::Static>;
}
impl<P: Copy + Pixel> Raster for Image<P> {
type Pixel = P;
fn get_pixel(&self, x: u32, y: u32) -> Option<P> {
self.data.get((x + y * self.width) as usize).copied()
}
fn width(&self) -> u32 {
self.width
}
fn height(&self) -> u32 {
self.height
}
}
impl<P: Copy + Pixel> RasterMut for Image<P> {
fn get_pixel_mut(&mut self, x: u32, y: u32) -> Option<&mut P> {
self.data.get_mut((x + y * self.width) as usize)
}
}
impl<P: Hash + Pixel> Hash for Image<P> {
fn hash<H: Hasher>(&self, state: &mut H) {
const HASH_SAMPLES: u64 = 1000;
let data_length = self.data.len() as u64;
@ -387,7 +621,7 @@ mod image {
}
}
impl Image {
impl<P: Pixel> Image<P> {
pub const fn empty() -> Self {
Self {
width: 0,
@ -396,7 +630,7 @@ mod image {
}
}
pub fn new(width: u32, height: u32, color: Color) -> Self {
pub fn new(width: u32, height: u32, color: P) -> Self {
Self {
width,
height,
@ -404,51 +638,66 @@ mod image {
}
}
pub fn as_slice(&self) -> ImageSlice {
pub fn as_slice(&self) -> ImageSlice<P> {
ImageSlice {
width: self.width,
height: self.height,
data: self.data.as_slice(),
}
}
}
pub fn get_mut(&mut self, x: u32, y: u32) -> Option<&mut Color> {
self.data.get_mut((y * self.width + x) as usize)
}
pub fn get(&self, x: u32, y: u32) -> Option<&Color> {
self.data.get((y * self.width + x) as usize)
}
impl Image<Color> {
/// Generate Image from some frontend image data (the canvas pixels as u8s in a flat array)
pub fn from_image_data(image_data: &[u8], width: u32, height: u32) -> Self {
let data = image_data.chunks_exact(4).map(|v| Color::from_rgba8(v[0], v[1], v[2], v[3])).collect();
let data = image_data.chunks_exact(4).map(|v| Color::from_rgba8_srgb(v[0], v[1], v[2], v[3])).collect();
Image { width, height, data }
}
}
use super::*;
impl<P: Alpha + RGB> Image<P>
where
P::ColorChannel: Linear,
{
/// Flattens the image into interleaved channel values, each cast to a u8
pub fn into_flat_u8(self) -> (Vec<u8>, u32, u32) {
let Image { width, height, data } = self;
let result_bytes = data.into_iter().flat_map(|color| color.to_rgba8()).collect();
let to_gamma = |x| SRGBGammaFloat::from_linear(x);
let to_u8 = |x| (num::cast::<_, f32>(x).unwrap() * 255.) as u8;
let result_bytes = data
.into_iter()
.flat_map(|color| {
[
to_u8(to_gamma(color.r())),
to_u8(to_gamma(color.g())),
to_u8(to_gamma(color.b())),
(num::cast::<_, f32>(color.a()).unwrap() * 255.) as u8,
]
})
.collect();
(result_bytes, width, height)
}
}
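
into_flat_u8 is the inverse of the loading path: each color channel is re-encoded with the sRGB gamma (alpha stays linear) and packed as four bytes per pixel for the frontend. A small usage sketch under the same assumption that graphene_core is available:

use graphene_core::raster::{Color, Image};

fn main() {
	let image = Image::new(1, 1, Color::WHITE);
	// Four bytes (RGBA) per pixel, with the color channels re-encoded to sRGB gamma for display.
	let (bytes, width, height) = image.into_flat_u8();
	assert_eq!(bytes.len(), (width * height * 4) as usize);
}
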
impl IntoIterator for Image {
type Item = Color;
type IntoIter = alloc::vec::IntoIter<Color>;
impl<P: Pixel> IntoIterator for Image<P> {
type Item = P;
type IntoIter = alloc::vec::IntoIter<P>;
fn into_iter(self) -> Self::IntoIter {
self.data.into_iter()
}
}
#[derive(Debug, Clone, Copy, Default)]
pub struct ImageRefNode;
pub struct ImageRefNode<P> {
_p: PhantomData<P>,
}
#[node_macro::node_fn(ImageRefNode)]
fn image_ref_node(image: &'input Image) -> ImageSlice<'input> {
#[node_macro::node_fn(ImageRefNode<_P>)]
fn image_ref_node<_P: Pixel>(image: &'input Image<_P>) -> ImageSlice<'input, _P> {
image.as_slice()
}
@ -469,7 +718,7 @@ mod image {
}
#[node_macro::node_fn(MapImageSliceNode)]
fn map_node(input: (u32, u32), data: Vec<Color>) -> Image {
fn map_node<P: Pixel>(input: (u32, u32), data: Vec<P>) -> Image<P> {
Image {
width: input.0,
height: input.1,
@ -477,14 +726,56 @@ mod image {
}
}
#[derive(Clone, Debug, PartialEq, DynAny, Default, specta::Type)]
#[derive(Clone, Debug, PartialEq, Default, specta::Type)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct ImageFrame {
pub image: Image,
pub struct ImageFrame<P: Pixel> {
pub image: Image<P>,
pub transform: DAffine2,
}
impl ImageFrame {
impl<P: Debug + Copy + Pixel> Sample for ImageFrame<P> {
type Pixel = P;
fn sample(&self, pos: DVec2) -> Option<Self::Pixel> {
let image_size = DVec2::new(self.image.width() as f64, self.image.height() as f64);
let pos = (DAffine2::from_scale(image_size) * self.transform.inverse()).transform_point2(pos);
if pos.x < 0. || pos.y < 0. || pos.x >= image_size.x || pos.y >= image_size.y {
return None;
}
self.image.get_pixel(pos.x as u32, pos.y as u32)
}
}
impl<P: Copy + Pixel> Raster for ImageFrame<P> {
type Pixel = P;
fn width(&self) -> u32 {
self.image.width()
}
fn height(&self) -> u32 {
self.image.height()
}
fn get_pixel(&self, x: u32, y: u32) -> Option<Self::Pixel> {
self.image.get_pixel(x, y)
}
}
impl<P: Copy + Pixel> RasterMut for ImageFrame<P> {
fn get_pixel_mut(&mut self, x: u32, y: u32) -> Option<&mut Self::Pixel> {
self.image.get_pixel_mut(x, y)
}
}
impl<P: StaticTypeSized + Pixel> StaticType for ImageFrame<P>
where
P::Static: Pixel,
{
type Static = ImageFrame<P::Static>;
}
impl<P: Copy + Pixel> ImageFrame<P> {
pub const fn empty() -> Self {
Self {
image: Image::empty(),
@ -492,12 +783,12 @@ mod image {
}
}
pub fn get_mut(&mut self, x: usize, y: usize) -> &mut Color {
pub fn get_mut(&mut self, x: usize, y: usize) -> &mut P {
&mut self.image.data[y * (self.image.width as usize) + x]
}
/// Clamps the provided point to ((0, 0), (ImageSize.x, ImageSize.y)) and returns the closest pixel
pub fn sample(&self, position: DVec2) -> Color {
pub fn sample(&self, position: DVec2) -> P {
let x = position.x.clamp(0., self.image.width as f64 - 1.) as usize;
let y = position.y.clamp(0., self.image.height as f64 - 1.) as usize;
@ -505,13 +796,13 @@ mod image {
}
}
impl AsRef<ImageFrame> for ImageFrame {
fn as_ref(&self) -> &ImageFrame {
impl<P: Pixel> AsRef<ImageFrame<P>> for ImageFrame<P> {
fn as_ref(&self) -> &ImageFrame<P> {
self
}
}
impl Hash for ImageFrame {
impl<P: Hash + Pixel> Hash for ImageFrame<P> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.image.hash(state);
self.transform.to_cols_array().iter().for_each(|x| x.to_bits().hash(state))

@ -10,18 +10,18 @@ use spirv_std::num_traits::float::Float;
#[cfg(target_arch = "spirv")]
use spirv_std::num_traits::Euclid;
#[cfg(feature = "gpu")]
use bytemuck::{Pod, Zeroable};
use super::{Alpha, AssociatedAlpha, Luminance, Rec709Primaries, RGB, SRGB};
/// Structure that represents a color.
/// Internally alpha is stored as `f32` that ranges from `0.0` (transparent) to `1.0` (opaque).
/// The other components (RGB) are stored as `f32` that range from `0.0` up to `f32::MAX`,
/// the values encode the brightness of each channel proportional to the light intensity in cd/m² (nits) in HDR, and `0.0` (black) to `1.0` (white) in SDR color.
#[repr(C)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "gpu", derive(Pod, Zeroable))]
#[cfg_attr(feature = "std", derive(specta::Type))]
#[derive(Debug, Default, Clone, Copy, PartialEq, DynAny)]
#[derive(Debug, Default, Clone, Copy, PartialEq, DynAny, Pod, Zeroable)]
pub struct Color {
red: f32,
green: f32,
@ -39,6 +39,50 @@ impl Hash for Color {
}
}
impl RGB for Color {
type ColorChannel = f32;
fn red(&self) -> f32 {
self.red
}
fn green(&self) -> f32 {
self.green
}
fn blue(&self) -> f32 {
self.blue
}
}
impl Alpha for Color {
type AlphaChannel = f32;
fn alpha(&self) -> f32 {
self.alpha
}
fn multiply_alpha(&self, alpha: Self::AlphaChannel) -> Self {
Self {
red: self.red * alpha,
green: self.green * alpha,
blue: self.blue * alpha,
alpha: self.alpha * alpha,
}
}
}
impl AssociatedAlpha for Color {
fn to_unassociated<Out: super::UnassociatedAlpha>(&self) -> Out {
todo!()
}
}
impl Luminance for Color {
type LuminanceChannel = f32;
fn luminance(&self) -> f32 {
0.2126 * self.red + 0.7152 * self.green + 0.0722 * self.blue
}
}
impl Rec709Primaries for Color {}
impl SRGB for Color {}
impl Color {
pub const BLACK: Color = Color::from_rgbf32_unchecked(0., 0., 0.);
pub const WHITE: Color = Color::from_rgbf32_unchecked(1., 1., 1.);
@ -97,8 +141,8 @@ impl Color {
/// let color2 = Color::from_rgba8(0x72, 0x67, 0x62, 0xFF);
/// assert!(color == color2)
/// ```
pub fn from_rgb8(red: u8, green: u8, blue: u8) -> Color {
Color::from_rgba8(red, green, blue, 255)
pub fn from_rgb8_srgb(red: u8, green: u8, blue: u8) -> Color {
Color::from_rgba8_srgb(red, green, blue, 255)
}
/// Return an SDR `Color` given RGBA channels from `0` to `255`.
@ -108,7 +152,7 @@ impl Color {
/// use graphene_core::raster::color::Color;
/// let color = Color::from_rgba8(0x72, 0x67, 0x62, 0x61);
/// ```
pub fn from_rgba8(red: u8, green: u8, blue: u8, alpha: u8) -> Color {
pub fn from_rgba8_srgb(red: u8, green: u8, blue: u8, alpha: u8) -> Color {
let map_range = |int_color| int_color as f32 / 255.0;
Color {
red: map_range(red),
@ -116,6 +160,7 @@ impl Color {
blue: map_range(blue),
alpha: map_range(alpha),
}
.to_linear_srgb()
}
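
Appending .to_linear_srgb() here is the behavioral heart of the commit: the 8-bit constructors now return linear-light channel values instead of the raw gamma-encoded ones. A rough spot-check, assuming to_linear_srgb applies the same transfer function as SRGBGammaFloat earlier in this diff:

use graphene_core::raster::{Color, RGB};

fn main() {
	// 0x80 is ~0.502 gamma-encoded, which decodes to roughly 0.216 in linear light.
	let mid_gray = Color::from_rgba8_srgb(0x80, 0x80, 0x80, 0xFF);
	assert!((mid_gray.r() - 0.216).abs() < 0.01);
}
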
/// Create a [Color] from a hue, saturation, lightness and alpha (all between 0 and 1)
@ -527,7 +572,7 @@ impl Color {
let b = u8::from_str_radix(&color_str[4..6], 16).ok()?;
let a = u8::from_str_radix(&color_str[6..8], 16).ok()?;
Some(Color::from_rgba8(r, g, b, a))
Some(Color::from_rgba8_srgb(r, g, b, a))
}
/// Creates a color from a 6-character RGB hex string (without a # prefix).
@ -544,7 +589,7 @@ impl Color {
let g = u8::from_str_radix(&color_str[2..4], 16).ok()?;
let b = u8::from_str_radix(&color_str[4..6], 16).ok()?;
Some(Color::from_rgb8(r, g, b))
Some(Color::from_rgb8_srgb(r, g, b))
}
/// Linearly interpolates between two colors based on t.
@ -676,7 +721,7 @@ fn hsl_roundtrip() {
(82, 84, 84),
(255, 255, 178),
] {
let col = Color::from_rgb8(red, green, blue);
let col = Color::from_rgb8_srgb(red, green, blue);
let [hue, saturation, lightness, alpha] = col.to_hsla();
let result = Color::from_hsla(hue, saturation, lightness, alpha);
assert!((col.r() - result.r()) < f32::EPSILON * 100.);

@ -3,6 +3,7 @@ use glam::DAffine2;
use glam::DVec2;
use crate::raster::ImageFrame;
use crate::raster::Pixel;
use crate::vector::VectorData;
use crate::Node;
@ -11,6 +12,12 @@ pub trait Transform {
fn local_pivot(&self, pivot: DVec2) -> DVec2 {
pivot
}
fn decompose_scale(&self) -> DVec2 {
DVec2::new(
self.transform().transform_vector2((1., 0.).into()).length(),
self.transform().transform_vector2((0., 1.).into()).length(),
)
}
}
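
decompose_scale recovers the per-axis scale of an affine transform by measuring the transformed unit basis vectors; the raster nodes below use it to size stencils and foregrounds without needing pixel dimensions. A standalone sketch of the same computation with glam (already a dependency):

use glam::{DAffine2, DVec2};

fn main() {
	// Rotation and translation leave the lengths of the transformed basis vectors untouched,
	// so measuring them recovers the original scale factors.
	let transform = DAffine2::from_scale_angle_translation(DVec2::new(3., 2.), 0.7, DVec2::new(10., -4.));
	let scale = DVec2::new(
		transform.transform_vector2(DVec2::new(1., 0.)).length(),
		transform.transform_vector2(DVec2::new(0., 1.)).length(),
	);
	assert!((scale - DVec2::new(3., 2.)).length() < 1e-9);
}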
pub trait TransformMut: Transform {
@ -20,17 +27,17 @@ pub trait TransformMut: Transform {
}
}
impl Transform for ImageFrame {
impl<P: Pixel> Transform for ImageFrame<P> {
fn transform(&self) -> DAffine2 {
self.transform
}
}
impl Transform for &ImageFrame {
impl<P: Pixel> Transform for &ImageFrame<P> {
fn transform(&self) -> DAffine2 {
self.transform
}
}
impl TransformMut for ImageFrame {
impl<P: Pixel> TransformMut for ImageFrame<P> {
fn transform_mut(&mut self) -> &mut DAffine2 {
&mut self.transform
}

@ -396,7 +396,7 @@ impl Default for Stroke {
fn default() -> Self {
Self {
weight: 0.,
color: Some(Color::from_rgba8(0, 0, 0, 255)),
color: Some(Color::from_rgba8_srgb(0, 0, 0, 255)),
dash_lengths: Vec::new(),
dash_offset: 0.,
line_cap: LineCap::Butt,

@ -3,7 +3,7 @@ use dyn_any::{DynAny, Upcast};
use dyn_clone::DynClone;
pub use glam::{DAffine2, DVec2};
use graphene_core::raster::{BlendMode, LuminanceCalculation};
use graphene_core::{Node, Type};
use graphene_core::{Color, Node, Type};
use std::hash::Hash;
pub use std::sync::Arc;
@ -23,9 +23,9 @@ pub enum TaggedValue {
DVec2(DVec2),
OptionalDVec2(Option<DVec2>),
DAffine2(DAffine2),
Image(graphene_core::raster::Image),
RcImage(Option<Arc<graphene_core::raster::Image>>),
ImageFrame(graphene_core::raster::ImageFrame),
Image(graphene_core::raster::Image<Color>),
RcImage(Option<Arc<graphene_core::raster::Image<Color>>>),
ImageFrame(graphene_core::raster::ImageFrame<Color>),
Color(graphene_core::raster::color::Color),
Subpaths(Vec<bezier_rs::Subpath<graphene_core::uuid::ManipulatorGroupId>>),
RcSubpath(Arc<bezier_rs::Subpath<graphene_core::uuid::ManipulatorGroupId>>),
@ -168,9 +168,9 @@ impl<'a> TaggedValue {
TaggedValue::Bool(_) => concrete!(bool),
TaggedValue::DVec2(_) => concrete!(DVec2),
TaggedValue::OptionalDVec2(_) => concrete!(Option<DVec2>),
TaggedValue::Image(_) => concrete!(graphene_core::raster::Image),
TaggedValue::RcImage(_) => concrete!(Option<Arc<graphene_core::raster::Image>>),
TaggedValue::ImageFrame(_) => concrete!(graphene_core::raster::ImageFrame),
TaggedValue::Image(_) => concrete!(graphene_core::raster::Image<Color>),
TaggedValue::RcImage(_) => concrete!(Option<Arc<graphene_core::raster::Image<Color>>>),
TaggedValue::ImageFrame(_) => concrete!(graphene_core::raster::ImageFrame<Color>),
TaggedValue::Color(_) => concrete!(graphene_core::raster::Color),
TaggedValue::Subpaths(_) => concrete!(Vec<bezier_rs::Subpath<graphene_core::uuid::ManipulatorGroupId>>),
TaggedValue::RcSubpath(_) => concrete!(Arc<bezier_rs::Subpath<graphene_core::uuid::ManipulatorGroupId>>),

@ -1,7 +1,7 @@
use std::marker::PhantomData;
use glam::{DAffine2, DVec2};
use graphene_core::raster::{Color, Image, ImageFrame};
use graphene_core::raster::{Color, Image, ImageFrame, RasterMut};
use graphene_core::transform::TransformMut;
use graphene_core::vector::VectorData;
use graphene_core::Node;
@ -93,7 +93,7 @@ fn erase(input: (Color, Color), flow: f64) -> Color {
}
#[node_fn(BrushTextureNode)]
fn brush_texture(diameter: f64, color: Color, hardness: f64, flow: f64) -> ImageFrame {
fn brush_texture(diameter: f64, color: Color, hardness: f64, flow: f64) -> ImageFrame<Color> {
// Diameter
let radius = diameter / 2.;
// TODO: Remove the 4px padding after figuring out why the brush stamp gets randomly offset by 1px up/down/left/right when clicking with the Brush tool
@ -128,7 +128,7 @@ fn brush_texture(diameter: f64, color: Color, hardness: f64, flow: f64) -> Image
let pixel_fill = summation / MULTISAMPLE_GRID.len() as f64;
let pixel = image.get_mut(x, y).unwrap();
let pixel = image.get_pixel_mut(x, y).unwrap();
*pixel = color.apply_opacity(pixel_fill as f32);
}
}
@ -187,7 +187,7 @@ mod test {
assert_eq!(image.image.height, size.ceil() as u32 + 4);
assert_eq!(image.transform, DAffine2::from_scale_angle_translation(DVec2::splat(size.ceil() + 4.), 0., -DVec2::splat(size / 2.)));
// center pixel should be BLACK
assert_eq!(image.image.get(11, 11), Some(&Color::BLACK));
assert_eq!(image.image.get_pixel(11, 11), Some(Color::BLACK));
}
#[test]

@ -1,11 +1,12 @@
use dyn_any::{DynAny, StaticType, StaticTypeSized};
use glam::{BVec2, DAffine2, DVec2};
use graphene_core::raster::{Color, Image, ImageFrame};
use graphene_core::raster::{Alpha, Channel, Color, Image, ImageFrame, Luminance, Pixel, RasterMut, Sample};
use graphene_core::transform::Transform;
use graphene_core::value::{ClonedNode, ValueNode};
use graphene_core::Node;
use std::fmt::Debug;
use std::marker::PhantomData;
use std::path::Path;
@ -93,10 +94,12 @@ pub fn export_image_node<'i, 's: 'i>() -> impl Node<'i, 's, (Image, &'i str), Ou
}
*/
pub struct DownresNode;
pub struct DownresNode<P> {
_p: PhantomData<P>,
}
#[node_macro::node_fn(DownresNode)]
fn downres(image_frame: ImageFrame) -> ImageFrame {
#[node_macro::node_fn(DownresNode<_P>)]
fn downres<_P: Pixel>(image_frame: ImageFrame<_P>) -> ImageFrame<_P> {
let target_width = (image_frame.transform.transform_vector2((1., 0.).into()).length() as usize).min(image_frame.image.width as usize);
let target_height = (image_frame.transform.transform_vector2((0., 1.).into()).length() as usize).min(image_frame.image.height as usize);
@ -121,43 +124,22 @@ fn downres(image_frame: ImageFrame) -> ImageFrame {
}
#[derive(Debug, Clone, Copy)]
pub struct MapImageNode<MapFn> {
pub struct MapImageNode<P, MapFn> {
map_fn: MapFn,
_p: PhantomData<P>,
}
#[node_macro::node_fn(MapImageNode)]
fn map_image<MapFn>(image: Image, map_fn: &'any_input MapFn) -> Image
#[node_macro::node_fn(MapImageNode<_P>)]
fn map_image<MapFn, _P, Img: RasterMut<Pixel = _P>>(image: Img, map_fn: &'any_input MapFn) -> Img
where
MapFn: for<'any_input> Node<'any_input, Color, Output = Color> + 'input,
MapFn: for<'any_input> Node<'any_input, _P, Output = _P> + 'input,
{
let mut image = image;
for pixel in &mut image.data {
*pixel = map_fn.eval(*pixel);
}
image.map_pixels(|c| map_fn.eval(c));
image
}
#[derive(Debug, Clone, Copy)]
pub struct MapImageFrameNode<MapFn> {
map_fn: MapFn,
}
impl<MapFn: dyn_any::StaticTypeSized> StaticType for MapImageFrameNode<MapFn> {
type Static = MapImageFrameNode<MapFn::Static>;
}
#[node_macro::node_fn(MapImageFrameNode)]
fn map_image<MapFn>(mut image_frame: ImageFrame, map_fn: &'any_input MapFn) -> ImageFrame
where
MapFn: for<'any_input> Node<'any_input, Color, Output = Color> + 'input,
{
for pixel in &mut image_frame.image.data {
*pixel = map_fn.eval(*pixel);
}
image_frame
}
#[derive(Debug, Clone, DynAny)]
pub struct AxisAlignedBbox {
start: DVec2,
@ -227,33 +209,48 @@ fn compute_transformed_bounding_box(transform: DAffine2) -> Bbox {
}
#[derive(Debug, Clone, Copy)]
pub struct MaskImageNode<Mask> {
mask: Mask,
pub struct MaskImageNode<P, S, Stencil> {
stencil: Stencil,
_p: PhantomData<P>,
_s: PhantomData<S>,
}
#[node_macro::node_fn(MaskImageNode)]
fn mask_image(mut image: ImageFrame, mask: ImageFrame) -> ImageFrame {
let image_size = DVec2::new(image.image.width as f64, image.image.height as f64);
let mask_size = DVec2::new(mask.image.width as f64, mask.image.height as f64);
#[node_macro::node_fn(MaskImageNode<_P, _S>)]
fn mask_image<
// _P is the color of the input image. It must have an alpha channel because that is going to
// be modified by the mask
_P: Copy + Alpha,
// _S is the color of the stencil. It must have a luminance channel because that is used to
// mask the input image
_S: Luminance,
// Input image
Input: Transform + RasterMut<Pixel = _P>,
// Stencil
Stencil: Sample<Pixel = _S> + Transform,
>(
mut image: Input,
stencil: Stencil,
) -> Input {
let image_size = DVec2::new(image.width() as f64, image.height() as f64);
let mask_size = stencil.transform().decompose_scale();
if mask_size == DVec2::ZERO {
return image;
}
// Transforms a point from the background image to the foreground image
let bg_to_fg = DAffine2::from_scale(mask_size) * mask.transform.inverse() * image.transform * DAffine2::from_scale(1. / image_size);
let bg_to_fg = DAffine2::from_scale(mask_size) * stencil.transform().inverse() * image.transform() * DAffine2::from_scale(1. / image_size);
for y in 0..image.image.height {
for x in 0..image.image.width {
for y in 0..image.height() {
for x in 0..image.width() {
let image_point = DVec2::new(x as f64, y as f64);
let mut mask_point = bg_to_fg.transform_point2(image_point);
mask_point = mask_point.clamp(DVec2::ZERO, mask_size);
let image_pixel = image.get_mut(x as usize, y as usize);
let mask_pixel = mask.sample(mask_point);
let alpha = image_pixel.a() * mask_pixel.r();
*image_pixel = Color::from_rgbaf32(image_pixel.r(), image_pixel.g(), image_pixel.b(), alpha).unwrap();
let image_pixel = image.get_pixel_mut(x as u32, y as u32).unwrap();
if let Some(mask_pixel) = stencil.sample(mask_point) {
image_pixel.multiply_alpha(mask_pixel.l().to_channel());
}
}
}
@ -261,18 +258,19 @@ fn mask_image(mut image: ImageFrame, mask: ImageFrame) -> ImageFrame {
}
#[derive(Debug, Clone, Copy)]
pub struct BlendImageTupleNode<MapFn> {
pub struct BlendImageTupleNode<P, MapFn> {
map_fn: MapFn,
_p: PhantomData<P>,
}
impl<MapFn: StaticTypeSized> StaticType for BlendImageTupleNode<MapFn> {
type Static = BlendImageTupleNode<MapFn::Static>;
impl<MapFn: StaticTypeSized, P: StaticTypeSized> StaticType for BlendImageTupleNode<P, MapFn> {
type Static = BlendImageTupleNode<P::Static, MapFn::Static>;
}
#[node_macro::node_fn(BlendImageTupleNode)]
fn blend_image_tuple<MapFn>(images: (ImageFrame, ImageFrame), map_fn: &'any_input MapFn) -> ImageFrame
#[node_macro::node_fn(BlendImageTupleNode<_P>)]
fn blend_image_tuple<_P: Pixel + Debug, MapFn>(images: (ImageFrame<_P>, ImageFrame<_P>), map_fn: &'any_input MapFn) -> ImageFrame<_P>
where
MapFn: for<'any_input> Node<'any_input, (Color, Color), Output = Color> + 'input + Clone,
MapFn: for<'any_input> Node<'any_input, (_P, _P), Output = _P> + 'input + Clone,
{
let (background, foreground) = images;
@ -281,30 +279,34 @@ where
}
#[derive(Debug, Clone, Copy)]
pub struct BlendImageNode<Background, MapFn> {
pub struct BlendImageNode<P, Background, MapFn> {
background: Background,
map_fn: MapFn,
_p: PhantomData<P>,
}
impl<Background: StaticTypeSized, MapFn: StaticTypeSized> StaticType for BlendImageNode<Background, MapFn> {
type Static = BlendImageNode<Background::Static, MapFn::Static>;
impl<P: StaticTypeSized, Background: StaticTypeSized, MapFn: StaticTypeSized> StaticType for BlendImageNode<P, Background, MapFn> {
type Static = BlendImageNode<P::Static, Background::Static, MapFn::Static>;
}
// TODO: Implement proper blending
#[node_macro::node_fn(BlendImageNode)]
fn blend_image<MapFn, Frame: AsRef<ImageFrame>>(foreground: Frame, mut background: ImageFrame, map_fn: &'any_input MapFn) -> ImageFrame
#[node_macro::node_fn(BlendImageNode<_P>)]
fn blend_image<_P: Clone, MapFn, Frame: Sample<Pixel = _P> + Transform, Background: RasterMut<Pixel = _P> + Transform>(
foreground: Frame,
mut background: Background,
map_fn: &'any_input MapFn,
) -> Background
where
MapFn: for<'any_input> Node<'any_input, (Color, Color), Output = Color> + 'input,
MapFn: for<'any_input> Node<'any_input, (_P, _P), Output = _P> + 'input,
{
let foreground = foreground.as_ref();
let foreground_size = DVec2::new(foreground.image.width as f64, foreground.image.height as f64);
let background_size = DVec2::new(background.image.width as f64, background.image.height as f64);
let foreground_size = foreground.transform().decompose_scale();
let background_size = DVec2::new(background.width() as f64, background.height() as f64);
// Transforms a point from the background image to the foreground image
let bg_to_fg = DAffine2::from_scale(foreground_size) * foreground.transform.inverse() * background.transform * DAffine2::from_scale(1. / background_size);
let bg_to_fg = background.transform() * DAffine2::from_scale(1. / background_size);
// Footprint of the foreground image (0,0) (1, 1) in the background image space
let bg_aabb = compute_transformed_bounding_box(background.transform.inverse() * foreground.transform).axis_aligned_bbox();
let bg_aabb = compute_transformed_bounding_box(background.transform().inverse() * foreground.transform()).axis_aligned_bbox();
// Clamp the foreground image to the background image
let start = (bg_aabb.start * background_size).max(DVec2::ZERO).as_uvec2();
@ -314,14 +316,12 @@ where
for x in start.x..end.x {
let bg_point = DVec2::new(x as f64, y as f64);
let fg_point = bg_to_fg.transform_point2(bg_point);
if !((fg_point.cmpge(DVec2::ZERO) & fg_point.cmple(foreground_size)) == BVec2::new(true, true)) {
continue;
if let Some(src_pixel) = foreground.sample(fg_point) {
if let Some(dst_pixel) = background.get_pixel_mut(x, y) {
*dst_pixel = map_fn.eval((src_pixel, dst_pixel.clone()));
}
}
let dst_pixel = background.get_mut(x as usize, y as usize);
let src_pixel = foreground.sample(fg_point);
*dst_pixel = map_fn.eval((src_pixel, *dst_pixel));
}
}
@ -347,12 +347,13 @@ fn merge_bounding_box_node<_Data: Transform>(input: (Option<AxisAlignedBbox>, _D
}
#[derive(Clone, Debug, PartialEq)]
pub struct EmptyImageNode<FillColor> {
pub struct EmptyImageNode<P, FillColor> {
pub color: FillColor,
_p: PhantomData<P>,
}
#[node_macro::node_fn(EmptyImageNode)]
fn empty_image(transform: DAffine2, color: Color) -> ImageFrame {
#[node_macro::node_fn(EmptyImageNode<_P>)]
fn empty_image<_P: Pixel>(transform: DAffine2, color: _P) -> ImageFrame<_P> {
let width = transform.transform_vector2(DVec2::new(1., 0.)).length() as u32;
let height = transform.transform_vector2(DVec2::new(0., 1.)).length() as u32;
@ -361,12 +362,13 @@ fn empty_image(transform: DAffine2, color: Color) -> ImageFrame {
}
#[derive(Debug, Clone, Copy)]
pub struct ImaginateNode<E> {
pub struct ImaginateNode<P, E> {
cached: E,
_p: PhantomData<P>,
}
#[node_macro::node_fn(ImaginateNode)]
fn imaginate(image_frame: ImageFrame, cached: Option<std::sync::Arc<graphene_core::raster::Image>>) -> ImageFrame {
#[node_macro::node_fn(ImaginateNode<_P>)]
fn imaginate<_P: Pixel>(image_frame: ImageFrame<_P>, cached: Option<std::sync::Arc<graphene_core::raster::Image<_P>>>) -> ImageFrame<_P> {
let cached_image = cached.map(|mut x| std::sync::Arc::make_mut(&mut x).clone()).unwrap_or(image_frame.image);
ImageFrame {
image: cached_image,
@ -375,11 +377,12 @@ fn imaginate(image_frame: ImageFrame, cached: Option<std::sync::Arc<graphene_cor
}
#[derive(Debug, Clone, Copy)]
pub struct ImageFrameNode<Transform> {
pub struct ImageFrameNode<P, Transform> {
transform: Transform,
_p: PhantomData<P>,
}
#[node_macro::node_fn(ImageFrameNode)]
fn image_frame(image: Image, transform: DAffine2) -> graphene_core::raster::ImageFrame {
#[node_macro::node_fn(ImageFrameNode<_P>)]
fn image_frame<_P: Pixel>(image: Image<_P>, transform: DAffine2) -> graphene_core::raster::ImageFrame<_P> {
graphene_core::raster::ImageFrame { image, transform }
}
#[cfg(test)]

@ -87,25 +87,25 @@ macro_rules! raster_node {
|args| {
let node = construct_node!(args, $path, [$($type),*]);
let map_node = graphene_std::raster::MapImageNode::new(graphene_core::value::ValueNode::new(node));
let any: DynAnyNode<Image, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(map_node));
let any: DynAnyNode<Image<Color>, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(map_node));
Box::pin(any)
},
{
let params = vec![$(value_fn!($type)),*];
NodeIOTypes::new(concrete!(Image), concrete!(Image), params)
NodeIOTypes::new(concrete!(Image<Color>), concrete!(Image<Color>), params)
},
),
(
NodeIdentifier::new(stringify!($path)),
|args| {
let node = construct_node!(args, $path, [$($type),*]);
let map_node = graphene_std::raster::MapImageFrameNode::new(graphene_core::value::ValueNode::new(node));
let any: DynAnyNode<ImageFrame, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(map_node));
let map_node = graphene_std::raster::MapImageNode::new(graphene_core::value::ValueNode::new(node));
let any: DynAnyNode<ImageFrame<Color>, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(map_node));
Box::pin(any)
},
{
let params = vec![$(value_fn!($type)),*];
NodeIOTypes::new(concrete!(ImageFrame), concrete!(ImageFrame), params)
NodeIOTypes::new(concrete!(ImageFrame<Color>), concrete!(ImageFrame<Color>), params)
},
)
]
@ -128,7 +128,7 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
register_node!(graphene_core::structural::ConsNode<_, _>, input: &u32, params: [&u32]),
register_node!(graphene_core::ops::AddNode, input: (u32, u32), params: []),
register_node!(graphene_core::ops::AddNode, input: (u32, &u32), params: []),
register_node!(graphene_core::ops::CloneNode<_>, input: &ImageFrame, params: []),
register_node!(graphene_core::ops::CloneNode<_>, input: &ImageFrame<Color>, params: []),
register_node!(graphene_core::ops::AddParameterNode<_>, input: u32, params: [u32]),
register_node!(graphene_core::ops::AddParameterNode<_>, input: &u32, params: [u32]),
register_node!(graphene_core::ops::AddParameterNode<_>, input: u32, params: [&u32]),
@ -137,12 +137,12 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
register_node!(graphene_core::ops::AddParameterNode<_>, input: &f64, params: [f64]),
register_node!(graphene_core::ops::AddParameterNode<_>, input: f64, params: [&f64]),
register_node!(graphene_core::ops::AddParameterNode<_>, input: &f64, params: [&f64]),
register_node!(graphene_core::ops::SomeNode, input: ImageFrame, params: []),
register_node!(graphene_std::raster::DownresNode, input: ImageFrame, params: []),
register_node!(graphene_std::raster::MaskImageNode<_>, input: ImageFrame, params: [ImageFrame]),
register_node!(graphene_std::raster::EmptyImageNode<_>, input: DAffine2, params: [Color]),
register_node!(graphene_core::ops::SomeNode, input: ImageFrame<Color>, params: []),
register_node!(graphene_std::raster::DownresNode<_>, input: ImageFrame<Color>, params: []),
register_node!(graphene_std::raster::MaskImageNode<_, _, _>, input: ImageFrame<Color>, params: [ImageFrame<Color>]),
register_node!(graphene_std::raster::EmptyImageNode<_, _>, input: DAffine2, params: [Color]),
#[cfg(feature = "gpu")]
register_node!(graphene_std::executor::MapGpuSingleImageNode<_>, input: Image, params: [String]),
register_node!(graphene_std::executor::MapGpuSingleImageNode<_>, input: Image<Color>, params: [String]),
vec![(
NodeIdentifier::new("graphene_core::structural::ComposeNode<_, _, _>"),
|args| {
@ -155,8 +155,8 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
vec![Type::Fn(Box::new(generic!(T)), Box::new(generic!(V))), Type::Fn(Box::new(generic!(V)), Box::new(generic!(U)))],
),
)],
//register_node!(graphene_std::brush::ReduceNode<_, _>, input: core::slice::Iter<ImageFrame>, params: [ImageFrame, &ValueNode<BlendImageTupleNode<ValueNode<BlendNode<ClonedNode<BlendMode>, ClonedNode<f64>>>>>]),
//register_node!(graphene_std::brush::ReduceNode<_, _>, input: core::slice::Iter<ImageFrame>, params: [AxisAlignedBbox, &MergeBoundingBoxNode]),
//register_node!(graphene_std::brush::ReduceNode<_, _>, input: core::slice::Iter<ImageFrame<Color>>, params: [ImageFrame<Color>, &ValueNode<BlendImageTupleNode<ValueNode<BlendNode<ClonedNode<BlendMode>, ClonedNode<f64>>>>>]),
//register_node!(graphene_std::brush::ReduceNode<_, _>, input: core::slice::Iter<ImageFrame<Color>>, params: [AxisAlignedBbox, &MergeBoundingBoxNode]),
register_node!(graphene_std::brush::IntoIterNode<_>, input: &Vec<DVec2>, params: []),
vec![(
NodeIdentifier::new("graphene_std::brush::BrushNode"),
@ -193,23 +193,27 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
},
NodeIOTypes::new(
concrete!(()),
concrete!(ImageFrame),
concrete!(ImageFrame<Color>),
vec![value_fn!(Vec<DVec2>), value_fn!(f64), value_fn!(f64), value_fn!(f64), value_fn!(Color)],
),
)],
vec![(
NodeIdentifier::new("graphene_std::brush::ReduceNode<_, _>"),
|args| {
let acc: DowncastBothNode<(), ImageFrame> = DowncastBothNode::new(args[0]);
let acc: DowncastBothNode<(), ImageFrame<Color>> = DowncastBothNode::new(args[0]);
let image = acc.eval(());
let blend_node = graphene_core::raster::BlendNode::new(ClonedNode::new(BlendMode::Normal), ClonedNode::new(1.0));
let _ = &blend_node as &dyn for<'i> Node<'i, (Color, Color), Output = Color>;
let node = ReduceNode::new(ClonedNode::new(image), ValueNode::new(BlendImageTupleNode::new(ValueNode::new(blend_node))));
//let _ = &node as &dyn for<'i> Node<'i, core::slice::Iter<ImageFrame>, Output = ImageFrame>;
let any: DynAnyNode<Box<dyn Iterator<Item = ImageFrame> + Sync + Send>, _, _> = graphene_std::any::DynAnyNode::new(ValueNode::new(node));
//let _ = &node as &dyn for<'i> Node<'i, core::slice::Iter<ImageFrame<Color>>, Output = ImageFrame<Color>>;
let any: DynAnyNode<Box<dyn Iterator<Item = ImageFrame<Color>> + Sync + Send>, _, _> = graphene_std::any::DynAnyNode::new(ValueNode::new(node));
Box::pin(any)
},
NodeIOTypes::new(concrete!(Box<dyn Iterator<Item = &ImageFrame> + Sync + Send>), concrete!(ImageFrame), vec![value_fn!(ImageFrame)]),
NodeIOTypes::new(
concrete!(Box<dyn Iterator<Item = &ImageFrame<Color>> + Sync + Send>),
concrete!(ImageFrame<Color>),
vec![value_fn!(ImageFrame<Color>)],
),
)],
// Filters
raster_node!(graphene_core::raster::LuminanceNode<_>, params: [LuminanceCalculation]),
@ -218,29 +222,33 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
(
NodeIdentifier::new("graphene_core::raster::BlendNode<_, _, _, _>"),
|args| {
let image: DowncastBothNode<(), ImageFrame> = DowncastBothNode::new(args[0]);
let image: DowncastBothNode<(), ImageFrame<Color>> = DowncastBothNode::new(args[0]);
let blend_mode: DowncastBothNode<(), BlendMode> = DowncastBothNode::new(args[1]);
let opacity: DowncastBothNode<(), f64> = DowncastBothNode::new(args[2]);
let blend_node = graphene_core::raster::BlendNode::new(ClonedNode::new(blend_mode.eval(())), ClonedNode::new(opacity.eval(())));
let node = graphene_std::raster::BlendImageNode::new(image, ValueNode::new(blend_node));
let _ = &node as &dyn for<'i> Node<'i, ImageFrame, Output = ImageFrame>;
let any: DynAnyNode<ImageFrame, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(node));
let _ = &node as &dyn for<'i> Node<'i, ImageFrame<Color>, Output = ImageFrame<Color>>;
let any: DynAnyNode<ImageFrame<Color>, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(node));
any.into_type_erased()
},
NodeIOTypes::new(concrete!(ImageFrame), concrete!(ImageFrame), vec![value_fn!(ImageFrame), value_fn!(BlendMode), value_fn!(f64)]),
NodeIOTypes::new(
concrete!(ImageFrame<Color>),
concrete!(ImageFrame<Color>),
vec![value_fn!(ImageFrame<Color>), value_fn!(BlendMode), value_fn!(f64)],
),
),
(
NodeIdentifier::new("graphene_core::raster::EraseNode<_, _>"),
|args| {
let image: DowncastBothNode<(), ImageFrame> = DowncastBothNode::new(args[0]);
let image: DowncastBothNode<(), ImageFrame<Color>> = DowncastBothNode::new(args[0]);
let opacity: DowncastBothNode<(), f64> = DowncastBothNode::new(args[1]);
let blend_node = graphene_std::brush::EraseNode::new(ClonedNode::new(opacity.eval(())));
let node = graphene_std::raster::BlendImageNode::new(image, ValueNode::new(blend_node));
let _ = &node as &dyn for<'i> Node<'i, ImageFrame, Output = ImageFrame>;
let any: DynAnyNode<ImageFrame, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(node));
let _ = &node as &dyn for<'i> Node<'i, ImageFrame<Color>, Output = ImageFrame<Color>>;
let any: DynAnyNode<ImageFrame<Color>, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(node));
any.into_type_erased()
},
NodeIOTypes::new(concrete!(ImageFrame), concrete!(ImageFrame), vec![value_fn!(ImageFrame), value_fn!(f64)]),
NodeIOTypes::new(concrete!(ImageFrame<Color>), concrete!(ImageFrame<Color>), vec![value_fn!(ImageFrame<Color>), value_fn!(f64)]),
),
],
raster_node!(graphene_core::raster::GrayscaleNode<_, _, _, _, _, _, _>, params: [Color, f64, f64, f64, f64, f64, f64]),
@ -261,17 +269,17 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
if use_legacy.eval(()) {
let generate_brightness_contrast_legacy_mapper_node = GenerateBrightnessContrastLegacyMapperNode::new(brightness, contrast);
let map_image_frame_node = graphene_std::raster::MapImageFrameNode::new(ValueNode::new(generate_brightness_contrast_legacy_mapper_node.eval(())));
let any: DynAnyNode<ImageFrame, _, _> = graphene_std::any::DynAnyNode::new(ValueNode::new(map_image_frame_node));
let map_image_frame_node = graphene_std::raster::MapImageNode::new(ValueNode::new(generate_brightness_contrast_legacy_mapper_node.eval(())));
let any: DynAnyNode<ImageFrame<Color>, _, _> = graphene_std::any::DynAnyNode::new(ValueNode::new(map_image_frame_node));
Box::pin(any)
} else {
let generate_brightness_contrast_mapper_node = GenerateBrightnessContrastMapperNode::new(brightness, contrast);
let map_image_frame_node = graphene_std::raster::MapImageFrameNode::new(ValueNode::new(generate_brightness_contrast_mapper_node.eval(())));
let any: DynAnyNode<ImageFrame, _, _> = graphene_std::any::DynAnyNode::new(ValueNode::new(map_image_frame_node));
let map_image_frame_node = graphene_std::raster::MapImageNode::new(ValueNode::new(generate_brightness_contrast_mapper_node.eval(())));
let any: DynAnyNode<ImageFrame<Color>, _, _> = graphene_std::any::DynAnyNode::new(ValueNode::new(map_image_frame_node));
Box::pin(any)
}
},
NodeIOTypes::new(concrete!(ImageFrame), concrete!(ImageFrame), vec![value_fn!(f64), value_fn!(f64), value_fn!(bool)]),
NodeIOTypes::new(concrete!(ImageFrame<Color>), concrete!(ImageFrame<Color>), vec![value_fn!(f64), value_fn!(f64), value_fn!(bool)]),
)],
raster_node!(graphene_core::raster::OpacityNode<_>, params: [f64]),
raster_node!(graphene_core::raster::PosterizeNode<_>, params: [f64]),
@ -280,63 +288,63 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
(
NodeIdentifier::new("graphene_std::memo::LetNode<_>"),
|_| {
let node: LetNode<ImageFrame> = graphene_std::memo::LetNode::new();
let node: LetNode<ImageFrame<Color>> = graphene_std::memo::LetNode::new();
let any = graphene_std::any::DynAnyRefNode::new(node);
any.into_type_erased()
},
NodeIOTypes::new(concrete!(Option<ImageFrame>), concrete!(&ImageFrame), vec![]),
NodeIOTypes::new(concrete!(Option<ImageFrame<Color>>), concrete!(&ImageFrame<Color>), vec![]),
),
(
NodeIdentifier::new("graphene_std::memo::EndLetNode<_>"),
|args| {
let input: DowncastBothNode<(), ImageFrame> = DowncastBothNode::new(args[0]);
let input: DowncastBothNode<(), ImageFrame<Color>> = DowncastBothNode::new(args[0]);
let node = graphene_std::memo::EndLetNode::new(input);
let any: DynAnyInRefNode<ImageFrame, _, _> = graphene_std::any::DynAnyInRefNode::new(node);
let any: DynAnyInRefNode<ImageFrame<Color>, _, _> = graphene_std::any::DynAnyInRefNode::new(node);
any.into_type_erased()
},
NodeIOTypes::new(generic!(T), concrete!(ImageFrame), vec![value_fn!(ImageFrame)]),
NodeIOTypes::new(generic!(T), concrete!(ImageFrame<Color>), vec![value_fn!(ImageFrame<Color>)]),
),
(
NodeIdentifier::new("graphene_std::memo::EndLetNode<_>"),
|args| {
let input: DowncastBothNode<(), VectorData> = DowncastBothNode::new(args[0]);
let node = graphene_std::memo::EndLetNode::new(input);
let any: DynAnyInRefNode<ImageFrame, _, _> = graphene_std::any::DynAnyInRefNode::new(node);
let any: DynAnyInRefNode<ImageFrame<Color>, _, _> = graphene_std::any::DynAnyInRefNode::new(node);
any.into_type_erased()
},
NodeIOTypes::new(generic!(T), concrete!(ImageFrame), vec![value_fn!(VectorData)]),
NodeIOTypes::new(generic!(T), concrete!(ImageFrame<Color>), vec![value_fn!(VectorData)]),
),
(
NodeIdentifier::new("graphene_std::memo::RefNode<_, _>"),
|args| {
let map_fn: DowncastBothRefNode<Option<ImageFrame>, ImageFrame> = DowncastBothRefNode::new(args[0]);
let map_fn: DowncastBothRefNode<Option<ImageFrame<Color>>, ImageFrame<Color>> = DowncastBothRefNode::new(args[0]);
let node = graphene_std::memo::RefNode::new(map_fn);
let any = graphene_std::any::DynAnyRefNode::new(node);
any.into_type_erased()
},
NodeIOTypes::new(concrete!(()), concrete!(&ImageFrame), vec![]),
NodeIOTypes::new(concrete!(()), concrete!(&ImageFrame<Color>), vec![]),
),
(
NodeIdentifier::new("graphene_core::structural::MapImageNode"),
|args| {
let map_fn: DowncastBothNode<Color, Color> = DowncastBothNode::new(args[0]);
let node = graphene_std::raster::MapImageNode::new(ValueNode::new(map_fn));
let any: DynAnyNode<Image, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(node));
let any: DynAnyNode<Image<Color>, _, _> = graphene_std::any::DynAnyNode::new(graphene_core::value::ValueNode::new(node));
any.into_type_erased()
},
NodeIOTypes::new(concrete!(Image), concrete!(Image), vec![]),
NodeIOTypes::new(concrete!(Image<Color>), concrete!(Image<Color>), vec![]),
),
(
NodeIdentifier::new("graphene_std::raster::ImaginateNode<_>"),
|args| {
let cached = graphene_std::any::input_node::<Option<std::sync::Arc<Image>>>(args[15]);
let cached = graphene_std::any::input_node::<Option<std::sync::Arc<Image<Color>>>>(args[15]);
let node = graphene_std::raster::ImaginateNode::new(cached);
let any = DynAnyNode::new(ValueNode::new(node));
any.into_type_erased()
},
NodeIOTypes::new(
concrete!(ImageFrame),
concrete!(ImageFrame),
concrete!(ImageFrame<Color>),
concrete!(ImageFrame<Color>),
vec![
value_fn!(f64),
value_fn!(Option<DVec2>),
@ -353,7 +361,7 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
value_fn!(ImaginateMaskStartingFill),
value_fn!(bool),
value_fn!(bool),
value_fn!(Option<std::sync::Arc<Image>>),
value_fn!(Option<std::sync::Arc<Image<Color>>>),
value_fn!(f64),
value_fn!(ImaginateStatus),
],
@ -364,9 +372,9 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
|args| {
let radius = DowncastBothNode::<(), u32>::new(args[0]);
let sigma = DowncastBothNode::<(), f64>::new(args[1]);
let image = DowncastBothRefNode::<Image, Image>::new(args[2]);
let empty_image: ValueNode<Image> = ValueNode::new(Image::empty());
let empty: TypeNode<_, (), Image> = TypeNode::new(empty_image.then(CloneNode::new()));
let image = DowncastBothRefNode::<Image<Color>, Image<Color>>::new(args[2]);
let empty_image: ValueNode<Image<Color>> = ValueNode::new(Image::empty());
let empty: TypeNode<_, (), Image<Color>> = TypeNode::new(empty_image.then(CloneNode::new()));
use graphene_core::Node;
let radius = ClonedNode::new(radius.eval(()));
let sigma = ClonedNode::new(sigma.eval(()));
@ -391,41 +399,41 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
let dimensions: TypeNode<_, (), (u32, u32)> = TypeNode::new(dimensions);
let new_image = dimensions.then(new_image);
let new_image = ForgetNode::new().then(new_image);
let node: DynAnyNode<&Image, _, _> = DynAnyNode::new(ValueNode::new(new_image));
let node: DynAnyNode<&Image<Color>, _, _> = DynAnyNode::new(ValueNode::new(new_image));
node.into_type_erased()
},
NodeIOTypes::new(concrete!(Image), concrete!(Image), vec![value_fn!(u32), value_fn!(f64)]),
NodeIOTypes::new(concrete!(Image<Color>), concrete!(Image<Color>), vec![value_fn!(u32), value_fn!(f64)]),
),
//register_node!(graphene_std::memo::CacheNode<_>, input: Image, params: []),
//register_node!(graphene_std::memo::CacheNode<_>, input: Image<Color>, params: []),
(
NodeIdentifier::new("graphene_std::memo::CacheNode"),
|args| {
let input: DowncastBothNode<(), Image> = DowncastBothNode::new(args[0]);
let node: CacheNode<Image, _> = graphene_std::memo::CacheNode::new(input);
let input: DowncastBothNode<(), Image<Color>> = DowncastBothNode::new(args[0]);
let node: CacheNode<Image<Color>, _> = graphene_std::memo::CacheNode::new(input);
let any = DynAnyRefNode::new(node);
any.into_type_erased()
},
NodeIOTypes::new(concrete!(()), concrete!(&Image), vec![value_fn!(Image)]),
NodeIOTypes::new(concrete!(()), concrete!(&Image<Color>), vec![value_fn!(Image<Color>)]),
),
(
NodeIdentifier::new("graphene_std::memo::CacheNode"),
|args| {
let input: DowncastBothNode<(), ImageFrame> = DowncastBothNode::new(args[0]);
let node: CacheNode<ImageFrame, _> = graphene_std::memo::CacheNode::new(input);
let input: DowncastBothNode<(), ImageFrame<Color>> = DowncastBothNode::new(args[0]);
let node: CacheNode<ImageFrame<Color>, _> = graphene_std::memo::CacheNode::new(input);
let any = DynAnyRefNode::new(node);
any.into_type_erased()
},
NodeIOTypes::new(concrete!(()), concrete!(&ImageFrame), vec![value_fn!(ImageFrame)]),
NodeIOTypes::new(concrete!(()), concrete!(&ImageFrame<Color>), vec![value_fn!(ImageFrame<Color>)]),
),
(
NodeIdentifier::new("graphene_std::memo::CacheNode"),
|args| {
let input: DowncastBothNode<ImageFrame, ImageFrame> = DowncastBothNode::new(args[0]);
let node: CacheNode<ImageFrame, _> = graphene_std::memo::CacheNode::new(input);
let input: DowncastBothNode<ImageFrame<Color>, ImageFrame<Color>> = DowncastBothNode::new(args[0]);
let node: CacheNode<ImageFrame<Color>, _> = graphene_std::memo::CacheNode::new(input);
let any = DynAnyRefNode::new(node);
any.into_type_erased()
},
NodeIOTypes::new(concrete!(ImageFrame), concrete!(&ImageFrame), vec![fn_type!(ImageFrame, ImageFrame)]),
NodeIOTypes::new(concrete!(ImageFrame<Color>), concrete!(&ImageFrame<Color>), vec![fn_type!(ImageFrame<Color>, ImageFrame<Color>)]),
),
(
NodeIdentifier::new("graphene_std::memo::CacheNode"),
@ -448,19 +456,19 @@ fn node_registry() -> HashMap<NodeIdentifier, HashMap<NodeIOTypes, NodeConstruct
NodeIOTypes::new(concrete!(()), concrete!(&Vec<DVec2>), vec![value_fn!(Vec<DVec2>)]),
),
],
register_node!(graphene_core::structural::ConsNode<_, _>, input: Image, params: [&str]),
register_node!(graphene_std::raster::ImageFrameNode<_>, input: Image, params: [DAffine2]),
register_node!(graphene_core::structural::ConsNode<_, _>, input: Image<Color>, params: [&str]),
register_node!(graphene_std::raster::ImageFrameNode<_, _>, input: Image<Color>, params: [DAffine2]),
#[cfg(feature = "quantization")]
register_node!(graphene_std::quantization::GenerateQuantizationNode<_, _>, input: ImageFrame, params: [u32, u32]),
register_node!(graphene_std::quantization::GenerateQuantizationNode<_, _>, input: ImageFrame<Color>, params: [u32, u32]),
raster_node!(graphene_core::quantization::QuantizeNode<_>, params: [QuantizationChannels]),
raster_node!(graphene_core::quantization::DeQuantizeNode<_>, params: [QuantizationChannels]),
register_node!(graphene_core::ops::CloneNode<_>, input: &QuantizationChannels, params: []),
register_node!(graphene_core::transform::TransformNode<_, _, _, _, _>, input: VectorData, params: [DVec2, f64, DVec2, DVec2, DVec2]),
register_node!(graphene_core::transform::TransformNode<_, _, _, _, _>, input: ImageFrame, params: [DVec2, f64, DVec2, DVec2, DVec2]),
register_node!(graphene_core::transform::TransformNode<_, _, _, _, _>, input: ImageFrame<Color>, params: [DVec2, f64, DVec2, DVec2, DVec2]),
register_node!(graphene_core::transform::SetTransformNode<_>, input: VectorData, params: [VectorData]),
register_node!(graphene_core::transform::SetTransformNode<_>, input: ImageFrame, params: [ImageFrame]),
register_node!(graphene_core::transform::SetTransformNode<_>, input: ImageFrame<Color>, params: [ImageFrame<Color>]),
register_node!(graphene_core::transform::SetTransformNode<_>, input: VectorData, params: [DAffine2]),
register_node!(graphene_core::transform::SetTransformNode<_>, input: ImageFrame, params: [DAffine2]),
register_node!(graphene_core::transform::SetTransformNode<_>, input: ImageFrame<Color>, params: [DAffine2]),
register_node!(graphene_core::vector::SetFillNode<_, _, _, _, _, _, _>, input: VectorData, params: [graphene_core::vector::style::FillType, Option<graphene_core::Color>, graphene_core::vector::style::GradientType, DVec2, DVec2, DAffine2, Vec<(f64, Option<graphene_core::Color>)>]),
register_node!(graphene_core::vector::SetStrokeNode<_, _, _, _, _, _, _>, input: VectorData, params: [Option<graphene_core::Color>, f64, Vec<f32>, f64, graphene_core::vector::style::LineCap, graphene_core::vector::style::LineJoin, f64]),
register_node!(graphene_core::vector::generator_nodes::UnitCircleGenerator, input: (), params: []),