Update GPU execution and quantization to new node system (#1070)

* Update GPU and quantization to new node system

Squashed commit of the following:

commit 3b69bdafed79f0bb1279609537a8eeead3f06830
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Sun Mar 5 11:37:17 2023 +0100

    Disable dev tools by default

commit dbbbedd68e48d1162442574ad8877c9922d40e4a
Merge: b1018eb5 a8f6e11e
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Sun Mar 5 10:45:00 2023 +0100

    Merge branch 'vite' into tauri-restructure-lite

commit b1018eb5ee56c2d23f9d5a4f034608ec684bd746
Merge: 3195833e 0512cb24
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Fri Mar 3 17:06:21 2023 +0100

    Merge branch 'master' into tauri-restructure-lite

commit 3195833e4088a4ed7984955c72617b27b7e39bfc
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Fri Mar 3 17:06:02 2023 +0100

    Bump number of samples

commit 3e57e1e3280759cf4f75726635e31d2b8e9387f9
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Fri Mar 3 16:55:52 2023 +0100

    Move part of quantization code to gcore

commit 10c15b0bc6ffb51e2bf2d94cd4eb0e24d761fb6f
Merge: 2b3db45a 8fe8896c
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Fri Mar 3 14:28:56 2023 +0100

    Merge remote-tracking branch 'origin/master' into tauri-restructure-lite

commit 2b3db45aee44a20660f0b1204666bb81e5a7e4b6
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Fri Mar 3 14:17:11 2023 +0100

    Fix types in node registry

commit 9122f35c0ba9a86255709680d744a48d3c7dcac4
Merge: 26eefc43 2cf4ee0f
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Fri Mar 3 01:04:55 2023 +0100

    Merge remote-tracking branch 'origin/master' into tauri-restructure-lite

commit 26eefc437eaad873f8d38fdb1fae0a1e3ec189e4
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Mar 2 23:05:53 2023 +0100

    Add Quantize node to document_node_types

commit 3f7606a91329200b2c025010d4a0cffee840a11c
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Mar 2 17:47:51 2023 +0100

    Add quantization nodes to node registry

commit 22d8e477ef79eef5b57b1dc9805e41bbf81cae43
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Mar 2 17:13:28 2023 +0100

    Introduce scopes (#1053)

    * Implement let binding

    * Add lambda inputs

    * Fix tests

    * Fix proto network formatting

    * Generate a template Scoped network by default

    * Add comment to explain the lambda parameter

    * Move binding wrapping out of the template

    * Fix errors caused by image frames

commit 9e0c29d92a164d4a4063e93480e1e289ef5243fe
Author: Alexandru Ică <alexandru@seyhanlee.com>
Date:   Thu Mar 2 15:55:10 2023 +0200

    Make use of ImageFrame in the node system more extensively (#1055) (#1062)

    Make the node system use ImageFrame more extensively (#1055)

commit 5912ef9a1a807917eeb90c1f4835bd8a5de9c821
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Wed Mar 1 16:15:21 2023 +0100

    Split quantization into multiple nodes

commit 285d7b76c176b3e2679ea24eecb38ef867a79f3b
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Mon Feb 27 12:35:57 2023 +0100

    Fix gpu support

commit e0b6327eebba8caf7545c4fedc6670abc4c3652e
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Feb 16 22:08:53 2023 +0100

    Don't watch frontend files when using tauri

commit 58ae146f6da935cfd37afbd25e1c331b615252da
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Feb 16 21:48:54 2023 +0100

    Migrate vue code base to vite

commit f996390cc312618a60f98ccb9cd515f1bae5006d
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Feb 16 19:34:33 2023 +0100

    Start migrating vue to use vite

commit 29d752f47cfd1c74ee51fac6f3d75557a378471c
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Feb 16 19:00:53 2023 +0100

    Kill cargo watch process automatically

commit 4d1c76b07acadbf609dbab7d57d9a7769b81d4b5
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Feb 16 17:37:27 2023 +0100

    Start playing around with vite infrastructure

commit 8494f5e9227aa433fd5ca75b268a6a96b2706b36
Author: Locria Cyber <74560659+locriacyber@users.noreply.github.com>
Date:   Thu Jan 19 18:40:46 2023 +0000

    Fix import style and eslint rules

commit 92490f7774a7351bb40091bcec78f79c28704768
Author: Locria Cyber <74560659+locriacyber@users.noreply.github.com>
Date:   Thu Jan 19 18:25:09 2023 +0000

    Fix icons

commit dc67821abad87f8ff780b12ae96668af2f7bb355
Author: Locria Cyber <74560659+locriacyber@users.noreply.github.com>
Date:   Thu Jan 19 18:20:48 2023 +0000

    Add license generator with rollup

commit 441e339d31b76dac4f91321d39a39900b5a79bc1
Author: Locria Cyber <74560659+locriacyber@users.noreply.github.com>
Date:   Thu Jan 19 18:14:22 2023 +0000

    Use eslint --fix to fix TS-in-svelte type imports. Now it compiles.

commit 2e847d339e7dcd51ed4c4677ed337c1e20636724
Author: Locria Cyber <74560659+locriacyber@users.noreply.github.com>
Date:   Thu Jan 19 17:31:49 2023 +0000

    Remove webpack and plugins

commit 3adab1b7f40ff17b91163e7ca47a403ef3c02fbc
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Thu Mar 2 16:10:19 2023 +0100

    Fix errors caused by image frames

commit 4e5f838995e213b4696225a473b9c56c0084e7a8
Author: Alexandru Ică <alexandru@seyhanlee.com>
Date:   Thu Mar 2 15:55:10 2023 +0200

    Make use of ImageFrame in the node system more extensively (#1055) (#1062)

    Make the node system use ImageFrame more extensively (#1055)

commit 1d4b0e29c693a53c068f1a30f0e857a9c1a59587
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Wed Mar 1 15:13:51 2023 +0100

    Update node graph guide readme with new syntax (#1061)

commit 6735d8c61f5709e22d2b22abd037bab417e868d6
Author: Rob Nadal <Robnadal44@gmail.com>
Date:   Tue Feb 28 18:59:06 2023 -0500

    Bezier-rs: Add function to smoothly join bezier curves (#1037)

    * Added bezier join

    * Stylistic changes per review

commit cd1d7aa7fbcce39fbbf7762d131ee16ad9cb46dd
Author: Dennis Kobert <dennis@kobert.dev>
Date:   Wed Feb 22 23:42:32 2023 +0100

    Implement let binding

    Add lambda inputs

    Fix tests

    Fix proto network formatting

    Generate a template Scoped network by default

    Add comment to explain the lambda parameter

    Move binding wrapping out of the template

* Update package-lock.json

* Regenerate package-lock.json and fix lint errors

* Readd git keep dir

* Revert change to panic.ts

* Fix clippy warnings

* Apply code review

* Clean up node_registry

* Fix test / spriv -> spirv typos
This commit is contained in:
Dennis Kobert 2023-03-05 13:22:14 +01:00 committed by Keavon Chambers
parent 4ea3802df1
commit b55e233fff
38 changed files with 2282 additions and 1924 deletions

View file

@ -1,121 +1,61 @@
use graph_craft::document::*;
use graph_craft::proto::*;
use graphene_core::raster::Image;
use graphene_core::raster::*;
use graphene_core::value::ValueNode;
use graphene_core::Node;
use graphene_core::*;
use bytemuck::Pod;
use core::marker::PhantomData;
use dyn_any::StaticTypeSized;
pub struct MapGpuNode<NN: Node<()>, I: IntoIterator<Item = S>, S: StaticTypeSized + Sync + Send + Pod, O: StaticTypeSized + Sync + Send + Pod>(pub NN, PhantomData<(S, I, O)>);
impl<'n, I: IntoIterator<Item = S>, NN: Node<(), Output = &'n NodeNetwork> + Copy, S: StaticTypeSized + Sync + Send + Pod, O: StaticTypeSized + Sync + Send + Pod> Node<I>
for &MapGpuNode<NN, I, S, O>
{
type Output = Vec<O>;
fn eval(self, input: I) -> Self::Output {
let network = self.0.eval(());
map_gpu_impl(network, input)
}
pub struct MapGpuNode<O, Network> {
network: Network,
_o: PhantomData<O>,
}
fn map_gpu_impl<I: IntoIterator<Item = S>, S: StaticTypeSized + Sync + Send + Pod, O: StaticTypeSized + Sync + Send + Pod>(network: &NodeNetwork, input: I) -> Vec<O> {
#[node_macro::node_fn(MapGpuNode<_O>)]
fn map_gpu<I: IntoIterator<Item = S>, S: StaticTypeSized + Sync + Send + Pod, _O: StaticTypeSized + Sync + Send + Pod>(input: I, network: &'any_input NodeNetwork) -> Vec<_O> {
use graph_craft::executor::Executor;
let bytes = compilation_client::compile_sync::<S, O>(network.clone()).unwrap();
let bytes = compilation_client::compile_sync::<S, _O>(network.clone()).unwrap();
let words = unsafe { std::slice::from_raw_parts(bytes.as_ptr() as *const u32, bytes.len() / 4) };
use wgpu_executor::{Context, GpuExecutor};
let executor: GpuExecutor<S, O> = GpuExecutor::new(Context::new_sync().unwrap(), words.into(), "gpu::eval".into()).unwrap();
let executor: GpuExecutor<S, _O> = GpuExecutor::new(Context::new_sync().unwrap(), words.into(), "gpu::eval".into()).unwrap();
let data: Vec<_> = input.into_iter().collect();
let result = executor.execute(Box::new(data)).unwrap();
let result = dyn_any::downcast::<Vec<O>>(result).unwrap();
let result = dyn_any::downcast::<Vec<_O>>(result).unwrap();
*result
}
impl<'n, I: IntoIterator<Item = S>, NN: Node<(), Output = &'n NodeNetwork> + Copy, S: StaticTypeSized + Sync + Send + Pod, O: StaticTypeSized + Sync + Send + Pod> Node<I> for MapGpuNode<NN, I, S, O> {
type Output = Vec<O>;
fn eval(self, input: I) -> Self::Output {
let network = self.0.eval(());
map_gpu_impl(network, input)
}
pub struct MapGpuSingleImageNode<N> {
node: N,
}
impl<I: IntoIterator<Item = S>, NN: Node<()>, S: StaticTypeSized + Sync + Pod + Send, O: StaticTypeSized + Sync + Send + Pod> MapGpuNode<NN, I, S, O> {
pub const fn new(network: NN) -> Self {
MapGpuNode(network, PhantomData)
}
}
pub struct MapGpuSingleImageNode<NN: Node<(), Output = String>>(pub NN);
impl<NN: Node<(), Output = String> + Copy> Node<Image> for MapGpuSingleImageNode<NN> {
type Output = Image;
fn eval(self, input: Image) -> Self::Output {
let node = self.0.eval(());
use graph_craft::document::*;
let identifier = NodeIdentifier {
name: std::borrow::Cow::Owned(node),
types: std::borrow::Cow::Borrowed(&[]),
};
let network = NodeNetwork {
inputs: vec![0],
disabled: vec![],
previous_output: None,
output: 0,
nodes: [(
0,
DocumentNode {
name: "Image filter Node".into(),
inputs: vec![NodeInput::Network],
implementation: DocumentNodeImplementation::Unresolved(identifier),
metadata: DocumentNodeMetadata::default(),
},
)]
.into_iter()
.collect(),
};
let value_network = ValueNode::new(network);
let map_node = MapGpuNode::new(&value_network);
let data = map_node.eval(input.data.clone());
Image { data, ..input }
}
}
impl<NN: Node<(), Output = String> + Copy> Node<Image> for &MapGpuSingleImageNode<NN> {
type Output = Image;
fn eval(self, input: Image) -> Self::Output {
let node = self.0.eval(());
use graph_craft::document::*;
let identifier = NodeIdentifier {
name: std::borrow::Cow::Owned(node),
types: std::borrow::Cow::Borrowed(&[]),
};
let network = NodeNetwork {
inputs: vec![0],
output: 0,
disabled: vec![],
previous_output: None,
nodes: [(
0,
DocumentNode {
name: "Image filter Node".into(),
inputs: vec![NodeInput::Network],
implementation: DocumentNodeImplementation::Unresolved(identifier),
metadata: DocumentNodeMetadata::default(),
},
)]
.into_iter()
.collect(),
};
let value_network = ValueNode::new(network);
let map_node = MapGpuNode::new(&value_network);
let data = map_node.eval(input.data.clone());
Image { data, ..input }
}
#[node_macro::node_fn(MapGpuSingleImageNode)]
fn map_gpu_single_image(input: Image, node: String) -> Image {
use graph_craft::document::*;
use graph_craft::NodeIdentifier;
let identifier = NodeIdentifier { name: std::borrow::Cow::Owned(node) };
let network = NodeNetwork {
inputs: vec![0],
disabled: vec![],
previous_outputs: None,
outputs: vec![NodeOutput::new(0, 0)],
nodes: [(
0,
DocumentNode {
name: "Image filter Node".into(),
inputs: vec![NodeInput::Network(concrete!(Color))],
implementation: DocumentNodeImplementation::Unresolved(identifier),
metadata: DocumentNodeMetadata::default(),
},
)]
.into_iter()
.collect(),
};
let value_network = ValueNode::new(network);
let map_node = MapGpuNode::new(value_network);
let data = map_node.eval(input.data.clone());
Image { data, ..input }
}

View file

@ -1,54 +1,61 @@
use graphene_core::raster::{Color, Image};
use dyn_any::{DynAny, StaticType};
use graphene_core::quantization::*;
use graphene_core::raster::{Color, ImageFrame};
use graphene_core::Node;
/// The `GenerateQuantizationNode` encodes the brightness of each channel of the image as an integer number
/// specified by the samples parameter. This node is used to assess the loss of visual information when
/// quantizing the image using different fit functions.
pub struct GenerateQuantizationNode<N: Node<(), Output = u32>, M: Node<(), Output = u32>> {
pub struct GenerateQuantizationNode<N, M> {
samples: N,
function: M,
}
#[node_macro::node_fn(GenerateQuantizationNode)]
fn generate_quantization_fn(image: Image, samples: u32, function: u32) -> Image {
// Scale the input image, this can be removed by adding an extra parameter to the fit function.
let max_energy = 16380.;
let data: Vec<f64> = image.data.iter().flat_map(|x| vec![x.r() as f64, x.g() as f64, x.b() as f64]).collect();
let data: Vec<f64> = data.iter().map(|x| x * max_energy).collect();
fn generate_quantization_fn(image_frame: ImageFrame, samples: u32, function: u32) -> [Quantization; 4] {
let image = image_frame.image;
let len = image.data.len().min(10000);
let mut channels: Vec<_> = (0..4).map(|_| Vec::with_capacity(image.data.len())).collect();
image
.data
.iter()
.enumerate()
.filter(|(i, _)| i % (image.data.len() / len) == 0)
.map(|(_, x)| vec![x.r() as f64, x.g() as f64, x.b() as f64, x.a() as f64])
.for_each(|x| x.into_iter().enumerate().for_each(|(i, value)| channels[i].push(value)));
log::info!("Quantizing {} samples", channels[0].len());
log::info!("In {} channels", channels.len());
let quantization: Vec<Quantization> = channels.into_iter().map(|x| generate_quantization_per_channel(x, samples)).collect();
core::array::from_fn(|i| quantization[i].clone())
}
fn generate_quantization_per_channel(data: Vec<f64>, samples: u32) -> Quantization {
let mut dist = autoquant::integrate_distribution(data);
autoquant::drop_duplicates(&mut dist);
let dist = autoquant::normalize_distribution(dist.as_slice());
let max = dist.last().unwrap().0;
let linear = Box::new(autoquant::SimpleFitFn {
/*let linear = Box::new(autoquant::SimpleFitFn {
function: move |x| x / max,
inverse: move |x| x * max,
name: "identity",
});
let best = match function {
0 => linear as Box<dyn autoquant::FitFn>,
1 => linear as Box<dyn autoquant::FitFn>,
2 => Box::new(autoquant::models::OptimizedLog::new(dist, 20)) as Box<dyn autoquant::FitFn>,
_ => linear as Box<dyn autoquant::FitFn>,
});*/
let linear = Quantization {
fn_index: 0,
a: max as f32,
b: 0.,
c: 0.,
d: 0.,
};
let roundtrip = |sample: f32| -> f32 {
let encoded = autoquant::encode(sample as f64 * max_energy, best.as_ref(), samples);
let decoded = autoquant::decode(encoded, best.as_ref(), samples) / max_energy;
log::trace!("{} enc: {} dec: {}", sample, encoded, decoded);
decoded as f32
let log_fit = autoquant::models::OptimizedLog::new(dist, samples as u64);
let parameters = log_fit.parameters();
let log_fit = Quantization {
fn_index: 1,
a: parameters[0] as f32,
b: parameters[1] as f32,
c: parameters[2] as f32,
d: parameters[3] as f32,
};
let new_data = image
.data
.iter()
.map(|c| {
let r = roundtrip(c.r());
let g = roundtrip(c.g());
let b = roundtrip(c.b());
let a = c.a();
Color::from_rgbaf32_unchecked(r, g, b, a)
})
.collect();
Image { data: new_data, ..image }
log_fit
}