Introduce scopes (#1053)

* Implement let binding

* Add lambda inputs

* Fix tests

* Fix proto network formatting

* Generate a template Scoped network by default

* Add comment to explain the lambda parameter

* Move binding wrapping out of the template

* Fix errors caused by image frames
This commit is contained in:
Dennis Kobert 2023-03-02 17:13:28 +01:00 committed by Keavon Chambers
parent 0b813805d2
commit 7254c008f9
12 changed files with 366 additions and 118 deletions

View file

@ -40,6 +40,28 @@ impl<_I, _O, S0> DynAnyRefNode<_I, _O, S0> {
Self { node, _i: core::marker::PhantomData }
}
}
/// Type-erasure adapter: exposes a node that takes a typed reference (`&_I`)
/// through the dynamically-typed `Any` evaluation interface.
pub struct DynAnyInRefNode<I, O, Node> {
	node: Node,
	// Zero-sized marker binding the erased input/output types `I`/`O` to this wrapper.
	_i: PhantomData<(I, O)>,
}
impl<'input, _I: 'input + StaticType, _O: 'input + StaticType, N: 'input> Node<'input, Any<'input>> for DynAnyInRefNode<_I, _O, N>
where
	N: for<'any_input> Node<'any_input, &'any_input _I, Output = _O>,
{
	type Output = Any<'input>;

	/// Downcasts the erased input to a `Box<&_I>`, evaluates the inner node with
	/// the typed reference, and re-erases the result.
	fn eval<'node: 'input>(&'node self, input: Any<'input>) -> Self::Output {
		// On a type mismatch we panic with the inner node's type name so the
		// offending graph node can be identified.
		let typed: Box<&_I> = dyn_any::downcast(input).unwrap_or_else(|e| {
			let node_name = core::any::type_name::<N>();
			panic!("DynAnyNode Input, {e} in:\n{node_name}")
		});
		Box::new(self.node.eval(*typed))
	}
}
impl<_I, _O, S0> DynAnyInRefNode<_I, _O, S0> {
pub const fn new(node: S0) -> Self {
Self { node, _i: core::marker::PhantomData }
}
}
pub trait IntoTypeErasedNode<'n> {
fn into_type_erased(self) -> TypeErasedPinned<'n>;

View file

@ -1,3 +1,5 @@
use std::marker::PhantomData;
use graphene_core::Node;
use once_cell::sync::OnceCell;
@ -21,3 +23,71 @@ impl<T> CacheNode<T> {
CacheNode { cache: OnceCell::new() }
}
}
/// Caches the output of a given Node and acts as a proxy
// Backing store for a `let` binding in the node graph: written at most once
// (via `eval(Some(_))`), then read by any number of `RefNode`s.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct LetNode<T> {
	// `OnceCell` gives interior mutability through `&self` with write-once semantics.
	cache: OnceCell<T>,
}
impl<'i, T: 'i> Node<'i, Option<T>> for LetNode<T> {
type Output = &'i T;
fn eval<'s: 'i>(&'s self, input: Option<T>) -> Self::Output {
match input {
Some(input) => {
self.cache.set(input).unwrap_or_else(|_| error!("Let node was set twice but is not mutable"));
self.cache.get().unwrap()
}
None => self.cache.get().expect("Let node was not initialized"),
}
}
}
impl<T> LetNode<T> {
	/// Creates an uninitialized `let` binding.
	pub const fn new() -> LetNode<T> {
		Self { cache: OnceCell::new() }
	}
}
/// Caches the output of a given Node and acts as a proxy
// NOTE(review): doc comment appears copied from `CacheNode`/`LetNode` — this type
// actually terminates a scope: it discards the incoming binding reference and
// evaluates the wrapped `input` node instead. Confirm and reword upstream.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct EndLetNode<Input> {
	input: Input,
}
impl<'i, T: 'i, Input> Node<'i, &'i T> for EndLetNode<Input>
where
	Input: Node<'i, ()>,
{
	type Output = <Input>::Output;

	/// Ignores the incoming scope binding and forwards evaluation to the wrapped node.
	fn eval<'s: 'i>(&'s self, _binding: &'i T) -> Self::Output {
		self.input.eval(())
	}
}
impl<Input> EndLetNode<Input> {
pub const fn new(input: Input) -> EndLetNode<Input> {
EndLetNode { input }
}
}
pub use graphene_core::ops::SomeNode as InitNode;
// Reads the value of a `LetNode` binding without initializing it: evaluating a
// `RefNode` forwards `None` to the wrapped `Let` node.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct RefNode<T, Let> {
	let_node: Let,
	// Marker carrying the bound value type `T`, which appears only in the impl bounds.
	_t: PhantomData<T>,
}
impl<'i, T: 'i, Let> Node<'i, ()> for RefNode<T, Let>
where
	Let: for<'a> Node<'a, Option<T>, Output = &'a T>,
{
	type Output = &'i T;

	/// Reads the binding by evaluating the `Let` node with `None` (read-only access).
	///
	/// Panics (inside the `Let` node) if the binding was never initialized.
	fn eval<'s: 'i>(&'s self, _input: ()) -> Self::Output {
		self.let_node.eval(None)
	}
}
impl<Let, T> RefNode<T, Let> {
	/// Creates a read-only reference to the given `let` binding node.
	pub const fn new(let_node: Let) -> RefNode<T, Let> {
		Self {
			let_node,
			_t: PhantomData,
		}
	}
}

View file

@ -131,13 +131,14 @@ pub struct BlendImageNode<Second, MapFn> {
map_fn: MapFn,
}
// TODO: Implement proper blending
#[node_macro::node_fn(BlendImageNode)]
fn blend_image<MapFn>(image: Image, second: Image, map_fn: &'any_input MapFn) -> Image
fn blend_image<MapFn>(image: ImageFrame, second: ImageFrame, map_fn: &'any_input MapFn) -> ImageFrame
where
MapFn: for<'any_input> Node<'any_input, (Color, Color), Output = Color> + 'input,
{
let mut image = image;
for (pixel, sec_pixel) in &mut image.data.iter_mut().zip(second.data.iter()) {
for (pixel, sec_pixel) in &mut image.image.data.iter_mut().zip(second.image.data.iter()) {
*pixel = map_fn.eval((*pixel, *sec_pixel));
}
image