Make the dynamic node graph execution asynchronous (#1218)

* Make node graph execution async

Make node macro generate async node implementations

Start propagating async through the node system

Async checkpoint

Make Any<'i> Send + Sync

Determine node io type using panic node

Fix types for raster_node macro

Finish porting node registry?

Fix lifetime errors

Remove Send + Sync requirements and start making node construction async

Async MVP

Fix tests

Clippy fix

* Fix nodes

* Simplify lifetimes for node macro + make node macro more modular

* Reenable more nodes

* Fix pasting images

* Remove http test from brush node

* Fix output type for cache node

* Fix types for let scope

* Fix formatting
This commit is contained in:
Dennis Kobert 2023-05-27 11:48:57 +02:00 committed by Keavon Chambers
parent 5c7211cb30
commit 4bd9fbd073
40 changed files with 834 additions and 471 deletions

View file

@ -2,6 +2,7 @@ use super::context::Context;
use graph_craft::executor::{Any, Executor};
use graph_craft::proto::LocalFuture;
use graphene_core::gpu::PushConstants;
use bytemuck::Pod;
@ -38,7 +39,7 @@ impl<I: StaticTypeSized, O> GpuExecutor<I, O> {
}
impl<I: StaticTypeSized + Sync + Pod + Send, O: StaticTypeSized + Send + Sync + Pod> Executor for GpuExecutor<I, O> {
fn execute<'i, 's: 'i>(&'s self, input: Any<'i>) -> Result<Any<'i>, Box<dyn std::error::Error>> {
fn execute<'i>(&'i self, input: Any<'i>) -> LocalFuture<Result<Any<'i>, Box<dyn std::error::Error>>> {
let input = dyn_any::downcast::<Vec<I>>(input).expect("Wrong input type");
let context = &self.context;
let result: Vec<O> = execute_shader(
@ -49,10 +50,11 @@ impl<I: StaticTypeSized + Sync + Pod + Send, O: StaticTypeSized + Send + Sync +
&context.command_buffer_allocator,
*input,
);
Ok(Box::new(result))
Box::pin(async move { Ok(Box::new(result) as Any) })
}
}
// TODO: make async
fn execute_shader<I: Pod + Send + Sync, O: Pod + Send + Sync>(
device: std::sync::Arc<Device>,
queue: std::sync::Arc<vulkano::device::Queue>,