Fix some fatal failures related to rendered frame memory size limits

Keavon Chambers 2023-04-02 18:31:41 -07:00
parent 1b50878f3f
commit c16ee88b5d
7 changed files with 27 additions and 15 deletions

@@ -341,11 +341,12 @@ mod image {
     impl Hash for Image {
         fn hash<H: Hasher>(&self, state: &mut H) {
-            const HASH_SAMPLES: usize = 1000;
+            const HASH_SAMPLES: u64 = 1000;
+            let data_length = self.data.len() as u64;
             self.width.hash(state);
             self.height.hash(state);
-            for i in 0..HASH_SAMPLES.min(self.data.len()) {
-                self.data[i * self.data.len() / HASH_SAMPLES].hash(state);
+            for i in 0..HASH_SAMPLES.min(data_length) {
+                self.data[(i * data_length / HASH_SAMPLES) as usize].hash(state);
             }
         }
     }
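Note on the hunk above: Image::hash samples at most HASH_SAMPLES (1000) bytes spread evenly across the pixel buffer instead of hashing every byte. The old code computed i * self.data.len() in usize, which can overflow on 32-bit targets such as wasm32 once a rendered frame's buffer is large (a 4K RGBA frame is ~33 MB, and 999 times that exceeds u32::MAX), matching the fatal failures this commit addresses; the arithmetic is now widened to u64 and only cast back for indexing. A standalone sketch of the same sampling scheme follows; sampled_hash and the choice of DefaultHasher are illustrative assumptions, not code from this commit.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash up to 1000 evenly spaced bytes of a large buffer. Widening to u64
// keeps the intermediate product `i * data_length` from overflowing on
// 32-bit targets, where usize is only 32 bits.
fn sampled_hash(data: &[u8]) -> u64 {
    const HASH_SAMPLES: u64 = 1000;
    let data_length = data.len() as u64;
    let mut state = DefaultHasher::new();
    for i in 0..HASH_SAMPLES.min(data_length) {
        data[(i * data_length / HASH_SAMPLES) as usize].hash(&mut state);
    }
    state.finish()
}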

@@ -1,9 +1,9 @@
+use graphene_core::Node;
 use std::hash::{Hash, Hasher};
 use std::marker::PhantomData;
 use xxhash_rust::xxh3::Xxh3;
-use graphene_core::Node;
 
 /// Caches the output of a given Node and acts as a proxy
 #[derive(Default)]
 pub struct CacheNode<T, CachedNode> {

@@ -42,7 +42,10 @@ impl DynamicExecutor {
         self.output = proto_network.output;
         self.typing_context.update(&proto_network)?;
         trace!("setting output to {}", self.output);
-        self.tree.update(proto_network, &self.typing_context)?;
+        let orphans = self.tree.update(proto_network, &self.typing_context)?;
+        for node_id in orphans {
+            self.tree.free_node(node_id)
+        }
         Ok(())
     }
 }
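This change makes BorrowTree::update report the ids of nodes that the incoming proto_network no longer references, and DynamicExecutor frees each of them right away; previously those nodes, and whatever they cached (such as full rendered frames), stayed alive across recompiles. A minimal sketch of the free-on-update idea, assuming simplified hypothetical types (the Tree below stands in for the real BorrowTree):

use std::collections::HashMap;

struct Tree {
    // node id -> that node's cached output, e.g. a rendered frame's pixels
    nodes: HashMap<u64, Vec<u8>>,
}

impl Tree {
    // Drop a node the updated network no longer references, so its cached
    // memory is reclaimed instead of accumulating across updates.
    fn free_node(&mut self, node_id: u64) {
        self.nodes.remove(&node_id);
    }
}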
@@ -104,8 +107,8 @@ impl BorrowTree {
         for (id, node) in proto_network.nodes {
             if !self.nodes.contains_key(&id) {
                 self.push_node(id, node, typing_context)?;
-                old_nodes.remove(&id);
             }
+            old_nodes.remove(&id);
         }
         Ok(old_nodes.into_iter().collect())