Use safe casting for creating key variants

Dennis Kobert 2023-05-31 20:08:30 +02:00 committed by Keavon Chambers
parent d4d6a63d84
commit 503d4ebbe4
8 changed files with 35 additions and 38 deletions
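
In short: the handler previously created `Key` variants from raw indices with `std::mem::transmute_copy`, which is unchecked and unsafe; this commit derives `num_enum::TryFromPrimitive` on `Key` so the conversion goes through a checked `TryFrom<u8>` instead. A minimal standalone sketch of that pattern (the `Direction` enum here is illustrative, not repository code):

use num_enum::TryFromPrimitive;

// Stand-in for `Key`: the derive generates `impl TryFrom<u8>` that rejects
// any value which does not correspond to a declared variant.
#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)]
#[repr(u8)]
enum Direction {
	Up,
	Down,
	Left,
	Right,
}

fn main() {
	// Checked conversion: in-range values succeed, out-of-range values return
	// an error instead of producing undefined behavior like a transmute would.
	assert!(matches!(Direction::try_from(2u8), Ok(Direction::Left)));
	assert!(Direction::try_from(7u8).is_err());
}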

Cargo.lock (generated)

@@ -1772,6 +1772,7 @@ dependencies = [
"interpreted-executor",
"kurbo",
"log",
+ "num_enum 0.6.1",
"once_cell",
"remain",
"serde",
@@ -2684,7 +2685,7 @@ dependencies = [
"bitflags 1.3.2",
"jni-sys",
"ndk-sys",
- "num_enum",
+ "num_enum 0.5.11",
"thiserror",
]
@@ -2851,7 +2852,16 @@ version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9"
dependencies = [
- "num_enum_derive",
+ "num_enum_derive 0.5.11",
]
+
+[[package]]
+name = "num_enum"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1"
+dependencies = [
+ "num_enum_derive 0.6.1",
+]
[[package]]
@@ -2866,6 +2876,18 @@ dependencies = [
"syn 1.0.109",
]
+
+[[package]]
+name = "num_enum_derive"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6"
+dependencies = [
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.15",
+]
[[package]]
name = "num_threads"
version = "0.1.6"


@@ -12,10 +12,7 @@ license = "Apache-2.0"
[features]
gpu = ["interpreted-executor/gpu", "graphene-std/gpu", "graphene-core/gpu"]
-quantization = [
-"graphene-std/quantization",
-"interpreted-executor/quantization",
-]
+quantization = ["graphene-std/quantization", "interpreted-executor/quantization"]
[dependencies]
log = "0.4"
@@ -44,6 +41,7 @@ interpreted-executor = { path = "../node-graph/interpreted-executor" }
dyn-any = { path = "../libraries/dyn-any" }
graphene-core = { path = "../node-graph/gcore" }
graphene-std = { path = "../node-graph/gstd" }
+num_enum = "0.6.1"
[dependencies.document-legacy]
path = "../document-legacy"


@@ -37,9 +37,9 @@ impl InputMapperMessageHandler {
.filter_map(|(i, m)| {
let ma = m.0.iter().find_map(|m| actions.find_map(|a| (a == m.action.to_discriminant()).then(|| m.action.to_discriminant())));
-ma.map(|a| unsafe { (std::mem::transmute_copy::<usize, Key>(&i), a) })
+ma.map(|a| ((i as u8).try_into().unwrap(), a))
})
-.for_each(|(k, a)| {
+.for_each(|(k, a): (Key, _)| {
let _ = write!(output, "{}: {}, ", k.to_discriminant().local_name(), a.local_name().split('.').last().unwrap());
});
output.replace("Key", "")
@@ -72,7 +72,7 @@ impl InputMapperMessageHandler {
"Attempting to convert a Key with enum index {}, which is larger than the number of Key enums",
i
);
-unsafe { std::mem::transmute_copy::<usize, Key>(&i) }
+(i as u8).try_into().unwrap()
})
.collect::<Vec<_>>();
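
A note on the removed `transmute_copy` calls: `std::mem::transmute_copy::<usize, Key>(&i)` reads only `size_of::<Key>()` bytes (a single byte once `Key` is `#[repr(u8)]`, and most likely one byte before this change as well) from the start of the `usize`. Which byte that is depends on the target's endianness, and any index outside the range of declared variants is undefined behavior, which is why the checked `(i as u8).try_into()` is preferable. A small, safe demonstration of the byte-order dependence (standalone sketch, not repository code):

fn main() {
	let i: usize = 5;
	// The byte a one-byte transmute-copy would have read from `&i`:
	let first_byte = i.to_ne_bytes()[0];
	// 5 on little-endian targets, 0 on big-endian targets, so the old cast
	// only happened to work on little-endian hardware.
	println!("first byte of {i}: {first_byte}");
}

As a design note, `i as u8` silently truncates indices above 255 before the checked conversion; `u8::try_from(i)` would surface that case as an error as well, though the debug message in the hunk above suggests indices are already expected to be in range.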


@@ -50,7 +50,8 @@ bitflags! {
// (although we ignore the shift key, so the user doesn't have to press `Ctrl Shift +` on a US keyboard), even if the keyboard layout
// is for a different locale where the `+` key is somewhere entirely different, shifted or not. This would then also work for numpad `+`.
#[impl_message(Message, InputMapperMessage, KeyDown)]
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize, specta::Type)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize, specta::Type, num_enum::TryFromPrimitive)]
+#[repr(u8)]
pub enum Key {
// Writing system keys
Digit0,
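
With `TryFromPrimitive` derived and the discriminant pinned to `u8` by `#[repr(u8)]`, any `u8` can be fallibly converted into a `Key`. A hedged sketch of how such an index-to-key lookup could be written so that out-of-range indices are skipped rather than unwrapped (hypothetical helper, not repository code):

// Assumes the `Key` enum from the diff above (num_enum::TryFromPrimitive, #[repr(u8)]).
fn key_from_index(i: usize) -> Option<Key> {
	// usize -> u8 first (fails for i > 255), then u8 -> Key (fails for unknown discriminants).
	u8::try_from(i).ok().and_then(|byte| Key::try_from(byte).ok())
}

The call sites in this commit unwrap instead; the debug message shown in the handler hunk indicates the index is already checked against the number of `Key` variants.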


@@ -41,28 +41,6 @@ pub enum Message {
Workspace(WorkspaceMessage),
}
-impl Message {
-/// Returns the byte representation of the message.
-///
-/// # Safety
-/// This function reads from uninitialized memory!!!
-/// Only use if you know what you are doing.
-unsafe fn as_slice(&self) -> &[u8] {
-core::slice::from_raw_parts(self as *const Message as *const u8, std::mem::size_of::<Message>())
-}
-/// Returns a pseudo hash that should uniquely identify the message.
-/// This is needed because `Hash` is not implemented for `f64`s
-///
-/// # Safety
-/// This function reads from uninitialized memory but the generated value should be fine.
-pub fn pseudo_hash(&self) -> u64 {
-let mut s = DefaultHasher::new();
-unsafe { self.as_slice() }.hash(&mut s);
-s.finish()
-}
-}
/// Provides an impl of `specta::Type` for `MessageDiscriminant`, the struct created by `impl_message`.
/// Specta isn't integrated with `impl_message`, so a remote impl must be provided using this
/// struct.
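
The deleted `pseudo_hash` viewed the whole `Message` value as a raw byte slice; as its own safety comments admit, that reads uninitialized memory, because enum and struct layouts contain padding bytes with unspecified contents, so the resulting hash is not a reliable identity. A minimal standalone illustration of where such padding comes from (hypothetical `Padded` type, not repository code):

use std::mem::size_of;

#[repr(C)]
struct Padded {
	tag: u8,    // 1 byte
	value: u64, // 8 bytes, 8-byte aligned, so 7 padding bytes follow `tag`
}

fn main() {
	// 16 bytes total, of which 7 are padding whose contents are never written;
	// hashing the raw bytes of a `Padded` would read that uninitialized memory.
	assert_eq!(size_of::<Padded>(), 16);
	println!("size_of::<Padded>() = {}", size_of::<Padded>());
}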


@@ -1222,9 +1222,7 @@ fn edit_layer_deepest_manipulation(intersect: &Layer, responses: &mut VecDeque<M
fn recursive_search(document: &DocumentMessageHandler, layer_path: &Vec<u64>, incoming_layer_path_vector: &Vec<u64>) -> bool {
let layer_paths = document.document_legacy.folder_children_paths(layer_path);
for path in layer_paths {
-if path == *incoming_layer_path_vector {
-return true;
-} else if document.document_legacy.is_folder(path.clone()) && recursive_search(document, &path, incoming_layer_path_vector) {
+if path == *incoming_layer_path_vector || document.document_legacy.is_folder(path.clone()) && recursive_search(document, &path, incoming_layer_path_vector) {
return true;
}
}
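
The folded condition relies on `&&` binding tighter than `||` in Rust, so `a || b && c` parses as `a || (b && c)` and the merged `if` keeps the behavior of the original `if`/`else if` pair. A trivial standalone check of that precedence:

fn main() {
	for a in [false, true] {
		for b in [false, true] {
			for c in [false, true] {
				// Same truth table as the original two-branch version.
				assert_eq!(a || b && c, a || (b && c));
			}
		}
	}
}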


@@ -172,6 +172,7 @@
}
#[test]
+#[allow(clippy::unit_cmp)]
fn test_apply() {
let mut array = [1, 2, 3];
let slice = &mut array;
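
`clippy::unit_cmp` fires when both sides of a comparison are the unit type, which typically happens when an assertion compares the return value of a unit-returning call against `()`. A standalone sketch of the kind of code the new `allow` covers (hypothetical; the rest of the `test_apply` body is not shown in this hunk):

#[allow(clippy::unit_cmp)]
fn main() {
	fn call(f: impl FnOnce()) { f() }
	// Both sides are `()`, which is exactly what the lint flags.
	assert_eq!(call(|| {}), ());
}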


@@ -86,12 +86,11 @@ fn args(node: &syn::PathSegment) -> Vec<Type> {
fn node_impl_proxy(attr: TokenStream, item: TokenStream) -> TokenStream {
let fn_item = item.clone();
let function = parse_macro_input!(fn_item as ItemFn);
-let sync_input = if function.sig.asyncness.is_some() {
+if function.sig.asyncness.is_some() {
node_impl_impl(attr, item, Asyncness::AllAsync)
} else {
node_impl_impl(attr, item, Asyncness::Sync)
-};
-sync_input
+}
}
enum Asyncness {
Sync,
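
The removed `sync_input` binding existed only to be returned on the next line; returning the `if` expression directly is the form clippy's `let_and_return` lint suggests. A minimal standalone illustration of the same refactor (not repository code):

// Before: bind, then immediately return the binding (clippy::let_and_return flags this).
fn pick_old(is_async: bool) -> &'static str {
	let out = if is_async { "async" } else { "sync" };
	out
}

// After: the `if` expression is the function's tail expression.
fn pick_new(is_async: bool) -> &'static str {
	if is_async { "async" } else { "sync" }
}

fn main() {
	assert_eq!(pick_old(false), pick_new(false));
}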