diff --git a/Cargo.lock b/Cargo.lock index dad1b44b79..1bd2c62c4e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1430,6 +1430,7 @@ dependencies = [ "bones_schema", "bones_utils", "branches", + "fxhash", "glam 0.24.2", "once_map", "paste", @@ -1481,6 +1482,7 @@ dependencies = [ "fluent", "fluent-langneg", "futures-lite 2.3.0", + "fxhash", "ggrs", "gilrs", "glam 0.24.2", @@ -1501,6 +1503,7 @@ dependencies = [ "rustls 0.21.12", "send_wrapper", "serde", + "serde_json", "serde_yaml", "smallvec", "sys-locale", @@ -1609,21 +1612,29 @@ dependencies = [ name = "bones_utils" version = "0.4.0" dependencies = [ + "bones_schema", + "bones_utils", "bones_utils_macros", "fxhash", "getrandom", + "glam 0.24.2", "hashbrown 0.14.5", "instant", + "paste", "serde", + "tree_iterators_rs", "turborand", "ulid", + "ustr", ] [[package]] name = "bones_utils_macros" version = "0.4.0" dependencies = [ + "proc-macro2", "quote", + "syn 2.0.77", "venial", ] @@ -6978,6 +6989,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "streaming-iterator" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520" + [[package]] name = "strsim" version = "0.11.1" @@ -7729,6 +7746,15 @@ dependencies = [ "cc", ] +[[package]] +name = "tree_iterators_rs" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd22d378c78c58e20672ecd4ec4507a43325742daa79c5db037de084eb5f0c" +dependencies = [ + "streaming-iterator", +] + [[package]] name = "triple_buffer" version = "8.0.0" diff --git a/Cargo.toml b/Cargo.toml index 9f6a805e82..e99a3843a8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ fxhash = "0.2" hashbrown = "0.14" maybe-owned = "0.3" parking_lot = "0.12" +serde_json = "1" smallvec = "1.11" ustr = "0.10" iroh-net = "0.27" 
diff --git a/framework_crates/bones_asset/Cargo.toml b/framework_crates/bones_asset/Cargo.toml index ebd115c260..a32f131564 100644 --- a/framework_crates/bones_asset/Cargo.toml +++ b/framework_crates/bones_asset/Cargo.toml @@ -37,7 +37,7 @@ paste = "1.0" path-absolutize = { version = "3.1", features = ["use_unix_paths_on_wasm"] } semver = { version = "1.0", features = ["serde"] } serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" +serde_json = { workspace = true } serde_yaml = "0.9" sha2 = "0.10" tracing = "0.1" diff --git a/framework_crates/bones_asset/src/lib.rs b/framework_crates/bones_asset/src/lib.rs index 3c8c94e7b2..92aa7b1f6f 100644 --- a/framework_crates/bones_asset/src/lib.rs +++ b/framework_crates/bones_asset/src/lib.rs @@ -5,6 +5,7 @@ #![cfg_attr(doc, allow(unknown_lints))] #![deny(rustdoc::all)] +use bones_utils::DesyncHash; use serde::{de::DeserializeSeed, Deserializer}; /// Helper to export the same types in the crate root and in the prelude. @@ -282,6 +283,18 @@ impl From> for Maybe { } } +impl DesyncHash for Maybe { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + match self { + Maybe::Unset => 0.hash(hasher), + Maybe::Set(value) => { + 1.hash(hasher); + value.hash(hasher) + } + } + } +} + fn maybe_loader( ctx: &mut MetaAssetLoadCtx, ptr: SchemaRefMut<'_>, diff --git a/framework_crates/bones_ecs/Cargo.toml b/framework_crates/bones_ecs/Cargo.toml index d1720e8d34..c101b817ea 100644 --- a/framework_crates/bones_ecs/Cargo.toml +++ b/framework_crates/bones_ecs/Cargo.toml @@ -14,7 +14,7 @@ keywords.workspace = true default = ["derive", "keysize16"] miri = ["derive", "keysize10"] derive = ["dep:bones_ecs_macros"] -glam = ["dep:glam", "dep:paste", "bones_schema/glam"] +glam = ["dep:glam", "dep:paste", "bones_schema/glam", "bones_utils/glam"] serde = ["dep:serde"] keysize10 = [] @@ -36,6 +36,7 @@ serde = { version = "1", features = ["derive"], optional = true } anyhow = "1.0" branches = { workspace = true } +fxhash = { workspace 
= true } atomicell = "0.2" bitset-core = "0.1" once_map = "0.4.12" diff --git a/framework_crates/bones_ecs/src/components.rs b/framework_crates/bones_ecs/src/components.rs index 6a014ee4c9..bd8dc9a000 100644 --- a/framework_crates/bones_ecs/src/components.rs +++ b/framework_crates/bones_ecs/src/components.rs @@ -1,5 +1,6 @@ //! ECS component storage. +use fxhash::FxHasher; use once_map::OnceMap; use std::sync::Arc; @@ -47,6 +48,114 @@ impl Clone for ComponentStores { } } +impl DesyncHash for ComponentStores { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + // Compute child hashes and sort + let mut hashes = self + .components + .read_only_view() + .iter() + .filter_map(|(_, component_store)| { + // Verify Schema for component store implements desync hash. If no hash_fn, we don't + // want to add hash. + let component_store = component_store.as_ref().borrow(); + if component_store + .schema() + .type_data + .get::() + .is_some() + { + // We need to compute hashes first + return Some(component_store.compute_hash::()); + } + + None + }) + .collect::>(); + hashes.sort(); + + // Update parent hasher from sorted hashes + for hash in hashes.iter() { + hash.hash(hasher); + } + } +} + +impl BuildDesyncNode for ComponentStores { + fn desync_tree_node( + &self, + include_unhashable: bool, + ) -> DefaultDesyncTreeNode { + let mut any_hashable = false; + + // We get the Name component store so we can look up entity names and set those on component leaves. 
+ let names = self.get::().borrow(); + + let mut child_nodes = self + .components + .read_only_view() + .iter() + .filter_map(|(_, component_store)| { + let component_store = component_store.as_ref().borrow(); + let is_hashable = component_store + .schema() + .type_data + .get::() + .is_some(); + + if is_hashable { + any_hashable = true; + } + + if include_unhashable || is_hashable { + let mut child_node = component_store.desync_tree_node::(include_unhashable); + + // Our child here is a component store, and its children are component leaves. + // Iterate through children, retrieve metadata storing entity_idx if set, and use this + // to update the node's name from Name component. + // + // This is fairly hacky, but should be good enough for now. + for component_node in child_node.children_mut().iter_mut() { + if let DesyncNodeMetadata::Component { entity_idx } = + component_node.metadata() + { + // Constructing Entity with fake generation is bit of a hack - but component store does not + // use generation, only the index. 
+ if let Some(name) = names.get(Entity::new(*entity_idx, 0)) { + component_node.set_name(name.0.clone()); + } + } + } + + return Some(child_node); + } + None + }) + .collect::>(); + child_nodes.sort(); + + let hash = if any_hashable { + let mut hasher = H::default(); + for node in child_nodes.iter() { + // Update parent node hash from data + if let Some(hash) = node.get_hash() { + DesyncHash::hash(&hash, &mut hasher); + } + } + Some(hasher.finish()) + } else { + None + }; + + DefaultDesyncTreeNode::new( + hash, + Some("Components".into()), + child_nodes, + DesyncNodeMetadata::None, + ) + } +} + impl ComponentStores { /// Get the components of a certain type pub fn get_cell(&self) -> AtomicComponentStore { diff --git a/framework_crates/bones_ecs/src/components/untyped.rs b/framework_crates/bones_ecs/src/components/untyped.rs index 0ae0a3cdc0..efa1a7c13d 100644 --- a/framework_crates/bones_ecs/src/components/untyped.rs +++ b/framework_crates/bones_ecs/src/components/untyped.rs @@ -73,6 +73,69 @@ impl Drop for UntypedComponentStore { } } +impl DesyncHash for UntypedComponentStore { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + self.schema().full_name.hash(hasher); + for component in self.iter() { + DesyncHash::hash(&component, hasher); + } + } +} + +impl BuildDesyncNode for UntypedComponentStore { + fn desync_tree_node( + &self, + _include_unhashable: bool, + ) -> DefaultDesyncTreeNode { + let mut hasher = H::default(); + + // Iterate over components by index so we can save entity ID. 
+ let iter = 0..self.bitset().bit_len(); + let child_nodes: Vec = iter + .filter_map(|entity_idx| -> Option { + if let Some(component) = self.get_idx(entity_idx) { + let hash = if component + .schema() + .type_data + .get::() + .is_some() + { + // Update parent node hash from data + DesyncHash::hash(&component, &mut hasher); + Some(component.compute_hash::()) + } else { + None + }; + + return Some(DefaultDesyncTreeNode::new( + hash, + None, + vec![], + DesyncNodeMetadata::Component { + entity_idx: entity_idx as u32, + }, + )); + } + + None + }) + .collect(); + + let hash = if !child_nodes.is_empty() { + Some(hasher.finish()) + } else { + None + }; + + DefaultDesyncTreeNode::new( + hash, + Some(self.schema().full_name.to_string()), + child_nodes, + DesyncNodeMetadata::None, + ) + } +} + impl UntypedComponentStore { /// Create a arbitrary [`UntypedComponentStore`]. /// diff --git a/framework_crates/bones_ecs/src/entities.rs b/framework_crates/bones_ecs/src/entities.rs index 50f8f5ecdd..7ec80c453e 100644 --- a/framework_crates/bones_ecs/src/entities.rs +++ b/framework_crates/bones_ecs/src/entities.rs @@ -85,6 +85,22 @@ impl Default for Entities { } } +/// Utility component storing a name for entity +#[derive(HasSchema, Clone, Debug)] +pub struct Name(pub String); + +impl Default for Name { + fn default() -> Self { + Self("Unnamed".to_string()) + } +} + +impl From<&str> for Name { + fn from(value: &str) -> Self { + Self(value.into()) + } +} + /// A type representing a component-joining entity query. 
pub trait QueryItem { /// The type of iterator this query item creates diff --git a/framework_crates/bones_ecs/src/resources.rs b/framework_crates/bones_ecs/src/resources.rs index 883d9fcc1d..06a72e7a21 100644 --- a/framework_crates/bones_ecs/src/resources.rs +++ b/framework_crates/bones_ecs/src/resources.rs @@ -2,6 +2,7 @@ use std::{fmt::Debug, marker::PhantomData, sync::Arc}; +use fxhash::FxHasher; use once_map::OnceMap; use crate::prelude::*; @@ -24,6 +25,37 @@ impl std::fmt::Debug for UntypedResource { } } +impl DesyncHash for UntypedResource { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + if let Some(schema_box) = self.cell.borrow().as_ref() { + DesyncHash::hash(&schema_box.schema().full_name, hasher); + DesyncHash::hash(&schema_box.as_ref(), hasher); + } + } +} + +impl BuildDesyncNode for UntypedResource { + fn desync_tree_node( + &self, + _include_unhashable: bool, + ) -> DefaultDesyncTreeNode { + let name = Some(self.schema().full_name.to_string()); + + let hashable = self.schema().type_data.get::().is_some(); + + if let Some(schema_box) = self.cell.borrow().as_ref() { + let hash = if hashable { + Some(schema_box.as_ref().compute_hash::()) + } else { + None + }; + return DefaultDesyncTreeNode::new(hash, name, vec![], DesyncNodeMetadata::None); + } + + DefaultDesyncTreeNode::new(None, name, vec![], DesyncNodeMetadata::None) + } +} + impl UntypedResource { /// Initialize a new, empty [`UntypedResource`]. 
pub fn empty(schema: &'static Schema) -> Self { @@ -136,6 +168,77 @@ impl Clone for UntypedResources { } } +impl DesyncHash for UntypedResources { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + let mut child_hashes: Vec = self + .resources + .read_only_view() + .iter() + .filter_map(|(schema_id, resource_cell)| { + let is_shared = self.shared_resources.contains_key(schema_id); + + if !is_shared { + // Only build child node if hashable + let schema = resource_cell.schema(); + if schema.type_data.get::().is_some() { + return Some(resource_cell.compute_hash::()); + } + } + None + }) + .collect(); + + child_hashes.sort(); + + for hash in child_hashes { + // Update parent hash + hash.hash(hasher); + } + } +} + +impl BuildDesyncNode for UntypedResources { + fn desync_tree_node( + &self, + include_unhashable: bool, + ) -> DefaultDesyncTreeNode { + let mut hasher = H::default(); + let mut child_nodes: Vec = self + .resources + .read_only_view() + .iter() + .filter_map(|(schema_id, resource_cell)| { + let is_shared = self.shared_resources.contains_key(schema_id); + + if !is_shared { + // Only build child node if hashable + let schema = resource_cell.schema(); + if include_unhashable || schema.type_data.get::().is_some() { + return Some(resource_cell.desync_tree_node::(include_unhashable)); + } + } + None + }) + .collect(); + + child_nodes.sort(); + + for node in child_nodes.iter() { + // Update parent hash + if let Some(hash) = node.get_hash() { + DesyncHash::hash(&hash, &mut hasher); + } + } + + DefaultDesyncTreeNode::new( + Some(hasher.finish()), + Some("Resources".into()), + child_nodes, + DesyncNodeMetadata::None, + ) + } +} + /// Error thrown when a resource cell cannot be inserted because it already exists. 
#[derive(Debug, Clone, Copy)] pub struct CellAlreadyPresentError; @@ -210,6 +313,21 @@ pub struct Resources { untyped: UntypedResources, } +impl DesyncHash for Resources { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + self.untyped.hash(hasher); + } +} + +impl BuildDesyncNode for Resources { + fn desync_tree_node( + &self, + include_unhashable: bool, + ) -> DefaultDesyncTreeNode { + self.untyped.desync_tree_node::(include_unhashable) + } +} + impl Resources { /// Create an empty [`Resources`]. pub fn new() -> Self { diff --git a/framework_crates/bones_ecs/src/world.rs b/framework_crates/bones_ecs/src/world.rs index 9e6ffbd655..efc7de3d9a 100644 --- a/framework_crates/bones_ecs/src/world.rs +++ b/framework_crates/bones_ecs/src/world.rs @@ -36,6 +36,43 @@ impl Default for World { } } +impl DesyncHash for World { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + self.components.hash(hasher); + self.resources.hash(hasher); + } +} + +impl BuildDesyncNode for World { + fn desync_tree_node( + &self, + include_unhashable: bool, + ) -> DefaultDesyncTreeNode { + let mut hasher = H::default(); + + let mut child_nodes: Vec = vec![]; + + let components_node = self.components.desync_tree_node::(include_unhashable); + if let Some(hash) = components_node.get_hash() { + hash.hash(&mut hasher); + } + child_nodes.push(components_node); + + let resources_node = self.resources.desync_tree_node::(include_unhashable); + if let Some(hash) = resources_node.get_hash() { + hash.hash(&mut hasher); + } + child_nodes.push(resources_node); + + DefaultDesyncTreeNode::new( + Some(hasher.finish()), + Some("World".into()), + child_nodes, + DesyncNodeMetadata::None, + ) + } +} + impl World { /// Create a new [`World`]. pub fn new() -> Self { @@ -214,6 +251,22 @@ impl World { // Always maintain to clean up any killed entities self.maintain(); } + /// Build [`DefaultDesyncTree`] from [`World`]. 
+ /// + /// `include_unhashable` sets whether components or resources should be included as non-contributing nodes + /// in tree, to see what could be opted-in to desync hashing. + /// + /// # Panics + /// + /// This will immutably borrow all components and resources; if any are mutably borrowed, this will panic. + pub fn desync_hash_tree( + &self, + include_unhashable: bool, + ) -> DefaultDesyncTree { + let root = self.desync_tree_node::(include_unhashable); + + DefaultDesyncTree::from_root(root) + } } /// Creates an instance of the type this trait is implemented for diff --git a/framework_crates/bones_ecs/tests/desync_tree.rs b/framework_crates/bones_ecs/tests/desync_tree.rs new file mode 100644 index 0000000000..73e986e30a --- /dev/null +++ b/framework_crates/bones_ecs/tests/desync_tree.rs @@ -0,0 +1,46 @@ +//! Tests for [`DesyncTree`] build that rely on types in [`bones_ecs`]. + +use bones_ecs::prelude::*; + +#[derive(Clone, HasSchema, Debug, Eq, PartialEq, Default, DesyncHash)] +#[net] +#[repr(C)] +struct Pos(i32, i32); + +#[test] +fn desync_tree_entity_names() { + let world = World::default(); + + // Scope this so mut borrows are finished by the time the tree is created. 
+ let (ent1, ent2) = { + let mut entities = world.resource_mut::(); + let mut positions = world.component_mut::(); + let mut names = world.component_mut::(); + + let ent1 = entities.create(); + positions.insert(ent1, Pos(0, 0)); + + let ent2 = entities.create(); + positions.insert(ent2, Pos(1, 1)); + names.insert(ent2, "entity2".into()); + (ent1, ent2) + }; + + let mut found_ent1_metadata = false; + let mut found_ent2_metadata = false; + + let desync_tree = world.desync_hash_tree::(false); + for node in desync_tree.root().dfs_preorder_iter() { + if let DesyncNodeMetadata::Component { entity_idx } = node.metadata() { + if *entity_idx == ent1.index() { + found_ent1_metadata = true; + } else if *entity_idx == ent2.index() { + found_ent2_metadata = true; + assert_eq!(*node.name(), Some("entity2".to_string())); + } + } + } + + assert!(found_ent1_metadata); + assert!(found_ent2_metadata); +} diff --git a/framework_crates/bones_framework/Cargo.toml b/framework_crates/bones_framework/Cargo.toml index 1ba48f2e53..def89511af 100644 --- a/framework_crates/bones_framework/Cargo.toml +++ b/framework_crates/bones_framework/Cargo.toml @@ -30,6 +30,9 @@ scripting = ["dep:bones_scripting"] ## Enable networking debug window + frame prediction history. net-debug = ["ui"] +## Enables advanced desync debugging, generates hash tree of world for frames +desync-debug = ["dep:serde_json"] + #! ### Audio formats #! 
These features enable different audio formats @@ -95,6 +98,7 @@ bevy_tasks = "0.11" bytemuck = "1.12" either = "1.8" futures-lite = { workspace = true } +fxhash = { workspace = true } glam = "0.24" hex = "0.4" instant = { version = "0.1", features = ["wasm-bindgen"] } @@ -157,6 +161,7 @@ iroh-quinn = { version = "0.11" } tokio = { version = "1", features = ["rt-multi-thread", "macros"] } turborand = { version = "0.10.0", features = ["atomic"] } iroh-net = { workspace = true, features = ["discovery-local-network"] } +serde_json = { workspace = true, optional = true } directories = "5.0" diff --git a/framework_crates/bones_framework/src/networking.rs b/framework_crates/bones_framework/src/networking.rs index 8f52b20191..eb9d1f7bd2 100644 --- a/framework_crates/bones_framework/src/networking.rs +++ b/framework_crates/bones_framework/src/networking.rs @@ -8,10 +8,12 @@ use crate::networking::online::OnlineMatchmakerResponse; pub use crate::networking::random::RngGenerator; use crate::prelude::*; use bones_matchmaker_proto::{MATCH_ALPN, PLAY_ALPN}; -use ggrs::P2PSession; +use desync::{DesyncDebugHistoryBuffer, DetectDesyncs}; +use fxhash::FxHasher; +use ggrs::{DesyncDetection, P2PSession}; use instant::Duration; use once_cell::sync::Lazy; -use std::{fmt::Debug, marker::PhantomData, sync::Arc}; +use std::{fmt::Debug, hash::Hasher, marker::PhantomData, sync::Arc}; use tracing::{debug, error, info, trace, warn}; #[cfg(feature = "net-debug")] @@ -22,6 +24,7 @@ use { use crate::input::PlayerControls as PlayerControlsTrait; +pub mod desync; pub mod input; pub mod lan; pub mod online; @@ -62,7 +65,8 @@ impl From for NetworkInputStatus { /// Module prelude. 
pub mod prelude { pub use super::{ - input, lan, online, proto, random, DisconnectedPlayers, RngGenerator, SyncingInfo, RUNTIME, + desync::DetectDesyncs, input, lan, online, proto, random, DisconnectedPlayers, + RngGenerator, SyncingInfo, RUNTIME, }; #[cfg(feature = "net-debug")] @@ -550,6 +554,13 @@ pub struct GgrsSessionRunner<'a, InputTypes: NetworkInputConfig<'a>> { /// The random seed used for this session pub random_seed: u64, + + /// When provided, desync detection is enabled. Contains settings for desync detection. + detect_desyncs: Option, + + /// History buffer for desync debug data to fetch it upon detected desyncs. + /// [`DefaultDesyncTree`] will be generated and saved here if feature `desync-debug` is enabled. + pub desync_debug_history: Option>, } /// The info required to create a [`GgrsSessionRunner`]. @@ -572,8 +583,12 @@ pub struct GgrsSessionRunnerInfo { /// /// `None` will use Bone's default. pub local_input_delay: Option, + /// The random seed used for this session pub random_seed: u64, + + /// When provided, desync detection is enabled. Contains settings for desync detection. 
+ pub detect_desyncs: Option, } impl GgrsSessionRunnerInfo { @@ -583,6 +598,7 @@ impl GgrsSessionRunnerInfo { max_prediction_window: Option, local_input_delay: Option, random_seed: u64, + detect_desyncs: Option, ) -> Self { let player_idx = socket.player_idx(); let player_count = socket.player_count(); @@ -593,6 +609,7 @@ impl GgrsSessionRunnerInfo { max_prediction_window, local_input_delay, random_seed, + detect_desyncs, } } } @@ -608,6 +625,7 @@ where target_fps: Option, max_prediction_window: Option, local_input_delay: Option, + detect_desyncs: Option, matchmaker_resp_game_starting: OnlineMatchmakerResponse, ) -> Option { if let OnlineMatchmakerResponse::GameStarting { @@ -624,6 +642,7 @@ where max_prediction_window, local_input_delay, random_seed, + detect_desyncs, ), )) } else { @@ -662,11 +681,19 @@ where .try_send(NetworkDebugMessage::SetMaxPrediction(max_prediction)) .unwrap(); + let desync_detection = match info.detect_desyncs.as_ref() { + Some(config) => DesyncDetection::On { + interval: config.detection_interval, + }, + None => DesyncDetection::Off, + }; + let mut builder = ggrs::SessionBuilder::new() .with_num_players(info.player_count as usize) .with_input_delay(local_input_delay) .with_fps(network_fps) .unwrap() + .with_desync_detection_mode(desync_detection) .with_max_prediction_window(max_prediction) .unwrap(); @@ -685,6 +712,18 @@ where let session = builder.start_p2p_session(info.socket.clone()).unwrap(); + #[cfg(feature = "desync-debug")] + let desync_debug_history = if let Some(detect_desync) = info.detect_desyncs.as_ref() { + Some(DesyncDebugHistoryBuffer::::new( + detect_desync.detection_interval, + )) + } else { + None + }; + + #[cfg(not(feature = "desync-debug"))] + let desync_debug_history = None; + Self { last_player_input: InputTypes::Dense::default(), session, @@ -700,6 +739,8 @@ where local_input_delay, local_input_disabled: false, random_seed: info.random_seed, + detect_desyncs: info.detect_desyncs, + desync_debug_history, } } } @@ 
-822,6 +863,19 @@ where addr, } => { error!(%frame, %local_checksum, %remote_checksum, player=%addr, "Network de-sync detected"); + + #[cfg(feature = "desync-debug")] + { + if let Some(desync_debug_history) = &self.desync_debug_history { + if let Some(desync_hash_tree) = + desync_debug_history.get_frame_data(frame as u32) + { + let string = serde_json::to_string_pretty(desync_hash_tree) + .expect("Failed to serialize desync hash tree"); + error!("Desync hash tree: frame: {frame}\n{}", string); + } + } + } } } } @@ -845,9 +899,10 @@ where .unwrap(); } + let current_frame = self.session.current_frame(); + #[cfg(feature = "net-debug")] { - let current_frame = self.session.current_frame(); let confirmed_frame = self.session.confirmed_frame(); NETWORK_DEBUG_CHANNEL @@ -869,9 +924,45 @@ where for request in requests { match request { ggrs::GgrsRequest::SaveGameState { cell, frame } => { - cell.save(frame, Some(world.clone()), None) + // TODO: Do we only need to compute hash for desync interval frames? + // GGRS should only use hashes from fixed interval. + + // If desync detection enabled, hash world. + let checksum = if let Some(detect_desyncs) = + self.detect_desyncs.as_ref() + { + #[cfg(feature = "desync-debug")] + { + if let Some(desync_debug_history) = + &mut self.desync_debug_history + { + if desync_debug_history + .is_desync_detect_frame(frame as u32) + { + let tree = DefaultDesyncTree::from( + world.desync_tree_node::( + detect_desyncs.include_unhashable_nodes, + ), + ); + desync_debug_history.record(frame as u32, tree); + } + } + } + + if let Some(hash_func) = detect_desyncs.world_hash_func { + Some(hash_func(world) as u128) + } else { + let mut hasher = FxHasher::default(); + world.hash(&mut hasher); + Some(hasher.finish() as u128) + } + } else { + None + }; + + cell.save(frame, Some(world.clone()), checksum); } - ggrs::GgrsRequest::LoadGameState { cell, .. 
} => { + ggrs::GgrsRequest::LoadGameState { cell, frame } => { // Swap out sessions to preserve them after world save. // Sessions clone makes empty copy, so saved snapshots do not include sessions. // Sessions are borrowed from Game for execution of this session, @@ -886,6 +977,8 @@ where &mut sessions, &mut world.resource_mut::(), ); + + trace!("Loading (rollback) frame: {frame}"); } ggrs::GgrsRequest::AdvanceFrame { inputs: network_inputs, @@ -949,8 +1042,8 @@ where network_inputs.into_iter().enumerate() { trace!( - "Net player({player_idx}) local: {}, status: {status:?}, input: {:?}", - self.local_player_idx as usize == player_idx, + "Net player({player_idx}) local: {}, status: {status:?}, frame: {current_frame} input: {:?}", + self.local_player_idx == player_idx as u32, input ); player_inputs.network_update( @@ -1024,6 +1117,7 @@ where max_prediction_window: Some(self.session.max_prediction()), local_input_delay: Some(self.local_input_delay), random_seed: self.random_seed, + detect_desyncs: self.detect_desyncs.clone(), }; *self = GgrsSessionRunner::new(Some(self.original_fps as f32), runner_info); } diff --git a/framework_crates/bones_framework/src/networking/desync.rs b/framework_crates/bones_framework/src/networking/desync.rs new file mode 100644 index 0000000000..d760371bc8 --- /dev/null +++ b/framework_crates/bones_framework/src/networking/desync.rs @@ -0,0 +1,94 @@ +//! Desync detection and history buffer for desync trees. + +use std::collections::VecDeque; + +use bones_lib::prelude::*; + +/// Max frames of data in desync history buffer - this is set to match `ggrs::MAX_CHECKSUM_HISTORY_SIZE`, +/// but is private so cannot be used directly. +pub const MAX_DESYNC_HISTORY_BUFFER: usize = 32; + +/// Settings for desync detection +#[derive(Clone)] +pub struct DetectDesyncs { + /// Interval in frames of how often to hash state and check for desync with other clients. + /// i.e if set to 10, will check every 10th frame. 
+ pub detection_interval: u32, + + /// Override of hash function used to hash world for desync detection. + /// By default, [`World`]'s [`DesyncHash`] impl is used. + pub world_hash_func: Option u64>, + + /// When using feature `desync-debug`, a [`bones_utils::DesyncTree`] will be built. Resources and Components + /// that do not support hashing can be optionally included in tree to help highlight candidates + /// to be opted into desync-detection. + pub include_unhashable_nodes: bool, +} + +impl Default for DetectDesyncs { + fn default() -> Self { + Self { + detection_interval: 60, + world_hash_func: None, + include_unhashable_nodes: false, + } + } +} +/// Store history of desync detection data, such as a [`bones_utils::DesyncTree`]. When ggrs finds a desync in past, +/// we can retrieve this data for debugging. Ggrs has a fixed limit of pending desync frames it tests, +/// so we match it by keeping the last [`MAX_DESYNC_HISTORY_BUFFER`] of frame data at the desync detect interval. +/// +/// Desync data provided in `record` will only be saved if frame coincides with desync detect interval, otherwise +/// ggrs will never test this frame, and we do not need to buffer it. +pub struct DesyncDebugHistoryBuffer { + buffer: VecDeque<(u32, T)>, + + /// Desync detection interval, should match ggrs session config. + desync_detect_interval: u32, +} + +impl DesyncDebugHistoryBuffer { + /// Create buffer, use same desync detect interval configured on ggrs session. + pub fn new(desync_detect_interval: u32) -> Self { + Self { + desync_detect_interval, + buffer: default(), + } + } + + /// Check if this frame coincides with desync detection interval. + /// If not, we will not perform desync checks on it, and do not need to record history for frame. + pub fn is_desync_detect_frame(&self, frame: u32) -> bool { + // GGRS sends desync detections every X frames where X is interval, and first frame is interval. 
+ frame % self.desync_detect_interval == 0 + } + + /// Get desync data for frame if it is available. + pub fn get_frame_data(&self, frame: u32) -> Option<&T> { + // Don't bother looking for data if not a desync detect frame + if !self.is_desync_detect_frame(frame) { + return None; + } + + self.buffer.iter().find_map(|d| { + if d.0 == frame { + return Some(&d.1); + } + None + }) + } + + /// Possibly record frame and desync data. It is only recorded on frames matching + /// desync detect interval, as ggrs will not check for desyncs otherwise and we don't + /// need to save it. + pub fn record(&mut self, frame: u32, desync_data: T) { + // Only record if on a frame that will be desync detected. + if self.is_desync_detect_frame(frame) { + while self.buffer.len() >= MAX_DESYNC_HISTORY_BUFFER { + self.buffer.pop_front(); + } + + self.buffer.push_back((frame, desync_data)); + } + } +} diff --git a/framework_crates/bones_framework/src/render/transform.rs b/framework_crates/bones_framework/src/render/transform.rs index c42c17a196..0fc8a04585 100644 --- a/framework_crates/bones_framework/src/render/transform.rs +++ b/framework_crates/bones_framework/src/render/transform.rs @@ -5,7 +5,8 @@ use crate::prelude::*; /// The main transform component. /// /// Currently we don't have a hierarchy, and this is therefore a global transform. -#[derive(Clone, Copy, Debug, HasSchema)] +#[derive(Clone, Copy, Debug, HasSchema, DesyncHash)] +#[net] #[repr(C)] pub struct Transform { /// The position of the entity in the world. 
diff --git a/framework_crates/bones_schema/macros/src/lib.rs b/framework_crates/bones_schema/macros/src/lib.rs index 95dd82f7f8..d42da6498f 100644 --- a/framework_crates/bones_schema/macros/src/lib.rs +++ b/framework_crates/bones_schema/macros/src/lib.rs @@ -115,6 +115,11 @@ pub fn derive_has_schema(input: TokenStream) -> TokenStream { // Get the type datas that have been added and derived let derive_type_data_flags = get_flags_for_attr(&input, "derive_type_data"); + + let has_td_schema_desync_hash = derive_type_data_flags + .iter() + .any(|flag| flag.as_str() == "SchemaDesyncHash"); + let type_datas = { let add_derive_type_datas = derive_type_data_flags.into_iter().map(|ty| { let ty = format_ident!("{ty}"); @@ -122,6 +127,23 @@ pub fn derive_has_schema(input: TokenStream) -> TokenStream { tds.insert(<#ty as #schema_mod::FromType<#name>>::from_type()).unwrap(); } }); + + // Do we have #[net] attribute? + let has_net_attribute = input + .attributes() + .iter() + .any(|attr| attr.path.iter().any(|p| p.to_string().contains("net"))); + + // Only insert SchemaDesyncHash w/ #[net] sugar when #[derive_type_data(SchemaDesyncHash)] not present + // (Avoid double insert) + let add_desync_hash_type_data = if has_net_attribute && !has_td_schema_desync_hash { + quote! { + tds.insert(<#schema_mod::desync_hash::SchemaDesyncHash as #schema_mod::FromType<#name>>::from_type()).unwrap(); + } + } else { + quote! {} + }; + let add_type_datas = input .attributes() .iter() @@ -132,6 +154,7 @@ pub fn derive_has_schema(input: TokenStream) -> TokenStream { quote! { { let tds = #schema_mod::alloc::TypeDatas::default(); + #add_desync_hash_type_data #(#add_derive_type_datas),* #( tds.insert(#add_type_datas).unwrap(); diff --git a/framework_crates/bones_schema/src/desync_hash.rs b/framework_crates/bones_schema/src/desync_hash.rs new file mode 100644 index 0000000000..0ad9ae90fc --- /dev/null +++ b/framework_crates/bones_schema/src/desync_hash.rs @@ -0,0 +1,96 @@ +//! 
Implementation of [`DesyncHash`] for [`SchemaRef`]. +//! +//! SchemaRef's DesyncHash impl calls a function pointer that is optional. It can be opted in +//! by adding `#[net]`. + +use std::{any::type_name, hash::Hasher}; + +use bones_utils::DesyncHash; + +use crate::{prelude::*, ptr::SchemaRef, FromType, HasSchema, Schema, SchemaData}; + +/// Used in [`Schema`] `TypeDatas` to optionally implement desync hash. +pub struct SchemaDesyncHash { + /// Desync hash fn pointer + pub desync_hash_fn: for<'a> fn(SchemaRef<'a>, hasher: &mut dyn Hasher), +} + +unsafe impl HasSchema for SchemaDesyncHash { + fn schema() -> &'static crate::Schema { + use std::{alloc::Layout, any::TypeId, sync::OnceLock}; + static S: OnceLock<&'static Schema> = OnceLock::new(); + let layout = Layout::new::(); + S.get_or_init(|| { + SCHEMA_REGISTRY.register(SchemaData { + name: type_name::().into(), + full_name: format!("{}::{}", module_path!(), type_name::()).into(), + kind: SchemaKind::Primitive(Primitive::Opaque { + size: layout.size(), + align: layout.align(), + }), + type_id: Some(TypeId::of::()), + clone_fn: None, + drop_fn: None, + default_fn: None, + hash_fn: None, + eq_fn: None, + type_data: Default::default(), + }) + }) + } +} + +impl FromType for SchemaDesyncHash { + fn from_type() -> Self { + SchemaDesyncHash { + desync_hash_fn: |reference, hasher| { + T::schema() + .ensure_match(reference.schema()) + .expect("Schema type does not match schema ref."); + + unsafe { + DesyncHash::hash(&*reference.as_ptr().cast::(), hasher); + } + }, + } + } +} + +impl<'a> DesyncHash for SchemaRef<'a> { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + if let Some(schema_desync_hash) = self.schema().type_data.get::() { + (schema_desync_hash.desync_hash_fn)(*self, hasher); + } + } +} + +impl<'a> DesyncHash for SchemaRefMut<'a> { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + if let Some(schema_desync_hash) = self.schema().type_data.get::() { + 
(schema_desync_hash.desync_hash_fn)(self.as_ref(), hasher); + } + } +} + +impl DesyncHash for SchemaId { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + hasher.write_u32(self.id()); + } +} + +impl DesyncHash for SVec { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + for value in self { + value.hash(hasher); + } + } +} + +impl DesyncHash for SMap { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + for (key, value) in self.iter() { + key.hash(hasher); + value.hash(hasher); + } + } +} diff --git a/framework_crates/bones_schema/src/lib.rs b/framework_crates/bones_schema/src/lib.rs index 5798335f35..4d27e657d7 100644 --- a/framework_crates/bones_schema/src/lib.rs +++ b/framework_crates/bones_schema/src/lib.rs @@ -20,6 +20,7 @@ pub mod prelude { pub use crate::ser_de::*; pub use crate::{ alloc::{SMap, SVec, SchemaMap, SchemaVec}, + desync_hash::*, ptr::*, registry::*, schema::*, @@ -33,6 +34,7 @@ mod schema; pub use schema::*; pub mod alloc; +pub mod desync_hash; pub mod ptr; pub mod raw_fns; pub mod registry; diff --git a/framework_crates/bones_schema/src/registry.rs b/framework_crates/bones_schema/src/registry.rs index 76b6dcd7e3..0f3273c5a9 100644 --- a/framework_crates/bones_schema/src/registry.rs +++ b/framework_crates/bones_schema/src/registry.rs @@ -16,6 +16,13 @@ pub struct SchemaId { id: u32, } +impl SchemaId { + /// Get schema id + pub fn id(&self) -> u32 { + self.id + } +} + // Note: The schema type is here in the registry module to prevent modification of registered // schemas by other modules. The idea is that once a schema is registered, it is unchangable and // "certified" so to speak. 
diff --git a/framework_crates/bones_utils/Cargo.toml b/framework_crates/bones_utils/Cargo.toml index 69547da46e..8a2965de83 100644 --- a/framework_crates/bones_utils/Cargo.toml +++ b/framework_crates/bones_utils/Cargo.toml @@ -12,21 +12,30 @@ keywords.workspace = true [features] default = ["ulid"] -serde = ["dep:serde", "hashbrown/serde"] -ulid = ["dep:ulid", "instant", "turborand"] +glam = ["dep:glam"] +serde = ["dep:serde", "hashbrown/serde"] +ulid = ["dep:ulid", "instant", "turborand"] [dependencies] bones_utils_macros = { version = "0.4", path = "./macros" } fxhash = { workspace = true } hashbrown = { workspace = true } +tree_iterators_rs = { version = "1.2.1" } +ustr = { workspace = true } # Optional instant = { version = "0.1", features = ["wasm-bindgen"], optional = true } serde = { version = "1.0", optional = true } turborand = { version = "0.10", optional = true } ulid = { version = "1.0", optional = true } +glam = { version = "0.24", optional = true } +paste = { version = "1.0" } # Make sure that the getrandom package, used in `ulid` works on web # when compiling for WASM. 
[target.'cfg(target_arch = "wasm32")'.dependencies] getrandom = { version = "0.2", features = ["js"] } + +[dev-dependencies] +bones_schema = { version = "0.4", path = "../bones_schema" } +bones_utils = { version = "0.4", path = ".", features = ["glam"] } diff --git a/framework_crates/bones_utils/macros/Cargo.toml b/framework_crates/bones_utils/macros/Cargo.toml index 0ddf2f36a1..8e45a8a463 100644 --- a/framework_crates/bones_utils/macros/Cargo.toml +++ b/framework_crates/bones_utils/macros/Cargo.toml @@ -14,5 +14,7 @@ keywords.workspace = true proc-macro = true [dependencies] -quote = "1.0" -venial = "0.5" +quote = "1.0" +venial = "0.5" +proc-macro2 = "1" +syn = { version = "2" } diff --git a/framework_crates/bones_utils/macros/src/lib.rs b/framework_crates/bones_utils/macros/src/lib.rs index 0f34c78b27..8f044b289a 100644 --- a/framework_crates/bones_utils/macros/src/lib.rs +++ b/framework_crates/bones_utils/macros/src/lib.rs @@ -1,5 +1,7 @@ use proc_macro::TokenStream; +use proc_macro2::TokenStream as TokenStream2; use quote::{format_ident, quote, quote_spanned, spanned::Spanned}; +use venial::StructFields; /// Helper macro to bail out of the macro with a compile error. macro_rules! throw { @@ -18,6 +20,30 @@ fn is_simple_named_attr(attr: &venial::Attribute, name: &str) -> bool { && attr.get_value_tokens().is_empty() } +/// Attribute adding extra functionality for networking. +/// +/// For example, provides sugar for `#[derive_type_data(SchemaDesyncHash)]`, which +/// opts in `SchemaRef` to support [`DesyncHash`], so it may be included in hash for desync detection. +#[proc_macro_attribute] +pub fn net(_attr: TokenStream, item: TokenStream) -> TokenStream { + // Error if #[net] is used with #[schema(no_clone)]. 
+ let input = venial::parse_declaration(item.clone().into()).unwrap(); + + if let Some(schema_attr) = input + .attributes() + .iter() + .find(|attr| attr.path.len() == 1 && attr.path[0].to_string().as_str() == "schema") + { + if let venial::AttributeValue::Group(_, value) = &schema_attr.value { + if value.iter().any(|f| f.to_string().as_str() == "no_clone") { + panic!("#[net] was used with #[schema(no_clone)]. A Schema that cannot be cloned will never be deterministic in network play."); + } + }; + } + + item +} + /// Derive macro for deriving [`Deref`] on structs with one field. #[proc_macro_derive(Deref, attributes(deref))] pub fn derive_deref(input: TokenStream) -> TokenStream { @@ -168,3 +194,188 @@ pub fn derive_deref_mut(input: TokenStream) -> TokenStream { throw!(input, "Cannot derive DerefMut on anything but structs."); } } + +#[proc_macro_derive(DesyncHash, attributes(desync_hash_module, desync_exclude))] +pub fn derive_desync_hash(input: TokenStream) -> TokenStream { + let input = venial::parse_declaration(input.into()).unwrap(); + let name = input.name().expect("Type must have a name"); + + // Get the schema module, reading optionally from the `schema_module` attribute, so that we can + // set the module to `crate` when we want to use it within the `bones_schema` crate itself. + let desync_hash_module = input + .attributes() + .iter() + .find_map(|attr| { + (attr.path.len() == 1 && attr.path[0].to_string() == "desync_hash_module").then(|| { + attr.value + .get_value_tokens() + .iter() + .cloned() + .collect::() + }) + }) + .unwrap_or_else(|| quote!(bones_utils)); + + // Helper to get hash invocations of struct fields + let hash_struct_fields = |fields: &StructFields| { + match fields { + venial::StructFields::Tuple(tuple) => tuple + .fields + .iter() + .enumerate() + .map(|(idx, (field, _))| { + let ty = &field.ty; + let idx = syn::Index::from(idx); + quote! 
{<#ty as #desync_hash_module::DesyncHash>::hash(&self.#idx, hasher);} + }) + .collect::>(), + venial::StructFields::Named(named) => named + .fields + .iter() + .filter_map(|(field, _)| { + let name = &field.name; + let ty = &field.ty; + if !field.attributes.iter().any(|attr| { + attr.path[0].to_string() == "desync_exclude" + }) { + return Some(quote! {<#ty as #desync_hash_module::DesyncHash>::hash(&self.#name, hasher);}) + } + None + }) + .collect::>(), + venial::StructFields::Unit => vec![], + } + }; + + // Get fields of enum variant + let enum_variant_fields = |fields: &StructFields| match fields { + venial::StructFields::Tuple(tuple) => { + // Build identifiers for tuple as as a,b,c... + + if tuple.fields.len() > 26 { + panic!("DesyncHash derive macro does not support variants of tuples with more than 26 fields."); + } + let identifiers: Vec<_> = (0..tuple.fields.len()) + .map(|i| format_ident!("{}", (b'a' + i as u8) as char)) + .collect(); + + // format as (a, b, c, ...) + let tuple_fields = quote! { + (#(#identifiers),*) + }; + + // generate invocations for each field in tuple using generated identifier + let invocations = identifiers + .iter() + .map(|ident| { + quote! {#desync_hash_module::DesyncHash::hash(#ident, hasher);} + }) + .collect::>(); + (tuple_fields, invocations) + } + venial::StructFields::Named(named) => { + let mut any_fields_excluded = false; + + let field_idents: Vec<_> = named + .fields + .iter() + .filter_map(|f| { + if f.0 + .attributes + .iter() + .any(|attr| attr.path[0].to_string() == "desync_exclude") + { + any_fields_excluded = true; + return None; + } + Some(&f.0.name) + }) + .collect(); + + // format list of fields as '{ fieldA, fieldB, }' + let named_fields = if !any_fields_excluded { + quote! { + {#(#field_idents),*} + } + } else if !field_idents.is_empty() { + // If any fields excluded, include '..' to avoid compilation error + quote! { + {#(#field_idents),* , ..} + } + } else { + // All fields of variant were excluded + quote! 
{ {..} } + }; + + let invocations = field_idents + .iter() + .map(|ident| { + quote! {#desync_hash_module::DesyncHash::hash(#ident, hasher);} + }) + .collect::>(); + (named_fields, invocations) + } + venial::StructFields::Unit => (quote! {}, vec![]), + }; + + let field_hash_invocations: Vec<_> = match &input { + venial::Declaration::Struct(s) => hash_struct_fields(&s.fields), + venial::Declaration::Enum(e) => { + let mut variants = Vec::new(); + + let enum_name = &e.name; + for (idx, v) in e.variants.items().enumerate() { + let variant_name = &v.name; + let variant_excluded = v + .attributes + .iter() + .any(|attr| attr.path[0].to_string() == "desync_exclude"); + + // Excluded variant skips all invocations for hashing fields, + // however index of variant is still hashed, making it unique from + // other variants + let (variant_fields_string, invocations) = if !variant_excluded { + enum_variant_fields(&v.contents) + } else { + (quote! { {..} }, vec![]) + }; + + variants.push(quote! { + #enum_name::#variant_name #variant_fields_string => { + // Hash index of variant to ensure that two variants are unique + hasher.write_usize(#idx); + #( + #invocations + )* + }, + }); + } + + vec![quote! { + match self { + #(#variants)* + } + }] + } + venial::Declaration::Union(_) => { + panic!("DesyncHash derive macro impl does not support Unions"); + } + _ => vec![], + }; + + let combined_hash_invocations = field_hash_invocations.iter().fold(quote! {}, |acc, q| { + quote! { + #acc + #q + } + }); + + quote! { + impl #desync_hash_module::DesyncHash for #name { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + #combined_hash_invocations + } + } + } + .into() +} diff --git a/framework_crates/bones_utils/src/desync_hash.rs b/framework_crates/bones_utils/src/desync_hash.rs new file mode 100644 index 0000000000..9aeb36431f --- /dev/null +++ b/framework_crates/bones_utils/src/desync_hash.rs @@ -0,0 +1,170 @@ +//! [`DesyncHash`] trait and impls, for detecting net desync. +//! +//! 
In order to use [`DesyncHash`] with [`glam`] types, the "glam" feature flag must be used. + +use std::time::Duration; +use ustr::Ustr; + +pub mod tree; +pub use tree::*; + +/// [`DesyncHash`] is used to hash type and compare over network to detect desyncs. +/// +/// In order to opt in a `HasSchema` Component or Resource to be included in hash of World in networked session, +/// `#[net]` or `#[derive_type_data(SchemaDesyncHash)]` must also be included. +/// +/// Fields may be excluded from hash by using attribute: `#[desync_exclude]` +pub trait DesyncHash { + /// Update hasher from type's values + fn hash(&self, hasher: &mut dyn std::hash::Hasher); +} + +/// Extension of [`DesyncHash`] that is automatically implemented for `T: DesyncHash`. +/// Adds helper to compute standalone hash instead of updating a hasher. +pub trait DesyncHashImpl { + /// Compute hash of type with provided hasher. + fn compute_hash(&self) -> u64; +} + +impl DesyncHashImpl for T { + fn compute_hash(&self) -> u64 { + let mut hasher = H::default(); + self.hash(&mut hasher); + hasher.finish() + } +} + +impl DesyncHash for Duration { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + self.as_nanos().hash(hasher); + } +} + +impl DesyncHash for () { + fn hash(&self, _hasher: &mut dyn std::hash::Hasher) {} +} + +impl DesyncHash for bool { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + hasher.write_u8(*self as u8) + } +} + +impl DesyncHash for [T; N] { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + for value in self { + DesyncHash::hash(value, hasher); + } + } +} + +impl DesyncHash for Vec { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + for value in self { + value.hash(hasher); + } + } +} +macro_rules! desync_hash_impl_int { + ($ty:ident) => { + impl DesyncHash for $ty { + ::paste::paste! { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + hasher.[](*self); + } + } + } + }; +} + +macro_rules! 
desync_hash_impl_float { + ($ty:ident) => { + impl DesyncHash for $ty { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + if self.is_nan() { + // Ensure all NaN representations hash to the same value + hasher.write(&Self::to_ne_bytes(Self::NAN)); + } else if *self == 0.0 { + // Ensure both zeroes hash to the same value + hasher.write(&Self::to_ne_bytes(0.0)); + } else { + hasher.write(&Self::to_ne_bytes(*self)); + } + } + } + }; +} + +macro_rules! desync_hash_impl_as_bytes { + ($ty:ident) => { + impl DesyncHash for $ty { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + hasher.write(self.as_bytes()); + } + } + }; +} + +desync_hash_impl_float!(f32); +desync_hash_impl_float!(f64); + +desync_hash_impl_int!(i8); +desync_hash_impl_int!(i16); +desync_hash_impl_int!(i32); +desync_hash_impl_int!(i64); +desync_hash_impl_int!(i128); +desync_hash_impl_int!(isize); +desync_hash_impl_int!(u8); +desync_hash_impl_int!(u16); +desync_hash_impl_int!(u32); +desync_hash_impl_int!(u64); +desync_hash_impl_int!(u128); +desync_hash_impl_int!(usize); + +desync_hash_impl_as_bytes!(String); +desync_hash_impl_as_bytes!(str); +desync_hash_impl_as_bytes!(Ustr); + +#[cfg(feature = "glam")] +mod impl_glam { + use glam::*; + + use super::DesyncHash; + + macro_rules! desync_hash_impl_glam_vecs { + ($id:ident) => { + paste::paste! { + desync_hash_impl_glam!( [< $id 2 >], x, y); + desync_hash_impl_glam!( [< $id 3 >], x, y, z); + desync_hash_impl_glam!( [< $id 4 >], x, y, z, w); + } + }; + } + + macro_rules! 
desync_hash_impl_glam { + ($t:ty, $($field:ident),+) => { + impl DesyncHash for $t { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + // $(self.$field.hash(hasher);)* + // $(hasher.(self.$field);)* + $(DesyncHash::hash(&self.$field, hasher);)* + } + } + }; + } + + desync_hash_impl_glam_vecs!(BVec); + desync_hash_impl_glam_vecs!(UVec); + desync_hash_impl_glam_vecs!(IVec); + desync_hash_impl_glam_vecs!(Vec); + desync_hash_impl_glam_vecs!(DVec); + + impl DesyncHash for Quat { + fn hash(&self, hasher: &mut dyn std::hash::Hasher) { + self.x.hash(hasher); + self.y.hash(hasher); + self.z.hash(hasher); + self.w.hash(hasher); + } + } +} diff --git a/framework_crates/bones_utils/src/desync_hash/tree.rs b/framework_crates/bones_utils/src/desync_hash/tree.rs new file mode 100644 index 0000000000..d485dad114 --- /dev/null +++ b/framework_crates/bones_utils/src/desync_hash/tree.rs @@ -0,0 +1,182 @@ +//! Implementation of [`DesyncTree`] trait for hash tree in desync detection. + +use std::slice::Iter; + +pub use tree_iterators_rs::prelude::BorrowedTreeNode; + +/// Tree of desync hashes +pub trait DesyncTree: Clone { + /// Node type + type Node; + + /// Get root hash of tree + fn get_hash(&self) -> Option; + + /// Get root node + fn root(&self) -> &Self::Node; + + /// make tree from root node + fn from_root(root: Self::Node) -> Self; +} + +/// [`DesyncTree`] node trait, built from children and hash. A node is effectively a sub-tree, +/// as we build the tree bottom-up. +pub trait DesyncTreeNode: Clone { + /// Get node hash + fn get_hash(&self) -> Option; + + /// Get children + fn children(&self) -> &Vec; + + /// Get children mut + fn children_mut(&mut self) -> &mut Vec; +} + +/// Implement to allow type to create a [`DesyncTreeNode`] containing hash built from children. +pub trait BuildDesyncNode { + /// `include_unhashable` sets whether components or resources be included as non-contributing nodes + /// in tree, to see what could be opted-in. 
+ fn desync_tree_node( + &self, + include_unhashable: bool, + ) -> DefaultDesyncTreeNode; +} + +/// Metadata optionally included with [`DesyncTreeNode`]. +#[derive(Copy, Clone, Default)] +pub enum DesyncNodeMetadata { + /// No additional metadata + #[default] + None, + /// Node is a component + Component { + /// Entity idx of component + entity_idx: u32, + }, +} + +/// Default impl for [`DesyncTreeNode`]. +#[derive(Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct DefaultDesyncTreeNode { + name: Option, + hash: Option, + children: Vec, + + /// Some userdata that can be included in node. + #[cfg_attr(feature = "serde", serde(skip))] + metadata: DesyncNodeMetadata, +} + +impl DefaultDesyncTreeNode { + /// Create new node + pub fn new( + hash: Option, + name: Option, + children: Vec, + metadata: DesyncNodeMetadata, + ) -> Self { + Self { + name, + hash, + children, + metadata, + } + } + + /// Get node metadata + pub fn metadata(&self) -> &DesyncNodeMetadata { + &self.metadata + } + + /// Name of node + pub fn name(&self) -> &Option { + &self.name + } + + /// Set the name of node + pub fn set_name(&mut self, name: String) { + self.name = Some(name); + } + + /// Get node hash + pub fn get_hash(&self) -> Option { + self.hash + } + + /// Get children + pub fn children(&self) -> &Vec { + &self.children + } + + /// Get children mut + pub fn children_mut(&mut self) -> &mut Vec { + &mut self.children + } +} + +impl PartialEq for DefaultDesyncTreeNode { + fn eq(&self, other: &Self) -> bool { + self.hash == other.hash + } +} + +impl Eq for DefaultDesyncTreeNode {} + +impl PartialOrd for DefaultDesyncTreeNode { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.hash.cmp(&other.hash)) + } +} + +impl Ord for DefaultDesyncTreeNode { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.hash.cmp(&other.hash) + } +} + +/// Auto impl support for iterating over tree +impl<'a> BorrowedTreeNode<'a> for 
DefaultDesyncTreeNode { + type BorrowedValue = &'a Self; + + type BorrowedChildren = Iter<'a, DefaultDesyncTreeNode>; + + fn get_value_and_children_iter( + &'a self, + ) -> (Self::BorrowedValue, Option) { + if self.children.is_empty() { + return (self, None); + } + + (self, Some(self.children.iter())) + } +} + +/// Tree of desync hashes, allows storing hash of world and children such as components and resources. +#[derive(Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct DefaultDesyncTree { + root: DefaultDesyncTreeNode, +} + +impl From for DefaultDesyncTree { + fn from(value: DefaultDesyncTreeNode) -> Self { + Self::from_root(value) + } +} + +impl DesyncTree for DefaultDesyncTree { + type Node = DefaultDesyncTreeNode; + + fn get_hash(&self) -> Option { + self.root.get_hash() + } + + fn root(&self) -> &Self::Node { + &self.root + } + + fn from_root(root: Self::Node) -> Self { + Self { root } + } +} diff --git a/framework_crates/bones_utils/src/lib.rs b/framework_crates/bones_utils/src/lib.rs index fef20d5dfa..5d1a70fb62 100644 --- a/framework_crates/bones_utils/src/lib.rs +++ b/framework_crates/bones_utils/src/lib.rs @@ -8,6 +8,7 @@ mod collections; mod default; +mod desync_hash; #[cfg(feature = "ulid")] mod labeled_id; mod names; @@ -21,7 +22,7 @@ macro_rules! 
pub_use { () => { #[cfg(feature = "turborand")] pub use crate::random::*; - pub use crate::{collections::*, default::*, names::*}; + pub use crate::{collections::*, default::*, desync_hash::*, names::*}; #[cfg(feature = "ulid")] pub use crate::{labeled_id::*, ulid::*}; pub use bones_utils_macros::*; diff --git a/framework_crates/bones_utils/tests/tests.rs b/framework_crates/bones_utils/tests/tests.rs new file mode 100644 index 0000000000..67fe46017c --- /dev/null +++ b/framework_crates/bones_utils/tests/tests.rs @@ -0,0 +1,186 @@ +use std::hash::Hasher; + +use fxhash::FxHasher; +use glam::Vec3; + +use bones_schema::prelude::*; +use bones_utils::{net, DesyncHash}; + +#[derive(HasSchema, DesyncHash, Debug, Clone, Default)] +#[desync_hash_module(crate)] +#[net] +struct StructA { + a: f32, + b: String, +} + +#[derive(HasSchema, DesyncHash, Debug, Clone, Default)] +#[desync_hash_module(crate)] +struct StructB { + a: f32, + b: String, +} + +#[derive(HasSchema, DesyncHash, Debug, Clone, Default)] +#[desync_hash_module(crate)] +#[allow(dead_code)] +struct StructC { + a: f32, + #[desync_exclude] + b: String, +} + +/// Test DesyncHash proc macro on Enum variants +#[derive(HasSchema, DesyncHash, Debug, Clone, Default)] +#[repr(C, u8)] +#[desync_hash_module(crate)] +#[allow(dead_code)] +enum EnumA { + #[default] + A, + B, + C(), + D(f32, u8), + E { + a: f64, + b: u16, + }, + F = 52, +} + +#[derive(HasSchema, DesyncHash, Debug, Clone, Default)] +#[repr(C, u8)] +#[desync_hash_module(crate)] +#[allow(dead_code)] +enum EnumB { + A { + #[desync_exclude] + a: f64, + + b: u16, + }, + #[default] + #[desync_exclude] + B, + #[desync_exclude] + C { a: f32 }, +} + +fn hash_value(value: &T) -> u64 { + let mut hasher = FxHasher::default(); + DesyncHash::hash(value, &mut hasher); + hasher.finish() +} + +#[test] +fn desync_hash_enum() { + let a = EnumA::A; + let b = EnumA::B; + + // ensure enum variants do not hash to same value + assert_ne!(hash_value(&a), hash_value(&b)); + + // verify 
mutating field of tuple variant gives different hash + let d1 = EnumA::D(16.0, 3); + let d2 = EnumA::D(16.0, 2); + assert_ne!(hash_value(&d1), hash_value(&d2)); + + // verify mutating field of named struct variant gives different hash + let e1 = EnumA::E { a: 1.0, b: 2 }; + let e2 = EnumA::E { a: 1.0, b: 1 }; + assert_ne!(hash_value(&e1), hash_value(&e2)); +} + +#[test] +fn desync_hash_struct() { + let a = StructA { + a: 1.0, + b: "foo".to_string(), + }; + let b = StructA { + a: 1.0, + b: "bar".to_string(), + }; + + assert_ne!(hash_value(&a), hash_value(&b)); +} + +#[test] +fn desync_hash_exclude_struct_field() { + let a = StructC { + a: 1.0, + b: "foo".to_string(), + }; + let b = StructC { + a: 1.0, + b: "bar".to_string(), + }; + + // field b is excluded on StructC, hash should be the same. + assert_eq!(hash_value(&a), hash_value(&b)); +} + +#[test] +fn desync_hash_exclude_enum_variant_named_field() { + let a = EnumB::A { a: 1.0, b: 1 }; + let b = EnumB::A { a: 0.0, b: 1 }; + + // field a is excluded on EnumB::A variant, hash should be the same. + assert_eq!(hash_value(&a), hash_value(&b)); +} + +#[test] +fn desync_hash_exclude_enum_variant() { + let a = EnumB::C { a: 1.0 }; + let b = EnumB::C { a: 0.0 }; + + // Variant EnumB::C Is excluded, should be equal. + assert_eq!(hash_value(&a), hash_value(&b)); + + // Although variant may be excluded and its fields not hashed, + // two variants (even if both excluded) should give unique hash. + let c = EnumB::B; + assert_ne!(hash_value(&c), hash_value(&a)) +} + +#[test] +fn desync_hash_glam() { + let a = Vec3::new(1.0, 2.0, 3.0); + let b = Vec3::new(1.0, 1.0, 1.0); + + assert_ne!(hash_value(&a), hash_value(&b)); +} + +#[test] +fn desync_hash_schemaref() { + // Test that these hash to different values. 
+ let a = StructA { + a: 1.0, + b: "foo".to_string(), + }; + let b = StructA { + a: 1.0, + b: "bar".to_string(), + }; + let a_hash = hash_value(&a.as_schema_ref()); + let b_hash = hash_value(&b.as_schema_ref()); + assert_ne!(a_hash, b_hash); + + // StructB does not support hashing, + // its SchemaRef does not have impl for DesyncHash, + // even if data is different, it will hash to 0. + let a = StructB { + a: 1.0, + b: "foo".to_string(), + }; + let b = StructB { + a: 1.0, + b: "bar".to_string(), + }; + let a_hash = hash_value(&a.as_schema_ref()); + let b_hash = hash_value(&b.as_schema_ref()); + + // They should both hash to 0, StructB doesn't support hashing. + assert_eq!(a_hash, b_hash); + assert_eq!(a_hash, 0); +} diff --git a/other_crates/bones_matchmaker/src/matchmaking.rs b/other_crates/bones_matchmaker/src/matchmaking.rs index 68128f6d6b..07c9a44378 100644 --- a/other_crates/bones_matchmaker/src/matchmaking.rs +++ b/other_crates/bones_matchmaker/src/matchmaking.rs @@ -216,7 +216,7 @@ async fn send_matchmaking_updates( })?; // Send first update and check active connections - for (_index, conn) in connections.into_iter().enumerate() { + for conn in connections.into_iter() { if let Ok(mut send) = conn.open_uni().await { if send.write_all(&first_update_message).await.is_ok() && send.finish().is_ok()