
Commit: lint
cbournhonesque-sc committed Dec 21, 2023
1 parent a43c566 commit 291a920
Showing 12 changed files with 118 additions and 97 deletions.
15 changes: 15 additions & 0 deletions NOTES.md
@@ -12,6 +12,21 @@
because start tick or end tick are not updated correctly in some edge cases.


- add PredictionGroup and InterpolationGroup.
- on top of ReplicationGroup?
- or do we just re-use the replication group id (which will usually contain a remote entity id) and use it to identify the prediction/interpolation group?
- then we add the prediction group id on the Confirmed or Predicted components?
- Then we don't really need the Confirmed/Predicted components anymore; we could just have resources on the Prediction or Interpolation plugin
- The resource needs:
- confirmed<->predicted mapping
- for a given prediction-group, the dependency graph of the entities (using confirmed entities?)
- The prediction systems will:
- iterate through the dependency graph of the prediction group
- for each entity, fetch the confirmed/predicted entity
- do entity mapping if needed
- users can add their own entities to the prediction group (even if …)
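
A rough sketch of what such a resource could look like (hypothetical names and field shapes, not part of the crate yet):

```rust
use bevy::prelude::{Entity, Resource};
use bevy::utils::HashMap;

/// Hypothetical id for a prediction group (could simply re-use the replication group id).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct PredictionGroupId(pub u64);

/// Hypothetical resource owned by the Prediction plugin, replacing the per-entity
/// Confirmed/Predicted components.
#[derive(Resource, Default)]
pub struct PredictionGroups {
    /// confirmed <-> predicted mapping
    pub confirmed_to_predicted: HashMap<Entity, Entity>,
    pub predicted_to_confirmed: HashMap<Entity, Entity>,
    /// for each group, the confirmed entities in dependency (topological) order,
    /// so prediction systems can iterate the Vec front-to-back
    pub groups: HashMap<PredictionGroupId, Vec<Entity>>,
}

impl PredictionGroups {
    /// Walk a group in dependency order, yielding (confirmed, predicted) pairs;
    /// `predicted` is None if the predicted entity has not been spawned yet.
    pub fn iter_group(
        &self,
        group: PredictionGroupId,
    ) -> impl Iterator<Item = (Entity, Option<Entity>)> + '_ {
        self.groups.get(&group).into_iter().flatten().map(|confirmed| {
            (
                *confirmed,
                self.confirmed_to_predicted.get(confirmed).copied(),
            )
        })
    }
}
```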



- DEBUGGING REPLICATION BOX:
- the sync from confirmed to predict might not only be for replicated components, but also components that were
2 changes: 1 addition & 1 deletion examples/interest_management/protocol.rs
@@ -92,7 +92,7 @@ pub struct Channel1;
#[derive(Message, Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct Message1(pub usize);

#[message_protocol(protocol = "MyProtocol", derive(Debug))]
#[message_protocol(protocol = "MyProtocol")]
pub enum Messages {
Message1(Message1),
}
4 changes: 2 additions & 2 deletions examples/replication_groups/client.rs
@@ -198,7 +198,7 @@ pub(crate) fn debug_prediction_pre_rollback(
inputs = ?client.get_input_buffer(),
"prediction pre rollback debug");
for (parent, tail_history) in tail_query.iter() {
let (parent_history) = parent_query
let parent_history = parent_query
.get(parent.0)
.expect("Tail entity has no parent entity!");
info!(?parent_history, "parent");
@@ -213,7 +213,7 @@ pub(crate) fn debug_prediction_post_rollback(
) {
info!(tick = ?client.tick(), "prediction post rollback debug");
for (parent, tail_history) in tail_query.iter() {
let (parent_history) = parent_query
let parent_history = parent_query
.get(parent.0)
.expect("Tail entity has no parent entity!");
info!(?parent_history, "parent");
15 changes: 3 additions & 12 deletions examples/replication_groups/protocol.rs
@@ -132,17 +132,6 @@ pub struct TailLength(pub(crate) f32);
// tail inflection points, from front (point closest to the head) to back (tail end point)
pub struct TailPoints(pub(crate) VecDeque<(Vec2, Direction)>);

pub struct TailPointsInterpolation;
// TODO: annoyingly, we still need to implement InterpFn for TailPointsInterpolation
// even if we completely disable interpolation, because the derive macro demands it.
// maybe also add an attribute on the derive macro?
impl InterpFn<TailPoints> for TailPointsInterpolation {
fn lerp(start: TailPoints, other: TailPoints, t: f32) -> TailPoints {
panic!("hi");
start
}
}

pub fn segment_length(from: Vec2, to: Vec2) -> f32 {
(from - to).length()
}
@@ -218,7 +207,9 @@ pub enum Components {
PlayerColor(PlayerColor),
#[sync(once)]
TailLength(TailLength),
#[sync(full, lerp = "TailPointsInterpolation")]
// we set the interpolation function to NoInterpolation because we are using our own custom interpolation logic
// (by default it would use LinearInterpolation, which requires Add and Mul bounds on this component)
#[sync(full, lerp = "NoInterpolation")]
TailPoints(TailPoints),
#[sync(once)]
PlayerParent(PlayerParent),
2 changes: 1 addition & 1 deletion examples/replication_groups/shared.rs
@@ -17,7 +17,7 @@ pub fn shared_config() -> SharedConfig {
tick_duration: Duration::from_secs_f64(1.0 / 64.0),
},
log: LogConfig {
level: Level::INFO,
level: Level::WARN,
filter: "wgpu=error,wgpu_hal=error,naga=warn,bevy_app=info,bevy_render=warn,quinn=warn"
.to_string(),
},
14 changes: 9 additions & 5 deletions examples/simple_box/protocol.rs
@@ -1,4 +1,5 @@
use bevy::prelude::{default, Bundle, Color, Component, Deref, DerefMut, Entity, Vec2};
use bevy::utils::EntityHashSet;
use derive_more::{Add, Mul};
use lightyear::prelude::*;
use serde::{Deserialize, Serialize};
@@ -50,13 +51,16 @@ pub struct PlayerColor(pub(crate) Color);
#[message(custom_map)]
pub struct PlayerParent(Entity);

impl MapEntities for PlayerParent {
fn map_entities(&mut self, entity_map: &RemoteEntityMap) {
self.0.map_entities(entity_map);
impl<'a> MapEntities<'a> for PlayerParent {
fn map_entities(&mut self, entity_mapper: Box<dyn EntityMapper + 'a>) {
self.0.map_entities(entity_mapper);
}

fn entities(&self) -> EntityHashSet<Entity> {
EntityHashSet::from_iter(vec![self.0])
}
}

// #[component_protocol(protocol = "MyProtocol", derive(Debug))]
#[component_protocol(protocol = "MyProtocol")]
pub enum Components {
#[sync(once)]
@@ -77,7 +81,7 @@ pub struct Channel1;
#[derive(Message, Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct Message1(pub usize);

#[message_protocol(protocol = "MyProtocol", derive(Debug))]
#[message_protocol(protocol = "MyProtocol")]
pub enum Messages {
Message1(Message1),
}
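The `MapEntities` migration above makes the trait lifetimed: `map_entities` now receives a boxed `EntityMapper`, and the message additionally reports which entities it references via `entities()`. A minimal mirror of the `PlayerParent` impl with a hypothetical component name, assuming the same `lightyear::prelude::*` imports as the example:

```rust
use bevy::prelude::Entity;
use bevy::utils::EntityHashSet;
use lightyear::prelude::*;

// Hypothetical component that points at another replicated entity.
pub struct Owner(Entity);

impl<'a> MapEntities<'a> for Owner {
    fn map_entities(&mut self, entity_mapper: Box<dyn EntityMapper + 'a>) {
        // remap the inner entity (remote id -> local id) using the provided mapper
        self.0.map_entities(entity_mapper);
    }

    fn entities(&self) -> EntityHashSet<Entity> {
        // report every entity referenced by this component
        EntityHashSet::from_iter(vec![self.0])
    }
}
```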
@@ -128,7 +128,7 @@ pub(crate) fn apply_confirmed_update<T: SyncComponent, P: Protocol>(
for (confirmed_entity, confirmed, confirmed_component) in confirmed_entities.iter() {
if let Some(p) = confirmed.interpolated {
if confirmed_component.is_changed() {
if let Ok((mut interpolated_component, history_option)) =
if let Ok((interpolated_component, history_option)) =
interpolated_entities.get_mut(p)
{
match T::mode() {
9 changes: 9 additions & 0 deletions lightyear/src/client/interpolation/mod.rs
@@ -49,6 +49,15 @@
}
}

/// Use this if you don't want to use an interpolation function for this component.
/// (For example if you are running your own interpolation logic)
pub struct NoInterpolation;
impl<C> InterpFn<C> for NoInterpolation {
fn lerp(start: C, _other: C, _t: f32) -> C {
start
}
}

pub trait InterpolatedComponent<C>: SyncComponent {
type Fn: InterpFn<C>;

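`NoInterpolation` simply returns the start value, which is what the `lerp = "NoInterpolation"` attribute in the replication_groups example relies on. If a component needs non-linear behaviour instead of opting out entirely, the same `InterpFn` trait can be implemented by hand; a hypothetical example, sketched as if it were added next to `NoInterpolation` in this module:

```rust
/// Hypothetical interpolation function that snaps to the newer value
/// halfway through the interpolation interval.
pub struct SnapAtMidpoint;
impl<C> InterpFn<C> for SnapAtMidpoint {
    fn lerp(start: C, other: C, t: f32) -> C {
        // keep the old value for the first half of the interval, then snap to the new one
        if t < 0.5 {
            start
        } else {
            other
        }
    }
}
```

It could then be referenced from a protocol the same way as the built-ins, e.g. `#[sync(full, lerp = "SnapAtMidpoint")]`.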
2 changes: 1 addition & 1 deletion lightyear/src/lib.rs
@@ -24,10 +24,10 @@ pub mod _reexport {
pub use crate::channel::builder::{
EntityActionsChannel, EntityUpdatesChannel, InputChannel, PingChannel,
};
pub use crate::client::interpolation::LinearInterpolation;
pub use crate::client::interpolation::{
add_interpolation_systems, add_prepare_interpolation_systems, InterpolatedComponent,
};
pub use crate::client::interpolation::{LinearInterpolation, NoInterpolation};
pub use crate::client::prediction::add_prediction_systems;
pub use crate::connection::events::{
IterComponentInsertEvent, IterComponentRemoveEvent, IterComponentUpdateEvent,
5 changes: 4 additions & 1 deletion lightyear/src/shared/ping/manager.rs
@@ -270,7 +270,10 @@ mod tests {

#[test]
fn test_send_pings() {
let config = PingConfig::default();
let config = PingConfig {
ping_interval: Duration::from_millis(100),
stats_buffer_duration: Duration::from_secs(4),
};
let mut ping_manager = PingManager::new(&config);
let mut time_manager = TimeManager::new(Duration::default());

140 changes: 70 additions & 70 deletions lightyear/src/shared/replication/manager.rs
@@ -347,31 +347,30 @@ impl<P: Protocol> ReplicationManager<P> {

// add actions to the message for entities in topological order
for e in entities.iter() {
if let Some(mut a) = actions.remove(e) {
// for any update that was not already in insert/updates, add it to the update list
if let Some(ref mut updates) = my_updates {
// TODO: this suggests that we should already store inserts/updates as HashSet!
let existing_inserts = a
.insert
.iter()
.map(|c| c.into())
.collect::<HashSet<P::ComponentKinds>>();
let existing_updates = a
.updates
.iter()
.map(|c| c.into())
.collect::<HashSet<P::ComponentKinds>>();
updates.remove(e).map(|u| {
u.into_iter()
.filter(|c| {
!existing_inserts.contains(&(c.into()))
&& !existing_updates.contains(&(c.into()))
})
.for_each(|c| a.updates.push(c));
});
let mut a = actions.remove(e).unwrap_or_else(EntityActions::default);
// for any update that was not already in insert/updates, add it to the update list
if let Some(ref mut updates) = my_updates {
// TODO: this suggests that we should already store inserts/updates as HashSet!
let existing_inserts = a
.insert
.iter()
.map(|c| c.into())
.collect::<HashSet<P::ComponentKinds>>();
let existing_updates = a
.updates
.iter()
.map(|c| c.into())
.collect::<HashSet<P::ComponentKinds>>();
if let Some(u) = updates.remove(e) {
u.into_iter()
.filter(|c| {
!existing_inserts.contains(&(c.into()))
&& !existing_updates.contains(&(c.into()))
})
.for_each(|c| a.updates.push(c));
}
actions_message.push((*e, a));
}
actions_message.push((*e, a));
}

messages.push((
@@ -385,15 +384,15 @@ impl<P: Protocol> ReplicationManager<P> {
}

// create an updates message
self.pending_updates.remove(&group_id).map(|mut updates| {
if let Some(mut updates) = self.pending_updates.remove(&group_id) {
let channel = self.group_channels.entry(group_id).or_default();
let mut updates_message = vec![];

// add updates to the message in topological order
entities.iter().for_each(|e| {
updates.remove(e).map(|u| {
if let Some(u) = updates.remove(e) {
updates_message.push((*e, u));
});
};
});

messages.push((
Expand All @@ -404,7 +403,7 @@ impl<P: Protocol> ReplicationManager<P> {
updates: updates_message,
}),
));
});
};
}
Err(e) => {
error!("There is a cyclic dependency in the group (with entity {:?})! Replication aborted.", e.node_id());
@@ -810,54 +809,55 @@ mod tests {
MyComponentsProtocol::Component3(Component3(5.0)),
);

// the order of actions is not important if there are no relations between the entities
let message = manager.finalize(Tick(2));
let actions = message.first().unwrap();
assert_eq!(actions.0, ChannelKind::of::<EntityActionsChannel>());
assert_eq!(actions.1, group_1);
let ReplicationMessageData::Actions(ref a) = actions.2 else {
panic!()
};
assert_eq!(a.sequence_id, MessageId(2));
assert_eq!(
manager.finalize(Tick(2)),
vec![
EntityHashMap::from_iter(a.actions.clone()),
EntityHashMap::from_iter(vec![
(
ChannelKind::of::<EntityActionsChannel>(),
group_1,
ReplicationMessageData::Actions(EntityActionMessage {
sequence_id: MessageId(2),
actions: BTreeMap::from([
(
entity_1,
EntityActions {
spawn: true,
despawn: false,
insert: vec![MyComponentsProtocol::Component1(Component1(1.0))],
remove: vec![MyComponentsProtocolKind::Component2],
updates: vec![MyComponentsProtocol::Component3(Component3(
3.0
))],
}
),
(
entity_2,
EntityActions {
spawn: false,
despawn: false,
insert: vec![],
remove: vec![],
updates: vec![MyComponentsProtocol::Component2(Component2(
4.0
))],
}
)
]),
})
entity_1,
EntityActions {
spawn: true,
despawn: false,
insert: vec![MyComponentsProtocol::Component1(Component1(1.0))],
remove: vec![MyComponentsProtocolKind::Component2],
updates: vec![MyComponentsProtocol::Component3(Component3(3.0))],
}
),
(
ChannelKind::of::<EntityUpdatesChannel>(),
group_2,
ReplicationMessageData::Updates(EntityUpdatesMessage {
last_action_tick: Tick(3),
updates: BTreeMap::from([(
entity_3,
vec![MyComponentsProtocol::Component3(Component3(5.0))]
)]),
})
entity_2,
EntityActions {
spawn: false,
despawn: false,
insert: vec![],
remove: vec![],
updates: vec![MyComponentsProtocol::Component2(Component2(4.0))],
}
)
]
])
);

let updates = message.get(1).unwrap();
assert_eq!(
updates,
&(
ChannelKind::of::<EntityUpdatesChannel>(),
group_2,
ReplicationMessageData::Updates(EntityUpdatesMessage {
last_action_tick: Tick(3),
updates: vec![(
entity_3,
vec![MyComponentsProtocol::Component3(Component3(5.0))]
)],
})
)
);
assert_eq!(
manager
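Several of the rewrites in this file replace an `Option::map` whose closure runs only for its side effect with an explicit `if let` — the pattern clippy flags as `option_map_unit_fn`. A standalone illustration with hypothetical data:

```rust
use std::collections::HashMap;

fn main() {
    let mut updates: HashMap<u32, Vec<&str>> = HashMap::from([(1, vec!["position", "color"])]);
    let mut message: Vec<(u32, Vec<&str>)> = Vec::new();
    let entity = 1u32;

    // Before (flagged by clippy): the closure is executed purely for its side effect.
    //   updates.remove(&entity).map(|u| message.push((entity, u)));

    // After: the conditional side effect is explicit, mirroring the refactor above.
    if let Some(u) = updates.remove(&entity) {
        message.push((entity, u));
    }

    println!("{message:?}");
}
```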
5 changes: 2 additions & 3 deletions lightyear/src/tests/protocol.rs
@@ -33,8 +33,8 @@ pub struct Component3(pub f32);
#[message(custom_map)]
pub struct Component4(pub Entity);

impl MapEntities for Component4 {
fn map_entities(&mut self, entity_mapper: Box<dyn EntityMapper>) {
impl<'a> MapEntities<'a> for Component4 {
fn map_entities(&mut self, entity_mapper: Box<dyn EntityMapper + 'a>) {
self.0.map_entities(entity_mapper);
}

@@ -43,7 +43,6 @@ impl MapEntities for Component4 {
}
}

// #[component_protocol_internal(protocol = "MyProtocol", derive(Debug))]
#[component_protocol_internal(protocol = "MyProtocol")]
pub enum MyComponentsProtocol {
#[sync(full)]
