Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Keep appending to non-collapsible segments until they are long enough to downsample #3385

Merged
merged 5 commits into from
Jun 25, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 20 additions & 9 deletions ksp_plugin/vessel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -178,18 +178,27 @@ void Vessel::ClearAllIntrinsicForcesAndTorques() {
}

void Vessel::DetectCollapsibilityChange() {
bool const becomes_collapsible = IsCollapsible();
if (is_collapsible_ != becomes_collapsible) {
bool const will_be_collapsible = IsCollapsible();

// It is always correct to mark as non-collapsible a collapsible segment or to
// append collapsible points to a non-collapsible segment (but not
// vice-versa). If a non-collapsible segment is being closed but is very
// short, we don't actually close it but keep appending points to it until it
// is long enough to have been downsampled. If downsampling is disabled,
// surely this is not going to happen so no point in waiting for Godot.
bool const collapsibility_changes = is_collapsible_ != will_be_collapsible;
bool const becomes_non_collapsible = collapsibility_changes &&
!will_be_collapsible;
bool const awaits_first_downsampling = downsampling_parameters_.has_value() &&
!backstory_->was_downsampled();

if (collapsibility_changes &&
(becomes_non_collapsible || !awaits_first_downsampling)) {
// If collapsibility changes, we create a new history segment. This ensures
// that downsampling does not change collapsibility boundaries.
// NOTE(phl): It is always correct to mark as non-collapsible a collapsible
// segment (but not vice-versa). If the segment being closed is a very
// short collapsible one (e.g., no downsampling took place) we could
// consider merging it with its predecessor and avoiding the creation of a
// new segment. The checkpointing code below would remain correct.

// In normal situations we create a new segment with the collapsibility
// given by |becomes_collapsible|. In one cornercase we delete the current
// given by |will_be_collapsible|. In one corner case we delete the current
// segment.
enum {
Create,
Expand Down Expand Up @@ -253,7 +262,9 @@ void Vessel::DetectCollapsibilityChange() {
}
};
psychohistory_ = trajectory_.AttachSegments(std::move(psychohistory));
is_collapsible_ = becomes_collapsible;

// Not updated if we chose to append to the current segment.
is_collapsible_ = will_be_collapsible;
}
}

Expand Down
13 changes: 6 additions & 7 deletions ksp_plugin_test/plugin_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -446,11 +446,10 @@ TEST_F(PluginTest, Serialization) {
EXPECT_TRUE(message.vessel(0).vessel().has_flight_plan());
EXPECT_TRUE(message.vessel(0).vessel().has_history());
auto const& vessel_0_history = message.vessel(0).vessel().history();
EXPECT_EQ(4, vessel_0_history.segment_size());
EXPECT_EQ(1, vessel_0_history.segment(0).zfp().timeline_size());
EXPECT_EQ(7, vessel_0_history.segment(1).zfp().timeline_size());
EXPECT_EQ(2, vessel_0_history.segment(2).zfp().timeline_size());
EXPECT_EQ(1, vessel_0_history.segment(3).zfp().timeline_size());
EXPECT_EQ(3, vessel_0_history.segment_size());
EXPECT_EQ(7, vessel_0_history.segment(0).zfp().timeline_size());
EXPECT_EQ(2, vessel_0_history.segment(1).zfp().timeline_size());
EXPECT_EQ(1, vessel_0_history.segment(2).zfp().timeline_size());
EXPECT_TRUE(message.has_renderer());
EXPECT_TRUE(message.renderer().has_plotting_frame());
EXPECT_TRUE(message.renderer().plotting_frame().HasExtension(
Expand All @@ -468,9 +467,9 @@ TEST_F(PluginTest, Serialization) {
// bounds of each segment. Ignore it for the purposes of comparing the
// messages.
message.mutable_vessel(0)->mutable_vessel()
->mutable_history()->mutable_segment(1)->clear_zfp();
->mutable_history()->mutable_segment(0)->clear_zfp();
second_message.mutable_vessel(0)->mutable_vessel()
->mutable_history()->mutable_segment(1)->clear_zfp();
->mutable_history()->mutable_segment(0)->clear_zfp();
EXPECT_THAT(message, EqualsProto(second_message));
}

Expand Down
209 changes: 170 additions & 39 deletions ksp_plugin_test/vessel_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -466,6 +466,7 @@ TEST_F(VesselTest, IsCollapsible) {
}
}

// Verifies that checkpoints are correctly created when collapsibility changes.
TEST_F(VesselTest, Checkpointing) {
MockFunction<int(not_null<PileUp const*>)>
serialization_index_for_pile_up;
Expand All @@ -481,6 +482,10 @@ TEST_F(VesselTest, Checkpointing) {
EXPECT_CALL(ephemeris_,
FlowWithAdaptiveStep(_, _, t0_ + 30 * Second, _, _))
.Times(AnyNumber());

// Disable downsampling to make sure that we do not try to append to existing
// segments.
vessel_.DisableDownsampling();
vessel_.CreateTrajectoryIfNeeded(t0_);

auto const pile_up =
Expand Down Expand Up @@ -580,17 +585,12 @@ TEST_F(VesselTest, Checkpointing) {
EXPECT_EQ(0, checkpoint.time().scalar().magnitude());
EXPECT_EQ(3, checkpoint.non_collapsible_segment().segment_size());
auto const& segment0 = checkpoint.non_collapsible_segment().segment(0);
EXPECT_EQ(1, segment0.number_of_dense_points());
EXPECT_EQ(1, segment0.zfp().timeline_size());
EXPECT_EQ(0, segment0.exact(0).instant().scalar().magnitude());
auto const& segment1 = checkpoint.non_collapsible_segment().segment(1);
// The |number_of_dense_points| is 0 for the psychohistory and the
// prediction because they are not subject to downsampling.
EXPECT_FALSE(segment1.has_number_of_dense_points());
EXPECT_EQ(1, segment1.zfp().timeline_size());
EXPECT_EQ(0, segment1.exact(0).instant().scalar().magnitude());
auto const& segment2 = checkpoint.non_collapsible_segment().segment(2);
EXPECT_FALSE(segment2.has_number_of_dense_points());
EXPECT_EQ(1, segment2.zfp().timeline_size());
EXPECT_EQ(0, segment2.exact(0).instant().scalar().magnitude());
EXPECT_EQ(
Expand All @@ -607,22 +607,17 @@ TEST_F(VesselTest, Checkpointing) {
EXPECT_EQ(25, checkpoint.time().scalar().magnitude());
EXPECT_EQ(5, checkpoint.non_collapsible_segment().segment_size());
auto const& segment0 = checkpoint.non_collapsible_segment().segment(0);
EXPECT_EQ(0, segment0.number_of_dense_points());
EXPECT_EQ(0, segment0.zfp().timeline_size());
auto const& segment1 = checkpoint.non_collapsible_segment().segment(1);
EXPECT_EQ(1, segment1.number_of_dense_points());
EXPECT_EQ(1, segment1.zfp().timeline_size());
EXPECT_EQ(10, segment1.exact(0).instant().scalar().magnitude());
auto const& segment2 = checkpoint.non_collapsible_segment().segment(2);
EXPECT_EQ(16, segment2.number_of_dense_points());
EXPECT_EQ(16, segment2.zfp().timeline_size());
EXPECT_EQ(10, segment2.exact(0).instant().scalar().magnitude());
auto const& segment3 = checkpoint.non_collapsible_segment().segment(3);
EXPECT_FALSE(segment3.has_number_of_dense_points());
EXPECT_EQ(1, segment3.zfp().timeline_size());
EXPECT_EQ(25, segment3.exact(0).instant().scalar().magnitude());
auto const& segment4 = checkpoint.non_collapsible_segment().segment(4);
EXPECT_FALSE(segment4.has_number_of_dense_points());
EXPECT_EQ(1, segment4.zfp().timeline_size());
EXPECT_EQ(25, segment4.exact(0).instant().scalar().magnitude());
EXPECT_EQ(
Expand All @@ -641,6 +636,138 @@ TEST_F(VesselTest, Checkpointing) {
}
}

// Exact same setup as the previous test, but with downsampling enabled. We
// create a single segment because the trajectory never reaches the downsampling threshold.
TEST_F(VesselTest, SingleSegment) {
MockFunction<int(not_null<PileUp const*>)>
serialization_index_for_pile_up;
EXPECT_CALL(serialization_index_for_pile_up, Call(_))
.Times(2)
.WillRepeatedly(Return(0));

EXPECT_CALL(ephemeris_, t_max())
.WillRepeatedly(Return(t0_ + 30 * Second));
EXPECT_CALL(ephemeris_,
FlowWithAdaptiveStep(_, _, InfiniteFuture, _, _))
.Times(AnyNumber());
EXPECT_CALL(ephemeris_,
FlowWithAdaptiveStep(_, _, t0_ + 30 * Second, _, _))
.Times(AnyNumber());

vessel_.CreateTrajectoryIfNeeded(t0_);

auto const pile_up =
std::make_shared<PileUp>(/*parts=*/std::list<not_null<Part*>>{p1_, p2_},
Instant{},
DefaultPsychohistoryParameters(),
DefaultHistoryParameters(),
&ephemeris_,
/*deletion_callback=*/nullptr);
p1_->set_containing_pile_up(pile_up);
p2_->set_containing_pile_up(pile_up);

// Free-fall trajectory.
AppendTrajectoryTimeline<Barycentric>(
NewLinearTrajectoryTimeline<Barycentric>(p1_dof_,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 1 * Second,
/*t2=*/t0_ + 11 * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p1_->AppendToHistory(time, degrees_of_freedom);
});
AppendTrajectoryTimeline<Barycentric>(
NewLinearTrajectoryTimeline<Barycentric>(p2_dof_,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 1 * Second,
/*t2=*/t0_ + 11 * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p2_->AppendToHistory(time, degrees_of_freedom);
});

vessel_.DetectCollapsibilityChange();
vessel_.AdvanceTime();

// Apply a force. This segment is not collapsible.
auto const p1_force =
Vector<Force, Barycentric>({1 * Newton, 0 * Newton, 0 * Newton});
p1_->apply_intrinsic_force(p1_force);
AppendTrajectoryTimeline<Barycentric>(
NewAcceleratedTrajectoryTimeline(p1_dof_,
/*acceleration=*/p1_force / mass1_,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 11 * Second,
/*t2=*/t0_ + 26 * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p1_->AppendToHistory(time, degrees_of_freedom);
});
AppendTrajectoryTimeline<Barycentric>(
NewLinearTrajectoryTimeline<Barycentric>(p2_dof_,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 11 * Second,
/*t2=*/t0_ + 26 * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p2_->AppendToHistory(time, degrees_of_freedom);
});

vessel_.DetectCollapsibilityChange();
vessel_.AdvanceTime();

// Remove the force.
p1_->clear_intrinsic_force();
AppendTrajectoryTimeline<Barycentric>(
NewLinearTrajectoryTimeline<Barycentric>(p1_dof_,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 26 * Second,
/*t2=*/t0_ + 31 * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p1_->AppendToHistory(time, degrees_of_freedom);
});
AppendTrajectoryTimeline<Barycentric>(
NewLinearTrajectoryTimeline<Barycentric>(p2_dof_,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 26 * Second,
/*t2=*/t0_ + 31 * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p2_->AppendToHistory(time, degrees_of_freedom);
});

vessel_.DetectCollapsibilityChange();
vessel_.AdvanceTime();

serialization::Vessel message;
vessel_.WriteToMessage(&message,
serialization_index_for_pile_up.AsStdFunction());
EXPECT_TRUE(message.has_is_collapsible());
EXPECT_FALSE(message.is_collapsible());
EXPECT_EQ(0, message.checkpoint_size());
EXPECT_TRUE(message.has_downsampling_parameters());
EXPECT_EQ(3, message.history().segment_size());
{
// Non-collapsible segment for the history.
auto const& segment0 = message.history().segment(0);
EXPECT_EQ(31, segment0.number_of_dense_points());
EXPECT_EQ(31, segment0.zfp().timeline_size());
}
{
// Psychohistory, only one point.
auto const& segment1 = message.history().segment(1);
EXPECT_EQ(0, segment1.number_of_dense_points());
EXPECT_EQ(1, segment1.zfp().timeline_size());
}
{
// Prediction, excluded except for its first point.
auto const& segment2 = message.history().segment(2);
EXPECT_EQ(0, segment2.number_of_dense_points());
EXPECT_EQ(1, segment2.zfp().timeline_size());
}
}

TEST_F(VesselTest, SerializationSuccess) {
MockFunction<int(not_null<PileUp const*>)>
serialization_index_for_pile_up;
Expand Down Expand Up @@ -718,32 +845,36 @@ TEST_F(VesselTest, TailSerialization) {
p1_->set_containing_pile_up(pile_up);
p2_->set_containing_pile_up(pile_up);

// A long trajectory for each part.
AppendTrajectoryTimeline<Barycentric>(
NewCircularTrajectoryTimeline<Barycentric>(
/*period=*/20 * Second,
/*r=*/101 * Metre,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 1 * Second,
/*t2=*/t0_ + number_of_points * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p1_->AppendToHistory(time, degrees_of_freedom);
});
AppendTrajectoryTimeline<Barycentric>(
NewCircularTrajectoryTimeline<Barycentric>(
/*period=*/20 * Second,
/*r=*/102 * Metre,
/*Δt=*/1 * Second,
/*t1=*/t0_ + 1 * Second,
/*t2=*/t0_ + number_of_points * Second),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p2_->AppendToHistory(time, degrees_of_freedom);
});

vessel_.DetectCollapsibilityChange();
vessel_.AdvanceTime();
// A long trajectory for each part, one point at a time, with a collapsibility
// check after each point just like it would happen in real life.
for (Instant t1 = t0_ + 1 * Second;
t1 < t0_ + number_of_points * Second;
t1 += 1 * Second) {
Instant const t2 = t1 + 1 * Second;
AppendTrajectoryTimeline<Barycentric>(
NewCircularTrajectoryTimeline<Barycentric>(
/*period=*/20 * Second,
/*r=*/101 * Metre,
/*Δt=*/1 * Second,
t1, t2),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p1_->AppendToHistory(time, degrees_of_freedom);
});
AppendTrajectoryTimeline<Barycentric>(
NewCircularTrajectoryTimeline<Barycentric>(
/*period=*/20 * Second,
/*r=*/102 * Metre,
/*Δt=*/1 * Second,
t1, t2),
[this](Instant const& time,
DegreesOfFreedom<Barycentric> const& degrees_of_freedom) {
p2_->AppendToHistory(time, degrees_of_freedom);
});

vessel_.DetectCollapsibilityChange();
vessel_.AdvanceTime();
}
EXPECT_EQ(25'139,
std::distance(vessel_.trajectory().begin(),
vessel_.psychohistory()->begin()));
Expand All @@ -762,8 +893,8 @@ TEST_F(VesselTest, TailSerialization) {
{
// Collapsible segment of the history (backstory), truncated to the left.
auto const& segment1 = message.history().segment(1);
EXPECT_EQ(159, segment1.number_of_dense_points());
EXPECT_EQ("2000-01-01T23:25:13"_TT,
EXPECT_EQ(152, segment1.number_of_dense_points());
EXPECT_EQ("2000-01-01T23:24:24"_TT,
Instant::ReadFromMessage(segment1.exact(0).instant()));
EXPECT_EQ(t0_ + (number_of_points - 1) * Second,
Instant::ReadFromMessage(segment1.exact(1).instant()));
Expand All @@ -786,7 +917,7 @@ TEST_F(VesselTest, TailSerialization) {
message, &celestial_, &ephemeris_, /*deletion_callback=*/nullptr);
EXPECT_TRUE(v->trajectory().segments().begin()->empty());
auto const backstory = std::next(v->trajectory().segments().begin());
EXPECT_EQ("2000-01-01T23:25:13"_TT, backstory->front().time);
EXPECT_EQ("2000-01-01T23:24:24"_TT, backstory->front().time);
EXPECT_EQ(t0_ + (number_of_points - 1) * Second, backstory->back().time);
}

Expand Down