Merge pull request redpanda-data#16267 from vbotbuildovich/backport-pr-16247-v23.2.x-150

[v23.2.x] Changed the `partition_status` `chunked_fifo` chunk size to `8`
mmaslankaprv authored Jan 25, 2024
2 parents 86d8d9e + 37ab8e3 commit 320f9cb
Showing 12 changed files with 32 additions and 22 deletions.
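
The substance of the backport is the new alias in src/v/cluster/health_monitor_types.h below: per-topic partition statuses now live in ss::chunked_fifo chunks of 8 entries instead of Seastar's default of 128, and the remaining files thread the partition_statuses_t alias through its call sites and make the serde, JSON, and formatting helpers generic over the chunk size. A rough standalone sketch of the memory argument follows; the element size is an assumption for illustration, not a figure taken from this diff.

#include <cstddef>
#include <cstdio>

// Illustrative figures only: sizeof(cluster::partition_status) is not part of
// this diff, so 48 bytes is an assumption.
constexpr std::size_t element_size = 48;
constexpr std::size_t default_chunk = 128; // seastar::chunked_fifo default
constexpr std::size_t small_chunk = 8;     // value chosen in this commit

// The first push_back into a chunked_fifo allocates a whole chunk, so a topic
// with a single partition still pays for a full chunk's worth of slots.
constexpr std::size_t first_chunk_bytes(std::size_t chunk) {
    return chunk * element_size;
}

int main() {
    std::printf("default chunks (128): ~%zu bytes per topic\n",
                first_chunk_bytes(default_chunk));
    std::printf("small chunks   (8):   ~%zu bytes per topic\n",
                first_chunk_bytes(small_chunk));
    return 0;
}

Under these assumed sizes, a topic with only a handful of partitions allocates roughly one sixteenth of the memory a default-sized first chunk would cost.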
4 changes: 2 additions & 2 deletions src/v/cluster/health_monitor_backend.cc

@@ -735,8 +735,8 @@ ss::chunked_fifo<ntp_report> collect_shard_local_reports(
     return reports;
 }
 
-using reports_acc_t = absl::
-  node_hash_map<model::topic_namespace, ss::chunked_fifo<partition_status>>;
+using reports_acc_t
+  = absl::node_hash_map<model::topic_namespace, partition_statuses_t>;
 
 reports_acc_t reduce_reports_map(
   reports_acc_t acc, ss::chunked_fifo<ntp_report> current_reports) {
5 changes: 3 additions & 2 deletions src/v/cluster/health_monitor_types.cc

@@ -14,6 +14,7 @@
 #include "cluster/errc.h"
 #include "cluster/node/types.h"
 #include "features/feature_table.h"
+#include "health_monitor_types.h"
 #include "model/adl_serde.h"
 #include "model/metadata.h"
 #include "utils/to_string.h"
@@ -156,7 +157,7 @@ topic_status& topic_status::operator=(const topic_status& rhs) {
         return *this;
     }
 
-    ss::chunked_fifo<partition_status> p;
+    partition_statuses_t p;
     p.reserve(rhs.partitions.size());
     std::copy(
       rhs.partitions.begin(), rhs.partitions.end(), std::back_inserter(p));
@@ -167,7 +168,7 @@ topic_status& topic_status::operator=(const topic_status& rhs) {
 }
 
 topic_status::topic_status(
-  model::topic_namespace tp_ns, ss::chunked_fifo<partition_status> partitions)
+  model::topic_namespace tp_ns, partition_statuses_t partitions)
   : tp_ns(std::move(tp_ns))
   , partitions(std::move(partitions)) {}
 
9 changes: 7 additions & 2 deletions src/v/cluster/health_monitor_types.h

@@ -97,21 +97,26 @@ struct partition_status
     friend bool operator==(const partition_status&, const partition_status&)
       = default;
 };
+/**
+ * We keep the chunk size in partition status small to make sure that we do
+ * not waste too much memory for topics with a small number of partitions.
+ */
+using partition_statuses_t = ss::chunked_fifo<partition_status, 8>;
 
 struct topic_status
   : serde::envelope<topic_status, serde::version<0>, serde::compat_version<0>> {
     static constexpr int8_t current_version = 0;
 
     topic_status() = default;
-    topic_status(model::topic_namespace, ss::chunked_fifo<partition_status>);
+    topic_status(model::topic_namespace, partition_statuses_t);
     topic_status& operator=(const topic_status&);
     topic_status(const topic_status&);
    topic_status& operator=(topic_status&&) = default;
    topic_status(topic_status&&) = default;
    ~topic_status() = default;
    model::topic_namespace tp_ns;
-    ss::chunked_fifo<partition_status> partitions;
+    partition_statuses_t partitions;
     friend std::ostream& operator<<(std::ostream&, const topic_status&);
     friend bool operator==(const topic_status&, const topic_status&);
 
2 changes: 1 addition & 1 deletion src/v/cluster/tests/health_monitor_test.cc

@@ -378,7 +378,7 @@ enum part_status { HEALTHY, LEADERLESS, URP };
 
 topic_status
 make_ts(ss::sstring name, const std::vector<part_status>& status_list) {
-    ss::chunked_fifo<cluster::partition_status> statuses;
+    cluster::partition_statuses_t statuses;
 
     partition_id pid{0};
     for (auto status : status_list) {
2 changes: 1 addition & 1 deletion src/v/cluster/tests/partition_balancer_simulator_test.cc

@@ -609,7 +609,7 @@ class partition_balancer_sim_fixture {
 
         absl::flat_hash_map<
          model::topic_namespace,
-          ss::chunked_fifo<cluster::partition_status>>
+          cluster::partition_statuses_t>
          topic2partitions;
        for (const auto& [ntp, repl] : replicas) {
            topic2partitions[model::topic_namespace(ntp.ns, ntp.tp.topic)]
9 changes: 6 additions & 3 deletions src/v/cluster/tests/randoms.h

@@ -18,6 +18,8 @@
 #include "storage/tests/randoms.h"
 #include "test_utils/randoms.h"
 
+#include <iterator>
+
 namespace cluster::node {
 
 inline local_state random_local_state() {
@@ -60,9 +62,10 @@ inline partition_status random_partition_status() {
 }
 
 inline topic_status random_topic_status() {
-    return topic_status(
-      model::random_topic_namespace(),
-      tests::random_chunked_fifo(random_partition_status));
+    auto d = tests::random_vector(random_partition_status);
+    cluster::partition_statuses_t partitions;
+    std::move(d.begin(), d.end(), std::back_inserter(partitions));
+    return {model::random_topic_namespace(), std::move(partitions)};
 }
 
 inline drain_manager::drain_status random_drain_status() {
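
The change above stops using tests::random_chunked_fifo, which presumably yields a fifo with the default chunk size and therefore no longer matches partition_statuses_t, and instead fills a plain vector and moves the elements across. A minimal sketch of that transfer pattern, assuming Seastar headers are available; the status element type is hypothetical.

#include <seastar/core/chunked_fifo.hh>

#include <algorithm>
#include <iterator>
#include <vector>

// Hypothetical element type standing in for cluster::partition_status.
struct status {
    int id;
};

int main() {
    // Build an ordinary vector first, then move the elements into the
    // small-chunk fifo; fifos with different chunk sizes are distinct types,
    // so the transfer goes element by element through a back_inserter.
    std::vector<status> src = {{1}, {2}, {3}};
    seastar::chunked_fifo<status, 8> dst;
    std::move(src.begin(), src.end(), std::back_inserter(dst));
    return dst.size() == src.size() ? 0 : 1;
}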
2 changes: 1 addition & 1 deletion src/v/cluster/tests/serialization_rt_test.cc

@@ -731,7 +731,7 @@ cluster::drain_manager::drain_status random_drain_status() {
 }
 
 cluster::topic_status random_topic_status() {
-    ss::chunked_fifo<cluster::partition_status> partitions;
+    cluster::partition_statuses_t partitions;
     for (int i = 0, mi = random_generators::get_int(10); i < mi; i++) {
         partitions.push_back(cluster::partition_status{
           .id = tests::random_named_int<model::partition_id>(),
2 changes: 1 addition & 1 deletion src/v/compat/cluster_json.h

@@ -398,7 +398,7 @@ inline void read_value(json::Value const& rd, cluster::partition_status& obj) {
 
 inline void read_value(json::Value const& rd, cluster::topic_status& obj) {
     model::topic_namespace tp_ns;
-    ss::chunked_fifo<cluster::partition_status> partitions;
+    cluster::partition_statuses_t partitions;
 
     read_member(rd, "tp_ns", tp_ns);
     read_member(rd, "partitions", partitions);
4 changes: 2 additions & 2 deletions src/v/compat/json.h

@@ -109,8 +109,8 @@ void read_value(json::Value const& v, std::vector<T>& target) {
     }
 }
 
-template<typename T>
-void read_value(json::Value const& v, ss::chunked_fifo<T>& target) {
+template<typename T, size_t chunk_size = 128>
+void read_value(json::Value const& v, ss::chunked_fifo<T, chunk_size>& target) {
     for (auto const& e : v.GetArray()) {
         auto t = T{};
         read_value(e, t);
5 changes: 3 additions & 2 deletions src/v/json/json.h

@@ -107,9 +107,10 @@ void rjson_serialize(
     w.EndArray();
 }
 
-template<typename T>
+template<typename T, size_t chunk_size = 128>
 void rjson_serialize(
-  json::Writer<json::StringBuffer>& w, const ss::chunked_fifo<T>& v) {
+  json::Writer<json::StringBuffer>& w,
+  const ss::chunked_fifo<T, chunk_size>& v) {
     w.StartArray();
     for (const auto& e : v) {
         rjson_serialize(w, e);
4 changes: 2 additions & 2 deletions src/v/serde/serde.h

@@ -434,8 +434,8 @@ void write(iobuf& out, fragmented_vector<T, fragment_size> t) {
     }
 }
 
-template<typename T>
-void write(iobuf& out, ss::chunked_fifo<T> t) {
+template<typename T, size_t chunk_size = 128>
+void write(iobuf& out, ss::chunked_fifo<T, chunk_size> t) {
     if (unlikely(t.size() > std::numeric_limits<serde_size_t>::max())) {
         throw serde_exception(fmt_with_ctx(
           ssx::sformat,
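
The compat/json.h, json/json.h, and serde/serde.h hunks all make the same adjustment: the helpers previously accepted ss::chunked_fifo<T>, which names only the default 128-entry specialization, so the new 8-entry fifo would not have matched them. Adding the chunk size as a deduced template parameter (defaulted to 128) lets every specialization bind. A standalone sketch of the difference, using a toy container rather than Seastar's type.

#include <cstddef>
#include <iostream>
#include <vector>

// Toy stand-in for ss::chunked_fifo<T, chunk_size>; only the template shape
// matters here, not the storage layout.
template<typename T, std::size_t chunk_size = 128>
struct toy_fifo {
    std::vector<T> items;
};

// The old signatures looked like this: they only ever match the default
// chunk size, so a toy_fifo<T, 8> would not bind.
template<typename T>
void write_default_only(const toy_fifo<T>& t) {
    std::cout << "default chunk, " << t.items.size() << " items\n";
}

// The new signatures deduce chunk_size, so every specialization matches.
template<typename T, std::size_t chunk_size>
void write_any(const toy_fifo<T, chunk_size>& t) {
    std::cout << "chunk size " << chunk_size << ", " << t.items.size()
              << " items\n";
}

int main() {
    toy_fifo<int, 8> small{{1, 2, 3}};
    // write_default_only(small); // would not compile: deduction needs chunk_size == 128
    write_any(small); // prints "chunk size 8, 3 items"
}

Uncommenting the write_default_only call fails to compile because template argument deduction cannot reconcile toy_fifo<int, 8> with toy_fifo<int, 128>.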
6 changes: 3 additions & 3 deletions src/v/utils/to_string.h

@@ -52,9 +52,9 @@ operator<<(std::ostream& o, const ss::lowres_clock::duration& d) {
 
 } // namespace std
 
-template<typename T>
-struct fmt::formatter<ss::chunked_fifo<T>> {
-    using type = ss::chunked_fifo<T>;
+template<typename T, size_t chunk_size>
+struct fmt::formatter<ss::chunked_fifo<T, chunk_size>> {
+    using type = ss::chunked_fifo<T, chunk_size>;
 
     constexpr auto parse(format_parse_context& ctx) { return ctx.begin(); }
 
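
src/v/utils/to_string.h gets the matching change on the formatting side: a fmt::formatter partial specialization written only for ss::chunked_fifo<T> covers nothing but the 128-entry default, so it is re-declared over both template parameters. A standalone sketch of the same pattern with a toy container, assuming the fmt library is available.

#include <fmt/format.h>

#include <cstddef>
#include <vector>

// Toy stand-in for ss::chunked_fifo<T, chunk_size>.
template<typename T, std::size_t chunk_size = 128>
struct toy_fifo {
    std::vector<T> items;
};

// Partial specialization over both parameters, so toy_fifo<T, 8> and
// toy_fifo<T, 128> pick up the same formatter.
template<typename T, std::size_t chunk_size>
struct fmt::formatter<toy_fifo<T, chunk_size>> {
    constexpr auto parse(fmt::format_parse_context& ctx) { return ctx.begin(); }

    template<typename FormatContext>
    auto format(const toy_fifo<T, chunk_size>& v, FormatContext& ctx) const {
        auto out = fmt::format_to(ctx.out(), "{{");
        bool first = true;
        for (const auto& e : v.items) {
            if (!first) {
                out = fmt::format_to(out, ", ");
            }
            out = fmt::format_to(out, "{}", e);
            first = false;
        }
        return fmt::format_to(out, "}}");
    }
};

int main() {
    toy_fifo<int, 8> f{{1, 2, 3}};
    fmt::print("{}\n", f); // prints "{1, 2, 3}"
}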
