From 33f84238f03d1036fef68746fa9e15732548fd99 Mon Sep 17 00:00:00 2001
From: Byron Xu
Date: Fri, 1 Dec 2023 17:02:14 -0500
Subject: [PATCH] formatting

---
 .../include/vw/core/array_parameters_dense.h  |   5 +-
 .../core/include/vw/core/reductions/ftrl.h    |   2 +-
 .../vw/core/reductions/search/search.h        |  12 +-
 .../core/src/reductions/search/search.cc      | 111 +++++++++---------
 .../core/src/reductions/search/search_meta.cc |  11 +-
 5 files changed, 71 insertions(+), 70 deletions(-)

diff --git a/vowpalwabbit/core/include/vw/core/array_parameters_dense.h b/vowpalwabbit/core/include/vw/core/array_parameters_dense.h
index 755a4084ac8..a7f53064f71 100644
--- a/vowpalwabbit/core/include/vw/core/array_parameters_dense.h
+++ b/vowpalwabbit/core/include/vw/core/array_parameters_dense.h
@@ -102,10 +102,7 @@ class dense_parameters
   dense_parameters(dense_parameters&&) noexcept;

   bool not_null();
-  VW::weight* first()
-  {
-    return _begin.get();
-  }  // TODO: Temporary fix for allreduce.
+  VW::weight* first() { return _begin.get(); }  // TODO: Temporary fix for allreduce.

   VW::weight* data() { return _begin.get(); }
diff --git a/vowpalwabbit/core/include/vw/core/reductions/ftrl.h b/vowpalwabbit/core/include/vw/core/reductions/ftrl.h
index 25231244693..94adb9143b8 100644
--- a/vowpalwabbit/core/include/vw/core/reductions/ftrl.h
+++ b/vowpalwabbit/core/include/vw/core/reductions/ftrl.h
@@ -47,5 +47,5 @@ size_t write_model_field(io_buf&, VW::reductions::ftrl&, const std::string&, bool)
 }  // namespace model_utils

 std::shared_ptr<VW::LEARNER::learner> ftrl_setup(VW::setup_base_i& stack_builder);
-}
+}  // namespace reductions
 }  // namespace VW
diff --git a/vowpalwabbit/core/include/vw/core/reductions/search/search.h b/vowpalwabbit/core/include/vw/core/reductions/search/search.h
index 2c29a99ebf2..6b8e9c60219 100644
--- a/vowpalwabbit/core/include/vw/core/reductions/search/search.h
+++ b/vowpalwabbit/core/include/vw/core/reductions/search/search.h
@@ -12,12 +12,12 @@
 // (going to clog [which in turn goes to err, with some differences])
 // We may want to create/use some macro-based loggers (which will wrap the spdlog ones)
 // to mimic this behavior.
-#define cdbg std::clog
-#undef cdbg
-#define cdbg \
-  if (1) {} \
-  else \
-    std::clog
+# define cdbg std::clog
+# undef cdbg
+# define cdbg \
+  if (1) {} \
+  else \
+    std::clog
 // comment the previous two lines if you want loads of debug output :)

 using action = uint32_t;
diff --git a/vowpalwabbit/core/src/reductions/search/search.cc b/vowpalwabbit/core/src/reductions/search/search.cc
index 4594e30fc47..3774b9439bd 100644
--- a/vowpalwabbit/core/src/reductions/search/search.cc
+++ b/vowpalwabbit/core/src/reductions/search/search.cc
@@ -124,7 +124,7 @@ class action_repr
   action a = 0;
   std::shared_ptr<VW::features> repr = nullptr;
   action_repr() = default;
-  //action_repr(action _a, std::shared_ptr<VW::features> _repr) : a(_a), repr(std::move(_repr)) {}
+  // action_repr(action _a, std::shared_ptr<VW::features> _repr) : a(_a), repr(std::move(_repr)) {}
   action_repr(action _a, VW::features* _repr) : a(_a)
   {
     // Copy construct the features object
@@ -191,7 +191,7 @@ class search_private
   auto_condition_settings acset;  // settings for auto-conditioning
   size_t history_length = 0;      // value of --search_history_length, used by some tasks, default 1

-  size_t A = 0;  // NOLINT total number of actions, [1..A]; 0 means ldf
+  size_t A = 0;              // NOLINT total number of actions, [1..A]; 0 means ldf
   size_t feature_width = 0;  // total number of learners;
   bool cb_learner = false;   // do contextual bandit learning on action (was "! rollout_all_actions" which was confusing)
   search_state state;        // current state of learning
@@ -315,22 +315,13 @@ class search_private

   ~search_private()
   {
-    if (all)
-    {
-      clear_memo_foreach_action(*this);
-    }
+    if (all) { clear_memo_foreach_action(*this); }
   }
 };

-void clear_memo_foreach_action(search_private& priv)
-{
-  priv.memo_foreach_action.clear();
-}
+void clear_memo_foreach_action(search_private& priv) { priv.memo_foreach_action.clear(); }

-search::search()
-{
-  priv = std::make_shared<search_private>();
-}
+search::search() { priv = std::make_shared<search_private>(); }

 std::string audit_feature_space("conditional");
 uint64_t conditional_constant = 8290743;
@@ -817,7 +808,8 @@ void add_example_conditioning(search_private& priv, VW::example& ec, size_t condition_on_cnt
   {
     auto old_ft_index_offset = ec.ft_offset;
     ec.ft_offset = 0;
-    auto restore_ft_index_offset = VW::scope_exit([&ec, old_ft_index_offset] { ec.ft_offset = old_ft_index_offset; });
+    auto restore_ft_index_offset =
+        VW::scope_exit([&ec, old_ft_index_offset] { ec.ft_offset = old_ft_index_offset; });
     VW::foreach_feature<search_private, add_new_feature>(*priv.all, ec, priv);
   }
 }
@@ -835,7 +827,8 @@ void add_example_conditioning(search_private& priv, VW::example& ec, size_t condition_on_cnt
       {
         if ((fs.values[k] > 1e-10) || (fs.values[k] < -1e-10))
         {
-          uint64_t fid = 84913 + 48371803 * (extra_offset + 8392817 * name) + 840137 * (4891 + fs.indices[k] / multiplier);
+          uint64_t fid =
+              84913 + 48371803 * (extra_offset + 8392817 * name) + 840137 * (4891 + fs.indices[k] / multiplier);
           if (priv.all->output_config.audit)
           {
             priv.dat_new_feature_audit_ss.str("");
@@ -1330,11 +1323,13 @@ action single_prediction_ldf(search_private& priv, VW::example* ecs, size_t ec_cnt
   VW::polylabel old_label = ecs[a].l;
   uint64_t old_offset = ecs[a].ft_offset;

-  auto restore_example = VW::scope_exit([&ecs, a, start_K, &old_label, old_offset] {
-    ecs[a].l = old_label;
-    ecs[a].ft_offset = old_offset;
-    if (start_K > 0) { VW::details::truncate_example_namespaces_from_example(ecs[a], ecs[0]); }
-  });
+  auto restore_example = VW::scope_exit(
+      [&ecs, a, start_K, &old_label, old_offset]
+      {
+        ecs[a].l = old_label;
+        ecs[a].ft_offset = old_offset;
+        if (start_K > 0) { VW::details::truncate_example_namespaces_from_example(ecs[a], ecs[0]); }
+      });

   if (start_K > 0) { VW::details::append_example_namespaces_from_example(ecs[a], ecs[0]); }
   ecs[a].l.cs = priv.ldf_test_label;
@@ -1520,10 +1515,12 @@ void generate_training_example(search_private& priv, VW::polylabel& losses, float weight
   VW::example& ec = priv.learn_ec_ref[0];
   VW::polylabel old_label = ec.l;

-  auto restore_example = VW::scope_exit([&priv, &ec, &old_label, add_conditioning] {
-    if (add_conditioning) { del_example_conditioning(priv, ec); }
-    ec.l = old_label;
-  });
+  auto restore_example = VW::scope_exit(
+      [&priv, &ec, &old_label, add_conditioning]
+      {
+        if (add_conditioning) { del_example_conditioning(priv, ec); }
+        ec.l = old_label;
+      });

   ec.l = losses;  // labels;
   if (add_conditioning)
@@ -1546,17 +1543,19 @@
 {
   assert(cs_get_costs_size(priv.cb_learner, losses) == priv.learn_ec_ref_cnt);
   size_t start_K = (priv.is_ldf && VW::is_cs_example_header(priv.learn_ec_ref[0])) ? 1 : 0;  // NOLINT

-  auto restore_example = VW::scope_exit([&priv, start_K, add_conditioning] {
-    if (add_conditioning)
-    {
-      for (action a = static_cast<action>(start_K); a < priv.learn_ec_ref_cnt; a++)
-      {
-        VW::example& ec = priv.learn_ec_ref[a];
-        del_example_conditioning(priv, ec);
-      }
-    }
-  });
+  auto restore_example = VW::scope_exit(
+      [&priv, start_K, add_conditioning]
+      {
+        if (add_conditioning)
+        {
+          for (action a = static_cast<action>(start_K); a < priv.learn_ec_ref_cnt; a++)
+          {
+            VW::example& ec = priv.learn_ec_ref[a];
+            del_example_conditioning(priv, ec);
+          }
+        }
+      });

   // TODO: weight
   if (add_conditioning)
@@ -1874,12 +1873,14 @@ action search_predict(search_private& priv, VW::example* ecs, size_t ec_cnt, ptag mytag
   size_t start_K = (priv.is_ldf && VW::is_cs_example_header(ecs[0])) ? 1 : 0;  // NOLINT
   priv.last_action_repr.clear();

-  auto restore_example = VW::scope_exit([&priv, start_K, ec_cnt, ecs] {
-    if (priv.auto_condition_features)
-    {
-      for (size_t n = start_K; n < ec_cnt; n++) { del_example_conditioning(priv, ecs[n]); }
-    }
-  });
+  auto restore_example = VW::scope_exit(
+      [&priv, start_K, ec_cnt, ecs]
+      {
+        if (priv.auto_condition_features)
+        {
+          for (size_t n = start_K; n < ec_cnt; n++) { del_example_conditioning(priv, ecs[n]); }
+        }
+      });

   if (priv.auto_condition_features)
   {
     for (size_t n = start_K; n < ec_cnt; n++)
@@ -1986,10 +1987,10 @@ void hoopla_permute(size_t* B, size_t* end)
   std::sort(B, end, cmp_size_t);
   // make some temporary space
   std::vector<size_t> A((N + 1) * 2, 0);  // NOLINT
-  A[N] = B[0];                  // arbitrarily choose the maximum in the middle
-  A[N + 1] = B[N - 1];          // so the maximum goes next to it
-  size_t lo = N, hi = N + 1;    // which parts of A have we filled in? [lo,hi]
-  size_t i = 0, j = N - 1;      // which parts of B have we already covered? [0,i] and [j,N-1]
+  A[N] = B[0];                // arbitrarily choose the maximum in the middle
+  A[N + 1] = B[N - 1];        // so the maximum goes next to it
+  size_t lo = N, hi = N + 1;  // which parts of A have we filled in? [lo,hi]
+  size_t i = 0, j = N - 1;    // which parts of B have we already covered? [0,i] and [j,N-1]
   while (i + 1 < j)
   {  // there are four options depending on where things get placed
@@ -2085,15 +2086,17 @@ void BaseTask::Run()
   float old_test_loss = priv.test_loss;
   // float old_learn_loss = priv.learn_loss;
   float old_train_loss = priv.train_loss;

-  auto restore_priv = VW::scope_exit([this, &priv, old_should_produce_string, old_test_loss, old_train_loss] {
-    priv.should_produce_string = old_should_produce_string;
-    if (!this->_final_run)
-    {
-      priv.test_loss = old_test_loss;
-      // priv.learn_loss = old_learn_loss;
-      priv.train_loss = old_train_loss;
-    }
-  });
+  auto restore_priv = VW::scope_exit(
+      [this, &priv, old_should_produce_string, old_test_loss, old_train_loss]
+      {
+        priv.should_produce_string = old_should_produce_string;
+        if (!this->_final_run)
+        {
+          priv.test_loss = old_test_loss;
+          // priv.learn_loss = old_learn_loss;
+          priv.train_loss = old_train_loss;
+        }
+      });

   if (!_final_run && !_with_output_string) { priv.should_produce_string = false; }
   priv.learn_loss *= 0.5;
diff --git a/vowpalwabbit/core/src/reductions/search/search_meta.cc b/vowpalwabbit/core/src/reductions/search/search_meta.cc
index 934e57c4004..ac30f677a88 100644
--- a/vowpalwabbit/core/src/reductions/search/search_meta.cc
+++ b/vowpalwabbit/core/src/reductions/search/search_meta.cc
@@ -75,14 +75,14 @@ class task_data
 public:
   size_t max_branches, kbest;
   std::vector<branch> branches;
-  std::vector<std::pair<branch, std::string> > final;
+  std::vector<std::pair<branch, std::string>> final;
   path trajectory;
   float total_cost;
   size_t cur_branch;
   std::unique_ptr<std::string> output_string = nullptr;
   std::unique_ptr<std::string> kbest_out = nullptr;

-  task_data(size_t mb, size_t kb) : max_branches(mb), kbest(kb) { }
+  task_data(size_t mb, size_t kb) : max_branches(mb), kbest(kb) {}
 };

 void initialize(Search::search& sch, size_t& /*num_actions*/, options_i& options)
@@ -99,7 +99,8 @@ void initialize(Search::search& sch, size_t& /*num_actions*/, options_i& options)
       .help("Number of best items to output (0=just like non-selectional-branching, default)"));
   options.add_and_parse(new_options);

-  auto d = std::make_shared<task_data>(VW::cast_to_smaller_type<size_t>(max_branches), VW::cast_to_smaller_type<size_t>(kbest));
+  auto d = std::make_shared<task_data>(
+      VW::cast_to_smaller_type<size_t>(max_branches), VW::cast_to_smaller_type<size_t>(kbest));
   sch.set_metatask_data(std::move(d));
 }
@@ -204,8 +205,8 @@ void run(Search::search& sch, VW::multi_ex& ec)

   // sort the finals by cost
   stable_sort(d.final.begin(), d.final.end(),
-      [](const std::pair<branch, std::string>& a, const std::pair<branch, std::string>& b) -> bool
-      { return a.first.first < b.first.first; });
+      [](const std::pair<branch, std::string>& a,
+          const std::pair<branch, std::string>& b) -> bool { return a.first.first < b.first.first; });

   d.kbest_out.reset();
   if (d.output_string && (d.kbest > 0))
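
Note (not part of the commit): nearly every search.cc hunk above is clang-format
re-wrapping a lambda passed to VW::scope_exit, the RAII guard VW uses to restore
temporarily mutated state (labels, ft_offset, loss accumulators) on every exit
path. A minimal sketch of the idiom follows, assuming C++17 and nothing
VW-specific; scope_guard and make_scope_exit are illustrative names, not VW's
API.

#include <cstdint>
#include <iostream>
#include <utility>

// Illustrative stand-in for VW::scope_exit: stores a callback and runs it
// when the guard is destroyed, whether the scope exits normally or by throw.
template <typename F>
class scope_guard
{
public:
  explicit scope_guard(F f) : _f(std::move(f)) {}
  ~scope_guard() { _f(); }
  scope_guard(const scope_guard&) = delete;
  scope_guard& operator=(const scope_guard&) = delete;

private:
  F _f;
};

// Factory so the callback type is deduced (C++17 copy elision makes returning
// the non-copyable guard by value well-formed).
template <typename F>
scope_guard<F> make_scope_exit(F f)
{
  return scope_guard<F>(std::move(f));
}

int main()
{
  std::uint64_t ft_offset = 42;
  {
    std::uint64_t old_offset = ft_offset;
    // Same shape as the hunks above: stash the old value, mutate it for a
    // nested call, and let the guard restore it on scope exit.
    auto restore = make_scope_exit([&ft_offset, old_offset] { ft_offset = old_offset; });
    ft_offset = 0;  // temporary mutation
  }  // guard destructor runs here and restores ft_offset
  std::cout << ft_offset << '\n';  // prints 42
}

Because the restore logic runs in a destructor, early returns and exceptions
inside the guarded region cannot leak the temporary mutation, which is why the
longer clang-formatted lambda bodies in this patch are behaviorally identical
to the compact originals.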