Skip to content

Commit

Permalink
Fix unused params warnings plus incomplete struct init (done to default values). (VowpalWabbit#1710)
Browse files Browse the repository at this point in the history
  • Loading branch information
kumpera authored and JohnLangford committed Jan 16, 2019
1 parent 6dbb7e5 commit 6b7b160
Show file tree
Hide file tree
Showing 38 changed files with 225 additions and 214 deletions.
12 changes: 6 additions & 6 deletions explore/explore_internal.h
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ namespace exploration
}

template<typename It>
int generate_epsilon_greedy(float epsilon, uint32_t top_action, It pdf_first, It pdf_last, std::random_access_iterator_tag pdf_tag)
int generate_epsilon_greedy(float epsilon, uint32_t top_action, It pdf_first, It pdf_last, std::random_access_iterator_tag /* pdf_tag */)
{
if (pdf_last < pdf_first)
return E_EXPLORATION_BAD_RANGE;
Expand Down Expand Up @@ -65,7 +65,7 @@ namespace exploration
}

template<typename InputIt, typename OutputIt>
int generate_softmax(float lambda, InputIt scores_first, InputIt scores_last, std::input_iterator_tag scores_tag, OutputIt pdf_first, OutputIt pdf_last, std::random_access_iterator_tag pdf_tag)
int generate_softmax(float lambda, InputIt scores_first, InputIt scores_last, std::input_iterator_tag /* scores_tag */, OutputIt pdf_first, OutputIt pdf_last, std::random_access_iterator_tag /* pdf_tag */)
{
if (scores_last < scores_first || pdf_last < pdf_first)
return E_EXPLORATION_BAD_RANGE;
Expand Down Expand Up @@ -118,7 +118,7 @@ namespace exploration
}

template<typename InputIt, typename OutputIt>
int generate_bag(InputIt top_actions_first, InputIt top_actions_last, std::input_iterator_tag top_actions_tag, OutputIt pdf_first, OutputIt pdf_last, std::random_access_iterator_tag pdf_tag)
int generate_bag(InputIt top_actions_first, InputIt top_actions_last, std::input_iterator_tag /* top_actions_tag */, OutputIt pdf_first, OutputIt pdf_last, std::random_access_iterator_tag /* pdf_tag */)
{
// iterators don't support <= in general
if (pdf_first == pdf_last || pdf_last < pdf_first)
Expand Down Expand Up @@ -154,7 +154,7 @@ namespace exploration
}

template<typename It>
int enforce_minimum_probability(float minimum_uniform, bool update_zero_elements, It pdf_first, It pdf_last, std::random_access_iterator_tag pdf_tag)
int enforce_minimum_probability(float minimum_uniform, bool update_zero_elements, It pdf_first, It pdf_last, std::random_access_iterator_tag /* pdf_tag */)
{
// iterators don't support <= in general
if (pdf_first == pdf_last || pdf_last < pdf_first)
Expand Down Expand Up @@ -230,7 +230,7 @@ namespace exploration
}

template<typename It>
int sample_after_normalizing(uint64_t seed, It pdf_first, It pdf_last, uint32_t& chosen_index, std::input_iterator_tag pdf_category)
int sample_after_normalizing(uint64_t seed, It pdf_first, It pdf_last, uint32_t& chosen_index, std::input_iterator_tag /* pdf_category */)
{
if (pdf_first == pdf_last || pdf_last < pdf_first)
return E_EXPLORATION_BAD_RANGE;
Expand Down Expand Up @@ -299,7 +299,7 @@ namespace exploration
}

template<typename ActionIt>
int swap_chosen(ActionIt action_first, ActionIt action_last, std::forward_iterator_tag action_category, uint32_t chosen_index)
int swap_chosen(ActionIt action_first, ActionIt action_last, std::forward_iterator_tag /* action_category */, uint32_t chosen_index)
{
if ( action_last < action_first )
return E_EXPLORATION_BAD_RANGE;
Expand Down
2 changes: 1 addition & 1 deletion vowpalwabbit/OjaNewton.cc
Original file line number Diff line number Diff line change
Expand Up @@ -342,7 +342,7 @@ struct OjaNewton
}
};

void keep_example(vw& all, OjaNewton& ON, example& ec)
// Forward a finished example to VW's common output/accounting path.
// NOTE(review): the OjaNewton argument is deliberately left unnamed
// (commented out) to silence unused-parameter warnings -- this reducer
// needs no per-example state of its own here.
void keep_example(vw& all, OjaNewton& /* ON */, example& ec)
{
output_and_account_example(all, ec);
}
Expand Down
2 changes: 1 addition & 1 deletion vowpalwabbit/active_cover.cc
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ struct active_cover
LEARNER::base_learner* l;
};

bool dis_test(vw& all, example& ec, single_learner& base, float prediction, float threshold)
bool dis_test(vw& all, example& ec, single_learner& base, float /* prediction */, float threshold)
{
if(all.sd->t + ec.weight <= 3)
{
Expand Down
2 changes: 1 addition & 1 deletion vowpalwabbit/array_parameters.h
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ class sparse_parameters
}

#ifndef _WIN32
void share(size_t length)
// Stub: sparse_parameters does not implement cross-process weight
// sharing; the length parameter is unused (name commented out to avoid
// an unused-parameter warning) and calling this always throws.
void share(size_t /* length */)
{throw 1; //TODO: add better exceptions
}
#endif
Expand Down
6 changes: 3 additions & 3 deletions vowpalwabbit/audit_regressor.cc
Original file line number Diff line number Diff line change
Expand Up @@ -63,12 +63,12 @@ inline void audit_regressor_feature(audit_regressor_data& dat, const float, cons
if (dat.total_class_cnt > 1) // add class prefix for multiclass problems
temp = to_string(dat.cur_class) + ':' + temp;

bin_write_fixed(*dat.out_file, temp.c_str(), (uint32_t)temp.size());
dat.out_file->bin_write_fixed(temp.c_str(), (uint32_t)temp.size());

weights[ft_idx] = 0.; //mark value audited
}

void audit_regressor_lda(audit_regressor_data& rd, LEARNER::single_learner& base, example& ec)
void audit_regressor_lda(audit_regressor_data& rd, LEARNER::single_learner& /* base */, example& ec)
{
vw& all = *rd.all;

Expand All @@ -90,7 +90,7 @@ void audit_regressor_lda(audit_regressor_data& rd, LEARNER::single_learner& base
}
}

bin_write_fixed(*rd.out_file, tempstream.str().c_str(), (uint32_t)tempstream.str().size());
rd.out_file->bin_write_fixed(tempstream.str().c_str(), (uint32_t)tempstream.str().size());
}


Expand Down
16 changes: 8 additions & 8 deletions vowpalwabbit/bfgs.cc
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ float dot_with_direction(vw& all, example& ec)
}

template<class T>
double regularizer_direction_magnitude(vw& all, bfgs& b, double regularizer, T& weights)
double regularizer_direction_magnitude(vw& /* all */, bfgs& b, double regularizer, T& weights)
{
double ret = 0.;
if (b.regularizers == nullptr)
Expand Down Expand Up @@ -204,7 +204,7 @@ double regularizer_direction_magnitude(vw& all, bfgs& b, float regularizer)
}

template<class T>
float direction_magnitude(vw& all, T& weights)
float direction_magnitude(vw& /* all */, T& weights)
{
//compute direction magnitude
double ret = 0.;
Expand Down Expand Up @@ -480,7 +480,7 @@ double add_regularization(vw& all, bfgs& b, float regularization)
}

template <class T>
void finalize_preconditioner(vw& all, bfgs& b, float regularization, T& weights)
void finalize_preconditioner(vw& /* all */, bfgs& b, float regularization, T& weights)
{
float max_hessian = 0.f;

Expand Down Expand Up @@ -558,7 +558,7 @@ void preconditioner_to_regularizer(vw& all, bfgs& b, float regularization)
}

template<class T>
void regularizer_to_weight(vw& all, bfgs& b, T& weights)
void regularizer_to_weight(vw& /* all */, bfgs& b, T& weights)
{
if (b.regularizers != nullptr)
{
Expand Down Expand Up @@ -587,7 +587,7 @@ void zero_state(vw& all)
}

template<class T>
double derivative_in_direction(vw& all, bfgs& b, float* mem, int &origin, T& weights)
double derivative_in_direction(vw& /* all */, bfgs& b, float* mem, int &origin, T& weights)
{
double ret = 0.;
for (typename T::iterator w = weights.begin(); w != weights.end(); ++w)
Expand All @@ -608,7 +608,7 @@ double derivative_in_direction(vw& all, bfgs& b, float* mem, int &origin)
}

template<class T>
void update_weight(vw& all, float step_size, T& w)
void update_weight(vw& /* all */, float step_size, T& w)
{
for (typename T::iterator iter = w.begin(); iter != w.end(); ++iter)
(&(*iter))[W_XT] += step_size * (&(*iter))[W_DIR];
Expand Down Expand Up @@ -976,12 +976,12 @@ void save_load_regularizer(vw& all, bfgs& b, io_buf& model_file, bool read, bool
if (read)
{
c++;
brw = bin_read_fixed(model_file, (char*)&i, sizeof(i),"");
brw = model_file.bin_read_fixed((char*)&i, sizeof(i),"");
if (brw > 0)
{
assert (i< length);
v = &(b.regularizers[i]);
brw += bin_read_fixed(model_file, (char*)v, sizeof(*v), "");
brw += model_file.bin_read_fixed((char*)v, sizeof(*v), "");
}
}
else // write binary or text
Expand Down
8 changes: 4 additions & 4 deletions vowpalwabbit/boosting.cc
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,7 @@ void save_load_sampling(boosting &o, io_buf &model_file, bool read, bool text)
if (read)
{
float f;
bin_read_fixed(model_file, (char *) &f, sizeof(f), "");
model_file.bin_read_fixed((char *) &f, sizeof(f), "");
o.alpha[i] = f;
}
else
Expand All @@ -301,7 +301,7 @@ void save_load_sampling(boosting &o, io_buf &model_file, bool read, bool text)
if (read)
{
float f;
bin_read_fixed(model_file, (char *) &f, sizeof(f), "");
model_file.bin_read_fixed((char *) &f, sizeof(f), "");
o.v[i] = f;
}
else
Expand Down Expand Up @@ -332,7 +332,7 @@ void finish(boosting& o)
o.alpha.~vector();
}

void return_example(vw& all, boosting& a, example& ec)
void return_example(vw& all, boosting& /* a */, example& ec)
{
output_and_account_example(all, ec);
VW::finish_example(all,ec);
Expand All @@ -354,7 +354,7 @@ void save_load(boosting &o, io_buf &model_file, bool read, bool text)
if (read)
{
float f;
bin_read_fixed(model_file, (char *) &f, sizeof(f), "");
model_file.bin_read_fixed((char *) &f, sizeof(f), "");
o.alpha[i] = f;
}
else
Expand Down
16 changes: 8 additions & 8 deletions vowpalwabbit/cache.cc
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,12 @@ size_t read_cached_tag(io_buf& cache, example* ae)
{
char* c;
size_t tag_size;
if (buf_read(cache, c, sizeof(tag_size)) < sizeof(tag_size))
if (cache.buf_read(c, sizeof(tag_size)) < sizeof(tag_size))
return 0;
tag_size = *(size_t*)c;
c += sizeof(tag_size);
cache.set(c);
if (buf_read(cache, c, tag_size) < tag_size)
if (cache.buf_read(c, tag_size) < tag_size)
return 0;

ae->tag.clear();
Expand All @@ -75,7 +75,7 @@ int read_cached_features(vw* all, v_array<example*>& examples)
return 0;
char* c;
unsigned char num_indices = 0;
if (buf_read(*input, c, sizeof(num_indices)) < sizeof(num_indices))
if (input->buf_read(c, sizeof(num_indices)) < sizeof(num_indices))
return 0;
num_indices = *(unsigned char*)c;
c += sizeof(num_indices);
Expand All @@ -85,7 +85,7 @@ int read_cached_features(vw* all, v_array<example*>& examples)
{
size_t temp;
unsigned char index = 0;
if((temp = buf_read(*input,c,sizeof(index) + sizeof(size_t))) < sizeof(index) + sizeof(size_t))
if((temp = input->buf_read(c,sizeof(index) + sizeof(size_t))) < sizeof(index) + sizeof(size_t))
{
all->trace_message << "truncated example! " << temp << " " << char_size + sizeof(size_t) << endl;
return 0;
Expand All @@ -99,7 +99,7 @@ int read_cached_features(vw* all, v_array<example*>& examples)
c += sizeof(size_t);
all->p->input->set(c);
total += storage;
if (buf_read(*input,c,storage) < storage)
if (input->buf_read(c,storage) < storage)
{
all->trace_message << "truncated example! wanted: " << storage << " bytes" << endl;
return 0;
Expand Down Expand Up @@ -145,7 +145,7 @@ void output_byte(io_buf& cache, unsigned char s)
{
char *c;

// NOTE(review): the next two lines are the old and new versions of the
// same statement, left adjacent by this diff rendering -- in the actual
// file only the member-function form (cache.buf_write) remains after
// this commit. Reserve one byte in the cache buffer, store s, advance.
buf_write(cache, c, 1);
cache.buf_write(c, 1);
*(c++) = s;
cache.set(c);
}
Expand All @@ -158,7 +158,7 @@ void output_features(io_buf& cache, unsigned char index, features& fs, uint64_t
if (f != 1. && f != -1.)
storage += sizeof(feature_value);

buf_write(cache, c, sizeof(index) + storage + sizeof(size_t));
cache.buf_write(c, sizeof(index) + storage + sizeof(size_t));
*reinterpret_cast<unsigned char*>(c) = index;
c += sizeof(index);

Expand Down Expand Up @@ -192,7 +192,7 @@ void output_features(io_buf& cache, unsigned char index, features& fs, uint64_t
void cache_tag(io_buf& cache, v_array<char> tag)
{
char *c;
buf_write(cache, c, sizeof(size_t)+tag.size());
cache.buf_write(c, sizeof(size_t)+tag.size());
*(size_t*)c = tag.size();
c += sizeof(size_t);
memcpy(c, tag.begin(), tag.size());
Expand Down
10 changes: 5 additions & 5 deletions vowpalwabbit/cb.cc
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ char* bufread_label(CB::label* ld, char* c, io_buf& cache)
ld->costs.clear();
c += sizeof(size_t);
size_t total = sizeof(cb_class)*num;
if (buf_read(cache, c, total) < total)
if (cache.buf_read(c, total) < total)
{
cout << "error in demarshal of cost data" << endl;
return c;
Expand All @@ -42,7 +42,7 @@ size_t read_cached_label(shared_data*, void* v, io_buf& cache)
ld->costs.clear();
char *c;
size_t total = sizeof(size_t);
if (buf_read(cache, c, total) < total)
if (cache.buf_read(c, total) < total)
return 0;
bufread_label(ld,c, cache);

Expand Down Expand Up @@ -70,7 +70,7 @@ void cache_label(void* v, io_buf& cache)
{
char *c;
CB::label* ld = (CB::label*) v;
buf_write(cache, c, sizeof(size_t)+sizeof(cb_class)*ld->costs.size());
cache.buf_write(c, sizeof(size_t)+sizeof(cb_class)*ld->costs.size());
bufcache_label(ld,c);
}

Expand Down Expand Up @@ -226,7 +226,7 @@ size_t read_cached_label(shared_data*sd, void* v, io_buf& cache)
CB_EVAL::label* ld = (CB_EVAL::label*) v;
char* c;
size_t total = sizeof(uint32_t);
if (buf_read(cache, c, total) < total)
if (cache.buf_read(c, total) < total)
return 0;
ld->action = *(uint32_t*)c;

Expand All @@ -237,7 +237,7 @@ void cache_label(void* v, io_buf& cache)
{
char *c;
CB_EVAL::label* ld = (CB_EVAL::label*) v;
buf_write(cache, c, sizeof(uint32_t));
cache.buf_write(c, sizeof(uint32_t));
*(uint32_t *)c = ld->action;

CB::cache_label(&(ld->event), cache);
Expand Down
2 changes: 1 addition & 1 deletion vowpalwabbit/cb_explore.cc
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ void predict_or_learn_bag(cb_explore& data, single_learner& base, example& ec)
ec.pred.a_s = probs;
}

void get_cover_probabilities(cb_explore& data, single_learner& base, example& ec, v_array<action_score>& probs)
void get_cover_probabilities(cb_explore& data, single_learner& /* base */, example& ec, v_array<action_score>& probs)
{
float additive_probability = 1.f / (float)data.cover_size;
data.preds.clear();
Expand Down
4 changes: 2 additions & 2 deletions vowpalwabbit/cb_explore_adf.cc
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ void get_cost_ranges(std::vector<float> &min_costs,
}
}

size_t fill_tied(cb_explore_adf& data, v_array<action_score>& preds)
size_t fill_tied(cb_explore_adf& /* data */, v_array<action_score>& preds)
{
if (preds.size() == 0 )
return 0;
Expand Down Expand Up @@ -581,7 +581,7 @@ void finish(cb_explore_adf& data)
data.prepped_cs_labels[i].costs.delete_v();
data.prepped_cs_labels.delete_v();
data.gen_cs.pred_scores.costs.delete_v();
data.gen_cs.mtr_ec_seq.~vector();
data.gen_cs.mtr_ec_seq.~vector();
}


Expand Down
4 changes: 2 additions & 2 deletions vowpalwabbit/confidence.cc
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ using namespace std;
struct confidence { vw* all;};

template <bool is_learn, bool is_confidence_after_training>
void predict_or_learn_with_confidence(confidence& c, single_learner& base, example& ec)
void predict_or_learn_with_confidence(confidence& /* c */, single_learner& base, example& ec)
{
float threshold = 0.f;
float sensitivity = 0.f;
Expand Down Expand Up @@ -62,7 +62,7 @@ void output_and_account_confidence_example(vw& all, example& ec)
print_update(all, ec);
}

void return_confidence_example(vw& all, confidence& c, example& ec)
void return_confidence_example(vw& all, confidence& /* c */, example& ec)
{
output_and_account_confidence_example(all, ec);
VW::finish_example(all,ec);
Expand Down
6 changes: 3 additions & 3 deletions vowpalwabbit/cost_sensitive.cc
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ char* bufread_label(label* ld, char* c, io_buf& cache)
ld->costs.clear();
c += sizeof(size_t);
size_t total = sizeof(wclass)*num;
if (buf_read(cache, c, (int)total) < total)
if (cache.buf_read(c, (int)total) < total)
{
cout << "error in demarshal of cost data" << endl;
return c;
Expand All @@ -56,7 +56,7 @@ size_t read_cached_label(shared_data*, void* v, io_buf& cache)
ld->costs.clear();
char *c;
size_t total = sizeof(size_t);
if (buf_read(cache, c, (int)total) < total)
if (cache.buf_read(c, (int)total) < total)
return 0;
bufread_label(ld,c, cache);

Expand Down Expand Up @@ -84,7 +84,7 @@ void cache_label(void* v, io_buf& cache)
{
char *c;
label* ld = (label*) v;
buf_write(cache, c, sizeof(size_t)+sizeof(wclass)*ld->costs.size());
cache.buf_write(c, sizeof(size_t)+sizeof(wclass)*ld->costs.size());
bufcache_label(ld,c);
}

Expand Down
Loading

0 comments on commit 6b7b160

Please sign in to comment.