From b181a88f9f684c4aea100a47365e033ece220898 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sergio=20Gavil=C3=A1n?= <50085759+sgavil@users.noreply.github.com> Date: Thu, 29 Oct 2020 20:36:00 +0100 Subject: [PATCH] Reduced some C++ compiler warnings (#6197) * Removed some warnings * Rebase with master * Solved C++ Google Tests errors made by refactoring in order to remove warnings * Undo renaming path -> path_ * Fix style check Co-authored-by: Hyunsu Cho --- include/xgboost/json.h | 12 +++++------ rabit/src/c_api.cc | 12 +++++------ rabit/src/engine.cc | 6 +++--- src/common/host_device_vector.cc | 8 ++++---- src/common/json.cc | 26 ++++++++++++------------ src/tree/updater_quantile_hist.cc | 30 +++++++++++++++------------- src/tree/updater_quantile_hist.h | 11 +++++----- src/tree/updater_refresh.cc | 2 +- src/tree/updater_sync.cc | 10 +++++----- tests/cpp/tree/test_quantile_hist.cc | 2 +- 10 files changed, 61 insertions(+), 58 deletions(-) diff --git a/include/xgboost/json.h b/include/xgboost/json.h index 68a8a6712..b575c3247 100644 --- a/include/xgboost/json.h +++ b/include/xgboost/json.h @@ -84,7 +84,7 @@ class JsonString : public Value { Value(ValueKind::kString), str_{str} {} JsonString(std::string&& str) : // NOLINT Value(ValueKind::kString), str_{std::move(str)} {} - JsonString(JsonString&& str) : // NOLINT + JsonString(JsonString&& str) noexcept : // NOLINT Value(ValueKind::kString), str_{std::move(str.str_)} {} void Save(JsonWriter* writer) override; @@ -179,7 +179,7 @@ class JsonNumber : public Value { JsonNumber(FloatT value) : Value{ValueKind::kNumber}, // NOLINT number_{static_cast(value)} {} JsonNumber(JsonNumber const& that) = delete; - JsonNumber(JsonNumber&& that) : Value{ValueKind::kNumber}, number_{that.number_} {} + JsonNumber(JsonNumber&& that) noexcept : Value{ValueKind::kNumber}, number_{that.number_} {} void Save(JsonWriter* writer) override; @@ -227,7 +227,7 @@ class JsonInteger : public Value { : Value(ValueKind::kInteger), integer_{static_cast(value)} {} - JsonInteger(JsonInteger &&that) + JsonInteger(JsonInteger &&that) noexcept : Value{ValueKind::kInteger}, integer_{that.integer_} {} Json& operator[](std::string const & key) override; @@ -250,7 +250,7 @@ class JsonNull : public Value { public: JsonNull() : Value(ValueKind::kNull) {} JsonNull(std::nullptr_t) : Value(ValueKind::kNull) {} // NOLINT - JsonNull(JsonNull&&) : Value(ValueKind::kNull) {} + JsonNull(JsonNull&&) noexcept : Value(ValueKind::kNull) {} void Save(JsonWriter* writer) override; @@ -267,7 +267,7 @@ class JsonNull : public Value { /*! \brief Describes both true and false. 
*/ class JsonBoolean : public Value { - bool boolean_; + bool boolean_ = false; public: JsonBoolean() : Value(ValueKind::kBoolean) {} // NOLINT @@ -278,7 +278,7 @@ class JsonBoolean : public Value { std::is_same::value>::type* = nullptr> JsonBoolean(Bool value) : // NOLINT Value(ValueKind::kBoolean), boolean_{value} {} - JsonBoolean(JsonBoolean&& value) : // NOLINT + JsonBoolean(JsonBoolean&& value) noexcept: // NOLINT Value(ValueKind::kBoolean), boolean_{value.boolean_} {} void Save(JsonWriter* writer) override; diff --git a/rabit/src/c_api.cc b/rabit/src/c_api.cc index 071da2944..1caf1e406 100644 --- a/rabit/src/c_api.cc +++ b/rabit/src/c_api.cc @@ -26,10 +26,10 @@ struct FHelper { template struct FHelper { static void - Allreduce(DType *senrecvbuf_, - size_t count, - void (*prepare_fun)(void *arg), - void *prepare_arg) { + Allreduce(DType *, + size_t , + void (*)(void *arg), + void *) { utils::Error("DataType does not support bitwise or operation"); } }; @@ -196,7 +196,7 @@ struct ReadWrapper : public Serializable { "Read pickle string"); } } - void Save(Stream *fo) const override { + void Save(Stream *) const override { utils::Error("not implemented"); } }; @@ -208,7 +208,7 @@ struct WriteWrapper : public Serializable { size_t length) : data(data), length(length) { } - void Load(Stream *fi) override { + void Load(Stream *) override { utils::Error("not implemented"); } void Save(Stream *fo) const override { diff --git a/rabit/src/engine.cc b/rabit/src/engine.cc index 66302530a..48cbc0e45 100644 --- a/rabit/src/engine.cc +++ b/rabit/src/engine.cc @@ -96,8 +96,8 @@ void Allreduce_(void *sendrecvbuf, // NOLINT size_t type_nbytes, size_t count, IEngine::ReduceFunction red, - mpi::DataType dtype, - mpi::OpType op, + mpi::DataType, + mpi::OpType , IEngine::PreprocFunction prepare_fun, void *prepare_arg) { GetEngine()->Allreduce(sendrecvbuf, type_nbytes, count, red, prepare_fun, @@ -112,7 +112,7 @@ int ReduceHandle::TypeSize(const MPI::Datatype &dtype) { return static_cast(dtype.type_size); } -void ReduceHandle::Init(IEngine::ReduceFunction redfunc, size_t type_nbytes) { +void ReduceHandle::Init(IEngine::ReduceFunction redfunc, size_t ) { utils::Assert(redfunc_ == nullptr, "cannot initialize reduce handle twice"); redfunc_ = redfunc; } diff --git a/src/common/host_device_vector.cc b/src/common/host_device_vector.cc index a16154966..cb02b22f0 100644 --- a/src/common/host_device_vector.cc +++ b/src/common/host_device_vector.cc @@ -33,19 +33,19 @@ struct HostDeviceVectorImpl { }; template -HostDeviceVector::HostDeviceVector(size_t size, T v, int device) +HostDeviceVector::HostDeviceVector(size_t size, T v, int) : impl_(nullptr) { impl_ = new HostDeviceVectorImpl(size, v); } template -HostDeviceVector::HostDeviceVector(std::initializer_list init, int device) +HostDeviceVector::HostDeviceVector(std::initializer_list init, int) : impl_(nullptr) { impl_ = new HostDeviceVectorImpl(init); } template -HostDeviceVector::HostDeviceVector(const std::vector& init, int device) +HostDeviceVector::HostDeviceVector(const std::vector& init, int) : impl_(nullptr) { impl_ = new HostDeviceVectorImpl(init); } @@ -166,7 +166,7 @@ bool HostDeviceVector::DeviceCanWrite() const { } template -void HostDeviceVector::SetDevice(int device) const {} +void HostDeviceVector::SetDevice(int) const {} // explicit instantiations are required, as HostDeviceVector isn't header-only template class HostDeviceVector; diff --git a/src/common/json.cc b/src/common/json.cc index e31bbee7b..cdec3aede 100644 --- a/src/common/json.cc +++ 
b/src/common/json.cc @@ -76,7 +76,7 @@ void JsonWriter::Visit(JsonInteger const* num) { std::memcpy(stream_->data() + ori_size, i2s_buffer_, digits); } -void JsonWriter::Visit(JsonNull const* null) { +void JsonWriter::Visit(JsonNull const* ) { auto s = stream_->size(); stream_->resize(s + 4); auto& buf = (*stream_); @@ -179,7 +179,7 @@ Json& JsonObject::operator[](std::string const & key) { return object_[key]; } -Json& JsonObject::operator[](int ind) { +Json& JsonObject::operator[](int ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by Integer."; return DummyJsonObject(); @@ -203,13 +203,13 @@ void JsonObject::Save(JsonWriter* writer) { } // Json String -Json& JsonString::operator[](std::string const & key) { +Json& JsonString::operator[](std::string const& ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by string."; return DummyJsonObject(); } -Json& JsonString::operator[](int ind) { +Json& JsonString::operator[](int ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by Integer." << " Please try obtaining std::string first."; @@ -236,7 +236,7 @@ void JsonString::Save(JsonWriter* writer) { JsonArray::JsonArray(JsonArray && that) : Value(ValueKind::kArray), vec_{std::move(that.vec_)} {} -Json& JsonArray::operator[](std::string const & key) { +Json& JsonArray::operator[](std::string const& ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by string."; return DummyJsonObject(); @@ -263,13 +263,13 @@ void JsonArray::Save(JsonWriter* writer) { } // Json Number -Json& JsonNumber::operator[](std::string const & key) { +Json& JsonNumber::operator[](std::string const& ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by string."; return DummyJsonObject(); } -Json& JsonNumber::operator[](int ind) { +Json& JsonNumber::operator[](int ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by Integer."; return DummyJsonObject(); @@ -298,13 +298,13 @@ void JsonNumber::Save(JsonWriter* writer) { } // Json Integer -Json& JsonInteger::operator[](std::string const& key) { +Json& JsonInteger::operator[](std::string const& ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by string."; return DummyJsonObject(); } -Json& JsonInteger::operator[](int ind) { +Json& JsonInteger::operator[](int ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by Integer."; return DummyJsonObject(); @@ -326,13 +326,13 @@ void JsonInteger::Save(JsonWriter* writer) { } // Json Null -Json& JsonNull::operator[](std::string const & key) { +Json& JsonNull::operator[](std::string const& ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by string."; return DummyJsonObject(); } -Json& JsonNull::operator[](int ind) { +Json& JsonNull::operator[](int ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by Integer."; return DummyJsonObject(); @@ -353,13 +353,13 @@ void JsonNull::Save(JsonWriter* writer) { } // Json Boolean -Json& JsonBoolean::operator[](std::string const & key) { +Json& JsonBoolean::operator[](std::string const& ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by string."; return DummyJsonObject(); } -Json& JsonBoolean::operator[](int ind) { +Json& JsonBoolean::operator[](int ) { LOG(FATAL) << "Object of type " << Value::TypeStr() << " can not be indexed by Integer."; return DummyJsonObject(); diff --git 
a/src/tree/updater_quantile_hist.cc b/src/tree/updater_quantile_hist.cc index f22e0d5cd..46a82ea38 100644 --- a/src/tree/updater_quantile_hist.cc +++ b/src/tree/updater_quantile_hist.cc @@ -137,7 +137,7 @@ void BatchHistSynchronizer::SyncHistograms(BuilderT *builder, }, 1024); common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) { - const auto entry = builder->nodes_for_explicit_hist_build_[node]; + const auto& entry = builder->nodes_for_explicit_hist_build_[node]; auto this_hist = builder->hist_[entry.nid]; // Merging histograms from each thread into once builder->hist_buffer_.ReduceHist(node, r.begin(), r.end()); @@ -163,7 +163,7 @@ void DistributedHistSynchronizer::SyncHistograms(BuilderT* builder return nbins; }, 1024); common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) { - const auto entry = builder->nodes_for_explicit_hist_build_[node]; + const auto& entry = builder->nodes_for_explicit_hist_build_[node]; auto this_hist = builder->hist_[entry.nid]; // Merging histograms from each thread into once builder->hist_buffer_.ReduceHist(node, r.begin(), r.end()); @@ -202,7 +202,7 @@ void DistributedHistSynchronizer::ParallelSubtractionHist( const std::vector& nodes, const RegTree * p_tree) { common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) { - const auto entry = nodes[node]; + const auto& entry = nodes[node]; if (!((*p_tree)[entry.nid].IsLeftChild())) { auto this_hist = builder->hist_[entry.nid]; @@ -827,18 +827,18 @@ void QuantileHistMaker::Builder::InitData(const GHistIndexMatrix& const uint32_t nbins_f0 = gmat.cut.Ptrs()[1] - gmat.cut.Ptrs()[0]; if (nrow * ncol == nnz) { // dense data with zero-based indexing - data_layout_ = kDenseDataZeroBased; + data_layout_ = DataLayout::kDenseDataZeroBased; } else if (nbins_f0 == 0 && nrow * (ncol - 1) == nnz) { // dense data with one-based indexing - data_layout_ = kDenseDataOneBased; + data_layout_ = DataLayout::kDenseDataOneBased; } else { // sparse data - data_layout_ = kSparseData; + data_layout_ = DataLayout::kSparseData; } } // store a pointer to the tree p_last_tree_ = &tree; - if (data_layout_ == kDenseDataOneBased) { + if (data_layout_ == DataLayout::kDenseDataOneBased) { column_sampler_.Init(info.num_col_, info.feature_weigths.ConstHostVector(), param_.colsample_bynode, param_.colsample_bylevel, param_.colsample_bytree, true); @@ -847,7 +847,8 @@ void QuantileHistMaker::Builder::InitData(const GHistIndexMatrix& param_.colsample_bynode, param_.colsample_bylevel, param_.colsample_bytree, false); } - if (data_layout_ == kDenseDataZeroBased || data_layout_ == kDenseDataOneBased) { + if (data_layout_ == DataLayout::kDenseDataZeroBased + || data_layout_ == DataLayout::kDenseDataOneBased) { /* specialized code for dense data: choose the column that has a least positive number of discrete bins. 
For dense data (with no missing value), @@ -1138,9 +1139,9 @@ void QuantileHistMaker::Builder::FindSplitConditions( // split_cond = -1 indicates that split_pt is less than all known cut points CHECK_LT(upper_bound, static_cast(std::numeric_limits::max())); - for (uint32_t i = lower_bound; i < upper_bound; ++i) { - if (split_pt == gmat.cut.Values()[i]) { - split_cond = static_cast(i); + for (uint32_t bound = lower_bound; bound < upper_bound; ++bound) { + if (split_pt == gmat.cut.Values()[bound]) { + split_cond = static_cast(bound); } } (*split_conditions)[i] = split_cond; @@ -1151,7 +1152,7 @@ void QuantileHistMaker::Builder::AddSplitsToRowSet( const std::vector& nodes, RegTree* p_tree) { const size_t n_nodes = nodes.size(); - for (size_t i = 0; i < n_nodes; ++i) { + for (unsigned int i = 0; i < n_nodes; ++i) { const int32_t nid = nodes[i].nid; const size_t n_left = partition_builder_.GetNLeftElems(i); const size_t n_right = partition_builder_.GetNRightElems(i); @@ -1165,7 +1166,7 @@ template void QuantileHistMaker::Builder::ApplySplit(const std::vector nodes, const GHistIndexMatrix& gmat, const ColumnMatrix& column_matrix, - const HistCollection& hist, + const HistCollection&, RegTree* p_tree) { builder_monitor_.Start("ApplySplit"); // 1. Find split condition for each split @@ -1236,7 +1237,8 @@ void QuantileHistMaker::Builder::InitNewNode(int nid, GHistRowT hist = hist_[nid]; GradientPairT grad_stat; if (tree[nid].IsRoot()) { - if (data_layout_ == kDenseDataZeroBased || data_layout_ == kDenseDataOneBased) { + if (data_layout_ == DataLayout::kDenseDataZeroBased + || data_layout_ == DataLayout::kDenseDataOneBased) { const std::vector& row_ptr = gmat.cut.Ptrs(); const uint32_t ibegin = row_ptr[fid_least_bins_]; const uint32_t iend = row_ptr[fid_least_bins_ + 1]; diff --git a/src/tree/updater_quantile_hist.h b/src/tree/updater_quantile_hist.h index 734c3fa51..ed906cb90 100644 --- a/src/tree/updater_quantile_hist.h +++ b/src/tree/updater_quantile_hist.h @@ -99,7 +99,7 @@ class DistributedHistRowsAdder; // training parameters specific to this algorithm struct CPUHistMakerTrainParam : public XGBoostParameter { - bool single_precision_histogram; + bool single_precision_histogram = false; // declare parameters DMLC_DECLARE_PARAMETER(CPUHistMakerTrainParam) { DMLC_DECLARE_FIELD(single_precision_histogram).set_default(false).describe( @@ -127,7 +127,7 @@ class QuantileHistMaker: public TreeUpdater { FromJson(config.at("train_param"), &this->param_); try { FromJson(config.at("cpu_hist_train_param"), &this->hist_maker_param_); - } catch (std::out_of_range& e) { + } catch (std::out_of_range&) { // XGBoost model is from 1.1.x, so 'cpu_hist_train_param' is missing. // We add this compatibility check because it's just recently that we (developers) began // persuade R users away from using saveRDS() for model serialization. Hopefully, one day, @@ -191,7 +191,7 @@ class QuantileHistMaker: public TreeUpdater { /*! 
\brief current best solution */ SplitEntry best; // constructor - explicit NodeEntry(const TrainParam& param) + explicit NodeEntry(const TrainParam&) : root_gain(0.0f), weight(0.0f) {} }; // actual builder that runs the algorithm @@ -229,7 +229,8 @@ class QuantileHistMaker: public TreeUpdater { if (param_.enable_feature_grouping > 0) { hist_builder_.BuildBlockHist(gpair, row_indices, gmatb, hist); } else { - hist_builder_.BuildHist(gpair, row_indices, gmat, hist, data_layout_ != kSparseData); + hist_builder_.BuildHist(gpair, row_indices, gmat, hist, + data_layout_ != DataLayout::kSparseData); } } @@ -442,7 +443,7 @@ class QuantileHistMaker: public TreeUpdater { // list of nodes whose histograms would be built explicitly. std::vector nodes_for_explicit_hist_build_; - enum DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData }; + enum class DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData }; DataLayout data_layout_; common::Monitor builder_monitor_; diff --git a/src/tree/updater_refresh.cc b/src/tree/updater_refresh.cc index d63d88c80..b51fb3a3c 100644 --- a/src/tree/updater_refresh.cc +++ b/src/tree/updater_refresh.cc @@ -113,7 +113,7 @@ class TreeRefresher: public TreeUpdater { inline static void AddStats(const RegTree &tree, const RegTree::FVec &feat, const std::vector &gpair, - const MetaInfo &info, + const MetaInfo&, const bst_uint ridx, GradStats *gstats) { // start from groups that belongs to current data diff --git a/src/tree/updater_sync.cc b/src/tree/updater_sync.cc index 578bfb83c..7979d10c2 100644 --- a/src/tree/updater_sync.cc +++ b/src/tree/updater_sync.cc @@ -22,17 +22,17 @@ DMLC_REGISTRY_FILE_TAG(updater_sync); */ class TreeSyncher: public TreeUpdater { public: - void Configure(const Args& args) override {} + void Configure(const Args&) override {} - void LoadConfig(Json const& in) override {} - void SaveConfig(Json* p_out) const override {} + void LoadConfig(Json const&) override {} + void SaveConfig(Json*) const override {} char const* Name() const override { return "prune"; } - void Update(HostDeviceVector *gpair, - DMatrix* dmat, + void Update(HostDeviceVector* , + DMatrix*, const std::vector &trees) override { if (rabit::GetWorldSize() == 1) return; std::string s_model; diff --git a/tests/cpp/tree/test_quantile_hist.cc b/tests/cpp/tree/test_quantile_hist.cc index 6f91504a4..ae51ae787 100644 --- a/tests/cpp/tree/test_quantile_hist.cc +++ b/tests/cpp/tree/test_quantile_hist.cc @@ -40,7 +40,7 @@ class QuantileHistMock : public QuantileHistMaker { DMatrix* p_fmat, const RegTree& tree) { RealImpl::InitData(gmat, gpair, *p_fmat, tree); - ASSERT_EQ(this->data_layout_, RealImpl::kSparseData); + ASSERT_EQ(this->data_layout_, RealImpl::DataLayout::kSparseData); /* The creation of HistCutMatrix and GHistIndexMatrix are not technically * part of QuantileHist updater logic, but we include it here because
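
Notes on the techniques used above (illustrative sketches only, not part of the patch).

The json.h hunks add `noexcept` to the move constructors of the JSON value types and give `boolean_` an in-class initializer (`bool boolean_ = false;`). A minimal standalone sketch of both ideas follows; the `Blob` class, its members, and `main` are hypothetical and not taken from xgboost. Declaring the move constructor `noexcept` lets standard containers relocate elements by moving rather than copying (via `std::move_if_noexcept`) and addresses the kind of diagnostic raised by clang-tidy's performance-noexcept-move-constructor check, while the default member initializer avoids "member may be used uninitialized" warnings.

```cpp
#include <string>
#include <type_traits>
#include <utility>
#include <vector>

// Hypothetical value type, loosely modelled on the JsonString/JsonBoolean
// classes touched by the patch.
class Blob {
 public:
  Blob() = default;
  explicit Blob(std::string payload) : payload_{std::move(payload)} {}

  Blob(Blob const& that) = default;
  // noexcept move: during reallocation std::vector uses std::move_if_noexcept,
  // so elements are moved instead of copied, and analyzers stop warning about
  // potentially throwing move operations.
  Blob(Blob&& that) noexcept
      : payload_{std::move(that.payload_)}, flag_{that.flag_} {}

 private:
  std::string payload_;
  bool flag_ = false;  // in-class initializer, mirroring `bool boolean_ = false;`
};

static_assert(std::is_nothrow_move_constructible<Blob>::value,
              "reallocation moves instead of copies");

int main() {
  std::vector<Blob> values;
  values.reserve(1);
  values.emplace_back("first");
  values.emplace_back("second");  // triggers reallocation; elements are moved
  return 0;
}
```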
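Several hunks (rabit/src/c_api.cc, rabit/src/engine.cc, src/common/host_device_vector.cc, src/common/json.cc, src/tree/updater_sync.cc) drop the names of parameters that an override receives but never uses; the `catch (std::out_of_range&)` change in updater_quantile_hist.h applies the same idea to an unused exception object. Below is a hedged sketch with simplified, made-up stand-ins for rabit's `Stream`/`Serializable` pair; only the shape of the interface matters.

```cpp
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

// Made-up stand-ins for the rabit interfaces; real signatures differ.
class Stream {};

class Serializable {
 public:
  virtual ~Serializable() = default;
  virtual void Load(Stream* fi) = 0;
  virtual void Save(Stream* fo) const = 0;
};

class WriteWrapper : public Serializable {
 public:
  // Unnamed parameter: the override must keep the signature, but omitting the
  // name documents "intentionally unused" and avoids -Wunused-parameter.
  void Load(Stream*) override { std::cerr << "not implemented\n"; }
  void Save(Stream* fo) const override {
    (void)fo;  // alternatives: cast to void, or [[maybe_unused]] Stream* fo
  }
};

int main() {
  WriteWrapper w;
  Stream s;
  w.Load(&s);
  w.Save(&s);

  // Same idea for exceptions: catching by unnamed reference keeps the type
  // check but avoids an "unused variable 'e'" warning.
  std::map<std::string, int> config{{"train_param", 1}};
  try {
    (void)config.at("cpu_hist_train_param");
  } catch (std::out_of_range&) {
    std::cerr << "key missing, falling back to defaults\n";
  }
  return 0;
}
```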
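The updater_quantile_hist changes convert the plain `enum DataLayout` into an `enum class` (so every use must be qualified, e.g. `DataLayout::kSparseData`) and bind histogram-build entries as `const auto&` instead of copying them each iteration. A small sketch under those assumptions; `ExpandEntry` here is a simplified stand-in for the real struct, and the variable names merely echo the ones in the patch.

```cpp
#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

// Scoped enum, as in the DataLayout change: enumerators no longer leak into
// the enclosing scope and no longer convert implicitly to int.
enum class DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData };

// Simplified stand-in for the real ExpandEntry; copying it per iteration is
// exactly what binding by const reference avoids.
struct ExpandEntry {
  std::int32_t nid;
  std::string note;
};

int main() {
  DataLayout data_layout = DataLayout::kSparseData;  // must be qualified now
  bool is_dense = data_layout == DataLayout::kDenseDataZeroBased ||
                  data_layout == DataLayout::kDenseDataOneBased;
  (void)is_dense;

  std::vector<ExpandEntry> nodes_for_explicit_hist_build{
      {0, "root"}, {1, "left"}, {2, "right"}};
  for (std::size_t node = 0; node < nodes_for_explicit_hist_build.size(); ++node) {
    // Reference binding instead of `const auto entry = ...`: no per-iteration
    // copy (the pattern flagged by clang-tidy's
    // performance-unnecessary-copy-initialization check).
    const auto& entry = nodes_for_explicit_hist_build[node];
    (void)entry.nid;
  }
  return 0;
}
```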