Reduced some C++ compiler warnings (#6197)

* Removed some warnings

* Rebase with master

* Fixed C++ Google Test errors caused by the refactoring done to remove warnings

* Undo renaming path -> path_

* Fix style check

Co-authored-by: Hyunsu Cho <chohyu01@cs.washington.edu>
This commit is contained in:
Sergio Gavilán
2020-10-29 20:36:00 +01:00
committed by GitHub
parent c80657b542
commit b181a88f9f
10 changed files with 61 additions and 58 deletions

View File

@@ -33,19 +33,19 @@ struct HostDeviceVectorImpl {
};
template <typename T>
HostDeviceVector<T>::HostDeviceVector(size_t size, T v, int device)
HostDeviceVector<T>::HostDeviceVector(size_t size, T v, int)
: impl_(nullptr) {
impl_ = new HostDeviceVectorImpl<T>(size, v);
}
template <typename T>
HostDeviceVector<T>::HostDeviceVector(std::initializer_list<T> init, int device)
HostDeviceVector<T>::HostDeviceVector(std::initializer_list<T> init, int)
: impl_(nullptr) {
impl_ = new HostDeviceVectorImpl<T>(init);
}
template <typename T>
HostDeviceVector<T>::HostDeviceVector(const std::vector<T>& init, int device)
HostDeviceVector<T>::HostDeviceVector(const std::vector<T>& init, int)
: impl_(nullptr) {
impl_ = new HostDeviceVectorImpl<T>(init);
}
@@ -166,7 +166,7 @@ bool HostDeviceVector<T>::DeviceCanWrite() const {
}
template <typename T>
void HostDeviceVector<T>::SetDevice(int device) const {}
void HostDeviceVector<T>::SetDevice(int) const {}
// explicit instantiations are required, as HostDeviceVector isn't header-only
template class HostDeviceVector<bst_float>;

View File

@@ -76,7 +76,7 @@ void JsonWriter::Visit(JsonInteger const* num) {
std::memcpy(stream_->data() + ori_size, i2s_buffer_, digits);
}
void JsonWriter::Visit(JsonNull const* null) {
void JsonWriter::Visit(JsonNull const* ) {
auto s = stream_->size();
stream_->resize(s + 4);
auto& buf = (*stream_);
@@ -179,7 +179,7 @@ Json& JsonObject::operator[](std::string const & key) {
return object_[key];
}
Json& JsonObject::operator[](int ind) {
Json& JsonObject::operator[](int ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by Integer.";
return DummyJsonObject();
@@ -203,13 +203,13 @@ void JsonObject::Save(JsonWriter* writer) {
}
// Json String
Json& JsonString::operator[](std::string const & key) {
Json& JsonString::operator[](std::string const& ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by string.";
return DummyJsonObject();
}
Json& JsonString::operator[](int ind) {
Json& JsonString::operator[](int ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by Integer."
<< " Please try obtaining std::string first.";
@@ -236,7 +236,7 @@ void JsonString::Save(JsonWriter* writer) {
JsonArray::JsonArray(JsonArray && that) :
Value(ValueKind::kArray), vec_{std::move(that.vec_)} {}
Json& JsonArray::operator[](std::string const & key) {
Json& JsonArray::operator[](std::string const& ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by string.";
return DummyJsonObject();
@@ -263,13 +263,13 @@ void JsonArray::Save(JsonWriter* writer) {
}
// Json Number
Json& JsonNumber::operator[](std::string const & key) {
Json& JsonNumber::operator[](std::string const& ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by string.";
return DummyJsonObject();
}
Json& JsonNumber::operator[](int ind) {
Json& JsonNumber::operator[](int ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by Integer.";
return DummyJsonObject();
@@ -298,13 +298,13 @@ void JsonNumber::Save(JsonWriter* writer) {
}
// Json Integer
Json& JsonInteger::operator[](std::string const& key) {
Json& JsonInteger::operator[](std::string const& ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by string.";
return DummyJsonObject();
}
Json& JsonInteger::operator[](int ind) {
Json& JsonInteger::operator[](int ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by Integer.";
return DummyJsonObject();
@@ -326,13 +326,13 @@ void JsonInteger::Save(JsonWriter* writer) {
}
// Json Null
Json& JsonNull::operator[](std::string const & key) {
Json& JsonNull::operator[](std::string const& ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by string.";
return DummyJsonObject();
}
Json& JsonNull::operator[](int ind) {
Json& JsonNull::operator[](int ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by Integer.";
return DummyJsonObject();
@@ -353,13 +353,13 @@ void JsonNull::Save(JsonWriter* writer) {
}
// Json Boolean
Json& JsonBoolean::operator[](std::string const & key) {
Json& JsonBoolean::operator[](std::string const& ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by string.";
return DummyJsonObject();
}
Json& JsonBoolean::operator[](int ind) {
Json& JsonBoolean::operator[](int ) {
LOG(FATAL) << "Object of type "
<< Value::TypeStr() << " can not be indexed by Integer.";
return DummyJsonObject();

View File

@@ -137,7 +137,7 @@ void BatchHistSynchronizer<GradientSumT>::SyncHistograms(BuilderT *builder,
}, 1024);
common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) {
const auto entry = builder->nodes_for_explicit_hist_build_[node];
const auto& entry = builder->nodes_for_explicit_hist_build_[node];
auto this_hist = builder->hist_[entry.nid];
// Merging histograms from each thread into one
builder->hist_buffer_.ReduceHist(node, r.begin(), r.end());
@@ -163,7 +163,7 @@ void DistributedHistSynchronizer<GradientSumT>::SyncHistograms(BuilderT* builder
return nbins;
}, 1024);
common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) {
const auto entry = builder->nodes_for_explicit_hist_build_[node];
const auto& entry = builder->nodes_for_explicit_hist_build_[node];
auto this_hist = builder->hist_[entry.nid];
// Merging histograms from each thread into one
builder->hist_buffer_.ReduceHist(node, r.begin(), r.end());
@@ -202,7 +202,7 @@ void DistributedHistSynchronizer<GradientSumT>::ParallelSubtractionHist(
const std::vector<ExpandEntryT>& nodes,
const RegTree * p_tree) {
common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) {
const auto entry = nodes[node];
const auto& entry = nodes[node];
if (!((*p_tree)[entry.nid].IsLeftChild())) {
auto this_hist = builder->hist_[entry.nid];
@@ -827,18 +827,18 @@ void QuantileHistMaker::Builder<GradientSumT>::InitData(const GHistIndexMatrix&
const uint32_t nbins_f0 = gmat.cut.Ptrs()[1] - gmat.cut.Ptrs()[0];
if (nrow * ncol == nnz) {
// dense data with zero-based indexing
data_layout_ = kDenseDataZeroBased;
data_layout_ = DataLayout::kDenseDataZeroBased;
} else if (nbins_f0 == 0 && nrow * (ncol - 1) == nnz) {
// dense data with one-based indexing
data_layout_ = kDenseDataOneBased;
data_layout_ = DataLayout::kDenseDataOneBased;
} else {
// sparse data
data_layout_ = kSparseData;
data_layout_ = DataLayout::kSparseData;
}
}
// store a pointer to the tree
p_last_tree_ = &tree;
if (data_layout_ == kDenseDataOneBased) {
if (data_layout_ == DataLayout::kDenseDataOneBased) {
column_sampler_.Init(info.num_col_, info.feature_weigths.ConstHostVector(),
param_.colsample_bynode, param_.colsample_bylevel,
param_.colsample_bytree, true);
@@ -847,7 +847,8 @@ void QuantileHistMaker::Builder<GradientSumT>::InitData(const GHistIndexMatrix&
param_.colsample_bynode, param_.colsample_bylevel,
param_.colsample_bytree, false);
}
if (data_layout_ == kDenseDataZeroBased || data_layout_ == kDenseDataOneBased) {
if (data_layout_ == DataLayout::kDenseDataZeroBased
|| data_layout_ == DataLayout::kDenseDataOneBased) {
/* specialized code for dense data:
choose the column that has a least positive number of discrete bins.
For dense data (with no missing value),
@@ -1138,9 +1139,9 @@ void QuantileHistMaker::Builder<GradientSumT>::FindSplitConditions(
// split_cond = -1 indicates that split_pt is less than all known cut points
CHECK_LT(upper_bound,
static_cast<uint32_t>(std::numeric_limits<int32_t>::max()));
for (uint32_t i = lower_bound; i < upper_bound; ++i) {
if (split_pt == gmat.cut.Values()[i]) {
split_cond = static_cast<int32_t>(i);
for (uint32_t bound = lower_bound; bound < upper_bound; ++bound) {
if (split_pt == gmat.cut.Values()[bound]) {
split_cond = static_cast<int32_t>(bound);
}
}
(*split_conditions)[i] = split_cond;
@@ -1151,7 +1152,7 @@ void QuantileHistMaker::Builder<GradientSumT>::AddSplitsToRowSet(
const std::vector<ExpandEntry>& nodes,
RegTree* p_tree) {
const size_t n_nodes = nodes.size();
for (size_t i = 0; i < n_nodes; ++i) {
for (unsigned int i = 0; i < n_nodes; ++i) {
const int32_t nid = nodes[i].nid;
const size_t n_left = partition_builder_.GetNLeftElems(i);
const size_t n_right = partition_builder_.GetNRightElems(i);
@@ -1165,7 +1166,7 @@ template <typename GradientSumT>
void QuantileHistMaker::Builder<GradientSumT>::ApplySplit(const std::vector<ExpandEntry> nodes,
const GHistIndexMatrix& gmat,
const ColumnMatrix& column_matrix,
const HistCollection<GradientSumT>& hist,
const HistCollection<GradientSumT>&,
RegTree* p_tree) {
builder_monitor_.Start("ApplySplit");
// 1. Find split condition for each split
@@ -1236,7 +1237,8 @@ void QuantileHistMaker::Builder<GradientSumT>::InitNewNode(int nid,
GHistRowT hist = hist_[nid];
GradientPairT grad_stat;
if (tree[nid].IsRoot()) {
if (data_layout_ == kDenseDataZeroBased || data_layout_ == kDenseDataOneBased) {
if (data_layout_ == DataLayout::kDenseDataZeroBased
|| data_layout_ == DataLayout::kDenseDataOneBased) {
const std::vector<uint32_t>& row_ptr = gmat.cut.Ptrs();
const uint32_t ibegin = row_ptr[fid_least_bins_];
const uint32_t iend = row_ptr[fid_least_bins_ + 1];

View File

@@ -99,7 +99,7 @@ class DistributedHistRowsAdder;
// training parameters specific to this algorithm
struct CPUHistMakerTrainParam
: public XGBoostParameter<CPUHistMakerTrainParam> {
bool single_precision_histogram;
bool single_precision_histogram = false;
// declare parameters
DMLC_DECLARE_PARAMETER(CPUHistMakerTrainParam) {
DMLC_DECLARE_FIELD(single_precision_histogram).set_default(false).describe(
@@ -127,7 +127,7 @@ class QuantileHistMaker: public TreeUpdater {
FromJson(config.at("train_param"), &this->param_);
try {
FromJson(config.at("cpu_hist_train_param"), &this->hist_maker_param_);
} catch (std::out_of_range& e) {
} catch (std::out_of_range&) {
// XGBoost model is from 1.1.x, so 'cpu_hist_train_param' is missing.
// We add this compatibility check because it's just recently that we (developers) began
// to persuade R users away from using saveRDS() for model serialization. Hopefully, one day,
@@ -191,7 +191,7 @@ class QuantileHistMaker: public TreeUpdater {
/*! \brief current best solution */
SplitEntry best;
// constructor
explicit NodeEntry(const TrainParam& param)
explicit NodeEntry(const TrainParam&)
: root_gain(0.0f), weight(0.0f) {}
};
// actual builder that runs the algorithm
@@ -229,7 +229,8 @@ class QuantileHistMaker: public TreeUpdater {
if (param_.enable_feature_grouping > 0) {
hist_builder_.BuildBlockHist(gpair, row_indices, gmatb, hist);
} else {
hist_builder_.BuildHist(gpair, row_indices, gmat, hist, data_layout_ != kSparseData);
hist_builder_.BuildHist(gpair, row_indices, gmat, hist,
data_layout_ != DataLayout::kSparseData);
}
}
@@ -442,7 +443,7 @@ class QuantileHistMaker: public TreeUpdater {
// list of nodes whose histograms would be built explicitly.
std::vector<ExpandEntry> nodes_for_explicit_hist_build_;
enum DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData };
enum class DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData };
DataLayout data_layout_;
common::Monitor builder_monitor_;

View File

@@ -113,7 +113,7 @@ class TreeRefresher: public TreeUpdater {
inline static void AddStats(const RegTree &tree,
const RegTree::FVec &feat,
const std::vector<GradientPair> &gpair,
const MetaInfo &info,
const MetaInfo&,
const bst_uint ridx,
GradStats *gstats) {
// start from groups that belongs to current data

View File

@@ -22,17 +22,17 @@ DMLC_REGISTRY_FILE_TAG(updater_sync);
*/
class TreeSyncher: public TreeUpdater {
public:
void Configure(const Args& args) override {}
void Configure(const Args&) override {}
void LoadConfig(Json const& in) override {}
void SaveConfig(Json* p_out) const override {}
void LoadConfig(Json const&) override {}
void SaveConfig(Json*) const override {}
char const* Name() const override {
return "prune";
}
void Update(HostDeviceVector<GradientPair> *gpair,
DMatrix* dmat,
void Update(HostDeviceVector<GradientPair>* ,
DMatrix*,
const std::vector<RegTree*> &trees) override {
if (rabit::GetWorldSize() == 1) return;
std::string s_model;