Reduced some C++ compiler warnings (#6197)
* Removed some compiler warnings
* Rebased with master
* Fixed C++ Google Test errors caused by the warning-removal refactoring
* Undid the renaming path -> path_
* Fixed the style check

Co-authored-by: Hyunsu Cho <chohyu01@cs.washington.edu>
commit b181a88f9f (parent c80657b542)
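The hunks below apply a few recurring warning-removal patterns: `noexcept` on move constructors, unnamed parameters for intentionally unused arguments, default member initializers, binding by reference instead of copying, and scoping enums with `enum class`. A minimal sketch of the same patterns, for orientation only; the `Widget` and `Layout` names are illustrative and do not exist in the XGBoost codebase:

#include <string>
#include <utility>
#include <vector>

// Illustrative example only; Widget/Layout are not XGBoost types.

// enum class scopes its enumerators, so every use must be qualified
// (Layout::kSparse); this prevents name clashes and implicit int conversion.
enum class Layout { kDenseZeroBased, kDenseOneBased, kSparse };

class Widget {
 public:
  Widget() = default;
  // noexcept on the move constructor documents that moving cannot throw and
  // lets std::vector move (rather than copy) elements when it reallocates.
  Widget(Widget&& that) noexcept : name_{std::move(that.name_)} {}
  // Leaving the parameter unnamed marks it as intentionally unused, which
  // silences -Wunused-parameter without casts or attributes.
  void SetDevice(int /*device*/) const {}
  bool IsReady() const { return ready_; }

 private:
  std::string name_;
  // A default member initializer guarantees a defined value even if a
  // constructor forgets to set the field.
  bool ready_ = false;
};

int main() {
  std::vector<Widget> widgets(3);
  // Binding by const reference instead of `const auto entry = ...`
  // avoids copying each element, as in the ParallelFor2d lambdas below.
  for (const auto& entry : widgets) {
    entry.SetDevice(0);
  }
  return 0;
}

Note that switching to `enum class` forces every former bare `kSparseData` mention to be spelled `DataLayout::kSparseData`, which accounts for the mechanical changes in the QuantileHistMaker hunks below.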
@@ -84,7 +84,7 @@ class JsonString : public Value {
       Value(ValueKind::kString), str_{str} {}
   JsonString(std::string&& str) :  // NOLINT
       Value(ValueKind::kString), str_{std::move(str)} {}
-  JsonString(JsonString&& str) :  // NOLINT
+  JsonString(JsonString&& str) noexcept :  // NOLINT
       Value(ValueKind::kString), str_{std::move(str.str_)} {}
 
   void Save(JsonWriter* writer) override;
@@ -179,7 +179,7 @@ class JsonNumber : public Value {
   JsonNumber(FloatT value) : Value{ValueKind::kNumber},  // NOLINT
                              number_{static_cast<Float>(value)} {}
   JsonNumber(JsonNumber const& that) = delete;
-  JsonNumber(JsonNumber&& that) : Value{ValueKind::kNumber}, number_{that.number_} {}
+  JsonNumber(JsonNumber&& that) noexcept : Value{ValueKind::kNumber}, number_{that.number_} {}
 
   void Save(JsonWriter* writer) override;
 
@@ -227,7 +227,7 @@ class JsonInteger : public Value {
       : Value(ValueKind::kInteger),
         integer_{static_cast<Int>(value)} {}
 
-  JsonInteger(JsonInteger &&that)
+  JsonInteger(JsonInteger &&that) noexcept
       : Value{ValueKind::kInteger}, integer_{that.integer_} {}
 
   Json& operator[](std::string const & key) override;
@@ -250,7 +250,7 @@ class JsonNull : public Value {
  public:
   JsonNull() : Value(ValueKind::kNull) {}
   JsonNull(std::nullptr_t) : Value(ValueKind::kNull) {}  // NOLINT
-  JsonNull(JsonNull&&) : Value(ValueKind::kNull) {}
+  JsonNull(JsonNull&&) noexcept : Value(ValueKind::kNull) {}
 
   void Save(JsonWriter* writer) override;
 
@@ -267,7 +267,7 @@ class JsonNull : public Value {
 
 /*! \brief Describes both true and false. */
 class JsonBoolean : public Value {
-  bool boolean_;
+  bool boolean_ = false;
 
  public:
   JsonBoolean() : Value(ValueKind::kBoolean) {}  // NOLINT
@@ -278,7 +278,7 @@ class JsonBoolean : public Value {
                 std::is_same<Bool, bool const>::value>::type* = nullptr>
   JsonBoolean(Bool value) :  // NOLINT
       Value(ValueKind::kBoolean), boolean_{value} {}
-  JsonBoolean(JsonBoolean&& value) :  // NOLINT
+  JsonBoolean(JsonBoolean&& value) noexcept:  // NOLINT
       Value(ValueKind::kBoolean), boolean_{value.boolean_} {}
 
   void Save(JsonWriter* writer) override;
@@ -26,10 +26,10 @@ struct FHelper {
 template<typename DType>
 struct FHelper<op::BitOR, DType> {
   static void
-  Allreduce(DType *senrecvbuf_,
-            size_t count,
-            void (*prepare_fun)(void *arg),
-            void *prepare_arg) {
+  Allreduce(DType *,
+            size_t ,
+            void (*)(void *arg),
+            void *) {
     utils::Error("DataType does not support bitwise or operation");
   }
 };
@@ -196,7 +196,7 @@ struct ReadWrapper : public Serializable {
                  "Read pickle string");
     }
   }
-  void Save(Stream *fo) const override {
+  void Save(Stream *) const override {
     utils::Error("not implemented");
   }
 };
@@ -208,7 +208,7 @@ struct WriteWrapper : public Serializable {
                size_t length)
       : data(data), length(length) {
   }
-  void Load(Stream *fi) override {
+  void Load(Stream *) override {
     utils::Error("not implemented");
   }
   void Save(Stream *fo) const override {
@@ -96,8 +96,8 @@ void Allreduce_(void *sendrecvbuf,  // NOLINT
                 size_t type_nbytes,
                 size_t count,
                 IEngine::ReduceFunction red,
-                mpi::DataType dtype,
-                mpi::OpType op,
+                mpi::DataType,
+                mpi::OpType ,
                 IEngine::PreprocFunction prepare_fun,
                 void *prepare_arg) {
   GetEngine()->Allreduce(sendrecvbuf, type_nbytes, count, red, prepare_fun,
@@ -112,7 +112,7 @@ int ReduceHandle::TypeSize(const MPI::Datatype &dtype) {
   return static_cast<int>(dtype.type_size);
 }
 
-void ReduceHandle::Init(IEngine::ReduceFunction redfunc, size_t type_nbytes) {
+void ReduceHandle::Init(IEngine::ReduceFunction redfunc, size_t ) {
   utils::Assert(redfunc_ == nullptr, "cannot initialize reduce handle twice");
   redfunc_ = redfunc;
 }
@@ -33,19 +33,19 @@ struct HostDeviceVectorImpl {
 };
 
 template <typename T>
-HostDeviceVector<T>::HostDeviceVector(size_t size, T v, int device)
+HostDeviceVector<T>::HostDeviceVector(size_t size, T v, int)
     : impl_(nullptr) {
   impl_ = new HostDeviceVectorImpl<T>(size, v);
 }
 
 template <typename T>
-HostDeviceVector<T>::HostDeviceVector(std::initializer_list<T> init, int device)
+HostDeviceVector<T>::HostDeviceVector(std::initializer_list<T> init, int)
     : impl_(nullptr) {
   impl_ = new HostDeviceVectorImpl<T>(init);
 }
 
 template <typename T>
-HostDeviceVector<T>::HostDeviceVector(const std::vector<T>& init, int device)
+HostDeviceVector<T>::HostDeviceVector(const std::vector<T>& init, int)
     : impl_(nullptr) {
   impl_ = new HostDeviceVectorImpl<T>(init);
 }
@@ -166,7 +166,7 @@ bool HostDeviceVector<T>::DeviceCanWrite() const {
 }
 
 template <typename T>
-void HostDeviceVector<T>::SetDevice(int device) const {}
+void HostDeviceVector<T>::SetDevice(int) const {}
 
 // explicit instantiations are required, as HostDeviceVector isn't header-only
 template class HostDeviceVector<bst_float>;
@@ -76,7 +76,7 @@ void JsonWriter::Visit(JsonInteger const* num) {
   std::memcpy(stream_->data() + ori_size, i2s_buffer_, digits);
 }
 
-void JsonWriter::Visit(JsonNull const* null) {
+void JsonWriter::Visit(JsonNull const* ) {
   auto s = stream_->size();
   stream_->resize(s + 4);
   auto& buf = (*stream_);
@@ -179,7 +179,7 @@ Json& JsonObject::operator[](std::string const & key) {
   return object_[key];
 }
 
-Json& JsonObject::operator[](int ind) {
+Json& JsonObject::operator[](int ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by Integer.";
   return DummyJsonObject();
@@ -203,13 +203,13 @@ void JsonObject::Save(JsonWriter* writer) {
 }
 
 // Json String
-Json& JsonString::operator[](std::string const & key) {
+Json& JsonString::operator[](std::string const& ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by string.";
   return DummyJsonObject();
 }
 
-Json& JsonString::operator[](int ind) {
+Json& JsonString::operator[](int ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by Integer."
              << " Please try obtaining std::string first.";
@@ -236,7 +236,7 @@ void JsonString::Save(JsonWriter* writer) {
 JsonArray::JsonArray(JsonArray && that) :
     Value(ValueKind::kArray), vec_{std::move(that.vec_)} {}
 
-Json& JsonArray::operator[](std::string const & key) {
+Json& JsonArray::operator[](std::string const& ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by string.";
   return DummyJsonObject();
@@ -263,13 +263,13 @@ void JsonArray::Save(JsonWriter* writer) {
 }
 
 // Json Number
-Json& JsonNumber::operator[](std::string const & key) {
+Json& JsonNumber::operator[](std::string const& ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by string.";
   return DummyJsonObject();
 }
 
-Json& JsonNumber::operator[](int ind) {
+Json& JsonNumber::operator[](int ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by Integer.";
   return DummyJsonObject();
@@ -298,13 +298,13 @@ void JsonNumber::Save(JsonWriter* writer) {
 }
 
 // Json Integer
-Json& JsonInteger::operator[](std::string const& key) {
+Json& JsonInteger::operator[](std::string const& ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by string.";
   return DummyJsonObject();
 }
 
-Json& JsonInteger::operator[](int ind) {
+Json& JsonInteger::operator[](int ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by Integer.";
   return DummyJsonObject();
@@ -326,13 +326,13 @@ void JsonInteger::Save(JsonWriter* writer) {
 }
 
 // Json Null
-Json& JsonNull::operator[](std::string const & key) {
+Json& JsonNull::operator[](std::string const& ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by string.";
   return DummyJsonObject();
 }
 
-Json& JsonNull::operator[](int ind) {
+Json& JsonNull::operator[](int ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by Integer.";
   return DummyJsonObject();
@@ -353,13 +353,13 @@ void JsonNull::Save(JsonWriter* writer) {
 }
 
 // Json Boolean
-Json& JsonBoolean::operator[](std::string const & key) {
+Json& JsonBoolean::operator[](std::string const& ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by string.";
   return DummyJsonObject();
 }
 
-Json& JsonBoolean::operator[](int ind) {
+Json& JsonBoolean::operator[](int ) {
   LOG(FATAL) << "Object of type "
              << Value::TypeStr() << " can not be indexed by Integer.";
   return DummyJsonObject();
@@ -137,7 +137,7 @@ void BatchHistSynchronizer<GradientSumT>::SyncHistograms(BuilderT *builder,
   }, 1024);
 
   common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) {
-    const auto entry = builder->nodes_for_explicit_hist_build_[node];
+    const auto& entry = builder->nodes_for_explicit_hist_build_[node];
     auto this_hist = builder->hist_[entry.nid];
     // Merging histograms from each thread into once
    builder->hist_buffer_.ReduceHist(node, r.begin(), r.end());
@@ -163,7 +163,7 @@ void DistributedHistSynchronizer<GradientSumT>::SyncHistograms(BuilderT* builder
     return nbins;
   }, 1024);
   common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) {
-    const auto entry = builder->nodes_for_explicit_hist_build_[node];
+    const auto& entry = builder->nodes_for_explicit_hist_build_[node];
     auto this_hist = builder->hist_[entry.nid];
     // Merging histograms from each thread into once
     builder->hist_buffer_.ReduceHist(node, r.begin(), r.end());
@@ -202,7 +202,7 @@ void DistributedHistSynchronizer<GradientSumT>::ParallelSubtractionHist(
     const std::vector<ExpandEntryT>& nodes,
     const RegTree * p_tree) {
   common::ParallelFor2d(space, builder->nthread_, [&](size_t node, common::Range1d r) {
-    const auto entry = nodes[node];
+    const auto& entry = nodes[node];
     if (!((*p_tree)[entry.nid].IsLeftChild())) {
       auto this_hist = builder->hist_[entry.nid];
 
@@ -827,18 +827,18 @@ void QuantileHistMaker::Builder<GradientSumT>::InitData(const GHistIndexMatrix&
     const uint32_t nbins_f0 = gmat.cut.Ptrs()[1] - gmat.cut.Ptrs()[0];
     if (nrow * ncol == nnz) {
       // dense data with zero-based indexing
-      data_layout_ = kDenseDataZeroBased;
+      data_layout_ = DataLayout::kDenseDataZeroBased;
     } else if (nbins_f0 == 0 && nrow * (ncol - 1) == nnz) {
       // dense data with one-based indexing
-      data_layout_ = kDenseDataOneBased;
+      data_layout_ = DataLayout::kDenseDataOneBased;
     } else {
       // sparse data
-      data_layout_ = kSparseData;
+      data_layout_ = DataLayout::kSparseData;
     }
   }
   // store a pointer to the tree
   p_last_tree_ = &tree;
-  if (data_layout_ == kDenseDataOneBased) {
+  if (data_layout_ == DataLayout::kDenseDataOneBased) {
     column_sampler_.Init(info.num_col_, info.feature_weigths.ConstHostVector(),
                          param_.colsample_bynode, param_.colsample_bylevel,
                          param_.colsample_bytree, true);
@@ -847,7 +847,8 @@ void QuantileHistMaker::Builder<GradientSumT>::InitData(const GHistIndexMatrix&
                          param_.colsample_bynode, param_.colsample_bylevel,
                          param_.colsample_bytree, false);
   }
-  if (data_layout_ == kDenseDataZeroBased || data_layout_ == kDenseDataOneBased) {
+  if (data_layout_ == DataLayout::kDenseDataZeroBased
+      || data_layout_ == DataLayout::kDenseDataOneBased) {
     /* specialized code for dense data:
        choose the column that has a least positive number of discrete bins.
        For dense data (with no missing value),
@@ -1138,9 +1139,9 @@ void QuantileHistMaker::Builder<GradientSumT>::FindSplitConditions(
       // split_cond = -1 indicates that split_pt is less than all known cut points
       CHECK_LT(upper_bound,
               static_cast<uint32_t>(std::numeric_limits<int32_t>::max()));
-      for (uint32_t i = lower_bound; i < upper_bound; ++i) {
-        if (split_pt == gmat.cut.Values()[i]) {
-          split_cond = static_cast<int32_t>(i);
+      for (uint32_t bound = lower_bound; bound < upper_bound; ++bound) {
+        if (split_pt == gmat.cut.Values()[bound]) {
+          split_cond = static_cast<int32_t>(bound);
         }
       }
       (*split_conditions)[i] = split_cond;
@@ -1151,7 +1152,7 @@ void QuantileHistMaker::Builder<GradientSumT>::AddSplitsToRowSet(
     const std::vector<ExpandEntry>& nodes,
     RegTree* p_tree) {
   const size_t n_nodes = nodes.size();
-  for (size_t i = 0; i < n_nodes; ++i) {
+  for (unsigned int i = 0; i < n_nodes; ++i) {
     const int32_t nid = nodes[i].nid;
     const size_t n_left = partition_builder_.GetNLeftElems(i);
     const size_t n_right = partition_builder_.GetNRightElems(i);
@@ -1165,7 +1166,7 @@ template <typename GradientSumT>
 void QuantileHistMaker::Builder<GradientSumT>::ApplySplit(const std::vector<ExpandEntry> nodes,
                                                           const GHistIndexMatrix& gmat,
                                                           const ColumnMatrix& column_matrix,
-                                                          const HistCollection<GradientSumT>& hist,
+                                                          const HistCollection<GradientSumT>&,
                                                           RegTree* p_tree) {
   builder_monitor_.Start("ApplySplit");
   // 1. Find split condition for each split
@@ -1236,7 +1237,8 @@ void QuantileHistMaker::Builder<GradientSumT>::InitNewNode(int nid,
   GHistRowT hist = hist_[nid];
   GradientPairT grad_stat;
   if (tree[nid].IsRoot()) {
-    if (data_layout_ == kDenseDataZeroBased || data_layout_ == kDenseDataOneBased) {
+    if (data_layout_ == DataLayout::kDenseDataZeroBased
+        || data_layout_ == DataLayout::kDenseDataOneBased) {
       const std::vector<uint32_t>& row_ptr = gmat.cut.Ptrs();
       const uint32_t ibegin = row_ptr[fid_least_bins_];
       const uint32_t iend = row_ptr[fid_least_bins_ + 1];
@@ -99,7 +99,7 @@ class DistributedHistRowsAdder;
 // training parameters specific to this algorithm
 struct CPUHistMakerTrainParam
     : public XGBoostParameter<CPUHistMakerTrainParam> {
-  bool single_precision_histogram;
+  bool single_precision_histogram = false;
   // declare parameters
   DMLC_DECLARE_PARAMETER(CPUHistMakerTrainParam) {
     DMLC_DECLARE_FIELD(single_precision_histogram).set_default(false).describe(
@@ -127,7 +127,7 @@ class QuantileHistMaker: public TreeUpdater {
     FromJson(config.at("train_param"), &this->param_);
     try {
       FromJson(config.at("cpu_hist_train_param"), &this->hist_maker_param_);
-    } catch (std::out_of_range& e) {
+    } catch (std::out_of_range&) {
       // XGBoost model is from 1.1.x, so 'cpu_hist_train_param' is missing.
       // We add this compatibility check because it's just recently that we (developers) began
       // persuade R users away from using saveRDS() for model serialization. Hopefully, one day,
@@ -191,7 +191,7 @@ class QuantileHistMaker: public TreeUpdater {
     /*! \brief current best solution */
     SplitEntry best;
     // constructor
-    explicit NodeEntry(const TrainParam& param)
+    explicit NodeEntry(const TrainParam&)
        : root_gain(0.0f), weight(0.0f) {}
   };
   // actual builder that runs the algorithm
@@ -229,7 +229,8 @@ class QuantileHistMaker: public TreeUpdater {
       if (param_.enable_feature_grouping > 0) {
         hist_builder_.BuildBlockHist(gpair, row_indices, gmatb, hist);
       } else {
-        hist_builder_.BuildHist(gpair, row_indices, gmat, hist, data_layout_ != kSparseData);
+        hist_builder_.BuildHist(gpair, row_indices, gmat, hist,
+                                data_layout_ != DataLayout::kSparseData);
       }
     }
 
@@ -442,7 +443,7 @@ class QuantileHistMaker: public TreeUpdater {
   // list of nodes whose histograms would be built explicitly.
   std::vector<ExpandEntry> nodes_for_explicit_hist_build_;
 
-  enum DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData };
+  enum class DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData };
   DataLayout data_layout_;
 
   common::Monitor builder_monitor_;
@@ -113,7 +113,7 @@ class TreeRefresher: public TreeUpdater {
   inline static void AddStats(const RegTree &tree,
                               const RegTree::FVec &feat,
                               const std::vector<GradientPair> &gpair,
-                              const MetaInfo &info,
+                              const MetaInfo&,
                               const bst_uint ridx,
                               GradStats *gstats) {
     // start from groups that belongs to current data
@@ -22,17 +22,17 @@ DMLC_REGISTRY_FILE_TAG(updater_sync);
  */
 class TreeSyncher: public TreeUpdater {
  public:
-  void Configure(const Args& args) override {}
+  void Configure(const Args&) override {}
 
-  void LoadConfig(Json const& in) override {}
-  void SaveConfig(Json* p_out) const override {}
+  void LoadConfig(Json const&) override {}
+  void SaveConfig(Json*) const override {}
 
   char const* Name() const override {
     return "prune";
   }
 
-  void Update(HostDeviceVector<GradientPair> *gpair,
-              DMatrix* dmat,
+  void Update(HostDeviceVector<GradientPair>* ,
+              DMatrix*,
               const std::vector<RegTree*> &trees) override {
     if (rabit::GetWorldSize() == 1) return;
     std::string s_model;
@@ -40,7 +40,7 @@ class QuantileHistMock : public QuantileHistMaker {
       DMatrix* p_fmat,
       const RegTree& tree) {
     RealImpl::InitData(gmat, gpair, *p_fmat, tree);
-    ASSERT_EQ(this->data_layout_, RealImpl::kSparseData);
+    ASSERT_EQ(this->data_layout_, RealImpl::DataLayout::kSparseData);
 
     /* The creation of HistCutMatrix and GHistIndexMatrix are not technically
      * part of QuantileHist updater logic, but we include it here because