Clean up warnings (#6325)
This commit is contained in:
parent f0fe18fc28
commit 5e1e972aea
@@ -421,7 +421,7 @@ class BatchIterator {
     return *(*impl_);
   }

-  bool operator!=(const BatchIterator& rhs) const {
+  bool operator!=(const BatchIterator&) const {
     CHECK(impl_ != nullptr);
     return !impl_->AtEnd();
   }
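Note on the pattern used throughout this commit: leaving a parameter unnamed (or commenting out its name) keeps the declared interface intact while silencing -Wunused-parameter under -Wall -Wextra. A minimal standalone sketch of the idea, not xgboost code:

    // unused_param.cc -- compile with: g++ -Wall -Wextra -std=c++14 unused_param.cc
    #include <iostream>

    struct Iterator {
      bool at_end = false;
      // The right-hand side is required by the operator's signature but is not
      // consulted here, so the parameter is left unnamed to avoid the warning.
      bool operator!=(const Iterator&) const { return !at_end; }
    };

    int main() {
      Iterator a, b;
      std::cout << std::boolalpha << (a != b) << "\n";  // true
      return 0;
    }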
@@ -117,7 +117,7 @@ class Predictor {
   *
   * \param cfg The configuration.
   */
-  virtual void Configure(const std::vector<std::pair<std::string, std::string>>& cfg);
+  virtual void Configure(const std::vector<std::pair<std::string, std::string>>&);

  /**
   * \brief Generate batch predictions for a given feature matrix. May use
@@ -163,7 +163,7 @@ XGB_DLL int XGDMatrixCreateFromCSCEx(const size_t* col_ptr,
                                      const unsigned* indices,
                                      const bst_float* data,
                                      size_t nindptr,
-                                     size_t nelem,
+                                     size_t,
                                      size_t num_row,
                                      DMatrixHandle* out) {
  API_BEGIN();
@@ -163,7 +163,7 @@ void GHistIndexMatrix::Init(DMatrix* p_fmat, int max_bins) {
    } else {
      common::Span<uint32_t> index_data_span = {index.data<uint32_t>(), n_index};
      SetIndexData(index_data_span, batch_threads, batch, rbegin, nbins,
-                  [](auto idx, auto i) { return idx; });
+                  [](auto idx, auto) { return idx; });
    }

#pragma omp parallel for num_threads(nthread) schedule(static)
@@ -16,7 +16,7 @@ namespace xgboost {

const Version::TripletT Version::kInvalid {-1, -1, -1};

-Version::TripletT Version::Load(Json const& in, bool check) {
+Version::TripletT Version::Load(Json const& in) {
  if (get<Object const>(in).find("version") == get<Object const>(in).cend()) {
    return kInvalid;
  }
@@ -18,7 +18,7 @@ struct Version {
  static const TripletT kInvalid;

  // Save/Load version info to Json document
-  static TripletT Load(Json const& in, bool check = false);
+  static TripletT Load(Json const& in);
  static void Save(Json* out);

  // Save/Load version info to dmlc::Stream
@@ -37,7 +37,7 @@ class SparsePageDMatrix : public DMatrix {
  const MetaInfo& Info() const override;

  bool SingleColBlock() const override { return false; }
-  DMatrix *Slice(common::Span<int32_t const> ridxs) override {
+  DMatrix *Slice(common::Span<int32_t const>) override {
    LOG(FATAL) << "Slicing DMatrix is not supported for external memory.";
    return nullptr;
  }
@@ -49,7 +49,7 @@ class GBLinearModel : public Model {
 public:
  explicit GBLinearModel(LearnerModelParam const* learner_model_param) :
      learner_model_param {learner_model_param} {}
-  void Configure(Args const &cfg) { }
+  void Configure(Args const &) { }

  // weight for each of feature, bias is the last one
  std::vector<bst_float> weight;
@@ -341,7 +341,7 @@ class LearnerConfiguration : public Learner {

  void LoadConfig(Json const& in) override {
    CHECK(IsA<Object>(in));
-    Version::Load(in, true);
+    Version::Load(in);

    auto const& learner_parameters = get<Object>(in["learner"]);
    FromJson(learner_parameters.at("learner_train_param"), &tparam_);
@@ -623,7 +623,7 @@ class LearnerIO : public LearnerConfiguration {

  void LoadModel(Json const& in) override {
    CHECK(IsA<Object>(in));
-    Version::Load(in, false);
+    Version::Load(in);
    auto const& learner = get<Object>(in["learner"]);
    mparam_.FromJson(learner.at("learner_model_param"));

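With the unused `check` flag gone from Version::Load, both call sites above (LoadConfig and LoadModel) collapse to the single-argument form. A simplified standalone sketch of the load-or-return-invalid behaviour, using a std::map stand-in rather than xgboost's Json type (an assumption made purely for illustration):

    #include <array>
    #include <iostream>
    #include <map>
    #include <string>

    using Triplet = std::array<int, 3>;
    constexpr Triplet kInvalid{-1, -1, -1};

    // Return the stored version triplet, or the sentinel when no "version"
    // field is present; callers no longer pass a strictness flag.
    Triplet LoadVersion(const std::map<std::string, Triplet>& doc) {
      auto it = doc.find("version");
      return it == doc.end() ? kInvalid : it->second;
    }

    int main() {
      std::map<std::string, Triplet> doc{{"version", {1, 2, 0}}};
      Triplet v = LoadVersion(doc);
      std::cout << v[0] << "." << v[1] << "." << v[2] << "\n";  // 1.2.0
      return 0;
    }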
@@ -11,7 +11,7 @@

namespace xgboost {
template <typename MetricRegistry>
-Metric* CreateMetricImpl(const std::string& name, GenericParameter const* tparam) {
+Metric* CreateMetricImpl(const std::string& name) {
  std::string buf = name;
  std::string prefix = name;
  const char* param;
@@ -44,7 +44,7 @@ Metric* CreateMetricImpl(const std::string& name, GenericParameter const* tparam

Metric *
Metric::Create(const std::string& name, GenericParameter const* tparam) {
-  auto metric = CreateMetricImpl<MetricReg>(name, tparam);
+  auto metric = CreateMetricImpl<MetricReg>(name);
  if (metric == nullptr) {
    LOG(FATAL) << "Unknown metric function " << name;
  }
@@ -55,7 +55,7 @@ Metric::Create(const std::string& name, GenericParameter const* tparam) {

Metric *
GPUMetric::CreateGPUMetric(const std::string& name, GenericParameter const* tparam) {
-  auto metric = CreateMetricImpl<MetricGPUReg>(name, tparam);
+  auto metric = CreateMetricImpl<MetricGPUReg>(name);
  if (metric == nullptr) {
    LOG(WARNING) << "Cannot find a GPU metric builder for metric " << name
                 << ". Resorting to the CPU builder";
@@ -55,13 +55,13 @@ class PerInstanceWeightPolicy {
 public:
  inline static xgboost::bst_float
  GetWeightOfInstance(const xgboost::MetaInfo& info,
-                      unsigned instance_id, unsigned group_id) {
+                      unsigned instance_id, unsigned) {
    return info.GetWeight(instance_id);
  }
  inline static xgboost::bst_float
  GetWeightOfSortedRecord(const xgboost::MetaInfo& info,
                          const PredIndPairContainer& rec,
-                          unsigned record_id, unsigned group_id) {
+                          unsigned record_id, unsigned) {
    return info.GetWeight(rec[record_id].second);
  }
};
@@ -70,14 +70,14 @@ class PerGroupWeightPolicy {
 public:
  inline static xgboost::bst_float
  GetWeightOfInstance(const xgboost::MetaInfo& info,
-                      unsigned instance_id, unsigned group_id) {
+                      unsigned, unsigned group_id) {
    return info.GetWeight(group_id);
  }

  inline static xgboost::bst_float
  GetWeightOfSortedRecord(const xgboost::MetaInfo& info,
-                          const PredIndPairContainer& rec,
-                          unsigned record_id, unsigned group_id) {
+                          const PredIndPairContainer&,
+                          unsigned, unsigned group_id) {
    return info.GetWeight(group_id);
  }
};
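The two policy classes above differ only in which weight they look up; an evaluator template selects one at compile time, which is why the unused half of each (instance_id, group_id) pair can simply go unnamed. A rough sketch of that pattern with a hypothetical Evaluate helper (names are illustrative, not the xgboost API):

    #include <iostream>
    #include <vector>

    struct Info {
      std::vector<float> weights;
      float GetWeight(unsigned i) const { return weights[i]; }
    };

    struct PerInstancePolicy {
      static float GetWeightOfInstance(const Info& info, unsigned instance_id, unsigned) {
        return info.GetWeight(instance_id);
      }
    };

    struct PerGroupPolicy {
      static float GetWeightOfInstance(const Info& info, unsigned, unsigned group_id) {
        return info.GetWeight(group_id);
      }
    };

    // The policy is a template parameter, so only one branch ever reads each id.
    template <typename WeightPolicy>
    float Evaluate(const Info& info, unsigned instance_id, unsigned group_id) {
      return WeightPolicy::GetWeightOfInstance(info, instance_id, group_id);
    }

    int main() {
      Info info{{0.5f, 2.0f, 1.5f}};
      std::cout << Evaluate<PerInstancePolicy>(info, 2, 0) << "\n";  // 1.5
      std::cout << Evaluate<PerGroupPolicy>(info, 2, 0) << "\n";     // 0.5
      return 0;
    }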
@@ -651,11 +651,11 @@ XGBOOST_REGISTER_METRIC(AMS, "ams")

XGBOOST_REGISTER_METRIC(Auc, "auc")
.describe("Area under curve for both classification and rank.")
-.set_body([](const char* param) { return new EvalAuc(); });
+.set_body([](const char*) { return new EvalAuc(); });

XGBOOST_REGISTER_METRIC(AucPR, "aucpr")
.describe("Area under PR curve for both classification and rank.")
-.set_body([](const char* param) { return new EvalAucPR(); });
+.set_body([](const char*) { return new EvalAucPR(); });

XGBOOST_REGISTER_METRIC(Precision, "pre")
.describe("precision@k for rank.")
@@ -671,6 +671,6 @@ XGBOOST_REGISTER_METRIC(MAP, "map")

XGBOOST_REGISTER_METRIC(Cox, "cox-nloglik")
.describe("Negative log partial likelihood of Cox proportioanl hazards model.")
-.set_body([](const char* param) { return new EvalCox(); });
+.set_body([](const char*) { return new EvalCox(); });
}  // namespace metric
}  // namespace xgboost
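The registration lambdas above receive a const char* argument from the metric registry; metrics such as AUC ignore it, so the lambda parameter is left unnamed. A small standalone registry sketch showing the same shape (EvalAuc here is a stand-in type, not the real implementation):

    #include <functional>
    #include <iostream>
    #include <map>
    #include <memory>
    #include <string>

    struct Metric { virtual const char* Name() const = 0; virtual ~Metric() = default; };
    struct EvalAuc : Metric { const char* Name() const override { return "auc"; } };

    // Factories take an optional parameter string; metrics that do not need it
    // simply leave the lambda parameter unnamed.
    std::map<std::string, std::function<Metric*(const char*)>> registry = {
      {"auc", [](const char*) { return new EvalAuc(); }},
    };

    int main() {
      std::unique_ptr<Metric> m(registry.at("auc")(nullptr));
      std::cout << m->Name() << "\n";  // auc
      return 0;
    }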
@@ -47,7 +47,7 @@ decltype(PredictionContainer::container_) const& PredictionContainer::Container(
}

void Predictor::Configure(
-    const std::vector<std::pair<std::string, std::string>>& cfg) {
+    const std::vector<std::pair<std::string, std::string>>&) {
}
Predictor* Predictor::Create(
    std::string const& name, GenericParameter const* generic_param) {
@@ -107,7 +107,7 @@ class TreeEvaluator {
        return w;
      }
    }
-    XGBOOST_DEVICE float CalcGainGivenWeight(bst_node_t nid, ParamT const &p,
+    XGBOOST_DEVICE float CalcGainGivenWeight(bst_node_t, ParamT const &p,
                                             tree::GradStats stats, float w) const {
      if (stats.GetHess() <= 0) {
        return .0f;
@@ -149,7 +149,7 @@ class TreeEvaluator {
      return;
    }
    common::Transform<>::Init(
-        [=] XGBOOST_DEVICE(size_t idx, common::Span<float> lower,
+        [=] XGBOOST_DEVICE(size_t, common::Span<float> lower,
                           common::Span<float> upper,
                           common::Span<int> monotone) {
          lower[leftid] = lower[nodeid];
@@ -69,20 +69,24 @@ class TreeGenerator {
    return result;
  }

-  virtual std::string Indicator(RegTree const& tree, int32_t nid, uint32_t depth) const {
+  virtual std::string Indicator(RegTree const& /*tree*/,
+                                int32_t /*nid*/, uint32_t /*depth*/) const {
    return "";
  }
-  virtual std::string Integer(RegTree const& tree, int32_t nid, uint32_t depth) const {
+  virtual std::string Integer(RegTree const& /*tree*/,
+                              int32_t /*nid*/, uint32_t /*depth*/) const {
    return "";
  }
-  virtual std::string Quantitive(RegTree const& tree, int32_t nid, uint32_t depth) const {
+  virtual std::string Quantitive(RegTree const& /*tree*/,
+                                 int32_t /*nid*/, uint32_t /*depth*/) const {
    return "";
  }
-  virtual std::string NodeStat(RegTree const& tree, int32_t nid) const {
+  virtual std::string NodeStat(RegTree const& /*tree*/, int32_t /*nid*/) const {
    return "";
  }

-  virtual std::string PlainNode(RegTree const& tree, int32_t nid, uint32_t depth) const = 0;
+  virtual std::string PlainNode(RegTree const& /*tree*/,
+                                int32_t /*nid*/, uint32_t /*depth*/) const = 0;

  virtual std::string SplitNode(RegTree const& tree, int32_t nid, uint32_t depth) {
    auto const split_index = tree[nid].SplitIndex();
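In this interface the parameter names are commented out rather than removed, which keeps the virtual signatures self-documenting for implementers while still avoiding the warning in the default do-nothing overrides. A toy illustration of the same convention (not the real TreeGenerator):

    #include <iostream>
    #include <string>

    class Dumper {
     public:
      // Default implementation ignores its arguments; the commented-out names
      // keep the signature readable for classes that override it.
      virtual std::string NodeStat(int /*nid*/, unsigned /*depth*/) const {
        return "";
      }
      virtual ~Dumper() = default;
    };

    class StatDumper : public Dumper {
     public:
      std::string NodeStat(int nid, unsigned depth) const override {
        return "node " + std::to_string(nid) + " at depth " + std::to_string(depth);
      }
    };

    int main() {
      StatDumper d;
      std::cout << d.NodeStat(3, 1) << "\n";  // node 3 at depth 1
      return 0;
    }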
@@ -179,7 +183,7 @@ class TextGenerator : public TreeGenerator {
  using SuperT = TreeGenerator;

 public:
-  TextGenerator(FeatureMap const& fmap, std::string const& attrs, bool with_stats) :
+  TextGenerator(FeatureMap const& fmap, bool with_stats) :
      TreeGenerator(fmap, with_stats) {}

  std::string LeafNode(RegTree const& tree, int32_t nid, uint32_t depth) const override {
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string Indicator(RegTree const& tree, int32_t nid, uint32_t depth) const override {
|
std::string Indicator(RegTree const& tree, int32_t nid, uint32_t) const override {
|
||||||
static std::string const kIndicatorTemplate = "{nid}:[{fname}] yes={yes},no={no}";
|
static std::string const kIndicatorTemplate = "{nid}:[{fname}] yes={yes},no={no}";
|
||||||
int32_t nyes = tree[nid].DefaultLeft() ?
|
int32_t nyes = tree[nid].DefaultLeft() ?
|
||||||
tree[nid].RightChild() : tree[nid].LeftChild();
|
tree[nid].RightChild() : tree[nid].LeftChild();
|
||||||
@ -288,14 +292,14 @@ class TextGenerator : public TreeGenerator {
|
|||||||
XGBOOST_REGISTER_TREE_IO(TextGenerator, "text")
|
XGBOOST_REGISTER_TREE_IO(TextGenerator, "text")
|
||||||
.describe("Dump text representation of tree")
|
.describe("Dump text representation of tree")
|
||||||
.set_body([](FeatureMap const& fmap, std::string const& attrs, bool with_stats) {
|
.set_body([](FeatureMap const& fmap, std::string const& attrs, bool with_stats) {
|
||||||
return new TextGenerator(fmap, attrs, with_stats);
|
return new TextGenerator(fmap, with_stats);
|
||||||
});
|
});
|
||||||
|
|
||||||
class JsonGenerator : public TreeGenerator {
|
class JsonGenerator : public TreeGenerator {
|
||||||
using SuperT = TreeGenerator;
|
using SuperT = TreeGenerator;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
JsonGenerator(FeatureMap const& fmap, std::string attrs, bool with_stats) :
|
JsonGenerator(FeatureMap const& fmap, bool with_stats) :
|
||||||
TreeGenerator(fmap, with_stats) {}
|
TreeGenerator(fmap, with_stats) {}
|
||||||
|
|
||||||
std::string Indent(uint32_t depth) const {
|
std::string Indent(uint32_t depth) const {
|
||||||
@@ -306,7 +310,7 @@ class JsonGenerator : public TreeGenerator {
    return result;
  }

-  std::string LeafNode(RegTree const& tree, int32_t nid, uint32_t depth) const override {
+  std::string LeafNode(RegTree const& tree, int32_t nid, uint32_t) const override {
    static std::string const kLeafTemplate =
        R"L({ "nodeid": {nid}, "leaf": {leaf} {stat}})L";
    static std::string const kStatTemplate =
@@ -426,7 +430,7 @@ class JsonGenerator : public TreeGenerator {
XGBOOST_REGISTER_TREE_IO(JsonGenerator, "json")
.describe("Dump json representation of tree")
.set_body([](FeatureMap const& fmap, std::string const& attrs, bool with_stats) {
-      return new JsonGenerator(fmap, attrs, with_stats);
+      return new JsonGenerator(fmap, with_stats);
    });

struct GraphvizParam : public XGBoostParameter<GraphvizParam> {
@@ -531,7 +535,7 @@ class GraphvizGenerator : public TreeGenerator {
 protected:
  // Only indicator is different, so we combine all different node types into this
  // function.
-  std::string PlainNode(RegTree const& tree, int32_t nid, uint32_t depth) const override {
+  std::string PlainNode(RegTree const& tree, int32_t nid, uint32_t) const override {
    auto split = tree[nid].SplitIndex();
    auto cond = tree[nid].SplitCond();
    static std::string const kNodeTemplate =
@@ -565,7 +569,7 @@ class GraphvizGenerator : public TreeGenerator {
    return result;
  };

-  std::string LeafNode(RegTree const& tree, int32_t nid, uint32_t depth) const override {
+  std::string LeafNode(RegTree const& tree, int32_t nid, uint32_t) const override {
    static std::string const kLeafTemplate =
        " {nid} [ label=\"leaf={leaf-value}\" {params}]\n";
    auto result = SuperT::Match(kLeafTemplate, {
@@ -163,7 +163,7 @@ class ColMaker: public TreeUpdater {
                      DMatrix* p_fmat,
                      RegTree* p_tree) {
    std::vector<int> newnodes;
-    this->InitData(gpair, *p_fmat, *p_tree);
+    this->InitData(gpair, *p_fmat);
    this->InitNewNode(qexpand_, gpair, *p_fmat, *p_tree);
    for (int depth = 0; depth < param_.max_depth; ++depth) {
      this->FindSplit(depth, qexpand_, gpair, p_fmat, p_tree);
@@ -200,8 +200,7 @@ class ColMaker: public TreeUpdater {
 protected:
  // initialize temp data structure
  inline void InitData(const std::vector<GradientPair>& gpair,
-                       const DMatrix& fmat,
-                       const RegTree& tree) {
+                       const DMatrix& fmat) {
    {
      // setup position
      position_.resize(gpair.size());
@@ -439,7 +438,7 @@ class ColMaker: public TreeUpdater {
  virtual void UpdateSolution(const SparsePage &batch,
                              const std::vector<bst_feature_t> &feat_set,
                              const std::vector<GradientPair> &gpair,
-                              DMatrix*p_fmat) {
+                              DMatrix*) {
    // start enumeration
    const auto num_features = static_cast<bst_omp_uint>(feat_set.size());
#if defined(_OPENMP)
|
|||||||
@ -56,13 +56,6 @@ class HistMaker: public BaseMaker {
|
|||||||
HistUnit(const float *cut, GradStats *data, uint32_t size)
|
HistUnit(const float *cut, GradStats *data, uint32_t size)
|
||||||
: cut{cut}, data{data}, size{size} {}
|
: cut{cut}, data{data}, size{size} {}
|
||||||
/*! \brief add a histogram to data */
|
/*! \brief add a histogram to data */
|
||||||
void Add(float fv, const std::vector<GradientPair> &gpair,
|
|
||||||
const MetaInfo &info, const size_t ridx) {
|
|
||||||
unsigned bin = std::upper_bound(cut, cut + size, fv) - cut;
|
|
||||||
CHECK_NE(size, 0U) << "try insert into size=0";
|
|
||||||
CHECK_LT(bin, size);
|
|
||||||
data[bin].Add(gpair[ridx]);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
/*! \brief a set of histograms from different index */
|
/*! \brief a set of histograms from different index */
|
||||||
struct HistSet {
|
struct HistSet {
|
||||||
@ -86,7 +79,7 @@ class HistMaker: public BaseMaker {
|
|||||||
// per thread histset
|
// per thread histset
|
||||||
std::vector<HistSet> hset;
|
std::vector<HistSet> hset;
|
||||||
// initialize the hist set
|
// initialize the hist set
|
||||||
inline void Configure(const TrainParam ¶m, int nthread) {
|
inline void Configure(int nthread) {
|
||||||
hset.resize(nthread);
|
hset.resize(nthread);
|
||||||
// cleanup statistics
|
// cleanup statistics
|
||||||
for (int tid = 0; tid < nthread; ++tid) {
|
for (int tid = 0; tid < nthread; ++tid) {
|
||||||
@@ -127,7 +120,7 @@ class HistMaker: public BaseMaker {
      // create histogram
      this->CreateHist(gpair, p_fmat, selected_features_, *p_tree);
      // find split based on histogram statistics
-      this->FindSplit(depth, gpair, p_fmat, selected_features_, p_tree);
+      this->FindSplit(selected_features_, p_tree);
      // reset position after split
      this->ResetPositionAfterSplit(p_fmat, *p_tree);
      this->UpdateQueueExpand(*p_tree);
@@ -159,9 +152,9 @@ class HistMaker: public BaseMaker {
                                       const RegTree &tree) {
  }
  virtual void CreateHist(const std::vector<GradientPair> &gpair,
-                          DMatrix *p_fmat,
+                          DMatrix *,
                          const std::vector <bst_feature_t> &fset,
-                          const RegTree &tree) = 0;
+                          const RegTree &) = 0;

 private:
  void EnumerateSplit(const HistUnit &hist,
@@ -202,10 +195,7 @@ class HistMaker: public BaseMaker {
      }
    }
  }

-  void FindSplit(int depth,
-                 const std::vector<GradientPair> &gpair,
-                 DMatrix *p_fmat,
-                 const std::vector <bst_feature_t> &feature_set,
+  void FindSplit(const std::vector <bst_feature_t> &feature_set,
                 RegTree *p_tree) {
    const size_t num_feature = feature_set.size();
    // get the best split condition for each node
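Where a parameter was unused by every caller and implementation (depth, gpair, and p_fmat for FindSplit above, and the tree argument of InitData earlier), the commit goes a step further and drops it from the signature, updating the call sites in the same change. A schematic sketch with hypothetical names:

    #include <cstddef>
    #include <iostream>
    #include <vector>

    // Before: std::size_t FindSplit(int depth, const std::vector<float>& gpair,
    //                               const std::vector<int>& feature_set);
    // After: arguments no implementation reads are removed outright, and every
    // caller is updated to the narrower signature.
    std::size_t FindSplit(const std::vector<int>& feature_set) {
      return feature_set.size();  // stand-in for the real split search
    }

    int main() {
      std::vector<int> features = {0, 3, 7};
      std::cout << FindSplit(features) << "\n";  // 3
      return 0;
    }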
@@ -288,7 +278,6 @@ class CQHistMaker: public HistMaker {
     */
    inline void Add(bst_float fv,
                    const std::vector<GradientPair> &gpair,
-                    const MetaInfo &info,
                    const bst_uint ridx) {
      while (istart < hist.size && !(fv < hist.cut[istart])) ++istart;
      CHECK_NE(istart, hist.size);
@@ -342,7 +331,7 @@ class CQHistMaker: public HistMaker {
      feat2workindex_[fset[i]] = static_cast<int>(i);
    }
    // start to work
-    this->wspace_.Configure(this->param_, 1);
+    this->wspace_.Configure(1);
    // if it is C++11, use lazy evaluation for Allreduce,
    // to gain speedup in recovery
    auto lazy_get_hist = [&]() {
@@ -376,7 +365,7 @@ class CQHistMaker: public HistMaker {
                               this->wspace_.hset[0].data.size(), lazy_get_hist);
  }

-  void ResetPositionAfterSplit(DMatrix *p_fmat,
+  void ResetPositionAfterSplit(DMatrix *,
                               const RegTree &tree) override {
    this->GetSplitSet(this->qexpand_, tree, &fsplit_set_);
  }
@@ -533,7 +522,7 @@ class CQHistMaker: public HistMaker {
        const bst_uint ridx = c.index;
        const int nid = this->position_[ridx];
        if (nid >= 0) {
-          hbuilder[nid].Add(c.fvalue, gpair, info, ridx);
+          hbuilder[nid].Add(c.fvalue, gpair, ridx);
        }
      }
    }
@@ -689,7 +678,7 @@ class GlobalProposalHistMaker: public CQHistMaker {
      this->feat2workindex_[fset[i]] = static_cast<int>(i);
    }
    // start to work
-    this->wspace_.Configure(this->param_, 1);
+    this->wspace_.Configure(1);
    // to gain speedup in recovery
    {
      this->thread_hist_.resize(omp_get_max_threads());