[SYCL] Add implementation for loss-guided policy (#10681)

Co-authored-by: Dmitry Razdoburdin <>

parent: cc3b56fc37
commit: e555a238bc
@@ -79,6 +79,78 @@ void HistUpdater<GradientSumT>::BuildLocalHistograms(
   builder_monitor_.Stop("BuildLocalHistograms");
 }
 
+template<typename GradientSumT>
+void HistUpdater<GradientSumT>::ExpandWithLossGuide(
+    const common::GHistIndexMatrix& gmat,
+    RegTree* p_tree,
+    const USMVector<GradientPair, MemoryType::on_device> &gpair) {
+  builder_monitor_.Start("ExpandWithLossGuide");
+  int num_leaves = 0;
+  const auto lr = param_.learning_rate;
+
+  ExpandEntry node(ExpandEntry::kRootNid, p_tree->GetDepth(ExpandEntry::kRootNid));
+  BuildHistogramsLossGuide(node, gmat, p_tree, gpair);
+
+  this->InitNewNode(ExpandEntry::kRootNid, gmat, gpair, *p_tree);
+
+  this->EvaluateSplits({node}, gmat, *p_tree);
+  node.split.loss_chg = snode_host_[ExpandEntry::kRootNid].best.loss_chg;
+
+  qexpand_loss_guided_->push(node);
+  ++num_leaves;
+
+  while (!qexpand_loss_guided_->empty()) {
+    const ExpandEntry candidate = qexpand_loss_guided_->top();
+    const int nid = candidate.nid;
+    qexpand_loss_guided_->pop();
+    if (!candidate.IsValid(param_, num_leaves)) {
+      (*p_tree)[nid].SetLeaf(snode_host_[nid].weight * lr);
+    } else {
+      auto evaluator = tree_evaluator_.GetEvaluator();
+      NodeEntry<GradientSumT>& e = snode_host_[nid];
+      bst_float left_leaf_weight =
+          evaluator.CalcWeight(nid, GradStats<GradientSumT>{e.best.left_sum}) * lr;
+      bst_float right_leaf_weight =
+          evaluator.CalcWeight(nid, GradStats<GradientSumT>{e.best.right_sum}) * lr;
+      p_tree->ExpandNode(nid, e.best.SplitIndex(), e.best.split_value,
+                         e.best.DefaultLeft(), e.weight, left_leaf_weight,
+                         right_leaf_weight, e.best.loss_chg, e.stats.GetHess(),
+                         e.best.left_sum.GetHess(), e.best.right_sum.GetHess());
+
+      this->ApplySplit({candidate}, gmat, p_tree);
+
+      const int cleft = (*p_tree)[nid].LeftChild();
+      const int cright = (*p_tree)[nid].RightChild();
+
+      ExpandEntry left_node(cleft, p_tree->GetDepth(cleft));
+      ExpandEntry right_node(cright, p_tree->GetDepth(cright));
+
+      if (row_set_collection_[cleft].Size() < row_set_collection_[cright].Size()) {
+        BuildHistogramsLossGuide(left_node, gmat, p_tree, gpair);
+      } else {
+        BuildHistogramsLossGuide(right_node, gmat, p_tree, gpair);
+      }
+
+      this->InitNewNode(cleft, gmat, gpair, *p_tree);
+      this->InitNewNode(cright, gmat, gpair, *p_tree);
+      bst_uint featureid = snode_host_[nid].best.SplitIndex();
+      tree_evaluator_.AddSplit(nid, cleft, cright, featureid,
+                               snode_host_[cleft].weight, snode_host_[cright].weight);
+      interaction_constraints_.Split(nid, featureid, cleft, cright);
+
+      this->EvaluateSplits({left_node, right_node}, gmat, *p_tree);
+      left_node.split.loss_chg = snode_host_[cleft].best.loss_chg;
+      right_node.split.loss_chg = snode_host_[cright].best.loss_chg;
+
+      qexpand_loss_guided_->push(left_node);
+      qexpand_loss_guided_->push(right_node);
+
+      ++num_leaves;  // give two and take one, as parent is no longer a leaf
+    }
+  }
+  builder_monitor_.Stop("ExpandWithLossGuide");
+}
+
 template<typename GradientSumT>
 void HistUpdater<GradientSumT>::InitSampling(
     const USMVector<GradientPair, MemoryType::on_device> &gpair,
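The loop above is best-first ("loss-guided") growth: a priority queue keyed on each candidate split's loss reduction decides which leaf to expand next, rather than growing the tree level by level. Below is a minimal, self-contained sketch of that control flow only; the types and gains are simplified stand-ins, not the real xgboost classes, and in the real code the gains come from EvaluateSplits over each node's histogram.

#include <functional>
#include <iostream>
#include <map>
#include <queue>
#include <vector>

struct Entry { int nid; float loss_chg; };

// Same ordering as the LossGuide comparator added in this commit.
bool LossGuide(Entry lhs, Entry rhs) {
  if (lhs.loss_chg == rhs.loss_chg) return lhs.nid > rhs.nid;  // older node first
  return lhs.loss_chg < rhs.loss_chg;                          // larger gain first
}

int main() {
  // Made-up split gains per node id (children of nid are 2*nid+1, 2*nid+2).
  std::map<int, float> gain = {{0, 2.0f}, {1, 0.5f}, {2, 1.5f},
                               {5, 0.4f}, {6, 0.3f}};
  const int max_leaves = 3;

  std::priority_queue<Entry, std::vector<Entry>,
                      std::function<bool(Entry, Entry)>> frontier(LossGuide);
  frontier.push({0, gain[0]});  // start from the root
  int num_leaves = 1;

  while (!frontier.empty()) {
    Entry e = frontier.top();
    frontier.pop();
    if (num_leaves >= max_leaves || !gain.count(2 * e.nid + 1)) {
      std::cout << "leaf " << e.nid << "\n";  // budget exhausted: finalize as leaf
      continue;
    }
    int cleft = 2 * e.nid + 1, cright = 2 * e.nid + 2;
    std::cout << "split " << e.nid << " -> " << cleft << ", " << cright << "\n";
    frontier.push({cleft, gain[cleft]});
    frontier.push({cright, gain[cright]});
    ++num_leaves;  // give two and take one, as parent is no longer a leaf
  }
  return 0;
}

With these gains the sketch splits node 0, then node 2 (the highest remaining gain), and finalizes nodes 1, 5, and 6 as leaves once the three-leaf budget is reached.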
@@ -249,6 +321,14 @@ void HistUpdater<GradientSumT>::InitData(
   }
 
   std::fill(snode_host_.begin(), snode_host_.end(), NodeEntry<GradientSumT>(param_));
+
+  {
+    if (param_.grow_policy == xgboost::tree::TrainParam::kLossGuide) {
+      qexpand_loss_guided_.reset(new ExpandQueue(LossGuide));
+    } else {
+      LOG(WARNING) << "Depth-wise building is not yet implemented";
+    }
+  }
   builder_monitor_.Stop("InitData");
 }
 
@@ -305,8 +385,7 @@ template <typename GradientSumT>
 void HistUpdater<GradientSumT>::InitNewNode(int nid,
                                             const common::GHistIndexMatrix& gmat,
                                             const USMVector<GradientPair,
                                                             MemoryType::on_device> &gpair,
-                                            const DMatrix& fmat,
                                             const RegTree& tree) {
   builder_monitor_.Start("InitNewNode");
 
@@ -14,6 +14,7 @@
 #include <utility>
 #include <vector>
 #include <memory>
+#include <queue>
 
 #include "../common/partition_builder.h"
 #include "split_evaluator.h"
@@ -126,7 +127,6 @@ class HistUpdater {
   void InitNewNode(int nid,
                    const common::GHistIndexMatrix& gmat,
                    const USMVector<GradientPair, MemoryType::on_device> &gpair,
-                   const DMatrix& fmat,
                    const RegTree& tree);
 
   void BuildLocalHistograms(const common::GHistIndexMatrix &gmat,
@@ -139,6 +139,18 @@ class HistUpdater {
                                 RegTree *p_tree,
                                 const USMVector<GradientPair, MemoryType::on_device> &gpair);
+
+  void ExpandWithLossGuide(const common::GHistIndexMatrix& gmat,
+                           RegTree* p_tree,
+                           const USMVector<GradientPair, MemoryType::on_device>& gpair);
+
+  inline static bool LossGuide(ExpandEntry lhs, ExpandEntry rhs) {
+    if (lhs.GetLossChange() == rhs.GetLossChange()) {
+      return lhs.GetNodeId() > rhs.GetNodeId();  // favor small timestamp
+    } else {
+      return lhs.GetLossChange() < rhs.GetLossChange();  // favor large loss_chg
+    }
+  }
 
   // --data fields--
   const Context* ctx_;
   size_t sub_group_size_;
@@ -163,6 +175,12 @@ class HistUpdater {
   const RegTree* p_last_tree_;
   DMatrix const* const p_last_fmat_;
 
+  using ExpandQueue =
+      std::priority_queue<ExpandEntry, std::vector<ExpandEntry>,
+                          std::function<bool(ExpandEntry, ExpandEntry)>>;
+
+  std::unique_ptr<ExpandQueue> qexpand_loss_guided_;
+
   enum DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData };
   DataLayout data_layout_;
 
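Note on the comparator semantics: std::priority_queue pops its *largest* element, with the supplied comparator playing the role of operator<. So returning lhs.GetLossChange() < rhs.GetLossChange() puts the candidate with the biggest loss reduction on top, and the tie-break lhs.GetNodeId() > rhs.GetNodeId() makes the smaller (earlier-created) node id win, which is what the "favor small timestamp" comment refers to. A short sanity check with a stand-in entry type (not the real ExpandEntry):

#include <cassert>
#include <functional>
#include <queue>
#include <vector>

struct E { int nid; float loss_chg; };

int main() {
  std::function<bool(E, E)> loss_guide = [](E lhs, E rhs) {
    if (lhs.loss_chg == rhs.loss_chg) return lhs.nid > rhs.nid;
    return lhs.loss_chg < rhs.loss_chg;
  };
  std::priority_queue<E, std::vector<E>, std::function<bool(E, E)>> q(loss_guide);
  q.push({7, 0.5f});
  q.push({3, 2.0f});
  q.push({4, 2.0f});
  assert(q.top().nid == 3);  // largest loss_chg wins; ties go to the smaller nid
  return 0;
}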
@@ -51,9 +51,8 @@ class TestHistUpdater : public HistUpdater<GradientSumT> {
   auto TestInitNewNode(int nid,
                        const common::GHistIndexMatrix& gmat,
                        const USMVector<GradientPair, MemoryType::on_device> &gpair,
-                       const DMatrix& fmat,
                        const RegTree& tree) {
-    HistUpdater<GradientSumT>::InitNewNode(nid, gmat, gpair, fmat, tree);
+    HistUpdater<GradientSumT>::InitNewNode(nid, gmat, gpair, tree);
     return HistUpdater<GradientSumT>::snode_host_[nid];
   }
 
@@ -69,6 +68,13 @@ class TestHistUpdater : public HistUpdater<GradientSumT> {
                   RegTree* p_tree) {
     HistUpdater<GradientSumT>::ApplySplit(nodes, gmat, p_tree);
   }
+
+  auto TestExpandWithLossGuide(const common::GHistIndexMatrix& gmat,
+                               DMatrix *p_fmat,
+                               RegTree* p_tree,
+                               const USMVector<GradientPair, MemoryType::on_device> &gpair) {
+    HistUpdater<GradientSumT>::ExpandWithLossGuide(gmat, p_tree, gpair);
+  }
 };
 
 void GenerateRandomGPairs(::sycl::queue* qu, GradientPair* gpair_ptr, size_t num_rows, bool has_neg_hess) {
@@ -300,7 +306,7 @@ void TestHistUpdaterInitNewNode(const xgboost::tree::TrainParam& param, float sp
   auto& row_idxs = row_set_collection->Data();
   const size_t* row_idxs_ptr = row_idxs.DataConst();
   updater.TestBuildHistogramsLossGuide(node, gmat, &tree, gpair);
-  const auto snode = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, *p_fmat, tree);
+  const auto snode = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, tree);
 
   GradStats<GradientSumT> grad_stat;
   {
@@ -360,7 +366,7 @@ void TestHistUpdaterEvaluateSplits(const xgboost::tree::TrainParam& param) {
   auto& row_idxs = row_set_collection->Data();
   const size_t* row_idxs_ptr = row_idxs.DataConst();
   const auto* hist = updater.TestBuildHistogramsLossGuide(node, gmat, &tree, gpair);
-  const auto snode_init = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, *p_fmat, tree);
+  const auto snode_init = updater.TestInitNewNode(ExpandEntry::kRootNid, gmat, gpair, tree);
 
   const auto snode_updated = updater.TestEvaluateSplits({node}, gmat, tree);
   auto best_loss_chg = snode_updated[0].best.loss_chg;
@@ -488,6 +494,56 @@ void TestHistUpdaterApplySplit(const xgboost::tree::TrainParam& param, float spa
   }
 }
 
+template <typename GradientSumT>
+void TestHistUpdaterExpandWithLossGuide(const xgboost::tree::TrainParam& param) {
+  const size_t num_rows = 3;
+  const size_t num_columns = 1;
+  const size_t n_bins = 16;
+
+  Context ctx;
+  ctx.UpdateAllowUnknown(Args{{"device", "sycl"}});
+
+  DeviceManager device_manager;
+  auto qu = device_manager.GetQueue(ctx.Device());
+
+  std::vector<float> data = {7, 3, 15};
+  auto p_fmat = GetDMatrixFromData(data, num_rows, num_columns);
+
+  DeviceMatrix dmat;
+  dmat.Init(qu, p_fmat.get());
+  common::GHistIndexMatrix gmat;
+  gmat.Init(qu, &ctx, dmat, n_bins);
+
+  std::vector<GradientPair> gpair_host = {{1, 2}, {3, 1}, {1, 1}};
+  USMVector<GradientPair, MemoryType::on_device> gpair(&qu, gpair_host);
+
+  RegTree tree;
+  FeatureInteractionConstraintHost int_constraints;
+  ObjInfo task{ObjInfo::kRegression};
+  std::unique_ptr<TreeUpdater> pruner{TreeUpdater::Create("prune", &ctx, &task)};
+  TestHistUpdater<GradientSumT> updater(&ctx, qu, param, std::move(pruner), int_constraints, p_fmat.get());
+  updater.SetHistSynchronizer(new BatchHistSynchronizer<GradientSumT>());
+  updater.SetHistRowsAdder(new BatchHistRowsAdder<GradientSumT>());
+  auto* row_set_collection = updater.TestInitData(gmat, gpair, *p_fmat, tree);
+
+  updater.TestExpandWithLossGuide(gmat, p_fmat.get(), &tree, gpair);
+
+  const auto& nodes = tree.GetNodes();
+  std::vector<float> ans(data.size());
+  for (size_t data_idx = 0; data_idx < data.size(); ++data_idx) {
+    size_t node_idx = 0;
+    while (!nodes[node_idx].IsLeaf()) {
+      node_idx = data[data_idx] < nodes[node_idx].SplitCond() ? nodes[node_idx].LeftChild() : nodes[node_idx].RightChild();
+    }
+    ans[data_idx] = nodes[node_idx].LeafValue();
+  }
+
+  ASSERT_NEAR(ans[0], -0.15, 1e-6);
+  ASSERT_NEAR(ans[1], -0.45, 1e-6);
+  ASSERT_NEAR(ans[2], -0.15, 1e-6);
+}
+
 
 TEST(SyclHistUpdater, Sampling) {
   xgboost::tree::TrainParam param;
   param.UpdateAllowUnknown(Args{{"subsample", "0.7"}});
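The asserted leaf values follow from XGBoost's usual leaf weight, w = -G / (H + lambda) * learning_rate, assuming lambda and learning_rate keep their defaults (1 and 0.3), since the test only overrides max_depth and grow_policy. Consistent with the assertions, the row x = 3 (gpair {3, 1}) sits alone in one leaf while x = 7 ({1, 2}) and x = 15 ({1, 1}) share the other. A quick check of the arithmetic:

#include <cstdio>

int main() {
  const float lambda = 1.0f;  // default reg_lambda
  const float lr = 0.3f;      // default learning_rate
  // Leaf containing only x = 3: G = 3, H = 1.
  std::printf("%.2f\n", -3.0f / (1.0f + lambda) * lr);                  // -0.45
  // Leaf containing x = 7 and x = 15: G = 1 + 1, H = 2 + 1.
  std::printf("%.2f\n", -(1.0f + 1.0f) / (2.0f + 1.0f + lambda) * lr);  // -0.15
  return 0;
}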
@@ -555,4 +611,13 @@ TEST(SyclHistUpdater, ApplySplitDence) {
   TestHistUpdaterApplySplit<double>(param, 0.0, (1u << 16) + 1);
 }
+
+TEST(SyclHistUpdater, ExpandWithLossGuide) {
+  xgboost::tree::TrainParam param;
+  param.UpdateAllowUnknown(Args{{"max_depth", "2"},
+                                {"grow_policy", "lossguide"}});
+
+  TestHistUpdaterExpandWithLossGuide<float>(param);
+  TestHistUpdaterExpandWithLossGuide<double>(param);
+}
 
 }  // namespace xgboost::sycl::tree