Merge branch 'master' into dev-hui

This commit is contained in:
amdsc21
2023-03-08 00:39:33 +01:00
221 changed files with 3122 additions and 1486 deletions

View File

@@ -1,4 +1,6 @@
// Copyright by Contributors
/**
* Copyright 2016-2023 by XGBoost contributors
*/
#include <gtest/gtest.h>
#include <xgboost/context.h>
#include <xgboost/objective.h>
@@ -25,11 +27,14 @@ TEST(Objective, PredTransform) {
tparam.UpdateAllowUnknown(Args{{"gpu_id", "0"}});
size_t n = 100;
for (const auto &entry :
::dmlc::Registry<::xgboost::ObjFunctionReg>::List()) {
std::unique_ptr<xgboost::ObjFunction> obj{
xgboost::ObjFunction::Create(entry->name, &tparam)};
obj->Configure(Args{{"num_class", "2"}});
for (const auto& entry : ::dmlc::Registry<::xgboost::ObjFunctionReg>::List()) {
std::unique_ptr<xgboost::ObjFunction> obj{xgboost::ObjFunction::Create(entry->name, &tparam)};
if (entry->name.find("multi") != std::string::npos) {
obj->Configure(Args{{"num_class", "2"}});
}
if (entry->name.find("quantile") != std::string::npos) {
obj->Configure(Args{{"quantile_alpha", "0.5"}});
}
HostDeviceVector<float> predts;
predts.Resize(n, 3.14f); // prediction is performed on host.
ASSERT_FALSE(predts.DeviceCanRead());

View File

@@ -0,0 +1,74 @@
/**
* Copyright 2023 by XGBoost contributors
*/
#include <gtest/gtest.h>
#include <xgboost/base.h> // Args
#include <xgboost/context.h> // Context
#include <xgboost/objective.h> // ObjFunction
#include <xgboost/span.h> // Span
#include <memory> // std::unique_ptr
#include <vector> // std::vector
#include "../helpers.h" // CheckConfigReload,CreateEmptyGenericParam,DeclareUnifiedTest
namespace xgboost {
TEST(Objective, DeclareUnifiedTest(Quantile)) {
  Context ctx = CreateEmptyGenericParam(GPUIDX);

  {
    // Multi-quantile configuration: only check that the parameters survive a
    // serialization round trip.
    Args config{{"quantile_alpha", "[0.6, 0.8]"}};
    std::unique_ptr<ObjFunction> objective{ObjFunction::Create("reg:quantileerror", &ctx)};
    objective->Configure(config);
    CheckConfigReload(objective, "reg:quantileerror");
  }

  // Single-quantile configuration with alpha = 0.6.
  Args config{{"quantile_alpha", "0.6"}};
  std::unique_ptr<ObjFunction> objective{ObjFunction::Create("reg:quantileerror", &ctx)};
  objective->Configure(config);
  CheckConfigReload(objective, "reg:quantileerror");

  // Pinball loss: gradient is -alpha when label > prediction, (1 - alpha)
  // otherwise; the hessian equals the sample weight.
  std::vector<float> predictions{1.0f, 2.0f, 3.0f};
  std::vector<float> targets{3.0f, 2.0f, 1.0f};
  std::vector<float> sample_weights{1.0f, 1.0f, 1.0f};
  std::vector<float> expected_grad{-0.6f, 0.4f, 0.4f};
  std::vector<float> expected_hess = sample_weights;
  CheckObjFunction(objective, predictions, targets, sample_weights, expected_grad, expected_hess);
}
// Verify ObjFunction::InitEstimation for reg:quantileerror: the base score
// (global intercept) is derived from the quantiles of the labels, first
// unweighted and then with per-row weights applied.
TEST(Objective, DeclareUnifiedTest(QuantileIntercept)) {
Context ctx = CreateEmptyGenericParam(GPUIDX);
Args args{{"quantile_alpha", "[0.6, 0.8]"}};
std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:quantileerror", &ctx)};
obj->Configure(args);
MetaInfo info;
info.num_row_ = 10;
// Fill the labels in place with 0, 1, ..., 9 as a single-column matrix.
// ModifyInplace hands us the backing vector and its shape together.
info.labels.ModifyInplace([&](HostDeviceVector<float>* data, common::Span<std::size_t> shape) {
data->SetDevice(ctx.gpu_id);
data->Resize(info.num_row_);
shape[0] = info.num_row_;
shape[1] = 1;
auto& h_labels = data->HostVector();
for (std::size_t i = 0; i < info.num_row_; ++i) {
h_labels[i] = i;
}
});
linalg::Vector<float> base_scores;
obj->InitEstimation(info, &base_scores);
// A single scalar is produced even with two alphas: the implementation
// collapses them into one intercept.
ASSERT_EQ(base_scores.Size(), 1) << "Vector is not yet supported.";
// mean([5.6, 7.8]) — the 0.6 and 0.8 quantiles of the labels 0..9.
ASSERT_NEAR(base_scores(0), 6.7, kRtEps);
// Now add decreasing weights (9, 8, ..., 0); the weighted quantiles shift
// toward the smaller labels.
for (std::size_t i = 0; i < info.num_row_; ++i) {
info.weights_.HostVector().emplace_back(info.num_row_ - i - 1.0);
}
obj->InitEstimation(info, &base_scores);
ASSERT_EQ(base_scores.Size(), 1) << "Vector is not yet supported.";
// mean([3, 5]) — the weighted 0.6 and 0.8 quantiles under these weights.
ASSERT_NEAR(base_scores(0), 4.0, kRtEps);
}
} // namespace xgboost

View File

@@ -0,0 +1,5 @@
/**
* Copyright 2023 XGBoost contributors
*/
// Dummy file to enable the CUDA tests.
#include "test_quantile_obj.cc"

View File

@@ -6,8 +6,9 @@
#include <xgboost/json.h>
#include <xgboost/objective.h>
#include "../../../src/common/linalg_op.h" // begin,end
#include "../../../src/common/linalg_op.h" // for begin, end
#include "../../../src/objective/adaptive.h"
#include "../../../src/tree/param.h" // for TrainParam
#include "../helpers.h"
#include "xgboost/base.h"
#include "xgboost/data.h"
@@ -157,7 +158,7 @@ TEST(Objective, DeclareUnifiedTest(PoissonRegressionGPair)) {
ObjFunction::Create("count:poisson", &ctx)
};
args.emplace_back(std::make_pair("max_delta_step", "0.1f"));
args.emplace_back("max_delta_step", "0.1f");
obj->Configure(args);
CheckObjFunction(obj,
@@ -259,7 +260,7 @@ TEST(Objective, DeclareUnifiedTest(TweedieRegressionGPair)) {
std::vector<std::pair<std::string, std::string>> args;
std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:tweedie", &ctx)};
args.emplace_back(std::make_pair("tweedie_variance_power", "1.1f"));
args.emplace_back("tweedie_variance_power", "1.1f");
obj->Configure(args);
CheckObjFunction(obj,
@@ -408,9 +409,13 @@ TEST(Objective, DeclareUnifiedTest(AbsoluteError)) {
h_predt[i] = labels[i] + i;
}
obj->UpdateTreeLeaf(position, info, predt, 0, &tree);
ASSERT_EQ(tree[1].LeafValue(), -1);
ASSERT_EQ(tree[2].LeafValue(), -4);
tree::TrainParam param;
param.Init(Args{});
auto lr = param.learning_rate;
obj->UpdateTreeLeaf(position, info, param.learning_rate, predt, 0, &tree);
ASSERT_EQ(tree[1].LeafValue(), -1.0f * lr);
ASSERT_EQ(tree[2].LeafValue(), -4.0f * lr);
}
TEST(Objective, DeclareUnifiedTest(AbsoluteErrorLeaf)) {
@@ -428,8 +433,8 @@ TEST(Objective, DeclareUnifiedTest(AbsoluteErrorLeaf)) {
auto h_labels = info.labels.HostView().Slice(linalg::All(), t);
std::iota(linalg::begin(h_labels), linalg::end(h_labels), 0);
auto h_predt = linalg::MakeTensorView(predt.HostSpan(), {kRows, kTargets}, Context::kCpuId)
.Slice(linalg::All(), t);
auto h_predt =
linalg::MakeTensorView(&ctx, predt.HostSpan(), kRows, kTargets).Slice(linalg::All(), t);
for (size_t i = 0; i < h_predt.Size(); ++i) {
h_predt(i) = h_labels(i) + i;
}
@@ -457,11 +462,16 @@ TEST(Objective, DeclareUnifiedTest(AbsoluteErrorLeaf)) {
ASSERT_EQ(tree.GetNumLeaves(), 4);
auto empty_leaf = tree[4].LeafValue();
obj->UpdateTreeLeaf(position, info, predt, t, &tree);
ASSERT_EQ(tree[3].LeafValue(), -5);
ASSERT_EQ(tree[4].LeafValue(), empty_leaf);
ASSERT_EQ(tree[5].LeafValue(), -10);
ASSERT_EQ(tree[6].LeafValue(), -14);
tree::TrainParam param;
param.Init(Args{});
auto lr = param.learning_rate;
obj->UpdateTreeLeaf(position, info, lr, predt, t, &tree);
ASSERT_EQ(tree[3].LeafValue(), -5.0f * lr);
ASSERT_EQ(tree[4].LeafValue(), empty_leaf * lr);
ASSERT_EQ(tree[5].LeafValue(), -10.0f * lr);
ASSERT_EQ(tree[6].LeafValue(), -14.0f * lr);
}
}