parent
096047c547
commit
ace7713201
@@ -1317,7 +1317,9 @@ class LearnerImpl : public LearnerIO {
     if (metrics_.empty() && tparam_.disable_default_eval_metric <= 0) {
       metrics_.emplace_back(Metric::Create(obj_->DefaultEvalMetric(), &ctx_));
+      auto config = obj_->DefaultMetricConfig();
+      if (!IsA<Null>(config)) {
+        metrics_.back()->LoadConfig(config);
+      }
       metrics_.back()->Configure({cfg_.begin(), cfg_.end()});
     }
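In effect, the default metric created here now inherits the objective's configuration. A minimal sketch of the observable behaviour, not part of the diff; it assumes a test-style context with the usual headers, and `huber_slope` / the `mphe` metric name are drawn from the pseudo-huber objective:

```cpp
// Hedged sketch: the auto-created default metric now shares the objective's
// parameters instead of being configured from metric defaults alone.
auto p_fmat = MakeFmatForObjTest("reg:pseudohubererror");  // helper added below
std::unique_ptr<Learner> learner{Learner::Create({p_fmat})};
learner->SetParam("objective", "reg:pseudohubererror");
learner->SetParam("huber_slope", "2.0");  // assumed objective parameter name
learner->Configure();
learner->UpdateOneIter(0, p_fmat);
// EvalOneIter now scores "mphe" with huber_slope=2.0, matching the objective.
learner->EvalOneIter(0, {p_fmat}, {"train"});
```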
@@ -268,6 +268,13 @@ class PseudoHuberRegression : public FitIntercept {
     }
     FromJson(in["pseudo_huber_param"], &param_);
   }
+  [[nodiscard]] Json DefaultMetricConfig() const override {
+    CHECK(param_.GetInitialised());
+    Json config{Object{}};
+    config["name"] = String{this->DefaultEvalMetric()};
+    config["pseudo_huber_param"] = ToJson(param_);
+    return config;
+  }
 };

 XGBOOST_REGISTER_OBJECTIVE(PseudoHuberRegression, "reg:pseudohubererror")
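A sketch of the round trip this enables. The document shape follows from the two assignments above, while the exact fields inside `pseudo_huber_param` are whatever `ToJson(param_)` emits, so treat them as an assumption:

```cpp
// Hedged sketch: feed the objective's default metric config straight into
// the matching metric, mirroring what LearnerImpl now does on configure.
Context ctx;
std::unique_ptr<ObjFunction> obj{ObjFunction::Create("reg:pseudohubererror", &ctx)};
obj->Configure(Args{});
// Roughly {"name": "mphe", "pseudo_huber_param": {...}}.
Json config = obj->DefaultMetricConfig();
std::unique_ptr<Metric> metric{Metric::Create(get<String const>(config["name"]), &ctx)};
metric->LoadConfig(config);  // the metric now shares the objective's slope parameter
```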
@@ -6,6 +6,7 @@
 #include <xgboost/objective.h>

 #include "../helpers.h"
+#include "../objective_helpers.h"

 TEST(Objective, UnknownFunction) {
   xgboost::ObjFunction* obj = nullptr;
@@ -43,4 +44,61 @@ TEST(Objective, PredTransform) {
     ASSERT_TRUE(predts.HostCanWrite());
   }
 }
+
+class TestDefaultObjConfig : public ::testing::TestWithParam<std::string> {
+  Context ctx_;
+
+ public:
+  void Run(std::string objective) {
+    auto Xy = MakeFmatForObjTest(objective);
+    std::unique_ptr<Learner> learner{Learner::Create({Xy})};
+    std::unique_ptr<ObjFunction> objfn{ObjFunction::Create(objective, &ctx_)};
+
+    learner->SetParam("objective", objective);
+    if (objective.find("multi") != std::string::npos) {
+      learner->SetParam("num_class", "3");
+      objfn->Configure(Args{{"num_class", "3"}});
+    } else if (objective.find("quantile") != std::string::npos) {
+      learner->SetParam("quantile_alpha", "0.5");
+      objfn->Configure(Args{{"quantile_alpha", "0.5"}});
+    } else {
+      objfn->Configure(Args{});
+    }
+    learner->Configure();
+    learner->UpdateOneIter(0, Xy);
+    learner->EvalOneIter(0, {Xy}, {"train"});
+    Json config{Object{}};
+    learner->SaveConfig(&config);
+    auto jobj = get<Object const>(config["learner"]["objective"]);
+
+    ASSERT_TRUE(jobj.find("name") != jobj.cend());
+    // FIXME(jiamingy): We should have the following check, but some legacy parameters
+    // like "pos_weight" and "delta_step" in objectives are not in metrics.
+
+    // if (jobj.size() > 1) {
+    //   ASSERT_FALSE(IsA<Null>(objfn->DefaultMetricConfig()));
+    // }
+    auto mconfig = objfn->DefaultMetricConfig();
+    if (!IsA<Null>(mconfig)) {
+      // make sure the metric can handle it
+      std::unique_ptr<Metric> metricfn{Metric::Create(get<String const>(mconfig["name"]), &ctx_)};
+      metricfn->LoadConfig(mconfig);
+      Json loaded(Object{});
+      metricfn->SaveConfig(&loaded);
+      metricfn->Configure(Args{});
+      ASSERT_EQ(mconfig, loaded);
+    }
+  }
+};
+
+TEST_P(TestDefaultObjConfig, Objective) {
+  std::string objective = GetParam();
+  this->Run(objective);
+}
+
+INSTANTIATE_TEST_SUITE_P(Objective, TestDefaultObjConfig,
+                         ::testing::ValuesIn(MakeObjNamesForTest()),
+                         [](const ::testing::TestParamInfo<TestDefaultObjConfig::ParamType>& info) {
+                           return ObjTestNameGenerator(info);
+                         });
 } // namespace xgboost
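The parameter values come from `MakeObjNamesForTest()` declared in `objective_helpers.h`; a plausible reconstruction of such a helper, enumerating the dmlc objective registry the header includes for this purpose (the shipped implementation may differ):

```cpp
#include <dmlc/registry.h>      // for Registry
#include <xgboost/objective.h>  // for ObjFunctionReg

#include <string>
#include <vector>

// Hedged sketch, not the actual helper: list every registered objective name.
inline std::vector<std::string> MakeObjNamesForTest() {
  std::vector<std::string> names;
  for (auto const* entry : ::dmlc::Registry<::xgboost::ObjFunctionReg>::Get()->List()) {
    names.emplace_back(entry->name);
  }
  return names;
}
```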
tests/cpp/objective_helpers.cc (new file, +31 lines)
@@ -0,0 +1,31 @@
+/**
+ * Copyright (c) 2023, XGBoost contributors
+ */
+#include "objective_helpers.h"
+
+#include "../../src/common/linalg_op.h"  // for begin, end
+#include "helpers.h"                     // for RandomDataGenerator
+
+namespace xgboost {
+std::shared_ptr<DMatrix> MakeFmatForObjTest(std::string const& obj) {
+  auto constexpr kRows = 10, kCols = 10;
+  auto p_fmat = RandomDataGenerator{kRows, kCols, 0}.GenerateDMatrix(true);
+  auto& h_upper = p_fmat->Info().labels_upper_bound_.HostVector();
+  auto& h_lower = p_fmat->Info().labels_lower_bound_.HostVector();
+  h_lower.resize(kRows);
+  h_upper.resize(kRows);
+  for (size_t i = 0; i < kRows; ++i) {
+    h_lower[i] = 1;
+    h_upper[i] = 10;
+  }
+  if (obj.find("rank:") != std::string::npos) {
+    auto h_label = p_fmat->Info().labels.HostView();
+    std::size_t k = 0;
+    for (auto& v : h_label) {
+      v = k % 2 == 0;
+      ++k;
+    }
+  }
+  return p_fmat;
+};
+} // namespace xgboost
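A hedged usage note: the interval bounds set above are what censored survival objectives read, and the alternating 0/1 labels satisfy ranking objectives, so one helper covers both families. For example, inside a test body:

```cpp
// Hedged sketch assuming the helper above is linked into a gtest target.
auto aft = MakeFmatForObjTest("survival:aft");  // consumes labels_{lower,upper}_bound_
auto ndcg = MakeFmatForObjTest("rank:ndcg");    // consumes the 0/1 relevance labels
ASSERT_EQ(aft->Info().labels_lower_bound_.Size(), 10ul);
```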
@@ -1,6 +1,8 @@
 /**
  * Copyright (c) 2023, XGBoost contributors
  */
 #pragma once

+#include <dmlc/registry.h>      // for Registry
 #include <gtest/gtest.h>
+#include <xgboost/objective.h>  // for ObjFunctionReg
@@ -29,4 +31,6 @@ inline std::string ObjTestNameGenerator(const ::testing::TestParamInfo<ParamType
   }
   return name;
 };
+
+std::shared_ptr<DMatrix> MakeFmatForObjTest(std::string const& obj);
 } // namespace xgboost
@@ -655,33 +655,11 @@ TEST_F(InitBaseScore, InitWithPredict) { this->TestInitWithPredt(); }
 TEST_F(InitBaseScore, UpdateProcess) { this->TestUpdateProcess(); }

 class TestColumnSplit : public ::testing::TestWithParam<std::string> {
-  static auto MakeFmat(std::string const& obj) {
-    auto constexpr kRows = 10, kCols = 10;
-    auto p_fmat = RandomDataGenerator{kRows, kCols, 0}.GenerateDMatrix(true);
-    auto& h_upper = p_fmat->Info().labels_upper_bound_.HostVector();
-    auto& h_lower = p_fmat->Info().labels_lower_bound_.HostVector();
-    h_lower.resize(kRows);
-    h_upper.resize(kRows);
-    for (size_t i = 0; i < kRows; ++i) {
-      h_lower[i] = 1;
-      h_upper[i] = 10;
-    }
-    if (obj.find("rank:") != std::string::npos) {
-      auto h_label = p_fmat->Info().labels.HostView();
-      std::size_t k = 0;
-      for (auto& v : h_label) {
-        v = k % 2 == 0;
-        ++k;
-      }
-    }
-    return p_fmat;
-  };
-
   void TestBaseScore(std::string objective, float expected_base_score, Json expected_model) {
     auto const world_size = collective::GetWorldSize();
     auto const rank = collective::GetRank();

-    auto p_fmat = MakeFmat(objective);
+    auto p_fmat = MakeFmatForObjTest(objective);
     std::shared_ptr<DMatrix> sliced{p_fmat->SliceCol(world_size, rank)};
     std::unique_ptr<Learner> learner{Learner::Create({sliced})};
     learner->SetParam("tree_method", "approx");
@@ -705,7 +683,7 @@ class TestColumnSplit : public ::testing::TestWithParam<std::string> {

  public:
   void Run(std::string objective) {
-    auto p_fmat = MakeFmat(objective);
+    auto p_fmat = MakeFmatForObjTest(objective);
     std::unique_ptr<Learner> learner{Learner::Create({p_fmat})};
     learner->SetParam("tree_method", "approx");
     learner->SetParam("objective", objective);