* Refactor configuration [Part II].
* General changes:
** Remove `Init` methods to avoid ambiguity.
** Remove `Configure(std::map<>)` to avoid redundant copying and to prepare for
parameter validation (`InitAllowUnknown` returns a `std::vector` of the unknown
parameters). A sketch of the new argument-passing style follows this list.
** Add a name to tree updaters for easier debugging.
* Learner changes:
** Make `LearnerImpl` the single source of configuration.
All configuration is stored in and applied by `LearnerImpl::Configure()`.
** Remove `booster` from the C API.
It was originally kept for a "compatibility reason" that was never explained,
so it is simply removed here.
** Add a `metric_names_` field in `LearnerImpl`.
** Remove `LazyInit`. Configuration is now always lazy.
** Run `Configure` before every iteration (see the learner sketch after this list).
* Predictor changes:
** Allocate both the CPU and the GPU predictor.
** Remove `cpu_predictor` from `gpu_predictor`.
`GBTree` now dispatches to the appropriate predictor (sketched after this list).
** Remove some GPU predictor tests.
* IO changes:
No changes to IO. Binary model format stability is tested by comparing the
hash values of models saved at two different commits.
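Configuration now travels as a flat `std::vector<std::pair<std::string, std::string>>`, as in the tests below. The following is a minimal sketch of how a parameter structure consumes such a vector; `MyParam` is a hypothetical example built on `dmlc::Parameter`, and only `InitAllowUnknown` and the vector-of-pairs container come from this change itself.

#include <dmlc/parameter.h>

#include <string>
#include <utility>
#include <vector>

// Hypothetical parameter structure, used only to illustrate the interface.
struct MyParam : public dmlc::Parameter<MyParam> {
  int num_class;
  DMLC_DECLARE_PARAMETER(MyParam) {
    DMLC_DECLARE_FIELD(num_class).set_default(1)
        .describe("Number of classes.");
  }
};

DMLC_REGISTER_PARAMETER(MyParam);

int main() {
  // Configuration is carried as key/value string pairs; no std::map is built.
  std::vector<std::pair<std::string, std::string>> args{{"num_class", "3"}};

  MyParam param;
  // InitAllowUnknown applies the known keys and returns the unknown ones,
  // which the caller can then inspect for parameter validation.
  std::vector<std::pair<std::string, std::string>> unknown =
      param.InitAllowUnknown(args);
  return unknown.empty() ? 0 : 1;
}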
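The lazy-configuration flow in the learner follows roughly the control flow of this toy sketch; `ToyLearner`, `SetParams`, and `need_configuration_` are illustrative names only, not the actual `LearnerImpl` members.

#include <string>
#include <utility>
#include <vector>

class ToyLearner {
 public:
  // SetParams only records the key/value pairs; nothing is applied yet.
  void SetParams(std::vector<std::pair<std::string, std::string>> args) {
    cfg_ = std::move(args);
    need_configuration_ = true;
  }

  // Called at the start of every training iteration.
  void UpdateOneIter(int iter) {
    this->Configure();  // always safe: a no-op when already configured
    // ... run the boosting iteration ...
    (void)iter;
  }

 private:
  // The single place where configuration is carried out.
  void Configure() {
    if (!need_configuration_) { return; }
    // ... push cfg_ into the objective, gbm, metrics, ...
    need_configuration_ = false;
  }

  std::vector<std::pair<std::string, std::string>> cfg_;
  bool need_configuration_{true};
};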
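Similarly, a rough sketch of what "`GBTree` dispatches the predictor" means once `gpu_predictor` no longer holds a `cpu_predictor`; the classes below are hypothetical stand-ins, not the real XGBoost types.

#include <memory>

// Minimal stand-ins for the two predictor implementations.
class Predictor { public: virtual ~Predictor() = default; };
class CPUPredictor : public Predictor {};
class GPUPredictor : public Predictor {};

class ToyGBTree {
 public:
  // Both predictors are allocated up front.
  ToyGBTree()
      : cpu_predictor_(new CPUPredictor()),
        gpu_predictor_(new GPUPredictor()) {}

  // The booster, not the GPU predictor, decides which one to use.
  Predictor* GetPredictor(int gpu_id) const {
    return gpu_id >= 0 ? gpu_predictor_.get() : cpu_predictor_.get();
  }

 private:
  std::unique_ptr<Predictor> cpu_predictor_;
  std::unique_ptr<Predictor> gpu_predictor_;
};

Keeping the choice inside the booster means the fallback decision lives in one place instead of inside the GPU predictor.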
68 lines · 2.3 KiB · C++
/*!
 * Copyright 2018-2019 XGBoost contributors
 */
#include <xgboost/objective.h>
#include <xgboost/generic_parameters.h>
#include "../../src/common/common.h"
#include "../helpers.h"

TEST(Objective, DeclareUnifiedTest(SoftmaxMultiClassObjGPair)) {
  xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
  std::vector<std::pair<std::string, std::string>> args {{"num_class", "3"}};
  xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("multi:softmax", &lparam);

  obj->Configure(args);
  CheckObjFunction(obj,
                   {1.0f, 0.0f, 2.0f, 2.0f, 0.0f, 1.0f},          // preds
                   {1.0f, 0.0f},                                  // labels
                   {1.0f, 1.0f},                                  // weights
                   {0.24f, -0.91f, 0.66f, -0.33f, 0.09f, 0.24f},  // grad
                   {0.36f, 0.16f, 0.44f, 0.45f, 0.16f, 0.37f});   // hess

  ASSERT_NO_THROW(obj->DefaultEvalMetric());

  delete obj;
}

TEST(Objective, DeclareUnifiedTest(SoftmaxMultiClassBasic)) {
  auto lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
  std::vector<std::pair<std::string, std::string>> args{
      std::pair<std::string, std::string>("num_class", "3")};

  xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("multi:softmax", &lparam);
  obj->Configure(args);

  xgboost::HostDeviceVector<xgboost::bst_float> io_preds = {2.0f, 0.0f, 1.0f,
                                                            1.0f, 0.0f, 2.0f};
  std::vector<xgboost::bst_float> out_preds = {0.0f, 2.0f};
  obj->PredTransform(&io_preds);

  auto& preds = io_preds.HostVector();

  for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
    EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
  }

  delete obj;
}

TEST(Objective, DeclareUnifiedTest(SoftprobMultiClassBasic)) {
  xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
  std::vector<std::pair<std::string, std::string>> args {
      std::pair<std::string, std::string>("num_class", "3")};

  xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("multi:softprob", &lparam);
  obj->Configure(args);

  xgboost::HostDeviceVector<xgboost::bst_float> io_preds = {2.0f, 0.0f, 1.0f};
  std::vector<xgboost::bst_float> out_preds = {0.66524096f, 0.09003057f, 0.24472847f};

  obj->PredTransform(&io_preds);
  auto& preds = io_preds.HostVector();

  for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
    EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
  }
  delete obj;
}