Refactor configuration [Part II]. (#4577)
* Refactor configuration [Part II].
* General changes:
** Remove `Init` methods to avoid ambiguity.
** Remove `Configure(std::map<>)` to avoid redundant copying and prepare for
parameter validation. (`std::vector` is returned from `InitAllowUnknown`).
** Add name to tree updaters for easier debugging.
* Learner changes:
** Make `LearnerImpl` the only source of configuration.
All configurations are stored and carried out by `LearnerImpl::Configure()`.
** Remove booster in C API.
Originally kept for a "compatibility reason", but the reason was never stated,
so we simply remove it here.
** Add a `metric_names_` field in `LearnerImpl`.
** Remove `LazyInit`. Configuration will always be lazy.
** Run `Configure` before every iteration.
* Predictor changes:
** Allocate both cpu and gpu predictor.
** Remove cpu_predictor from gpu_predictor.
`GBTree` is now used to dispatch the predictor.
** Remove some GPU Predictor tests.
* IO
No IO changes. The binary model format stability is tested by comparing
hash values of saved models between two commits.
This commit is contained in:
@@ -46,14 +46,14 @@ TEST(GPUSet, Verbose) {
|
||||
args["verbosity"] = "3"; // LOG INFO
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure({args.cbegin(), args.cend()});
|
||||
GPUSet::All(0, 1);
|
||||
std::string output = testing::internal::GetCapturedStderr();
|
||||
ASSERT_NE(output.find("GPU ID: 0"), std::string::npos);
|
||||
ASSERT_NE(output.find("GPUs: 1"), std::string::npos);
|
||||
|
||||
args["verbosity"] = "1"; // restore
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure({args.cbegin(), args.cend()});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,8 +14,8 @@ TEST(Monitor, Logging) {
|
||||
monitor_.Stop("basic");
|
||||
};
|
||||
|
||||
std::map<std::string, std::string> args = {std::make_pair("verbosity", "3")};
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
Args args = {std::make_pair("verbosity", "3")};
|
||||
ConsoleLogger::Configure(args);
|
||||
testing::internal::CaptureStderr();
|
||||
run_monitor();
|
||||
std::string output = testing::internal::GetCapturedStderr();
|
||||
@@ -23,7 +23,7 @@ TEST(Monitor, Logging) {
|
||||
|
||||
// Monitor only prints messages when set to DEBUG.
|
||||
args = {std::make_pair("verbosity", "2")};
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure(args);
|
||||
testing::internal::CaptureStderr();
|
||||
run_monitor();
|
||||
output = testing::internal::GetCapturedStderr();
|
||||
|
||||
@@ -8,42 +8,47 @@ TEST(GBTree, SelectTreeMethod) {
|
||||
using Arg = std::pair<std::string, std::string>;
|
||||
size_t constexpr kRows = 10;
|
||||
size_t constexpr kCols = 10;
|
||||
auto mat_ptr = CreateDMatrix(kRows, kCols, 0);
|
||||
std::vector<std::shared_ptr<xgboost::DMatrix>> mat = {*mat_ptr};
|
||||
auto p_shared_ptr_dmat = CreateDMatrix(kRows, kCols, 0);
|
||||
auto p_dmat {(*p_shared_ptr_dmat).get()};
|
||||
|
||||
LearnerTrainParam learner_param;
|
||||
learner_param.InitAllowUnknown(std::vector<Arg>{Arg("n_gpus", "0")});
|
||||
GenericParameter generic_param;
|
||||
generic_param.InitAllowUnknown(std::vector<Arg>{Arg("n_gpus", "0")});
|
||||
std::unique_ptr<GradientBooster> p_gbm{
|
||||
GradientBooster::Create("gbtree", &learner_param, {}, 0)};
|
||||
GradientBooster::Create("gbtree", &generic_param, {}, 0)};
|
||||
auto& gbtree = dynamic_cast<gbm::GBTree&> (*p_gbm);
|
||||
|
||||
// Test if `tree_method` can be set
|
||||
std::string n_feat = std::to_string(kCols);
|
||||
gbtree.Configure({Arg{"tree_method", "approx"}, Arg{"num_feature", n_feat}});
|
||||
std::map<std::string, std::string> args {Arg{"tree_method", "approx"}, Arg{"num_feature", n_feat}};
|
||||
gbtree.Configure({args.cbegin(), args.cend()});
|
||||
|
||||
gbtree.ConfigureWithKnownData(args, p_dmat);
|
||||
auto const& tparam = gbtree.GetTrainParam();
|
||||
gbtree.ConfigureWithKnownData({Arg{"tree_method", "approx"}, Arg{"num_feature", n_feat}}, p_dmat);
|
||||
ASSERT_EQ(tparam.updater_seq, "grow_histmaker,prune");
|
||||
gbtree.Configure({Arg("tree_method", "exact"), Arg("num_feature", n_feat)});
|
||||
gbtree.ConfigureWithKnownData({Arg("tree_method", "exact"), Arg("num_feature", n_feat)}, p_dmat);
|
||||
ASSERT_EQ(tparam.updater_seq, "grow_colmaker,prune");
|
||||
gbtree.Configure({Arg("tree_method", "hist"), Arg("num_feature", n_feat)});
|
||||
gbtree.ConfigureWithKnownData({Arg("tree_method", "hist"), Arg("num_feature", n_feat)}, p_dmat);
|
||||
ASSERT_EQ(tparam.updater_seq, "grow_quantile_histmaker");
|
||||
ASSERT_EQ(tparam.predictor, "cpu_predictor");
|
||||
gbtree.Configure({Arg{"booster", "dart"}, Arg{"tree_method", "hist"},
|
||||
Arg{"num_feature", n_feat}});
|
||||
gbtree.ConfigureWithKnownData({Arg{"booster", "dart"}, Arg{"tree_method", "hist"},
|
||||
Arg{"num_feature", n_feat}}, p_dmat);
|
||||
ASSERT_EQ(tparam.updater_seq, "grow_quantile_histmaker");
|
||||
#ifdef XGBOOST_USE_CUDA
|
||||
learner_param.InitAllowUnknown(std::vector<Arg>{Arg{"n_gpus", "1"}});
|
||||
gbtree.Configure({Arg("tree_method", "gpu_exact"),
|
||||
Arg("num_feature", n_feat)});
|
||||
generic_param.InitAllowUnknown(std::vector<Arg>{Arg{"n_gpus", "1"}});
|
||||
gbtree.ConfigureWithKnownData({Arg("tree_method", "gpu_exact"),
|
||||
Arg("num_feature", n_feat)}, p_dmat);
|
||||
ASSERT_EQ(tparam.updater_seq, "grow_gpu,prune");
|
||||
ASSERT_EQ(tparam.predictor, "gpu_predictor");
|
||||
gbtree.Configure({Arg("tree_method", "gpu_hist"), Arg("num_feature", n_feat)});
|
||||
gbtree.ConfigureWithKnownData({Arg("tree_method", "gpu_hist"), Arg("num_feature", n_feat)},
|
||||
p_dmat);
|
||||
ASSERT_EQ(tparam.updater_seq, "grow_gpu_hist");
|
||||
ASSERT_EQ(tparam.predictor, "gpu_predictor");
|
||||
gbtree.Configure({Arg{"booster", "dart"}, Arg{"tree_method", "gpu_hist"},
|
||||
Arg{"num_feature", n_feat}});
|
||||
gbtree.ConfigureWithKnownData({Arg{"booster", "dart"}, Arg{"tree_method", "gpu_hist"},
|
||||
Arg{"num_feature", n_feat}}, p_dmat);
|
||||
ASSERT_EQ(tparam.updater_seq, "grow_gpu_hist");
|
||||
#endif
|
||||
|
||||
delete mat_ptr;
|
||||
delete p_shared_ptr_dmat;
|
||||
}
|
||||
} // namespace xgboost
|
||||
|
||||
@@ -189,8 +189,8 @@ std::unique_ptr<DMatrix> CreateSparsePageDMatrixWithRC(size_t n_rows, size_t n_c
|
||||
|
||||
gbm::GBTreeModel CreateTestModel();
|
||||
|
||||
inline LearnerTrainParam CreateEmptyGenericParam(int gpu_id, int n_gpus) {
|
||||
xgboost::LearnerTrainParam tparam;
|
||||
inline GenericParameter CreateEmptyGenericParam(int gpu_id, int n_gpus) {
|
||||
xgboost::GenericParameter tparam;
|
||||
std::vector<std::pair<std::string, std::string>> args {
|
||||
{"gpu_id", std::to_string(gpu_id)},
|
||||
{"n_gpus", std::to_string(n_gpus)}};
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*!
|
||||
* Copyright 2018 by Contributors
|
||||
* Copyright 2018-2019 by Contributors
|
||||
*/
|
||||
#include <xgboost/linear_updater.h>
|
||||
#include "../helpers.h"
|
||||
@@ -11,7 +11,7 @@ TEST(Linear, shotgun) {
|
||||
{
|
||||
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
|
||||
xgboost::LinearUpdater::Create("shotgun", &lparam));
|
||||
updater->Init({{"eta", "1."}});
|
||||
updater->Configure({{"eta", "1."}});
|
||||
xgboost::HostDeviceVector<xgboost::GradientPair> gpair(
|
||||
(*mat)->Info().num_row_, xgboost::GradientPair(-5, 1.0));
|
||||
xgboost::gbm::GBLinearModel model;
|
||||
@@ -26,7 +26,7 @@ TEST(Linear, shotgun) {
|
||||
{
|
||||
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
|
||||
xgboost::LinearUpdater::Create("shotgun", &lparam));
|
||||
EXPECT_ANY_THROW(updater->Init({{"feature_selector", "random"}}));
|
||||
EXPECT_ANY_THROW(updater->Configure({{"feature_selector", "random"}}));
|
||||
}
|
||||
delete mat;
|
||||
}
|
||||
@@ -36,7 +36,7 @@ TEST(Linear, coordinate) {
|
||||
auto lparam = xgboost::CreateEmptyGenericParam(0, 0);
|
||||
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
|
||||
xgboost::LinearUpdater::Create("coord_descent", &lparam));
|
||||
updater->Init({{"eta", "1."}});
|
||||
updater->Configure({{"eta", "1."}});
|
||||
xgboost::HostDeviceVector<xgboost::GradientPair> gpair(
|
||||
(*mat)->Info().num_row_, xgboost::GradientPair(-5, 1.0));
|
||||
xgboost::gbm::GBLinearModel model;
|
||||
|
||||
@@ -11,7 +11,7 @@ TEST(Linear, GPUCoordinate) {
|
||||
lparam.n_gpus = 1;
|
||||
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
|
||||
xgboost::LinearUpdater::Create("gpu_coord_descent", &lparam));
|
||||
updater->Init({{"eta", "1."}});
|
||||
updater->Configure({{"eta", "1."}});
|
||||
xgboost::HostDeviceVector<xgboost::GradientPair> gpair(
|
||||
(*mat)->Info().num_row_, xgboost::GradientPair(-5, 1.0));
|
||||
xgboost::gbm::GBLinearModel model;
|
||||
@@ -33,7 +33,7 @@ TEST(Linear, MGPU_GPUCoordinate) {
|
||||
lparam.n_gpus = -1;
|
||||
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
|
||||
xgboost::LinearUpdater::Create("gpu_coord_descent", &lparam));
|
||||
updater->Init({{"eta", "1."}});
|
||||
updater->Configure({{"eta", "1."}});
|
||||
xgboost::HostDeviceVector<xgboost::GradientPair> gpair(
|
||||
(*mat)->Info().num_row_, xgboost::GradientPair(-5, 1.0));
|
||||
xgboost::gbm::GBLinearModel model;
|
||||
@@ -52,7 +52,7 @@ TEST(Linear, MGPU_GPUCoordinate) {
|
||||
auto mat = xgboost::CreateDMatrix(10, 10, 0);
|
||||
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
|
||||
xgboost::LinearUpdater::Create("gpu_coord_descent", &lparam));
|
||||
updater->Init({{"eta", "1."}});
|
||||
updater->Configure({{"eta", "1."}});
|
||||
xgboost::HostDeviceVector<xgboost::GradientPair> gpair(
|
||||
(*mat)->Info().num_row_, xgboost::GradientPair(-5, 1.0));
|
||||
xgboost::gbm::GBLinearModel model;
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
#include "../helpers.h"
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(HingeObj)) {
|
||||
xgboost::LearnerTrainParam tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("binary:hinge", &tparam);
|
||||
|
||||
xgboost::bst_float eps = std::numeric_limits<xgboost::bst_float>::min();
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
#include "../helpers.h"
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(SoftmaxMultiClassObjGPair)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args {{"num_class", "3"}};
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("multi:softmax", &lparam);
|
||||
|
||||
@@ -47,7 +47,7 @@ TEST(Objective, DeclareUnifiedTest(SoftmaxMultiClassBasic)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(SoftprobMultiClassBasic)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args {
|
||||
std::pair<std::string, std::string>("num_class", "3")};
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
TEST(Objective, UnknownFunction) {
|
||||
xgboost::ObjFunction* obj = nullptr;
|
||||
xgboost::LearnerTrainParam tparam;
|
||||
xgboost::GenericParameter tparam;
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
tparam.InitAllowUnknown(args);
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
#include "../helpers.h"
|
||||
|
||||
TEST(Objective, PairwiseRankingGPair) {
|
||||
xgboost::LearnerTrainParam tparam;
|
||||
xgboost::GenericParameter tparam;
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
tparam.InitAllowUnknown(args);
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
#include "../helpers.h"
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(LinearRegressionGPair)) {
|
||||
xgboost::LearnerTrainParam tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
|
||||
xgboost::ObjFunction * obj =
|
||||
@@ -32,7 +32,7 @@ TEST(Objective, DeclareUnifiedTest(LinearRegressionGPair)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(SquaredLog)) {
|
||||
xgboost::LearnerTrainParam tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
|
||||
xgboost::ObjFunction * obj =
|
||||
@@ -56,7 +56,7 @@ TEST(Objective, DeclareUnifiedTest(SquaredLog)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(LogisticRegressionGPair)) {
|
||||
xgboost::LearnerTrainParam tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter tparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:logistic", &tparam);
|
||||
|
||||
@@ -72,7 +72,7 @@ TEST(Objective, DeclareUnifiedTest(LogisticRegressionGPair)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(LogisticRegressionBasic)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:logistic", &lparam);
|
||||
|
||||
@@ -102,7 +102,7 @@ TEST(Objective, DeclareUnifiedTest(LogisticRegressionBasic)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(LogisticRawGPair)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("binary:logitraw", &lparam);
|
||||
|
||||
@@ -118,7 +118,7 @@ TEST(Objective, DeclareUnifiedTest(LogisticRawGPair)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(PoissonRegressionGPair)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("count:poisson", &lparam);
|
||||
|
||||
@@ -140,7 +140,7 @@ TEST(Objective, DeclareUnifiedTest(PoissonRegressionGPair)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(PoissonRegressionBasic)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("count:poisson", &lparam);
|
||||
|
||||
@@ -168,7 +168,7 @@ TEST(Objective, DeclareUnifiedTest(PoissonRegressionBasic)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(GammaRegressionGPair)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:gamma", &lparam);
|
||||
|
||||
@@ -189,7 +189,7 @@ TEST(Objective, DeclareUnifiedTest(GammaRegressionGPair)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(GammaRegressionBasic)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:gamma", &lparam);
|
||||
|
||||
@@ -217,7 +217,7 @@ TEST(Objective, DeclareUnifiedTest(GammaRegressionBasic)) {
|
||||
}
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(TweedieRegressionGPair)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:tweedie", &lparam);
|
||||
|
||||
@@ -241,7 +241,7 @@ TEST(Objective, DeclareUnifiedTest(TweedieRegressionGPair)) {
|
||||
|
||||
#if defined(__CUDACC__)
|
||||
TEST(Objective, CPU_vs_CUDA) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, 1);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, 1);
|
||||
|
||||
xgboost::ObjFunction * obj =
|
||||
xgboost::ObjFunction::Create("reg:squarederror", &lparam);
|
||||
@@ -294,7 +294,7 @@ TEST(Objective, CPU_vs_CUDA) {
|
||||
#endif
|
||||
|
||||
TEST(Objective, DeclareUnifiedTest(TweedieRegressionBasic)) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, NGPUS);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:tweedie", &lparam);
|
||||
|
||||
@@ -325,7 +325,7 @@ TEST(Objective, DeclareUnifiedTest(TweedieRegressionBasic)) {
|
||||
// CoxRegression not implemented in GPU code, no need for testing.
|
||||
#if !defined(__CUDACC__)
|
||||
TEST(Objective, CoxRegressionGPair) {
|
||||
xgboost::LearnerTrainParam lparam = xgboost::CreateEmptyGenericParam(0, 0);
|
||||
xgboost::GenericParameter lparam = xgboost::CreateEmptyGenericParam(0, 0);
|
||||
std::vector<std::pair<std::string, std::string>> args;
|
||||
xgboost::ObjFunction * obj =
|
||||
xgboost::ObjFunction::Create("survival:cox", &lparam);
|
||||
|
||||
@@ -2,10 +2,12 @@
|
||||
/*!
|
||||
* Copyright 2017-2019 XGBoost contributors
|
||||
*/
|
||||
#include <dmlc/logging.h>
|
||||
#include <dmlc/filesystem.h>
|
||||
#include <xgboost/c_api.h>
|
||||
#include <xgboost/predictor.h>
|
||||
#include <xgboost/logging.h>
|
||||
#include <xgboost/learner.h>
|
||||
|
||||
#include <string>
|
||||
#include "gtest/gtest.h"
|
||||
#include "../helpers.h"
|
||||
@@ -20,8 +22,14 @@ inline void CheckCAPICall(int ret) {
|
||||
} // namespace anonymous
|
||||
#endif
|
||||
|
||||
extern const std::map<std::string, std::string>&
|
||||
QueryBoosterConfigurationArguments(BoosterHandle handle);
|
||||
const std::map<std::string, std::string>&
|
||||
QueryBoosterConfigurationArguments(BoosterHandle handle) {
|
||||
CHECK_NE(handle, static_cast<void*>(nullptr));
|
||||
auto* bst = static_cast<xgboost::Learner*>(handle);
|
||||
bst->Configure();
|
||||
return bst->GetConfigurationArguments();
|
||||
}
|
||||
|
||||
|
||||
namespace xgboost {
|
||||
namespace predictor {
|
||||
@@ -35,8 +43,8 @@ TEST(gpu_predictor, Test) {
|
||||
std::unique_ptr<Predictor> cpu_predictor =
|
||||
std::unique_ptr<Predictor>(Predictor::Create("cpu_predictor", &cpu_lparam));
|
||||
|
||||
gpu_predictor->Init({}, {});
|
||||
cpu_predictor->Init({}, {});
|
||||
gpu_predictor->Configure({}, {});
|
||||
cpu_predictor->Configure({}, {});
|
||||
|
||||
int n_row = 5;
|
||||
int n_col = 5;
|
||||
@@ -56,35 +64,6 @@ TEST(gpu_predictor, Test) {
|
||||
for (int i = 0; i < gpu_out_predictions.Size(); i++) {
|
||||
ASSERT_NEAR(gpu_out_predictions_h[i], cpu_out_predictions_h[i], abs_tolerance);
|
||||
}
|
||||
// Test predict instance
|
||||
const auto &batch = *(*dmat)->GetRowBatches().begin();
|
||||
for (int i = 0; i < batch.Size(); i++) {
|
||||
std::vector<float> gpu_instance_out_predictions;
|
||||
std::vector<float> cpu_instance_out_predictions;
|
||||
cpu_predictor->PredictInstance(batch[i], &cpu_instance_out_predictions,
|
||||
model);
|
||||
gpu_predictor->PredictInstance(batch[i], &gpu_instance_out_predictions,
|
||||
model);
|
||||
ASSERT_EQ(gpu_instance_out_predictions[0], cpu_instance_out_predictions[0]);
|
||||
}
|
||||
|
||||
// Test predict leaf
|
||||
std::vector<float> gpu_leaf_out_predictions;
|
||||
std::vector<float> cpu_leaf_out_predictions;
|
||||
cpu_predictor->PredictLeaf((*dmat).get(), &cpu_leaf_out_predictions, model);
|
||||
gpu_predictor->PredictLeaf((*dmat).get(), &gpu_leaf_out_predictions, model);
|
||||
for (int i = 0; i < gpu_leaf_out_predictions.size(); i++) {
|
||||
ASSERT_EQ(gpu_leaf_out_predictions[i], cpu_leaf_out_predictions[i]);
|
||||
}
|
||||
|
||||
// Test predict contribution
|
||||
std::vector<float> gpu_out_contribution;
|
||||
std::vector<float> cpu_out_contribution;
|
||||
cpu_predictor->PredictContribution((*dmat).get(), &cpu_out_contribution, model);
|
||||
gpu_predictor->PredictContribution((*dmat).get(), &gpu_out_contribution, model);
|
||||
for (int i = 0; i < gpu_out_contribution.size(); i++) {
|
||||
ASSERT_EQ(gpu_out_contribution[i], cpu_out_contribution[i]);
|
||||
}
|
||||
|
||||
delete dmat;
|
||||
}
|
||||
@@ -93,7 +72,7 @@ TEST(gpu_predictor, ExternalMemoryTest) {
|
||||
auto lparam = CreateEmptyGenericParam(0, 1);
|
||||
std::unique_ptr<Predictor> gpu_predictor =
|
||||
std::unique_ptr<Predictor>(Predictor::Create("gpu_predictor", &lparam));
|
||||
gpu_predictor->Init({}, {});
|
||||
gpu_predictor->Configure({}, {});
|
||||
gbm::GBTreeModel model = CreateTestModel();
|
||||
int n_col = 3;
|
||||
model.param.num_feature = n_col;
|
||||
@@ -108,38 +87,6 @@ TEST(gpu_predictor, ExternalMemoryTest) {
|
||||
for (const auto& v : out_predictions.HostVector()) {
|
||||
ASSERT_EQ(v, 1.5);
|
||||
}
|
||||
|
||||
// Test predict leaf
|
||||
std::vector<float> leaf_out_predictions;
|
||||
gpu_predictor->PredictLeaf(dmat.get(), &leaf_out_predictions, model);
|
||||
EXPECT_EQ(leaf_out_predictions.size(), dmat->Info().num_row_);
|
||||
for (const auto& v : leaf_out_predictions) {
|
||||
ASSERT_EQ(v, 0);
|
||||
}
|
||||
|
||||
// Test predict contribution
|
||||
std::vector<float> out_contribution;
|
||||
gpu_predictor->PredictContribution(dmat.get(), &out_contribution, model);
|
||||
EXPECT_EQ(out_contribution.size(), dmat->Info().num_row_ * (n_col + 1));
|
||||
for (int i = 0; i < out_contribution.size(); i++) {
|
||||
if (i % (n_col + 1) == n_col) {
|
||||
ASSERT_EQ(out_contribution[i], 1.5);
|
||||
} else {
|
||||
ASSERT_EQ(out_contribution[i], 0);
|
||||
}
|
||||
}
|
||||
|
||||
// Test predict contribution (approximate method)
|
||||
std::vector<float> out_contribution_approximate;
|
||||
gpu_predictor->PredictContribution(dmat.get(), &out_contribution_approximate, model, true);
|
||||
EXPECT_EQ(out_contribution.size(), dmat->Info().num_row_ * (n_col + 1));
|
||||
for (int i = 0; i < out_contribution.size(); i++) {
|
||||
if (i % (n_col + 1) == n_col) {
|
||||
ASSERT_EQ(out_contribution[i], 1.5);
|
||||
} else {
|
||||
ASSERT_EQ(out_contribution[i], 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#if defined(XGBOOST_USE_NCCL)
|
||||
@@ -231,7 +178,7 @@ TEST(gpu_predictor, MGPU_Test) {
|
||||
std::unique_ptr<Predictor> cpu_predictor =
|
||||
std::unique_ptr<Predictor>(Predictor::Create("cpu_predictor", &cpu_lparam));
|
||||
|
||||
cpu_predictor->Init({}, {});
|
||||
cpu_predictor->Configure({}, {});
|
||||
|
||||
for (size_t i = 1; i < 33; i *= 2) {
|
||||
int n_row = i, n_col = i;
|
||||
@@ -263,7 +210,7 @@ TEST(gpu_predictor, MGPU_ExternalMemoryTest) {
|
||||
|
||||
std::unique_ptr<Predictor> gpu_predictor =
|
||||
std::unique_ptr<Predictor>(Predictor::Create("gpu_predictor", &gpu_lparam));
|
||||
gpu_predictor->Init({}, {});
|
||||
gpu_predictor->Configure({}, {});
|
||||
|
||||
gbm::GBTreeModel model = CreateTestModel();
|
||||
model.param.num_feature = 3;
|
||||
|
||||
@@ -14,7 +14,7 @@ TEST(Learner, Basic) {
|
||||
auto mat_ptr = CreateDMatrix(10, 10, 0);
|
||||
std::vector<std::shared_ptr<xgboost::DMatrix>> mat = {*mat_ptr};
|
||||
auto learner = std::unique_ptr<Learner>(Learner::Create(mat));
|
||||
learner->Configure(args);
|
||||
learner->SetParams(args);
|
||||
|
||||
delete mat_ptr;
|
||||
}
|
||||
@@ -46,9 +46,7 @@ TEST(Learner, CheckGroup) {
|
||||
|
||||
std::vector<std::shared_ptr<xgboost::DMatrix>> mat = {p_mat};
|
||||
auto learner = std::unique_ptr<Learner>(Learner::Create(mat));
|
||||
learner->Configure({Arg{"objective", "rank:pairwise"}});
|
||||
learner->InitModel();
|
||||
|
||||
learner->SetParams({Arg{"objective", "rank:pairwise"}});
|
||||
EXPECT_NO_THROW(learner->UpdateOneIter(0, p_mat.get()));
|
||||
|
||||
group.resize(kNumGroups+1);
|
||||
@@ -77,11 +75,34 @@ TEST(Learner, SLOW_CheckMultiBatch) {
|
||||
dmat->Info().SetInfo("label", labels.data(), DataType::kFloat32, num_row);
|
||||
std::vector<std::shared_ptr<DMatrix>> mat{dmat};
|
||||
auto learner = std::unique_ptr<Learner>(Learner::Create(mat));
|
||||
learner->Configure({Arg{"objective", "binary:logistic"}});
|
||||
learner->InitModel();
|
||||
learner->SetParams({Arg{"objective", "binary:logistic"}, Arg{"verbosity", "3"}});
|
||||
learner->UpdateOneIter(0, dmat.get());
|
||||
}
|
||||
|
||||
TEST(Learner, Configuration) {
|
||||
std::string const emetric = "eval_metric";
|
||||
{
|
||||
std::unique_ptr<Learner> learner { Learner::Create({nullptr}) };
|
||||
learner->SetParam(emetric, "auc");
|
||||
learner->SetParam(emetric, "rmsle");
|
||||
learner->SetParam("foo", "bar");
|
||||
|
||||
// eval_metric is not part of configuration
|
||||
auto attr_names = learner->GetConfigurationArguments();
|
||||
ASSERT_EQ(attr_names.size(), 1);
|
||||
ASSERT_EQ(attr_names.find(emetric), attr_names.cend());
|
||||
ASSERT_EQ(attr_names.at("foo"), "bar");
|
||||
}
|
||||
|
||||
{
|
||||
std::unique_ptr<Learner> learner { Learner::Create({nullptr}) };
|
||||
learner->SetParams({{"foo", "bar"}, {emetric, "auc"}, {emetric, "entropy"}, {emetric, "KL"}});
|
||||
auto attr_names = learner->GetConfigurationArguments();
|
||||
ASSERT_EQ(attr_names.size(), 1);
|
||||
ASSERT_EQ(attr_names.at("foo"), "bar");
|
||||
}
|
||||
}
|
||||
|
||||
#if defined(XGBOOST_USE_CUDA)
|
||||
|
||||
TEST(Learner, IO) {
|
||||
@@ -98,13 +119,12 @@ TEST(Learner, IO) {
|
||||
std::vector<std::shared_ptr<DMatrix>> mat {p_dmat};
|
||||
|
||||
std::unique_ptr<Learner> learner {Learner::Create(mat)};
|
||||
learner->Configure({Arg{"tree_method", "auto"},
|
||||
learner->SetParams({Arg{"tree_method", "auto"},
|
||||
Arg{"predictor", "gpu_predictor"},
|
||||
Arg{"n_gpus", "-1"}});
|
||||
learner->InitModel();
|
||||
learner->UpdateOneIter(0, p_dmat.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, -1);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, -1);
|
||||
|
||||
dmlc::TemporaryDirectory tempdir;
|
||||
const std::string fname = tempdir.path + "/model.bst";
|
||||
@@ -117,8 +137,8 @@ TEST(Learner, IO) {
|
||||
|
||||
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(fname.c_str(), "r"));
|
||||
learner->Load(fi.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, 0);
|
||||
|
||||
delete pp_dmat;
|
||||
}
|
||||
@@ -137,59 +157,53 @@ TEST(Learner, GPUConfiguration) {
|
||||
p_dmat->Info().labels_.HostVector() = labels;
|
||||
{
|
||||
std::unique_ptr<Learner> learner {Learner::Create(mat)};
|
||||
learner->Configure({Arg{"booster", "gblinear"},
|
||||
learner->SetParams({Arg{"booster", "gblinear"},
|
||||
Arg{"updater", "gpu_coord_descent"}});
|
||||
learner->InitModel();
|
||||
learner->UpdateOneIter(0, p_dmat.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, 1);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, 1);
|
||||
}
|
||||
{
|
||||
std::unique_ptr<Learner> learner {Learner::Create(mat)};
|
||||
learner->Configure({Arg{"tree_method", "gpu_exact"}});
|
||||
learner->InitModel();
|
||||
learner->SetParams({Arg{"tree_method", "gpu_exact"}});
|
||||
learner->UpdateOneIter(0, p_dmat.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, 1);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, 1);
|
||||
}
|
||||
{
|
||||
std::unique_ptr<Learner> learner {Learner::Create(mat)};
|
||||
learner->Configure({Arg{"tree_method", "gpu_hist"}});
|
||||
learner->InitModel();
|
||||
learner->SetParams({Arg{"tree_method", "gpu_hist"}});
|
||||
learner->UpdateOneIter(0, p_dmat.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, 1);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, 1);
|
||||
}
|
||||
{
|
||||
// with CPU algorithm
|
||||
std::unique_ptr<Learner> learner {Learner::Create(mat)};
|
||||
learner->Configure({Arg{"tree_method", "hist"}});
|
||||
learner->InitModel();
|
||||
learner->SetParams({Arg{"tree_method", "hist"}});
|
||||
learner->UpdateOneIter(0, p_dmat.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, 0);
|
||||
}
|
||||
{
|
||||
// with CPU algorithm, but `n_gpus` takes priority
|
||||
std::unique_ptr<Learner> learner {Learner::Create(mat)};
|
||||
learner->Configure({Arg{"tree_method", "hist"},
|
||||
learner->SetParams({Arg{"tree_method", "hist"},
|
||||
Arg{"n_gpus", "1"}});
|
||||
learner->InitModel();
|
||||
learner->UpdateOneIter(0, p_dmat.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, 1);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, 1);
|
||||
}
|
||||
{
|
||||
// With CPU algorithm but GPU Predictor, this is to simulate when
|
||||
// XGBoost is only used for prediction, so tree method is not
|
||||
// specified.
|
||||
std::unique_ptr<Learner> learner {Learner::Create(mat)};
|
||||
learner->Configure({Arg{"tree_method", "hist"},
|
||||
learner->SetParams({Arg{"tree_method", "hist"},
|
||||
Arg{"predictor", "gpu_predictor"}});
|
||||
learner->InitModel();
|
||||
learner->UpdateOneIter(0, p_dmat.get());
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetLearnerTrainParameter().n_gpus, 1);
|
||||
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
|
||||
ASSERT_EQ(learner->GetGenericParameter().n_gpus, 1);
|
||||
}
|
||||
|
||||
delete pp_dmat;
|
||||
|
||||
@@ -10,14 +10,14 @@ TEST(Logging, Basic) {
|
||||
std::string output;
|
||||
|
||||
args["verbosity"] = "0"; // silent
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure({args.cbegin(), args.cend()});
|
||||
testing::internal::CaptureStderr();
|
||||
LOG(DEBUG) << "Test silent.";
|
||||
output = testing::internal::GetCapturedStderr();
|
||||
ASSERT_EQ(output.length(), 0);
|
||||
|
||||
args["verbosity"] = "3"; // debug
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure({args.cbegin(), args.cend()});
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
LOG(WARNING) << "Test Log Warning.";
|
||||
@@ -35,14 +35,14 @@ TEST(Logging, Basic) {
|
||||
ASSERT_NE(output.find("DEBUG"), std::string::npos);
|
||||
|
||||
args["verbosity"] = "1"; // warning
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure({args.cbegin(), args.cend()});
|
||||
testing::internal::CaptureStderr();
|
||||
LOG(INFO) << "INFO should not be displayed when set to warning.";
|
||||
output = testing::internal::GetCapturedStderr();
|
||||
ASSERT_EQ(output.size(), 0);
|
||||
|
||||
args["silent"] = "True";
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure({args.cbegin(), args.cend()});
|
||||
testing::internal::CaptureStderr();
|
||||
LOG(INFO) << "Test silent parameter.";
|
||||
output = testing::internal::GetCapturedStderr();
|
||||
@@ -54,7 +54,7 @@ TEST(Logging, Basic) {
|
||||
ASSERT_NE(output.find("Test Log Console"), std::string::npos);
|
||||
|
||||
args["verbosity"] = "1"; // restore
|
||||
ConsoleLogger::Configure(args.cbegin(), args.cend());
|
||||
ConsoleLogger::Configure({args.cbegin(), args.cend()});
|
||||
}
|
||||
|
||||
} // namespace xgboost
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
// Copyright by Contributors
|
||||
#include <gtest/gtest.h>
|
||||
#include <xgboost/base.h>
|
||||
#include <xgboost/logging.h>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
int main(int argc, char ** argv) {
|
||||
std::vector<std::pair<std::string, std::string>> args {{"verbosity", "2"}};
|
||||
xgboost::ConsoleLogger::Configure(args.begin(), args.end());
|
||||
xgboost::Args args {{"verbosity", "2"}};
|
||||
xgboost::ConsoleLogger::Configure(args);
|
||||
testing::InitGoogleTest(&argc, argv);
|
||||
testing::FLAGS_gtest_death_test_style = "threadsafe";
|
||||
return RUN_ALL_TESTS();
|
||||
|
||||
@@ -16,7 +16,7 @@ TEST(GPUExact, Update) {
|
||||
std::vector<Arg> args{{"max_depth", "1"}};
|
||||
|
||||
auto* p_gpuexact_maker = TreeUpdater::Create("grow_gpu", &lparam);
|
||||
p_gpuexact_maker->Init(args);
|
||||
p_gpuexact_maker->Configure(args);
|
||||
|
||||
size_t constexpr kNRows = 4;
|
||||
size_t constexpr kNCols = 8;
|
||||
|
||||
@@ -384,10 +384,11 @@ void TestHistogramIndexImpl(int n_gpus) {
|
||||
{"max_leaves", "0"}
|
||||
};
|
||||
|
||||
LearnerTrainParam learner_param(CreateEmptyGenericParam(0, n_gpus));
|
||||
hist_maker.Init(training_params, &learner_param);
|
||||
GenericParameter generic_param(CreateEmptyGenericParam(0, n_gpus));
|
||||
hist_maker.Configure(training_params, &generic_param);
|
||||
|
||||
hist_maker.InitDataOnce(hist_maker_dmat.get());
|
||||
hist_maker_ext.Init(training_params, &learner_param);
|
||||
hist_maker_ext.Configure(training_params, &generic_param);
|
||||
hist_maker_ext.InitDataOnce(hist_maker_ext_dmat.get());
|
||||
|
||||
ASSERT_EQ(hist_maker.shards_.size(), hist_maker_ext.shards_.size());
|
||||
|
||||
@@ -37,7 +37,7 @@ TEST(Updater, Prune) {
|
||||
std::vector<RegTree*> trees {&tree};
|
||||
// prepare pruner
|
||||
std::unique_ptr<TreeUpdater> pruner(TreeUpdater::Create("prune", &lparam));
|
||||
pruner->Init(cfg);
|
||||
pruner->Configure(cfg);
|
||||
|
||||
// loss_chg < min_split_loss;
|
||||
tree.ExpandNode(0, 0, 0, true, 0.0f, 0.3f, 0.4f, 0.0f, 0.0f);
|
||||
|
||||
@@ -236,7 +236,7 @@ class QuantileHistMock : public QuantileHistMaker {
|
||||
explicit QuantileHistMock(
|
||||
const std::vector<std::pair<std::string, std::string> >& args) :
|
||||
cfg_{args} {
|
||||
QuantileHistMaker::Init(args);
|
||||
QuantileHistMaker::Configure(args);
|
||||
builder_.reset(
|
||||
new BuilderMock(
|
||||
param_,
|
||||
|
||||
@@ -37,7 +37,7 @@ TEST(Updater, Refresh) {
|
||||
tree.Stat(cleft).base_weight = 1.2;
|
||||
tree.Stat(cright).base_weight = 1.3;
|
||||
|
||||
refresher->Init(cfg);
|
||||
refresher->Configure(cfg);
|
||||
refresher->Update(&gpair, dmat->get(), trees);
|
||||
|
||||
bst_float constexpr kEps = 1e-6;
|
||||
|
||||
@@ -32,7 +32,7 @@ TEST(SplitEvaluator, Interaction) {
|
||||
}
|
||||
std::vector<int32_t> solutions{4, 5};
|
||||
ASSERT_EQ(accepted_features.size(), solutions.size());
|
||||
for (int32_t f = 0; f < accepted_features.size(); ++f) {
|
||||
for (size_t f = 0; f < accepted_features.size(); ++f) {
|
||||
ASSERT_EQ(accepted_features[f], solutions[f]);
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user