Refactor configuration [Part II]. (#4577)
* Refactor configuration [Part II].
* General changes:
** Remove `Init` methods to avoid ambiguity.
** Remove `Configure(std::map<>)` to avoid redundant copying and prepare for
parameter validation. (`std::vector` is returned from `InitAllowUnknown`).
** Add name to tree updaters for easier debugging.
* Learner changes:
** Make `LearnerImpl` the only source of configuration.
All configurations are stored and carried out by `LearnerImpl::Configure()`.
** Remove booster in C API.
Originally kept for "compatibility reasons", but the reason was never stated,
so we simply remove it here.
** Add a `metric_names_` field in `LearnerImpl`.
** Remove `LazyInit`. Configuration will always be lazy.
** Run `Configure` before every iteration.
* Predictor changes:
** Allocate both cpu and gpu predictor.
** Remove cpu_predictor from gpu_predictor.
`GBTree` is now used to dispatch the predictor.
** Remove some GPU Predictor tests.
* IO
No IO changes. The stability of the binary model format is tested by comparing
the hash values of saved models between the two commits.
This commit is contained in:
@@ -16,7 +16,7 @@ TEST(GPUExact, Update) {
|
||||
std::vector<Arg> args{{"max_depth", "1"}};
|
||||
|
||||
auto* p_gpuexact_maker = TreeUpdater::Create("grow_gpu", &lparam);
|
||||
p_gpuexact_maker->Init(args);
|
||||
p_gpuexact_maker->Configure(args);
|
||||
|
||||
size_t constexpr kNRows = 4;
|
||||
size_t constexpr kNCols = 8;
|
||||
|
||||
@@ -384,10 +384,11 @@ void TestHistogramIndexImpl(int n_gpus) {
|
||||
{"max_leaves", "0"}
|
||||
};
|
||||
|
||||
LearnerTrainParam learner_param(CreateEmptyGenericParam(0, n_gpus));
|
||||
hist_maker.Init(training_params, &learner_param);
|
||||
GenericParameter generic_param(CreateEmptyGenericParam(0, n_gpus));
|
||||
hist_maker.Configure(training_params, &generic_param);
|
||||
|
||||
hist_maker.InitDataOnce(hist_maker_dmat.get());
|
||||
hist_maker_ext.Init(training_params, &learner_param);
|
||||
hist_maker_ext.Configure(training_params, &generic_param);
|
||||
hist_maker_ext.InitDataOnce(hist_maker_ext_dmat.get());
|
||||
|
||||
ASSERT_EQ(hist_maker.shards_.size(), hist_maker_ext.shards_.size());
|
||||
|
||||
@@ -37,7 +37,7 @@ TEST(Updater, Prune) {
|
||||
std::vector<RegTree*> trees {&tree};
|
||||
// prepare pruner
|
||||
std::unique_ptr<TreeUpdater> pruner(TreeUpdater::Create("prune", &lparam));
|
||||
pruner->Init(cfg);
|
||||
pruner->Configure(cfg);
|
||||
|
||||
// loss_chg < min_split_loss;
|
||||
tree.ExpandNode(0, 0, 0, true, 0.0f, 0.3f, 0.4f, 0.0f, 0.0f);
|
||||
|
||||
@@ -236,7 +236,7 @@ class QuantileHistMock : public QuantileHistMaker {
|
||||
explicit QuantileHistMock(
|
||||
const std::vector<std::pair<std::string, std::string> >& args) :
|
||||
cfg_{args} {
|
||||
QuantileHistMaker::Init(args);
|
||||
QuantileHistMaker::Configure(args);
|
||||
builder_.reset(
|
||||
new BuilderMock(
|
||||
param_,
|
||||
|
||||
@@ -37,7 +37,7 @@ TEST(Updater, Refresh) {
|
||||
tree.Stat(cleft).base_weight = 1.2;
|
||||
tree.Stat(cright).base_weight = 1.3;
|
||||
|
||||
refresher->Init(cfg);
|
||||
refresher->Configure(cfg);
|
||||
refresher->Update(&gpair, dmat->get(), trees);
|
||||
|
||||
bst_float constexpr kEps = 1e-6;
|
||||
|
||||
@@ -32,7 +32,7 @@ TEST(SplitEvaluator, Interaction) {
|
||||
}
|
||||
std::vector<int32_t> solutions{4, 5};
|
||||
ASSERT_EQ(accepted_features.size(), solutions.size());
|
||||
for (int32_t f = 0; f < accepted_features.size(); ++f) {
|
||||
for (size_t f = 0; f < accepted_features.size(); ++f) {
|
||||
ASSERT_EQ(accepted_features[f], solutions[f]);
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user