Move num_parallel_tree to model parameter. (#7751)

The size of the forest should be a property of the model itself rather than a
training hyper-parameter.
This commit is contained in:
Jiaming Yuan
2022-03-29 02:32:42 +08:00
committed by GitHub
parent 8b3ecfca25
commit 3c9b04460a
11 changed files with 158 additions and 101 deletions

View File

@@ -129,18 +129,16 @@ inline uint32_t GetIterationFromTreeLimit(uint32_t ntree_limit, Learner *learner
Json config{Object()};
learner->SaveConfig(&config);
auto const &booster =
get<String const>(config["learner"]["gradient_booster"]["name"]);
auto const &booster = get<String const>(config["learner"]["gradient_booster"]["name"]);
if (booster == "gblinear") {
num_parallel_tree = 0;
} else if (booster == "dart") {
num_parallel_tree = std::stoi(
get<String const>(config["learner"]["gradient_booster"]["gbtree"]
["gbtree_train_param"]["num_parallel_tree"]));
num_parallel_tree =
std::stoi(get<String const>(config["learner"]["gradient_booster"]["gbtree"]
["gbtree_model_param"]["num_parallel_tree"]));
} else if (booster == "gbtree") {
num_parallel_tree = std::stoi(get<String const>(
(config["learner"]["gradient_booster"]["gbtree_train_param"]
["num_parallel_tree"])));
(config["learner"]["gradient_booster"]["gbtree_model_param"]["num_parallel_tree"])));
} else {
LOG(FATAL) << "Unknown booster:" << booster;
}