Removed outdated configuration serialization logic. (#8942)

- `saved_params` is always empty, so the save-time loop that emits `SAVED_PARAM_` attributes is dead code.
- `saved_configs_` contains only `num_round`, which is not used anywhere inside XGBoost.
This commit is contained in:
Jiaming Yuan 2023-03-23 01:31:46 +08:00 committed by GitHub
parent 151882dd26
commit 15a2724ff7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -882,7 +882,6 @@ std::string const LearnerConfiguration::kEvalMetric {"eval_metric"}; // NOLINT
class LearnerIO : public LearnerConfiguration { class LearnerIO : public LearnerConfiguration {
private: private:
std::set<std::string> saved_configs_ = {"num_round"};
// Used to identify the offset of JSON string when // Used to identify the offset of JSON string when
// Will be removed once JSON takes over. Right now we still load some RDS files from R. // Will be removed once JSON takes over. Right now we still load some RDS files from R.
std::string const serialisation_header_ { u8"CONFIG-offset:" }; std::string const serialisation_header_ { u8"CONFIG-offset:" };
@ -1035,21 +1034,11 @@ class LearnerIO : public LearnerConfiguration {
CHECK(fi->Read(&tparam_.booster)) << "BoostLearner: wrong model format"; CHECK(fi->Read(&tparam_.booster)) << "BoostLearner: wrong model format";
obj_.reset(ObjFunction::Create(tparam_.objective, &ctx_)); obj_.reset(ObjFunction::Create(tparam_.objective, &ctx_));
gbm_.reset(GradientBooster::Create(tparam_.booster, &ctx_, gbm_.reset(GradientBooster::Create(tparam_.booster, &ctx_, &learner_model_param_));
&learner_model_param_));
gbm_->Load(fi); gbm_->Load(fi);
if (mparam_.contain_extra_attrs != 0) { if (mparam_.contain_extra_attrs != 0) {
std::vector<std::pair<std::string, std::string> > attr; std::vector<std::pair<std::string, std::string> > attr;
fi->Read(&attr); fi->Read(&attr);
for (auto& kv : attr) {
const std::string prefix = "SAVED_PARAM_";
if (kv.first.find(prefix) == 0) {
const std::string saved_param = kv.first.substr(prefix.length());
if (saved_configs_.find(saved_param) != saved_configs_.end()) {
cfg_[saved_param] = kv.second;
}
}
}
attributes_ = std::map<std::string, std::string>(attr.begin(), attr.end()); attributes_ = std::map<std::string, std::string>(attr.begin(), attr.end());
} }
bool warn_old_model { false }; bool warn_old_model { false };
@ -1132,16 +1121,6 @@ class LearnerIO : public LearnerConfiguration {
std::vector<std::pair<std::string, std::string> > extra_attr; std::vector<std::pair<std::string, std::string> > extra_attr;
mparam.contain_extra_attrs = 1; mparam.contain_extra_attrs = 1;
{
std::vector<std::string> saved_params;
for (const auto& key : saved_params) {
auto it = cfg_.find(key);
if (it != cfg_.end()) {
mparam.contain_extra_attrs = 1;
extra_attr.emplace_back("SAVED_PARAM_" + key, it->second);
}
}
}
{ {
// Similar to JSON model IO, we save the objective. // Similar to JSON model IO, we save the objective.
Json j_obj { Object() }; Json j_obj { Object() };