[backport] Fix save_best. (#6523)

Jiaming Yuan 2020-12-18 20:00:29 +08:00 committed by GitHub
parent 8be2cd8c91
commit bce7ca313c
2 changed files with 4 additions and 2 deletions


@@ -456,6 +456,7 @@ class LearningRateScheduler(TrainingCallback):
 
     def after_iteration(self, model, epoch, evals_log):
         model.set_param('learning_rate', self.learning_rates(epoch))
+        return False
 
 
 # pylint: disable=too-many-instance-attributes
@@ -565,7 +566,7 @@ class EarlyStopping(TrainingCallback):
     def after_training(self, model: Booster):
         try:
             if self.save_best:
-                model = model[: int(model.attr('best_iteration'))]
+                model = model[: int(model.attr('best_iteration')) + 1]
         except XGBoostError as e:
             raise XGBoostError('`save_best` is not applicable to current booster') from e
         return model
@@ -677,6 +678,7 @@ class TrainingCheckPoint(TrainingCallback):
         else:
             model.save_model(path)
         self._epoch += 1
+        return False
 
 
 class LegacyCallbacks:
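
Why the `+ 1` matters: Booster slicing follows Python's end-exclusive convention, and best_iteration is the zero-based index of the best round, so `model[: best_iteration]` dropped the best round itself. A minimal sketch of the fixed behaviour (the toy dataset and parameter choices below are illustrative, not part of this commit):

    import numpy as np
    import xgboost as xgb

    # Toy regression data with a held-out validation split.
    rng = np.random.RandomState(0)
    X, y = rng.randn(200, 5), rng.randn(200)
    dtrain = xgb.DMatrix(X[:150], label=y[:150])
    dvalid = xgb.DMatrix(X[150:], label=y[150:])

    early_stop = xgb.callback.EarlyStopping(rounds=3, save_best=True)
    booster = xgb.train({'objective': 'reg:squarederror'}, dtrain,
                        num_boost_round=100,
                        evals=[(dvalid, 'valid')],
                        callbacks=[early_stop])

    # save_best slices the returned booster down to the best round.
    # best_iteration is zero-based, so the saved model must contain
    # best_iteration + 1 trees (one tree per round for a single-output model).
    assert len(booster.get_dump()) == booster.best_iteration + 1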


@@ -148,7 +148,7 @@ class TestCallbacks:
                          eval_metric=tm.eval_error_metric, callbacks=[early_stop])
         booster = cls.get_booster()
         dump = booster.get_dump(dump_format='json')
-        assert len(dump) == booster.best_iteration
+        assert len(dump) == booster.best_iteration + 1
 
         early_stop = xgb.callback.EarlyStopping(rounds=early_stopping_rounds,
                                                 save_best=True)
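
The updated assertion relies on the same convention: get_dump() returns one string per tree, and a single-output model adds one tree per boosting round, so a booster whose best round has zero-based index best_iteration dumps exactly best_iteration + 1 trees. A standalone illustration of the end-exclusive slice semantics the callback uses (assumed setup, not from this commit):

    import numpy as np
    import xgboost as xgb

    rng = np.random.RandomState(0)
    dtrain = xgb.DMatrix(rng.randn(100, 5), label=rng.randn(100))
    full = xgb.train({'objective': 'reg:squarederror'}, dtrain,
                     num_boost_round=10)

    # Booster slicing is end-exclusive, like list slicing: full[:k]
    # keeps rounds 0..k-1. To keep a zero-based best round of 6
    # inclusive, the upper bound must be 7 -- the off-by-one fixed here.
    assert len(full[:7].get_dump()) == 7
    assert len(full[:6].get_dump()) == 6   # round 6 would be dropped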