[backport] Fix save_best. (#6523)
This commit is contained in:
parent
8be2cd8c91
commit
bce7ca313c
@ -456,6 +456,7 @@ class LearningRateScheduler(TrainingCallback):
|
||||
|
||||
def after_iteration(self, model, epoch, evals_log):
    """Apply the scheduled learning rate for the upcoming boosting round.

    The rate is obtained by evaluating ``self.learning_rates`` at the
    current ``epoch`` and pushed into the booster via ``set_param``.

    Returns
    -------
    bool
        Always ``False`` — this callback never requests early stopping.
    """
    next_rate = self.learning_rates(epoch)
    model.set_param('learning_rate', next_rate)
    return False
|
||||
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
@ -565,7 +566,7 @@ class EarlyStopping(TrainingCallback):
|
||||
def after_training(self, model: Booster):
    """Finalize the booster after training, honoring ``save_best``.

    Parameters
    ----------
    model :
        The booster produced by training.

    Returns
    -------
    Booster
        The booster unchanged, or — when ``save_best`` is enabled — a
        slice containing rounds ``[0, best_iteration]`` inclusive.

    Raises
    ------
    XGBoostError
        If the booster does not support slicing, in which case
        ``save_best`` is not applicable.
    """
    try:
        if self.save_best:
            # ``best_iteration`` is zero-based and inclusive while a
            # Python slice end is exclusive, so add 1 — otherwise the
            # best round itself would be dropped from the saved model.
            model = model[: int(model.attr('best_iteration')) + 1]
    except XGBoostError as e:
        raise XGBoostError('`save_best` is not applicable to current booster') from e
    return model
|
||||
@ -677,6 +678,7 @@ class TrainingCheckPoint(TrainingCallback):
|
||||
else:
|
||||
model.save_model(path)
|
||||
self._epoch += 1
|
||||
return False
|
||||
|
||||
|
||||
class LegacyCallbacks:
|
||||
|
||||
@ -148,7 +148,7 @@ class TestCallbacks:
|
||||
eval_metric=tm.eval_error_metric, callbacks=[early_stop])
|
||||
booster = cls.get_booster()
|
||||
dump = booster.get_dump(dump_format='json')
|
||||
assert len(dump) == booster.best_iteration
|
||||
assert len(dump) == booster.best_iteration + 1
|
||||
|
||||
early_stop = xgb.callback.EarlyStopping(rounds=early_stopping_rounds,
|
||||
save_best=True)
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user