Fix learning rate scheduler with cv. (#6720)

* Expose more methods in cvpack and packed booster.
* Fix cv context in deprecated callbacks.
* Fix document.
This commit is contained in:
Jiaming Yuan
2021-02-28 13:57:42 +08:00
committed by GitHub
parent 9c8523432a
commit a9b4a95225
3 changed files with 46 additions and 20 deletions

View File

@@ -20,6 +20,8 @@ def _get_callback_context(env):
context = 'train'
elif env.model is None and env.cvfolds is not None:
context = 'cv'
else:
raise ValueError("Unexpected input with both model and cvfolds.")
return context
@@ -751,7 +753,7 @@ class LegacyCallbacks:
'''Called before each iteration.'''
for cb in self.callbacks_before_iter:
rank = rabit.get_rank()
cb(CallbackEnv(model=model,
cb(CallbackEnv(model=None if self.cvfolds is not None else model,
cvfolds=self.cvfolds,
iteration=epoch,
begin_iteration=self.start_iteration,
@@ -764,6 +766,7 @@ class LegacyCallbacks:
'''Called after each iteration.'''
evaluation_result_list = []
if self.cvfolds is not None:
# dtrain is not used here.
scores = model.eval(epoch, self.feval)
self.aggregated_cv = _aggcv(scores)
evaluation_result_list = self.aggregated_cv
@@ -782,7 +785,7 @@ class LegacyCallbacks:
try:
for cb in self.callbacks_after_iter:
rank = rabit.get_rank()
cb(CallbackEnv(model=model,
cb(CallbackEnv(model=None if self.cvfolds is not None else model,
cvfolds=self.cvfolds,
iteration=epoch,
begin_iteration=self.start_iteration,

View File

@@ -180,7 +180,7 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
.. code-block:: python
[xgb.callback.reset_learning_rate(custom_rates)]
[xgb.callback.LearningRateScheduler(custom_rates)]
Returns
-------
@@ -207,6 +207,11 @@ class CVPack(object):
self.watchlist = [(dtrain, 'train'), (dtest, 'test')]
self.bst = Booster(param, [dtrain, dtest])
def __getattr__(self, name):
    """Delegate unknown attribute access to the underlying Booster.

    Only invoked when normal attribute lookup on this instance fails.
    Returns a wrapper callable that forwards ``*args``/``**kwargs`` to the
    ``self.bst`` method of the same name.

    NOTE(review): every missing attribute is wrapped as a callable, so
    reading a non-callable Booster attribute through this path would hand
    back a function instead of the value — confirm only method names are
    accessed this way.
    """
    def _inner(*args, **kwargs):
        return getattr(self.bst, name)(*args, **kwargs)
    return _inner
def update(self, iteration, fobj):
""""Update the boosters for one iteration"""
self.bst.update(self.dtrain, iteration, fobj)
@@ -239,15 +244,24 @@ class _PackedBooster:
'''Redirect to booster attr.'''
return self.cvfolds[0].bst.attr(key)
def set_param(self, params, value=None):
    """Broadcast a parameter update to the booster of every CV fold."""
    for pack in self.cvfolds:
        booster = pack.bst
        booster.set_param(params, value)
def num_boosted_rounds(self):
    """Number of boosted rounds, read from the first fold."""
    first_fold = self.cvfolds[0]
    return first_fold.num_boosted_rounds()
@property
def best_iteration(self):
    """Get best_iteration from the first fold's booster, as an int.

    Fix: the rendered block carried both the old two-statement body and
    the new one-line return, making the second ``return`` unreachable
    dead code; keep the single-expression form.
    """
    return int(self.cvfolds[0].bst.attr("best_iteration"))
def num_boosted_rounds(self) -> int:
    """Number of boosted rounds, read from the first fold's booster."""
    leading_booster = self.cvfolds[0].bst
    return leading_booster.num_boosted_rounds()
@property
def best_score(self):
    """Best score recorded on the first fold's booster, as a float."""
    raw = self.cvfolds[0].bst.attr("best_score")
    return float(raw)
def groups_to_rows(groups, boundaries):
@@ -419,7 +433,7 @@ def cv(params, dtrain, num_boost_round=10, nfold=3, stratified=False, folds=None
.. code-block:: python
[xgb.callback.reset_learning_rate(custom_rates)]
[xgb.callback.LearningRateScheduler(custom_rates)]
shuffle : bool
Shuffle data before creating folds.