* Fix #3397: early_stop callback does not maximize metric of form NDCG@n-

  The early stopping callback splits the evaluation metric label on every '-' character, which interferes with metrics of the form NDCG@n-: the trailing '-' leaves an empty string after the last split, so the metric is not recognized as one to maximize and XGBoost tries to minimize NDCG@n- when it should be maximized instead. Fix: specify maxsplit=1 so only the evaluation-set prefix is stripped.

* Python 2.x compatibility fix
This commit is contained in:
parent f606cb8ef4
commit 3564b68b98
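
For reference, a minimal sketch of the parsing difference the fix addresses; the label 'eval-ndcg@2-' and the variable names are illustrative assumptions, not taken from this commit:

# Illustrative only: a metric label of the form "<eval set>-<metric>",
# here assumed to be 'eval-ndcg@2-' (a metric name that itself ends in '-').
metric_label = 'eval-ndcg@2-'

# Old behaviour: splitting on every '-' keeps only the piece after the
# *last* dash, which is empty when the metric name ends in '-'.
old = metric_label.split('-')[-1]        # ''

# Fixed behaviour: maxsplit=1 (positional, for Python 2.x compatibility)
# strips only the eval-set prefix and keeps the full metric name.
new = metric_label.split('-', 1)[-1]     # 'ndcg@2-'

maximize_at_n_metrics = ('auc@', 'map@', 'ndcg@')
print(any(old.startswith(x) for x in maximize_at_n_metrics))   # False -> metric would be minimized
print(any(new.startswith(x) for x in maximize_at_n_metrics))   # True  -> metric is maximized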
@@ -191,19 +191,18 @@ def early_stop(stopping_rounds, maximize=False, verbose=True):
         maximize_metrics = ('auc', 'map', 'ndcg')
         maximize_at_n_metrics = ('auc@', 'map@', 'ndcg@')
         maximize_score = maximize
-        metric = env.evaluation_result_list[-1][0]
+        metric_label = env.evaluation_result_list[-1][0]
+        metric = metric_label.split('-', 1)[-1]

-        if any(env.evaluation_result_list[-1][0].split('-')[-1].startswith(x)
-               for x in maximize_at_n_metrics):
+        if any(metric.startswith(x) for x in maximize_at_n_metrics):
             maximize_score = True

-        if any(env.evaluation_result_list[-1][0].split('-')[-1].split(":")[0] == x
-               for x in maximize_metrics):
+        if any(metric.split(":")[0] == x for x in maximize_metrics):
             maximize_score = True

         if verbose and env.rank == 0:
             msg = "Will train until {} hasn't improved in {} rounds.\n"
-            rabit.tracker_print(msg.format(metric, stopping_rounds))
+            rabit.tracker_print(msg.format(metric_label, stopping_rounds))

         state['maximize_score'] = maximize_score
         state['best_iteration'] = 0
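
As a usage sketch (not part of this commit), a hedged example of attaching the fixed callback to a ranking job; the synthetic data, group sizes, and parameter values are assumptions for illustration:

import numpy as np
import xgboost as xgb

# Tiny synthetic learning-to-rank setup; shapes, labels and group sizes
# are arbitrary and only serve to exercise the callback.
X = np.random.rand(20, 3)
y = np.random.randint(0, 3, size=20)
dtrain = xgb.DMatrix(X, label=y)
dtrain.set_group([10, 10])              # two query groups of 10 documents
deval = xgb.DMatrix(X, label=y)
deval.set_group([10, 10])

params = {'objective': 'rank:pairwise', 'eval_metric': 'ndcg@2-'}

# With the fix, the label 'eval-ndcg@2-' is recognised as a metric to
# maximize, so early stopping keeps the iteration with the highest NDCG
# instead of treating a drop as an improvement.
bst = xgb.train(params, dtrain,
                num_boost_round=50,
                evals=[(deval, 'eval')],
                callbacks=[xgb.callback.early_stop(stopping_rounds=10)])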