From b8bc85b534ce16ee62a09d3b47eab65490817880 Mon Sep 17 00:00:00 2001 From: "Yuan (Terry) Tang" Date: Sun, 8 Nov 2015 21:10:04 -0600 Subject: [PATCH] Clarification for learning_rates --- python-package/xgboost/training.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/python-package/xgboost/training.py b/python-package/xgboost/training.py index d2beba4e4..0ad7e4e14 100644 --- a/python-package/xgboost/training.py +++ b/python-package/xgboost/training.py @@ -50,7 +50,9 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None, If `verbose_eval` then the evaluation metric on the validation set, if given, is printed at each boosting stage. learning_rates: list or function - Learning rate for each boosting round (yields learning rate decay). + List of learning rates for each boosting round, + or a custom function that calculates eta in terms of + the current round number and the total number of boosting rounds (e.g. yields learning rate decay). - list l: eta = l[boosting round] - function f: eta = f(boosting round, num_boost_round) xgb_model : file name of stored xgb model or 'Booster' instance