Merge pull request #611 from antonymayi/master

Python 2.6 compatibility
This commit is contained in:
Yuan (Terry) Tang 2015-11-09 08:45:26 -06:00
commit 7491413de5

View File

@ -52,7 +52,8 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
learning_rates: list or function
List of learning rate for each boosting round
or a customized function that calculates eta in terms of
current number of round and the total number of boosting round (e.g. yields learning rate decay)
current number of round and the total number of boosting round (e.g. yields
learning rate decay)
- list l: eta = l[boosting round]
- function f: eta = f(boosting round, num_boost_round)
xgb_model : file name of stored xgb model or 'Booster' instance
@ -89,7 +90,7 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
else:
evals_name = [d[1] for d in evals]
evals_result.clear()
evals_result.update({key: {} for key in evals_name})
evals_result.update(dict([(key, {}) for key in evals_name]))
if not early_stopping_rounds:
for i in range(num_boost_round):