Merge pull request #611 from antonymayi/master
python 2.6 compatibility
Commit: 7491413de5
@@ -50,9 +50,10 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
         If `verbose_eval` then the evaluation metric on the validation set, if
         given, is printed at each boosting stage.
     learning_rates: list or function
         List of learning rate for each boosting round
         or a customized function that calculates eta in terms of
-        current number of round and the total number of boosting round (e.g. yields learning rate decay)
+        current number of round and the total number of boosting round (e.g. yields
+        learning rate decay)
         - list l: eta = l[boosting round]
         - function f: eta = f(boosting round, num_boost_round)
     xgb_model : file name of stored xgb model or 'Booster' instance
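For context, the `learning_rates` argument documented in this hunk accepts either a per-round list of eta values or a callable of the current round and the total number of rounds. A minimal usage sketch, assuming a binary-classification setup with hypothetical data files `train.libsvm` and `val.libsvm` (file names and parameter values are illustrative, not from this PR):

import xgboost as xgb

# Hypothetical inputs; any pair of DMatrix objects works the same way.
dtrain = xgb.DMatrix('train.libsvm')
dval = xgb.DMatrix('val.libsvm')
params = {'objective': 'binary:logistic', 'eta': 0.3}

# List form: eta = l[boosting round], one entry per round.
bst = xgb.train(params, dtrain, num_boost_round=3,
                evals=[(dval, 'val')],
                learning_rates=[0.3, 0.2, 0.1])

# Function form: eta = f(boosting round, num_boost_round),
# here a simple linear decay over the training run.
bst = xgb.train(params, dtrain, num_boost_round=10,
                evals=[(dval, 'val')],
                learning_rates=lambda i, n: 0.3 * (1.0 - float(i) / n))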
@@ -89,7 +90,7 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
     else:
         evals_name = [d[1] for d in evals]
     evals_result.clear()
-    evals_result.update({key: {} for key in evals_name})
+    evals_result.update(dict([(key, {}) for key in evals_name]))

     if not early_stopping_rounds:
         for i in range(num_boost_round):
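The actual Python 2.6 fix is in this hunk: dict comprehensions ({k: v for ...}) were only added in Python 2.7, so the old line is a SyntaxError under 2.6, while dict() over a list of (key, value) tuples parses on 2.6 and builds the same mapping. A small self-contained illustration (the evals_name values here are made up for the example):

# Python 2.7+/3 form used before this PR (SyntaxError on 2.6):
#     evals_result = {key: {} for key in evals_name}
# Python 2.6-compatible form introduced by this PR:
evals_name = ['train', 'val']  # illustrative evaluation-set names
evals_result = dict([(key, {}) for key in evals_name])

assert evals_result == {'train': {}, 'val': {}}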