diff --git a/demo/guide-python/boost_from_prediction.py b/demo/guide-python/boost_from_prediction.py
index 4870fc49c..dfb7d2ff9 100755
--- a/demo/guide-python/boost_from_prediction.py
+++ b/demo/guide-python/boost_from_prediction.py
@@ -4,21 +4,21 @@ import xgboost as xgb
 dtrain = xgb.DMatrix('../data/agaricus.txt.train')
 dtest = xgb.DMatrix('../data/agaricus.txt.test')
-watchlist = [(dtest,'eval'), (dtrain,'train')]
+watchlist = [(dtest, 'eval'), (dtrain, 'train')]
 ###
 # advanced: start from a initial base prediction
 #
 print ('start running example to start from a initial prediction')
 # specify parameters via map, definition are same as c++ version
-param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic' }
+param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
 # train xgboost for 1 round
-bst = xgb.train( param, dtrain, 1, watchlist )
+bst = xgb.train(param, dtrain, 1, watchlist)
 # Note: we need the margin value instead of transformed prediction in set_base_margin
 # do predict with output_margin=True, will always give you margin values before logistic transformation
 ptrain = bst.predict(dtrain, output_margin=True)
-ptest  = bst.predict(dtest, output_margin=True)
+ptest = bst.predict(dtest, output_margin=True)
 dtrain.set_base_margin(ptrain)
 dtest.set_base_margin(ptest)
 print ('this is result of running from initial prediction')
-bst = xgb.train( param, dtrain, 1, watchlist )
+bst = xgb.train(param, dtrain, 1, watchlist)
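
For context, a minimal sketch (separate from the patch, and assuming the agaricus data files shipped with the xgboost repo, as in the demo) of why the demo feeds raw margins rather than transformed predictions into set_base_margin: continuing boosting from a one-round model's margin should closely reproduce a model trained for two rounds in a single call.

# Illustration only; assumes the relative agaricus paths used by the demo and
# deterministic tree construction with default settings.
import numpy as np
import xgboost as xgb

dtrain = xgb.DMatrix('../data/agaricus.txt.train')
dtest = xgb.DMatrix('../data/agaricus.txt.test')
param = {'max_depth': 2, 'eta': 1, 'objective': 'binary:logistic'}

# Reference: two boosting rounds in a single call.
bst_full = xgb.train(param, dtrain, num_boost_round=2)
p_full = bst_full.predict(dtest)  # predict before any base margin is attached

# Staged: one round, then one more round starting from the first round's margin.
bst_stage1 = xgb.train(param, dtrain, num_boost_round=1)
dtrain.set_base_margin(bst_stage1.predict(dtrain, output_margin=True))
dtest.set_base_margin(bst_stage1.predict(dtest, output_margin=True))
bst_stage2 = xgb.train(param, dtrain, num_boost_round=1)
p_staged = bst_stage2.predict(dtest)  # uses dtest's base margin plus the new tree

# With identical parameters the two prediction vectors should agree closely;
# transformed probabilities (i.e. without output_margin=True) would not work
# as a base margin because boosting accumulates on the margin scale.
print('max abs difference:', np.max(np.abs(p_full - p_staged)))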