Minor edits to coding style (#2835)
* Some minor changes to the code style in file basic_walkthrough.py
* coding style changes
* coding style changes according to PEP8
* Update basic_walkthrough.py
committed by Yuan (Terry) Tang
parent d9d5293cdb
commit 91af8f7106
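The edits apply common PEP 8 whitespace rules: no space between a callable and its opening parenthesis (E211), no spaces around `=` in keyword arguments (E251), and a single space around binary operators (E225). A minimal, self-contained sketch of the pattern, not taken from this commit (the `cv` stand-in below is hypothetical):

def cv(data, nfold=5, seed=0):
    # Hypothetical stand-in for xgb.cv, only to keep the example runnable.
    return {'data': data, 'nfold': nfold, 'seed': seed}

# Old style, as in the removed lines:  print ('running');  cv([1, 2], nfold = 5, seed = 0)
# PEP 8 style, as in the added lines:
print('running')
cv([1, 2], nfold=5, seed=0)

preds = 0.25
hess = preds * (1.0 - preds)   # single space around '-', rather than (1.0-preds)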
@@ -7,30 +7,30 @@ dtrain = xgb.DMatrix('../data/agaricus.txt.train')
 param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
 num_round = 2
 
-print ('running cross validation')
+print('running cross validation')
 # do cross validation, this will print result out as
 # [iteration] metric_name:mean_value+std_value
 # std_value is standard deviation of the metric
 xgb.cv(param, dtrain, num_round, nfold=5,
-       metrics={'error'}, seed = 0,
+       metrics={'error'}, seed=0,
        callbacks=[xgb.callback.print_evaluation(show_stdv=True)])
 
-print ('running cross validation, disable standard deviation display')
+print('running cross validation, disable standard deviation display')
 # do cross validation, this will print result out as
 # [iteration] metric_name:mean_value
 res = xgb.cv(param, dtrain, num_boost_round=10, nfold=5,
-             metrics={'error'}, seed = 0,
+             metrics={'error'}, seed=0,
              callbacks=[xgb.callback.print_evaluation(show_stdv=False),
                         xgb.callback.early_stop(3)])
-print (res)
-print ('running cross validation, with preprocessing function')
+print(res)
+print('running cross validation, with preprocessing function')
 # define the preprocessing function
 # used to return the preprocessed training, test data, and parameter
 # we can use this to do weight rescale, etc.
 # as a example, we try to set scale_pos_weight
 def fpreproc(dtrain, dtest, param):
     label = dtrain.get_label()
-    ratio = float(np.sum(label == 0)) / np.sum(label==1)
+    ratio = float(np.sum(label == 0)) / np.sum(label == 1)
     param['scale_pos_weight'] = ratio
     return (dtrain, dtest, param)
 
@@ -39,18 +39,18 @@ def fpreproc(dtrain, dtest, param):
 # then the return value of fpreproc will be used to generate
 # results of that fold
 xgb.cv(param, dtrain, num_round, nfold=5,
-       metrics={'auc'}, seed = 0, fpreproc = fpreproc)
+       metrics={'auc'}, seed=0, fpreproc=fpreproc)
 
 ###
 # you can also do cross validation with cutomized loss function
 # See custom_objective.py
 ##
-print ('running cross validation, with cutomsized loss function')
+print('running cross validation, with cutomsized loss function')
 def logregobj(preds, dtrain):
     labels = dtrain.get_label()
     preds = 1.0 / (1.0 + np.exp(-preds))
     grad = preds - labels
-    hess = preds * (1.0-preds)
+    hess = preds * (1.0 - preds)
     return grad, hess
 def evalerror(preds, dtrain):
     labels = dtrain.get_label()
@@ -58,5 +58,5 @@ def evalerror(preds, dtrain):
 
 param = {'max_depth':2, 'eta':1, 'silent':1}
 # train with customized objective
-xgb.cv(param, dtrain, num_round, nfold = 5, seed = 0,
-       obj = logregobj, feval=evalerror)
+xgb.cv(param, dtrain, num_round, nfold=5, seed=0,
+       obj=logregobj, feval=evalerror)
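One way to check that a file follows these conventions after such edits is a style checker. A rough sketch using pycodestyle's Python API, assuming the package is installed and the script is run from the demo directory (the file path is illustrative):

import pycodestyle

# Report any remaining PEP 8 violations in the demo script.
style = pycodestyle.StyleGuide()
result = style.check_files(['basic_walkthrough.py'])
print('violations found:', result.total_errors)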