# General Parameters, see comment for each definition
# choose the booster; can be gbtree or gblinear
booster = gbtree
# choose logistic regression loss function for binary classification
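# (other objectives, e.g. reg:linear for regression or multi:softmax for multiclass, are also available)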
objective = binary:logistic

# Tree Booster Parameters
# step size shrinkage (learning rate)
eta = 1.0
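# (a note for tuning: eta = 1.0 applies no shrinkage; smaller values such as 0.1 are common with more boosting rounds)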
# minimum loss reduction required to make a further partition
gamma = 1.0
# minimum sum of instance weight (hessian) needed in a child
min_child_weight = 1
# maximum depth of a tree
max_depth = 3

# Task Parameters
# the number of boosting rounds
num_round = 2
# 0 means save no intermediate models; only the final-round model is kept
save_period = 0
# evaluate on the training data as well each round
# eval_train = 1
# The path of the validation data, used to monitor the training process; here [test] sets the name of the validation set
# eval[test] = "agaricus.txt.test"

# Please do not modify the following parameters
# The path of the training data, prefixed with hdfs
#data = hdfs:/data/
# The path of the model file
#model_out =
# the data split mode of xgboost for distributed training (split by row)
dsplit = row
# evaluate on the training data as well each round
eval_train = 1
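# Example invocation (a sketch; assumes this file is saved as mushroom.conf
# and the xgboost CLI binary is built and on PATH):
#   xgboost mushroom.conf
# Individual parameters can also be overridden on the command line, e.g.:
#   xgboost mushroom.conf max_depth=6 num_round=10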