# xgboost/multi-node/hadoop/mushroom.hadoop.conf
# General Parameters, see comment for each definition
# choose the booster, can be gbtree or gblinear
booster = gbtree
# choose logistic regression loss function for binary classification
objective = binary:logistic
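# (binary:logistic outputs the predicted probability of the positive class)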
# Tree Booster Parameters
# step size shrinkage
eta = 1.0
# minimum loss reduction required to make a further partition
gamma = 1.0
# minimum sum of instance weight (hessian) needed in a child
min_child_weight = 1
# maximum depth of a tree
max_depth = 3
# Task Parameters
# the number of boosting rounds to run
num_round = 2
# 0 means do not save any model except the final round model
save_period = 0
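# (a positive value, e.g. save_period = 10, would also save a model file every 10 rounds)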
# evaluate on training data as well each round
eval_train = 1
# The path of validation data, used to monitor the training process; here [test] sets the name of the validation set
# eval[test] = "agaricus.txt.test"
# Please do not modify the following parameters
# The path of training data
data = stdin
# The path of model file
model_out = stdout
# data split mode used by xgboost (row: the training data is partitioned by rows across workers)
dsplit = row
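# Note: data = stdin, model_out = stdout and dsplit = row above are intended
# for the Hadoop streaming setup (see run_hadoop_mushroom.sh in this folder):
# each worker is assumed to receive its row-partitioned shard of the training
# data on stdin and to emit the trained model on stdout.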