[DIST] Add Distributed XGBoost on AWS Tutorial

This commit is contained in:
tqchen
2016-02-25 20:42:16 -08:00
parent 61d9edcaa4
commit a71ba04109
11 changed files with 355 additions and 86 deletions

View File

@@ -6,24 +6,24 @@ objective = binary:logistic
# Tree Booster Parameters
# step size shrinkage
eta = 1.0
# minimum loss reduction required to make a further partition
gamma = 1.0
# minimum sum of instance weight(hessian) needed in a child
min_child_weight = 1
# maximum depth of a tree
max_depth = 3
# Task Parameters
# the number of round to do boosting
num_round = 2
# 0 means do not save any model except the final round model
save_period = 0
# The path of training data
data = "agaricus.txt.train"
# The path of validation data, used to monitor training process, here [test] sets name of the validation set
eval[test] = "agaricus.txt.test"
# evaluate on training data as well each round
eval_train = 1
# The path of test data
test:data = "agaricus.txt.test"