From cfd6c9e3b7c3fa7d97d86c9e40164be6b11f7605 Mon Sep 17 00:00:00 2001
From: Tianqi Chen
Date: Fri, 16 May 2014 20:16:10 -0700
Subject: [PATCH] Update train.py

---
 demo/multi_classification/train.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/demo/multi_classification/train.py b/demo/multi_classification/train.py
index d51824a16..d4cf2a0d4 100755
--- a/demo/multi_classification/train.py
+++ b/demo/multi_classification/train.py
@@ -22,8 +22,7 @@ xg_train = xgb.DMatrix( train_X, label=train_Y)
 xg_test = xgb.DMatrix(test_X, label=test_Y)
 # setup parameters for xgboost
 param = {}
-# use logistic regression loss, use raw prediction before logistic transformation
-# since we only need the rank
+# use softmax multi-class classification
 param['objective'] = 'multi:softmax'
 # scale weight of positive examples
 param['bst:eta'] = 0.1
@@ -35,4 +34,9 @@ param['num_class'] = 6
 watchlist = [ (xg_train,'train'), (xg_test, 'test') ]
 num_round = 5
 bst = xgb.train(param, xg_train, num_round, watchlist );
+# get prediction
+pred = bst.predict( xg_test );
+
+print 'error=%f' % (sum(int(pred[i]) != test_Y[i] for i in range(len(test_Y))) / float(len(test_Y)))
+