From e0a0343ae671138a0e119c7f954c7ce798554547 Mon Sep 17 00:00:00 2001
From: antinucleon
Date: Fri, 16 May 2014 17:48:03 -0600
Subject: [PATCH] speedtest

---
 demo/kaggle-higgs/speedtest.py | 63 ++++++++++++++++++++++++++++++++++
 1 file changed, 63 insertions(+)
 create mode 100755 demo/kaggle-higgs/speedtest.py

diff --git a/demo/kaggle-higgs/speedtest.py b/demo/kaggle-higgs/speedtest.py
new file mode 100755
index 000000000..1b07d4619
--- /dev/null
+++ b/demo/kaggle-higgs/speedtest.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# speed test: compare xgboost training time against sklearn's GradientBoostingClassifier
+import sys
+import numpy as np
+# add the path of the xgboost python module
+sys.path.append('../../python/')
+import xgboost as xgb
+from sklearn.ensemble import GradientBoostingClassifier
+import time
+test_size = 550000
+
+# path to where the data lies
+dpath = 'data'
+
+# load in training data, directly use numpy
+dtrain = np.loadtxt(dpath + '/training.csv', delimiter=',', skiprows=1, converters={32: lambda x: int(x == 's')})
+print 'finish loading from csv'
+
+label = dtrain[:, 32]
+data = dtrain[:, 1:31]
+# rescale weight to make it the same as the test set
+weight = dtrain[:, 31] * float(test_size) / len(label)
+
+sum_wpos = sum(weight[i] for i in xrange(len(label)) if label[i] == 1.0)
+sum_wneg = sum(weight[i] for i in xrange(len(label)) if label[i] == 0.0)
+
+# print weight statistics
+print 'weight statistics: wpos=%g, wneg=%g, ratio=%g' % (sum_wpos, sum_wneg, sum_wneg / sum_wpos)
+
+# construct xgboost.DMatrix from numpy array, treat -999.0 as missing value
+xgmat = xgb.DMatrix(data, label=label, missing=-999.0, weight=weight)
+
+# setup parameters for xgboost
+param = {}
+# use logistic regression loss
+param['loss_type'] = 1
+# scale weight of positive examples
+param['scale_pos_weight'] = sum_wneg / sum_wpos
+param['bst:eta'] = 0.1
+param['bst:max_depth'] = 6
+param['eval_metric'] = 'auc'
+param['silent'] = 1
+param['nthread'] = 4
+
+# you could pass param directly, but we want to watch multiple metrics here
+plst = param.items() + [('eval_metric', 'ams@0.15')]
+
+watchlist = [(xgmat, 'train')]
+# boost 135 trees
+num_round = 135
+print 'loading data end, start to boost trees'
+print "training GBM from sklearn"
+tmp = time.time()
+gbm = GradientBoostingClassifier(n_estimators=135, max_depth=6, verbose=2)
+gbm.fit(data, label)
+print "GBM costs: %s seconds" % str(time.time() - tmp)
+#raw_input()  # uncomment to pause between the two benchmarks
+print "training xgboost"
+tmp = time.time()
+bst = xgb.train(plst, xgmat, num_round, watchlist)
+print "XGBoost costs: %s seconds" % str(time.time() - tmp)
+
+print 'finish training'
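
Note (not part of the patch): the 'ams@0.15' metric watched during boosting is
the Approximate Median Significance from the Kaggle Higgs challenge, evaluated
after keeping the top 15% of events ranked by predicted score. Below is a
minimal sketch of that metric, assuming the challenge's published formula with
the constant regularization term b_reg = 10; the helper names `ams` and
`ams_at_cut` are ours for illustration and do not appear in the patch:

    import math
    import numpy as np

    def ams(s, b, b_reg=10.0):
        # Approximate Median Significance: s and b are the weighted sums of
        # selected signal and background events, b_reg a constant regularizer.
        return math.sqrt(2.0 * ((s + b + b_reg) * math.log(1.0 + s / (b + b_reg)) - s))

    def ams_at_cut(score, label, weight, cut=0.15):
        # Keep the top `cut` fraction of events by score, then compute AMS
        # from the weights of the selected signal (label 1) and background.
        top = np.argsort(-score)[:int(cut * len(score))]
        s = weight[top][label[top] == 1.0].sum()
        b = weight[top][label[top] == 0.0].sum()
        return ams(s, b)

For example, `ams_at_cut(bst.predict(xgmat), label, weight)` would score the
xgboost model on the training set at the same 15% cutoff the watchlist reports.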