From 9f3e5a2778ff3c01ac462e53cfce99bd4eb0258b Mon Sep 17 00:00:00 2001
From: antinucleon
Date: Sat, 17 May 2014 03:57:38 +0000
Subject: [PATCH] del

---
 demo/multi_classification/train.py    | 42 ---------------------------
 demo/multi_classification/wgetdata.sh |  2 --
 2 files changed, 44 deletions(-)
 delete mode 100755 demo/multi_classification/train.py
 delete mode 100755 demo/multi_classification/wgetdata.sh

diff --git a/demo/multi_classification/train.py b/demo/multi_classification/train.py
deleted file mode 100755
index 2dc98f4d6..000000000
--- a/demo/multi_classification/train.py
+++ /dev/null
@@ -1,42 +0,0 @@
-
-import sys
-import numpy as np
-sys.path.append('../../python/')
-import xgboost as xgb
-
-
-
-data = np.loadtxt('./dermatology.data', delimiter=',',converters={33: lambda x:int(x == '?'), 34: lambda x:int(x) } )
-sz = data.shape
-
-train = data[:int(sz[0] * 0.7), :]
-test = data[int(sz[0] * 0.7):, :]
-
-train_X = train[:,0:33]
-train_Y = train[:, 34]
-
-
-test_X = test[:,0:33]
-test_Y = test[:, 34]
-
-xg_train = xgb.DMatrix( train_X, label=train_Y)
-xg_test = xgb.DMatrix(test_X, label=test_Y)
-# setup parameters for xgboost
-param = {}
-# use logistic regression loss, use raw prediction before logistic transformation
-# since we only need the rank
-param['objective'] = 'multi:softmax'
-# scale weight of positive examples
-param['bst:eta'] = 0.1
-param['bst:max_depth'] = 6
-param['eval_metric'] = 'auc'
-param['silent'] = 1
-param['nthread'] = 4
-param['num_class'] = 5
-
-watchlist = [ (xg_train,'train'), (xg_test, 'test') ]
-num_round = 5
-bst = xgb.train(param, xg_train, num_round, watchlist );
-
-
-
diff --git a/demo/multi_classification/wgetdata.sh b/demo/multi_classification/wgetdata.sh
deleted file mode 100755
index 10dbcd8fb..000000000
--- a/demo/multi_classification/wgetdata.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#! /bin/bash
-wget https://archive.ics.uci.edu/ml/machine-learning-databases/dermatology/dermatology.data