Merge pull request #582 from terrytangyuan/test

Test (eta decay) and bug fix
This commit is contained in:
Yuan (Terry) Tang 2015-10-31 13:07:33 -04:00
commit 9e1690defe
7 changed files with 83 additions and 64 deletions

View File

@@ -5,7 +5,7 @@
\alias{predict,xgb.Booster-method} \alias{predict,xgb.Booster-method}
\title{Predict method for eXtreme Gradient Boosting model} \title{Predict method for eXtreme Gradient Boosting model}
\usage{ \usage{
\S4method{predict}{xgb.Booster}(object, newdata, missing = NULL, \S4method{predict}{xgb.Booster}(object, newdata, missing = NA,
outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE) outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE)
} }
\arguments{ \arguments{

View File

@@ -4,7 +4,7 @@
\alias{xgb.DMatrix} \alias{xgb.DMatrix}
\title{Construct xgb.DMatrix object} \title{Construct xgb.DMatrix object}
\usage{ \usage{
xgb.DMatrix(data, info = list(), missing = 0, ...) xgb.DMatrix(data, info = list(), missing = NA, ...)
} }
\arguments{ \arguments{
\item{data}{a \code{matrix} object, a \code{dgCMatrix} object or a character \item{data}{a \code{matrix} object, a \code{dgCMatrix} object or a character

View File

@@ -4,11 +4,10 @@
\alias{xgb.cv} \alias{xgb.cv}
\title{Cross Validation} \title{Cross Validation}
\usage{ \usage{
xgb.cv(params = list(), data, nrounds, nfold, label = NULL, xgb.cv(params = list(), data, nrounds, nfold, label = NULL, missing = NA,
missing = NULL, prediction = FALSE, showsd = TRUE, metrics = list(), prediction = FALSE, showsd = TRUE, metrics = list(), obj = NULL,
obj = NULL, feval = NULL, stratified = TRUE, folds = NULL, feval = NULL, stratified = TRUE, folds = NULL, verbose = T,
verbose = T, print.every.n = 1L, early.stop.round = NULL, print.every.n = 1L, early.stop.round = NULL, maximize = NULL, ...)
maximize = NULL, ...)
} }
\arguments{ \arguments{
\item{params}{the list of parameters. Commonly used ones are: \item{params}{the list of parameters. Commonly used ones are:

View File

@@ -4,7 +4,7 @@
\alias{xgboost} \alias{xgboost}
\title{eXtreme Gradient Boosting (Tree) library} \title{eXtreme Gradient Boosting (Tree) library}
\usage{ \usage{
xgboost(data = NULL, label = NULL, missing = NULL, weight = NULL, xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
params = list(), nrounds, verbose = 1, print.every.n = 1L, params = list(), nrounds, verbose = 1, print.every.n = 1L,
early.stop.round = NULL, maximize = NULL, save_period = 0, early.stop.round = NULL, maximize = NULL, save_period = 0,
save_name = "xgboost.model", ...) save_name = "xgboost.model", ...)

View File

@@ -2,11 +2,12 @@ context('Test models with custom objective')
require(xgboost) require(xgboost)
data(agaricus.train, package='xgboost')
data(agaricus.test, package='xgboost')
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
test_that("custom objective works", { test_that("custom objective works", {
data(agaricus.train, package='xgboost')
data(agaricus.test, package='xgboost')
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
watchlist <- list(eval = dtest, train = dtrain) watchlist <- list(eval = dtest, train = dtrain)
num_round <- 2 num_round <- 2

View File

@@ -98,6 +98,7 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
if len(evals) < 1: if len(evals) < 1:
raise ValueError('For early stopping you need at least one set in evals.') raise ValueError('For early stopping you need at least one set in evals.')
if verbose_eval:
sys.stderr.write("Will train until {} error hasn't decreased in {} rounds.\n".format(\ sys.stderr.write("Will train until {} error hasn't decreased in {} rounds.\n".format(\
evals[-1][1], early_stopping_rounds)) evals[-1][1], early_stopping_rounds))
@@ -123,7 +124,7 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
best_msg = '' best_msg = ''
best_score_i = 0 best_score_i = 0
if isinstance(learning_rates, list) and len(learning_rates) < num_boost_round: if isinstance(learning_rates, list) and len(learning_rates) != num_boost_round:
raise ValueError("Length of list 'learning_rates' has to equal 'num_boost_round'.") raise ValueError("Length of list 'learning_rates' has to equal 'num_boost_round'.")
for i in range(num_boost_round): for i in range(num_boost_round):

View File

@@ -1,5 +1,6 @@
import numpy as np import numpy as np
import xgboost as xgb import xgboost as xgb
import unittest
dpath = 'demo/data/' dpath = 'demo/data/'
dtrain = xgb.DMatrix(dpath + 'agaricus.txt.train') dtrain = xgb.DMatrix(dpath + 'agaricus.txt.train')
@@ -7,7 +8,9 @@ dtest = xgb.DMatrix(dpath + 'agaricus.txt.test')
rng = np.random.RandomState(1994) rng = np.random.RandomState(1994)
def test_glm(): class TestModels(unittest.TestCase):
def test_glm(self):
param = {'silent':1, 'objective':'binary:logistic', 'booster':'gblinear', 'alpha': 0.0001, 'lambda': 1 } param = {'silent':1, 'objective':'binary:logistic', 'booster':'gblinear', 'alpha': 0.0001, 'lambda': 1 }
watchlist = [(dtest,'eval'), (dtrain,'train')] watchlist = [(dtest,'eval'), (dtrain,'train')]
num_round = 4 num_round = 4
@@ -18,7 +21,22 @@ def test_glm():
err = sum(1 for i in range(len(preds)) if int(preds[i]>0.5)!=labels[i]) / float(len(preds)) err = sum(1 for i in range(len(preds)) if int(preds[i]>0.5)!=labels[i]) / float(len(preds))
assert err < 0.1 assert err < 0.1
def test_custom_objective(): def test_eta_decay(self):
param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic' }
watchlist = [(dtest,'eval'), (dtrain,'train')]
num_round = 2
# learning_rates as a list
bst = xgb.train(param, dtrain, num_round, watchlist, learning_rates=[0.4, 0.3])
assert isinstance(bst, xgb.core.Booster)
# learning_rates as a customized decay function
def eta_decay(ithround, num_boost_round):
return num_boost_round / ithround
bst = xgb.train(param, dtrain, num_round, watchlist, learning_rates=eta_decay)
assert isinstance(bst, xgb.core.Booster)
def test_custom_objective(self):
param = {'max_depth':2, 'eta':1, 'silent':1 } param = {'max_depth':2, 'eta':1, 'silent':1 }
watchlist = [(dtest,'eval'), (dtrain,'train')] watchlist = [(dtest,'eval'), (dtrain,'train')]
num_round = 2 num_round = 2
@@ -44,7 +62,7 @@ def test_custom_objective():
xgb.cv(param, dtrain, num_round, nfold = 5, seed = 0, xgb.cv(param, dtrain, num_round, nfold = 5, seed = 0,
obj = logregobj, feval=evalerror) obj = logregobj, feval=evalerror)
def test_fpreproc(): def test_fpreproc(self):
param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'} param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
num_round = 2 num_round = 2
def fpreproc(dtrain, dtest, param): def fpreproc(dtrain, dtest, param):
@@ -55,7 +73,7 @@ def test_fpreproc():
xgb.cv(param, dtrain, num_round, nfold=5, xgb.cv(param, dtrain, num_round, nfold=5,
metrics={'auc'}, seed = 0, fpreproc = fpreproc) metrics={'auc'}, seed = 0, fpreproc = fpreproc)
def test_show_stdv(): def test_show_stdv(self):
param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'} param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
num_round = 2 num_round = 2
xgb.cv(param, dtrain, num_round, nfold=5, xgb.cv(param, dtrain, num_round, nfold=5,