Added test for eta decay (+3 squashed commits)

Squashed commits:
  [9109887] Added test for eta decay (+1 squashed commit)
  [1336bd4] Added tests for eta decay (+2 squashed commits)
  [91aac2d] Added tests for eta decay (+1 squashed commit)
  [3ff48e7] Added test for eta decay
  [6bb1eed] Rewrote Rd files
  [bf0dec4] Added learning_rates for a different eta in each boosting round
commit 888edba03f
parent c817efbd8a
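
The feature under test here is the new learning_rates argument to xgb.train, which sets a different eta for each boosting round: either a list with one eta per round, or a function computing the eta from the round index. A minimal sketch of both forms, modeled on the tests in this diff (the data path and the decay formula are illustrative assumptions, not part of the commit):

    import xgboost as xgb

    # Assumed demo data path, as used in the repository's Python tests.
    dtrain = xgb.DMatrix('demo/data/agaricus.txt.train')
    param = {'max_depth': 2, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic'}
    watchlist = [(dtrain, 'train')]

    # Form 1: one eta per boosting round; a list whose length differs
    # from num_boost_round raises ValueError (see test_eta_decay below).
    bst = xgb.train(param, dtrain, 2, watchlist, learning_rates=[0.4, 0.3])

    # Form 2: a callable of (current round, total rounds); this decay
    # schedule is an illustrative assumption.
    def eta_decay(ithround, num_boost_round):
        return 0.4 / (ithround + 1)

    bst = xgb.train(param, dtrain, 2, watchlist, learning_rates=eta_decay)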
@@ -5,7 +5,7 @@
 \alias{predict,xgb.Booster-method}
 \title{Predict method for eXtreme Gradient Boosting model}
 \usage{
-\S4method{predict}{xgb.Booster}(object, newdata, missing = NULL,
+\S4method{predict}{xgb.Booster}(object, newdata, missing = NA,
   outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE)
 }
 \arguments{

@@ -4,7 +4,7 @@
 \alias{xgb.DMatrix}
 \title{Contruct xgb.DMatrix object}
 \usage{
-xgb.DMatrix(data, info = list(), missing = 0, ...)
+xgb.DMatrix(data, info = list(), missing = NA, ...)
 }
 \arguments{
 \item{data}{a \code{matrix} object, a \code{dgCMatrix} object or a character

@@ -4,11 +4,10 @@
 \alias{xgb.cv}
 \title{Cross Validation}
 \usage{
-xgb.cv(params = list(), data, nrounds, nfold, label = NULL,
-  missing = NULL, prediction = FALSE, showsd = TRUE, metrics = list(),
-  obj = NULL, feval = NULL, stratified = TRUE, folds = NULL,
-  verbose = T, print.every.n = 1L, early.stop.round = NULL,
-  maximize = NULL, ...)
+xgb.cv(params = list(), data, nrounds, nfold, label = NULL, missing = NA,
+  prediction = FALSE, showsd = TRUE, metrics = list(), obj = NULL,
+  feval = NULL, stratified = TRUE, folds = NULL, verbose = T,
+  print.every.n = 1L, early.stop.round = NULL, maximize = NULL, ...)
 }
 \arguments{
 \item{params}{the list of parameters. Commonly used ones are:

@@ -4,7 +4,7 @@
 \alias{xgboost}
 \title{eXtreme Gradient Boosting (Tree) library}
 \usage{
-xgboost(data = NULL, label = NULL, missing = NULL, weight = NULL,
+xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
   params = list(), nrounds, verbose = 1, print.every.n = 1L,
   early.stop.round = NULL, maximize = NULL, save_period = 0,
   save_name = "xgboost.model", ...)

@@ -2,11 +2,12 @@ context('Test models with custom objective')
 
 require(xgboost)
 
+data(agaricus.train, package='xgboost')
+data(agaricus.test, package='xgboost')
+dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
+dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
+
 test_that("custom objective works", {
-  data(agaricus.train, package='xgboost')
-  data(agaricus.test, package='xgboost')
-  dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
-  dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
 
   watchlist <- list(eval = dtest, train = dtrain)
   num_round <- 2
@@ -45,3 +46,13 @@ test_that("custom objective works", {
   expect_equal(class(bst), "xgb.Booster")
   expect_equal(length(bst$raw), 1064)
 })
+
+test_that("different eta for each boosting round works", {
+  num_round <- 2
+  watchlist <- list(eval = dtest, train = dtrain)
+  param <- list(max.depth=2, eta=1, nthread = 2, silent=1)
+
+  bst <- xgb.train(param, dtrain, num_round, watchlist, learning_rates = c(0.2, 0.3))
+})
+
+

@@ -1,5 +1,6 @@
 import numpy as np
 import xgboost as xgb
+import unittest
 
 dpath = 'demo/data/'
 dtrain = xgb.DMatrix(dpath + 'agaricus.txt.train')

@@ -7,56 +8,76 @@ dtest = xgb.DMatrix(dpath + 'agaricus.txt.test')
 
 rng = np.random.RandomState(1994)
 
-def test_glm():
-    param = {'silent':1, 'objective':'binary:logistic', 'booster':'gblinear', 'alpha': 0.0001, 'lambda': 1 }
-    watchlist = [(dtest,'eval'), (dtrain,'train')]
-    num_round = 4
-    bst = xgb.train(param, dtrain, num_round, watchlist)
-    assert isinstance(bst, xgb.core.Booster)
-    preds = bst.predict(dtest)
-    labels = dtest.get_label()
-    err = sum(1 for i in range(len(preds)) if int(preds[i]>0.5)!=labels[i]) / float(len(preds))
-    assert err < 0.1
+class TestModels(unittest.TestCase):
 
-def test_custom_objective():
-    param = {'max_depth':2, 'eta':1, 'silent':1 }
-    watchlist = [(dtest,'eval'), (dtrain,'train')]
-    num_round = 2
-    def logregobj(preds, dtrain):
-        labels = dtrain.get_label()
-        preds = 1.0 / (1.0 + np.exp(-preds))
-        grad = preds - labels
-        hess = preds * (1.0-preds)
-        return grad, hess
-    def evalerror(preds, dtrain):
-        labels = dtrain.get_label()
-        return 'error', float(sum(labels != (preds > 0.0))) / len(labels)
+    def test_glm(self):
+        param = {'silent':1, 'objective':'binary:logistic', 'booster':'gblinear', 'alpha': 0.0001, 'lambda': 1 }
+        watchlist = [(dtest,'eval'), (dtrain,'train')]
+        num_round = 4
+        bst = xgb.train(param, dtrain, num_round, watchlist)
+        assert isinstance(bst, xgb.core.Booster)
+        preds = bst.predict(dtest)
+        labels = dtest.get_label()
+        err = sum(1 for i in range(len(preds)) if int(preds[i]>0.5)!=labels[i]) / float(len(preds))
+        assert err < 0.1
 
-    # test custom_objective in training
-    bst = xgb.train(param, dtrain, num_round, watchlist, logregobj, evalerror)
-    assert isinstance(bst, xgb.core.Booster)
-    preds = bst.predict(dtest)
-    labels = dtest.get_label()
-    err = sum(1 for i in range(len(preds)) if int(preds[i]>0.5)!=labels[i]) / float(len(preds))
-    assert err < 0.1
+    def test_eta_decay(self):
+        param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic' }
+        watchlist = [(dtest,'eval'), (dtrain,'train')]
+        num_round = 2
+        # learning_rates as a list
+        bst = xgb.train(param, dtrain, num_round, watchlist, learning_rates=[0.4, 0.3])
+        assert isinstance(bst, xgb.core.Booster)
+        # different length
+        num_round = 4
+        self.assertRaises(ValueError, xgb.train, param, dtrain, num_round, watchlist, learning_rates=[0.4, 0.3, 0.2])
 
-    # test custom_objective in cross-validation
-    xgb.cv(param, dtrain, num_round, nfold = 5, seed = 0,
-           obj = logregobj, feval=evalerror)
+        # learning_rates as a customized decay function
+        def eta_decay(ithround, num_boost_round):
+            return num_boost_round / ithround
+        bst = xgb.train(param, dtrain, num_round, watchlist, learning_rates=eta_decay)
+        assert isinstance(bst, xgb.core.Booster)
 
-def test_fpreproc():
-    param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
-    num_round = 2
-    def fpreproc(dtrain, dtest, param):
-        label = dtrain.get_label()
-        ratio = float(np.sum(label == 0)) / np.sum(label==1)
-        param['scale_pos_weight'] = ratio
-        return (dtrain, dtest, param)
-    xgb.cv(param, dtrain, num_round, nfold=5,
-           metrics={'auc'}, seed = 0, fpreproc = fpreproc)
 
-def test_show_stdv():
-    param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
-    num_round = 2
-    xgb.cv(param, dtrain, num_round, nfold=5,
-           metrics={'error'}, seed = 0, show_stdv = False)
+    def test_custom_objective(self):
+        param = {'max_depth':2, 'eta':1, 'silent':1 }
+        watchlist = [(dtest,'eval'), (dtrain,'train')]
+        num_round = 2
+        def logregobj(preds, dtrain):
+            labels = dtrain.get_label()
+            preds = 1.0 / (1.0 + np.exp(-preds))
+            grad = preds - labels
+            hess = preds * (1.0-preds)
+            return grad, hess
+        def evalerror(preds, dtrain):
+            labels = dtrain.get_label()
+            return 'error', float(sum(labels != (preds > 0.0))) / len(labels)
+
+        # test custom_objective in training
+        bst = xgb.train(param, dtrain, num_round, watchlist, logregobj, evalerror)
+        assert isinstance(bst, xgb.core.Booster)
+        preds = bst.predict(dtest)
+        labels = dtest.get_label()
+        err = sum(1 for i in range(len(preds)) if int(preds[i]>0.5)!=labels[i]) / float(len(preds))
+        assert err < 0.1
+
+        # test custom_objective in cross-validation
+        xgb.cv(param, dtrain, num_round, nfold = 5, seed = 0,
+               obj = logregobj, feval=evalerror)
+
+    def test_fpreproc(self):
+        param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
+        num_round = 2
+        def fpreproc(dtrain, dtest, param):
+            label = dtrain.get_label()
+            ratio = float(np.sum(label == 0)) / np.sum(label==1)
+            param['scale_pos_weight'] = ratio
+            return (dtrain, dtest, param)
+        xgb.cv(param, dtrain, num_round, nfold=5,
+               metrics={'auc'}, seed = 0, fpreproc = fpreproc)
+
+    def test_show_stdv(self):
+        param = {'max_depth':2, 'eta':1, 'silent':1, 'objective':'binary:logistic'}
+        num_round = 2
+        xgb.cv(param, dtrain, num_round, nfold=5,
+               metrics={'error'}, seed = 0, show_stdv = False)