Added test for eta decay (+3 squashed commits)

Squashed commits:
[9109887] Added test for eta decay (+1 squashed commit)
Squashed commits:
[1336bd4] Added tests for eta decay (+2 squashed commits)
Squashed commits:
[91aac2d] Added tests for eta decay (+1 squashed commit)
Squashed commits:
[3ff48e7] Added test for eta decay
[6bb1eed] Rewrote Rd files
[bf0dec4] Added learning_rates for diff eta in each boosting round
terrytangyuan
2015-10-31 10:35:01 -04:00
parent c817efbd8a
commit 888edba03f
6 changed files with 93 additions and 62 deletions
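Not part of the commit, but a usage sketch of the headline change: xgb.train gains a learning_rates argument so each boosting round can use its own eta, and the missing default moves from NULL/0 to NA. Parameter values below are illustrative assumptions, not taken from the diff.

# sketch only -- assumes this branch of the R package is installed
library(xgboost)
data(agaricus.train, package = 'xgboost')
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
param <- list(max.depth = 2, eta = 1, nthread = 2, silent = 1,
              objective = 'binary:logistic')
# one learning rate per boosting round, i.e. a simple eta decay schedule
bst <- xgb.train(param, dtrain, 4, list(train = dtrain),
                 learning_rates = c(0.5, 0.4, 0.3, 0.2))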

@@ -5,7 +5,7 @@
\alias{predict,xgb.Booster-method}
\title{Predict method for eXtreme Gradient Boosting model}
\usage{
-\S4method{predict}{xgb.Booster}(object, newdata, missing = NULL,
+\S4method{predict}{xgb.Booster}(object, newdata, missing = NA,
outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE)
}
\arguments{

@@ -4,7 +4,7 @@
\alias{xgb.DMatrix}
\title{Contruct xgb.DMatrix object}
\usage{
-xgb.DMatrix(data, info = list(), missing = 0, ...)
+xgb.DMatrix(data, info = list(), missing = NA, ...)
}
\arguments{
\item{data}{a \code{matrix} object, a \code{dgCMatrix} object or a character
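
A short sketch (not from the diff) of what the xgb.DMatrix default change means for dense input; the behaviour described is an assumption based on the old missing = 0 default.

# sketch only: NA entries are now treated as missing by default;
# to keep treating zeros as missing, pass missing = 0 explicitly
x <- matrix(c(1, NA, 3, 0, 5, 6), nrow = 2)
y <- c(0, 1)
dmat <- xgb.DMatrix(x, label = y)                     # new default: missing = NA
dmat_zero <- xgb.DMatrix(x, label = y, missing = 0)   # old behaviour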

@@ -4,11 +4,10 @@
\alias{xgb.cv}
\title{Cross Validation}
\usage{
-xgb.cv(params = list(), data, nrounds, nfold, label = NULL,
-  missing = NULL, prediction = FALSE, showsd = TRUE, metrics = list(),
-  obj = NULL, feval = NULL, stratified = TRUE, folds = NULL,
-  verbose = T, print.every.n = 1L, early.stop.round = NULL,
-  maximize = NULL, ...)
+xgb.cv(params = list(), data, nrounds, nfold, label = NULL, missing = NA,
+  prediction = FALSE, showsd = TRUE, metrics = list(), obj = NULL,
+  feval = NULL, stratified = TRUE, folds = NULL, verbose = T,
+  print.every.n = 1L, early.stop.round = NULL, maximize = NULL, ...)
}
\arguments{
\item{params}{the list of parameters. Commonly used ones are:
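
A hedged sketch of an xgb.cv call matching the reflowed usage above; argument values are assumptions, not taken from this commit.

# sketch only: 5-fold CV on the agaricus data, missing left at its new NA default
cv <- xgb.cv(params = list(max.depth = 2, eta = 1, nthread = 2, silent = 1,
                           objective = 'binary:logistic'),
             data = agaricus.train$data, label = agaricus.train$label,
             nrounds = 3, nfold = 5, metrics = list('error'))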

@@ -4,7 +4,7 @@
\alias{xgboost}
\title{eXtreme Gradient Boosting (Tree) library}
\usage{
-xgboost(data = NULL, label = NULL, missing = NULL, weight = NULL,
+xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
params = list(), nrounds, verbose = 1, print.every.n = 1L,
early.stop.round = NULL, maximize = NULL, save_period = 0,
save_name = "xgboost.model", ...)
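
Similarly, a sketch of the high-level xgboost() interface with missing left at the new NA default; values are assumed for illustration.

# sketch only
bst2 <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                max.depth = 2, eta = 1, nthread = 2, nrounds = 2,
                objective = 'binary:logistic')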

@@ -2,11 +2,12 @@ context('Test models with custom objective')
require(xgboost)
+data(agaricus.train, package='xgboost')
+data(agaricus.test, package='xgboost')
+dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
+dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
test_that("custom objective works", {
-  data(agaricus.train, package='xgboost')
-  data(agaricus.test, package='xgboost')
-  dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
-  dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
watchlist <- list(eval = dtest, train = dtrain)
num_round <- 2
@@ -44,4 +45,14 @@ test_that("custom objective works", {
bst <- xgb.train(param, dtrain, num_round, watchlist)
expect_equal(class(bst), "xgb.Booster")
expect_equal(length(bst$raw), 1064)
-})
+})
+test_that("different eta for each boosting round works", {
+  num_round <- 2
+  watchlist <- list(eval = dtest, train = dtrain)
+  param <- list(max.depth=2, eta=1, nthread = 2, silent=1)
+  bst <- xgb.train(param, dtrain, num_round, watchlist, learning_rates = c(0.2, 0.3))
+})
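
As a follow-up note, not part of the commit: learning_rates appears to take one value per boosting round (the test passes two values for num_round = 2), so a longer decay schedule can be written with seq(), reusing param, dtrain and watchlist from the test above.

# sketch only: decaying eta over 5 rounds
num_round <- 5
bst_decay <- xgb.train(param, dtrain, num_round, watchlist,
                       learning_rates = seq(0.5, 0.1, length.out = num_round))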