Fixed bug in eta decay (+2 squashed commits)

Squashed commits:
[b67caf2] Fix build
[365ceaa] Fixed bug in eta decay
terrytangyuan 2015-10-31 12:40:19 -04:00
parent 888edba03f
commit 15a0d27eed
3 changed files with 1 addition and 14 deletions


@@ -46,13 +46,3 @@ test_that("custom objective works", {
   expect_equal(class(bst), "xgb.Booster")
   expect_equal(length(bst$raw), 1064)
 })
-
-test_that("different eta for each boosting round works", {
-  num_round <- 2
-  watchlist <- list(eval = dtest, train = dtrain)
-  param <- list(max.depth=2, eta=1, nthread = 2, silent=1)
-  bst <- xgb.train(param, dtrain, num_round, watchlist, learning_rates = c(0.2, 0.3))
-})


@@ -123,7 +123,7 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
     best_msg = ''
     best_score_i = 0
-    if isinstance(learning_rates, list) and len(learning_rates) < num_boost_round:
+    if isinstance(learning_rates, list) and len(learning_rates) != num_boost_round:
         raise ValueError("Length of list 'learning_rates' has to equal 'num_boost_round'.")
     for i in range(num_boost_round):
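
The one-character change above is the whole fix: the old '<' comparison only rejected lists that were too short, so an over-long learning_rates list slipped through with its extra rates silently ignored. A standalone sketch of the tightened check (not part of the commit; check_learning_rates is a hypothetical name):

def check_learning_rates(learning_rates, num_boost_round):
    # After the fix, any length mismatch raises, not only a too-short list.
    if isinstance(learning_rates, list) and len(learning_rates) != num_boost_round:
        raise ValueError("Length of list 'learning_rates' has to equal 'num_boost_round'.")

check_learning_rates([0.4, 0.3], 2)           # ok: exactly one eta per boosting round
try:
    check_learning_rates([0.4, 0.3, 0.2], 2)  # too long: passed the old '<' check, raises now
except ValueError as err:
    print(err)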


@@ -28,9 +28,6 @@ class TestModels(unittest.TestCase):
         # learning_rates as a list
         bst = xgb.train(param, dtrain, num_round, watchlist, learning_rates=[0.4, 0.3])
         assert isinstance(bst, xgb.core.Booster)
-        # different length
-        num_round = 4
-        self.assertRaises(ValueError, xgb.train, param, dtrain, num_round, watchlist, learning_rates=[0.4, 0.3, 0.2])
         # learning_rates as a customized decay function
         def eta_decay(ithround, num_boost_round):
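
For illustration, a usage sketch (not from this commit) of per-round learning rates, assuming an xgboost build of this era where xgb.train still accepts the learning_rates argument exercised in the tests above; the data here is synthetic:

import numpy as np
import xgboost as xgb

X = np.random.rand(100, 5)
y = np.random.randint(2, size=100)
dtrain = xgb.DMatrix(X, label=y)

param = {'max_depth': 2, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic'}
num_round = 4

# One eta per round; after this fix the list length must equal num_round.
bst = xgb.train(param, dtrain, num_round, learning_rates=[0.4, 0.3, 0.2, 0.1])

# Or a decay function of (current round, total rounds), matching the
# eta_decay signature in the test above.
def eta_decay(ithround, num_boost_round):
    return 0.4 / (ithround + 1)

bst = xgb.train(param, dtrain, num_round, learning_rates=eta_decay)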