Init estimation for regression. (#8272)

This commit is contained in:
Jiaming Yuan
2023-01-11 02:04:56 +08:00
committed by GitHub
parent 1b58d81315
commit badeff1d74
29 changed files with 466 additions and 132 deletions

View File

@@ -320,7 +320,7 @@ test_that("prediction in early-stopping xgb.cv works", {
expect_output(
cv <- xgb.cv(param, dtrain, nfold = 5, eta = 0.1, nrounds = 20,
early_stopping_rounds = 5, maximize = FALSE, stratified = FALSE,
-prediction = TRUE)
+prediction = TRUE, base_score = 0.5)
, "Stopping. Best iteration")
expect_false(is.null(cv$best_iteration))

View File

@@ -27,11 +27,13 @@ if (isTRUE(VCD_AVAILABLE)) {
# binary
bst.Tree <- xgboost(data = sparse_matrix, label = label, max_depth = 9,
eta = 1, nthread = 2, nrounds = nrounds, verbose = 0,
-objective = "binary:logistic", booster = "gbtree")
+objective = "binary:logistic", booster = "gbtree",
+base_score = 0.5)
bst.GLM <- xgboost(data = sparse_matrix, label = label,
eta = 1, nthread = 1, nrounds = nrounds, verbose = 0,
-objective = "binary:logistic", booster = "gblinear")
+objective = "binary:logistic", booster = "gblinear",
+base_score = 0.5)
feature.names <- colnames(sparse_matrix)
}
@@ -360,7 +362,8 @@ test_that("xgb.importance works with and without feature names", {
m <- xgboost::xgboost(
data = as.matrix(data.frame(x = c(0, 1))),
label = c(1, 2),
-nrounds = 1
+nrounds = 1,
+base_score = 0.5
)
df <- xgb.model.dt.tree(model = m)
expect_equal(df$Feature, "Leaf")