[R] [CI] enforce lintr::function_left_parentheses_linter check (#9631)

commit 799f8485e2
parent 4d7a187cb0
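The linter being enforced flags whitespace between a function name (or the `function` keyword) and its opening parenthesis, which is exactly the pattern the hunks below clean up. A minimal sketch of what it rejects and accepts (illustrative only, not part of the commit; `add_one` is a made-up name):

    ## Flagged by lintr::function_left_parentheses_linter():
    add_one <- function (x) {  # space after `function`
      return (x + 1)           # space after `return`
    }

    ## Compliant:
    add_one <- function(x) {
      return(x + 1)
    }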
@@ -21,13 +21,13 @@ xgb.Booster.handle <- function(params, cachelist, modelfile, handle) {
     ## A memory buffer
     bst <- xgb.unserialize(modelfile, handle)
     xgb.parameters(bst) <- params
-    return (bst)
+    return(bst)
   } else if (inherits(modelfile, "xgb.Booster")) {
     ## A booster object
     bst <- xgb.Booster.complete(modelfile, saveraw = TRUE)
     bst <- xgb.unserialize(bst$raw)
     xgb.parameters(bst) <- params
-    return (bst)
+    return(bst)
   } else {
     stop("modelfile must be either character filename, or raw booster dump, or xgb.Booster object")
   }
@@ -382,7 +382,7 @@ predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FA
       cval[0] <- val
       return(cval)
     }
-    return (val)
+    return(val)
   }

   ## We set strict_shape to TRUE then drop the dimensions conditionally
@@ -117,7 +117,7 @@ xgb.get.DMatrix <- function(data, label, missing, weight, nthread) {
       stop("xgboost: invalid input data")
     }
   }
-  return (dtrain)
+  return(dtrain)
 }

@@ -18,6 +18,6 @@ xgb.load.raw <- function(buffer, as_booster = FALSE) {
     booster <- xgb.Booster.complete(booster, saveraw = TRUE)
     return(booster)
   } else {
-    return (handle)
+    return(handle)
   }
 }
@@ -37,5 +37,5 @@ xgb.unserialize <- function(buffer, handle = NULL) {
     }
   })
   class(handle) <- "xgb.Booster.handle"
-  return (handle)
+  return(handle)
 }
@@ -24,7 +24,7 @@ xgboost <- function(data = NULL, label = NULL, missing = NA, weight = NULL,
                    early_stopping_rounds = early_stopping_rounds, maximize = maximize,
                    save_period = save_period, save_name = save_name,
                    xgb_model = xgb_model, callbacks = callbacks, ...)
-  return (bst)
+  return(bst)
 }

 #' Training part from Mushroom Data Set
@@ -25,7 +25,7 @@ xgb.cv(param, dtrain, nrounds, nfold = 5,
 # you can also do cross validation with customized loss function
 # See custom_objective.R
 ##
-print ('running cross validation, with customized loss function')
+print('running cross validation, with customized loss function')

 logregobj <- function(preds, dtrain) {
   labels <- getinfo(dtrain, "label")
@@ -35,7 +35,7 @@ evalerror <- function(preds, dtrain) {

 param <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0,
               objective = logregobj, eval_metric = evalerror)
-print ('start training with user customized objective')
+print('start training with user customized objective')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
 bst <- xgb.train(param, dtrain, num_round, watchlist)
@@ -59,7 +59,7 @@ logregobjattr <- function(preds, dtrain) {
 }
 param <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0,
               objective = logregobjattr, eval_metric = evalerror)
-print ('start training with user customized objective, with additional attributes in DMatrix')
+print('start training with user customized objective, with additional attributes in DMatrix')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
 bst <- xgb.train(param, dtrain, num_round, watchlist)
@@ -30,7 +30,7 @@ evalerror <- function(preds, dtrain) {
   err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
   return(list(metric = "error", value = err))
 }
-print ('start training with early Stopping setting')
+print('start training with early Stopping setting')

 bst <- xgb.train(param, dtrain, num_round, watchlist,
                  objective = logregobj, eval_metric = evalerror, maximize = FALSE,
@@ -19,15 +19,15 @@ w <- runif(metadata$kRows)
 version <- packageVersion('xgboost')
 target_dir <- 'models'

-save_booster <- function (booster, model_name) {
-  booster_bin <- function (model_name) {
-    return (file.path(target_dir, paste('xgboost-', version, '.', model_name, '.bin', sep = '')))
+save_booster <- function(booster, model_name) {
+  booster_bin <- function(model_name) {
+    return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.bin', sep = '')))
   }
-  booster_json <- function (model_name) {
-    return (file.path(target_dir, paste('xgboost-', version, '.', model_name, '.json', sep = '')))
+  booster_json <- function(model_name) {
+    return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.json', sep = '')))
   }
-  booster_rds <- function (model_name) {
-    return (file.path(target_dir, paste('xgboost-', version, '.', model_name, '.rds', sep = '')))
+  booster_rds <- function(model_name) {
+    return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.rds', sep = '')))
   }
   xgb.save(booster, booster_bin(model_name))
   saveRDS(booster, booster_rds(model_name))
@@ -36,7 +36,7 @@ save_booster <- function (booster, model_name) {
   }
 }

-generate_regression_model <- function () {
+generate_regression_model <- function() {
   print('Regression')
   y <- rnorm(metadata$kRows)

@@ -47,7 +47,7 @@ generate_regression_model <- function () {
   save_booster(booster, 'reg')
 }

-generate_logistic_model <- function () {
+generate_logistic_model <- function() {
   print('Binary classification with logistic loss')
   y <- sample(0:1, size = metadata$kRows, replace = TRUE)
   stopifnot(max(y) == 1, min(y) == 0)
@@ -64,7 +64,7 @@ generate_logistic_model <- function () {
   }
 }

-generate_classification_model <- function () {
+generate_classification_model <- function() {
   print('Multi-class classification')
   y <- sample(0:(metadata$kClasses - 1), size = metadata$kRows, replace = TRUE)
   stopifnot(max(y) == metadata$kClasses - 1, min(y) == 0)
@@ -77,7 +77,7 @@ generate_classification_model <- function () {
   save_booster(booster, 'cls')
 }

-generate_ranking_model <- function () {
+generate_ranking_model <- function() {
   print('Learning to rank')
   y <- sample(0:4, size = metadata$kRows, replace = TRUE)
   stopifnot(max(y) == 4, min(y) == 0)
@@ -9,20 +9,20 @@ metadata <- list(
   kClasses = 3
 )

-run_model_param_check <- function (config) {
+run_model_param_check <- function(config) {
   testthat::expect_equal(config$learner$learner_model_param$num_feature, '4')
   testthat::expect_equal(config$learner$learner_train_param$booster, 'gbtree')
 }

-get_num_tree <- function (booster) {
+get_num_tree <- function(booster) {
   dump <- xgb.dump(booster)
   m <- regexec('booster\\[[0-9]+\\]', dump, perl = TRUE)
   m <- regmatches(dump, m)
   num_tree <- Reduce('+', lapply(m, length))
-  return (num_tree)
+  return(num_tree)
 }

-run_booster_check <- function (booster, name) {
+run_booster_check <- function(booster, name) {
   # If given a handle, we need to call xgb.Booster.complete() prior to using xgb.config().
   if (inherits(booster, "xgb.Booster") && xgboost:::is.null.handle(booster$handle)) {
     booster <- xgb.Booster.complete(booster)
@@ -68,7 +68,7 @@ test_that("Models from previous versions of XGBoost can be loaded", {

   pred_data <- xgb.DMatrix(matrix(c(0, 0, 0, 0), nrow = 1, ncol = 4), nthread = 2)

-  lapply(list.files(model_dir), function (x) {
+  lapply(list.files(model_dir), function(x) {
     model_file <- file.path(model_dir, x)
     m <- regexec("xgboost-([0-9\\.]+)\\.([a-z]+)\\.[a-z]+", model_file, perl = TRUE)
     m <- regmatches(model_file, m)[[1]]
@@ -47,7 +47,7 @@ test_that('Test ranking with weighted data', {
     pred <- predict(bst, newdata = dtrain, ntreelimit = i)
     # is_sorted[i]: is i-th group correctly sorted by the ranking predictor?
     is_sorted <- lapply(seq(1, 20, by = 5),
-      function (k) {
+      function(k) {
         ind <- order(-pred[k:(k + 4)])
         z <- y[ind + (k - 1)]
         all(diff(z) <= 0) # Check if z is monotone decreasing
@@ -24,8 +24,8 @@ param <- list("objective" = "binary:logitraw",
               "nthread" = 16)
 watchlist <- list("train" = xgmat)
 nrounds <- 120
-print ("loading data end, start to boost trees")
+print("loading data end, start to boost trees")
 bst <- xgb.train(param, xgmat, nrounds, watchlist)
 # save out model
 xgb.save(bst, "higgs.model")
-print ('finish training')
+print('finish training')
@@ -39,11 +39,11 @@ for (i in seq_along(threads)){
                 "nthread" = thread)
   watchlist <- list("train" = xgmat)
   nrounds <- 120
-  print ("loading data end, start to boost trees")
+  print("loading data end, start to boost trees")
   bst <- xgb.train(param, xgmat, nrounds, watchlist)
   # save out model
   xgb.save(bst, "higgs.model")
-  print ('finish training')
+  print('finish training')
   })
 }

@@ -28,6 +28,7 @@ my_linters <- list(
   equals_na = lintr::equals_na_linter(),
   fixed_regex = lintr::fixed_regex_linter(),
   for_loop_index = lintr::for_loop_index_linter(),
+  function_left_parentheses = lintr::function_left_parentheses_linter(),
   function_return = lintr::function_return_linter(),
   infix_spaces_linter = lintr::infix_spaces_linter(),
   is_numeric = lintr::is_numeric_linter(),
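The new `function_left_parentheses` entry above is what turns the check on in CI. As a hedged sketch of how a named linter list like `my_linters` is typically applied (the driver code below is assumed for illustration and not shown in this diff; the file path is hypothetical):

    library(lintr)

    ## Lint one file with the project's linter list and fail loudly on hits.
    results <- lintr::lint("R/xgb.Booster.R", linters = my_linters)
    print(results)
    if (length(results) > 0L) {
      stop("lintr found style violations")
    }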