diff --git a/R-package/R/xgb.Booster.R b/R-package/R/xgb.Booster.R
index f196b1a1d..37cfc199e 100644
--- a/R-package/R/xgb.Booster.R
+++ b/R-package/R/xgb.Booster.R
@@ -21,13 +21,13 @@ xgb.Booster.handle <- function(params, cachelist, modelfile, handle) {
     ## A memory buffer
     bst <- xgb.unserialize(modelfile, handle)
     xgb.parameters(bst) <- params
-    return (bst)
+    return(bst)
   } else if (inherits(modelfile, "xgb.Booster")) {
     ## A booster object
     bst <- xgb.Booster.complete(modelfile, saveraw = TRUE)
     bst <- xgb.unserialize(bst$raw)
     xgb.parameters(bst) <- params
-    return (bst)
+    return(bst)
   } else {
     stop("modelfile must be either character filename, or raw booster dump, or xgb.Booster object")
   }
@@ -382,7 +382,7 @@ predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FA
       cval[0] <- val
       return(cval)
     }
-    return (val)
+    return(val)
   }

   ## We set strict_shape to TRUE then drop the dimensions conditionally
diff --git a/R-package/R/xgb.DMatrix.R b/R-package/R/xgb.DMatrix.R
index 8586ae086..b01e98637 100644
--- a/R-package/R/xgb.DMatrix.R
+++ b/R-package/R/xgb.DMatrix.R
@@ -117,7 +117,7 @@ xgb.get.DMatrix <- function(data, label, missing, weight, nthread) {
       stop("xgboost: invalid input data")
     }
   }
-  return (dtrain)
+  return(dtrain)
 }


diff --git a/R-package/R/xgb.load.raw.R b/R-package/R/xgb.load.raw.R
index d531da6c9..b159e9de1 100644
--- a/R-package/R/xgb.load.raw.R
+++ b/R-package/R/xgb.load.raw.R
@@ -18,6 +18,6 @@ xgb.load.raw <- function(buffer, as_booster = FALSE) {
     booster <- xgb.Booster.complete(booster, saveraw = TRUE)
     return(booster)
   } else {
-    return (handle)
+    return(handle)
   }
 }
diff --git a/R-package/R/xgb.unserialize.R b/R-package/R/xgb.unserialize.R
index e666eb055..291d3e7da 100644
--- a/R-package/R/xgb.unserialize.R
+++ b/R-package/R/xgb.unserialize.R
@@ -37,5 +37,5 @@ xgb.unserialize <- function(buffer, handle = NULL) {
     }
   })
   class(handle) <- "xgb.Booster.handle"
-  return (handle)
+  return(handle)
 }
diff --git a/R-package/R/xgboost.R b/R-package/R/xgboost.R
index db4fd67aa..e60ea2de8 100644
--- a/R-package/R/xgboost.R
+++ b/R-package/R/xgboost.R
@@ -24,7 +24,7 @@ xgboost <- function(data = NULL, label = NULL, missing = NA, weight = NULL,
                     early_stopping_rounds = early_stopping_rounds, maximize = maximize,
                     save_period = save_period, save_name = save_name,
                     xgb_model = xgb_model, callbacks = callbacks, ...)
-  return (bst)
+  return(bst)
 }

 #' Training part from Mushroom Data Set
diff --git a/R-package/demo/cross_validation.R b/R-package/demo/cross_validation.R
index 33c70a3be..cf048c5ed 100644
--- a/R-package/demo/cross_validation.R
+++ b/R-package/demo/cross_validation.R
@@ -25,7 +25,7 @@ xgb.cv(param, dtrain, nrounds, nfold = 5,
 # you can also do cross validation with customized loss function
 # See custom_objective.R
 ##
-print ('running cross validation, with customized loss function')
+print('running cross validation, with customized loss function')

 logregobj <- function(preds, dtrain) {
   labels <- getinfo(dtrain, "label")
diff --git a/R-package/demo/custom_objective.R b/R-package/demo/custom_objective.R
index 2d0914ab5..35201332c 100644
--- a/R-package/demo/custom_objective.R
+++ b/R-package/demo/custom_objective.R
@@ -35,7 +35,7 @@ evalerror <- function(preds, dtrain) {
 param <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0,
               objective = logregobj, eval_metric = evalerror)

-print ('start training with user customized objective')
+print('start training with user customized objective')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
 bst <- xgb.train(param, dtrain, num_round, watchlist)
@@ -59,7 +59,7 @@ logregobjattr <- function(preds, dtrain) {
 }
 param <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0,
               objective = logregobjattr, eval_metric = evalerror)
-print ('start training with user customized objective, with additional attributes in DMatrix')
+print('start training with user customized objective, with additional attributes in DMatrix')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
 bst <- xgb.train(param, dtrain, num_round, watchlist)
diff --git a/R-package/demo/early_stopping.R b/R-package/demo/early_stopping.R
index f733dce8d..04da1382f 100644
--- a/R-package/demo/early_stopping.R
+++ b/R-package/demo/early_stopping.R
@@ -30,7 +30,7 @@ evalerror <- function(preds, dtrain) {
   err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
   return(list(metric = "error", value = err))
 }
-print ('start training with early Stopping setting')
+print('start training with early Stopping setting')

 bst <- xgb.train(param, dtrain, num_round, watchlist,
                  objective = logregobj, eval_metric = evalerror, maximize = FALSE,
diff --git a/R-package/tests/helper_scripts/generate_models.R b/R-package/tests/helper_scripts/generate_models.R
index 5d64fa6c5..ef2aeded6 100644
--- a/R-package/tests/helper_scripts/generate_models.R
+++ b/R-package/tests/helper_scripts/generate_models.R
@@ -19,15 +19,15 @@ w <- runif(metadata$kRows)
 version <- packageVersion('xgboost')
 target_dir <- 'models'

-save_booster <- function (booster, model_name) {
-  booster_bin <- function (model_name) {
-    return (file.path(target_dir, paste('xgboost-', version, '.', model_name, '.bin', sep = '')))
+save_booster <- function(booster, model_name) {
+  booster_bin <- function(model_name) {
+    return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.bin', sep = '')))
   }
-  booster_json <- function (model_name) {
-    return (file.path(target_dir, paste('xgboost-', version, '.', model_name, '.json', sep = '')))
+  booster_json <- function(model_name) {
+    return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.json', sep = '')))
   }
-  booster_rds <- function (model_name) {
-    return (file.path(target_dir, paste('xgboost-', version, '.', model_name, '.rds', sep = '')))
+  booster_rds <- function(model_name) {
+    return(file.path(target_dir, paste('xgboost-', version, '.', model_name, '.rds', sep = '')))
   }
   xgb.save(booster, booster_bin(model_name))
   saveRDS(booster, booster_rds(model_name))
@@ -36,7 +36,7 @@ save_booster <- function (booster, model_name) {
   }
 }

-generate_regression_model <- function () {
+generate_regression_model <- function() {
   print('Regression')

   y <- rnorm(metadata$kRows)
@@ -47,7 +47,7 @@ generate_regression_model <- function () {
   save_booster(booster, 'reg')
 }

-generate_logistic_model <- function () {
+generate_logistic_model <- function() {
   print('Binary classification with logistic loss')
   y <- sample(0:1, size = metadata$kRows, replace = TRUE)
   stopifnot(max(y) == 1, min(y) == 0)
@@ -64,7 +64,7 @@ generate_logistic_model <- function () {
   }
 }

-generate_classification_model <- function () {
+generate_classification_model <- function() {
   print('Multi-class classification')
   y <- sample(0:(metadata$kClasses - 1), size = metadata$kRows, replace = TRUE)
   stopifnot(max(y) == metadata$kClasses - 1, min(y) == 0)
@@ -77,7 +77,7 @@ generate_classification_model <- function () {
   save_booster(booster, 'cls')
 }

-generate_ranking_model <- function () {
+generate_ranking_model <- function() {
   print('Learning to rank')
   y <- sample(0:4, size = metadata$kRows, replace = TRUE)
   stopifnot(max(y) == 4, min(y) == 0)
diff --git a/R-package/tests/testthat/test_model_compatibility.R b/R-package/tests/testthat/test_model_compatibility.R
index 87380e85d..ce1725dc9 100644
--- a/R-package/tests/testthat/test_model_compatibility.R
+++ b/R-package/tests/testthat/test_model_compatibility.R
@@ -9,20 +9,20 @@ metadata <- list(
   kClasses = 3
 )

-run_model_param_check <- function (config) {
+run_model_param_check <- function(config) {
   testthat::expect_equal(config$learner$learner_model_param$num_feature, '4')
   testthat::expect_equal(config$learner$learner_train_param$booster, 'gbtree')
 }

-get_num_tree <- function (booster) {
+get_num_tree <- function(booster) {
   dump <- xgb.dump(booster)
   m <- regexec('booster\\[[0-9]+\\]', dump, perl = TRUE)
   m <- regmatches(dump, m)
   num_tree <- Reduce('+', lapply(m, length))
-  return (num_tree)
+  return(num_tree)
 }

-run_booster_check <- function (booster, name) {
+run_booster_check <- function(booster, name) {
   # If given a handle, we need to call xgb.Booster.complete() prior to using xgb.config().
   if (inherits(booster, "xgb.Booster") && xgboost:::is.null.handle(booster$handle)) {
     booster <- xgb.Booster.complete(booster)
@@ -68,7 +68,7 @@ test_that("Models from previous versions of XGBoost can be loaded", {
   pred_data <- xgb.DMatrix(matrix(c(0, 0, 0, 0), nrow = 1, ncol = 4),
                            nthread = 2)

-  lapply(list.files(model_dir), function (x) {
+  lapply(list.files(model_dir), function(x) {
     model_file <- file.path(model_dir, x)
     m <- regexec("xgboost-([0-9\\.]+)\\.([a-z]+)\\.[a-z]+", model_file, perl = TRUE)
     m <- regmatches(model_file, m)[[1]]
diff --git a/R-package/tests/testthat/test_ranking.R b/R-package/tests/testthat/test_ranking.R
index b43292e18..d4102dfce 100644
--- a/R-package/tests/testthat/test_ranking.R
+++ b/R-package/tests/testthat/test_ranking.R
@@ -47,7 +47,7 @@ test_that('Test ranking with weighted data', {
     pred <- predict(bst, newdata = dtrain, ntreelimit = i)
     # is_sorted[i]: is i-th group correctly sorted by the ranking predictor?
     is_sorted <- lapply(seq(1, 20, by = 5),
-                        function (k) {
+                        function(k) {
                           ind <- order(-pred[k:(k + 4)])
                           z <- y[ind + (k - 1)]
                           all(diff(z) <= 0) # Check if z is monotone decreasing
diff --git a/demo/kaggle-higgs/higgs-train.R b/demo/kaggle-higgs/higgs-train.R
index 6f37040f8..4730d7b3d 100644
--- a/demo/kaggle-higgs/higgs-train.R
+++ b/demo/kaggle-higgs/higgs-train.R
@@ -24,8 +24,8 @@ param <- list("objective" = "binary:logitraw",
               "nthread" = 16)
 watchlist <- list("train" = xgmat)
 nrounds <- 120
-print ("loading data end, start to boost trees")
+print("loading data end, start to boost trees")
 bst <- xgb.train(param, xgmat, nrounds, watchlist)
 # save out model
 xgb.save(bst, "higgs.model")
-print ('finish training')
+print('finish training')
diff --git a/demo/kaggle-higgs/speedtest.R b/demo/kaggle-higgs/speedtest.R
index c0e96a010..c6de1511b 100644
--- a/demo/kaggle-higgs/speedtest.R
+++ b/demo/kaggle-higgs/speedtest.R
@@ -39,11 +39,11 @@ for (i in seq_along(threads)){
                   "nthread" = thread)
     watchlist <- list("train" = xgmat)
     nrounds <- 120
-    print ("loading data end, start to boost trees")
+    print("loading data end, start to boost trees")
     bst <- xgb.train(param, xgmat, nrounds, watchlist)

     # save out model
     xgb.save(bst, "higgs.model")
-    print ('finish training')
+    print('finish training')
   })
 }
diff --git a/tests/ci_build/lint_r.R b/tests/ci_build/lint_r.R
index ce512482d..9b55ebfce 100644
--- a/tests/ci_build/lint_r.R
+++ b/tests/ci_build/lint_r.R
@@ -28,6 +28,7 @@ my_linters <- list(
   equals_na = lintr::equals_na_linter(),
   fixed_regex = lintr::fixed_regex_linter(),
   for_loop_index = lintr::for_loop_index_linter(),
+  function_left_parentheses = lintr::function_left_parentheses_linter(),
   function_return = lintr::function_return_linter(),
   infix_spaces_linter = lintr::infix_spaces_linter(),
   is_numeric = lintr::is_numeric_linter(),
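For reference, the function_left_parentheses linter enabled in lint_r.R above is what enforces the spacing style applied throughout this patch. The snippet below is a minimal sketch of the rule in action, not part of the patch itself; it assumes a lintr version whose lint() accepts a text argument for linting inline strings.

library(lintr)

# Flagged: a space before '(' in both the definition and the call,
# the exact pattern removed throughout this diff.
lint(text = "f <- function (x) return (x)",
     linters = list(function_left_parentheses_linter()))

# Clean: no space before '(' in either position; no lints are emitted.
lint(text = "f <- function(x) return(x)",
     linters = list(function_left_parentheses_linter()))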