diff --git a/R-package/R/callbacks.R b/R-package/R/callbacks.R
index 62bcd4050..957fca6d4 100644
--- a/R-package/R/callbacks.R
+++ b/R-package/R/callbacks.R
@@ -615,9 +615,11 @@ cb.gblinear.history <- function(sparse=FALSE) {
   coefs <- NULL
 
   init <- function(env) {
-    if (!is.null(env$bst)) { # xgb.train:
-    } else if (!is.null(env$bst_folds)) { # xgb.cv:
-    } else stop("Parent frame has neither 'bst' nor 'bst_folds'")
+    # xgb.train(): bst will be present
+    # xgb.cv(): bst_folds will be present
+    if (is.null(env$bst) && is.null(env$bst_folds)) {
+      stop("Parent frame has neither 'bst' nor 'bst_folds'")
+    }
   }
 
   # convert from list to (sparse) matrix
diff --git a/R-package/R/utils.R b/R-package/R/utils.R
index 8d9413d8a..e1f1df299 100644
--- a/R-package/R/utils.R
+++ b/R-package/R/utils.R
@@ -82,7 +82,7 @@ check.booster.params <- function(params, ...) {
 
   # interaction constraints parser (convert from list of column indices to string)
   if (!is.null(params[['interaction_constraints']]) &&
-      typeof(params[['interaction_constraints']]) != "character"){
+      typeof(params[['interaction_constraints']]) != "character") {
     # check input class
     if (!identical(class(params[['interaction_constraints']]), 'list')) stop('interaction_constraints should be class list')
     if (!all(unique(sapply(params[['interaction_constraints']], class)) %in% c('numeric', 'integer'))) {
@@ -251,8 +251,7 @@ generate.cv.folds <- function(nfold, nrows, stratified, label, params) {
 # Creates CV folds stratified by the values of y.
 # It was borrowed from caret::createFolds and simplified
 # by always returning an unnamed list of fold indices.
-xgb.createFolds <- function(y, k = 10)
-{
+xgb.createFolds <- function(y, k = 10) {
   if (is.numeric(y)) {
     ## Group the numeric data based on their magnitudes
     ## and sample within those groups.
diff --git a/R-package/R/xgb.DMatrix.R b/R-package/R/xgb.DMatrix.R
index b843d6518..71c1af1ab 100644
--- a/R-package/R/xgb.DMatrix.R
+++ b/R-package/R/xgb.DMatrix.R
@@ -76,7 +76,7 @@ xgb.get.DMatrix <- function(data, label = NULL, missing = NA, weight = NULL, nth
       stop("label must be provided when data is a matrix")
     }
     dtrain <- xgb.DMatrix(data, label = label, missing = missing, nthread = nthread)
-    if (!is.null(weight)){
+    if (!is.null(weight)) {
       setinfo(dtrain, "weight", weight)
     }
   } else {
@@ -218,7 +218,7 @@ getinfo.xgb.DMatrix <- function(object, name, ...) {
   }
   if (name == "feature_name" || name == "feature_type") {
     ret <- .Call(XGDMatrixGetStrFeatureInfo_R, object, name)
-  } else if (name != "nrow"){
+  } else if (name != "nrow") {
     ret <- .Call(XGDMatrixGetInfo_R, object, name)
   } else {
     ret <- nrow(object)
diff --git a/R-package/R/xgb.importance.R b/R-package/R/xgb.importance.R
index 67dbed0a5..844e36cdf 100644
--- a/R-package/R/xgb.importance.R
+++ b/R-package/R/xgb.importance.R
@@ -82,7 +82,7 @@
 #'
 #' @export
 xgb.importance <- function(feature_names = NULL, model = NULL, trees = NULL,
-                           data = NULL, label = NULL, target = NULL){
+                           data = NULL, label = NULL, target = NULL) {
 
   if (!(is.null(data) && is.null(label) && is.null(target)))
     warning("xgb.importance: parameters 'data', 'label' and 'target' are deprecated")
@@ -104,7 +104,11 @@ xgb.importance <- function(feature_names = NULL, model = NULL, trees = NULL,
       XGBoosterFeatureScore_R, model$handle, jsonlite::toJSON(args, auto_unbox = TRUE, null = "null")
     )
     names(results) <- c("features", "shape", "weight")
-    n_classes <- if (length(results$shape) == 2) { results$shape[2] } else { 0 }
+    if (length(results$shape) == 2) {
+      n_classes <- results$shape[2]
+    } else {
+      n_classes <- 0
+    }
     importance <- if (n_classes == 0) {
       data.table(Feature = results$features, Weight = results$weight)[order(-abs(Weight))]
     } else {
diff --git a/R-package/R/xgb.model.dt.tree.R b/R-package/R/xgb.model.dt.tree.R
index 5411c35d2..987117d56 100644
--- a/R-package/R/xgb.model.dt.tree.R
+++ b/R-package/R/xgb.model.dt.tree.R
@@ -62,7 +62,7 @@
 #'
 #' @export
 xgb.model.dt.tree <- function(feature_names = NULL, model = NULL, text = NULL,
-                              trees = NULL, use_int_id = FALSE, ...){
+                              trees = NULL, use_int_id = FALSE, ...) {
   check.deprecation(...)
 
   if (!inherits(model, "xgb.Booster") && !is.character(text)) {
@@ -82,7 +82,7 @@ xgb.model.dt.tree <- function(feature_names = NULL, model = NULL, text = NULL,
     stop("trees: must be a vector of integers.")
   }
 
-  if (is.null(text)){
+  if (is.null(text)) {
     text <- xgb.dump(model = model, with_stats = TRUE)
   }
 
diff --git a/R-package/R/xgb.plot.importance.R b/R-package/R/xgb.plot.importance.R
index ab0951463..7104d701f 100644
--- a/R-package/R/xgb.plot.importance.R
+++ b/R-package/R/xgb.plot.importance.R
@@ -102,7 +102,9 @@ xgb.plot.importance <- function(importance_matrix = NULL, top_n = NULL, measure
   original_mar <- par()$mar
 
   # reset margins so this function doesn't have side effects
-  on.exit({par(mar = original_mar)})
+  on.exit({
+    par(mar = original_mar)
+  })
 
   mar <- original_mar
   if (!is.null(left_margin))
diff --git a/R-package/R/xgb.plot.multi.trees.R b/R-package/R/xgb.plot.multi.trees.R
index fc54d777d..1b3a7d781 100644
--- a/R-package/R/xgb.plot.multi.trees.R
+++ b/R-package/R/xgb.plot.multi.trees.R
@@ -61,7 +61,7 @@
 #'
 #' @export
 xgb.plot.multi.trees <- function(model, feature_names = NULL, features_keep = 5, plot_width = NULL, plot_height = NULL,
-                                 render = TRUE, ...){
+                                 render = TRUE, ...) {
   if (!requireNamespace("DiagrammeR", quietly = TRUE)) {
     stop("DiagrammeR is required for xgb.plot.multi.trees")
   }
diff --git a/R-package/R/xgb.plot.tree.R b/R-package/R/xgb.plot.tree.R
index dc2656170..cb6cb25ad 100644
--- a/R-package/R/xgb.plot.tree.R
+++ b/R-package/R/xgb.plot.tree.R
@@ -68,7 +68,7 @@
 #'
 #' @export
 xgb.plot.tree <- function(feature_names = NULL, model = NULL, trees = NULL, plot_width = NULL, plot_height = NULL,
-                          render = TRUE, show_node_id = FALSE, ...){
+                          render = TRUE, show_node_id = FALSE, ...) {
   check.deprecation(...)
   if (!inherits(model, "xgb.Booster")) {
     stop("model: Has to be an object of class xgb.Booster")
diff --git a/R-package/demo/cross_validation.R b/R-package/demo/cross_validation.R
index 639533a2c..33c70a3be 100644
--- a/R-package/demo/cross_validation.R
+++ b/R-package/demo/cross_validation.R
@@ -12,7 +12,7 @@ cat('running cross validation\n')
 # do cross validation, this will print result out as
 # [iteration] metric_name:mean_value+std_value
 # std_value is standard deviation of the metric
-xgb.cv(param, dtrain, nrounds, nfold = 5, metrics = {'error'})
+xgb.cv(param, dtrain, nrounds, nfold = 5, metrics = 'error')
 cat('running cross validation, disable standard deviation display\n')
 # do cross validation, this will print result out as
diff --git a/R-package/demo/predict_leaf_indices.R b/R-package/demo/predict_leaf_indices.R
index 0f5d8f329..5ef9372ac 100644
--- a/R-package/demo/predict_leaf_indices.R
+++ b/R-package/demo/predict_leaf_indices.R
@@ -24,7 +24,7 @@ accuracy.before <- (sum((predict(bst, agaricus.test$data) >= 0.5) == agaricus.te
 pred_with_leaf <- predict(bst, dtest, predleaf = TRUE)
 head(pred_with_leaf)
 
-create.new.tree.features <- function(model, original.features){
+create.new.tree.features <- function(model, original.features) {
   pred_with_leaf <- predict(model, original.features, predleaf = TRUE)
   cols <- list()
   for (i in 1:model$niter) {
diff --git a/R-package/tests/testthat/test_helpers.R b/R-package/tests/testthat/test_helpers.R
index fdd0ce02b..dc61fd1f8 100644
--- a/R-package/tests/testthat/test_helpers.R
+++ b/R-package/tests/testthat/test_helpers.R
@@ -170,8 +170,9 @@ test_that("SHAPs sum to predictions, with or without DART", {
     label = y,
     nrounds = nrounds)
 
-  pr <- function(...)
+  pr <- function(...) {
     predict(fit, newdata = d, ...)
+  }
   pred <- pr()
   shap <- pr(predcontrib = TRUE)
   shapi <- pr(predinteraction = TRUE)
diff --git a/R-package/tests/testthat/test_interaction_constraints.R b/R-package/tests/testthat/test_interaction_constraints.R
index 7f6a8b09b..680709638 100644
--- a/R-package/tests/testthat/test_interaction_constraints.R
+++ b/R-package/tests/testthat/test_interaction_constraints.R
@@ -17,7 +17,7 @@ test_that("interaction constraints for regression", {
 
   # Set all observations to have the same x3 values then increment
   # by the same amount
-  preds <- lapply(c(1, 2, 3), function(x){
+  preds <- lapply(c(1, 2, 3), function(x) {
     tmat <- matrix(c(x1, x2, rep(x, 1000)), ncol = 3)
     return(predict(bst, tmat))
   })
diff --git a/demo/kaggle-otto/understandingXGBoostModel.Rmd b/demo/kaggle-otto/understandingXGBoostModel.Rmd
index c5776e005..0f4f82bc0 100644
--- a/demo/kaggle-otto/understandingXGBoostModel.Rmd
+++ b/demo/kaggle-otto/understandingXGBoostModel.Rmd
@@ -86,7 +86,10 @@ For that purpose, we will:
 
 ```{r classToIntegers}
 # Convert from classes to numbers
-y <- train[, nameLastCol, with = FALSE][[1]] %>% gsub('Class_','',.) %>% {as.integer(.) -1}
+y <- train[, nameLastCol, with = FALSE][[1]] %>%
+  gsub('Class_','',.) %>%
+  as.integer %>%
+  subtract(., 1)
 
 # Display the first 5 levels
 y[1:5]
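
Note (illustrative, not part of the patch): the hunks above all apply the same style rules, commonly enforced by lintr's brace checks, that an opening brace sits on the same line as its function or control keyword with a space before it, and that an if/else used only to pick a value is expanded into an explicit block. A minimal before/after sketch of that pattern, using hypothetical helper names:

## Hypothetical example, not taken from the patch.
## Before: brace on its own line, no space before "{", one-line conditional assignment.
shape_to_classes_old <- function(shape)
{
  n_classes <- if (length(shape) == 2){ shape[2] } else { 0 }
  n_classes
}

## After: brace on the same line, a space before "{", explicit if/else blocks.
shape_to_classes_new <- function(shape) {
  if (length(shape) == 2) {
    n_classes <- shape[2]
  } else {
    n_classes <- 0
  }
  n_classes
}

## Both variants return the same values, e.g. 3 for c(10, 3) and 0 for 10.
stopifnot(shape_to_classes_old(c(10, 3)) == shape_to_classes_new(c(10, 3)))

The cross_validation.R change is likewise purely cosmetic: braces in R form a compound expression that returns its last value, so metrics = {'error'} and metrics = 'error' pass the same argument to xgb.cv().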