diff --git a/.github/workflows/i386.yml b/.github/workflows/i386.yml
new file mode 100644
index 000000000..4a4d65b25
--- /dev/null
+++ b/.github/workflows/i386.yml
@@ -0,0 +1,39 @@
+name: XGBoost-i386-test
+
+on: [push, pull_request]
+
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
+jobs:
+ build-32bit:
+ name: Build 32-bit
+ runs-on: ubuntu-latest
+ services:
+ registry:
+ image: registry:2
+ ports:
+ - 5000:5000
+ steps:
+ - uses: actions/checkout@v2.5.0
+ with:
+ submodules: 'true'
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ driver-opts: network=host
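+ # network=host lets the Buildx builder reach the registry service published on localhost:5000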
+ - name: Build and push container
+ uses: docker/build-push-action@v5
+ with:
+ context: .
+ file: tests/ci_build/Dockerfile.i386
+ push: true
+ tags: localhost:5000/xgboost/build-32bit:latest
+ cache-from: type=gha
+ cache-to: type=gha,mode=max
+ - name: Build XGBoost
+ run: |
+ docker run --rm -v $PWD:/workspace -w /workspace \
+ -e CXXFLAGS='-Wno-error=overloaded-virtual -Wno-error=maybe-uninitialized -Wno-error=redundant-move' \
+ localhost:5000/xgboost/build-32bit:latest \
+ tests/ci_build/build_via_cmake.sh
diff --git a/R-package/DESCRIPTION b/R-package/DESCRIPTION
index bbaf3e75d..66e2b5692 100644
--- a/R-package/DESCRIPTION
+++ b/R-package/DESCRIPTION
@@ -65,6 +65,6 @@ Imports:
data.table (>= 1.9.6),
jsonlite (>= 1.0)
Roxygen: list(markdown = TRUE)
-RoxygenNote: 7.3.0
+RoxygenNote: 7.3.1
Encoding: UTF-8
SystemRequirements: GNU make, C++17
diff --git a/R-package/NAMESPACE b/R-package/NAMESPACE
index 398b0da5a..580d1f873 100644
--- a/R-package/NAMESPACE
+++ b/R-package/NAMESPACE
@@ -15,7 +15,6 @@ S3method(print,xgb.DMatrix)
S3method(print,xgb.cv.synchronous)
S3method(setinfo,xgb.Booster)
S3method(setinfo,xgb.DMatrix)
-S3method(slice,xgb.DMatrix)
S3method(variable.names,xgb.Booster)
export("xgb.attr<-")
export("xgb.attributes<-")
@@ -30,10 +29,14 @@ export(cb.reset.parameters)
export(cb.save.model)
export(getinfo)
export(setinfo)
-export(slice)
export(xgb.DMatrix)
export(xgb.DMatrix.hasinfo)
export(xgb.DMatrix.save)
+export(xgb.DataBatch)
+export(xgb.DataIter)
+export(xgb.ExternalDMatrix)
+export(xgb.QuantileDMatrix)
+export(xgb.QuantileDMatrix.from_iterator)
export(xgb.attr)
export(xgb.attributes)
export(xgb.config)
@@ -65,6 +68,7 @@ export(xgb.save)
export(xgb.save.raw)
export(xgb.set.config)
export(xgb.slice.Booster)
+export(xgb.slice.DMatrix)
export(xgb.train)
export(xgboost)
import(methods)
diff --git a/R-package/R/xgb.Booster.R b/R-package/R/xgb.Booster.R
index 7613c9152..febefb757 100644
--- a/R-package/R/xgb.Booster.R
+++ b/R-package/R/xgb.Booster.R
@@ -111,6 +111,21 @@ xgb.get.handle <- function(object) {
#' If passing "all", will use all of the rounds regardless of whether the model had early stopping or not.
#' @param strict_shape Default is `FALSE`. When set to `TRUE`, the output
#' type and shape of predictions are invariant to the model type.
+#' @param validate_features When `TRUE`, validate that the Booster's and newdata's feature_names
+#' match (only applicable when both `object` and `newdata` have feature names).
+#'
+#' If the column names differ and `newdata` is not an `xgb.DMatrix`, will try to reorder
+#' the columns in `newdata` to match with the booster's.
+#'
+#' If the booster has feature types and `newdata` is either an `xgb.DMatrix` or `data.frame`,
+#' will additionally verify that categorical columns are of the correct type in `newdata`,
+#' throwing an error if they do not match.
+#'
+#' If passing `FALSE`, it is assumed that the feature names and types are the same,
+#' and come in the same order as in the training data.
+#'
+#' Note that this check might add some sizable latency to the predictions, so it's
+#' recommended to disable it for performance-sensitive applications.
#' @param ... Not used.
#'
#' @details
@@ -271,7 +286,11 @@ xgb.get.handle <- function(object) {
#' @export
predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FALSE,
predleaf = FALSE, predcontrib = FALSE, approxcontrib = FALSE, predinteraction = FALSE,
- reshape = FALSE, training = FALSE, iterationrange = NULL, strict_shape = FALSE, ...) {
+ reshape = FALSE, training = FALSE, iterationrange = NULL, strict_shape = FALSE,
+ validate_features = FALSE, ...) {
+ if (validate_features) {
+ newdata <- validate.features(object, newdata)
+ }
if (!inherits(newdata, "xgb.DMatrix")) {
nthread <- xgb.nthread(object)
newdata <- xgb.DMatrix(
@@ -418,6 +437,85 @@ predict.xgb.Booster <- function(object, newdata, missing = NA, outputmargin = FA
return(arr)
}
+validate.features <- function(bst, newdata) {
+ if (is.character(newdata)) {
+ # this will be encountered when passing file paths
+ return(newdata)
+ }
+ if (inherits(newdata, "sparseVector")) {
+ # in this case, newdata won't have metadata
+ return(newdata)
+ }
+ if (is.vector(newdata)) {
+ newdata <- as.matrix(newdata)
+ }
+
+ booster_names <- getinfo(bst, "feature_name")
+ checked_names <- FALSE
+ if (NROW(booster_names)) {
+
+ try_reorder <- FALSE
+ if (inherits(newdata, "xgb.DMatrix")) {
+ curr_names <- getinfo(newdata, "feature_name")
+ } else {
+ curr_names <- colnames(newdata)
+ try_reorder <- TRUE
+ }
+
+ if (NROW(curr_names)) {
+ checked_names <- TRUE
+
+ if (length(curr_names) != length(booster_names) || any(curr_names != booster_names)) {
+
+ if (!try_reorder) {
+ stop("Feature names in 'newdata' do not match with booster's.")
+ } else {
+ if (inherits(newdata, "data.table")) {
+ newdata <- newdata[, booster_names, with = FALSE]
+ } else {
+ newdata <- newdata[, booster_names, drop = FALSE]
+ }
+ }
+
+ }
+
+ } # if (NROW(curr_names)) {
+
+ } # if (NROW(booster_names)) {
+
+ if (inherits(newdata, c("data.frame", "xgb.DMatrix"))) {
+
+ booster_types <- getinfo(bst, "feature_type")
+ if (!NROW(booster_types)) {
+ # Note: types in the booster are optional. Other interfaces
+ # might not even save it as booster attributes for example,
+ # even if the model uses categorical features.
+ return(newdata)
+ }
+ if (inherits(newdata, "xgb.DMatrix")) {
+ curr_types <- getinfo(newdata, "feature_type")
+ if (length(curr_types) != length(booster_types) || any(curr_types != booster_types)) {
+ stop("Feature types in 'newdata' do not match with booster's.")
+ }
+ }
+ if (inherits(newdata, "data.frame")) {
+ is_factor <- sapply(newdata, is.factor)
+ if (any(is_factor != (booster_types == "c"))) {
+ stop(
+ paste0(
+ "Feature types in 'newdata' do not match with booster's for same columns (by ",
+ ifelse(checked_names, "name", "position"),
+ ")."
+ )
+ )
+ }
+ }
+
+ }
+
+ return(newdata)
+}
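+
+# Illustrative sketch (hypothetical objects, not executed): given a booster 'bst'
+# trained on a data.frame with columns c("x1", "x2"), calling
+#   predict(bst, newdata[, c("x2", "x1")], validate_features = TRUE)
+# will reorder the columns of 'newdata' back to the booster's order before
+# predicting, whereas an xgb.DMatrix with mismatched names raises an error.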
+
#' @title Accessors for serializable attributes of a model
#'
diff --git a/R-package/R/xgb.DMatrix.R b/R-package/R/xgb.DMatrix.R
index 7c4c30bd3..ba0686cf9 100644
--- a/R-package/R/xgb.DMatrix.R
+++ b/R-package/R/xgb.DMatrix.R
@@ -1,14 +1,40 @@
#' Construct xgb.DMatrix object
#'
-#' Construct xgb.DMatrix object from either a dense matrix, a sparse matrix, or a local file.
-#' Supported input file formats are either a LIBSVM text file or a binary file that was created previously by
-#' \code{\link{xgb.DMatrix.save}}).
+#' Construct an 'xgb.DMatrix' object from a given data source, which can then be passed to functions
+#' such as \link{xgb.train} or \link{predict.xgb.Booster}.
#'
-#' @param data a \code{matrix} object (either numeric or integer), a \code{dgCMatrix} object,
-#' a \code{dgRMatrix} object,
-#' a \code{dsparseVector} object (only when making predictions from a fitted model, will be
-#' interpreted as a row vector), or a character string representing a filename.
-#' @param label Label of the training data.
+#' Function 'xgb.QuantileDMatrix' will construct a DMatrix with quantization for the histogram
+#' method already applied to it, which can be used to reduce memory usage (compared to using
+#' a regular DMatrix first and then creating a quantized version out of it) when using the histogram
+#' method (`tree_method = "hist"`, which is the default algorithm), but is not usable for the
+#' sorted-indices method (`tree_method = "exact"`), nor for the approximate method
+#' (`tree_method = "approx"`).
+#' @param data Data from which to create a DMatrix, which can then be used for fitting models or
+#' for getting predictions out of a fitted model.
+#'
+#' Supported input types are as follows:\itemize{
+#' \item `matrix` objects, with types `numeric`, `integer`, or `logical`.
+#' \item `data.frame` objects, with columns of types `numeric`, `integer`, `logical`, or `factor`.
+#'
+#' Note that xgboost uses base-0 encoding for categorical types, hence `factor` types (which use base-1
+#' encoding) will be converted inside the function call. Be aware that the encoding used for `factor`
+#' types is not kept as part of the model, so in subsequent calls to `predict`, it is the user's
+#' responsibility to ensure that factor columns have the same levels as the ones from which the DMatrix
+#' was constructed.
+#'
+#' Other column types are not supported.
+#' \item CSR matrices, as class `dgRMatrix` from package `Matrix`.
+#' \item CSC matrices, as class `dgCMatrix` from package `Matrix`. These are \bold{not} supported for
+#' 'xgb.QuantileDMatrix'.
+#' \item Single-row CSR matrices, as class `dsparseVector` from package `Matrix`, which is interpreted
+#' as a single row (only when making predictions from a fitted model).
+#' \item Text files in SVMLight / LibSVM formats, passed as a path to the file. These are \bold{not}
+#' supported for 'xgb.QuantileDMatrix'.
+#' \item Binary files generated by \link{xgb.DMatrix.save}, passed as a path to the file. These are
+#' \bold{not} supported for 'xgb.QuantileDMatrix'.
+#' }
+#' @param label Label of the training data. For classification problems, should be passed encoded as
+#' integers with numbering starting at zero.
#' @param weight Weight for each instance.
#'
#' Note that, for ranking task, weights are per-group. In ranking task, one weight
@@ -18,29 +44,45 @@
#' @param base_margin Base margin used for boosting from existing model.
#'
#' In the case of multi-output models, one can also pass multi-dimensional base_margin.
-#' @param missing a float value to represents missing values in data (used only when input is a dense matrix).
-#' It is useful when a 0 or some other extreme value represents missing values in data.
+#' @param missing A float value to represent missing values in data (not used when creating DMatrix
+#' from text files).
+#' It is useful to change when a zero, an infinity, or some other extreme value represents missing
+#' values in data.
#' @param silent whether to suppress printing an informational message after loading from a file.
#' @param feature_names Set names for features. Overrides column names in data
#' frame and matrix.
+#'
+#' Note: columns are not referenced by name when calling `predict`, so the column order there
+#' must be the same as in the DMatrix construction, regardless of the column names.
+#' @param feature_types Set types for features.
+#'
+#' If `data` is a `data.frame` and `feature_types` is not supplied, feature types will be deduced
+#' automatically from the column types.
+#'
+#' Otherwise, one can pass a character vector with the same length as number of columns in `data`,
+#' with the following possible values:\itemize{
+#' \item "c", which represents categorical columns.
+#' \item "q", which represents numeric columns.
+#' \item "int", which represents integer columns.
+#' \item "i", which represents logical (boolean) columns.
+#' }
+#'
+#' Note that, while categorical types are treated differently from the rest for model fitting
+#' purposes, the other types do not influence the generated model, but they do affect other
+#' functionalities such as feature importances.
+#'
+#' \bold{Important}: categorical features, if specified manually through `feature_types`, must
+#' be encoded as integers with numbering starting at zero, and the same encoding needs to be
+#' applied when passing data to `predict`. Even if passing `factor` types, the encoding will
+#' not be saved, so make sure that `factor` columns passed to `predict` have the same `levels`.
#' @param nthread Number of threads used for creating DMatrix.
#' @param group Group size for all ranking group.
#' @param qid Query ID for data samples, used for ranking.
#' @param label_lower_bound Lower bound for survival training.
#' @param label_upper_bound Upper bound for survival training.
#' @param feature_weights Set feature weights for column sampling.
-#' @param enable_categorical Experimental support of specializing for categorical features.
-#'
-#' If passing 'TRUE' and 'data' is a data frame,
-#' columns of categorical types will automatically
-#' be set to be of categorical type (feature_type='c') in the resulting DMatrix.
-#'
-#' If passing 'FALSE' and 'data' is a data frame with categorical columns,
-#' it will result in an error being thrown.
-#'
-#' If 'data' is not a data frame, this argument is ignored.
-#'
-#' JSON/UBJSON serialization format is required for this.
+#' @return An 'xgb.DMatrix' object. If calling 'xgb.QuantileDMatrix', it will have the additional
+#' subclass 'xgb.QuantileDMatrix'.
#'
#' @details
#' Note that DMatrix objects are not serializable through R functions such as \code{saveRDS} or \code{save}.
@@ -60,6 +102,7 @@
#' xgb.DMatrix.save(dtrain, fname)
#' dtrain <- xgb.DMatrix(fname)
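+#'
+#' # A minimal sketch of 'xgb.QuantileDMatrix' (histogram method only); this
+#' # example is illustrative and assumes the default `tree_method = "hist"`:
+#' data(mtcars)
+#' dqm <- xgb.QuantileDMatrix(as.matrix(mtcars[, -1]), label = mtcars$mpg, nthread = 1)
+#'
+#' # A validation set can reuse the training quantiles through 'ref':
+#' dvalid <- xgb.QuantileDMatrix(
+#'   as.matrix(mtcars[1:10, -1]), label = mtcars$mpg[1:10], ref = dqm, nthread = 1
+#' )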
#' @export
+#' @rdname xgb.DMatrix
xgb.DMatrix <- function(
data,
label = NULL,
@@ -68,18 +111,18 @@ xgb.DMatrix <- function(
missing = NA,
silent = FALSE,
feature_names = colnames(data),
+ feature_types = NULL,
nthread = NULL,
group = NULL,
qid = NULL,
label_lower_bound = NULL,
label_upper_bound = NULL,
- feature_weights = NULL,
- enable_categorical = FALSE
+ feature_weights = NULL
) {
if (!is.null(group) && !is.null(qid)) {
stop("Either one of 'group' or 'qid' should be NULL")
}
- ctypes <- NULL
+ nthread <- as.integer(NVL(nthread, -1L))
if (typeof(data) == "character") {
if (length(data) > 1) {
stop(
@@ -91,7 +134,7 @@ xgb.DMatrix <- function(
handle <- .Call(XGDMatrixCreateFromFile_R, data, as.integer(silent))
} else if (is.matrix(data)) {
handle <- .Call(
- XGDMatrixCreateFromMat_R, data, missing, as.integer(NVL(nthread, -1))
+ XGDMatrixCreateFromMat_R, data, missing, nthread
)
} else if (inherits(data, "dgCMatrix")) {
handle <- .Call(
@@ -101,7 +144,7 @@ xgb.DMatrix <- function(
data@x,
nrow(data),
missing,
- as.integer(NVL(nthread, -1))
+ nthread
)
} else if (inherits(data, "dgRMatrix")) {
handle <- .Call(
@@ -111,7 +154,7 @@ xgb.DMatrix <- function(
data@x,
ncol(data),
missing,
- as.integer(NVL(nthread, -1))
+ nthread
)
} else if (inherits(data, "dsparseVector")) {
indptr <- c(0L, as.integer(length(data@i)))
@@ -123,41 +166,15 @@ xgb.DMatrix <- function(
data@x,
length(data),
missing,
- as.integer(NVL(nthread, -1))
+ nthread
)
} else if (is.data.frame(data)) {
- ctypes <- sapply(data, function(x) {
- if (is.factor(x)) {
- if (!enable_categorical) {
- stop(
- "When factor type is used, the parameter `enable_categorical`",
- " must be set to TRUE."
- )
- }
- "c"
- } else if (is.integer(x)) {
- "int"
- } else if (is.logical(x)) {
- "i"
- } else {
- if (!is.numeric(x)) {
- stop("Invalid type in dataframe.")
- }
- "float"
- }
- })
- ## as.data.frame somehow converts integer/logical into real.
- data <- as.data.frame(sapply(data, function(x) {
- if (is.factor(x)) {
- ## XGBoost uses 0-based indexing.
- as.numeric(x) - 1
- } else {
- x
- }
- }))
+ tmp <- .process.df.for.dmatrix(data, feature_types)
+ feature_types <- tmp$feature_types
handle <- .Call(
- XGDMatrixCreateFromDF_R, data, missing, as.integer(NVL(nthread, -1))
+ XGDMatrixCreateFromDF_R, tmp$lst, missing, nthread
)
+ rm(tmp)
} else {
stop("xgb.DMatrix does not support construction from ", typeof(data))
}
@@ -167,7 +184,75 @@ xgb.DMatrix <- function(
class = "xgb.DMatrix",
fields = new.env()
)
+ .set.dmatrix.fields(
+ dmat = dmat,
+ label = label,
+ weight = weight,
+ base_margin = base_margin,
+ feature_names = feature_names,
+ feature_types = feature_types,
+ group = group,
+ qid = qid,
+ label_lower_bound = label_lower_bound,
+ label_upper_bound = label_upper_bound,
+ feature_weights = feature_weights
+ )
+ return(dmat)
+}
+
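+# Validates or deduces per-column feature types for a data.frame and converts
+# its columns to the numeric list representation expected by the C-level
+# XGDMatrixCreateFromDF_R routine (factors are shifted to base-0 codes).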
+.process.df.for.dmatrix <- function(df, feature_types) {
+ if (!nrow(df) || !ncol(df)) {
+ stop("'data' is an empty data.frame.")
+ }
+ if (!is.null(feature_types)) {
+ if (!is.character(feature_types) || length(feature_types) != ncol(df)) {
+ stop(
+ "'feature_types' must be a character vector with one entry per column in 'data'."
+ )
+ }
+ } else {
+ feature_types <- sapply(df, function(col) {
+ if (is.factor(col)) {
+ return("c")
+ } else if (is.integer(col)) {
+ return("int")
+ } else if (is.logical(col)) {
+ return("i")
+ } else {
+ if (!is.numeric(col)) {
+ stop("Invalid type in dataframe.")
+ }
+ return("float")
+ }
+ })
+ }
+
+ lst <- lapply(df, function(col) {
+ is_factor <- is.factor(col)
+ col <- as.numeric(col)
+ if (is_factor) {
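+ # factor codes are base-1 in R; shift to the base-0 encoding xgboost expects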
+ col <- col - 1
+ }
+ return(col)
+ })
+
+ return(list(lst = lst, feature_types = feature_types))
+}
+
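+# Shared helper to attach optional metadata fields (label, weight, base margin,
+# feature names/types, ranking and survival info) to a DMatrix or proxy handle
+# via setinfo().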
+.set.dmatrix.fields <- function(
+ dmat,
+ label,
+ weight,
+ base_margin,
+ feature_names,
+ feature_types,
+ group,
+ qid,
+ label_lower_bound,
+ label_upper_bound,
+ feature_weights
+) {
if (!is.null(label)) {
setinfo(dmat, "label", label)
}
@@ -180,6 +265,9 @@ xgb.DMatrix <- function(
if (!is.null(feature_names)) {
setinfo(dmat, "feature_name", feature_names)
}
+ if (!is.null(feature_types)) {
+ setinfo(dmat, "feature_type", feature_types)
+ }
if (!is.null(group)) {
setinfo(dmat, "group", group)
}
@@ -195,10 +283,511 @@ xgb.DMatrix <- function(
if (!is.null(feature_weights)) {
setinfo(dmat, "feature_weights", feature_weights)
}
- if (!is.null(ctypes)) {
- setinfo(dmat, "feature_type", ctypes)
+}
+
+#' @param ref The training dataset that provides quantile information, needed when creating a
+#' validation/test dataset with `xgb.QuantileDMatrix`. Supplying the training DMatrix
+#' as a reference means that the same quantization applied to the training data is
+#' applied to the validation/test data.
+#' @param max_bin The number of histogram bins, which should be consistent with the training
+#' parameter `max_bin`.
+#'
+#' This is only supported when constructing a QuantileDMatrix.
+#' @export
+#' @rdname xgb.DMatrix
+xgb.QuantileDMatrix <- function(
+ data,
+ label = NULL,
+ weight = NULL,
+ base_margin = NULL,
+ missing = NA,
+ feature_names = colnames(data),
+ feature_types = NULL,
+ nthread = NULL,
+ group = NULL,
+ qid = NULL,
+ label_lower_bound = NULL,
+ label_upper_bound = NULL,
+ feature_weights = NULL,
+ ref = NULL,
+ max_bin = NULL
+) {
+ nthread <- as.integer(NVL(nthread, -1L))
+ if (!is.null(ref) && !inherits(ref, "xgb.DMatrix")) {
+ stop("'ref' must be an xgb.DMatrix object.")
}
+ # Note: when passing an integer matrix, it won't get cast to numeric.
+ # Since 'int' values as understood by languages like C cannot have missing values,
+ # R represents missingness there by assigning them a value equal to the minimum
+ # integer. The 'missing' value here is set before the data, so in case of integers,
+ # need to make the conversion manually beforehand.
+ if (is.matrix(data) && storage.mode(data) %in% c("integer", "logical") && is.na(missing)) {
+ missing <- .Call(XGGetRNAIntAsDouble)
+ }
+
+ iterator_env <- as.environment(
+ list(
+ data = data,
+ label = label,
+ weight = weight,
+ base_margin = base_margin,
+ missing = missing,
+ feature_names = feature_names,
+ feature_types = feature_types,
+ group = group,
+ qid = qid,
+ label_lower_bound = label_lower_bound,
+ label_upper_bound = label_upper_bound,
+ feature_weights = feature_weights
+ )
+ )
+ data_iterator <- .single.data.iterator(iterator_env)
+
+ # Note: the ProxyDMatrix has its finalizer assigned in the R externalptr
+ # object, but that finalizer will only be called once the object is
+ # garbage-collected, which doesn't happen immediately after it goes out
+ # of scope, hence this piece of code to trigger its destruction earlier
+ # and free memory right away.
+ proxy_handle <- .make.proxy.handle()
+ on.exit({
+ .Call(XGDMatrixFree_R, proxy_handle)
+ })
+ iterator_next <- function() {
+ return(xgb.ProxyDMatrix(proxy_handle, data_iterator))
+ }
+ iterator_reset <- function() {
+ return(data_iterator$f_reset(iterator_env))
+ }
+ calling_env <- environment()
+
+ dmat <- .Call(
+ XGQuantileDMatrixCreateFromCallback_R,
+ iterator_next,
+ iterator_reset,
+ calling_env,
+ proxy_handle,
+ nthread,
+ missing,
+ max_bin,
+ ref
+ )
+ attributes(dmat) <- list(
+ class = c("xgb.DMatrix", "xgb.QuantileDMatrix"),
+ fields = attributes(proxy_handle)$fields
+ )
+ return(dmat)
+}
+
+#' @title XGBoost Data Iterator
+#' @description Interface to create a custom data iterator in order to construct a DMatrix
+#' from external memory.
+#'
+#' This function is responsible for generating an R object structure containing callback
+#' functions and an environment shared with them.
+#'
+#' The output structure from this function is then meant to be passed to \link{xgb.ExternalDMatrix},
+#' which will consume the data and create a DMatrix from it by executing the callback functions.
+#'
+#' For more information, and for a usage example, see the documentation for \link{xgb.ExternalDMatrix}.
+#' @param env An R environment to pass to the callback functions supplied here, which can be
+#' used to keep track of variables to determine how to handle the batches.
+#'
+#' For example, one might want to keep track of an iteration number in this environment in order
+#' to know which part of the data to pass next.
+#' @param f_next `function(env)` which is responsible for:\itemize{
+#' \item Accessing or retrieving the next batch of data in the iterator.
+#' \item Supplying this data by calling function \link{xgb.DataBatch} on it and returning the result.
+#' \item Keeping track of where in the sequence of batches the iterator currently is or which
+#' batch comes next, which can for example be done by modifying variables in the `env` variable
+#' that is passed here.
+#' \item Signaling whether there are more batches to be consumed or not, by returning `NULL`
+#' when the stream of data ends (all batches in the iterator have been consumed), or the result from
+#' calling \link{xgb.DataBatch} when there are more batches left to be consumed.
+#' }
+#' @param f_reset `function(env)` which is responsible for resetting the data iterator
+#' (i.e. taking it back to the first batch, called before and after the sequence of batches
+#' has been consumed).
+#'
+#' Note that, after resetting the iterator, the batches will be accessed again, so the same data
+#' (and in the same order) must be passed in subsequent iterations.
+#' @return An `xgb.DataIter` object, containing the same inputs supplied here, which can then
+#' be passed to \link{xgb.ExternalDMatrix}.
+#' @seealso \link{xgb.ExternalDMatrix}, \link{xgb.DataBatch}.
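+#' @examples
+#' # A minimal sketch (illustrative, not from the original docs): an iterator
+#' # that yields the whole of 'mtcars' as a single batch, then signals the end
+#' # of the stream by returning NULL.
+#' it <- xgb.DataIter(
+#'   env = as.environment(list(iter = 0)),
+#'   f_next = function(env) {
+#'     if (env$iter >= 1) return(NULL)
+#'     env$iter <- env$iter + 1
+#'     xgb.DataBatch(data = as.matrix(mtcars[, -1]), label = mtcars$mpg)
+#'   },
+#'   f_reset = function(env) env$iter <- 0
+#' )
+#' # 'it' can now be passed to xgb.ExternalDMatrix()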
+#' @export
+xgb.DataIter <- function(env = new.env(), f_next, f_reset) {
+ if (!is.function(f_next)) {
+ stop("'f_next' must be a function.")
+ }
+ if (!is.function(f_reset)) {
+ stop("'f_reset' must be a function.")
+ }
+ out <- list(
+ env = env,
+ f_next = f_next,
+ f_reset = f_reset
+ )
+ class(out) <- "xgb.DataIter"
+ return(out)
+}
+
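+# Single-shot iterator callbacks used internally by xgb.QuantileDMatrix: the whole
+# input is supplied as one batch on the first call to 'f_next', and NULL is
+# returned afterwards to signal that the stream has been consumed.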
+.qdm.single.fnext <- function(env) {
+ curr_iter <- env[["iter"]]
+ if (curr_iter >= 1L) {
+ return(NULL)
+ }
+
+ on.exit({
+ env[["iter"]] <- curr_iter + 1L
+ })
+ return(
+ xgb.DataBatch(
+ data = env[["data"]],
+ label = env[["label"]],
+ weight = env[["weight"]],
+ base_margin = env[["base_margin"]],
+ feature_names = env[["feature_names"]],
+ feature_types = env[["feature_types"]],
+ group = env[["group"]],
+ qid = env[["qid"]],
+ label_lower_bound = env[["label_lower_bound"]],
+ label_upper_bound = env[["label_upper_bound"]],
+ feature_weights = env[["feature_weights"]]
+ )
+ )
+}
+
+.qdm.single.freset <- function(env) {
+ env[["iter"]] <- 0L
+ return(invisible(NULL))
+}
+
+.single.data.iterator <- function(env) {
+ env[["iter"]] <- 0L
+ return(xgb.DataIter(env, .qdm.single.fnext, .qdm.single.freset))
+}
+
+# Only for internal usage
+.make.proxy.handle <- function() {
+ out <- .Call(XGProxyDMatrixCreate_R)
+ attributes(out) <- list(
+ class = c("xgb.DMatrix", "xgb.ProxyDMatrix"),
+ fields = new.env()
+ )
+ return(out)
+}
+
+#' @title Structure for Data Batches
+#' @description Helper function to supply data in batches of a data iterator when
+#' constructing a DMatrix from external memory through \link{xgb.ExternalDMatrix}
+#' or through \link{xgb.QuantileDMatrix.from_iterator}.
+#'
+#' This function is \bold{only} meant to be called inside of a callback function (which
+#' is passed as argument to function \link{xgb.DataIter} to construct a data iterator)
+#' when constructing a DMatrix through external memory - otherwise, one should call
+#' \link{xgb.DMatrix} or \link{xgb.QuantileDMatrix}.
+#'
+#' The object that results from calling this function directly is \bold{not} like
+#' an `xgb.DMatrix` - i.e. cannot be used to train a model, nor to get predictions - only
+#' possible usage is to supply data to an iterator, from which a DMatrix is then constructed.
+#'
+#' For more information and for example usage, see the documentation for \link{xgb.ExternalDMatrix}.
+#' @inheritParams xgb.DMatrix
+#' @param data The data belonging to this batch.
+#'
+#' Note that not all of the input types supported by \link{xgb.DMatrix} are possible
+#' to pass here. Supported types are:\itemize{
+#' \item `matrix`, with types `numeric`, `integer`, and `logical`. Note that for types
+#' `integer` and `logical`, missing values might not be automatically recognized as
+#' such - see the documentation for parameter `missing` in \link{xgb.ExternalDMatrix}
+#' for details on this.
+#' \item `data.frame`, with the same types as supported by 'xgb.DMatrix' and same
+#' conversions applied to it. See the documentation for parameter `data` in
+#' \link{xgb.DMatrix} for details on it.
+#' \item CSR matrices, as class `dgRMatrix` from package `Matrix`.
+#' }
+#' @return An object of class `xgb.DataBatch`, which is just a list containing the
+#' data and parameters passed here. It does \bold{not} inherit from `xgb.DMatrix`.
+#' @seealso \link{xgb.DataIter}, \link{xgb.ExternalDMatrix}.
+#' @export
+xgb.DataBatch <- function(
+ data,
+ label = NULL,
+ weight = NULL,
+ base_margin = NULL,
+ feature_names = colnames(data),
+ feature_types = NULL,
+ group = NULL,
+ qid = NULL,
+ label_lower_bound = NULL,
+ label_upper_bound = NULL,
+ feature_weights = NULL
+) {
+ stopifnot(inherits(data, c("matrix", "data.frame", "dgRMatrix")))
+ out <- list(
+ data = data,
+ label = label,
+ weight = weight,
+ base_margin = base_margin,
+ feature_names = feature_names,
+ feature_types = feature_types,
+ group = group,
+ qid = qid,
+ label_lower_bound = label_lower_bound,
+ label_upper_bound = label_upper_bound,
+ feature_weights = feature_weights
+ )
+ class(out) <- "xgb.DataBatch"
+ return(out)
+}
+
+# This is only for internal usage, class is not exposed to the user.
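+# Returns 1L when a batch was fetched and set on the proxy handle, or 0L to
+# signal that the iterator has been exhausted - the convention expected by the
+# C-level callback-based DMatrix constructors.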
+xgb.ProxyDMatrix <- function(proxy_handle, data_iterator) {
+ lst <- data_iterator$f_next(data_iterator$env)
+ if (is.null(lst)) {
+ return(0L)
+ }
+ if (!inherits(lst, "xgb.DataBatch")) {
+ stop("DataIter 'f_next' must return either NULL or the result from calling 'xgb.DataBatch'.")
+ }
+
+ if (!is.null(lst$group) && !is.null(lst$qid)) {
+ stop("Either one of 'group' or 'qid' should be NULL")
+ }
+ if (is.data.frame(lst$data)) {
+ tmp <- .process.df.for.dmatrix(lst$data, lst$feature_types)
+ lst$feature_types <- tmp$feature_types
+ .Call(XGProxyDMatrixSetDataColumnar_R, proxy_handle, tmp$lst)
+ rm(tmp)
+ } else if (is.matrix(lst$data)) {
+ .Call(XGProxyDMatrixSetDataDense_R, proxy_handle, lst$data)
+ } else if (inherits(lst$data, "dgRMatrix")) {
+ tmp <- list(p = lst$data@p, j = lst$data@j, x = lst$data@x, ncol = ncol(lst$data))
+ .Call(XGProxyDMatrixSetDataCSR_R, proxy_handle, tmp)
+ } else {
+ stop("'data' has unsupported type.")
+ }
+
+ .set.dmatrix.fields(
+ dmat = proxy_handle,
+ label = lst$label,
+ weight = lst$weight,
+ base_margin = lst$base_margin,
+ feature_names = lst$feature_names,
+ feature_types = lst$feature_types,
+ group = lst$group,
+ qid = lst$qid,
+ label_lower_bound = lst$label_lower_bound,
+ label_upper_bound = lst$label_upper_bound,
+ feature_weights = lst$feature_weights
+ )
+
+ return(1L)
+}
+
+#' @title DMatrix from External Data
+#' @description Create a special type of xgboost 'DMatrix' object from external data
+#' supplied by an \link{xgb.DataIter} object, potentially passed in batches from a
+#' bigger set that might not fit entirely in memory.
+#'
+#' The data supplied by the iterator is accessed on-demand as needed, multiple times,
+#' without being concatenated, but note that fields like 'label' \bold{will} be
+#' concatenated from multiple calls to the data iterator.
+#'
+#' For more information, see the guide 'Using XGBoost External Memory Version':
+#' \url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html}
+#' @inheritParams xgb.DMatrix
+#' @param data_iterator A data iterator structure as returned by \link{xgb.DataIter},
+#' which includes an environment shared between function calls, and functions to access
+#' the data in batches on-demand.
+#' @param cache_prefix The path prefix for the cache files; the caller must create all the directories in this path beforehand.
+#' @param missing A float value to represent missing values in data.
+#'
+#' Note that, while functions like \link{xgb.DMatrix} can take a generic `NA` and interpret it
+#' correctly for different types like `numeric` and `integer`, if an `NA` value is passed here,
+#' it will not be adapted for different input types.
+#'
+#' For example, in R `integer` types, missing values are represented by integer number `-2147483648`
+#' (since machine 'integer' types do not have an inherent 'NA' value) - hence, if one passes `NA`,
+#' which is interpreted as a floating-point NaN by 'xgb.ExternalDMatrix' and by
+#' 'xgb.QuantileDMatrix.from_iterator', these integer missing values will not be treated as missing.
+#' This should not pose any problem for `numeric` types, since they do have an inherent NaN value.
+#' @return An 'xgb.DMatrix' object, with subclass 'xgb.ExternalDMatrix', in which the data is not
+#' held internally but accessed through the iterator when needed.
+#' @seealso \link{xgb.DataIter}, \link{xgb.DataBatch}, \link{xgb.QuantileDMatrix.from_iterator}
+#' @examples
+#' library(xgboost)
+#' data(mtcars)
+#'
+#' # this custom environment will be passed to the iterator
+#' # functions at each call. It's up to the user to keep
+#' # track of the iteration number in this environment.
+#' iterator_env <- as.environment(
+#' list(
+#' iter = 0,
+#' x = mtcars[, -1],
+#' y = mtcars[, 1]
+#' )
+#' )
+#'
+#' # Data is passed in two batches.
+#' # In this example, batches are obtained by subsetting the 'x' variable.
+#' # This is not advantageous to do, since the data is already loaded in memory
+#' # and can be passed in full in one go, but there can be situations in which
+#' # only a subset of the data will fit in the computer's memory, and it can
+#' # be loaded in batches that are accessed one-at-a-time only.
+#' iterator_next <- function(iterator_env) {
+#' curr_iter <- iterator_env[["iter"]]
+#' if (curr_iter >= 2) {
+#' # there are only two batches, so this signals end of the stream
+#' return(NULL)
+#' }
+#'
+#' if (curr_iter == 0) {
+#' x_batch <- iterator_env[["x"]][1:16, ]
+#' y_batch <- iterator_env[["y"]][1:16]
+#' } else {
+#' x_batch <- iterator_env[["x"]][17:32, ]
+#' y_batch <- iterator_env[["y"]][17:32]
+#' }
+#' on.exit({
+#' iterator_env[["iter"]] <- curr_iter + 1
+#' })
+#'
+#' # Function 'xgb.DataBatch' must be called manually
+#' # at each batch with all the appropriate attributes,
+#' # such as feature names and feature types.
+#' return(xgb.DataBatch(data = x_batch, label = y_batch))
+#' }
+#'
+#' # This moves the iterator back to its beginning
+#' iterator_reset <- function(iterator_env) {
+#' iterator_env[["iter"]] <- 0
+#' }
+#'
+#' data_iterator <- xgb.DataIter(
+#' env = iterator_env,
+#' f_next = iterator_next,
+#' f_reset = iterator_reset
+#' )
+#' cache_prefix <- tempdir()
+#'
+#' # DMatrix will be constructed from the iterator's batches
+#' dm <- xgb.ExternalDMatrix(data_iterator, cache_prefix, nthread = 1)
+#'
+#' # After construction, can be used as a regular DMatrix
+#' params <- list(nthread = 1, objective = "reg:squarederror")
+#' model <- xgb.train(data = dm, nrounds = 2, params = params)
+#'
+#' # Predictions can also be called on it, and should be the same
+#' # as when the same data is passed through other DMatrix types.
+#' pred_dm <- predict(model, dm)
+#' pred_mat <- predict(model, as.matrix(mtcars[, -1]))
+#' @export
+xgb.ExternalDMatrix <- function(
+ data_iterator,
+ cache_prefix = tempdir(),
+ missing = NA,
+ nthread = NULL
+) {
+ stopifnot(inherits(data_iterator, "xgb.DataIter"))
+ stopifnot(is.character(cache_prefix))
+
+ cache_prefix <- path.expand(cache_prefix)
+ nthread <- as.integer(NVL(nthread, -1L))
+
+ proxy_handle <- .make.proxy.handle()
+ on.exit({
+ .Call(XGDMatrixFree_R, proxy_handle)
+ })
+ iterator_next <- function() {
+ return(xgb.ProxyDMatrix(proxy_handle, data_iterator))
+ }
+ iterator_reset <- function() {
+ return(data_iterator$f_reset(data_iterator$env))
+ }
+ calling_env <- environment()
+
+ dmat <- .Call(
+ XGDMatrixCreateFromCallback_R,
+ iterator_next,
+ iterator_reset,
+ calling_env,
+ proxy_handle,
+ nthread,
+ missing,
+ cache_prefix
+ )
+
+ attributes(dmat) <- list(
+ class = c("xgb.DMatrix", "xgb.ExternalDMatrix"),
+ fields = attributes(proxy_handle)$fields
+ )
+ return(dmat)
+}
+
+
+#' @title QuantileDMatrix from External Data
+#' @description Create an `xgb.QuantileDMatrix` object (exact same class as would be returned by
+#' calling function \link{xgb.QuantileDMatrix}, with the same advantages and limitations) from
+#' external data supplied by an \link{xgb.DataIter} object, potentially passed in batches from
+#' a bigger set that might not fit entirely in memory, same way as \link{xgb.ExternalDMatrix}.
+#'
+#' Note that, while external data will only be loaded through the iterator (thus the full data
+#' might not be held entirely in-memory), the quantized representation of the data will get
+#' created in-memory, being concatenated from multiple calls to the data iterator. The quantized
+#' version is typically lighter than the original data, so there might be cases in which this
+#' representation could potentially fit in memory even if the full data doesn't.
+#'
+#' For more information, see the guide 'Using XGBoost External Memory Version':
+#' \url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html}
+#' @inheritParams xgb.ExternalDMatrix
+#' @inheritParams xgb.QuantileDMatrix
+#' @return An 'xgb.DMatrix' object, with subclass 'xgb.QuantileDMatrix'.
+#' @seealso \link{xgb.DataIter}, \link{xgb.DataBatch}, \link{xgb.ExternalDMatrix},
+#' \link{xgb.QuantileDMatrix}
+#' @export
+xgb.QuantileDMatrix.from_iterator <- function( # nolint
+ data_iterator,
+ missing = NA,
+ nthread = NULL,
+ ref = NULL,
+ max_bin = NULL
+) {
+ stopifnot(inherits(data_iterator, "xgb.DataIter"))
+ if (!is.null(ref) && !inherits(ref, "xgb.DMatrix")) {
+ stop("'ref' must be an xgb.DMatrix object.")
+ }
+
+ nthread <- as.integer(NVL(nthread, -1L))
+
+ proxy_handle <- .make.proxy.handle()
+ on.exit({
+ .Call(XGDMatrixFree_R, proxy_handle)
+ })
+ iterator_next <- function() {
+ return(xgb.ProxyDMatrix(proxy_handle, data_iterator))
+ }
+ iterator_reset <- function() {
+ return(data_iterator$f_reset(data_iterator$env))
+ }
+ calling_env <- environment()
+
+ dmat <- .Call(
+ XGQuantileDMatrixCreateFromCallback_R,
+ iterator_next,
+ iterator_reset,
+ calling_env,
+ proxy_handle,
+ nthread,
+ missing,
+ max_bin,
+ ref
+ )
+
+ attributes(dmat) <- list(
+ class = c("xgb.DMatrix", "xgb.QuantileDMatrix"),
+ fields = attributes(proxy_handle)$fields
+ )
return(dmat)
}
@@ -646,19 +1235,15 @@ xgb.get.DMatrix.data <- function(dmat) {
#' data(agaricus.train, package='xgboost')
#' dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
#'
-#' dsub <- slice(dtrain, 1:42)
+#' dsub <- xgb.slice.DMatrix(dtrain, 1:42)
#' labels1 <- getinfo(dsub, 'label')
#' dsub <- dtrain[1:42, ]
#' labels2 <- getinfo(dsub, 'label')
#' all.equal(labels1, labels2)
#'
-#' @rdname slice.xgb.DMatrix
+#' @rdname xgb.slice.DMatrix
#' @export
-slice <- function(object, idxset) UseMethod("slice")
-
-#' @rdname slice.xgb.DMatrix
-#' @export
-slice.xgb.DMatrix <- function(object, idxset) {
+xgb.slice.DMatrix <- function(object, idxset) {
if (!inherits(object, "xgb.DMatrix")) {
stop("object must be xgb.DMatrix")
}
@@ -682,10 +1267,10 @@ slice.xgb.DMatrix <- function(object, idxset) {
return(structure(ret, class = "xgb.DMatrix"))
}
-#' @rdname slice.xgb.DMatrix
+#' @rdname xgb.slice.DMatrix
#' @export
`[.xgb.DMatrix` <- function(object, idxset, colset = NULL) {
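+ # Note: 'colset' is accepted for subsetting-syntax compatibility but is
+ # currently ignored; only row slicing is performed.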
- slice(object, idxset)
+ xgb.slice.DMatrix(object, idxset)
}
@@ -712,7 +1297,17 @@ print.xgb.DMatrix <- function(x, verbose = FALSE, ...) {
cat("INVALID xgb.DMatrix object. Must be constructed anew.\n")
return(invisible(x))
}
- cat('xgb.DMatrix dim:', nrow(x), 'x', ncol(x), ' info: ')
+ class_print <- if (inherits(x, "xgb.QuantileDMatrix")) {
+ "xgb.QuantileDMatrix"
+ } else if (inherits(x, "xgb.ExternalDMatrix")) {
+ "xgb.ExternalDMatrix"
+ } else if (inherits(x, "xgb.ProxyDMatrix")) {
+ "xgb.ProxyDMatrix"
+ } else {
+ "xgb.DMatrix"
+ }
+
+ cat(class_print, 'dim:', nrow(x), 'x', ncol(x), ' info: ')
infos <- character(0)
if (xgb.DMatrix.hasinfo(x, 'label')) infos <- 'label'
if (xgb.DMatrix.hasinfo(x, 'weight')) infos <- c(infos, 'weight')
diff --git a/R-package/R/xgb.cv.R b/R-package/R/xgb.cv.R
index eb0495631..29bddb57f 100644
--- a/R-package/R/xgb.cv.R
+++ b/R-package/R/xgb.cv.R
@@ -197,12 +197,12 @@ xgb.cv <- function(params = list(), data, nrounds, nfold, label = NULL, missing
nthread = params$nthread
)
bst_folds <- lapply(seq_along(folds), function(k) {
- dtest <- slice(dall, folds[[k]])
+ dtest <- xgb.slice.DMatrix(dall, folds[[k]])
# code originally contributed by @RolandASc on stackoverflow
if (is.null(train_folds))
- dtrain <- slice(dall, unlist(folds[-k]))
+ dtrain <- xgb.slice.DMatrix(dall, unlist(folds[-k]))
else
- dtrain <- slice(dall, train_folds[[k]])
+ dtrain <- xgb.slice.DMatrix(dall, train_folds[[k]])
bst <- xgb.Booster(
params = params,
cachelist = list(dtrain, dtest),
diff --git a/R-package/man/predict.xgb.Booster.Rd b/R-package/man/predict.xgb.Booster.Rd
index 7a6dd6c13..95e7a51fd 100644
--- a/R-package/man/predict.xgb.Booster.Rd
+++ b/R-package/man/predict.xgb.Booster.Rd
@@ -17,6 +17,7 @@
training = FALSE,
iterationrange = NULL,
strict_shape = FALSE,
+ validate_features = FALSE,
...
)
}
@@ -66,6 +67,23 @@ base-1 indexing, and inclusive of both ends).
\item{strict_shape}{Default is \code{FALSE}. When set to \code{TRUE}, the output
type and shape of predictions are invariant to the model type.}
+\item{validate_features}{When \code{TRUE}, validate that the Booster's and newdata's feature_names
+match (only applicable when both \code{object} and \code{newdata} have feature names).
+
+\if{html}{\out{
}}\preformatted{ If the column names differ and `newdata` is not an `xgb.DMatrix`, will try to reorder
+ the columns in `newdata` to match with the booster's.
+
+ If the booster has feature types and `newdata` is either an `xgb.DMatrix` or `data.frame`,
+ will additionally verify that categorical columns are of the correct type in `newdata`,
+ throwing an error if they do not match.
+
+ If passing `FALSE`, it is assumed that the feature names and types are the same,
+ and come in the same order as in the training data.
+
+ Note that this check might add some sizable latency to the predictions, so it's
+ recommended to disable it for performance-sensitive applications.
+}\if{html}{\out{
}}}
+
\item{...}{Not used.}
}
\value{
diff --git a/R-package/man/xgb.DMatrix.Rd b/R-package/man/xgb.DMatrix.Rd
index eb667377f..d18270733 100644
--- a/R-package/man/xgb.DMatrix.Rd
+++ b/R-package/man/xgb.DMatrix.Rd
@@ -2,6 +2,7 @@
% Please edit documentation in R/xgb.DMatrix.R
\name{xgb.DMatrix}
\alias{xgb.DMatrix}
+\alias{xgb.QuantileDMatrix}
\title{Construct xgb.DMatrix object}
\usage{
xgb.DMatrix(
@@ -12,22 +13,61 @@ xgb.DMatrix(
missing = NA,
silent = FALSE,
feature_names = colnames(data),
+ feature_types = NULL,
+ nthread = NULL,
+ group = NULL,
+ qid = NULL,
+ label_lower_bound = NULL,
+ label_upper_bound = NULL,
+ feature_weights = NULL
+)
+
+xgb.QuantileDMatrix(
+ data,
+ label = NULL,
+ weight = NULL,
+ base_margin = NULL,
+ missing = NA,
+ feature_names = colnames(data),
+ feature_types = NULL,
nthread = NULL,
group = NULL,
qid = NULL,
label_lower_bound = NULL,
label_upper_bound = NULL,
feature_weights = NULL,
- enable_categorical = FALSE
+ ref = NULL,
+ max_bin = NULL
)
}
\arguments{
-\item{data}{a \code{matrix} object (either numeric or integer), a \code{dgCMatrix} object,
-a \code{dgRMatrix} object,
-a \code{dsparseVector} object (only when making predictions from a fitted model, will be
-interpreted as a row vector), or a character string representing a filename.}
+\item{data}{Data from which to create a DMatrix, which can then be used for fitting models or
+for getting predictions out of a fitted model.
-\item{label}{Label of the training data.}
+Supported input types are as follows:\itemize{
+\item \code{matrix} objects, with types \code{numeric}, \code{integer}, or \code{logical}.
+\item \code{data.frame} objects, with columns of types \code{numeric}, \code{integer}, \code{logical}, or \code{factor}.
+
+Note that xgboost uses base-0 encoding for categorical types, hence \code{factor} types (which use base-1
+encoding) will be converted inside the function call. Be aware that the encoding used for \code{factor}
+types is not kept as part of the model, so in subsequent calls to \code{predict}, it is the user's
+responsibility to ensure that factor columns have the same levels as the ones from which the DMatrix
+was constructed.
+
+Other column types are not supported.
+\item CSR matrices, as class \code{dgRMatrix} from package \code{Matrix}.
+\item CSC matrices, as class \code{dgCMatrix} from package \code{Matrix}. These are \bold{not} supported for
+'xgb.QuantileDMatrix'.
+\item Single-row CSR matrices, as class \code{dsparseVector} from package \code{Matrix}, which is interpreted
+as a single row (only when making predictions from a fitted model).
+\item Text files in SVMLight / LibSVM formats, passed as a path to the file. These are \bold{not}
+supported for 'xgb.QuantileDMatrix'.
+\item Binary files generated by \link{xgb.DMatrix.save}, passed as a path to the file. These are
+\bold{not} supported for 'xgb.QuantileDMatrix'.
+}}
+
+\item{label}{Label of the training data. For classification problems, should be passed encoded as
+integers with numbering starting at zero.}
\item{weight}{Weight for each instance.
@@ -41,13 +81,41 @@ so it doesn't make sense to assign weights to individual data points.}
\if{html}{\out{}}\preformatted{ In the case of multi-output models, one can also pass multi-dimensional base_margin.
}\if{html}{\out{
}}}
-\item{missing}{a float value to represents missing values in data (used only when input is a dense matrix).
-It is useful when a 0 or some other extreme value represents missing values in data.}
+\item{missing}{A float value to represent missing values in data (not used when creating DMatrix
+from text files).
+It is useful to change when a zero, an infinity, or some other extreme value represents missing
+values in data.}
\item{silent}{whether to suppress printing an informational message after loading from a file.}
\item{feature_names}{Set names for features. Overrides column names in data
-frame and matrix.}
+frame and matrix.
+
+\if{html}{\out{}}\preformatted{ Note: columns are not referenced by name when calling `predict`, so the column order there
+ must be the same as in the DMatrix construction, regardless of the column names.
+}\if{html}{\out{
}}}
+
+\item{feature_types}{Set types for features.
+
+If \code{data} is a \code{data.frame} and \code{feature_types} is not supplied, feature types will be deduced
+automatically from the column types.
+
+Otherwise, one can pass a character vector with the same length as number of columns in \code{data},
+with the following possible values:\itemize{
+\item "c", which represents categorical columns.
+\item "q", which represents numeric columns.
+\item "int", which represents integer columns.
+\item "i", which represents logical (boolean) columns.
+}
+
+Note that, while categorical types are treated differently from the rest for model fitting
+purposes, the other types do not influence the generated model, but they do affect other
+functionalities such as feature importances.
+
+\bold{Important}: categorical features, if specified manually through \code{feature_types}, must
+be encoded as integers with numbering starting at zero, and the same encoding needs to be
+applied when passing data to \code{predict}. Even if passing \code{factor} types, the encoding will
+not be saved, so make sure that \code{factor} columns passed to \code{predict} have the same \code{levels}.}
\item{nthread}{Number of threads used for creating DMatrix.}
@@ -61,26 +129,32 @@ frame and matrix.}
\item{feature_weights}{Set feature weights for column sampling.}
-\item{enable_categorical}{Experimental support of specializing for categorical features.
+\item{ref}{The training dataset that provides quantile information, needed when creating a
+validation/test dataset with \code{xgb.QuantileDMatrix}. Supplying the training DMatrix
+as a reference means that the same quantization applied to the training data is
+applied to the validation/test data.}
-\if{html}{\out{}}\preformatted{ If passing 'TRUE' and 'data' is a data frame,
- columns of categorical types will automatically
- be set to be of categorical type (feature_type='c') in the resulting DMatrix.
+\item{max_bin}{The number of histogram bins, which should be consistent with the training
+parameter \code{max_bin}.
- If passing 'FALSE' and 'data' is a data frame with categorical columns,
- it will result in an error being thrown.
-
- If 'data' is not a data frame, this argument is ignored.
-
- JSON/UBJSON serialization format is required for this.
-}\if{html}{\out{
}}}
+This is only supported when constructing a QuantileDMatrix.}
+}
+\value{
+An 'xgb.DMatrix' object. If calling 'xgb.QuantileDMatrix', it will have the additional
+subclass 'xgb.QuantileDMatrix'.
}
\description{
-Construct xgb.DMatrix object from either a dense matrix, a sparse matrix, or a local file.
-Supported input file formats are either a LIBSVM text file or a binary file that was created previously by
-\code{\link{xgb.DMatrix.save}}).
+Construct an 'xgb.DMatrix' object from a given data source, which can then be passed to functions
+such as \link{xgb.train} or \link{predict.xgb.Booster}.
}
\details{
+Function 'xgb.QuantileDMatrix' will construct a DMatrix with quantization for the histogram
+method already applied to it, which can be used to reduce memory usage (compared to using
+a regular DMatrix first and then creating a quantized version out of it) when using the histogram
+method (\code{tree_method = "hist"}, which is the default algorithm), but is not usable for the
+sorted-indices method (\code{tree_method = "exact"}), nor for the approximate method
+(\code{tree_method = "approx"}).
+
Note that DMatrix objects are not serializable through R functions such as \code{saveRDS} or \code{save}.
If a DMatrix gets serialized and then de-serialized (for example, when saving data in an R session or caching
chunks in an Rmd file), the resulting object will not be usable anymore and will need to be reconstructed
diff --git a/R-package/man/xgb.DataBatch.Rd b/R-package/man/xgb.DataBatch.Rd
new file mode 100644
index 000000000..063b82b03
--- /dev/null
+++ b/R-package/man/xgb.DataBatch.Rd
@@ -0,0 +1,112 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/xgb.DMatrix.R
+\name{xgb.DataBatch}
+\alias{xgb.DataBatch}
+\title{Structure for Data Batches}
+\usage{
+xgb.DataBatch(
+ data,
+ label = NULL,
+ weight = NULL,
+ base_margin = NULL,
+ feature_names = colnames(data),
+ feature_types = NULL,
+ group = NULL,
+ qid = NULL,
+ label_lower_bound = NULL,
+ label_upper_bound = NULL,
+ feature_weights = NULL
+)
+}
+\arguments{
+\item{data}{The data belonging to this batch.
+
+Note that not all of the input types supported by \link{xgb.DMatrix} are possible
+to pass here. Supported types are:\itemize{
+\item \code{matrix}, with types \code{numeric}, \code{integer}, and \code{logical}. Note that for types
+\code{integer} and \code{logical}, missing values might not be automatically recognized as
+such - see the documentation for parameter \code{missing} in \link{xgb.ExternalDMatrix}
+for details on this.
+\item \code{data.frame}, with the same types as supported by 'xgb.DMatrix' and same
+conversions applied to it. See the documentation for parameter \code{data} in
+\link{xgb.DMatrix} for details on it.
+\item CSR matrices, as class \code{dgRMatrix} from package \code{Matrix}.
+}}
+
+\item{label}{Label of the training data. For classification problems, should be passed encoded as
+integers with numbering starting at zero.}
+
+\item{weight}{Weight for each instance.
+
+Note that, for ranking task, weights are per-group. In ranking task, one weight
+is assigned to each group (not each data point). This is because we
+only care about the relative ordering of data points within each group,
+so it doesn't make sense to assign weights to individual data points.}
+
+\item{base_margin}{Base margin used for boosting from existing model.
+
+\if{html}{\out{}}\preformatted{ In the case of multi-output models, one can also pass multi-dimensional base_margin.
+}\if{html}{\out{
}}}
+
+\item{feature_names}{Set names for features. Overrides column names in data
+frame and matrix.
+
+\if{html}{\out{}}\preformatted{ Note: columns are not referenced by name when calling `predict`, so the column order there
+ must be the same as in the DMatrix construction, regardless of the column names.
+}\if{html}{\out{
}}}
+
+\item{feature_types}{Set types for features.
+
+If \code{data} is a \code{data.frame} and \code{feature_types} is not supplied, feature types will be deduced
+automatically from the column types.
+
+Otherwise, one can pass a character vector with the same length as number of columns in \code{data},
+with the following possible values:\itemize{
+\item "c", which represents categorical columns.
+\item "q", which represents numeric columns.
+\item "int", which represents integer columns.
+\item "i", which represents logical (boolean) columns.
+}
+
+Note that, while categorical types are treated differently from the rest for model fitting
+purposes, the other types do not influence the generated model, but they do affect other
+functionalities such as feature importances.
+
+\bold{Important}: categorical features, if specified manually through \code{feature_types}, must
+be encoded as integers with numbering starting at zero, and the same encoding needs to be
+applied when passing data to \code{predict}. Even if passing \code{factor} types, the encoding will
+not be saved, so make sure that \code{factor} columns passed to \code{predict} have the same \code{levels}.}
+
+\item{group}{Group size for all ranking group.}
+
+\item{qid}{Query ID for data samples, used for ranking.}
+
+\item{label_lower_bound}{Lower bound for survival training.}
+
+\item{label_upper_bound}{Upper bound for survival training.}
+
+\item{feature_weights}{Set feature weights for column sampling.}
+}
+\value{
+An object of class \code{xgb.DataBatch}, which is just a list containing the
+data and parameters passed here. It does \bold{not} inherit from \code{xgb.DMatrix}.
+}
+\description{
+Helper function to supply data in batches of a data iterator when
+constructing a DMatrix from external memory through \link{xgb.ExternalDMatrix}
+or through \link{xgb.QuantileDMatrix.from_iterator}.
+
+This function is \bold{only} meant to be called inside of a callback function (which
+is passed as argument to function \link{xgb.DataIter} to construct a data iterator)
+when constructing a DMatrix through external memory - otherwise, one should call
+\link{xgb.DMatrix} or \link{xgb.QuantileDMatrix}.
+
+The object that results from calling this function directly is \bold{not} like
+an \code{xgb.DMatrix} - i.e. cannot be used to train a model, nor to get predictions - only
+possible usage is to supply data to an iterator, from which a DMatrix is then constructed.
+
+For more information and for example usage, see the documentation for \link{xgb.ExternalDMatrix}.
+}
+\seealso{
+\link{xgb.DataIter}, \link{xgb.ExternalDMatrix}.
+}
diff --git a/R-package/man/xgb.DataIter.Rd b/R-package/man/xgb.DataIter.Rd
new file mode 100644
index 000000000..2bd68ce51
--- /dev/null
+++ b/R-package/man/xgb.DataIter.Rd
@@ -0,0 +1,51 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/xgb.DMatrix.R
+\name{xgb.DataIter}
+\alias{xgb.DataIter}
+\title{XGBoost Data Iterator}
+\usage{
+xgb.DataIter(env = new.env(), f_next, f_reset)
+}
+\arguments{
+\item{env}{An R environment to pass to the callback functions supplied here, which can be
+used to keep track of variables to determine how to handle the batches.
+
+For example, one might want to keep track of an iteration number in this environment in order
+to know which part of the data to pass next.}
+
+\item{f_next}{\verb{function(env)} which is responsible for:\itemize{
+\item Accessing or retrieving the next batch of data in the iterator.
+\item Supplying this data by calling function \link{xgb.DataBatch} on it and returning the result.
+\item Keeping track of where in the sequence of batches the iterator currently is or which
+batch comes next, which can for example be done by modifying variables in the \code{env} variable
+that is passed here.
+\item Signaling whether there are more batches to be consumed or not, by returning \code{NULL}
+when the stream of data ends (all batches in the iterator have been consumed), or the result from
+calling \link{xgb.DataBatch} when there are more batches left to be consumed.
+}}
+
+\item{f_reset}{\verb{function(env)} which is responsible for resetting the data iterator
+(i.e. taking it back to the first batch, called before and after the sequence of batches
+has been consumed).
+
+Note that, after resetting the iterator, the batches will be accessed again, so the same data
+(and in the same order) must be passed in subsequent iterations.}
+}
+\value{
+An \code{xgb.DataIter} object, containing the same inputs supplied here, which can then
+be passed to \link{xgb.ExternalDMatrix}.
+}
+\description{
+Interface to create a custom data iterator in order to construct a DMatrix
+from external memory.
+
+This function is responsible for generating an R object structure containing callback
+functions and an environment shared with them.
+
+The output structure from this function is then meant to be passed to \link{xgb.ExternalDMatrix},
+which will consume the data and create a DMatrix from it by executing the callback functions.
+
+For more information, and for a usage example, see the documentation for \link{xgb.ExternalDMatrix}.
+}
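+\examples{
+# A minimal sketch (illustrative, not from the original docs): an iterator
+# that yields the whole of 'mtcars' as a single batch, then signals the end
+# of the stream by returning NULL.
+it <- xgb.DataIter(
+  env = as.environment(list(iter = 0)),
+  f_next = function(env) {
+    if (env$iter >= 1) return(NULL)
+    env$iter <- env$iter + 1
+    xgb.DataBatch(data = as.matrix(mtcars[, -1]), label = mtcars$mpg)
+  },
+  f_reset = function(env) env$iter <- 0
+)
+# 'it' can now be passed to xgb.ExternalDMatrix()
+}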
+\seealso{
+\link{xgb.ExternalDMatrix}, \link{xgb.DataBatch}.
+}
diff --git a/R-package/man/xgb.ExternalDMatrix.Rd b/R-package/man/xgb.ExternalDMatrix.Rd
new file mode 100644
index 000000000..14a872cb5
--- /dev/null
+++ b/R-package/man/xgb.ExternalDMatrix.Rd
@@ -0,0 +1,122 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/xgb.DMatrix.R
+\name{xgb.ExternalDMatrix}
+\alias{xgb.ExternalDMatrix}
+\title{DMatrix from External Data}
+\usage{
+xgb.ExternalDMatrix(
+ data_iterator,
+ cache_prefix = tempdir(),
+ missing = NA,
+ nthread = NULL
+)
+}
+\arguments{
+\item{data_iterator}{A data iterator structure as returned by \link{xgb.DataIter},
+which includes an environment shared between function calls, and functions to access
+the data in batches on-demand.}
+
+\item{cache_prefix}{The path prefix for the cache files; the caller must create all the directories in this path beforehand.}
+
+\item{missing}{A float value to represent missing values in data.
+
+Note that, while functions like \link{xgb.DMatrix} can take a generic \code{NA} and interpret it
+correctly for different types like \code{numeric} and \code{integer}, if an \code{NA} value is passed here,
+it will not be adapted for different input types.
+
+For example, in R \code{integer} types, missing values are represented by the integer number
+\code{-2147483648} (since machine 'integer' types do not have an inherent 'NA' value). Hence,
+if one passes \code{NA}, which is interpreted as a floating-point NaN by 'xgb.ExternalDMatrix'
+and by 'xgb.QuantileDMatrix.from_iterator', these integer missing values will not be treated
+as missing. This should not pose any problem for \code{numeric} types, since they do have an
+inherent NaN value.}
+
+\item{nthread}{Number of threads used for creating DMatrix.}
+}
+\value{
+An 'xgb.DMatrix' object, with subclass 'xgb.ExternalDMatrix', in which the data is not
+held internally but accessed through the iterator when needed.
+}
+\description{
+Create a special type of xgboost 'DMatrix' object from external data
+supplied by an \link{xgb.DataIter} object, potentially passed in batches from a
+bigger set that might not fit entirely in memory.
+
+The data supplied by the iterator is accessed on-demand as needed, multiple times,
+without being concatenated, but note that fields like 'label' \bold{will} be
+concatenated from multiple calls to the data iterator.
+
+For more information, see the guide 'Using XGBoost External Memory Version':
+\url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html}
+}
+\examples{
+library(xgboost)
+data(mtcars)
+
+# this custom environment will be passed to the iterator
+# functions at each call. It's up to the user to keep
+# track of the iteration number in this environment.
+iterator_env <- as.environment(
+ list(
+ iter = 0,
+ x = mtcars[, -1],
+ y = mtcars[, 1]
+ )
+)
+
+# Data is passed in two batches.
+# In this example, batches are obtained by subsetting the 'x' variable.
+# There is no advantage to doing this here, since the data is already loaded
+# in memory and could be passed in full in one go, but in real use cases only
+# a subset of the data might fit in memory at a time, in which case batches
+# can be loaded on demand and accessed one at a time.
+iterator_next <- function(iterator_env) {
+ curr_iter <- iterator_env[["iter"]]
+ if (curr_iter >= 2) {
+ # there are only two batches, so this signals end of the stream
+ return(NULL)
+ }
+
+ if (curr_iter == 0) {
+ x_batch <- iterator_env[["x"]][1:16, ]
+ y_batch <- iterator_env[["y"]][1:16]
+ } else {
+ x_batch <- iterator_env[["x"]][17:32, ]
+ y_batch <- iterator_env[["y"]][17:32]
+ }
+ on.exit({
+ iterator_env[["iter"]] <- curr_iter + 1
+ })
+
+ # Function 'xgb.DataBatch' must be called manually
+ # at each batch with all the appropriate attributes,
+ # such as feature names and feature types.
+ return(xgb.DataBatch(data = x_batch, label = y_batch))
+}
+
+# This moves the iterator back to its beginning
+iterator_reset <- function(iterator_env) {
+ iterator_env[["iter"]] <- 0
+}
+
+data_iterator <- xgb.DataIter(
+ env = iterator_env,
+ f_next = iterator_next,
+ f_reset = iterator_reset
+)
+cache_prefix <- tempdir()
+
+# DMatrix will be constructed from the iterator's batches
+dm <- xgb.ExternalDMatrix(data_iterator, cache_prefix, nthread = 1)
+
+# After construction, can be used as a regular DMatrix
+params <- list(nthread = 1, objective = "reg:squarederror")
+model <- xgb.train(data = dm, nrounds = 2, params = params)
+
+# Predictions can also be computed on it, and should match
+# those obtained when the data is passed in other formats.
+pred_dm <- predict(model, dm)
+pred_mat <- predict(model, as.matrix(mtcars[, -1]))
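+
+# Both should produce the same predictions
+all.equal(pred_dm, pred_mat)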
+}
+\seealso{
+\link{xgb.DataIter}, \link{xgb.DataBatch}, \link{xgb.QuantileDMatrix.from_iterator}
+}
diff --git a/R-package/man/xgb.QuantileDMatrix.from_iterator.Rd b/R-package/man/xgb.QuantileDMatrix.from_iterator.Rd
new file mode 100644
index 000000000..791b5576e
--- /dev/null
+++ b/R-package/man/xgb.QuantileDMatrix.from_iterator.Rd
@@ -0,0 +1,65 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/xgb.DMatrix.R
+\name{xgb.QuantileDMatrix.from_iterator}
+\alias{xgb.QuantileDMatrix.from_iterator}
+\title{QuantileDMatrix from External Data}
+\usage{
+xgb.QuantileDMatrix.from_iterator(
+ data_iterator,
+ missing = NA,
+ nthread = NULL,
+ ref = NULL,
+ max_bin = NULL
+)
+}
+\arguments{
+\item{data_iterator}{A data iterator structure as returned by \link{xgb.DataIter},
+which includes an environment shared between function calls, and functions to access
+the data in batches on-demand.}
+
+\item{missing}{A float value that represents missing values in the data.
+
+Note that, while functions like \link{xgb.DMatrix} can take a generic \code{NA} and interpret it
+correctly for different types like \code{numeric} and \code{integer}, if an \code{NA} value is passed here,
+it will not be adapted for different input types.
+
+For example, in R \code{integer} types, missing values are represented by the integer number
+\code{-2147483648} (since machine 'integer' types do not have an inherent 'NA' value). Hence,
+if one passes \code{NA}, which is interpreted as a floating-point NaN by 'xgb.ExternalDMatrix'
+and by 'xgb.QuantileDMatrix.from_iterator', these integer missing values will not be treated
+as missing. This should not pose any problem for \code{numeric} types, since they do have an
+inherent NaN value.}
+
+\item{nthread}{Number of threads used for creating DMatrix.}
+
+\item{ref}{The training dataset that provides quantile information, needed when creating
+validation/test dataset with \code{xgb.QuantileDMatrix}. Supplying the training DMatrix
+as a reference means that the same quantisation applied to the training data is
+applied to the validation/test data.}
+
+\item{max_bin}{The number of histogram bins, which should be consistent with the training
+parameter \code{max_bin}.
+
+This is only supported when constructing a QuantileDMatrix.}
+}
+\value{
+An 'xgb.DMatrix' object, with subclass 'xgb.QuantileDMatrix'.
+}
+\description{
+Create an \code{xgb.QuantileDMatrix} object (exact same class as would be returned by
+calling function \link{xgb.QuantileDMatrix}, with the same advantages and limitations) from
+external data supplied by an \link{xgb.DataIter} object, potentially passed in batches from
+a bigger set that might not fit entirely in memory, in the same way as \link{xgb.ExternalDMatrix}.
+
+Note that, while external data will only be loaded through the iterator (thus the full data
+might not be held entirely in-memory), the quantized representation of the data will get
+created in-memory, being concatenated from multiple calls to the data iterator. The quantized
+version is typically lighter than the original data, so there might be cases in which this
+representation could potentially fit in memory even if the full data doesn't.
+
+For more information, see the guide 'Using XGBoost External Memory Version':
+\url{https://xgboost.readthedocs.io/en/stable/tutorials/external_memory.html}
+}
+\seealso{
+\link{xgb.DataIter}, \link{xgb.DataBatch}, \link{xgb.ExternalDMatrix},
+\link{xgb.QuantileDMatrix}
+}
diff --git a/R-package/man/slice.xgb.DMatrix.Rd b/R-package/man/xgb.slice.DMatrix.Rd
similarity index 84%
rename from R-package/man/slice.xgb.DMatrix.Rd
rename to R-package/man/xgb.slice.DMatrix.Rd
index a2dfb699b..c9695996b 100644
--- a/R-package/man/slice.xgb.DMatrix.Rd
+++ b/R-package/man/xgb.slice.DMatrix.Rd
@@ -1,15 +1,12 @@
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/xgb.DMatrix.R
-\name{slice}
-\alias{slice}
-\alias{slice.xgb.DMatrix}
+\name{xgb.slice.DMatrix}
+\alias{xgb.slice.DMatrix}
\alias{[.xgb.DMatrix}
\title{Get a new DMatrix containing the specified rows of
original xgb.DMatrix object}
\usage{
-slice(object, idxset)
-
-\method{slice}{xgb.DMatrix}(object, idxset)
+xgb.slice.DMatrix(object, idxset)
\method{[}{xgb.DMatrix}(object, idxset, colset = NULL)
}
@@ -28,7 +25,7 @@ original xgb.DMatrix object
data(agaricus.train, package='xgboost')
dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label, nthread = 2))
-dsub <- slice(dtrain, 1:42)
+dsub <- xgb.slice.DMatrix(dtrain, 1:42)
labels1 <- getinfo(dsub, 'label')
dsub <- dtrain[1:42, ]
labels2 <- getinfo(dsub, 'label')
diff --git a/R-package/src/init.c b/R-package/src/init.c
index fff5d9f90..a9f3f3e38 100644
--- a/R-package/src/init.c
+++ b/R-package/src/init.c
@@ -54,6 +54,14 @@ extern SEXP XGDMatrixCreateFromDF_R(SEXP, SEXP, SEXP);
extern SEXP XGDMatrixGetStrFeatureInfo_R(SEXP, SEXP);
extern SEXP XGDMatrixNumCol_R(SEXP);
extern SEXP XGDMatrixNumRow_R(SEXP);
+extern SEXP XGProxyDMatrixCreate_R();
+extern SEXP XGProxyDMatrixSetDataDense_R(SEXP, SEXP);
+extern SEXP XGProxyDMatrixSetDataCSR_R(SEXP, SEXP);
+extern SEXP XGProxyDMatrixSetDataColumnar_R(SEXP, SEXP);
+extern SEXP XGDMatrixCreateFromCallback_R(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP);
+extern SEXP XGQuantileDMatrixCreateFromCallback_R(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP);
+extern SEXP XGDMatrixFree_R(SEXP);
+extern SEXP XGGetRNAIntAsDouble();
extern SEXP XGDMatrixGetQuantileCut_R(SEXP);
extern SEXP XGDMatrixNumNonMissing_R(SEXP);
extern SEXP XGDMatrixGetDataAsCSR_R(SEXP);
@@ -105,6 +113,14 @@ static const R_CallMethodDef CallEntries[] = {
{"XGDMatrixGetStrFeatureInfo_R", (DL_FUNC) &XGDMatrixGetStrFeatureInfo_R, 2},
{"XGDMatrixNumCol_R", (DL_FUNC) &XGDMatrixNumCol_R, 1},
{"XGDMatrixNumRow_R", (DL_FUNC) &XGDMatrixNumRow_R, 1},
+ {"XGProxyDMatrixCreate_R", (DL_FUNC) &XGProxyDMatrixCreate_R, 0},
+ {"XGProxyDMatrixSetDataDense_R", (DL_FUNC) &XGProxyDMatrixSetDataDense_R, 2},
+ {"XGProxyDMatrixSetDataCSR_R", (DL_FUNC) &XGProxyDMatrixSetDataCSR_R, 2},
+ {"XGProxyDMatrixSetDataColumnar_R", (DL_FUNC) &XGProxyDMatrixSetDataColumnar_R, 2},
+ {"XGDMatrixCreateFromCallback_R", (DL_FUNC) &XGDMatrixCreateFromCallback_R, 7},
+ {"XGQuantileDMatrixCreateFromCallback_R", (DL_FUNC) &XGQuantileDMatrixCreateFromCallback_R, 8},
+ {"XGDMatrixFree_R", (DL_FUNC) &XGDMatrixFree_R, 1},
+ {"XGGetRNAIntAsDouble", (DL_FUNC) &XGGetRNAIntAsDouble, 0},
{"XGDMatrixGetQuantileCut_R", (DL_FUNC) &XGDMatrixGetQuantileCut_R, 1},
{"XGDMatrixNumNonMissing_R", (DL_FUNC) &XGDMatrixNumNonMissing_R, 1},
{"XGDMatrixGetDataAsCSR_R", (DL_FUNC) &XGDMatrixGetDataAsCSR_R, 1},
diff --git a/R-package/src/xgboost_R.cc b/R-package/src/xgboost_R.cc
index 1d01b9aae..c91fb94c4 100644
--- a/R-package/src/xgboost_R.cc
+++ b/R-package/src/xgboost_R.cc
@@ -27,7 +27,12 @@
#include "./xgboost_R.h" // Must follow other includes.
namespace {
-struct ErrorWithUnwind : public std::exception {};
+
+/* Note: this class is used as a throwable exception.
+Some xgboost C functions that use callbacks will catch 'std::exception's thrown
+during the callback execution; hence, so that this exception can propagate through
+them, it purposely does not inherit from 'std::exception' even though it is used as one. */
+struct ErrorWithUnwind {};
void ThrowExceptionFromRError(void *, Rboolean jump) {
if (jump) {
@@ -51,6 +56,27 @@ SEXP SafeMkChar(const char *c_str, SEXP continuation_token) {
continuation_token);
}
+struct RFunAndEnv {
+ SEXP R_fun;
+ SEXP R_calling_env;
+};
+
+SEXP WrappedExecFun(void *void_ptr) {
+  RFunAndEnv *r_fun_and_env = static_cast<RFunAndEnv *>(void_ptr);
+ SEXP f_expr = Rf_protect(Rf_lang1(r_fun_and_env->R_fun));
+ SEXP out = Rf_protect(Rf_eval(f_expr, r_fun_and_env->R_calling_env));
+ Rf_unprotect(2);
+ return out;
+}
+
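+/* Evaluates the R function 'R_fun' within 'R_calling_env'. If an R error is
+   raised during evaluation, it is converted into a C++ 'ErrorWithUnwind'
+   exception through the continuation token, so that the R error can later be
+   resumed via 'R_ContinueUnwind'. */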
+SEXP SafeExecFun(SEXP R_fun, SEXP R_calling_env, SEXP continuation_token) {
+ RFunAndEnv r_fun_and_env{R_fun, R_calling_env};
+ return R_UnwindProtect(
+    WrappedExecFun, static_cast<void *>(&r_fun_and_env),
+ ThrowExceptionFromRError, nullptr,
+ continuation_token);
+}
+
SEXP WrappedAllocReal(void *void_ptr) {
   size_t *size = static_cast<size_t *>(void_ptr);
return Rf_allocVector(REALSXP, *size);
@@ -140,6 +166,47 @@ SEXP SafeAllocInteger(size_t size, SEXP continuation_token) {
return "";
}
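+
+// Builds a JSON array-interface string for an R data.frame-like object
+// (a list of equal-length columns), in the format consumed by the C API's
+// columnar data ingestion functions.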
+[[nodiscard]] std::string MakeArrayInterfaceFromRDataFrame(SEXP R_df) {
+ auto make_vec = [&](auto const *ptr, std::size_t len) {
+ auto v = xgboost::linalg::MakeVec(ptr, len);
+ return xgboost::linalg::ArrayInterface(v);
+ };
+
+ R_xlen_t n_features = Rf_xlength(R_df);
+  std::vector<xgboost::Json> array(n_features);
+ CHECK_GT(n_features, 0);
+ std::size_t len = Rf_xlength(VECTOR_ELT(R_df, 0));
+
+ // The `data.frame` in R actually converts all data into numeric. The other type
+ // handlers here are not used. At the moment they are kept as a reference for when we
+ // can avoid making data copies during transformation.
+ for (R_xlen_t i = 0; i < n_features; ++i) {
+ switch (TYPEOF(VECTOR_ELT(R_df, i))) {
+ case INTSXP: {
+ auto const *ptr = INTEGER(VECTOR_ELT(R_df, i));
+ array[i] = make_vec(ptr, len);
+ break;
+ }
+ case REALSXP: {
+ auto const *ptr = REAL(VECTOR_ELT(R_df, i));
+ array[i] = make_vec(ptr, len);
+ break;
+ }
+ case LGLSXP: {
+ auto const *ptr = LOGICAL(VECTOR_ELT(R_df, i));
+ array[i] = make_vec(ptr, len);
+ break;
+ }
+ default: {
+ LOG(FATAL) << "data.frame has unsupported type.";
+ }
+ }
+ }
+
+ xgboost::Json jinterface{std::move(array)};
+ return xgboost::Json::Dump(jinterface);
+}
+
[[nodiscard]] std::string MakeJsonConfigForArray(SEXP missing, SEXP n_threads, SEXPTYPE arr_type) {
using namespace ::xgboost; // NOLINT
Json jconfig{Object{}};
@@ -335,51 +402,13 @@ XGB_DLL SEXP XGDMatrixCreateFromDF_R(SEXP df, SEXP missing, SEXP n_threads) {
R_API_BEGIN();
DMatrixHandle handle;
-
- auto make_vec = [&](auto const *ptr, std::int32_t len) {
- auto v = xgboost::linalg::MakeVec(ptr, len);
- return xgboost::linalg::ArrayInterface(v);
- };
-
std::int32_t rc{0};
{
- using xgboost::Json;
- auto n_features = Rf_xlength(df);
-    std::vector<Json> array(n_features);
- CHECK_GT(n_features, 0);
- auto len = Rf_xlength(VECTOR_ELT(df, 0));
- // The `data.frame` in R actually converts all data into numeric. The other type
- // handlers here are not used. At the moment they are kept as a reference for when we
- // can avoid making data copies during transformation.
- for (decltype(n_features) i = 0; i < n_features; ++i) {
- switch (TYPEOF(VECTOR_ELT(df, i))) {
- case INTSXP: {
- auto const *ptr = INTEGER(VECTOR_ELT(df, i));
- array[i] = make_vec(ptr, len);
- break;
- }
- case REALSXP: {
- auto const *ptr = REAL(VECTOR_ELT(df, i));
- array[i] = make_vec(ptr, len);
- break;
- }
- case LGLSXP: {
- auto const *ptr = LOGICAL(VECTOR_ELT(df, i));
- array[i] = make_vec(ptr, len);
- break;
- }
- default: {
- LOG(FATAL) << "data.frame has unsupported type.";
- }
- }
- }
-
- Json jinterface{std::move(array)};
- auto sinterface = Json::Dump(jinterface);
- Json jconfig{xgboost::Object{}};
+ std::string sinterface = MakeArrayInterfaceFromRDataFrame(df);
+ xgboost::Json jconfig{xgboost::Object{}};
jconfig["missing"] = asReal(missing);
jconfig["nthread"] = asInteger(n_threads);
- auto sconfig = Json::Dump(jconfig);
+ std::string sconfig = xgboost::Json::Dump(jconfig);
rc = XGDMatrixCreateFromColumnar(sinterface.c_str(), sconfig.c_str(), &handle);
}
@@ -632,6 +661,192 @@ XGB_DLL SEXP XGDMatrixNumCol_R(SEXP handle) {
   return ScalarInteger(static_cast<int>(ncol));
}
+XGB_DLL SEXP XGProxyDMatrixCreate_R() {
+ SEXP out = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue));
+ R_API_BEGIN();
+ DMatrixHandle proxy_dmat_handle;
+ CHECK_CALL(XGProxyDMatrixCreate(&proxy_dmat_handle));
+ R_SetExternalPtrAddr(out, proxy_dmat_handle);
+ R_RegisterCFinalizerEx(out, _DMatrixFinalizer, TRUE);
+ Rf_unprotect(1);
+ R_API_END();
+ return out;
+}
+
+XGB_DLL SEXP XGProxyDMatrixSetDataDense_R(SEXP handle, SEXP R_mat) {
+ R_API_BEGIN();
+ DMatrixHandle proxy_dmat = R_ExternalPtrAddr(handle);
+ int res_code;
+ {
+ std::string array_str = MakeArrayInterfaceFromRMat(R_mat);
+ res_code = XGProxyDMatrixSetDataDense(proxy_dmat, array_str.c_str());
+ }
+ CHECK_CALL(res_code);
+ R_API_END();
+ return R_NilValue;
+}
+
+XGB_DLL SEXP XGProxyDMatrixSetDataCSR_R(SEXP handle, SEXP lst) {
+ R_API_BEGIN();
+ DMatrixHandle proxy_dmat = R_ExternalPtrAddr(handle);
+ int res_code;
+ {
+ std::string array_str_indptr = MakeArrayInterfaceFromRVector(VECTOR_ELT(lst, 0));
+ std::string array_str_indices = MakeArrayInterfaceFromRVector(VECTOR_ELT(lst, 1));
+ std::string array_str_data = MakeArrayInterfaceFromRVector(VECTOR_ELT(lst, 2));
+ const int ncol = Rf_asInteger(VECTOR_ELT(lst, 3));
+ res_code = XGProxyDMatrixSetDataCSR(proxy_dmat,
+ array_str_indptr.c_str(),
+ array_str_indices.c_str(),
+ array_str_data.c_str(),
+ ncol);
+ }
+ CHECK_CALL(res_code);
+ R_API_END();
+ return R_NilValue;
+}
+
+XGB_DLL SEXP XGProxyDMatrixSetDataColumnar_R(SEXP handle, SEXP lst) {
+ R_API_BEGIN();
+ DMatrixHandle proxy_dmat = R_ExternalPtrAddr(handle);
+ int res_code;
+ {
+ std::string sinterface = MakeArrayInterfaceFromRDataFrame(lst);
+ res_code = XGProxyDMatrixSetDataColumnar(proxy_dmat, sinterface.c_str());
+ }
+ CHECK_CALL(res_code);
+ R_API_END();
+ return R_NilValue;
+}
+
+namespace {
+
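+/* Adapter that exposes the R-level iterator callbacks ('f_next' / 'f_reset')
+   through the C-style callback interface expected by the XGBoost C API when
+   creating DMatrices from data iterators. */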
+struct _RDataIterator {
+ SEXP f_next;
+ SEXP f_reset;
+ SEXP calling_env;
+ SEXP continuation_token;
+
+ _RDataIterator(
+ SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP continuation_token) :
+ f_next(f_next), f_reset(f_reset), calling_env(calling_env),
+ continuation_token(continuation_token) {}
+
+ void reset() {
+ SafeExecFun(this->f_reset, this->calling_env, this->continuation_token);
+ }
+
+ int next() {
+ SEXP R_res = Rf_protect(
+ SafeExecFun(this->f_next, this->calling_env, this->continuation_token));
+ int res = Rf_asInteger(R_res);
+ Rf_unprotect(1);
+ return res;
+ }
+};
+
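+/* Plain-function trampolines matching the callback signatures expected by the C API. */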
+void _reset_RDataIterator(DataIterHandle iter) {
+ static_cast<_RDataIterator*>(iter)->reset();
+}
+
+int _next_RDataIterator(DataIterHandle iter) {
+ return static_cast<_RDataIterator*>(iter)->next();
+}
+
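+/* Shared implementation for creating either an external-memory DMatrix or a
+   QuantileDMatrix from the R-level iterator callbacks. R errors raised inside
+   the callbacks are converted into C++ exceptions and re-thrown on the R side
+   via 'R_ContinueUnwind' on the continuation token. */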
+SEXP XGDMatrixCreateFromCallbackGeneric_R(
+ SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP proxy_dmat,
+ SEXP n_threads, SEXP missing, SEXP max_bin, SEXP ref_dmat,
+ SEXP cache_prefix, bool as_quantile_dmatrix) {
+ SEXP continuation_token = Rf_protect(R_MakeUnwindCont());
+ SEXP out = Rf_protect(R_MakeExternalPtr(nullptr, R_NilValue, R_NilValue));
+ R_API_BEGIN();
+ DMatrixHandle out_dmat;
+
+ int res_code;
+ try {
+ _RDataIterator data_iterator(f_next, f_reset, calling_env, continuation_token);
+
+ std::string str_cache_prefix;
+ xgboost::Json jconfig{xgboost::Object{}};
+ jconfig["missing"] = Rf_asReal(missing);
+ if (!Rf_isNull(n_threads)) {
+ jconfig["nthread"] = Rf_asInteger(n_threads);
+ }
+ if (as_quantile_dmatrix) {
+ if (!Rf_isNull(max_bin)) {
+ jconfig["max_bin"] = Rf_asInteger(max_bin);
+ }
+ } else {
+ str_cache_prefix = std::string(CHAR(Rf_asChar(cache_prefix)));
+ jconfig["cache_prefix"] = str_cache_prefix;
+ }
+ std::string json_str = xgboost::Json::Dump(jconfig);
+
+ DMatrixHandle ref_dmat_handle = nullptr;
+ if (as_quantile_dmatrix && !Rf_isNull(ref_dmat)) {
+ ref_dmat_handle = R_ExternalPtrAddr(ref_dmat);
+ }
+
+ if (as_quantile_dmatrix) {
+ res_code = XGQuantileDMatrixCreateFromCallback(
+ &data_iterator,
+ R_ExternalPtrAddr(proxy_dmat),
+ ref_dmat_handle,
+ _reset_RDataIterator,
+ _next_RDataIterator,
+ json_str.c_str(),
+ &out_dmat);
+ } else {
+ res_code = XGDMatrixCreateFromCallback(
+ &data_iterator,
+ R_ExternalPtrAddr(proxy_dmat),
+ _reset_RDataIterator,
+ _next_RDataIterator,
+ json_str.c_str(),
+ &out_dmat);
+ }
+ } catch (ErrorWithUnwind &e) {
+ R_ContinueUnwind(continuation_token);
+ }
+ CHECK_CALL(res_code);
+
+ R_SetExternalPtrAddr(out, out_dmat);
+ R_RegisterCFinalizerEx(out, _DMatrixFinalizer, TRUE);
+ Rf_unprotect(2);
+ R_API_END();
+ return out;
+}
+
+} /* namespace */
+
+XGB_DLL SEXP XGQuantileDMatrixCreateFromCallback_R(
+ SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP proxy_dmat,
+ SEXP n_threads, SEXP missing, SEXP max_bin, SEXP ref_dmat) {
+ return XGDMatrixCreateFromCallbackGeneric_R(
+ f_next, f_reset, calling_env, proxy_dmat,
+ n_threads, missing, max_bin, ref_dmat,
+ R_NilValue, true);
+}
+
+XGB_DLL SEXP XGDMatrixCreateFromCallback_R(
+ SEXP f_next, SEXP f_reset, SEXP calling_env, SEXP proxy_dmat,
+ SEXP n_threads, SEXP missing, SEXP cache_prefix) {
+ return XGDMatrixCreateFromCallbackGeneric_R(
+ f_next, f_reset, calling_env, proxy_dmat,
+ n_threads, missing, R_NilValue, R_NilValue,
+ cache_prefix, false);
+}
+
+XGB_DLL SEXP XGDMatrixFree_R(SEXP proxy_dmat) {
+ _DMatrixFinalizer(proxy_dmat);
+ return R_NilValue;
+}
+
+XGB_DLL SEXP XGGetRNAIntAsDouble() {
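+  /* R_NaInt is R's sentinel for NA in 'integer' vectors (INT_MIN,
+     i.e. -2147483648). It is exposed here as a non-missing 'double' so the
+     R side can use it as the 'missing' value for integer inputs. */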
+  double sentinel_as_double = static_cast<double>(R_NaInt);
+ return Rf_ScalarReal(sentinel_as_double);
+}
+
XGB_DLL SEXP XGDuplicate_R(SEXP obj) {
return Rf_duplicate(obj);
}
diff --git a/R-package/src/xgboost_R.h b/R-package/src/xgboost_R.h
index ec30dbada..d2e0ae828 100644
--- a/R-package/src/xgboost_R.h
+++ b/R-package/src/xgboost_R.h
@@ -161,6 +161,84 @@ XGB_DLL SEXP XGDMatrixNumRow_R(SEXP handle);
*/
XGB_DLL SEXP XGDMatrixNumCol_R(SEXP handle);
+/*!
+ * \brief Create a ProxyDMatrix and get an R externalptr object for it
+ */
+XGB_DLL SEXP XGProxyDMatrixCreate_R();
+
+/*!
+ * \brief Set dense matrix data on a proxy dmatrix
+ * \param handle R externalptr pointing to a ProxyDMatrix
+ * \param R_mat R matrix to set in the proxy dmatrix
+ */
+XGB_DLL SEXP XGProxyDMatrixSetDataDense_R(SEXP handle, SEXP R_mat);
+
+/*!
+ * \brief Set CSR matrix data on a proxy dmatrix
+ * \param handle R externalptr pointing to a ProxyDMatrix
+ * \param lst R list containing, in this order:
+ * 1. 'p' or 'indptr' vector of the CSR matrix.
+ * 2. 'j' or 'indices' vector of the CSR matrix.
+ * 3. 'x' or 'data' vector of the CSR matrix.
+ * 4. Number of columns in the CSR matrix.
+ */
+XGB_DLL SEXP XGProxyDMatrixSetDataCSR_R(SEXP handle, SEXP lst);
+
+/*!
+ * \brief Set columnar (data.frame-like) data on a proxy dmatrix
+ * \param handle R externalptr pointing to a ProxyDMatrix
+ * \param lst R list or data.frame object containing its columns as numeric vectors
+ */
+XGB_DLL SEXP XGProxyDMatrixSetDataColumnar_R(SEXP handle, SEXP lst);
+
+/*!
+ * \brief Create a DMatrix from a DataIter with callbacks
+ * \param expr_f_next expression for function(env, proxy_dmat) that sets the data on the proxy
+ * dmatrix and returns either zero (no more batches) or one (a batch was set).
+ * \param expr_f_reset expression for function(env) that resets the data iterator to
+ * the beginning (first batch).
+ * \param calling_env R environment in which to evaluate the expressions above
+ * \param proxy_dmat R externalptr holding a ProxyDMatrix.
+ * \param n_threads number of parallel threads to use for constructing the DMatrix.
+ * \param missing the value that represents missing values in the data.
+ * \param cache_prefix path of the cache file
+ * \return handle R externalptr holding the resulting DMatrix.
+ */
+XGB_DLL SEXP XGDMatrixCreateFromCallback_R(
+ SEXP expr_f_next, SEXP expr_f_reset, SEXP calling_env, SEXP proxy_dmat,
+ SEXP n_threads, SEXP missing, SEXP cache_prefix);
+
+/*!
+ * \brief Create a QuantileDMatrix from a DataIter with callbacks
+ * \param expr_f_next expression for function(env, proxy_dmat) that sets the data on the proxy
+ * dmatrix and returns either zero (no more batches) or one (a batch was set).
+ * \param expr_f_reset expression for function(env) that resets the data iterator to
+ * the beginning (first batch).
+ * \param calling_env R environment in which to evaluate the expressions above
+ * \param proxy_dmat R externalptr holding a ProxyDMatrix.
+ * \param n_threads number of parallel threads to use for constructing the QuantileDMatrix.
+ * \param missing the value that represents missing values in the data.
+ * \param max_bin maximum number of bins to have in the resulting QuantileDMatrix.
+ * \param ref_dmat an optional reference DMatrix from which to get the bin boundaries.
+ * \return handle R externalptr holding the resulting QuantileDMatrix.
+ */
+XGB_DLL SEXP XGQuantileDMatrixCreateFromCallback_R(
+ SEXP expr_f_next, SEXP expr_f_reset, SEXP calling_env, SEXP proxy_dmat,
+ SEXP n_threads, SEXP missing, SEXP max_bin, SEXP ref_dmat);
+
+/*!
+ * \brief Frees a ProxyDMatrix and empties out the R externalptr object that holds it
+ * \param proxy_dmat R externalptr containing a ProxyDMatrix
+ * \return NULL
+ */
+XGB_DLL SEXP XGDMatrixFree_R(SEXP proxy_dmat);
+
+/*!
+ * \brief Get the value that represents missingness in R integers as a numeric non-missing value.
+ */
+XGB_DLL SEXP XGGetRNAIntAsDouble();
+
/*!
* \brief Call R C-level function 'duplicate'
* \param obj Object to duplicate
diff --git a/R-package/tests/testthat/test_dmatrix.R b/R-package/tests/testthat/test_dmatrix.R
index 568aaa3bd..50621f241 100644
--- a/R-package/tests/testthat/test_dmatrix.R
+++ b/R-package/tests/testthat/test_dmatrix.R
@@ -166,7 +166,7 @@ test_that("xgb.DMatrix: getinfo & setinfo", {
test_that("xgb.DMatrix: slice, dim", {
dtest <- xgb.DMatrix(test_data, label = test_label, nthread = n_threads)
expect_equal(dim(dtest), dim(test_data))
- dsub1 <- slice(dtest, 1:42)
+ dsub1 <- xgb.slice.DMatrix(dtest, 1:42)
expect_equal(nrow(dsub1), 42)
expect_equal(ncol(dsub1), ncol(test_data))
@@ -182,12 +182,12 @@ test_that("xgb.DMatrix: slice, trailing empty rows", {
dtrain <- xgb.DMatrix(
data = train_data, label = train_label, nthread = n_threads
)
- slice(dtrain, 6513L)
+ xgb.slice.DMatrix(dtrain, 6513L)
train_data[6513, ] <- 0
dtrain <- xgb.DMatrix(
data = train_data, label = train_label, nthread = n_threads
)
- slice(dtrain, 6513L)
+ xgb.slice.DMatrix(dtrain, 6513L)
expect_equal(nrow(dtrain), 6513)
})
@@ -338,19 +338,18 @@ test_that("xgb.DMatrix: data.frame", {
stringsAsFactors = TRUE
)
- m <- xgb.DMatrix(df, enable_categorical = TRUE)
+ m <- xgb.DMatrix(df)
expect_equal(colnames(m), colnames(df))
expect_equal(
getinfo(m, "feature_type"), c("float", "float", "int", "i", "c", "c")
)
- expect_error(xgb.DMatrix(df))
df <- data.frame(
missing = c("a", "b", "d", NA),
valid = c("a", "b", "d", "c"),
stringsAsFactors = TRUE
)
- m <- xgb.DMatrix(df, enable_categorical = TRUE)
+ m <- xgb.DMatrix(df)
expect_equal(getinfo(m, "feature_type"), c("c", "c"))
})
@@ -380,6 +379,261 @@ test_that("xgb.DMatrix: can take multi-dimensional 'base_margin'", {
expect_equal(pred_only_x, pred_w_base - b, tolerance = 1e-5)
})
+test_that("xgb.DMatrix: QuantileDMatrix produces same result as DMatrix", {
+ data(mtcars)
+ y <- mtcars[, 1]
+ x <- mtcars[, -1]
+
+ cast_matrix <- function(x) as.matrix(x)
+ cast_df <- function(x) as.data.frame(x)
+ cast_csr <- function(x) as(as.matrix(x), "RsparseMatrix")
+ casting_funs <- list(cast_matrix, cast_df, cast_csr)
+
+ for (casting_fun in casting_funs) {
+
+ qdm <- xgb.QuantileDMatrix(
+ data = casting_fun(x),
+ label = y,
+ nthread = n_threads,
+ max_bin = 5
+ )
+ params <- list(
+ tree_method = "hist",
+ objective = "reg:squarederror",
+ nthread = n_threads,
+ max_bin = 5
+ )
+ model_qdm <- xgb.train(
+ params = params,
+ data = qdm,
+ nrounds = 2
+ )
+ pred_qdm <- predict(model_qdm, x)
+
+ dm <- xgb.DMatrix(
+ data = x,
+ label = y,
+ nthread = n_threads
+ )
+ model_dm <- xgb.train(
+ params = params,
+ data = dm,
+ nrounds = 2
+ )
+ pred_dm <- predict(model_dm, x)
+
+ expect_equal(pred_qdm, pred_dm)
+ }
+})
+
+test_that("xgb.DMatrix: QuantileDMatrix is not accepted by exact method", {
+ data(mtcars)
+ y <- mtcars[, 1]
+ x <- as.matrix(mtcars[, -1])
+ qdm <- xgb.QuantileDMatrix(
+ data = x,
+ label = y,
+ nthread = n_threads
+ )
+ params <- list(
+ tree_method = "exact",
+ objective = "reg:squarederror",
+ nthread = n_threads
+ )
+ expect_error({
+ xgb.train(
+ params = params,
+ data = qdm,
+ nrounds = 2
+ )
+ })
+})
+
+test_that("xgb.DMatrix: ExternalDMatrix produces the same results as regular DMatrix", {
+ data(mtcars)
+ y <- mtcars[, 1]
+ x <- as.matrix(mtcars[, -1])
+ set.seed(123)
+ params <- list(
+ objective = "reg:squarederror",
+ nthread = n_threads
+ )
+ model <- xgb.train(
+ data = xgb.DMatrix(x, label = y),
+ params = params,
+ nrounds = 5
+ )
+ pred <- predict(model, x)
+
+ iterator_env <- as.environment(
+ list(
+ iter = 0,
+ x = mtcars[, -1],
+ y = mtcars[, 1]
+ )
+ )
+ iterator_next <- function(iterator_env) {
+ curr_iter <- iterator_env[["iter"]]
+ if (curr_iter >= 2) {
+ return(NULL)
+ }
+ if (curr_iter == 0) {
+ x_batch <- iterator_env[["x"]][1:16, ]
+ y_batch <- iterator_env[["y"]][1:16]
+ } else {
+ x_batch <- iterator_env[["x"]][17:32, ]
+ y_batch <- iterator_env[["y"]][17:32]
+ }
+ on.exit({
+ iterator_env[["iter"]] <- curr_iter + 1
+ })
+ return(xgb.DataBatch(data = x_batch, label = y_batch))
+ }
+ iterator_reset <- function(iterator_env) {
+ iterator_env[["iter"]] <- 0
+ }
+ data_iterator <- xgb.DataIter(
+ env = iterator_env,
+ f_next = iterator_next,
+ f_reset = iterator_reset
+ )
+ cache_prefix <- tempdir()
+ edm <- xgb.ExternalDMatrix(data_iterator, cache_prefix, nthread = 1)
+ expect_true(inherits(edm, "xgb.ExternalDMatrix"))
+ expect_true(inherits(edm, "xgb.DMatrix"))
+ set.seed(123)
+ model_ext <- xgb.train(
+ data = edm,
+ params = params,
+ nrounds = 5
+ )
+
+ pred_model1_edm <- predict(model, edm)
+ pred_model2_mat <- predict(model_ext, x)
+ pred_model2_edm <- predict(model_ext, edm)
+
+ expect_equal(pred_model1_edm, pred)
+ expect_equal(pred_model2_mat, pred)
+ expect_equal(pred_model2_edm, pred)
+})
+
+test_that("xgb.DMatrix: External QDM produces same results as regular QDM", {
+ data(mtcars)
+ y <- mtcars[, 1]
+ x <- as.matrix(mtcars[, -1])
+ set.seed(123)
+ params <- list(
+ objective = "reg:squarederror",
+ nthread = n_threads,
+ max_bin = 3
+ )
+ model <- xgb.train(
+ data = xgb.QuantileDMatrix(
+ x,
+ label = y,
+ nthread = 1,
+ max_bin = 3
+ ),
+ params = params,
+ nrounds = 5
+ )
+ pred <- predict(model, x)
+
+ iterator_env <- as.environment(
+ list(
+ iter = 0,
+ x = mtcars[, -1],
+ y = mtcars[, 1]
+ )
+ )
+ iterator_next <- function(iterator_env) {
+ curr_iter <- iterator_env[["iter"]]
+ if (curr_iter >= 2) {
+ return(NULL)
+ }
+ if (curr_iter == 0) {
+ x_batch <- iterator_env[["x"]][1:16, ]
+ y_batch <- iterator_env[["y"]][1:16]
+ } else {
+ x_batch <- iterator_env[["x"]][17:32, ]
+ y_batch <- iterator_env[["y"]][17:32]
+ }
+ on.exit({
+ iterator_env[["iter"]] <- curr_iter + 1
+ })
+ return(xgb.DataBatch(data = x_batch, label = y_batch))
+ }
+ iterator_reset <- function(iterator_env) {
+ iterator_env[["iter"]] <- 0
+ }
+ data_iterator <- xgb.DataIter(
+ env = iterator_env,
+ f_next = iterator_next,
+ f_reset = iterator_reset
+ )
+ cache_prefix <- tempdir()
+ qdm <- xgb.QuantileDMatrix.from_iterator(
+ data_iterator,
+ max_bin = 3,
+ nthread = 1
+ )
+ expect_true(inherits(qdm, "xgb.QuantileDMatrix"))
+ expect_true(inherits(qdm, "xgb.DMatrix"))
+ set.seed(123)
+ model_ext <- xgb.train(
+ data = qdm,
+ params = params,
+ nrounds = 5
+ )
+
+ pred_model1_qdm <- predict(model, qdm)
+ pred_model2_mat <- predict(model_ext, x)
+ pred_model2_qdm <- predict(model_ext, qdm)
+
+ expect_equal(pred_model1_qdm, pred)
+ expect_equal(pred_model2_mat, pred)
+ expect_equal(pred_model2_qdm, pred)
+})
+
+test_that("xgb.DMatrix: R errors thrown on DataIterator are thrown back to the user", {
+ data(mtcars)
+ iterator_env <- as.environment(
+ list(
+ iter = 0,
+ x = mtcars[, -1],
+ y = mtcars[, 1]
+ )
+ )
+ iterator_next <- function(iterator_env) {
+ curr_iter <- iterator_env[["iter"]]
+ if (curr_iter >= 2) {
+ return(0)
+ }
+ if (curr_iter == 0) {
+ x_batch <- iterator_env[["x"]][1:16, ]
+ y_batch <- iterator_env[["y"]][1:16]
+ } else {
+ stop("custom error")
+ }
+ on.exit({
+ iterator_env[["iter"]] <- curr_iter + 1
+ })
+ return(xgb.DataBatch(data = x_batch, label = y_batch))
+ }
+ iterator_reset <- function(iterator_env) {
+ iterator_env[["iter"]] <- 0
+ }
+ data_iterator <- xgb.DataIter(
+ env = iterator_env,
+ f_next = iterator_next,
+ f_reset = iterator_reset
+ )
+ expect_error(
+ {xgb.ExternalDMatrix(data_iterator, nthread = 1)},
+ "custom error"
+ )
+})
+
test_that("xgb.DMatrix: number of non-missing matches data", {
x <- matrix(1:10, nrow = 5)
dm1 <- xgb.DMatrix(x)
diff --git a/R-package/tests/testthat/test_helpers.R b/R-package/tests/testthat/test_helpers.R
index badac0213..38b5ca066 100644
--- a/R-package/tests/testthat/test_helpers.R
+++ b/R-package/tests/testthat/test_helpers.R
@@ -511,3 +511,82 @@ test_that('convert.labels works', {
expect_equal(class(res), 'numeric')
}
})
+
+test_that("validate.features works as expected", {
+ data(mtcars)
+ y <- mtcars$mpg
+ x <- as.matrix(mtcars[, -1])
+ dm <- xgb.DMatrix(x, label = y, nthread = 1)
+ model <- xgb.train(
+ params = list(nthread = 1),
+ data = dm,
+ nrounds = 3
+ )
+
+ # result is output as-is when needed
+ res <- validate.features(model, x)
+ expect_equal(res, x)
+ res <- validate.features(model, dm)
+ expect_identical(res, dm)
+ res <- validate.features(model, as(x[1, ], "dsparseVector"))
+ expect_equal(as.numeric(res), unname(x[1, ]))
+ res <- validate.features(model, "file.txt")
+ expect_equal(res, "file.txt")
+
+ # columns are reordered
+ res <- validate.features(model, mtcars[, rev(names(mtcars))])
+ expect_equal(names(res), colnames(x))
+ expect_equal(as.matrix(res), x)
+ res <- validate.features(model, as.matrix(mtcars[, rev(names(mtcars))]))
+ expect_equal(colnames(res), colnames(x))
+ expect_equal(res, x)
+ res <- validate.features(model, mtcars[1, rev(names(mtcars)), drop = FALSE])
+ expect_equal(names(res), colnames(x))
+ expect_equal(unname(as.matrix(res)), unname(x[1, , drop = FALSE]))
+ res <- validate.features(model, as.data.table(mtcars[, rev(names(mtcars))]))
+ expect_equal(names(res), colnames(x))
+ expect_equal(unname(as.matrix(res)), unname(x))
+
+ # error when columns are missing
+ expect_error({
+ validate.features(model, mtcars[, 1:3])
+ })
+ expect_error({
+ validate.features(model, as.matrix(mtcars[, 1:ncol(x)])) # nolint
+ })
+ expect_error({
+ validate.features(model, xgb.DMatrix(mtcars[, 1:3]))
+ })
+ expect_error({
+ validate.features(model, as(x[, 1:3], "CsparseMatrix"))
+ })
+
+ # error when it cannot reorder or subset
+ expect_error({
+ validate.features(model, xgb.DMatrix(mtcars))
+ }, "Feature names")
+ expect_error({
+ validate.features(model, xgb.DMatrix(x[, rev(colnames(x))]))
+ }, "Feature names")
+
+ # no error about types if the booster doesn't have types
+ expect_error({
+ validate.features(model, xgb.DMatrix(x, feature_types = c(rep("q", 5), rep("c", 5))))
+ }, NA)
+ tmp <- mtcars
+ tmp[["vs"]] <- factor(tmp[["vs"]])
+ expect_error({
+ validate.features(model, tmp)
+ }, NA)
+
+ # error when types do not match
+ setinfo(model, "feature_type", rep("q", 10))
+ expect_error({
+ validate.features(model, xgb.DMatrix(x, feature_types = c(rep("q", 5), rep("c", 5))))
+ }, "Feature types")
+ tmp <- mtcars
+ tmp[["vs"]] <- factor(tmp[["vs"]])
+ expect_error({
+ validate.features(model, tmp)
+ }, "Feature types")
+})
diff --git a/demo/aft_survival/aft_survival_viz_demo.py b/demo/aft_survival/aft_survival_viz_demo.py
index b925ca547..0e434a151 100644
--- a/demo/aft_survival/aft_survival_viz_demo.py
+++ b/demo/aft_survival/aft_survival_viz_demo.py
@@ -6,6 +6,7 @@ This demo uses 1D toy data and visualizes how XGBoost fits a tree ensemble. The
model starts out as a flat line and evolves into a step function in order to account for
all ranged labels.
"""
+
import matplotlib.pyplot as plt
import numpy as np
diff --git a/demo/dask/cpu_training.py b/demo/dask/cpu_training.py
index 0f3316741..2bee444f7 100644
--- a/demo/dask/cpu_training.py
+++ b/demo/dask/cpu_training.py
@@ -3,6 +3,7 @@ Example of training with Dask on CPU
====================================
"""
+
from dask import array as da
from dask.distributed import Client, LocalCluster
@@ -14,8 +15,9 @@ def main(client):
# generate some random data for demonstration
m = 100000
n = 100
- X = da.random.random(size=(m, n), chunks=100)
- y = da.random.random(size=(m,), chunks=100)
+ rng = da.random.default_rng(1)
+ X = rng.normal(size=(m, n))
+ y = X.sum(axis=1)
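+    # 'y' is a deterministic function of 'X', so the demo model has real signal to fit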
# DaskDMatrix acts like normal DMatrix, works as a proxy for local
# DMatrix scatter around workers.
diff --git a/demo/dask/dask_callbacks.py b/demo/dask/dask_callbacks.py
index a4b0f5648..4a7ec0f19 100644
--- a/demo/dask/dask_callbacks.py
+++ b/demo/dask/dask_callbacks.py
@@ -2,6 +2,7 @@
Example of using callbacks with Dask
====================================
"""
+
import numpy as np
from dask.distributed import Client, LocalCluster
from dask_ml.datasets import make_regression
diff --git a/demo/dask/gpu_training.py b/demo/dask/gpu_training.py
index fd5b35bf3..f53835ffb 100644
--- a/demo/dask/gpu_training.py
+++ b/demo/dask/gpu_training.py
@@ -2,6 +2,8 @@
Example of training with Dask on GPU
====================================
"""
+
+import cupy as cp
import dask_cudf
from dask import array as da
from dask import dataframe as dd
@@ -72,10 +74,12 @@ if __name__ == "__main__":
with LocalCUDACluster(n_workers=2, threads_per_worker=4) as cluster:
with Client(cluster) as client:
# generate some random data for demonstration
+ rng = da.random.default_rng(1)
+
m = 100000
n = 100
- X = da.random.random(size=(m, n), chunks=10000)
- y = da.random.random(size=(m,), chunks=10000)
+ X = rng.normal(size=(m, n))
+ y = X.sum(axis=1)
print("Using DaskQuantileDMatrix")
from_ddqdm = using_quantile_device_dmatrix(client, X, y)
diff --git a/demo/dask/sklearn_cpu_training.py b/demo/dask/sklearn_cpu_training.py
index 38ea25e61..e91babb84 100644
--- a/demo/dask/sklearn_cpu_training.py
+++ b/demo/dask/sklearn_cpu_training.py
@@ -2,6 +2,7 @@
Use scikit-learn regressor interface with CPU histogram tree method
===================================================================
"""
+
from dask import array as da
from dask.distributed import Client, LocalCluster
diff --git a/demo/guide-python/callbacks.py b/demo/guide-python/callbacks.py
index 0676c732e..2f8ac5c79 100644
--- a/demo/guide-python/callbacks.py
+++ b/demo/guide-python/callbacks.py
@@ -4,6 +4,7 @@ Demo for using and defining callback functions
.. versionadded:: 1.3.0
"""
+
import argparse
import os
import tempfile
diff --git a/demo/guide-python/cat_pipeline.py b/demo/guide-python/cat_pipeline.py
index 0f2ba8f8d..72e786edd 100644
--- a/demo/guide-python/cat_pipeline.py
+++ b/demo/guide-python/cat_pipeline.py
@@ -13,6 +13,7 @@ See Also
- :ref:`sphx_glr_python_examples_cat_in_the_dat.py`
"""
+
from typing import List, Tuple
import numpy as np
diff --git a/demo/guide-python/categorical.py b/demo/guide-python/categorical.py
index b639f4359..d42fb8b77 100644
--- a/demo/guide-python/categorical.py
+++ b/demo/guide-python/categorical.py
@@ -17,6 +17,7 @@ See Also
- :ref:`sphx_glr_python_examples_cat_pipeline.py`
"""
+
from typing import Tuple
import numpy as np
diff --git a/demo/guide-python/external_memory.py b/demo/guide-python/external_memory.py
index 6d789486e..e4d1895d1 100644
--- a/demo/guide-python/external_memory.py
+++ b/demo/guide-python/external_memory.py
@@ -11,6 +11,7 @@ instead of Quantile DMatrix. The feature is not ready for production use yet.
 See :doc:`the tutorial </tutorials/external_memory>` for more details.
"""
+
import os
import tempfile
from typing import Callable, List, Tuple
diff --git a/demo/guide-python/individual_trees.py b/demo/guide-python/individual_trees.py
index 93a9aad2b..b10fabf64 100644
--- a/demo/guide-python/individual_trees.py
+++ b/demo/guide-python/individual_trees.py
@@ -2,6 +2,7 @@
Demo for prediction using individual trees and model slices
===========================================================
"""
+
import os
import numpy as np
diff --git a/demo/guide-python/learning_to_rank.py b/demo/guide-python/learning_to_rank.py
index 62df8253b..b131b31f7 100644
--- a/demo/guide-python/learning_to_rank.py
+++ b/demo/guide-python/learning_to_rank.py
@@ -15,6 +15,7 @@ position debiasing training.
For an overview of learning to rank in XGBoost, please see
 :doc:`Learning to Rank </tutorials/learning_to_rank>`.
"""
+
from __future__ import annotations
import argparse
diff --git a/demo/guide-python/quantile_regression.py b/demo/guide-python/quantile_regression.py
index a9e4532ba..f44b5c9b4 100644
--- a/demo/guide-python/quantile_regression.py
+++ b/demo/guide-python/quantile_regression.py
@@ -13,6 +13,7 @@ https://scikit-learn.org/stable/auto_examples/ensemble/plot_gradient_boosting_qu
crossing can happen due to limitation in the algorithm.
"""
+
import argparse
from typing import Dict
diff --git a/demo/guide-python/sklearn_examples.py b/demo/guide-python/sklearn_examples.py
index 0fe7a8e24..42baf6883 100644
--- a/demo/guide-python/sklearn_examples.py
+++ b/demo/guide-python/sklearn_examples.py
@@ -9,6 +9,7 @@ Created on 1 Apr 2015
@author: Jamie Hall
"""
+
import pickle
import numpy as np
diff --git a/demo/guide-python/sklearn_parallel.py b/demo/guide-python/sklearn_parallel.py
index 55e0bff74..db5303ab7 100644
--- a/demo/guide-python/sklearn_parallel.py
+++ b/demo/guide-python/sklearn_parallel.py
@@ -2,6 +2,7 @@
Demo for using xgboost with sklearn
===================================
"""
+
import multiprocessing
from sklearn.datasets import fetch_california_housing
diff --git a/demo/guide-python/spark_estimator_examples.py b/demo/guide-python/spark_estimator_examples.py
index 97caef610..ac36065bc 100644
--- a/demo/guide-python/spark_estimator_examples.py
+++ b/demo/guide-python/spark_estimator_examples.py
@@ -4,6 +4,7 @@ Collection of examples for using xgboost.spark estimator interface
@author: Weichen Xu
"""
+
import sklearn.datasets
from pyspark.ml.evaluation import MulticlassClassificationEvaluator, RegressionEvaluator
from pyspark.ml.linalg import Vectors
diff --git a/demo/rmm_plugin/rmm_mgpu_with_dask.py b/demo/rmm_plugin/rmm_mgpu_with_dask.py
index 2384b209e..467827074 100644
--- a/demo/rmm_plugin/rmm_mgpu_with_dask.py
+++ b/demo/rmm_plugin/rmm_mgpu_with_dask.py
@@ -2,6 +2,7 @@
Using rmm with Dask
===================
"""
+
import dask
from dask.distributed import Client
from dask_cuda import LocalCUDACluster
diff --git a/demo/rmm_plugin/rmm_singlegpu.py b/demo/rmm_plugin/rmm_singlegpu.py
index b4dccd805..a1457406b 100644
--- a/demo/rmm_plugin/rmm_singlegpu.py
+++ b/demo/rmm_plugin/rmm_singlegpu.py
@@ -2,6 +2,7 @@
Using rmm on a single node device
=================================
"""
+
import rmm
from sklearn.datasets import make_classification
diff --git a/doc/R-package/index.rst b/doc/R-package/index.rst
index ebd49bb9c..8a27d0174 100644
--- a/doc/R-package/index.rst
+++ b/doc/R-package/index.rst
@@ -26,3 +26,12 @@ Tutorials
Introduction to XGBoost in R
Understanding your dataset with XGBoost
+
+************
+Other topics
+************
+
+.. toctree::
+   :maxdepth: 2
+   :titlesonly:
+
+   Handling of indexable elements <index_base>
diff --git a/doc/R-package/index_base.rst b/doc/R-package/index_base.rst
new file mode 100644
index 000000000..495b2e760
--- /dev/null
+++ b/doc/R-package/index_base.rst
@@ -0,0 +1,29 @@
+.. _index_base:
+
+Handling of indexable elements
+==============================
+
+There are many functionalities in XGBoost which refer to indexable elements in a countable set, such as boosting rounds / iterations / trees in a model (which can be referred to by number), classes, categories / levels in categorical features, among others.
+
+XGBoost, being written in C++, uses base-0 indexing and considers ranges / sequences to be inclusive of the left end but not the right one - for example, a range (0, 3) would include the first three elements, numbered 0, 1, and 2.
+
+The Python interface uses this same logic, since this is also the way that indexing in Python works, but other languages like R have different logic. In R, indexing is base-1 and ranges / sequences are inclusive of both ends - for example, to refer to the first three elements in a sequence, the interval would be written as (1, 3), and the elements numbered 1, 2, and 3.
+
+In order to provide a more idiomatic experience, the user-facing R interface of XGBoost follows this and similar R conventions, but internally it needs to convert all these numbers to the format that the C interface uses. This is made more problematic by the fact that models are meant to be serializable and loadable in other interfaces, which will have different indexing logic.
+
+The following adjustments are made in the R interface:
+
+- The slicing method for DMatrix takes an array of integers, which is converted to base-0 indexing by subtracting 1 from each element (see the short example at the end of this page). Note that this conversion is done in the C-level wrapper function for R, unlike all other conversions, which are done in R before being passed on to C.
+- The slicing method for Booster takes a sequence defined by start, end, and step. The R interface is made to work the same way as R's ``seq`` from the user's point of view, so it always adjusts the left end by subtracting one and, depending on whether the step size lands exactly on the right end, also adjusts the right end so that it becomes non-inclusive under C indexing.
+- Parameter ``iterationrange`` in ``predict`` is also made to behave the same way as R's ``seq``. Since it doesn't have a step size, just adjusting the left end by subtracting 1 suffices here.
+- ``best_iteration``, depending on the context, might be stored both as a C-level booster attribute and as an R attribute. Since the C-level attributes are shared across interfaces and used in prediction methods, the R interface leaves this C-level attribute in base-0 indexing in order to improve compatibility, but the R attribute, if present, is adjusted to base-1 indexing. Note that the ``predict`` method in R and other interfaces looks at the C-level attribute only.
+- Other references to iteration numbers or boosting rounds, such as when printing metrics or saving model snapshots, also follow base-1 indexing. These other references are coded entirely in R, as the C-level functions do not handle such functionalities.
+- Terminal leaf / node numbers are returned in base-0 indexing, just like they come from the C interface.
+- Tree numbers in plots follow base-1 indexing. Note that these are only displayed when producing these plots through the R interface's own handling of DiagrammeR objects, but not when using the C-level GraphViz 'dot' format generator for plots.
+- Feature numbers in feature importances, JSON dumps, trees-to-tables, and SHAP outputs all follow base-0 indexing.
+- Categorical features are defined in R as a ``factor`` type which encodes with base-1 indexing. When categorical features are passed as R ``factor`` types, the conversion to base-0 indexing is done automatically, but if the user wishes to manually supply categorical features as already-encoded integers, then those integers need to already be in base-0 encoding.
+- Categorical levels (categories) in outputs such as plots, JSONs, and trees-to-tables are also referred to using base-0 indexing, regardless of whether they went into the model as integers or as ``factor``-typed columns.
+- Categorical labels for DMatrices do not undergo any extra processing - the user must supply base-0 encoded labels.
+- A function to retrieve class-specific coefficients when using the linear coefficients history callback takes a class index parameter, which likewise does not undergo any conversion (i.e. the user must pass a base-0 index), in order to match the label logic - that is, the same class index will refer to the class encoded with that number in the DMatrix ``label`` field.
+
+New additions to the R interface that take on indexable elements should be mindful of these conventions and try to mimic R's behavior as much as possible.
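+
+As a concrete illustration of the DMatrix slicing conversion described above, here is a minimal
+sketch using the ``agaricus`` data bundled with the package: the R call below uses base-1
+indices, and the C-level wrapper subtracts 1 from each element, so the C layer receives the
+base-0 indices 0, 1, and 2.
+
+.. code-block:: r
+
+    library(xgboost)
+    data(agaricus.train, package = "xgboost")
+    dtrain <- with(agaricus.train, xgb.DMatrix(data, label = label))
+
+    # Base-1 on the R side: rows 1, 2, and 3
+    dsub <- xgb.slice.DMatrix(dtrain, 1:3)
+    nrow(dsub)  # 3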
diff --git a/doc/python/sklearn_estimator.rst b/doc/python/sklearn_estimator.rst
index a4835dcac..207b9fa30 100644
--- a/doc/python/sklearn_estimator.rst
+++ b/doc/python/sklearn_estimator.rst
@@ -104,7 +104,7 @@ using cross validation with early stopping, here is a snippet to begin with:
clf = xgb.XGBClassifier(tree_method="hist", early_stopping_rounds=3)
- resutls = {}
+ results = {}
for train, test in cv.split(X, y):
X_train = X[train]
@@ -114,7 +114,7 @@ using cross validation with early stopping, here is a snippet to begin with:
est, train_score, test_score = fit_and_score(
clone(clf), X_train, X_test, y_train, y_test
)
- resutls[est] = (train_score, test_score)
+ results[est] = (train_score, test_score)
***********************************
diff --git a/doc/tutorials/dart.rst b/doc/tutorials/dart.rst
index 0cd9109f4..af6384eba 100644
--- a/doc/tutorials/dart.rst
+++ b/doc/tutorials/dart.rst
@@ -96,8 +96,8 @@ Sample Script
import xgboost as xgb
# read in data
- dtrain = xgb.DMatrix('demo/data/agaricus.txt.train')
- dtest = xgb.DMatrix('demo/data/agaricus.txt.test')
+ dtrain = xgb.DMatrix('demo/data/agaricus.txt.train?format=libsvm')
+ dtest = xgb.DMatrix('demo/data/agaricus.txt.test?format=libsvm')
# specify parameters via map
param = {'booster': 'dart',
'max_depth': 5, 'learning_rate': 0.1,
diff --git a/plugin/sycl/common/partition_builder.h b/plugin/sycl/common/partition_builder.h
new file mode 100644
index 000000000..37d1af241
--- /dev/null
+++ b/plugin/sycl/common/partition_builder.h
@@ -0,0 +1,101 @@
+/*!
+ * Copyright 2017-2024 XGBoost contributors
+ */
+#ifndef PLUGIN_SYCL_COMMON_PARTITION_BUILDER_H_
+#define PLUGIN_SYCL_COMMON_PARTITION_BUILDER_H_
+
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wtautological-constant-compare"
+#pragma GCC diagnostic ignored "-W#pragma-messages"
+#include <xgboost/data.h>
+#pragma GCC diagnostic pop
+#include <xgboost/tree_model.h>
+
+#include <algorithm>
+#include <vector>
+#include <utility>
+
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wtautological-constant-compare"
+#include "../../../src/common/column_matrix.h"
+#pragma GCC diagnostic pop
+
+#include "../data.h"
+
+#include <CL/sycl.hpp>
+
+namespace xgboost {
+namespace sycl {
+namespace common {
+
+// The builder is required for partitioning samples into left and right children for a set of nodes
+class PartitionBuilder {
+ public:
+  template <typename Func>
+  void Init(::sycl::queue* qu, size_t n_nodes, Func funcNTasks) {
+    qu_ = qu;
+    nodes_offsets_.resize(n_nodes+1);
+    result_rows_.resize(2 * n_nodes);
+    n_nodes_ = n_nodes;
+
+    nodes_offsets_[0] = 0;
+    for (size_t i = 1; i < n_nodes+1; ++i) {
+      nodes_offsets_[i] = nodes_offsets_[i-1] + funcNTasks(i-1);
+ }
+
+ if (data_.Size() < nodes_offsets_[n_nodes]) {
+ data_.Resize(qu, nodes_offsets_[n_nodes]);
+ }
+ }
+
+ size_t GetNLeftElems(int nid) const {
+ return result_rows_[2 * nid];
+ }
+
+
+ size_t GetNRightElems(int nid) const {
+ return result_rows_[2 * nid + 1];
+ }
+
+ // For test purposes only
+ void SetNLeftElems(int nid, size_t val) {
+ result_rows_[2 * nid] = val;
+ }
+
+ // For test purposes only
+ void SetNRightElems(int nid, size_t val) {
+ result_rows_[2 * nid + 1] = val;
+ }
+
+  xgboost::common::Span<size_t> GetData(int nid) {
+ return { data_.Data() + nodes_offsets_[nid], nodes_offsets_[nid + 1] - nodes_offsets_[nid] };
+ }
+
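+  // Copies the partition result for node 'nid' (GetNLeftElems(nid) +
+  // GetNRightElems(nid) elements in total) into 'data_result'; the copy is
+  // enqueued on the SYCL queue with 'event' as its dependency.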
+ void MergeToArray(size_t nid,
+ size_t* data_result,
+ ::sycl::event event) {
+ size_t n_nodes_total = GetNLeftElems(nid) + GetNRightElems(nid);
+ if (n_nodes_total > 0) {
+ const size_t* data = data_.Data() + nodes_offsets_[nid];
+ qu_->memcpy(data_result, data, sizeof(size_t) * n_nodes_total, event);
+ }
+ }
+
+ protected:
+  std::vector<size_t> nodes_offsets_;
+  std::vector<size_t> result_rows_;
+ size_t n_nodes_;
+
+  USMVector<size_t> parts_size_;
+  USMVector<size_t> data_;
+
+ ::sycl::queue* qu_;
+};
+
+} // namespace common
+} // namespace sycl
+} // namespace xgboost
+
+
+#endif // PLUGIN_SYCL_COMMON_PARTITION_BUILDER_H_
diff --git a/python-package/hatch_build.py b/python-package/hatch_build.py
index 696787fa2..925c917b9 100644
--- a/python-package/hatch_build.py
+++ b/python-package/hatch_build.py
@@ -2,6 +2,7 @@
Custom hook to customize the behavior of Hatchling.
Here, we customize the tag of the generated wheels.
"""
+
import sysconfig
from typing import Any, Dict
diff --git a/python-package/packager/build_config.py b/python-package/packager/build_config.py
index 4d697fe9f..6b5274726 100644
--- a/python-package/packager/build_config.py
+++ b/python-package/packager/build_config.py
@@ -1,4 +1,5 @@
"""Build configuration"""
+
import dataclasses
from typing import Any, Dict, List, Optional
diff --git a/python-package/packager/nativelib.py b/python-package/packager/nativelib.py
index 9d3fec2bc..0227cff37 100644
--- a/python-package/packager/nativelib.py
+++ b/python-package/packager/nativelib.py
@@ -1,6 +1,7 @@
"""
Functions for building libxgboost
"""
+
import logging
import os
import pathlib
diff --git a/python-package/packager/pep517.py b/python-package/packager/pep517.py
index 2c4f9e3e6..d2e671fb6 100644
--- a/python-package/packager/pep517.py
+++ b/python-package/packager/pep517.py
@@ -4,6 +4,7 @@ Builds source distribution and binary wheels, following PEP 517 / PEP 660.
Reuses components of Hatchling (https://github.com/pypa/hatch/tree/master/backend) for the sake
of brevity.
"""
+
import dataclasses
import logging
import os
diff --git a/python-package/packager/sdist.py b/python-package/packager/sdist.py
index af9fbca0d..4c70c24fe 100644
--- a/python-package/packager/sdist.py
+++ b/python-package/packager/sdist.py
@@ -1,6 +1,7 @@
"""
Functions for building sdist
"""
+
import logging
import pathlib
diff --git a/python-package/packager/util.py b/python-package/packager/util.py
index 0fff062d7..866f186b3 100644
--- a/python-package/packager/util.py
+++ b/python-package/packager/util.py
@@ -1,6 +1,7 @@
"""
Utility functions for implementing PEP 517 backend
"""
+
import logging
import pathlib
import shutil
diff --git a/python-package/xgboost/_typing.py b/python-package/xgboost/_typing.py
index a36757a81..da8215c0d 100644
--- a/python-package/xgboost/_typing.py
+++ b/python-package/xgboost/_typing.py
@@ -36,6 +36,11 @@ PandasDType = Any # real type is pandas.core.dtypes.base.ExtensionDtype
FloatCompatible = Union[float, np.float32, np.float64]
+# typing.SupportsInt is not suitable here since floating point values are convertible to
+# integers as well.
+Integer = Union[int, np.integer]
+IterationRange = Tuple[Integer, Integer]
+
# callables
FPreProcCallable = Callable
diff --git a/python-package/xgboost/collective.py b/python-package/xgboost/collective.py
index c59bd510b..421ba9b32 100644
--- a/python-package/xgboost/collective.py
+++ b/python-package/xgboost/collective.py
@@ -1,4 +1,5 @@
"""XGBoost collective communication related API."""
+
import ctypes
import json
import logging
diff --git a/python-package/xgboost/core.py b/python-package/xgboost/core.py
index 36be766b1..27331d3de 100644
--- a/python-package/xgboost/core.py
+++ b/python-package/xgboost/core.py
@@ -48,6 +48,8 @@ from ._typing import (
FeatureInfo,
FeatureNames,
FeatureTypes,
+ Integer,
+ IterationRange,
ModelIn,
NumpyOrCupy,
TransformedData,
@@ -62,13 +64,11 @@ class XGBoostError(ValueError):
@overload
-def from_pystr_to_cstr(data: str) -> bytes:
- ...
+def from_pystr_to_cstr(data: str) -> bytes: ...
@overload
-def from_pystr_to_cstr(data: List[str]) -> ctypes.Array:
- ...
+def from_pystr_to_cstr(data: List[str]) -> ctypes.Array: ...
def from_pystr_to_cstr(data: Union[str, List[str]]) -> Union[bytes, ctypes.Array]:
@@ -798,9 +798,23 @@ class DMatrix: # pylint: disable=too-many-instance-attributes,too-many-public-m
Set names for features.
feature_types :
- Set types for features. When `enable_categorical` is set to `True`, string
- "c" represents categorical data type while "q" represents numerical feature
- type. For categorical features, the input is assumed to be preprocessed and
+ Set types for features. If `data` is a DataFrame type and passing
+ `enable_categorical=True`, the types will be deduced automatically
+ from the column types.
+
+            Otherwise, one can pass a list-like input with the same length as the number
+            of columns in `data`, with the following possible values:
+
+            - "c", which represents categorical columns.
+ - "q", which represents numeric columns.
+ - "int", which represents integer columns.
+ - "i", which represents boolean columns.
+
+ Note that, while categorical types are treated differently from
+ the rest for model fitting purposes, the other types do not influence
+ the generated model, but have effects in other functionalities such as
+ feature importances.
+
+ For categorical features, the input is assumed to be preprocessed and
encoded by the users. The encoding can be done via
:py:class:`sklearn.preprocessing.OrdinalEncoder` or pandas dataframe
`.cat.codes` method. This is useful when users want to specify categorical
@@ -1812,19 +1826,25 @@ class Booster:
state["handle"] = handle
self.__dict__.update(state)
- def __getitem__(self, val: Union[int, tuple, slice]) -> "Booster":
+ def __getitem__(self, val: Union[Integer, tuple, slice]) -> "Booster":
"""Get a slice of the tree-based model.
.. versionadded:: 1.3.0
"""
- if isinstance(val, int):
- val = slice(val, val + 1)
+ # convert to slice for all other types
+ if isinstance(val, (np.integer, int)):
+ val = slice(int(val), int(val + 1))
+ if isinstance(val, type(Ellipsis)):
+ val = slice(0, 0)
if isinstance(val, tuple):
raise ValueError("Only supports slicing through 1 dimension.")
+ # All supported types are now slice
+ # FIXME(jiamingy): Use `types.EllipsisType` once Python 3.10 is used.
if not isinstance(val, slice):
- msg = _expect((int, slice), type(val))
+ msg = _expect((int, slice, np.integer, type(Ellipsis)), type(val))
raise TypeError(msg)
+
if isinstance(val.start, type(Ellipsis)) or val.start is None:
start = 0
else:
@@ -2246,12 +2266,13 @@ class Booster:
pred_interactions: bool = False,
validate_features: bool = True,
training: bool = False,
- iteration_range: Tuple[int, int] = (0, 0),
+ iteration_range: IterationRange = (0, 0),
strict_shape: bool = False,
) -> np.ndarray:
- """Predict with data. The full model will be used unless `iteration_range` is specified,
- meaning user have to either slice the model or use the ``best_iteration``
- attribute to get prediction from best model returned from early stopping.
+ """Predict with data. The full model will be used unless `iteration_range` is
+ specified, meaning user have to either slice the model or use the
+ ``best_iteration`` attribute to get prediction from best model returned from
+ early stopping.
.. note::
@@ -2336,8 +2357,8 @@ class Booster:
args = {
"type": 0,
"training": training,
- "iteration_begin": iteration_range[0],
- "iteration_end": iteration_range[1],
+ "iteration_begin": int(iteration_range[0]),
+ "iteration_end": int(iteration_range[1]),
"strict_shape": strict_shape,
}
@@ -2373,7 +2394,7 @@ class Booster:
def inplace_predict(
self,
data: DataType,
- iteration_range: Tuple[int, int] = (0, 0),
+ iteration_range: IterationRange = (0, 0),
predict_type: str = "value",
missing: float = np.nan,
validate_features: bool = True,
@@ -2439,8 +2460,8 @@ class Booster:
args = make_jcargs(
type=1 if predict_type == "margin" else 0,
training=False,
- iteration_begin=iteration_range[0],
- iteration_end=iteration_range[1],
+ iteration_begin=int(iteration_range[0]),
+ iteration_end=int(iteration_range[1]),
missing=missing,
strict_shape=strict_shape,
cache_id=0,
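
[reviewer note] The `int(...)` coercion above is the behavioral part of this change: `iteration_range` frequently arrives as NumPy integers (for example from arithmetic on `best_iteration`), which the JSON argument encoding cannot serialize. A sketch, assuming a trained `booster`, a `dtrain` DMatrix, and an in-memory array `X`:

```python
import numpy as np

# NumPy integer bounds previously leaked into the JSON args; now cast to int.
ir = (np.int64(0), np.int64(10))
preds = booster.predict(dtrain, iteration_range=ir)

# The same coercion applies to in-place prediction on raw data.
inplace_preds = booster.inplace_predict(X, iteration_range=ir)
```
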
diff --git a/python-package/xgboost/dask/__init__.py b/python-package/xgboost/dask/__init__.py
index a9b51f35d..a3c549a2e 100644
--- a/python-package/xgboost/dask/__init__.py
+++ b/python-package/xgboost/dask/__init__.py
@@ -61,7 +61,7 @@ from typing import (
import numpy
from xgboost import collective, config
-from xgboost._typing import _T, FeatureNames, FeatureTypes
+from xgboost._typing import _T, FeatureNames, FeatureTypes, IterationRange
from xgboost.callback import TrainingCallback
from xgboost.compat import DataFrame, LazyLoader, concat, lazy_isinstance
from xgboost.core import (
@@ -1146,9 +1146,9 @@ async def _direct_predict_impl( # pylint: disable=too-many-branches
if _can_output_df(isinstance(data, dd.DataFrame), output_shape):
if base_margin is not None and isinstance(base_margin, da.Array):
# Easier for map_partitions
- base_margin_df: Optional[
- Union[dd.DataFrame, dd.Series]
- ] = base_margin.to_dask_dataframe()
+ base_margin_df: Optional[Union[dd.DataFrame, dd.Series]] = (
+ base_margin.to_dask_dataframe()
+ )
else:
base_margin_df = base_margin
predictions = dd.map_partitions(
@@ -1263,7 +1263,7 @@ async def _predict_async(
approx_contribs: bool,
pred_interactions: bool,
validate_features: bool,
- iteration_range: Tuple[int, int],
+ iteration_range: IterationRange,
strict_shape: bool,
) -> _DaskCollection:
_booster = await _get_model_future(client, model)
@@ -1410,7 +1410,7 @@ def predict( # pylint: disable=unused-argument
approx_contribs: bool = False,
pred_interactions: bool = False,
validate_features: bool = True,
- iteration_range: Tuple[int, int] = (0, 0),
+ iteration_range: IterationRange = (0, 0),
strict_shape: bool = False,
) -> Any:
"""Run prediction with a trained booster.
@@ -1458,7 +1458,7 @@ async def _inplace_predict_async( # pylint: disable=too-many-branches
global_config: Dict[str, Any],
model: Union[Booster, Dict, "distributed.Future"],
data: _DataT,
- iteration_range: Tuple[int, int],
+ iteration_range: IterationRange,
predict_type: str,
missing: float,
validate_features: bool,
@@ -1516,7 +1516,7 @@ def inplace_predict( # pylint: disable=unused-argument
client: Optional["distributed.Client"],
model: Union[TrainReturnT, Booster, "distributed.Future"],
data: _DataT,
- iteration_range: Tuple[int, int] = (0, 0),
+ iteration_range: IterationRange = (0, 0),
predict_type: str = "value",
missing: float = numpy.nan,
validate_features: bool = True,
@@ -1624,7 +1624,7 @@ class DaskScikitLearnBase(XGBModel):
output_margin: bool,
validate_features: bool,
base_margin: Optional[_DaskCollection],
- iteration_range: Optional[Tuple[int, int]],
+ iteration_range: Optional[IterationRange],
) -> Any:
iteration_range = self._get_iteration_range(iteration_range)
if self._can_use_inplace_predict():
@@ -1664,7 +1664,7 @@ class DaskScikitLearnBase(XGBModel):
output_margin: bool = False,
validate_features: bool = True,
base_margin: Optional[_DaskCollection] = None,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> Any:
_assert_dask_support()
return self.client.sync(
@@ -1679,7 +1679,7 @@ class DaskScikitLearnBase(XGBModel):
async def _apply_async(
self,
X: _DataT,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> Any:
iteration_range = self._get_iteration_range(iteration_range)
test_dmatrix = await DaskDMatrix(
@@ -1700,7 +1700,7 @@ class DaskScikitLearnBase(XGBModel):
def apply(
self,
X: _DataT,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> Any:
_assert_dask_support()
return self.client.sync(self._apply_async, X, iteration_range=iteration_range)
@@ -1962,7 +1962,7 @@ class DaskXGBClassifier(DaskScikitLearnBase, XGBClassifierBase):
X: _DataT,
validate_features: bool,
base_margin: Optional[_DaskCollection],
- iteration_range: Optional[Tuple[int, int]],
+ iteration_range: Optional[IterationRange],
) -> _DaskCollection:
if self.objective == "multi:softmax":
raise ValueError(
@@ -1987,7 +1987,7 @@ class DaskXGBClassifier(DaskScikitLearnBase, XGBClassifierBase):
X: _DaskCollection,
validate_features: bool = True,
base_margin: Optional[_DaskCollection] = None,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> Any:
_assert_dask_support()
return self._client_sync(
@@ -2006,7 +2006,7 @@ class DaskXGBClassifier(DaskScikitLearnBase, XGBClassifierBase):
output_margin: bool,
validate_features: bool,
base_margin: Optional[_DaskCollection],
- iteration_range: Optional[Tuple[int, int]],
+ iteration_range: Optional[IterationRange],
) -> _DaskCollection:
pred_probs = await super()._predict_async(
data, output_margin, validate_features, base_margin, iteration_range
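
[reviewer note] The Dask changes are annotation-only: `IterationRange` is accepted everywhere `Tuple[int, int]` was, so the distributed API mirrors the core `Booster`. A sketch, assuming a running `distributed.Client` named `client`, a `DaskDMatrix` `dtrain`, its training result `output`, and a Dask array `X`:

```python
import numpy as np
from xgboost import dask as dxgb

# NumPy integer bounds are fine here too; they are coerced in core.py.
preds = dxgb.predict(
    client, output, dtrain, iteration_range=(np.int64(0), np.int64(10))
)
inplace_preds = dxgb.inplace_predict(client, output, X, iteration_range=(0, 10))
```
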
diff --git a/python-package/xgboost/dask/utils.py b/python-package/xgboost/dask/utils.py
index 98e6029b5..d433c8072 100644
--- a/python-package/xgboost/dask/utils.py
+++ b/python-package/xgboost/dask/utils.py
@@ -1,4 +1,5 @@
"""Utilities for the XGBoost Dask interface."""
+
import logging
from typing import TYPE_CHECKING, Any, Dict
diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py
index a0fde2292..5d651948c 100644
--- a/python-package/xgboost/sklearn.py
+++ b/python-package/xgboost/sklearn.py
@@ -22,7 +22,7 @@ from typing import (
import numpy as np
from scipy.special import softmax
-from ._typing import ArrayLike, FeatureNames, FeatureTypes, ModelIn
+from ._typing import ArrayLike, FeatureNames, FeatureTypes, IterationRange, ModelIn
from .callback import TrainingCallback
# Do not use class names on scikit-learn directly. Re-define the classes on
@@ -1039,8 +1039,8 @@ class XGBModel(XGBModelBase):
return False
def _get_iteration_range(
- self, iteration_range: Optional[Tuple[int, int]]
- ) -> Tuple[int, int]:
+ self, iteration_range: Optional[IterationRange]
+ ) -> IterationRange:
if iteration_range is None or iteration_range[1] == 0:
# Use best_iteration if defined.
try:
@@ -1057,7 +1057,7 @@ class XGBModel(XGBModelBase):
output_margin: bool = False,
validate_features: bool = True,
base_margin: Optional[ArrayLike] = None,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> ArrayLike:
"""Predict with `X`. If the model is trained with early stopping, then
:py:attr:`best_iteration` is used automatically. The estimator uses
@@ -1129,7 +1129,7 @@ class XGBModel(XGBModelBase):
def apply(
self,
X: ArrayLike,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> np.ndarray:
"""Return the predicted leaf every tree for each sample. If the model is trained
with early stopping, then :py:attr:`best_iteration` is used automatically.
@@ -1465,7 +1465,7 @@ class XGBClassifier(XGBModel, XGBClassifierBase):
output_margin: bool = False,
validate_features: bool = True,
base_margin: Optional[ArrayLike] = None,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> ArrayLike:
with config_context(verbosity=self.verbosity):
class_probs = super().predict(
@@ -1500,7 +1500,7 @@ class XGBClassifier(XGBModel, XGBClassifierBase):
X: ArrayLike,
validate_features: bool = True,
base_margin: Optional[ArrayLike] = None,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> np.ndarray:
"""Predict the probability of each `X` example being of a given class. If the
model is trained with early stopping, then :py:attr:`best_iteration` is used
@@ -1942,7 +1942,7 @@ class XGBRanker(XGBModel, XGBRankerMixIn):
output_margin: bool = False,
validate_features: bool = True,
base_margin: Optional[ArrayLike] = None,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> ArrayLike:
X, _ = _get_qid(X, None)
return super().predict(
@@ -1956,7 +1956,7 @@ class XGBRanker(XGBModel, XGBRankerMixIn):
def apply(
self,
X: ArrayLike,
- iteration_range: Optional[Tuple[int, int]] = None,
+ iteration_range: Optional[IterationRange] = None,
) -> ArrayLike:
X, _ = _get_qid(X, None)
return super().apply(X, iteration_range)
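
[reviewer note] `_get_iteration_range` keeps its early-stopping fallback: when `iteration_range` is `None` or ends at `0`, `(0, best_iteration + 1)` is used if the attribute exists. A sketch with synthetic data:

```python
import numpy as np
from xgboost import XGBClassifier

rng = np.random.default_rng(0)
X = rng.random((100, 4))
y = rng.integers(0, 2, 100)

clf = XGBClassifier(
    n_estimators=100, early_stopping_rounds=5, eval_metric="logloss"
)
clf.fit(X[:80], y[:80], eval_set=[(X[80:], y[80:])])

# No range given: predict() falls back to (0, best_iteration + 1) internally.
preds = clf.predict(X)

# An explicit range (NumPy ints allowed) overrides the early-stopping default.
preds_10 = clf.predict(X, iteration_range=(np.int64(0), np.int64(10)))
```
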
diff --git a/python-package/xgboost/spark/core.py b/python-package/xgboost/spark/core.py
index 7ac01ff07..eb226611d 100644
--- a/python-package/xgboost/spark/core.py
+++ b/python-package/xgboost/spark/core.py
@@ -1,4 +1,5 @@
"""XGBoost pyspark integration submodule for core code."""
+
import base64
# pylint: disable=fixme, too-many-ancestors, protected-access, no-member, invalid-name
diff --git a/python-package/xgboost/spark/estimator.py b/python-package/xgboost/spark/estimator.py
index 193ca4b2a..51e2e946f 100644
--- a/python-package/xgboost/spark/estimator.py
+++ b/python-package/xgboost/spark/estimator.py
@@ -1,4 +1,5 @@
"""Xgboost pyspark integration submodule for estimator API."""
+
# pylint: disable=too-many-ancestors
# pylint: disable=fixme, too-many-ancestors, protected-access, no-member, invalid-name
# pylint: disable=unused-argument, too-many-locals
diff --git a/python-package/xgboost/spark/params.py b/python-package/xgboost/spark/params.py
index a81f6cd33..a177c73fe 100644
--- a/python-package/xgboost/spark/params.py
+++ b/python-package/xgboost/spark/params.py
@@ -1,4 +1,5 @@
"""Xgboost pyspark integration submodule for params."""
+
from typing import Dict
# pylint: disable=too-few-public-methods
@@ -55,7 +56,6 @@ class HasFeaturesCols(Params):
class HasEnableSparseDataOptim(Params):
-
"""
This is a Params based class that is extended by _SparkXGBParams
and holds the variable to store the boolean config of enabling sparse data optimization.
diff --git a/python-package/xgboost/spark/utils.py b/python-package/xgboost/spark/utils.py
index 805aa5c10..84333df53 100644
--- a/python-package/xgboost/spark/utils.py
+++ b/python-package/xgboost/spark/utils.py
@@ -1,4 +1,5 @@
"""Xgboost pyspark integration submodule for helper functions."""
+
# pylint: disable=fixme
import inspect
diff --git a/python-package/xgboost/testing/__init__.py b/python-package/xgboost/testing/__init__.py
index 46bbf8800..389066f0e 100644
--- a/python-package/xgboost/testing/__init__.py
+++ b/python-package/xgboost/testing/__init__.py
@@ -2,6 +2,7 @@
change without notice.
"""
+
# pylint: disable=invalid-name,missing-function-docstring,import-error
import gc
import importlib.util
diff --git a/python-package/xgboost/testing/continuation.py b/python-package/xgboost/testing/continuation.py
index 9d6dc0338..16037e37b 100644
--- a/python-package/xgboost/testing/continuation.py
+++ b/python-package/xgboost/testing/continuation.py
@@ -1,4 +1,5 @@
"""Tests for training continuation."""
+
import json
from typing import Any, Dict, TypeVar
diff --git a/python-package/xgboost/testing/dask.py b/python-package/xgboost/testing/dask.py
index a939170b4..f46803b29 100644
--- a/python-package/xgboost/testing/dask.py
+++ b/python-package/xgboost/testing/dask.py
@@ -1,4 +1,5 @@
"""Tests for dask shared by different test modules."""
+
import numpy as np
import pandas as pd
from dask import array as da
diff --git a/python-package/xgboost/testing/data_iter.py b/python-package/xgboost/testing/data_iter.py
index 18f8eb378..42a9dfca0 100644
--- a/python-package/xgboost/testing/data_iter.py
+++ b/python-package/xgboost/testing/data_iter.py
@@ -1,4 +1,5 @@
"""Tests related to the `DataIter` interface."""
+
import numpy as np
import xgboost
diff --git a/python-package/xgboost/testing/metrics.py b/python-package/xgboost/testing/metrics.py
index c9f449f22..515c9872c 100644
--- a/python-package/xgboost/testing/metrics.py
+++ b/python-package/xgboost/testing/metrics.py
@@ -1,4 +1,5 @@
"""Tests for evaluation metrics."""
+
from typing import Dict, List
import numpy as np
diff --git a/python-package/xgboost/testing/shared.py b/python-package/xgboost/testing/shared.py
index 930873163..0455b77d0 100644
--- a/python-package/xgboost/testing/shared.py
+++ b/python-package/xgboost/testing/shared.py
@@ -1,4 +1,5 @@
"""Testing code shared by other tests."""
+
# pylint: disable=invalid-name
import collections
import importlib.util
diff --git a/python-package/xgboost/testing/updater.py b/python-package/xgboost/testing/updater.py
index 00c982bd0..c0c014167 100644
--- a/python-package/xgboost/testing/updater.py
+++ b/python-package/xgboost/testing/updater.py
@@ -1,4 +1,5 @@
"""Tests for updaters."""
+
import json
from functools import partial, update_wrapper
from typing import Any, Dict, List
diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc
index 71d7af9db..dfd663da3 100644
--- a/src/c_api/c_api.cc
+++ b/src/c_api/c_api.cc
@@ -1,5 +1,5 @@
/**
- * Copyright 2014-2023 by XGBoost Contributors
+ * Copyright 2014-2024 by XGBoost Contributors
*/
#include "xgboost/c_api.h"
@@ -994,8 +994,8 @@ XGB_DLL int XGBoosterBoostOneIter(BoosterHandle handle, DMatrixHandle dtrain, bs
auto *learner = static_cast<Learner *>(handle);
auto ctx = learner->Ctx()->MakeCPU();
- auto t_grad = linalg::MakeTensorView(&ctx, common::Span{grad, len}, len);
- auto t_hess = linalg::MakeTensorView(&ctx, common::Span{hess, len}, len);
+ auto t_grad = linalg::MakeTensorView(&ctx, common::Span{grad, static_cast<std::size_t>(len)}, len);
+ auto t_hess = linalg::MakeTensorView(&ctx, common::Span{hess, static_cast<std::size_t>(len)}, len);
auto s_grad = linalg::ArrayInterfaceStr(t_grad);
auto s_hess = linalg::ArrayInterfaceStr(t_hess);
diff --git a/src/common/column_matrix.h b/src/common/column_matrix.h
index cee6c405c..fa71c4f52 100644
--- a/src/common/column_matrix.h
+++ b/src/common/column_matrix.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2017-2023, XGBoost Contributors
+ * Copyright 2017-2024, XGBoost Contributors
* \file column_matrix.h
* \brief Utility for fast column-wise access
* \author Philip Cho
@@ -176,7 +176,7 @@ class ColumnMatrix {
void SetValid(typename LBitField32::index_type i) {missing.Clear(i);}
/** @brief assign the storage to the view. */
void InitView() {
- missing = LBitField32{Span{storage.data(), storage.size()}};
+ missing = LBitField32{Span{storage.data(), static_cast<std::size_t>(storage.size())}};
}
void GrowTo(std::size_t n_elements, bool init) {
@@ -318,8 +318,8 @@ class ColumnMatrix {
common::Span<const BinIdxType> bin_index = {
reinterpret_cast<const BinIdxType *>(&index_[feature_offset * bins_type_size_]),
column_size};
- return std::move(DenseColumnIter<BinIdxType, any_missing>{
- bin_index, static_cast<bst_bin_t>(index_base_[fidx]), missing_.missing, feature_offset});
+ return DenseColumnIter<BinIdxType, any_missing>{
+ bin_index, static_cast<bst_bin_t>(index_base_[fidx]), missing_.missing, feature_offset};
}
// all columns are dense column and has no missing value
@@ -332,7 +332,7 @@ class ColumnMatrix {
DispatchBinType(bins_type_size_, [&](auto t) {
using ColumnBinT = decltype(t);
auto column_index = Span{reinterpret_cast<ColumnBinT *>(index_.data()),
- index_.size() / sizeof(ColumnBinT)};
+ static_cast<std::size_t>(index_.size() / sizeof(ColumnBinT))};
ParallelFor(n_samples, n_threads, [&](auto rid) {
rid += base_rowid;
const size_t ibegin = rid * n_features;
diff --git a/src/common/hist_util.h b/src/common/hist_util.h
index fbbd15b49..e829752da 100644
--- a/src/common/hist_util.h
+++ b/src/common/hist_util.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2017-2023 by XGBoost Contributors
+ * Copyright 2017-2024 by XGBoost Contributors
* \file hist_util.h
* \brief Utility for fast histogram aggregation
* \author Philip Cho, Tianqi Chen
@@ -113,8 +113,8 @@ class HistogramCuts {
auto end = ptrs[column_id + 1];
auto beg = ptrs[column_id];
auto it = std::upper_bound(values.cbegin() + beg, values.cbegin() + end, value);
- auto idx = it - values.cbegin();
- idx -= !!(idx == end);
+ auto idx = static_cast<bst_bin_t>(it - values.cbegin());
+ idx -= !!(idx == static_cast<bst_bin_t>(end));
return idx;
}
@@ -136,8 +136,8 @@ class HistogramCuts {
auto beg = ptrs[fidx] + vals.cbegin();
// Truncates the value in case it's not perfectly rounded.
auto v = static_cast<float>(common::AsCat(value));
- auto bin_idx = std::lower_bound(beg, end, v) - vals.cbegin();
- if (bin_idx == ptrs.at(fidx + 1)) {
+ auto bin_idx = static_cast<bst_bin_t>(std::lower_bound(beg, end, v) - vals.cbegin());
+ if (bin_idx == static_cast<bst_bin_t>(ptrs.at(fidx + 1))) {
bin_idx -= 1;
}
return bin_idx;
diff --git a/src/common/ref_resource_view.h b/src/common/ref_resource_view.h
index d4f82e615..61adfdb7b 100644
--- a/src/common/ref_resource_view.h
+++ b/src/common/ref_resource_view.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2023, XGBoost Contributors
+ * Copyright 2023-2024, XGBoost Contributors
*/
#ifndef XGBOOST_COMMON_REF_RESOURCE_VIEW_H_
#define XGBOOST_COMMON_REF_RESOURCE_VIEW_H_
@@ -76,7 +76,7 @@ class RefResourceView {
[[nodiscard]] size_type size() const { return size_; } // NOLINT
[[nodiscard]] size_type size_bytes() const { // NOLINT
- return Span{data(), size()}.size_bytes();
+ return Span{data(), static_cast<std::size_t>(size())}.size_bytes();
}
[[nodiscard]] value_type* data() { return ptr_; }; // NOLINT
[[nodiscard]] value_type const* data() const { return ptr_; }; // NOLINT
diff --git a/src/common/transform_iterator.h b/src/common/transform_iterator.h
index 2efb0b725..8125bd852 100644
--- a/src/common/transform_iterator.h
+++ b/src/common/transform_iterator.h
@@ -1,12 +1,12 @@
/**
- * Copyright 2022 by XGBoost Contributors
+ * Copyright 2022-2024, XGBoost Contributors
*/
#ifndef XGBOOST_COMMON_TRANSFORM_ITERATOR_H_
#define XGBOOST_COMMON_TRANSFORM_ITERATOR_H_
#include <cstddef>  // std::size_t
#include <iterator>  // std::random_access_iterator_tag
-#include <type_traits>  // std::result_of_t, std::add_pointer_t, std::add_lvalue_reference_t
+#include <type_traits>  // for invoke_result_t, add_pointer_t, add_lvalue_reference_t
#include <utility>  // std::forward
#include "xgboost/span.h" // ptrdiff_t
@@ -25,11 +25,11 @@ class IndexTransformIter {
Fn fn_;
public:
- using iterator_category = std::random_access_iterator_tag; // NOLINT
- using reference = std::result_of_t<Fn(std::size_t)>; // NOLINT
- using value_type = std::remove_cv_t<std::remove_reference_t<reference>>; // NOLINT
- using difference_type = detail::ptrdiff_t; // NOLINT
- using pointer = std::add_pointer_t<value_type>; // NOLINT
+ using iterator_category = std::random_access_iterator_tag; // NOLINT
+ using reference = std::invoke_result_t<Fn, std::size_t>; // NOLINT
+ using value_type = std::remove_cv_t<std::remove_reference_t<reference>>; // NOLINT
+ using difference_type = detail::ptrdiff_t; // NOLINT
+ using pointer = std::add_pointer_t<value_type>; // NOLINT
public:
/**
diff --git a/src/data/array_interface.h b/src/data/array_interface.h
index d9e8bc802..349cda041 100644
--- a/src/data/array_interface.h
+++ b/src/data/array_interface.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2019-2023 by XGBoost Contributors
+ * Copyright 2019-2024, XGBoost Contributors
* \file array_interface.h
* \brief View of __array_interface__
*/
@@ -12,7 +12,7 @@
#include <limits>  // for numeric_limits
#include