[R] in predict: doc, examples, reshape parameter

Vadim Khotilovich 2016-06-27 01:49:57 -05:00
parent c342614a81
commit b9aeeda074


@@ -1,14 +1,11 @@
 # Construct a Booster from cachelist
 # internal utility function
 xgb.Booster <- function(params = list(), cachelist = list(), modelfile = NULL) {
-  if (typeof(cachelist) != "list") {
+  if (typeof(cachelist) != "list" ||
+      any(sapply(cachelist, class) != 'xgb.DMatrix')) {
     stop("xgb.Booster only accepts list of DMatrix as cachelist")
   }
-  for (dm in cachelist) {
-    if (class(dm) != "xgb.DMatrix") {
-      stop("xgb.Booster only accepts list of DMatrix as cachelist")
-    }
-  }
   handle <- .Call("XGBoosterCreate_R", cachelist, PACKAGE = "xgboost")
   if (!is.null(modelfile)) {
     if (typeof(modelfile) == "character") {
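The consolidated check above rejects both a non-list input and a list containing non-DMatrix elements in one vectorized test. A minimal sketch of the resulting behavior (xgb.Booster is internal, hence the ::: access; dtrain is a hypothetical cache entry built here for illustration):

    library(xgboost)
    data(agaricus.train, package = 'xgboost')
    dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
    b <- xgboost:::xgb.Booster(cachelist = list(dtrain))     # ok: a list of xgb.DMatrix
    try(xgboost:::xgb.Booster(cachelist = dtrain))           # error: not a list
    try(xgboost:::xgb.Booster(cachelist = list(dtrain, 1)))  # error: non-DMatrix element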
@@ -54,6 +51,9 @@ xgb.get.handle <- function(object) {
 # Check whether an xgb.Booster object is complete
 # internal utility function
 xgb.Booster.check <- function(bst, saveraw = TRUE) {
+  if (class(bst) != "xgb.Booster")
+    stop("argument type must be xgb.Booster")
   isnull <- is.null(bst$handle)
   if (!isnull) {
     isnull <- .Call("XGCheckNullPtr_R", bst$handle, PACKAGE="xgboost")
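With the added guard, passing anything other than an xgb.Booster now fails fast rather than erroring later on a missing $handle. A one-line sketch (the function is internal):

    try(xgboost:::xgb.Booster.check(list()))  # error: argument type must be xgb.Booster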
@@ -67,48 +67,118 @@ xgb.Booster.check <- function(bst, saveraw = TRUE) {
   return(bst)
 }
 
 #' Predict method for eXtreme Gradient Boosting model
 #'
 #' Predicted values based on either xgboost model or model handle object.
 #'
 #' @param object Object of class \code{xgb.Booster} or \code{xgb.Booster.handle}
-#' @param newdata takes \code{matrix}, \code{dgCMatrix}, local data file or
-#' \code{xgb.DMatrix}.
-#' @param missing Missing is only used when input is dense matrix, pick a float
-#' value that represents missing value. Sometime a data use 0 or other extreme value to represents missing values.
-#' @param outputmargin whether the prediction should be shown in the original
-#' value of sum of functions, when outputmargin=TRUE, the prediction is
-#' untransformed margin value. In logistic regression, outputmargin=T will
-#' output value before logistic transformation.
-#' @param ntreelimit limit number of trees used in prediction, this parameter is
-#' only valid for gbtree, but not for gblinear. set it to be value bigger
-#' than 0. It will use all trees by default.
-#' @param predleaf whether predict leaf index instead. If set to TRUE, the output will be a matrix object.
-#' @param ... Parameters pass to \code{predict.xgb.Booster}
+#' @param newdata takes \code{matrix}, \code{dgCMatrix}, local data file or \code{xgb.DMatrix}.
+#' @param missing only used when input is a dense matrix. Pick a float value that represents
+#' missing values in data (e.g., sometimes 0 or some other extreme value is used).
+#' @param outputmargin whether the prediction should be returned in the form of the original untransformed
+#' sum of predictions from boosting iterations' results. E.g., setting \code{outputmargin=TRUE} for
+#' logistic regression would result in predictions for log-odds instead of probabilities.
+#' @param ntreelimit limit the number of the model's trees or boosting iterations used in prediction (see Details).
+#' It will use all the trees by default (\code{NULL} value).
+#' @param predleaf whether to predict leaf indices instead.
+#' @param reshape whether to reshape the vector of predictions to a matrix form when there are several
+#' prediction outputs per case. This option has no effect when \code{predleaf = TRUE}.
+#' @param ... Parameters passed to \code{predict.xgb.Booster}
 #'
 #' @details
-#' The option \code{ntreelimit} purpose is to let the user train a model with lots
-#' of trees but use only the first trees for prediction to avoid overfitting
-#' (without having to train a new model with less trees).
+#' Note that \code{ntreelimit} is not necessarily equal to the number of boosting iterations,
+#' nor is it necessarily equal to the number of trees in a model.
+#' E.g., in a random forest-like model, \code{ntreelimit} would limit the number of trees,
+#' while for multiclass classification, where there are multiple trees per iteration,
+#' \code{ntreelimit} limits the number of boosting iterations.
 #'
-#' The option \code{predleaf} purpose is inspired from §3.1 of the paper
-#' \code{Practical Lessons from Predicting Clicks on Ads at Facebook}.
-#' The idea is to use the model as a generator of new features which capture non linear link
-#' from original features.
+#' Also note that \code{ntreelimit} would currently do nothing for predictions from gblinear,
+#' since gblinear doesn't keep its boosting history.
+#'
+#' One possible practical application of the \code{predleaf} option is to use the model
+#' as a generator of new features which capture non-linearity and interactions,
+#' e.g., as implemented in \code{\link{xgb.create.features}}.
+#'
+#' @return
+#' For regression or binary classification, it returns a vector of length \code{nrow(newdata)}.
+#' For multiclass classification, either a \code{num_class * nrow(newdata)} vector or
+#' a \code{(nrow(newdata), num_class)} dimension matrix is returned, depending on
+#' the \code{reshape} value.
+#'
+#' When \code{predleaf = TRUE}, the output is a matrix object with the
+#' number of columns corresponding to the number of trees.
+#'
+#' @seealso
+#' \code{\link{xgb.train}}.
 #'
 #' @examples
+#' ## binary classification:
+#'
 #' data(agaricus.train, package='xgboost')
 #' data(agaricus.test, package='xgboost')
 #' train <- agaricus.train
 #' test <- agaricus.test
 #'
-#' bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
-#'                eta = 1, nthread = 2, nround = 2,objective = "binary:logistic")
+#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
+#'                eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
+#' # use all trees by default
 #' pred <- predict(bst, test$data)
+#' # use only the 1st tree
+#' pred <- predict(bst, test$data, ntreelimit = 1)
+#'
+#'
+#' ## multiclass classification in iris dataset:
+#'
+#' lb <- as.numeric(iris$Species) - 1
+#' num_class <- 3
+#' set.seed(11)
+#' bst <- xgboost(data = as.matrix(iris[, -5]), label = lb,
+#'                max_depth = 4, eta = 0.5, nthread = 2, nrounds = 10, subsample = 0.5,
+#'                objective = "multi:softprob", num_class = num_class)
+#' # predict for softprob returns num_class probability numbers per case:
+#' pred <- predict(bst, as.matrix(iris[, -5]))
+#' str(pred)
+#' # reshape it to a num_class-columns matrix
+#' pred <- matrix(pred, ncol = num_class, byrow = TRUE)
+#' # convert the probabilities to softmax labels
+#' pred_labels <- max.col(pred) - 1
+#' # the following should result in the same error as seen in the last iteration
+#' sum(pred_labels != lb) / length(lb)
+#'
+#' # compare that to the predictions from softmax:
+#' set.seed(11)
+#' bst <- xgboost(data = as.matrix(iris[, -5]), label = lb,
+#'                max_depth = 4, eta = 0.5, nthread = 2, nrounds = 10, subsample = 0.5,
+#'                objective = "multi:softmax", num_class = num_class)
+#' pred <- predict(bst, as.matrix(iris[, -5]))
+#' str(pred)
+#' all.equal(pred, pred_labels)
+#' # prediction from using only 5 iterations should result
+#' # in the same error as seen in iteration 5:
+#' pred5 <- predict(bst, as.matrix(iris[, -5]), ntreelimit = 5)
+#' sum(pred5 != lb) / length(lb)
+#'
+#'
+#' ## random forest-like model of 25 trees for binary classification:
+#'
+#' set.seed(11)
+#' bst <- xgboost(data = train$data, label = train$label, max_depth = 5,
+#'                nthread = 2, nrounds = 1, objective = "binary:logistic",
+#'                num_parallel_tree = 25, subsample = 0.6, colsample_bytree = 0.1)
+#' # Inspect the prediction error vs number of trees:
+#' lb <- test$label
+#' dtest <- xgb.DMatrix(test$data, label = lb)
+#' err <- sapply(1:25, function(n) {
+#'   pred <- predict(bst, dtest, ntreelimit = n)
+#'   sum((pred > 0.5) != lb) / length(lb)
+#' })
+#' plot(err, type = 'l', ylim = c(0, 0.1), xlab = '#trees')
+#'
 #' @rdname predict.xgb.Booster
 #' @export
 predict.xgb.Booster <- function(object, newdata, missing = NA,
-                                outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE) {
+                                outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE, reshape = FALSE) {
   object <- xgb.Booster.check(object, saveraw = FALSE)
   if (class(newdata) != "xgb.DMatrix")
@@ -116,16 +186,26 @@ predict.xgb.Booster <- function(object, newdata, missing = NA,
   if (is.null(ntreelimit))
     ntreelimit <- NVL(object$best_ntreelimit, 0)
   if (ntreelimit < 0)
-    stop("ntreelimit must be positive")
+    stop("ntreelimit cannot be negative")
   option <- 0L + 1L * as.logical(outputmargin) + 2L * as.logical(predleaf)
   ret <- .Call("XGBoosterPredict_R", object$handle, newdata, option[1],
                as.integer(ntreelimit), PACKAGE = "xgboost")
+  if (length(ret) %% nrow(newdata) != 0)
+    stop("prediction length ", length(ret), " is not a multiple of nrow(newdata) ", nrow(newdata))
+  npred_per_case <- length(ret) / nrow(newdata)
   if (predleaf) {
     len <- nrow(newdata)
-    ret <- if (length(ret) == len) matrix(ret, ncol = 1)
-           else t(matrix(ret, ncol = len))
+    ret <- if (length(ret) == len) {
+      matrix(ret, ncol = 1)
+    } else {
+      t(matrix(ret, ncol = len))
+    }
+  } else if (reshape && npred_per_case > 1) {
+    ret <- matrix(ret, ncol = npred_per_case, byrow = TRUE)
   }
   return(ret)
 }
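As a usage note, the new reshape argument makes the manual matrix() reshaping in the multiclass example above unnecessary. A minimal sketch (assuming bst and lb are the multi:softprob model and labels from those examples):

    pred_mat <- predict(bst, as.matrix(iris[, -5]), reshape = TRUE)
    dim(pred_mat)                         # expected: 150 x 3, i.e. nrow(newdata) x num_class
    pred_labels <- max.col(pred_mat) - 1  # row-wise argmax, as in the example
    sum(pred_labels != lb) / length(lb)   # same error rate as computed there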
@@ -169,9 +249,13 @@ predict.xgb.Booster.handle <- function(object, ...) {
 #'
 #' The attribute setters would usually work more efficiently for \code{xgb.Booster.handle}
 #' than for \code{xgb.Booster}, since only a handle (pointer) would need to be copied.
+#' That would only matter if attributes need to be set many times.
+#' Note, however, that when feeding a handle of an \code{xgb.Booster} object to the attribute setters,
+#' the raw model cache of an \code{xgb.Booster} object would not be automatically updated,
+#' and it would be the user's responsibility to call \code{xgb.save.raw} to update it.
 #'
 #' The \code{xgb.attributes<-} setter either updates the existing or adds one or several attributes,
-#' but doesn't delete the existing attributes which don't have their names in \code{names(attributes)}.
+#' but it doesn't delete the other existing attributes.
 #'
 #' @return
 #' \code{xgb.attr} returns either a string value of an attribute
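A short sketch of the raw-cache caveat described above (it assumes bst is a trained xgb.Booster, and that xgb.save.raw accepts a handle, as the setters here do):

    xgb.attr(bst$handle, "updated") <- "yes"  # fast path: writes through the handle only
    # bst$raw still caches the previous model state, so refresh it manually:
    bst$raw <- xgb.save.raw(bst$handle)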
@@ -184,8 +268,8 @@ predict.xgb.Booster.handle <- function(object, ...) {
 #' data(agaricus.train, package='xgboost')
 #' train <- agaricus.train
 #'
-#' bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
-#'                eta = 1, nthread = 2, nround = 2, objective = "binary:logistic")
+#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
+#'                eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
 #'
 #' xgb.attr(bst, "my_attribute") <- "my attribute value"
 #' print(xgb.attr(bst, "my_attribute"))
@@ -279,8 +363,8 @@ xgb.attributes <- function(object) {
 #' data(agaricus.train, package='xgboost')
 #' train <- agaricus.train
 #'
-#' bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
-#'                eta = 1, nthread = 2, nround = 2, objective = "binary:logistic")
+#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
+#'                eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
 #'
 #' xgb.parameters(bst) <- list(eta = 0.1)
 #'
@@ -304,6 +388,12 @@ xgb.attributes <- function(object) {
   object
 }
 
+# Extract # of trees in a model
+# TODO: either add a getter to C-interface, or simply set an 'ntree' attribute after each iteration
+# internal utility function
+xgb.ntree <- function(bst) {
+  length(grep('^booster', xgb.dump(bst)))
+}
 
 #' Print xgb.Booster
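Since xgb.dump emits one 'booster[i]' header line per tree, xgb.ntree simply counts those headers. E.g., for the multi:softprob model from the predict examples (10 rounds, 3 classes, one tree per class per iteration), the count should be 30. A sketch (xgb.ntree is internal, hence the ::: access):

    head(xgb.dump(bst), 3)    # the dump starts with 'booster[0]' followed by tree nodes
    xgboost:::xgb.ntree(bst)  # expected: 30 trees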
@@ -317,8 +407,8 @@ xgb.attributes <- function(object) {
 #' @examples
 #' data(agaricus.train, package='xgboost')
 #' train <- agaricus.train
-#' bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
-#'                eta = 1, nthread = 2, nround = 2, objective = "binary:logistic")
+#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
+#'                eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
 #' attr(bst, 'myattr') <- 'memo'
 #'
 #' print(bst)
@@ -334,9 +424,11 @@ print.xgb.Booster <- function(x, verbose=FALSE, ...) {
   }
   cat('raw: ')
-  if (!is.null(x$raw)) cat(format(object.size(x$raw), units="auto"), '\n')
-  else cat('NULL\n')
+  if (!is.null(x$raw)) {
+    cat(format(object.size(x$raw), units = "auto"), '\n')
+  } else {
+    cat('NULL\n')
+  }
   if (!is.null(x$call)) {
     cat('call:\n ')
     print(x$call)
@@ -371,7 +463,11 @@ print.xgb.Booster <- function(x, verbose=FALSE, ...) {
   })
 }
 
-  for (n in setdiff(names(x), c('handle', 'raw', 'call', 'params', 'callbacks', 'evaluation_log'))) {
+  cat('niter: ', x$niter, '\n', sep='')
+  # TODO: uncomment when a faster xgb.ntree is implemented
+  #cat('ntree: ', xgb.ntree(x), '\n', sep='')
+  for (n in setdiff(names(x), c('handle', 'raw', 'call', 'params', 'callbacks', 'evaluation_log', 'niter'))) {
     if (is.atomic(x[[n]])) {
       cat(n, ': ', x[[n]], '\n', sep='')
     } else {