diff --git a/R-package/NAMESPACE b/R-package/NAMESPACE index f8b409724..09924aa05 100644 --- a/R-package/NAMESPACE +++ b/R-package/NAMESPACE @@ -1,11 +1,2 @@ -importClassesFrom("Matrix", dgCMatrix, dgeMatrix) +# Generated by roxygen2 (4.0.1): do not edit by hand -export(xgboost) -export(xgb.DMatrix) -exportMethods(predict) -exportMethods(getinfo) -export(xgb.train) -export(xgb.save) -export(xgb.load) -export(xgb.dump) -export(xgb.DMatrix.save) diff --git a/R-package/R/getinfo.xgb.DMatrix.R b/R-package/R/getinfo.xgb.DMatrix.R index 6fe931808..52be489e1 100644 --- a/R-package/R/getinfo.xgb.DMatrix.R +++ b/R-package/R/getinfo.xgb.DMatrix.R @@ -7,9 +7,6 @@ setClass('xgb.DMatrix') #' @param object Object of class "xgb.DMatrix" #' @param name the name of the field to get #' -#' @section Value -#' return a numerical vector. -#' #' @examples #' data(iris) #' iris[,5] <- as.numeric(iris[,5]) diff --git a/R-package/R/predict.xgb.Booster.R b/R-package/R/predict.xgb.Booster.R index 0cec490dc..2633c081b 100644 --- a/R-package/R/predict.xgb.Booster.R +++ b/R-package/R/predict.xgb.Booster.R @@ -11,9 +11,6 @@ setClass("xgb.Booster") #' value of sum of functions, when outputmargin=TRUE, the prediction is #' untransformed margin value. In logistic regression, outputmargin=T will #' output value before logistic transformation. -#' -#' @section Value -#' return a numerical vector. #' #' @examples #' data(iris) diff --git a/R-package/R/xgb.train.R b/R-package/R/xgb.train.R index c0a0278ae..d006ae886 100644 --- a/R-package/R/xgb.train.R +++ b/R-package/R/xgb.train.R @@ -36,8 +36,6 @@ #' It supports advanced features such as watchlist, customized objective function, #' therefore it is more flexible than \code{\link{xgboost}}. #' -#' @section Value -#' return a \code{xgb.DMatrix} class object. 
#' #' @examples #' data(iris) diff --git a/R-package/R/xgboost.R b/R-package/R/xgboost.R index 9c2d05fc9..3a89d96d4 100644 --- a/R-package/R/xgboost.R +++ b/R-package/R/xgboost.R @@ -28,9 +28,6 @@ #' Parallelization is automatically enabled if OpenMP is present. #' Number of threads can also be manually specified via "nthread" parameter #' -#' @section Value -#' return a \code{xgb.DMatrix} class object. -#' #' @examples #' data(iris) #' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2) diff --git a/R-package/man/getinfo.Rd b/R-package/man/getinfo.Rd new file mode 100644 index 000000000..beee4f850 --- /dev/null +++ b/R-package/man/getinfo.Rd @@ -0,0 +1,22 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{getinfo} +\alias{getinfo} +\title{Get information of an xgb.DMatrix object} +\usage{ +getinfo(object, ...) +} +\arguments{ +\item{object}{Object of class "xgb.DMatrix"} + +\item{name}{the name of the field to get} +} +\description{ +Get information of an xgb.DMatrix object +} +\examples{ +data(iris) +iris[,5] <- as.numeric(iris[,5]) +dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5]) +labels <- getinfo(dtrain, "label") +} + diff --git a/R-package/man/predict-xgb.Booster-method.Rd b/R-package/man/predict-xgb.Booster-method.Rd new file mode 100644 index 000000000..d43fd7362 --- /dev/null +++ b/R-package/man/predict-xgb.Booster-method.Rd @@ -0,0 +1,28 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\docType{methods} +\name{predict,xgb.Booster-method} +\alias{predict,xgb.Booster-method} +\title{Predict method for eXtreme Gradient Boosting model} +\usage{ +\S4method{predict}{xgb.Booster}(object, newdata, outputmargin = FALSE) +} +\arguments{ +\item{object}{Object of class "xgb.Booster"} + +\item{newdata}{takes \code{matrix}, \code{dgCMatrix}, local data file or +\code{xgb.DMatrix}.} + +\item{outputmargin}{whether the prediction should be shown in the original + value of sum of functions, when outputmargin=TRUE, the 
#' prediction is + untransformed margin value. In logistic regression, outputmargin=T will + output value before logistic transformation.} +} +\description{ +Predicted values based on xgboost model object. +} +\examples{ +data(iris) +bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2) +pred <- predict(bst, as.matrix(iris[,1:4])) +} + diff --git a/R-package/man/xgb.DMatrix.Rd b/R-package/man/xgb.DMatrix.Rd new file mode 100644 index 000000000..166d69f68 --- /dev/null +++ b/R-package/man/xgb.DMatrix.Rd @@ -0,0 +1,28 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{xgb.DMatrix} +\alias{xgb.DMatrix} +\title{Construct xgb.DMatrix object} +\usage{ +xgb.DMatrix(data, info = list(), missing = 0, ...) +} +\arguments{ +\item{data}{a \code{matrix} object, a \code{dgCMatrix} object or a character +indicating the data file.} + +\item{info}{a list of information of the xgb.DMatrix object} + +\item{missing}{Missing is only used when input is dense matrix, pick a float} + +\item{...}{other information to pass to \code{info}.} +} +\description{ +Construct xgb.DMatrix object from dense matrix, sparse matrix or local file. 
+} +\examples{ +data(iris) +iris[,5] <- as.numeric(iris[,5]) +dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5]) +xgb.DMatrix.save(dtrain, 'iris.xgb.DMatrix') +dtrain <- xgb.DMatrix('iris.xgb.DMatrix') +} + diff --git a/R-package/man/xgb.DMatrix.save.Rd b/R-package/man/xgb.DMatrix.save.Rd new file mode 100644 index 000000000..d4932fa42 --- /dev/null +++ b/R-package/man/xgb.DMatrix.save.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{xgb.DMatrix.save} +\alias{xgb.DMatrix.save} +\title{Save xgb.DMatrix object to binary file} +\usage{ +xgb.DMatrix.save(DMatrix, fname) +} +\arguments{ +\item{DMatrix}{the xgb.DMatrix object to save.} + +\item{fname}{the name of the binary file.} +} +\description{ +Save xgb.DMatrix object to binary file +} +\examples{ +data(iris) +iris[,5] <- as.numeric(iris[,5]) +dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5]) +xgb.DMatrix.save(dtrain, 'iris.xgb.DMatrix') +dtrain <- xgb.DMatrix('iris.xgb.DMatrix') +} + diff --git a/R-package/man/xgb.dump.Rd b/R-package/man/xgb.dump.Rd new file mode 100644 index 000000000..1e0360b31 --- /dev/null +++ b/R-package/man/xgb.dump.Rd @@ -0,0 +1,25 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{xgb.dump} +\alias{xgb.dump} +\title{Save xgboost model to text file} +\usage{ +xgb.dump(model, fname, fmap = "") +} +\arguments{ +\item{model}{the model object.} + +\item{fname}{the name of the binary file.} + +\item{fmap}{feature map file representing the type of feature, to make it + look nice, run demo/demo.R for result and demo/featmap.txt for example + Format: https://github.com/tqchen/xgboost/wiki/Binary-Classification#dump-model} +} +\description{ +Save a xgboost model to text file. Could be parsed later. 
+} +\examples{ +data(iris) +bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2) +xgb.dump(bst, 'iris.xgb.model.dump') +} + diff --git a/R-package/man/xgb.load.Rd b/R-package/man/xgb.load.Rd new file mode 100644 index 000000000..980daf88d --- /dev/null +++ b/R-package/man/xgb.load.Rd @@ -0,0 +1,21 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{xgb.load} +\alias{xgb.load} +\title{Load xgboost model from binary file} +\usage{ +xgb.load(modelfile) +} +\arguments{ +\item{modelfile}{the name of the binary file.} +} +\description{ +Load xgboost model from the binary model file +} +\examples{ +data(iris) +bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2) +xgb.save(bst, 'iris.xgb.model') +bst <- xgb.load('iris.xgb.model') +pred <- predict(bst, as.matrix(iris[,1:4])) +} + diff --git a/R-package/man/xgb.save.Rd b/R-package/man/xgb.save.Rd new file mode 100644 index 000000000..ba390d1b4 --- /dev/null +++ b/R-package/man/xgb.save.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{xgb.save} +\alias{xgb.save} +\title{Save xgboost model to binary file} +\usage{ +xgb.save(model, fname) +} +\arguments{ +\item{model}{the model object.} + +\item{fname}{the name of the binary file.} +} +\description{ +Save xgboost model from xgboost or xgb.train +} +\examples{ +data(iris) +bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2) +xgb.save(bst, 'iris.xgb.model') +bst <- xgb.load('iris.xgb.model') +pred <- predict(bst, as.matrix(iris[,1:4])) +} + diff --git a/R-package/man/xgb.train.Rd b/R-package/man/xgb.train.Rd new file mode 100644 index 000000000..428ac9205 --- /dev/null +++ b/R-package/man/xgb.train.Rd @@ -0,0 +1,73 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{xgb.train} +\alias{xgb.train} +\title{eXtreme Gradient Boosting Training} +\usage{ +xgb.train(params = list(), dtrain, nrounds, watchlist = list(), + obj = NULL, feval = NULL, ...) 
+} +\arguments{ +\item{params}{the list of parameters. Commonly used ones are: + objective: objective function, common ones are + - reg:linear linear regression + - binary:logistic logistic regression for classification + eta: step size of each boosting step + max_depth: maximum depth of the tree + nthread: number of thread used in training, if not set, all threads are used + + See \url{https://github.com/tqchen/xgboost/wiki/Parameters} for + further details. See also demo/demo.R for walkthrough example in R.} + +\item{dtrain}{takes an \code{xgb.DMatrix} as the input.} + +\item{nrounds}{the max number of iterations} + +\item{watchlist}{what information should be printed when \code{verbose=1} or + \code{verbose=2}. Watchlist is used to specify validation set monitoring + during training. For example user can specify + watchlist=list(validation1=mat1, validation2=mat2) to watch + the performance of each round's model on mat1 and mat2} + +\item{obj}{customized objective function. Given prediction and dtrain, +return gradient and second order gradient.} + +\item{feval}{customized evaluation function. Given prediction and dtrain, +return a \code{list(metric='metric-name', value='metric-value')}.} + +\item{...}{other parameters to pass to \code{params}.} +} +\description{ +The training function of xgboost +} +\details{ +This is the training function for xgboost. + +Parallelization is automatically enabled if OpenMP is present. +Number of threads can also be manually specified via "nthread" parameter. + +This function only accepts an \code{xgb.DMatrix} object as the input. +It supports advanced features such as watchlist, customized objective function, +therefore it is more flexible than \code{\link{xgboost}}. 
+} +\examples{ +data(iris) +iris[,5] <- as.numeric(iris[,5]) +dtrain = xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5]) +dtest = dtrain +watchlist <- list(eval = dtest, train = dtrain) +param <- list(max_depth = 2, eta = 1, silent = 1) +logregobj <- function(preds, dtrain) { + labels <- getinfo(dtrain, "label") + preds <- 1/(1 + exp(-preds)) + grad <- preds - labels + hess <- preds * (1 - preds) + return(list(grad = grad, hess = hess)) +} +evalerror <- function(preds, dtrain) { + labels <- getinfo(dtrain, "label") + err <- as.numeric(sum(labels != (preds > 0)))/length(labels) + return(list(metric = "error", value = err)) +} +bst <- xgb.train(param, dtrain, nround = 2, watchlist, logregobj, evalerror) +} + diff --git a/R-package/man/xgboost.Rd b/R-package/man/xgboost.Rd new file mode 100644 index 000000000..d9de7a421 --- /dev/null +++ b/R-package/man/xgboost.Rd @@ -0,0 +1,48 @@ +% Generated by roxygen2 (4.0.1): do not edit by hand +\name{xgboost} +\alias{xgboost} +\title{eXtreme Gradient Boosting (Tree) library} +\usage{ +xgboost(data = NULL, label = NULL, params = list(), nrounds, + verbose = 1, ...) +} +\arguments{ +\item{data}{takes \code{matrix}, \code{dgCMatrix}, local data file or +\code{xgb.DMatrix}.} + +\item{label}{the response variable. User should not set this field,} + +\item{params}{the list of parameters. Commonly used ones are: + objective: objective function, common ones are + - reg:linear linear regression + - binary:logistic logistic regression for classification + eta: step size of each boosting step + max_depth: maximum depth of the tree + nthread: number of thread used in training, if not set, all threads are used + + See \url{https://github.com/tqchen/xgboost/wiki/Parameters} for + further details. See also demo/demo.R for walkthrough example in R.} + +\item{nrounds}{the max number of iterations} + +\item{verbose}{If 0, xgboost will stay silent. If 1, xgboost will print +information of performance. 
If 2, xgboost will print information of both +performance and construction progress information} + +\item{...}{other parameters to pass to \code{params}.} +} +\description{ +A simple interface for xgboost in R +} +\details{ +This is the modeling function for xgboost. + +Parallelization is automatically enabled if OpenMP is present. +Number of threads can also be manually specified via "nthread" parameter +} +\examples{ +data(iris) +bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2) +pred <- predict(bst, as.matrix(iris[,1:4])) +} +