compile Rd files, i.e. R documentation files

hetong 2014-08-28 08:12:48 -07:00
parent df6cd25fd5
commit 73419f6cd7
14 changed files with 292 additions and 21 deletions


@@ -1,11 +1,2 @@
importClassesFrom("Matrix", dgCMatrix, dgeMatrix)
# Generated by roxygen2 (4.0.1): do not edit by hand
export(xgboost)
export(xgb.DMatrix)
exportMethods(predict)
exportMethods(getinfo)
export(xgb.train)
export(xgb.save)
export(xgb.load)
export(xgb.dump)
export(xgb.DMatrix.save)


@@ -7,9 +7,6 @@ setClass('xgb.DMatrix')
#' @param object Object of class "xgb.DMatrix"
#' @param name the name of the field to get
#'
#' @section Value
#' return a numerical vector.
#'
#' @examples
#' data(iris)
#' iris[,5] <- as.numeric(iris[,5])


@@ -12,9 +12,6 @@ setClass("xgb.Booster")
#' untransformed margin value. In logistic regression, outputmargin=T will
#' output value before logistic transformation.
#'
#' @section Value
#' return a numerical vector.
#'
#' @examples
#' data(iris)
#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)


@@ -36,8 +36,6 @@
#' It supports advanced features such as watchlist, customized objective function,
#' therefore it is more flexible than \code{\link{xgboost}}.
#'
#' @section Value
#' return a \code{xgb.DMatrix} class object.
#'
#' @examples
#' data(iris)


@@ -28,9 +28,6 @@
#' Parallelization is automatically enabled if OpenMP is present.
#' Number of threads can also be manually specified via "nthread" parameter
#'
#' @section Value
#' return a \code{xgb.DMatrix} class object.
#'
#' @examples
#' data(iris)
#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)

R-package/man/getinfo.Rd (new file)

@@ -0,0 +1,22 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{getinfo}
\alias{getinfo}
\title{Get information of an xgb.DMatrix object}
\usage{
getinfo(object, ...)
}
\arguments{
\item{object}{Object of class "xgb.DMatrix"}
\item{name}{the name of the field to get}
}
\description{
Get information of an xgb.DMatrix object
}
\examples{
data(iris)
iris[,5] <- as.numeric(iris[,5])
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
labels <- getinfo(dtrain, "label")
}
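
As a quick sanity check, the labels returned by getinfo should match the vector
passed to xgb.DMatrix. A minimal sketch, assuming only the functions documented
in this commit:

# Sketch: getinfo("label") returns the labels supplied at construction time.
data(iris)
iris[,5] <- as.numeric(iris[,5])
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label = iris[,5])
labels <- getinfo(dtrain, "label")
stopifnot(all.equal(as.numeric(iris[,5]), as.numeric(labels)))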


@@ -0,0 +1,28 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\docType{methods}
\name{predict,xgb.Booster-method}
\alias{predict,xgb.Booster-method}
\title{Predict method for eXtreme Gradient Boosting model}
\usage{
\S4method{predict}{xgb.Booster}(object, newdata, outputmargin = FALSE)
}
\arguments{
\item{object}{Object of class "xgb.Booster"}
\item{newdata}{takes \code{matrix}, \code{dgCMatrix}, local data file or
\code{xgb.DMatrix}.}
\item{outputmargin}{whether the prediction should be returned as the
untransformed sum of the boosted functions (the margin). In logistic
regression, outputmargin = TRUE returns the value before the logistic
transformation.}
}
\description{
Predicted values based on xgboost model object.
}
\examples{
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
pred <- predict(bst, as.matrix(iris[,1:4]))
}
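
To illustrate the outputmargin behaviour described above, here is a minimal
sketch; it assumes the binary:logistic objective (passed through params) and a
0/1 label, so the transformed prediction should equal the logistic function of
the raw margin:

# Sketch: with a logistic objective, prediction = 1 / (1 + exp(-margin)).
data(iris)
y <- as.numeric(iris[,5] == "versicolor")    # a 0/1 label for binary:logistic
bst <- xgboost(as.matrix(iris[,1:4]), y,
               params = list(objective = "binary:logistic"), nrounds = 2)
prob   <- predict(bst, as.matrix(iris[,1:4]))                       # transformed
margin <- predict(bst, as.matrix(iris[,1:4]), outputmargin = TRUE)  # raw margin
all.equal(prob, 1/(1 + exp(-margin)), tolerance = 1e-6)  # TRUE up to float precision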


@@ -0,0 +1,28 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgb.DMatrix}
\alias{xgb.DMatrix}
\title{Construct an xgb.DMatrix object}
\usage{
xgb.DMatrix(data, info = list(), missing = 0, ...)
}
\arguments{
\item{data}{a \code{matrix} object, a \code{dgCMatrix} object or a character
indicating the data file.}
\item{info}{a named list of information to set on the xgb.DMatrix object (e.g. label)}
\item{missing}{used only when \code{data} is a dense matrix: the value that should be treated as missing}
\item{...}{other information to pass to \code{info}.}
}
\description{
Construct an xgb.DMatrix object from a dense matrix, a sparse matrix, or a local file.
}
\examples{
data(iris)
iris[,5] <- as.numeric(iris[,5])
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
xgb.DMatrix.save(dtrain, 'iris.xgb.DMatrix')
dtrain <- xgb.DMatrix('iris.xgb.DMatrix')
}
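
The sketch below exercises the other documented inputs: a sparse dgCMatrix
(this assumes the Matrix package, from which the NAMESPACE already imports) and
a dense matrix with a custom missing-value code:

# Sketch: sparse input, and the `missing` argument for dense input.
library(Matrix)
data(iris)
x <- as.matrix(iris[,1:4])
x_sparse <- Matrix(x, sparse = TRUE)          # a dgCMatrix
dsparse <- xgb.DMatrix(x_sparse, label = as.numeric(iris[,5]))
x_na <- x
x_na[1, 1] <- -999                            # pretend -999 encodes a missing entry
dmiss <- xgb.DMatrix(x_na, label = as.numeric(iris[,5]), missing = -999)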


@@ -0,0 +1,23 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgb.DMatrix.save}
\alias{xgb.DMatrix.save}
\title{Save xgb.DMatrix object to binary file}
\usage{
xgb.DMatrix.save(DMatrix, fname)
}
\arguments{
\item{DMatrix}{the \code{xgb.DMatrix} object to save.}
\item{fname}{the name of the binary file.}
}
\description{
Save xgb.DMatrix object to binary file
}
\examples{
data(iris)
iris[,5] <- as.numeric(iris[,5])
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
xgb.DMatrix.save(dtrain, 'iris.xgb.DMatrix')
dtrain <- xgb.DMatrix('iris.xgb.DMatrix')
}
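
A short round-trip sketch (relying on getinfo as documented above): the labels
of the reloaded DMatrix should match the original.

# Sketch: save, reload, and compare labels.
data(iris)
iris[,5] <- as.numeric(iris[,5])
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label = iris[,5])
xgb.DMatrix.save(dtrain, 'iris.xgb.DMatrix')
dtrain2 <- xgb.DMatrix('iris.xgb.DMatrix')
stopifnot(all.equal(getinfo(dtrain, "label"), getinfo(dtrain2, "label")))
file.remove('iris.xgb.DMatrix')               # clean up the temporary file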

R-package/man/xgb.dump.Rd (new file)

@@ -0,0 +1,25 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgb.dump}
\alias{xgb.dump}
\title{Save xgboost model to text file}
\usage{
xgb.dump(model, fname, fmap = "")
}
\arguments{
\item{model}{the model object.}
\item{fname}{the name of the text file to write the dump to.}
\item{fmap}{feature map file giving the name and type of each feature, used to
make the dump more readable. See demo/demo.R for usage and demo/featmap.txt for
an example.
Format: \url{https://github.com/tqchen/xgboost/wiki/Binary-Classification#dump-model}}
}
\description{
Save an xgboost model to a text file that can be parsed later.
}
\examples{
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
xgb.dump(bst, 'iris.xgb.model.dump')
}
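
Because the dump is plain text, it can be inspected with base R. A minimal
sketch (the exact line format depends on the xgboost version):

# Sketch: read the dumped model back as text and peek at the first lines.
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]), as.numeric(iris[,5]), nrounds = 2)
xgb.dump(bst, 'iris.xgb.model.dump')
dump_lines <- readLines('iris.xgb.model.dump')
head(dump_lines)                              # tree nodes: split feature, threshold, leaf values
file.remove('iris.xgb.model.dump')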

R-package/man/xgb.load.Rd (new file)

@@ -0,0 +1,21 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgb.load}
\alias{xgb.load}
\title{Load xgboost model from binary file}
\usage{
xgb.load(modelfile)
}
\arguments{
\item{modelfile}{the name of the binary file.}
}
\description{
Load an xgboost model from a binary model file.
}
\examples{
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
xgb.save(bst, 'iris.xgb.model')
bst <- xgb.load('iris.xgb.model')
pred <- predict(bst, as.matrix(iris[,1:4]))
}
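
A round-trip sketch using only xgb.save, xgb.load and predict as documented
here: predictions from the reloaded model should be identical to those of the
original.

# Sketch: saving and loading should not change predictions.
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]), as.numeric(iris[,5]), nrounds = 2)
pred_before <- predict(bst, as.matrix(iris[,1:4]))
xgb.save(bst, 'iris.xgb.model')
bst2 <- xgb.load('iris.xgb.model')
pred_after <- predict(bst2, as.matrix(iris[,1:4]))
stopifnot(all.equal(pred_before, pred_after))
file.remove('iris.xgb.model')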

R-package/man/xgb.save.Rd (new file)

@@ -0,0 +1,23 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgb.save}
\alias{xgb.save}
\title{Save xgboost model to binary file}
\usage{
xgb.save(model, fname)
}
\arguments{
\item{model}{the model object.}
\item{fname}{the name of the binary file.}
}
\description{
Save an xgboost model (from \code{xgboost} or \code{xgb.train}) to a binary file.
}
\examples{
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
xgb.save(bst, 'iris.xgb.model')
bst <- xgb.load('iris.xgb.model')
pred <- predict(bst, as.matrix(iris[,1:4]))
}


@@ -0,0 +1,73 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgb.train}
\alias{xgb.train}
\title{eXtreme Gradient Boosting Training}
\usage{
xgb.train(params = list(), dtrain, nrounds, watchlist = list(),
obj = NULL, feval = NULL, ...)
}
\arguments{
\item{params}{the list of parameters. Commonly used ones are:
objective: objective function, common ones are
- reg:linear linear regression
- binary:logistic logistic regression for classification
eta: step size of each boosting step
max_depth: maximum depth of the tree
nthread: number of threads used in training; if not set, all threads are used
See \url{https://github.com/tqchen/xgboost/wiki/Parameters} for
further details. See also demo/demo.R for walkthrough example in R.}
\item{dtrain}{takes an \code{xgb.DMatrix} as the input.}
\item{nrounds}{the max number of iterations}
\item{watchlist}{what information should be printed when \code{verbose=1} or
\code{verbose=2}. Watchlist is used to specify validation set monitoring
during training. For example, a user can specify
watchlist = list(validation1 = mat1, validation2 = mat2) to watch
the performance of each round's model on mat1 and mat2.}
\item{obj}{customized objective function. Given prediction and dtrain,
return gradient and second order gradient.}
\item{feval}{customized evaluation function. Given prediction and dtrain,
return a \code{list(metric='metric-name', value='metric-value')}.}
\item{...}{other parameters to pass to \code{params}.}
}
\description{
The training function of xgboost
}
\details{
This is the training function for xgboost.
Parallelization is automatically enabled if OpenMP is present.
Number of threads can also be manually specified via "nthread" parameter.
This function only accepts an \code{xgb.DMatrix} object as the input.
It supports advanced features such as watchlist and customized objective
functions, and is therefore more flexible than \code{\link{xgboost}}.
}
\examples{
data(iris)
iris[,5] <- as.numeric(iris[,5])
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
dtest <- dtrain
watchlist <- list(eval = dtest, train = dtrain)
param <- list(max_depth = 2, eta = 1, silent = 1)
logregobj <- function(preds, dtrain) {
labels <- getinfo(dtrain, "label")
preds <- 1/(1 + exp(-preds))
grad <- preds - labels
hess <- preds * (1 - preds)
return(list(grad = grad, hess = hess))
}
evalerror <- function(preds, dtrain) {
labels <- getinfo(dtrain, "label")
err <- as.numeric(sum(labels != (preds > 0)))/length(labels)
return(list(metric = "error", value = err))
}
bst <- xgb.train(param, dtrain, nrounds = 2, watchlist, logregobj, evalerror)
}
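
For comparison with the customized-objective example above, a sketch using a
built-in objective (it assumes "binary:logistic" is available, so the label
must be 0/1):

# Sketch: xgb.train with a built-in objective and a watchlist.
data(iris)
y <- as.numeric(iris[,5] == "versicolor")     # 0/1 label
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label = y)
param <- list(objective = "binary:logistic", max_depth = 2, eta = 1, nthread = 2)
watchlist <- list(train = dtrain)
bst <- xgb.train(param, dtrain, nrounds = 2, watchlist)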

R-package/man/xgboost.Rd (new file)

@@ -0,0 +1,48 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgboost}
\alias{xgboost}
\title{eXtreme Gradient Boosting (Tree) library}
\usage{
xgboost(data = NULL, label = NULL, params = list(), nrounds,
verbose = 1, ...)
}
\arguments{
\item{data}{takes \code{matrix}, \code{dgCMatrix}, local data file or
\code{xgb.DMatrix}.}
\item{label}{the response variable. User should not set this field if \code{data} is a local data file or an \code{xgb.DMatrix}.}
\item{params}{the list of parameters. Commonly used ones are:
objective: objective function, common ones are
- reg:linear linear regression
- binary:logistic logistic regression for classification
eta: step size of each boosting step
max_depth: maximum depth of the tree
nthread: number of threads used in training; if not set, all threads are used
See \url{https://github.com/tqchen/xgboost/wiki/Parameters} for
further details. See also demo/demo.R for walkthrough example in R.}
\item{nrounds}{the max number of iterations}
\item{verbose}{If 0, xgboost will stay silent. If 1, xgboost will print
performance information. If 2, xgboost will print both performance and
construction progress information.}
\item{...}{other parameters to pass to \code{params}.}
}
\description{
A simple interface for xgboost in R
}
\details{
This is the modeling function for xgboost.
Parallelization is automatically enabled if OpenMP is present.
Number of threads can also be manually specified via the "nthread" parameter.
}
\examples{
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
pred <- predict(bst, as.matrix(iris[,1:4]))
}
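
A final sketch of the convenience interface, relying on the documented
behaviour that extra arguments in ... are passed on to params and that
verbose = 0 silences output:

# Sketch: passing booster parameters through `...` and training silently.
data(iris)
bst <- xgboost(as.matrix(iris[,1:4]), as.numeric(iris[,5]),
               nrounds = 2, verbose = 0, max_depth = 3, eta = 0.5)
pred <- predict(bst, as.matrix(iris[,1:4]))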