refine doc, with Rd

This commit is contained in:
hetong 2014-09-06 11:17:38 -07:00
parent 7879db8702
commit f1d7b012a6
10 changed files with 139 additions and 15 deletions


@@ -5,11 +5,11 @@ export(setinfo)
export(slice)
export(xgb.DMatrix)
export(xgb.DMatrix.save)
export(xgb.cv)
export(xgb.dump)
export(xgb.load)
export(xgb.save)
export(xgb.train)
export(xgb.cv)
export(xgboost)
exportMethods(predict)
import(methods)


@@ -8,7 +8,10 @@ setClass('xgb.DMatrix')
#' data(iris)
#' iris[,5] <- as.numeric(iris[,5]=='setosa')
#' dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
#' labels <- getinfo(dtrain, "label")
#' labels <- getinfo(dtrain, 'label')
#' setinfo(dtrain, 'label', 1-labels)
#' labels2 <- getinfo(dtrain, 'label')
#' stopifnot(all(labels2 == 1-labels))
#' @rdname getinfo
#' @export
#'


@@ -6,7 +6,10 @@
#' data(iris)
#' iris[,5] <- as.numeric(iris[,5]=='setosa')
#' dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
#' labels <- getinfo(dtrain, "label")
#' labels <- getinfo(dtrain, 'label')
#' setinfo(dtrain, 'label', 1-labels)
#' labels2 <- getinfo(dtrain, 'label')
#' stopifnot(all(labels2 == 1-labels))
#' @rdname setinfo
#' @export
#'
@@ -16,9 +19,10 @@ setinfo <- function(object, ...){
#' @param object Object of class "xgb.DMatrix"
#' @param name the name of the field to set
#' @param info the specific field of information to set
#' @param ... other parameters
#' @rdname getinfo
#' @method getinfo xgb.DMatrix
#' @rdname setinfo
#' @method setinfo xgb.DMatrix
setMethod("setinfo", signature = "xgb.DMatrix",
definition = function(object, name, info) {
xgb.setinfo(object, name, info)


@@ -1,6 +1,6 @@
#' eXtreme Gradient Boosting Training
#' Cross Validation
#'
#' The training function of xgboost
#' The cross validation function of xgboost
#'
#' @param params the list of parameters. Commonly used ones are:
#' \itemize{
@@ -61,7 +61,7 @@ xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL,
params <- append(params, list(silent=1))
for (mc in metrics) {
params <- append(params, list("eval_metric"=mc))
}
}
folds <- xgb.cv.mknfold(dtrain, nfold, params)
history <- list()
@@ -70,7 +70,8 @@ xgb.cv <- function(params=list(), data, nrounds, nfold, label = NULL,
for (k in 1:nfold) {
fd <- folds[[k]]
succ <- xgb.iter.update(fd$booster, fd$dtrain, i - 1, obj)
msg[[k]] <- strsplit(xgb.iter.eval(fd$booster, fd$watchlist, i - 1, feval), "\t")[[1]]
msg[[k]] <- strsplit(xgb.iter.eval(fd$booster, fd$watchlist, i - 1, feval),
"\t")[[1]]
}
ret <- xgb.cv.aggcv(msg, showsd)
history <- append(history, ret)
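For intuition only, here is a minimal R sketch of the aggregation step that the loop above feeds into: per-fold evaluation values are reduced to a mean and, when showsd is TRUE, a standard deviation. The helper name aggregate_folds and the sample numbers are made up for this illustration; the package's internal xgb.cv.aggcv is not reproduced here.

# Hypothetical helper, not part of the package: summarise one metric across folds.
aggregate_folds <- function(fold_values, showsd = TRUE) {
  out <- list(mean = mean(fold_values))
  if (showsd) {
    # this is the standard deviation that the showsd argument controls
    out$sd <- sd(fold_values)
  }
  out
}

# e.g. classification error collected from a 5-fold run
aggregate_folds(c(0.21, 0.19, 0.24, 0.20, 0.22))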


@@ -23,6 +23,9 @@ Get information of an xgb.DMatrix object
data(iris)
iris[,5] <- as.numeric(iris[,5]=='setosa')
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
labels <- getinfo(dtrain, "label")
labels <- getinfo(dtrain, 'label')
setinfo(dtrain, 'label', 1-labels)
labels2 <- getinfo(dtrain, 'label')
stopifnot(all(labels2 == 1-labels))
}

R-package/man/setinfo.Rd Normal file

@@ -0,0 +1,33 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\docType{methods}
\name{setinfo}
\alias{setinfo}
\alias{setinfo,xgb.DMatrix-method}
\title{Set information of an xgb.DMatrix object}
\usage{
setinfo(object, ...)
\S4method{setinfo}{xgb.DMatrix}(object, name, info)
}
\arguments{
\item{object}{Object of class "xgb.DMatrix"}
\item{name}{the name of the field to set}
\item{info}{the specific field of information to set}
\item{...}{other parameters}
}
\description{
Set information of an xgb.DMatrix object
}
\examples{
data(iris)
iris[,5] <- as.numeric(iris[,5]=='setosa')
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
labels <- getinfo(dtrain, 'label')
setinfo(dtrain, 'label', 1-labels)
labels2 <- getinfo(dtrain, 'label')
stopifnot(all(labels2 == 1-labels))
}

R-package/man/xgb.cv.Rd Normal file

@@ -0,0 +1,66 @@
% Generated by roxygen2 (4.0.1): do not edit by hand
\name{xgb.cv}
\alias{xgb.cv}
\title{Cross Validation}
\usage{
xgb.cv(params = list(), data, nrounds, nfold, label = NULL, showsd = TRUE,
metrics = list(), obj = NULL, feval = NULL, ...)
}
\arguments{
\item{params}{the list of parameters. Commonly used ones are:
\itemize{
\item \code{objective} objective function, common ones are
\itemize{
\item \code{reg:linear} linear regression
\item \code{binary:logistic} logistic regression for classification
}
\item \code{eta} step size of each boosting step
\item \code{max_depth} maximum depth of the tree
\item \code{nthread} number of threads used in training; if not set, all threads are used
}
See \url{https://github.com/tqchen/xgboost/wiki/Parameters} for
further details. See also inst/examples/demo.R for a walkthrough example in R.}
\item{data}{takes an \code{xgb.DMatrix} as the input.}
\item{nrounds}{the max number of iterations}
\item{nfold}{number of folds used}
\item{label}{optional field, used when \code{data} is a matrix}
\item{showsd}{boolean, whether to show the standard deviation of cross validation}
\item{metrics}{list of evaluation metrics to be used in cross validation,
when it is not specified, the evaluation metric is chosen according to objective function.
Possible options are:
\itemize{
\item \code{error} binary classification error rate
\item \code{rmse} Root mean square error
\item \code{logloss} negative log-likelihood function
\item \code{auc} Area under curve
\item \code{merror} Exact matching error, used to evaluate multi-class classification
}}
\item{obj}{customized objective function. Returns gradient and second order
gradient with given prediction and dtrain.}
\item{feval}{customized evaluation function. Returns
\code{list(metric='metric-name', value='metric-value')} with given
prediction and dtrain.}
\item{...}{other parameters to pass to \code{params}.}
}
\description{
The cross validation function of xgboost
}
\details{
This is the cross validation function for xgboost.
Parallelization is automatically enabled if OpenMP is present.
The number of threads can also be specified manually via the "nthread" parameter.
This function only accepts an \code{xgb.DMatrix} object as the input.
}
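As a usage sketch only (not part of the generated Rd file): the call below follows the signature documented above and reuses the iris-based DMatrix from the examples elsewhere in this commit; the parameter values and the choice of the error metric are illustrative assumptions.

library(xgboost)
data(iris)
iris[,5] <- as.numeric(iris[,5]=='setosa')
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
# 5-fold cross validation for 10 rounds; the result is expected to hold the
# per-round evaluation history assembled in R/xgb.cv.R above
res <- xgb.cv(params = list(objective = "binary:logistic", eta = 0.3, max_depth = 2),
              data = dtrain, nrounds = 10, nfold = 5,
              metrics = list("error"), showsd = TRUE)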


@@ -3,8 +3,8 @@
\alias{xgb.train}
\title{eXtreme Gradient Boosting Training}
\usage{
xgb.train(params = list(), dtrain, nrounds, watchlist = list(),
obj = NULL, feval = NULL, ...)
xgb.train(params = list(), data, nrounds, watchlist = list(), obj = NULL,
feval = NULL, verbose = 1, ...)
}
\arguments{
\item{params}{the list of parameters. Commonly used ones are:
@@ -22,7 +22,7 @@ xgb.train(params = list(), dtrain, nrounds, watchlist = list(),
See \url{https://github.com/tqchen/xgboost/wiki/Parameters} for
further details. See also inst/examples/demo.R for a walkthrough example in R.}
\item{dtrain}{takes an \code{xgb.DMatrix} as the input.}
\item{data}{takes an \code{xgb.DMatrix} as the input.}
\item{nrounds}{the max number of iterations}
@@ -39,6 +39,9 @@ gradient with given prediction and dtrain,}
\code{list(metric='metric-name', value='metric-value')} with given
prediction and dtrain.}
\item{verbose}{If 0, xgboost will stay silent. If 1, xgboost will print
information of performance. If 2, xgboost will print information about both
performance and construction progress.}
\item{...}{other parameters to pass to \code{params}.}
}
\description{
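To make the renamed data argument and the new verbose option concrete, here is a usage sketch based on the signature above; it is not taken from the package documentation, the hyper-parameter values are illustrative, and the watchlist is assumed to be a named list of xgb.DMatrix objects evaluated after each round.

library(xgboost)
data(iris)
iris[,5] <- as.numeric(iris[,5]=='setosa')
dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
# 'data' replaces the old 'dtrain' argument name; verbose = 1 prints
# per-round performance on the watchlist entries
bst <- xgb.train(params = list(objective = "binary:logistic", eta = 0.3, max_depth = 2),
                 data = dtrain, nrounds = 5,
                 watchlist = list(train = dtrain), verbose = 1)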


@@ -73,9 +73,10 @@ and ranking. The package is made to be extendible, so that users are also allowe
\end{enumerate}
\section{Example with iris}
\section{Example with Mushroom data}
In this section, we will illustrate some common usage of \verb@xgboost@.
In this section, we will illustrate some common usage of \verb@xgboost@. The
Mushroom data is from the UCI Machine Learning Repository \citep{Bache+Lichman:2013}.
<<Training and prediction with iris>>=
library(xgboost)


@@ -18,3 +18,13 @@
publisher={Institute of Mathematical Statistics}
}
@misc{Bache+Lichman:2013,
author = "K. Bache and M. Lichman",
year = "2013",
title = "{UCI} Machine Learning Repository",
url = "http://archive.ics.uci.edu/ml",
institution = "University of California, Irvine, School of Information and Computer Sciences"
}