Update R doc by roxygen2. (#5201)

Jiaming Yuan 2020-01-15 10:49:17 +08:00 committed by GitHub
parent 0184f2e9f7
commit 808f61081b
14 changed files with 179 additions and 48 deletions

View File

@@ -63,5 +63,5 @@ Imports:
     data.table (>= 1.9.6),
     magrittr (>= 1.5),
     stringi (>= 0.5.2)
-RoxygenNote: 6.1.0
+RoxygenNote: 7.0.2
 SystemRequirements: GNU make, C++11

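The version bump records which roxygen2 release regenerated these files. For reference, a sketch of the regeneration step (assuming it is run from the R-package/ directory of the repository):

    # Rebuild NAMESPACE and man/*.Rd from the roxygen comments. roxygen2 7.x
    # wraps long \usage signatures one argument per line, which accounts for
    # most of the churn in the hunks below.
    roxygen2::roxygenise()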
View File

@@ -4,8 +4,12 @@
 \alias{cb.early.stop}
 \title{Callback closure to activate the early stopping.}
 \usage{
-cb.early.stop(stopping_rounds, maximize = FALSE, metric_name = NULL,
-  verbose = TRUE)
+cb.early.stop(
+  stopping_rounds,
+  maximize = FALSE,
+  metric_name = NULL,
+  verbose = TRUE
+)
 }
 \arguments{
 \item{stopping_rounds}{The number of rounds with no improvement in

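Only the \usage layout changed here. As a reminder of how the callback is wired up, a minimal sketch using the agaricus demo data that ships with the package (passing early_stopping_rounds to xgb.train attaches cb.early.stop internally; the explicit form below is the equivalent):

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    data(agaricus.test, package = "xgboost")
    dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
    dtest  <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)

    # metric_name must name a watchlist metric, here "test-error".
    bst <- xgb.train(
      params = list(objective = "binary:logistic", eval_metric = "error"),
      data = dtrain, nrounds = 50,
      watchlist = list(train = dtrain, test = dtest),
      callbacks = list(cb.early.stop(stopping_rounds = 3, maximize = FALSE,
                                     metric_name = "test-error"))
    )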
View File

@@ -5,10 +5,20 @@
 \alias{predict.xgb.Booster.handle}
 \title{Predict method for eXtreme Gradient Boosting model}
 \usage{
-\method{predict}{xgb.Booster}(object, newdata, missing = NA,
-  outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE,
-  predcontrib = FALSE, approxcontrib = FALSE, predinteraction = FALSE,
-  reshape = FALSE, ...)
+\method{predict}{xgb.Booster}(
+  object,
+  newdata,
+  missing = NA,
+  outputmargin = FALSE,
+  ntreelimit = NULL,
+  predleaf = FALSE,
+  predcontrib = FALSE,
+  approxcontrib = FALSE,
+  predinteraction = FALSE,
+  reshape = FALSE,
+  training = FALSE,
+  ...
+)

 \method{predict}{xgb.Booster.handle}(object, ...)
 }

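Besides the reflowed signature, this hunk adds a new training argument (default FALSE); its semantics are not shown in the diff, but a flag like this typically matters for objectives such as DART whose predictions differ during training. A sketch of the established prediction modes, reusing bst and dtest from the snippet above:

    pred <- predict(bst, dtest)                       # response-scale predictions
    marg <- predict(bst, dtest, outputmargin = TRUE)  # raw, untransformed margins
    leaf <- predict(bst, dtest, predleaf = TRUE)      # per-tree leaf indices
    shap <- predict(bst, dtest, predcontrib = TRUE)   # per-feature contributions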
View File

@@ -87,6 +87,6 @@ accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
 # Here the accuracy was already good and is now perfect.
 cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now",
-          accuracy.after, "!\\n"))
+          accuracy.after, "!\n"))
 }

View File

@@ -4,11 +4,28 @@
 \alias{xgb.cv}
 \title{Cross Validation}
 \usage{
-xgb.cv(params = list(), data, nrounds, nfold, label = NULL, missing = NA,
-  prediction = FALSE, showsd = TRUE, metrics = list(), obj = NULL,
-  feval = NULL, stratified = TRUE, folds = NULL, verbose = TRUE,
-  print_every_n = 1L, early_stopping_rounds = NULL, maximize = NULL,
-  callbacks = list(), ...)
+xgb.cv(
+  params = list(),
+  data,
+  nrounds,
+  nfold,
+  label = NULL,
+  missing = NA,
+  prediction = FALSE,
+  showsd = TRUE,
+  metrics = list(),
+  obj = NULL,
+  feval = NULL,
+  stratified = TRUE,
+  folds = NULL,
+  train_folds = NULL,
+  verbose = TRUE,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  callbacks = list(),
+  ...
+)
 }
 \arguments{
 \item{params}{the list of parameters. Commonly used ones are:
@@ -69,6 +86,9 @@ by the values of outcome labels.}
 (each element must be a vector of test fold's indices). When folds are supplied,
 the \code{nfold} and \code{stratified} parameters are ignored.}

+\item{train_folds}{\code{list} specifying which indices to use for training. If \code{NULL}
+(the default) all indices not specified in \code{folds} will be used for training.}
+
 \item{verbose}{\code{boolean}, print the statistics during the process}

 \item{print_every_n}{Print each n-th iteration evaluation messages when \code{verbose>0}.

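The substantive change here is the new train_folds argument documented above. A minimal sketch of supplying custom folds (hypothetical index vectors; nfold and stratified are ignored once folds is given):

    n <- nrow(agaricus.train$data)
    test_folds  <- list(1:1500, 1501:3000)  # hypothetical held-out indices
    train_folds <- lapply(test_folds, function(idx) setdiff(seq_len(n), idx))

    cv <- xgb.cv(
      params = list(objective = "binary:logistic"),
      data = dtrain, nrounds = 10,
      folds = test_folds,         # custom test folds, one per CV round
      train_folds = train_folds   # new: explicit training indices per fold
    )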
View File

@@ -4,8 +4,14 @@
 \alias{xgb.dump}
 \title{Dump an xgboost model in text format.}
 \usage{
-xgb.dump(model, fname = NULL, fmap = "", with_stats = FALSE,
-  dump_format = c("text", "json"), ...)
+xgb.dump(
+  model,
+  fname = NULL,
+  fmap = "",
+  with_stats = FALSE,
+  dump_format = c("text", "json"),
+  ...
+)
 }
 \arguments{
 \item{model}{the model object.}

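A short sketch of the two dump formats (assuming the bst booster from earlier; with fname = NULL the dump is returned rather than written to disk):

    txt <- xgb.dump(bst, with_stats = TRUE)     # character vector, one node per line
    cat(head(txt), sep = "\n")
    js <- xgb.dump(bst, dump_format = "json")   # the same model dumped as JSON text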
View File

@@ -4,8 +4,14 @@
 \alias{xgb.importance}
 \title{Importance of features in a model.}
 \usage{
-xgb.importance(feature_names = NULL, model = NULL, trees = NULL,
-  data = NULL, label = NULL, target = NULL)
+xgb.importance(
+  feature_names = NULL,
+  model = NULL,
+  trees = NULL,
+  data = NULL,
+  label = NULL,
+  target = NULL
+)
 }
 \arguments{
 \item{feature_names}{character vector of feature names. If the model already

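Typical call, again assuming the bst booster from earlier (for tree models the result is a data.table with per-feature Gain, Cover and Frequency):

    imp <- xgb.importance(model = bst)
    head(imp)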
View File

@@ -4,8 +4,14 @@
 \alias{xgb.model.dt.tree}
 \title{Parse a boosted tree model text dump}
 \usage{
-xgb.model.dt.tree(feature_names = NULL, model = NULL, text = NULL,
-  trees = NULL, use_int_id = FALSE, ...)
+xgb.model.dt.tree(
+  feature_names = NULL,
+  model = NULL,
+  text = NULL,
+  trees = NULL,
+  use_int_id = FALSE,
+  ...
+)
 }
 \arguments{
 \item{feature_names}{character vector of feature names. If the model already

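A quick sketch of the parsed output (one data.table row per tree node, with columns such as Tree, Node, Feature, Split and the Yes/No/Missing child IDs):

    dt <- xgb.model.dt.tree(model = bst)
    head(dt)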
View File

@@ -5,11 +5,17 @@
 \alias{xgb.plot.deepness}
 \title{Plot model trees deepness}
 \usage{
-xgb.ggplot.deepness(model = NULL, which = c("2x1", "max.depth", "med.depth",
-  "med.weight"))
+xgb.ggplot.deepness(
+  model = NULL,
+  which = c("2x1", "max.depth", "med.depth", "med.weight")
+)

-xgb.plot.deepness(model = NULL, which = c("2x1", "max.depth", "med.depth",
-  "med.weight"), plot = TRUE, ...)
+xgb.plot.deepness(
+  model = NULL,
+  which = c("2x1", "max.depth", "med.depth", "med.weight"),
+  plot = TRUE,
+  ...
+)
 }
 \arguments{
 \item{model}{either an \code{xgb.Booster} model generated by the \code{xgb.train} function

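Usage sketch for the reflowed signatures (same assumed bst booster):

    xgb.plot.deepness(bst)                       # default "2x1" summary panels
    xgb.plot.deepness(bst, which = "max.depth")  # max leaf depth per tree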
View File

@@ -5,12 +5,25 @@
 \alias{xgb.plot.importance}
 \title{Plot feature importance as a bar graph}
 \usage{
-xgb.ggplot.importance(importance_matrix = NULL, top_n = NULL,
-  measure = NULL, rel_to_first = FALSE, n_clusters = c(1:10), ...)
+xgb.ggplot.importance(
+  importance_matrix = NULL,
+  top_n = NULL,
+  measure = NULL,
+  rel_to_first = FALSE,
+  n_clusters = c(1:10),
+  ...
+)

-xgb.plot.importance(importance_matrix = NULL, top_n = NULL,
-  measure = NULL, rel_to_first = FALSE, left_margin = 10, cex = NULL,
-  plot = TRUE, ...)
+xgb.plot.importance(
+  importance_matrix = NULL,
+  top_n = NULL,
+  measure = NULL,
+  rel_to_first = FALSE,
+  left_margin = 10,
+  cex = NULL,
+  plot = TRUE,
+  ...
+)
 }
 \arguments{
 \item{importance_matrix}{a \code{data.table} returned by \code{\link{xgb.importance}}.}

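A sketch pairing the two variants (the ggplot variant assumes ggplot2 is installed, and its n_clusters bar grouping assumes Ckmeans.1d.dp):

    imp <- xgb.importance(model = bst)
    xgb.plot.importance(imp, top_n = 10, measure = "Gain", rel_to_first = TRUE)
    gg <- xgb.ggplot.importance(imp, top_n = 10, n_clusters = 2)
    print(gg)   # the ggplot variant returns an object instead of drawing directly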
View File

@@ -4,8 +4,15 @@
 \alias{xgb.plot.multi.trees}
 \title{Project all trees on one tree and plot it}
 \usage{
-xgb.plot.multi.trees(model, feature_names = NULL, features_keep = 5,
-  plot_width = NULL, plot_height = NULL, render = TRUE, ...)
+xgb.plot.multi.trees(
+  model,
+  feature_names = NULL,
+  features_keep = 5,
+  plot_width = NULL,
+  plot_height = NULL,
+  render = TRUE,
+  ...
+)
 }
 \arguments{
 \item{model}{produced by the \code{xgb.train} function.}

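Usage sketch (rendering assumes the DiagrammeR package is installed):

    # Collapse all trees onto one representative tree, keeping the
    # 3 most frequent features per node position.
    xgb.plot.multi.trees(bst, features_keep = 3)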
View File

@@ -4,13 +4,33 @@
 \alias{xgb.plot.shap}
 \title{SHAP contribution dependency plots}
 \usage{
-xgb.plot.shap(data, shap_contrib = NULL, features = NULL, top_n = 1,
-  model = NULL, trees = NULL, target_class = NULL,
-  approxcontrib = FALSE, subsample = NULL, n_col = 1, col = rgb(0, 0, 1,
-  0.2), pch = ".", discrete_n_uniq = 5, discrete_jitter = 0.01,
-  ylab = "SHAP", plot_NA = TRUE, col_NA = rgb(0.7, 0, 1, 0.6),
-  pch_NA = ".", pos_NA = 1.07, plot_loess = TRUE, col_loess = 2,
-  span_loess = 0.5, which = c("1d", "2d"), plot = TRUE, ...)
+xgb.plot.shap(
+  data,
+  shap_contrib = NULL,
+  features = NULL,
+  top_n = 1,
+  model = NULL,
+  trees = NULL,
+  target_class = NULL,
+  approxcontrib = FALSE,
+  subsample = NULL,
+  n_col = 1,
+  col = rgb(0, 0, 1, 0.2),
+  pch = ".",
+  discrete_n_uniq = 5,
+  discrete_jitter = 0.01,
+  ylab = "SHAP",
+  plot_NA = TRUE,
+  col_NA = rgb(0.7, 0, 1, 0.6),
+  pch_NA = ".",
+  pos_NA = 1.07,
+  plot_loess = TRUE,
+  col_loess = 2,
+  span_loess = 0.5,
+  which = c("1d", "2d"),
+  plot = TRUE,
+  ...
+)
 }
 \arguments{
 \item{data}{data as a \code{matrix} or \code{dgCMatrix}.}

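A sketch of the 1-d dependency plots (agaricus.train$data is already a dgCMatrix, as the data argument requires):

    # One SHAP dependency panel for each of the 4 highest-contribution features.
    xgb.plot.shap(agaricus.train$data, model = bst, top_n = 4, n_col = 2)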
View File

@@ -4,9 +4,16 @@
 \alias{xgb.plot.tree}
 \title{Plot a boosted tree model}
 \usage{
-xgb.plot.tree(feature_names = NULL, model = NULL, trees = NULL,
-  plot_width = NULL, plot_height = NULL, render = TRUE,
-  show_node_id = FALSE, ...)
+xgb.plot.tree(
+  feature_names = NULL,
+  model = NULL,
+  trees = NULL,
+  plot_width = NULL,
+  plot_height = NULL,
+  render = TRUE,
+  show_node_id = FALSE,
+  ...
+)
 }
 \arguments{
 \item{feature_names}{names of each feature as a \code{character} vector.}

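Usage sketch (rendering again assumes DiagrammeR; tree indices are 0-based):

    xgb.plot.tree(model = bst, trees = 0:1, show_node_id = TRUE)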
View File

@@ -5,15 +5,41 @@
 \alias{xgboost}
 \title{eXtreme Gradient Boosting Training}
 \usage{
-xgb.train(params = list(), data, nrounds, watchlist = list(), obj = NULL,
-  feval = NULL, verbose = 1, print_every_n = 1L,
-  early_stopping_rounds = NULL, maximize = NULL, save_period = NULL,
-  save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ...)
+xgb.train(
+  params = list(),
+  data,
+  nrounds,
+  watchlist = list(),
+  obj = NULL,
+  feval = NULL,
+  verbose = 1,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  save_period = NULL,
+  save_name = "xgboost.model",
+  xgb_model = NULL,
+  callbacks = list(),
+  ...
+)

-xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
-  params = list(), nrounds, verbose = 1, print_every_n = 1L,
-  early_stopping_rounds = NULL, maximize = NULL, save_period = NULL,
-  save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ...)
+xgboost(
+  data = NULL,
+  label = NULL,
+  missing = NA,
+  weight = NULL,
+  params = list(),
+  nrounds,
+  verbose = 1,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  save_period = NULL,
+  save_name = "xgboost.model",
+  xgb_model = NULL,
+  callbacks = list(),
+  ...
+)
 }
 \arguments{
 \item{params}{the list of parameters.
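For completeness, a minimal training sketch against the two reformatted signatures (same assumed agaricus data as above; xgboost() is the simplified front-end to xgb.train()):

    bst2 <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                    params = list(objective = "binary:logistic", max_depth = 2),
                    nrounds = 10)

    bst3 <- xgb.train(params = list(objective = "binary:logistic"),
                      data = dtrain, nrounds = 10,
                      watchlist = list(train = dtrain))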