Update R doc by roxygen2. (#5201)

Jiaming Yuan 2020-01-15 10:49:17 +08:00 committed by GitHub
parent 0184f2e9f7
commit 808f61081b
14 changed files with 179 additions and 48 deletions


@@ -63,5 +63,5 @@ Imports:
data.table (>= 1.9.6),
magrittr (>= 1.5),
stringi (>= 0.5.2)
-RoxygenNote: 6.1.0
+RoxygenNote: 7.0.2
SystemRequirements: GNU make, C++11
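
The version bump from roxygen2 6.1.0 to 7.0.2 is what produces every reflowed \usage block in the hunks below: roxygen2 7 formats long signatures with one argument per line. As a minimal sketch of regenerating the docs (the command and working directory are assumptions; the commit does not record them):

    # Assumed workflow: run from the R package root (R-package/ in this repo)
    install.packages("roxygen2")   # needs >= 7.0.2 to reproduce this formatting
    roxygen2::roxygenise(".")      # rewrites man/*.Rd from the roxygen comments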


@@ -4,8 +4,12 @@
\alias{cb.early.stop}
\title{Callback closure to activate the early stopping.}
\usage{
-cb.early.stop(stopping_rounds, maximize = FALSE, metric_name = NULL,
-  verbose = TRUE)
+cb.early.stop(
+  stopping_rounds,
+  maximize = FALSE,
+  metric_name = NULL,
+  verbose = TRUE
+)
}
\arguments{
\item{stopping_rounds}{The number of rounds with no improvement in


@@ -5,10 +5,20 @@
\alias{predict.xgb.Booster.handle}
\title{Predict method for eXtreme Gradient Boosting model}
\usage{
-\method{predict}{xgb.Booster}(object, newdata, missing = NA,
-  outputmargin = FALSE, ntreelimit = NULL, predleaf = FALSE,
-  predcontrib = FALSE, approxcontrib = FALSE, predinteraction = FALSE,
-  reshape = FALSE, ...)
+\method{predict}{xgb.Booster}(
+  object,
+  newdata,
+  missing = NA,
+  outputmargin = FALSE,
+  ntreelimit = NULL,
+  predleaf = FALSE,
+  predcontrib = FALSE,
+  approxcontrib = FALSE,
+  predinteraction = FALSE,
+  reshape = FALSE,
+  training = FALSE,
+  ...
+)
\method{predict}{xgb.Booster.handle}(object, ...)
}
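
Beyond the reflow, the predict method gains a new training argument here (its \arguments entry falls outside the hunk shown). A hedged sketch of the call, assuming the default training = FALSE keeps the ordinary prediction path; the data and settings are illustrative:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 2, objective = "binary:logistic")
    p <- predict(bst, agaricus.train$data, training = FALSE)  # new flag, default FALSE
    head(p)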


@@ -87,6 +87,6 @@ accuracy.after <- sum((predict(bst, new.dtest) >= 0.5) == agaricus.test$label) /
# Here the accuracy was already good and is now perfect.
cat(paste("The accuracy was", accuracy.before, "before adding leaf features and it is now",
-  accuracy.after, "!\\n"))
+  accuracy.after, "!\n"))
}
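
This hunk is a content fix rather than a reflow: the doubled backslash in the example's newline escape is dropped (Rd files add their own layer of backslash escaping on top of R's). For reference, the two spellings behave differently in plain R:

    cat("now perfect!\n")    # ends the message with an actual newline
    cat("now perfect!\\n")   # prints the two characters \n literally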


@@ -4,11 +4,28 @@
\alias{xgb.cv}
\title{Cross Validation}
\usage{
-xgb.cv(params = list(), data, nrounds, nfold, label = NULL, missing = NA,
-  prediction = FALSE, showsd = TRUE, metrics = list(), obj = NULL,
-  feval = NULL, stratified = TRUE, folds = NULL, verbose = TRUE,
-  print_every_n = 1L, early_stopping_rounds = NULL, maximize = NULL,
-  callbacks = list(), ...)
+xgb.cv(
+  params = list(),
+  data,
+  nrounds,
+  nfold,
+  label = NULL,
+  missing = NA,
+  prediction = FALSE,
+  showsd = TRUE,
+  metrics = list(),
+  obj = NULL,
+  feval = NULL,
+  stratified = TRUE,
+  folds = NULL,
+  train_folds = NULL,
+  verbose = TRUE,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  callbacks = list(),
+  ...
+)
}
\arguments{
\item{params}{the list of parameters. Commonly used ones are:
@@ -69,6 +86,9 @@ by the values of outcome labels.}
(each element must be a vector of test fold's indices). When folds are supplied,
the \code{nfold} and \code{stratified} parameters are ignored.}
+\item{train_folds}{\code{list} list specifying which indices to use for training. If \code{NULL}
+(the default) all indices not specified in \code{folds} will be used for training.}
\item{verbose}{\code{boolean}, print the statistics during the process}
\item{print_every_n}{Print each n-th iteration evaluation messages when \code{verbose>0}.
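
The substantive change in this file is the new train_folds argument documented above. A hedged sketch of pairing it with explicit test folds; the index splits are illustrative, not taken from the source:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
    n <- nrow(agaricus.train$data)
    test_folds  <- list(1:2000, 2001:4000)
    train_folds <- list(2001:n, c(1:2000, 4001:n))  # if NULL, the complement of each test fold is used
    cv <- xgb.cv(params = list(objective = "binary:logistic"), data = dtrain,
                 nrounds = 3, nfold = length(test_folds),
                 folds = test_folds, train_folds = train_folds)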


@@ -4,8 +4,14 @@
\alias{xgb.dump}
\title{Dump an xgboost model in text format.}
\usage{
-xgb.dump(model, fname = NULL, fmap = "", with_stats = FALSE,
-  dump_format = c("text", "json"), ...)
+xgb.dump(
+  model,
+  fname = NULL,
+  fmap = "",
+  with_stats = FALSE,
+  dump_format = c("text", "json"),
+  ...
+)
}
\arguments{
\item{model}{the model object.}
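
A short usage sketch of the signature above; the model and arguments are illustrative. Per the defaults shown, fname = NULL returns the dump as a character vector instead of writing a file:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 2, objective = "binary:logistic")
    txt  <- xgb.dump(bst, with_stats = TRUE)     # text dump with split statistics
    json <- xgb.dump(bst, dump_format = "json")  # JSON, per the dump_format choices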


@@ -4,8 +4,14 @@
\alias{xgb.importance}
\title{Importance of features in a model.}
\usage{
-xgb.importance(feature_names = NULL, model = NULL, trees = NULL,
-  data = NULL, label = NULL, target = NULL)
+xgb.importance(
+  feature_names = NULL,
+  model = NULL,
+  trees = NULL,
+  data = NULL,
+  label = NULL,
+  target = NULL
+)
}
\arguments{
\item{feature_names}{character vector of feature names. If the model already
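
A minimal sketch of the signature above, assuming the booster already carries feature names so only model needs to be supplied:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 2, objective = "binary:logistic")
    imp <- xgb.importance(model = bst)  # Gain/Cover/Frequency per feature for tree models
    head(imp)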


@@ -4,8 +4,14 @@
\alias{xgb.model.dt.tree}
\title{Parse a boosted tree model text dump}
\usage{
-xgb.model.dt.tree(feature_names = NULL, model = NULL, text = NULL,
-  trees = NULL, use_int_id = FALSE, ...)
+xgb.model.dt.tree(
+  feature_names = NULL,
+  model = NULL,
+  text = NULL,
+  trees = NULL,
+  use_int_id = FALSE,
+  ...
+)
}
\arguments{
\item{feature_names}{character vector of feature names. If the model already
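
A hedged sketch of the signature above; passing a fitted booster as model (rather than a text dump) is the common path, and the settings are illustrative:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 2, objective = "binary:logistic")
    dt <- xgb.model.dt.tree(model = bst)  # one data.table row per tree node
    head(dt)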


@@ -5,11 +5,17 @@
\alias{xgb.plot.deepness}
\title{Plot model trees deepness}
\usage{
-xgb.ggplot.deepness(model = NULL, which = c("2x1", "max.depth", "med.depth",
-  "med.weight"))
+xgb.ggplot.deepness(
+  model = NULL,
+  which = c("2x1", "max.depth", "med.depth", "med.weight")
+)
-xgb.plot.deepness(model = NULL, which = c("2x1", "max.depth", "med.depth",
-  "med.weight"), plot = TRUE, ...)
+xgb.plot.deepness(
+  model = NULL,
+  which = c("2x1", "max.depth", "med.depth", "med.weight"),
+  plot = TRUE,
+  ...
+)
}
\arguments{
\item{model}{either an \code{xgb.Booster} model generated by the \code{xgb.train} function
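
A brief sketch of the plot call; a slightly deeper model (illustrative settings) makes the depth panels more informative:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 10, max_depth = 4, objective = "binary:logistic")
    xgb.plot.deepness(model = bst, which = "2x1")  # default two-panel view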


@@ -5,12 +5,25 @@
\alias{xgb.plot.importance}
\title{Plot feature importance as a bar graph}
\usage{
-xgb.ggplot.importance(importance_matrix = NULL, top_n = NULL,
-  measure = NULL, rel_to_first = FALSE, n_clusters = c(1:10), ...)
+xgb.ggplot.importance(
+  importance_matrix = NULL,
+  top_n = NULL,
+  measure = NULL,
+  rel_to_first = FALSE,
+  n_clusters = c(1:10),
+  ...
+)
-xgb.plot.importance(importance_matrix = NULL, top_n = NULL,
-  measure = NULL, rel_to_first = FALSE, left_margin = 10, cex = NULL,
-  plot = TRUE, ...)
+xgb.plot.importance(
+  importance_matrix = NULL,
+  top_n = NULL,
+  measure = NULL,
+  rel_to_first = FALSE,
+  left_margin = 10,
+  cex = NULL,
+  plot = TRUE,
+  ...
+)
}
\arguments{
\item{importance_matrix}{a \code{data.table} returned by \code{\link{xgb.importance}}.}
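
A short sketch chaining xgb.importance into the plot above; the top_n and measure values are illustrative:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 2, objective = "binary:logistic")
    imp <- xgb.importance(model = bst)
    xgb.plot.importance(imp, top_n = 5, measure = "Gain")  # base-graphics bar plot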


@@ -4,8 +4,15 @@
\alias{xgb.plot.multi.trees}
\title{Project all trees on one tree and plot it}
\usage{
-xgb.plot.multi.trees(model, feature_names = NULL, features_keep = 5,
-  plot_width = NULL, plot_height = NULL, render = TRUE, ...)
+xgb.plot.multi.trees(
+  model,
+  feature_names = NULL,
+  features_keep = 5,
+  plot_width = NULL,
+  plot_height = NULL,
+  render = TRUE,
+  ...
+)
}
\arguments{
\item{model}{produced by the \code{xgb.train} function.}
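
A minimal sketch of the signature above; features_keep = 3 is illustrative, and rendering assumes the DiagrammeR package is installed:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 5, max_depth = 3, objective = "binary:logistic")
    xgb.plot.multi.trees(model = bst, features_keep = 3)  # needs DiagrammeR when render = TRUE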


@@ -4,13 +4,33 @@
\alias{xgb.plot.shap}
\title{SHAP contribution dependency plots}
\usage{
-xgb.plot.shap(data, shap_contrib = NULL, features = NULL, top_n = 1,
-  model = NULL, trees = NULL, target_class = NULL,
-  approxcontrib = FALSE, subsample = NULL, n_col = 1, col = rgb(0, 0, 1,
-  0.2), pch = ".", discrete_n_uniq = 5, discrete_jitter = 0.01,
-  ylab = "SHAP", plot_NA = TRUE, col_NA = rgb(0.7, 0, 1, 0.6),
-  pch_NA = ".", pos_NA = 1.07, plot_loess = TRUE, col_loess = 2,
-  span_loess = 0.5, which = c("1d", "2d"), plot = TRUE, ...)
+xgb.plot.shap(
+  data,
+  shap_contrib = NULL,
+  features = NULL,
+  top_n = 1,
+  model = NULL,
+  trees = NULL,
+  target_class = NULL,
+  approxcontrib = FALSE,
+  subsample = NULL,
+  n_col = 1,
+  col = rgb(0, 0, 1, 0.2),
+  pch = ".",
+  discrete_n_uniq = 5,
+  discrete_jitter = 0.01,
+  ylab = "SHAP",
+  plot_NA = TRUE,
+  col_NA = rgb(0.7, 0, 1, 0.6),
+  pch_NA = ".",
+  pos_NA = 1.07,
+  plot_loess = TRUE,
+  col_loess = 2,
+  span_loess = 0.5,
+  which = c("1d", "2d"),
+  plot = TRUE,
+  ...
+)
}
\arguments{
\item{data}{data as a \code{matrix} or \code{dgCMatrix}.}
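
A hedged sketch of a 1d dependency plot for the two highest-contribution features; all settings are illustrative:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 5, max_depth = 3, objective = "binary:logistic")
    xgb.plot.shap(data = agaricus.train$data, model = bst, top_n = 2, n_col = 2)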


@@ -4,9 +4,16 @@
\alias{xgb.plot.tree}
\title{Plot a boosted tree model}
\usage{
-xgb.plot.tree(feature_names = NULL, model = NULL, trees = NULL,
-  plot_width = NULL, plot_height = NULL, render = TRUE,
-  show_node_id = FALSE, ...)
+xgb.plot.tree(
+  feature_names = NULL,
+  model = NULL,
+  trees = NULL,
+  plot_width = NULL,
+  plot_height = NULL,
+  render = TRUE,
+  show_node_id = FALSE,
+  ...
+)
}
\arguments{
\item{feature_names}{names of each feature as a \code{character} vector.}
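
A minimal sketch of plotting a single tree; the tree index is illustrative (zero-based indexing is an assumption consistent with other xgboost APIs), and rendering needs DiagrammeR:

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   nrounds = 2, max_depth = 3, objective = "binary:logistic")
    xgb.plot.tree(model = bst, trees = 0)  # first tree; needs DiagrammeR to render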


@@ -5,15 +5,41 @@
\alias{xgboost}
\title{eXtreme Gradient Boosting Training}
\usage{
-xgb.train(params = list(), data, nrounds, watchlist = list(), obj = NULL,
-  feval = NULL, verbose = 1, print_every_n = 1L,
-  early_stopping_rounds = NULL, maximize = NULL, save_period = NULL,
-  save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ...)
+xgb.train(
+  params = list(),
+  data,
+  nrounds,
+  watchlist = list(),
+  obj = NULL,
+  feval = NULL,
+  verbose = 1,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  save_period = NULL,
+  save_name = "xgboost.model",
+  xgb_model = NULL,
+  callbacks = list(),
+  ...
+)
-xgboost(data = NULL, label = NULL, missing = NA, weight = NULL,
-  params = list(), nrounds, verbose = 1, print_every_n = 1L,
-  early_stopping_rounds = NULL, maximize = NULL, save_period = NULL,
-  save_name = "xgboost.model", xgb_model = NULL, callbacks = list(), ...)
+xgboost(
+  data = NULL,
+  label = NULL,
+  missing = NA,
+  weight = NULL,
+  params = list(),
+  nrounds,
+  verbose = 1,
+  print_every_n = 1L,
+  early_stopping_rounds = NULL,
+  maximize = NULL,
+  save_period = NULL,
+  save_name = "xgboost.model",
+  xgb_model = NULL,
+  callbacks = list(),
+  ...
+)
}
\arguments{
\item{params}{the list of parameters.
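
To close, a hedged sketch exercising both signatures above with illustrative parameters; xgboost() is the convenience wrapper around xgb.train():

    library(xgboost)
    data(agaricus.train, package = "xgboost")
    dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
    params <- list(objective = "binary:logistic", max_depth = 2, eta = 1)
    bst  <- xgb.train(params = params, data = dtrain, nrounds = 2,
                      watchlist = list(train = dtrain))  # prints train-error each round
    bst2 <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                    params = params, nrounds = 2)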