Some fixes for Travis #Rstat

This commit is contained in:
pommedeterresautee
2015-11-30 15:47:10 +01:00
parent 96c43cf197
commit 6e370b90fd
9 changed files with 25 additions and 29 deletions

View File

@@ -52,7 +52,7 @@ bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
eta = 1, nthread = 2, nround = 2, objective = "binary:logistic")
#agaricus.test$data@Dimnames[[2]] represents the column names of the sparse matrix.
xgb.model.dt.tree(agaricus.train$data@Dimnames[[2]], model = bst)
xgb.model.dt.tree(feature_names = agaricus.train$data@Dimnames[[2]], model = bst)
}

View File

@@ -35,7 +35,7 @@ This function is inspired by this blog post \url{http://aysent.github.io/2015/11
\examples{
data(agaricus.train, package='xgboost')
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label, max.depth = 15,
bst <- xgboost(data = agaricus.train$data, max.depth = 15,
eta = 1, nthread = 2, nround = 30, objective = "binary:logistic",
min_child_weight = 50)

View File

@@ -4,12 +4,14 @@
\alias{xgb.plot.multi.trees}
\title{Project all trees on one tree and plot it}
\usage{
xgb.plot.multi.trees(model, names, features.keep = 5, plot.width = NULL,
plot.height = NULL)
xgb.plot.multi.trees(model, feature_names = NULL, features.keep = 5,
plot.width = NULL, plot.height = NULL)
}
\arguments{
\item{model}{dump generated by the \code{xgb.train} function. Avoid the creation of a dump file.}
\item{feature_names}{names of each feature as a character vector. Can be extracted from a sparse matrix (see example). If model dump already contains feature names, this argument should be \code{NULL}.}
\item{features.keep}{number of features to keep in each position of the multi trees.}
\item{plot.width}{width in pixels of the graph to produce}
@@ -49,7 +51,7 @@ bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label, max.dep
eta = 1, nthread = 2, nround = 30, objective = "binary:logistic",
min_child_weight = 50)
p <- xgb.plot.multi.trees(model = bst, names = agaricus.train$data@Dimnames[[2]], 3)
p <- xgb.plot.multi.trees(model = bst, feature_names = agaricus.train$data@Dimnames[[2]], features.keep = 3)
print(p)
}

View File

@@ -4,14 +4,12 @@
\alias{xgb.plot.tree}
\title{Plot a boosted tree model}
\usage{
xgb.plot.tree(feature_names = NULL, filename_dump = NULL, model = NULL,
n_first_tree = NULL, plot.width = NULL, plot.height = NULL)
xgb.plot.tree(feature_names = NULL, model = NULL, n_first_tree = NULL,
plot.width = NULL, plot.height = NULL)
}
\arguments{
\item{feature_names}{names of each feature as a character vector. Can be extracted from a sparse matrix (see example). If model dump already contains feature names, this argument should be \code{NULL}.}
\item{filename_dump}{the path to the text file storing the model. Model dump must include the gain per feature and per tree (parameter \code{with.stats = T} in function \code{xgb.dump}). Possible to provide a model directly (see \code{model} argument).}
\item{model}{generated by the \code{xgb.train} function. Avoid the creation of a dump file.}
\item{n_first_tree}{limit the plot to the n first trees. If \code{NULL}, all trees of the model are plotted. Performance can be low for huge models.}
@@ -51,7 +49,7 @@ bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
eta = 1, nthread = 2, nround = 2, objective = "binary:logistic")
#agaricus.test$data@Dimnames[[2]] represents the column names of the sparse matrix.
xgb.plot.tree(agaricus.train$data@Dimnames[[2]], model = bst)
xgb.plot.tree(feature_names = agaricus.train$data@Dimnames[[2]], model = bst)
}