% Generated by roxygen2 (4.1.0): do not edit by hand
% Please edit documentation in R/xgb.plot.tree.R
\name{xgb.plot.tree}
\alias{xgb.plot.tree}
\title{Plot a boosted tree model}
\usage{
xgb.plot.tree(feature_names = NULL, filename_dump = NULL,
n_first_tree = NULL)
}
\arguments{
\item{feature_names}{names of each feature as a character vector. They can be extracted from a sparse matrix (see example). If the model dump already contains feature names, this argument should be \code{NULL}.}
\item{filename_dump}{the path to the text file storing the model. The model dump must include the gain per feature and per tree (\code{with.stats = TRUE} in function \code{xgb.dump}).}
\item{n_first_tree}{limit the plot to the first \code{n_first_tree} trees of the model.}
}
\value{
A rendered plot of the trees of the model, generated with the Mermaid JS library.
}
\description{
Read an xgboost model text dump and plot its trees.
Plotting only works for boosted tree models (not linear models).
}
\details{
This function plots the trees that were grown during boosting.
It uses the Mermaid JS library for rendering.
Rendering can be slow for very large models.
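For such models, a minimal sketch of limiting the rendering cost with the
\code{n_first_tree} argument (here \code{feat_names} is assumed to hold the
feature names and \code{'xgb.model.dump'} a dump file produced with
\code{xgb.dump}):

\preformatted{
# sketch: feat_names and 'xgb.model.dump' are illustrative placeholders
xgb.plot.tree(feat_names, 'xgb.model.dump', n_first_tree = 1)
}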
}
\examples{
data(agaricus.train, package='xgboost')
# The dataset is a list with two items: a sparse matrix and labels (the outcome column to be learned).
# Each column of the sparse matrix is a feature in one-hot encoding format.
train <- agaricus.train
bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
               eta = 1, nround = 2, objective = "binary:logistic")
xgb.dump(bst, 'xgb.model.dump', with.stats = TRUE)
# agaricus.train$data@Dimnames[[2]] represents the column names of the sparse matrix.
xgb.plot.tree(agaricus.train$data@Dimnames[[2]], 'xgb.model.dump')
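
# A hedged alternative sketch: colnames() on the sparse matrix gives the same
# feature names as the @Dimnames accessor above, and n_first_tree restricts the
# plot to the first tree(s) when rendering a large model is slow.
xgb.plot.tree(colnames(agaricus.train$data), 'xgb.model.dump', n_first_tree = 1)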
}