CRAN Submission for 0.71.1 (#3311)
* fix for CRAN manual checks * fix for CRAN manual checks * pass local check * fix variable naming style * Adding Philip's record
This commit is contained in:
parent
49b9f39818
commit
098075b81b
@ -7,8 +7,8 @@ Committers
|
|||||||
Committers are people who have made substantial contributions to the project and are granted write access to the project.
|
Committers are people who have made substantial contributions to the project and are granted write access to the project.
|
||||||
* [Tianqi Chen](https://github.com/tqchen), University of Washington
|
* [Tianqi Chen](https://github.com/tqchen), University of Washington
|
||||||
- Tianqi is a PhD working on large-scale machine learning, he is the creator of the project.
|
- Tianqi is a PhD working on large-scale machine learning, he is the creator of the project.
|
||||||
* [Tong He](https://github.com/hetong007), Simon Fraser University
|
* [Tong He](https://github.com/hetong007), Amazon AI
|
||||||
- Tong is a master student working on data mining, he is the maintainer of xgboost R package.
|
- Tong is an applied scientist in Amazon AI, he is the maintainer of xgboost R package.
|
||||||
* [Vadim Khotilovich](https://github.com/khotilov)
|
* [Vadim Khotilovich](https://github.com/khotilov)
|
||||||
- Vadim contributes many improvements in R and core packages.
|
- Vadim contributes many improvements in R and core packages.
|
||||||
* [Bing Xu](https://github.com/antinucleon)
|
* [Bing Xu](https://github.com/antinucleon)
|
||||||
|
|||||||
@ -2,7 +2,7 @@ Package: xgboost
|
|||||||
Type: Package
|
Type: Package
|
||||||
Title: Extreme Gradient Boosting
|
Title: Extreme Gradient Boosting
|
||||||
Version: 0.71.1
|
Version: 0.71.1
|
||||||
Date: 2018-04-11
|
Date: 2018-05-11
|
||||||
Authors@R: c(
|
Authors@R: c(
|
||||||
person("Tianqi", "Chen", role = c("aut"),
|
person("Tianqi", "Chen", role = c("aut"),
|
||||||
email = "tianqi.tchen@gmail.com"),
|
email = "tianqi.tchen@gmail.com"),
|
||||||
@ -14,7 +14,20 @@ Authors@R: c(
|
|||||||
email = "khotilovich@gmail.com"),
|
email = "khotilovich@gmail.com"),
|
||||||
person("Yuan", "Tang", role = c("aut"),
|
person("Yuan", "Tang", role = c("aut"),
|
||||||
email = "terrytangyuan@gmail.com",
|
email = "terrytangyuan@gmail.com",
|
||||||
comment = c(ORCID = "0000-0001-5243-233X"))
|
comment = c(ORCID = "0000-0001-5243-233X")),
|
||||||
|
person("Hyunsu", "Cho", role = c("aut"),
|
||||||
|
email = "chohyu01@cs.washington.edu"),
|
||||||
|
person("Kailong", "Chen", role = c("aut")),
|
||||||
|
person("Rory", "Mitchell", role = c("aut")),
|
||||||
|
person("Ignacio", "Cano", role = c("aut")),
|
||||||
|
person("Tianyi", "Zhou", role = c("aut")),
|
||||||
|
person("Mu", "Li", role = c("aut")),
|
||||||
|
person("Junyuan", "Xie", role = c("aut")),
|
||||||
|
person("Min", "Lin", role = c("aut")),
|
||||||
|
person("Yifeng", "Geng", role = c("aut")),
|
||||||
|
person("Yutian", "Li", role = c("aut")),
|
||||||
|
person("XGBoost contributors", role = c("cph"),
|
||||||
|
comment = "base XGBoost implementation")
|
||||||
)
|
)
|
||||||
Description: Extreme Gradient Boosting, which is an efficient implementation
|
Description: Extreme Gradient Boosting, which is an efficient implementation
|
||||||
of the gradient boosting framework from Chen & Guestrin (2016) <doi:10.1145/2939672.2939785>.
|
of the gradient boosting framework from Chen & Guestrin (2016) <doi:10.1145/2939672.2939785>.
|
||||||
@ -28,6 +41,7 @@ Description: Extreme Gradient Boosting, which is an efficient implementation
|
|||||||
License: Apache License (== 2.0) | file LICENSE
|
License: Apache License (== 2.0) | file LICENSE
|
||||||
URL: https://github.com/dmlc/xgboost
|
URL: https://github.com/dmlc/xgboost
|
||||||
BugReports: https://github.com/dmlc/xgboost/issues
|
BugReports: https://github.com/dmlc/xgboost/issues
|
||||||
|
NeedsCompilation: yes
|
||||||
VignetteBuilder: knitr
|
VignetteBuilder: knitr
|
||||||
Suggests:
|
Suggests:
|
||||||
knitr,
|
knitr,
|
||||||
|
|||||||
@ -691,11 +691,6 @@ cb.gblinear.history <- function(sparse=FALSE) {
|
|||||||
#' For an \code{xgb.cv} result, a list of such matrices is returned with the elements
|
#' For an \code{xgb.cv} result, a list of such matrices is returned with the elements
|
||||||
#' corresponding to CV folds.
|
#' corresponding to CV folds.
|
||||||
#'
|
#'
|
||||||
#' @examples
|
|
||||||
#' \dontrun{
|
|
||||||
#' See \code{\link{cv.gblinear.history}}
|
|
||||||
#' }
|
|
||||||
#'
|
|
||||||
#' @export
|
#' @export
|
||||||
xgb.gblinear.history <- function(model, class_index = NULL) {
|
xgb.gblinear.history <- function(model, class_index = NULL) {
|
||||||
|
|
||||||
|
|||||||
@ -30,7 +30,8 @@
|
|||||||
#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
|
#' bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
|
||||||
#' eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
#' eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
||||||
#' # save the model in file 'xgb.model.dump'
|
#' # save the model in file 'xgb.model.dump'
|
||||||
#' xgb.dump(bst, 'xgb.model.dump', with_stats = TRUE)
|
#' dump_path = file.path(tempdir(), 'model.dump')
|
||||||
|
#' xgb.dump(bst, dump_path, with_stats = TRUE)
|
||||||
#'
|
#'
|
||||||
#' # print the model without saving it to a file
|
#' # print the model without saving it to a file
|
||||||
#' print(xgb.dump(bst, with_stats = TRUE))
|
#' print(xgb.dump(bst, with_stats = TRUE))
|
||||||
|
|||||||
@ -99,7 +99,8 @@ err <- as.numeric(sum(as.integer(pred > 0.5) != label))/length(label)
|
|||||||
print(paste("test-error=", err))
|
print(paste("test-error=", err))
|
||||||
|
|
||||||
# You can dump the tree you learned using xgb.dump into a text file
|
# You can dump the tree you learned using xgb.dump into a text file
|
||||||
xgb.dump(bst, "dump.raw.txt", with_stats = T)
|
dump_path = file.path(tempdir(), 'dump.raw.txt')
|
||||||
|
xgb.dump(bst, dump_path, with_stats = T)
|
||||||
|
|
||||||
# Finally, you can check which features are the most important.
|
# Finally, you can check which features are the most important.
|
||||||
print("Most important features (look at column Gain):")
|
print("Most important features (look at column Gain):")
|
||||||
|
|||||||
@ -99,7 +99,7 @@ An object of class \code{xgb.cv.synchronous} with the following elements:
|
|||||||
\item \code{params} parameters that were passed to the xgboost library. Note that it does not
|
\item \code{params} parameters that were passed to the xgboost library. Note that it does not
|
||||||
capture parameters changed by the \code{\link{cb.reset.parameters}} callback.
|
capture parameters changed by the \code{\link{cb.reset.parameters}} callback.
|
||||||
\item \code{callbacks} callback functions that were either automatically assigned or
|
\item \code{callbacks} callback functions that were either automatically assigned or
|
||||||
explicitely passed.
|
explicitly passed.
|
||||||
\item \code{evaluation_log} evaluation history stored as a \code{data.table} with the
|
\item \code{evaluation_log} evaluation history stored as a \code{data.table} with the
|
||||||
first column corresponding to iteration number and the rest corresponding to the
|
first column corresponding to iteration number and the rest corresponding to the
|
||||||
CV-based evaluation means and standard deviations for the training and test CV-sets.
|
CV-based evaluation means and standard deviations for the training and test CV-sets.
|
||||||
|
|||||||
@ -44,7 +44,8 @@ test <- agaricus.test
|
|||||||
bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
|
bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
|
||||||
eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
|
||||||
# save the model in file 'xgb.model.dump'
|
# save the model in file 'xgb.model.dump'
|
||||||
xgb.dump(bst, 'xgb.model.dump', with_stats = TRUE)
|
dump.path = file.path(tempdir(), 'model.dump')
|
||||||
|
xgb.dump(bst, dump.path, with_stats = TRUE)
|
||||||
|
|
||||||
# print the model without saving it to a file
|
# print the model without saving it to a file
|
||||||
print(xgb.dump(bst, with_stats = TRUE))
|
print(xgb.dump(bst, with_stats = TRUE))
|
||||||
|
|||||||
@ -27,9 +27,3 @@ A helper function to extract the matrix of linear coefficients' history
|
|||||||
from a gblinear model created while using the \code{cb.gblinear.history()}
|
from a gblinear model created while using the \code{cb.gblinear.history()}
|
||||||
callback.
|
callback.
|
||||||
}
|
}
|
||||||
\examples{
|
|
||||||
\dontrun{
|
|
||||||
See \\code{\\link{cv.gblinear.history}}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|||||||
@ -42,9 +42,10 @@ mbst.GLM <- xgboost(data = as.matrix(iris[, -5]), label = mlabel, verbose = 0,
|
|||||||
|
|
||||||
test_that("xgb.dump works", {
|
test_that("xgb.dump works", {
|
||||||
expect_length(xgb.dump(bst.Tree), 200)
|
expect_length(xgb.dump(bst.Tree), 200)
|
||||||
expect_true(xgb.dump(bst.Tree, 'xgb.model.dump', with_stats = T))
|
dump_file = file.path(tempdir(), 'xgb.model.dump')
|
||||||
expect_true(file.exists('xgb.model.dump'))
|
expect_true(xgb.dump(bst.Tree, dump_file, with_stats = T))
|
||||||
expect_gt(file.size('xgb.model.dump'), 8000)
|
expect_true(file.exists(dump_file))
|
||||||
|
expect_gt(file.size(dump_file), 8000)
|
||||||
|
|
||||||
# JSON format
|
# JSON format
|
||||||
dmp <- xgb.dump(bst.Tree, dump_format = "json")
|
dmp <- xgb.dump(bst.Tree, dump_format = "json")
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user