resolving not-CRAN issues

hetong007 2015-01-20 15:51:42 -08:00
parent eb01acfad8
commit 6901e90730
11 changed files with 15 additions and 12 deletions

View File

@@ -25,5 +25,4 @@ Imports:
     data.table (>= 1.9),
     magrittr (>= 1.5),
     stringr,
-    DiagrammeR,
-    vcd
+    DiagrammeR
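Dropping vcd from Imports removes a hard dependency that was only needed by a demo, which is a common CRAN cleanup. A minimal sketch of how demo code can treat vcd as a soft dependency instead; requireNamespace is base R, and the use of vcd's Arthritis dataset is an assumption based on the create_sparse_matrix demo further down:

    # Sketch: treat vcd as a soft dependency rather than a hard Import.
    # requireNamespace() returns FALSE instead of failing when vcd is absent.
    if (requireNamespace("vcd", quietly = TRUE)) {
      data(Arthritis, package = "vcd")  # categorical demo data (assumed here)
      head(Arthritis)
    } else {
      message("Package 'vcd' not installed; skipping the categorical-data demo.")
    }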

View File

@@ -11,10 +11,10 @@ setClass("xgb.Booster")
 #' value of sum of functions, when outputmargin=TRUE, the prediction is
 #' untransformed margin value. In logistic regression, outputmargin=T will
 #' output value before logistic transformation.
-#' @param predleaf whether predict leaf index instead. If set to TRUE, the output will be a matrix object.
 #' @param ntreelimit limit number of trees used in prediction, this parameter is
 #' only valid for gbtree, but not for gblinear. set it to be value bigger
 #' than 0. It will use all trees by default.
+#' @param predleaf whether predict leaf index instead. If set to TRUE, the output will be a matrix object.
 #' @examples
 #' data(agaricus.train, package='xgboost')
 #' data(agaricus.test, package='xgboost')
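For context, a minimal sketch of what the relocated predleaf parameter does, using the agaricus data the example above already loads: with predleaf = TRUE, predict returns a matrix of leaf indices rather than predictions, one row per observation and one column per tree.

    require(xgboost)
    data(agaricus.train, package='xgboost')
    data(agaricus.test, package='xgboost')
    bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
                   max.depth = 2, eta = 1, nround = 2,
                   objective = "binary:logistic")
    # Leaf indices instead of predicted probabilities: one column per tree.
    leaves <- predict(bst, agaricus.test$data, predleaf = TRUE)
    dim(leaves)   # n_test rows x nround columns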

View File

@@ -32,7 +32,7 @@
 #' @param nfold number of folds used
 #' @param label option field, when data is Matrix
 #' @param missing Missing is only used when input is dense matrix, pick a float
-# value that represents missing value. Sometime a data use 0 or other extreme value to represents missing values.
+#' value that represents missing value. Sometime a data use 0 or other extreme value to represents missing values.
 #' @param prediction A logical value indicating whether to return the prediction vector.
 #' @param showsd \code{boolean}, whether show standard deviation of cross validation
 #' @param metrics, list of evaluation metrics to be used in corss validation,
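A hedged usage sketch tying the parameters documented above together; the parameter names follow the roxygen block, while the choice of 'error' as the metric and the hyperparameter values are assumptions for illustration:

    require(xgboost)
    data(agaricus.train, package='xgboost')
    # 5-fold cross validation, reporting classification error with its
    # standard deviation across folds (showsd = TRUE).
    history <- xgb.cv(data = agaricus.train$data, label = agaricus.train$label,
                      nfold = 5, nround = 3, max.depth = 2, eta = 1,
                      objective = "binary:logistic", metrics = list("error"),
                      showsd = TRUE, prediction = FALSE)
    print(history)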

View File

@@ -29,7 +29,7 @@
 #' bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
 #' eta = 1, nround = 2,objective = "binary:logistic")
 #' # save the model in file 'xgb.model.dump'
-#' xgb.dump(bst, 'xgb.model.dump', with.stats = T)
+#' xgb.dump(bst, 'xgb.model.dump', with.stats = TRUE)
 #'
 #' # print the model without saving it to a file
 #' print(xgb.dump(bst))
@@ -54,4 +54,4 @@ xgb.dump <- function(model = NULL, fname = NULL, fmap = "", with.stats=FALSE) {
 result %>% str_split("\n") %>% unlist %>% Filter(function(x) x != "", .) %>% writeLines(fname)
 return(TRUE)
 }
 }
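The with.stats = T to with.stats = TRUE edit is a classic CRAN-proofing change: T is an ordinary variable that merely defaults to TRUE, so user code can rebind it, while the literal TRUE is a reserved word and cannot be masked. A two-line illustration:

    T <- FALSE    # legal: T is just a global variable defaulting to TRUE
    isTRUE(T)     # FALSE -- any code that relied on T is now broken
    isTRUE(TRUE)  # TRUE  -- the literal cannot be reassigned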

View File

@@ -32,7 +32,8 @@
 #' data(agaricus.train, package='xgboost')
 #' data(agaricus.test, package='xgboost')
 #'
-#' #Both dataset are list with two items, a sparse matrix and labels (labels = outcome column which will be learned).
+#' #Both dataset are list with two items, a sparse matrix and labels
+#' (labels = outcome column which will be learned).
 #' #Each column of the sparse Matrix is a feature in one hot encoding format.
 #' train <- agaricus.train
 #' test <- agaricus.test
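The reflowed comment (wrapped here to keep roxygen lines within CRAN's width checks) describes the structure of the bundled datasets; a quick sketch for verifying that structure interactively, using nothing beyond the shipped data:

    data(agaricus.train, package='xgboost')
    str(agaricus.train, max.level = 1)
    # Expect a list of 2:
    #  $ data : a sparse dgCMatrix, one column per one-hot-encoded feature
    #  $ label: the outcome column to be learned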

View File

@@ -42,7 +42,8 @@
 #' @examples
 #' data(agaricus.train, package='xgboost')
 #'
-#' #Both dataset are list with two items, a sparse matrix and labels (labels = outcome column which will be learned).
+#' #Both dataset are list with two items, a sparse matrix and labels
+#' (labels = outcome column which will be learned).
 #' #Each column of the sparse Matrix is a feature in one hot encoding format.
 #' train <- agaricus.train
 #'

View File

@@ -42,7 +42,8 @@
 #' @examples
 #' data(agaricus.train, package='xgboost')
 #'
-#' #Both dataset are list with two items, a sparse matrix and labels (labels = outcome column which will be learned).
+#' #Both dataset are list with two items, a sparse matrix and labels
+#' (labels = outcome column which will be learned).
 #' #Each column of the sparse Matrix is a feature in one hot encoding format.
 #' train <- agaricus.train
 #'

Binary file not shown.

Binary file not shown.

View File

@@ -4,4 +4,5 @@ boost_from_prediction   Boosting from existing prediction
 predict_first_ntree       Predicting using first n trees
 generalized_linear_model  Generalized Linear Model
 cross_validation          Cross validation
-create_sparse_matrix
+create_sparse_matrix      Create Sparse Matrix
+predict_leaf_indices      Predicting the corresponding leaves

View File

@@ -1,7 +1,7 @@
 require(xgboost)
 require(Matrix)
 require(data.table)
-require(vcd) #Available in Cran. Used for its dataset with categorical values.
+if (!require(vcd)) install.packages('vcd') #Available in Cran. Used for its dataset with categorical values.

 # According to its documentation, Xgboost works only on numbers.
 # Sometimes the dataset we have to work on have categorical data.
@@ -86,4 +86,4 @@ print(chisq.test(df$AgeCat, df$Y))
 # As you can see, in general destroying information by simplying it won't improve your model. Chi2 just demonstrates that. But in more complex cases, creating a new feature based on existing one which makes link with the outcome more obvious may help the algorithm and improve the model. The case studied here is not enough complex to show that. Check Kaggle forum for some challenging datasets.
 # However it's almost always worse when you add some arbitrary rules.
 # Moreover, you can notice that even if we have added some not useful new features highly correlated with other features, the boosting tree algorithm have been able to choose the best one, which in this case is the Age. Linear model may not be that strong in these scenario.
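The require(vcd) to if (!require(vcd)) install.packages('vcd') change keeps the demo runnable on machines, CRAN's check machines included, where vcd is not preinstalled, at the cost of installing into the user's library as a side effect. A gentler variant of the same guard, sketched here rather than taken from the commit, fails with an actionable message instead:

    # Alternative guard (not what the commit does): stop with a clear message
    # rather than installing packages behind the user's back.
    if (!requireNamespace("vcd", quietly = TRUE)) {
      stop("This demo needs the 'vcd' package: install.packages('vcd')")
    }
    library(vcd)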