change doc and demo for new obj feval interface
parent 19b24cf978
commit 8d3a7e1688
@@ -36,7 +36,7 @@
 #' 3. Task Parameters
 #'
 #' \itemize{
-#' \item \code{objective} specify the learning task and the corresponding learning objective, and the objective options are below:
+#' \item \code{objective} specifies the learning task and the corresponding learning objective; users can pass a self-defined function to it. The default objective options are below:
 #' \itemize{
 #' \item \code{reg:linear} linear regression (Default).
 #' \item \code{reg:logistic} logistic regression.
@@ -48,7 +48,7 @@
 #' \item \code{rank:pairwise} set xgboost to do ranking task by minimizing the pairwise loss.
 #' }
 #' \item \code{base_score} the initial prediction score of all instances, global bias. Default: 0.5
-#' \item \code{eval_metric} evaluation metrics for validation data. Default: metric will be assigned according to objective(rmse for regression, and error for classification, mean average precision for ranking). List is provided in detail section.
+#' \item \code{eval_metric} evaluation metrics for validation data. Users can pass a self-defined function to it. Default: a metric will be assigned according to the objective (rmse for regression, error for classification, mean average precision for ranking). The full list is provided in the details section.
 #' }
 #'
 #' @param data takes an \code{xgb.DMatrix} as the input.
@@ -103,7 +103,6 @@
 #' dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
 #' dtest <- dtrain
 #' watchlist <- list(eval = dtest, train = dtrain)
-#' param <- list(max.depth = 2, eta = 1, silent = 1)
 #' logregobj <- function(preds, dtrain) {
 #'   labels <- getinfo(dtrain, "label")
 #'   preds <- 1/(1 + exp(-preds))
@@ -116,7 +115,8 @@
 #'   err <- as.numeric(sum(labels != (preds > 0)))/length(labels)
 #'   return(list(metric = "error", value = err))
 #' }
-#' bst <- xgb.train(param, dtrain, nthread = 2, nround = 2, watchlist, logregobj, evalerror)
+#' param <- list(max.depth = 2, eta = 1, silent = 1, objective = logregobj, eval_metric = evalerror)
+#' bst <- xgb.train(param, dtrain, nthread = 2, nround = 2, watchlist)
 #' @export
 #'
 xgb.train <- function(params=list(), data, nrounds, watchlist = list(),

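To see the new interface end to end, here is the example from the hunks above assembled into one runnable script. This is a minimal sketch: the gradient and hessian lines that the hunks elide are filled in with the standard logistic-loss derivatives, and the variable names are taken from the commit's own example.

library(xgboost)
data(agaricus.train, package = 'xgboost')
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
watchlist <- list(train = dtrain)

# self-defined objective: xgb.train hands us raw margin scores, so we
# apply the sigmoid ourselves, then return the gradient and hessian
logregobj <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  preds <- 1 / (1 + exp(-preds))
  grad <- preds - labels          # standard logistic-loss gradient (assumed)
  hess <- preds * (1 - preds)     # standard logistic-loss hessian (assumed)
  list(grad = grad, hess = hess)
}

# self-defined metric: returns a display name and a scalar value
evalerror <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
  list(metric = "error", value = err)
}

# both functions now travel inside params instead of as positional arguments
param <- list(max.depth = 2, eta = 1, silent = 1,
              objective = logregobj, eval_metric = evalerror)
bst <- xgb.train(param, dtrain, nrounds = 2, watchlist = watchlist)
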
@@ -8,7 +8,6 @@ dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
-# note: for customized objective function, we leave objective as default
 # note: what we are getting is margin value in prediction
 # you must know what you are doing
 param <- list(max.depth=2,eta=1,nthread = 2, silent=1)
 watchlist <- list(eval = dtest, train = dtrain)
 num_round <- 2
 
@@ -33,10 +32,13 @@ evalerror <- function(preds, dtrain) {
   err <- as.numeric(sum(labels != (preds > 0)))/length(labels)
   return(list(metric = "error", value = err))
 }
+
+param <- list(max.depth=2,eta=1,nthread = 2, silent=1,
+              objective=logregobj, eval_metric=evalerror)
 print ('start training with user customized objective')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
-bst <- xgb.train(param, dtrain, num_round, watchlist, logregobj, evalerror)
+bst <- xgb.train(param, dtrain, num_round, watchlist)
 
 #
 # there can be cases where you want additional information

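The two demo comments kept above matter in practice: with a self-defined objective the booster carries no link function, so predict() returns raw margin scores, not probabilities. A short sketch of recovering probabilities (margins and probs are illustrative names; bst and dtest come from the demo above):

# with a custom objective, predict() returns margins; apply the
# sigmoid manually to recover probabilities
margins <- predict(bst, dtest)
probs <- 1 / (1 + exp(-margins))
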
@@ -7,8 +7,8 @@
 xgb.cv(params = list(), data, nrounds, nfold, label = NULL,
   missing = NULL, prediction = FALSE, showsd = TRUE, metrics = list(),
   obj = NULL, feval = NULL, stratified = TRUE, folds = NULL,
-  verbose = T, printEveryN=1L, early_stop_round = NULL, early.stop.round = NULL,
-  maximize = NULL, ...)
+  verbose = T, printEveryN = 1L, early_stop_round = NULL,
+  early.stop.round = NULL, maximize = NULL, ...)
 }
 \arguments{
 \item{params}{the list of parameters. Commonly used ones are:

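Since xgb.cv shares the params argument shown in this usage block, the same params-based route should apply to cross-validation as well. A sketch, assuming xgb.cv forwards objective and eval_metric from params the way xgb.train now does (cvres is an illustrative name; param and dtrain come from the demo above):

# 5-fold CV with the self-defined objective and metric carried in params
cvres <- xgb.cv(params = param, data = dtrain, nrounds = 2, nfold = 5)
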
@@ -5,7 +5,7 @@
 \title{eXtreme Gradient Boosting Training}
 \usage{
 xgb.train(params = list(), data, nrounds, watchlist = list(), obj = NULL,
-  feval = NULL, verbose = 1, printEveryN=1L, early_stop_round = NULL,
+  feval = NULL, verbose = 1, printEveryN = 1L, early_stop_round = NULL,
   early.stop.round = NULL, maximize = NULL, ...)
 }
 \arguments{
@@ -43,7 +43,7 @@ xgb.train(params = list(), data, nrounds, watchlist = list(), obj = NULL,
 3. Task Parameters
 
 \itemize{
-\item \code{objective} specify the learning task and the corresponding learning objective, and the objective options are below:
+\item \code{objective} specifies the learning task and the corresponding learning objective; users can pass a self-defined function to it. The default objective options are below:
 \itemize{
 \item \code{reg:linear} linear regression (Default).
 \item \code{reg:logistic} logistic regression.
@@ -55,7 +55,7 @@ xgb.train(params = list(), data, nrounds, watchlist = list(), obj = NULL,
 \item \code{rank:pairwise} set xgboost to do ranking task by minimizing the pairwise loss.
 }
 \item \code{base_score} the initial prediction score of all instances, global bias. Default: 0.5
-\item \code{eval_metric} evaluation metrics for validation data. Default: metric will be assigned according to objective(rmse for regression, and error for classification, mean average precision for ranking). List is provided in detail section.
+\item \code{eval_metric} evaluation metrics for validation data. Users can pass a self-defined function to it. Default: a metric will be assigned according to the objective (rmse for regression, error for classification, mean average precision for ranking). The full list is provided in the details section.
 }}
 
 \item{data}{takes an \code{xgb.DMatrix} as the input.}

@@ -122,7 +122,6 @@ data(agaricus.train, package='xgboost')
 dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
 dtest <- dtrain
 watchlist <- list(eval = dtest, train = dtrain)
-param <- list(max.depth = 2, eta = 1, silent = 1)
 logregobj <- function(preds, dtrain) {
   labels <- getinfo(dtrain, "label")
   preds <- 1/(1 + exp(-preds))
@@ -135,6 +134,7 @@ evalerror <- function(preds, dtrain) {
   err <- as.numeric(sum(labels != (preds > 0)))/length(labels)
   return(list(metric = "error", value = err))
 }
-bst <- xgb.train(param, dtrain, nthread = 2, nround = 2, watchlist, logregobj, evalerror)
+param <- list(max.depth = 2, eta = 1, silent = 1, objective = logregobj, eval_metric = evalerror)
+bst <- xgb.train(param, dtrain, nthread = 2, nround = 2, watchlist)
 }
 

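Note that obj and feval are still present in the usage signature above, so the old positional style from the removed example line presumably keeps working; the params route is simply what the documentation now shows. A sketch of the two styles, assumed equivalent after this change (param0, bst_old, bst_new are illustrative names):

# old style: functions passed as the obj/feval arguments
param0 <- list(max.depth = 2, eta = 1, silent = 1)
bst_old <- xgb.train(param0, dtrain, nrounds = 2, watchlist,
                     obj = logregobj, feval = evalerror)
# new style: functions carried inside params
bst_new <- xgb.train(param, dtrain, nrounds = 2, watchlist)
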
@@ -5,8 +5,8 @@
 \title{eXtreme Gradient Boosting (Tree) library}
 \usage{
 xgboost(data = NULL, label = NULL, missing = NULL, params = list(),
-  nrounds, verbose = 1, printEveryN=1L, early_stop_round = NULL, early.stop.round = NULL,
-  maximize = NULL, ...)
+  nrounds, verbose = 1, printEveryN = 1L, early_stop_round = NULL,
+  early.stop.round = NULL, maximize = NULL, ...)
 }
 \arguments{
 \item{data}{takes \code{matrix}, \code{dgCMatrix}, local data file or

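The convenience wrapper xgboost() also accepts a params list in this signature, so the self-defined functions should work through it too. A sketch under the assumption that the wrapper forwards params unchanged to the trainer (logregobj and evalerror as defined in the examples above):

# custom objective and metric through the high-level wrapper
bst <- xgboost(data = agaricus.train$data, label = agaricus.train$label,
               params = list(max.depth = 2, eta = 1, silent = 1,
                             objective = logregobj, eval_metric = evalerror),
               nrounds = 2)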