fix iris multiclass problem

commit d776e0fdf5
parent 2b170ecda4
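Why the fix is needed: iris$Species has three levels, so as.numeric(iris[,5]) yields the labels 1, 2, 3. The examples passed those labels to xgboost/xgb.DMatrix without setting a multiclass objective, and the default binary objective expects labels in [0, 1]. Every example below is therefore switched to the 0/1 indicator as.numeric(iris[,5]=='setosa'). A minimal editorial sketch of the two encodings follows; the commented-out multiclass alternative is an assumption and not part of this commit:

library(xgboost)
data(iris)
table(as.numeric(iris[, 5]))              # original labels: 1, 2, 3 (three classes)
table(as.numeric(iris[, 5] == 'setosa'))  # fixed labels: 0/1, valid for the default binary objective
bst <- xgboost(as.matrix(iris[, 1:4]), as.numeric(iris[, 5] == 'setosa'), nrounds = 2)
# Assumed alternative for a true 3-class model (not what this commit does):
# xgboost(as.matrix(iris[, 1:4]), as.numeric(iris[, 5]) - 1, nrounds = 2,
#         objective = 'multi:softmax', num_class = 3)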
@@ -6,7 +6,7 @@ setClass('xgb.DMatrix')
 #'
 #' @examples
 #' data(iris)
-#' iris[,5] <- as.numeric(iris[,5])
+#' iris[,5] <- as.numeric(iris[,5]=='setosa')
 #' dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
 #' labels <- getinfo(dtrain, "label")
 #' @rdname getinfo
@@ -11,11 +11,12 @@ setClass("xgb.Booster")
 #' value of sum of functions, when outputmargin=TRUE, the prediction is
 #' untransformed margin value. In logistic regression, outputmargin=T will
 #' output value before logistic transformation.
-#' @param ntreelimit limit number of trees used in prediction, this parameter is only valid for gbtree, but not for gblinear.
-#' set it to be value bigger than 0. It will use all trees by default.
+#' @param ntreelimit limit number of trees used in prediction, this parameter is
+#' only valid for gbtree, but not for gblinear. set it to be value bigger
+#' than 0. It will use all trees by default.
 #' @examples
 #' data(iris)
-#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
+#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]=='setosa'), nrounds = 2)
 #' pred <- predict(bst, as.matrix(iris[,1:4]))
 #' @export
 #'
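Side note on the two predict() arguments documented in the hunk above; a minimal editorial usage sketch, reusing bst from the example (the argument names are exactly the ones documented above):

pred        <- predict(bst, as.matrix(iris[, 1:4]))                       # transformed prediction
pred_margin <- predict(bst, as.matrix(iris[, 1:4]), outputmargin = TRUE)  # raw margin before the logistic transform
pred_1tree  <- predict(bst, as.matrix(iris[, 1:4]), ntreelimit = 1)       # use only the first tree (gbtree only)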
@@ -8,7 +8,7 @@ setClass('xgb.DMatrix')
 #'
 #' @examples
 #' data(iris)
-#' iris[,5] <- as.numeric(iris[,5])
+#' iris[,5] <- as.numeric(iris[,5]=='setosa')
 #' dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
 #' dsub <- slice(dtrain, 1:3)
 #' @rdname slice
@@ -12,7 +12,7 @@
 #'
 #' @examples
 #' data(iris)
-#' iris[,5] <- as.numeric(iris[,5])
+#' iris[,5] <- as.numeric(iris[,5]=='setosa')
 #' dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
 #' xgb.DMatrix.save(dtrain, 'iris.xgb.DMatrix')
 #' dtrain <- xgb.DMatrix('iris.xgb.DMatrix')
@@ -7,7 +7,7 @@
 #'
 #' @examples
 #' data(iris)
-#' iris[,5] <- as.numeric(iris[,5])
+#' iris[,5] <- as.numeric(iris[,5]=='setosa')
 #' dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
 #' xgb.DMatrix.save(dtrain, 'iris.xgb.DMatrix')
 #' dtrain <- xgb.DMatrix('iris.xgb.DMatrix')
@@ -13,7 +13,7 @@
 #'
 #' @examples
 #' data(iris)
-#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
+#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]=='setosa'), nrounds = 2)
 #' xgb.dump(bst, 'iris.xgb.model.dump')
 #' @export
 #'
@@ -6,7 +6,7 @@
 #'
 #' @examples
 #' data(iris)
-#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
+#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]=='setosa'), nrounds = 2)
 #' xgb.save(bst, 'iris.xgb.model')
 #' bst <- xgb.load('iris.xgb.model')
 #' pred <- predict(bst, as.matrix(iris[,1:4]))
@@ -7,7 +7,7 @@
 #'
 #' @examples
 #' data(iris)
-#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
+#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]=='setosa'), nrounds = 2)
 #' xgb.save(bst, 'iris.xgb.model')
 #' bst <- xgb.load('iris.xgb.model')
 #' pred <- predict(bst, as.matrix(iris[,1:4]))
@@ -44,7 +44,7 @@
 #'
 #' @examples
 #' data(iris)
-#' iris[,5] <- as.numeric(iris[,5])
+#' iris[,5] <- as.numeric(iris[,5]=='setosa')
 #' dtrain <- xgb.DMatrix(as.matrix(iris[,1:4]), label=iris[,5])
 #' dtest <- dtrain
 #' watchlist <- list(eval = dtest, train = dtrain)
@@ -34,7 +34,7 @@
 #'
 #' @examples
 #' data(iris)
-#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]), nrounds = 2)
+#' bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]=='setosa'), nrounds = 2)
 #' pred <- predict(bst, as.matrix(iris[,1:4]))
 #' @export
 #'
@@ -80,7 +80,7 @@ In this section, we will illustrate some common usage of \verb@xgboost@.
 <<Training and prediction with iris>>=
 library(xgboost)
 data(iris)
-bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]),
+bst <- xgboost(as.matrix(iris[,1:4]),as.numeric(iris[,5]=='setosa'),
                nrounds = 5)
 xgb.save(bst, 'model.save')
 bst = xgb.load('model.save')
@@ -121,7 +121,7 @@ training from initial prediction value, weighted training instance.
 We can use \verb@xgb.DMatrix@ to construct an \verb@xgb.DMatrix@ object:
 <<xgb.DMatrix>>=
 iris.mat <- as.matrix(iris[,1:4])
-iris.label <- as.numeric(iris[,5])
+iris.label <- as.numeric(iris[,5]=='setosa')
 diris <- xgb.DMatrix(iris.mat, label = iris.label)
 class(diris)
 getinfo(diris,'label')
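The vignette text in this hunk mentions training from an initial prediction value and weighted training instances; a minimal editorial sketch of attaching that information with setinfo follows, assuming the field names 'weight' and 'base_margin' (they mirror the getinfo call above but are not shown in this commit):

w <- rep(1, nrow(iris.mat))                             # one weight per training row
setinfo(diris, 'weight', w)                             # weighted training instances
setinfo(diris, 'base_margin', rep(0, nrow(iris.mat)))   # start boosting from an initial prediction
getinfo(diris, 'label')                                 # fields are read back the same way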