rename demo of early stopping
parent d7da4189dc
commit cfdd6029a8
@@ -6,5 +6,5 @@ generalized_linear_model Generalized Linear Model
 cross_validation        Cross validation
 create_sparse_matrix    Create Sparse Matrix
 predict_leaf_indices    Predicting the corresponding leaves
-early_Stopping          Early Stop in training
+early_stopping          Early Stop in training
 poisson_regression      Poisson Regression on count data
R-package/demo/early_stopping.R (new file, 39 lines)
@@ -0,0 +1,39 @@
+require(xgboost)
+# load in the agaricus dataset
+data(agaricus.train, package = 'xgboost')
+data(agaricus.test, package = 'xgboost')
+dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
+dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
+# note: for a customized objective function, we leave 'objective' at its default,
+# note: so what we get back from prediction is the margin value
+# you must know what you are doing
+param <- list(max.depth = 2, eta = 1, nthread = 2, silent = 1)
+watchlist <- list(eval = dtest)
+num_round <- 20
+# user-defined objective function: given predictions, return gradient and second-order gradient
+# this is the log-likelihood loss
+logregobj <- function(preds, dtrain) {
+  labels <- getinfo(dtrain, "label")
+  preds <- 1 / (1 + exp(-preds))
+  grad <- preds - labels
+  hess <- preds * (1 - preds)
+  return(list(grad = grad, hess = hess))
+}
+# user-defined evaluation function: return a pair (metric_name, result)
+# NOTE: with a customized loss function, the default prediction value is the margin,
+# which may keep the built-in evaluation metrics from working properly
+# for example, with logistic loss the prediction is the score before the logistic transformation,
+# while the built-in evaluation error assumes the input is after the logistic transformation
+# keep this in mind when you use the customization; you may need to write a customized evaluation function
+evalerror <- function(preds, dtrain) {
+  labels <- getinfo(dtrain, "label")
+  err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
+  return(list(metric = "error", value = err))
+}
+print('start training with early stopping setting')
+# training with a customized objective; we can also do step-by-step training
+# simply look at xgboost.py's implementation of train
+bst <- xgb.train(param, dtrain, num_round, watchlist, logregobj, evalerror, maximize = FALSE,
+                 early.stop.round = 3)
+bst <- xgb.cv(param, dtrain, num_round, nfold = 5, obj = logregobj, feval = evalerror,
+              maximize = FALSE, early.stop.round = 3)
@@ -7,5 +7,5 @@ demo(generalized_linear_model)
 demo(cross_validation)
 demo(create_sparse_matrix)
 demo(predict_leaf_indices)
-demo(early_Stopping)
+demo(early_stopping)
 demo(poisson_regression)
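
Below is a minimal, illustrative sketch (not part of this commit) of how the renamed demo is invoked once the R package is installed with its demos, plus a short note on the math behind the custom objective the new file defines; it only restates and explains what the diff above already contains.

require(xgboost)

# The demo must now be run under the lower-case name introduced by this commit;
# demo(early_Stopping) no longer resolves.
demo(early_stopping, package = 'xgboost')

# The demo passes early.stop.round = 3 and maximize = FALSE to both xgb.train()
# and xgb.cv(), so training halts once the evaluation error has failed to
# improve for 3 consecutive rounds.

# Background on logregobj (standard calculus, not stated in the diff): for the
# logistic negative log-likelihood with margin f and p = 1 / (1 + exp(-f)),
# the first and second derivatives with respect to f are p - label and
# p * (1 - p), which is exactly the grad/hess pair logregobj returns.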