From 36031d9a36f3946bcb63b3638d9d73c41f757ec4 Mon Sep 17 00:00:00 2001
From: hetong007
Date: Sat, 30 May 2015 15:48:57 -0700
Subject: [PATCH] modify script to use objective and eval_metric

---
 R-package/demo/cross_validation.R | 6 +++---
 R-package/demo/custom_objective.R | 3 ++-
 R-package/demo/early_stopping.R   | 9 +++++----
 3 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/R-package/demo/cross_validation.R b/R-package/demo/cross_validation.R
index fbb38f6d8..c3148ae21 100644
--- a/R-package/demo/cross_validation.R
+++ b/R-package/demo/cross_validation.R
@@ -40,10 +40,10 @@ evalerror <- function(preds, dtrain) {
   return(list(metric = "error", value = err))
 }
 
-param <- list(max.depth=2,eta=1,silent=1)
+param <- list(max.depth=2,eta=1,silent=1,
+              objective = logregobj, eval_metric = evalerror)
 # train with customized objective
-xgb.cv(param, dtrain, nround, nfold = 5,
-       obj = logregobj, feval=evalerror)
+xgb.cv(param, dtrain, nround, nfold = 5)
 
 # do cross validation with prediction values for each fold
 res <- xgb.cv(param, dtrain, nround, nfold=5, prediction = TRUE)
diff --git a/R-package/demo/custom_objective.R b/R-package/demo/custom_objective.R
index cb90a7b5a..201f23d98 100644
--- a/R-package/demo/custom_objective.R
+++ b/R-package/demo/custom_objective.R
@@ -61,4 +61,5 @@ logregobjattr <- function(preds, dtrain) {
 print ('start training with user customized objective, with additional attributes in DMatrix')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
-bst <- xgb.train(param, dtrain, num_round, watchlist, logregobjattr, evalerror)
+bst <- xgb.train(param, dtrain, num_round, watchlist,
+                 objective=logregobjattr, eval_metric=evalerror)
diff --git a/R-package/demo/early_stopping.R b/R-package/demo/early_stopping.R
index 34dfebc0b..aa74aa2ee 100644
--- a/R-package/demo/early_stopping.R
+++ b/R-package/demo/early_stopping.R
@@ -31,9 +31,10 @@ evalerror <- function(preds, dtrain) {
   return(list(metric = "error", value = err))
 }
 print ('start training with early Stopping setting')
-# training with customized objective, we can also do step by step training
-# simply look at xgboost.py's implementation of train
-bst <- xgb.train(param, dtrain, num_round, watchlist, logregobj, evalerror, maximize = FALSE,
+
+bst <- xgb.train(param, dtrain, num_round, watchlist,
+                 objective = logregobj, eval_metric = evalerror, maximize = FALSE,
                  early.stop.round = 3)
-bst <- xgb.cv(param, dtrain, num_round, nfold=5, obj=logregobj, feval = evalerror,
+bst <- xgb.cv(param, dtrain, num_round, nfold = 5,
+              objective = logregobj, eval_metric = evalerror,
               maximize = FALSE, early.stop.round = 3)
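
Below is a minimal, self-contained sketch of the calling convention the patched demos adopt: the custom objective and evaluation function are supplied as `objective` / `eval_metric` in the parameter list instead of the positional `obj` / `feval` arguments. It assumes an xgboost R build that accepts function-valued `objective` and `eval_metric` in params (which is what this patch relies on); the data, round count, and watchlist are illustrative only.

library(xgboost)

# agaricus data ships with the xgboost R package, as used by the demos
data(agaricus.train, package = 'xgboost')
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)

# logistic regression objective: returns gradient and hessian, mirroring the demos
logregobj <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  preds <- 1 / (1 + exp(-preds))
  grad <- preds - labels
  hess <- preds * (1 - preds)
  return(list(grad = grad, hess = hess))
}

# custom error metric on the raw margin, mirroring the demos
evalerror <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
  return(list(metric = "error", value = err))
}

# custom functions now travel inside the parameter list
param <- list(max.depth = 2, eta = 1, silent = 1,
              objective = logregobj, eval_metric = evalerror)
num_round <- 2
watchlist <- list(train = dtrain)

# both xgb.train and xgb.cv pick the custom functions up from param
bst <- xgb.train(param, dtrain, num_round, watchlist)
res <- xgb.cv(param, dtrain, num_round, nfold = 5)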