From 0c8c23194928d3afe3e5a2c1119fe01bb43ea0de Mon Sep 17 00:00:00 2001
From: Ajinkya Kale
Date: Wed, 29 Jul 2015 14:28:34 -0700
Subject: [PATCH] Fixing duplicate params in demo

Issue in "demo(package="xgboost", custom_objective)"

> bst <- xgb.train(param, dtrain, num_round, watchlist,
+                  objective=logregobj, eval_metric=evalerror)
Error in xgb.train(param, dtrain, num_round, watchlist, objective = logregobj,  :
  Duplicated term in parameters. Please check your list of params.
---
 R-package/demo/custom_objective.R | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/R-package/demo/custom_objective.R b/R-package/demo/custom_objective.R
index 201f23d98..7234ead86 100644
--- a/R-package/demo/custom_objective.R
+++ b/R-package/demo/custom_objective.R
@@ -33,7 +33,7 @@ evalerror <- function(preds, dtrain) {
   return(list(metric = "error", value = err))
 }
 
-param <- list(max.depth=2,eta=1,nthread = 2, silent=1,
+param <- list(max.depth=2, eta=1, nthread = 2, silent=1,
               objective=logregobj, eval_metric=evalerror)
 print ('start training with user customized objective')
 # training with customized objective, we can also do step by step training
@@ -57,9 +57,9 @@ logregobjattr <- function(preds, dtrain) {
   hess <- preds * (1 - preds)
   return(list(grad = grad, hess = hess))
 }
-
+param <- list(max.depth=2, eta=1, nthread = 2, silent=1,
+              objective=logregobjattr, eval_metric=evalerror)
 print ('start training with user customized objective, with additional attributes in DMatrix')
 # training with customized objective, we can also do step by step training
 # simply look at xgboost.py's implementation of train
-bst <- xgb.train(param, dtrain, num_round, watchlist,
-                 objective=logregobj, eval_metric=evalerror)
+bst <- xgb.train(param, dtrain, num_round, watchlist)
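
For anyone reproducing the fix outside the demo script, below is a minimal R sketch of the pattern the patch settles on. The agaricus data loading, num_round value, and the bodies of logregobj/evalerror are assumptions that mirror the stock xgboost demo rather than part of this patch; the point is only that the custom objective and eval_metric are supplied once, through the param list, so xgb.train() no longer sees duplicated terms.

library(xgboost)

# Assumed setup, mirroring the stock demo: agaricus data shipped with the package
data(agaricus.train, package = "xgboost")
data(agaricus.test, package = "xgboost")
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
dtest  <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
watchlist <- list(eval = dtest, train = dtrain)
num_round <- 2

# Custom logistic objective: returns gradient and hessian (body mirrors the demo)
logregobj <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  preds <- 1 / (1 + exp(-preds))
  grad <- preds - labels
  hess <- preds * (1 - preds)
  return(list(grad = grad, hess = hess))
}

# Custom evaluation metric: classification error at a 0 threshold (body mirrors the demo)
evalerror <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  err <- as.numeric(sum(labels != (preds > 0))) / length(labels)
  return(list(metric = "error", value = err))
}

# objective and eval_metric are listed once, here ...
param <- list(max.depth = 2, eta = 1, nthread = 2, silent = 1,
              objective = logregobj, eval_metric = evalerror)

# ... and not repeated in the call, which is what triggered
# "Duplicated term in parameters" before this patch
bst <- xgb.train(param, dtrain, num_round, watchlist)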