diff --git a/R-package/demo/README.md b/R-package/demo/README.md
index be08ee54f..5ee1aa797 100644
--- a/R-package/demo/README.md
+++ b/R-package/demo/README.md
@@ -1,6 +1,7 @@
 XGBoost R Feature Walkthrough
 ====
-* [Basic walkthrough of wrappers](basic_walkthrough.R)
+* [Basic walkthrough of wrappers](basic_walkthrough.R)
+* [Train an xgboost model with the caret library](caret_wrapper.R)
 * [Cutomize loss function, and evaluation metric](custom_objective.R)
 * [Boosting from existing prediction](boost_from_prediction.R)
 * [Predicting using first n trees](predict_first_ntree.R)
diff --git a/R-package/demo/caret_wrapper.R b/R-package/demo/caret_wrapper.R
index 5c53c9915..13e05e562 100644
--- a/R-package/demo/caret_wrapper.R
+++ b/R-package/demo/caret_wrapper.R
@@ -22,11 +22,14 @@ df[,AgeCat:= as.factor(ifelse(Age > 30, "Old", "Young"))]
 df[,ID:=NULL]
 
 #-------------Basic Training using XGBoost in caret Library-----------------
-# set up control parameters for caret::train
-# here we use 10-fold cross-validation, repeating twice
-fitControl <- trainControl(method = "cv", number = 10, repeats = 2)
+# Set up control parameters for caret::train.
+# Here we use 10-fold cross-validation repeated twice, with random search over the tuning hyper-parameters.
+# Note: `repeats` only applies to repeated cross-validation, so the method must be "repeatedcv" rather than "cv".
+fitControl <- trainControl(method = "repeatedcv", number = 10, repeats = 2, search = "random")
 
 # train a xgbTree model using caret::train
 model <- train(factor(Improved)~., data = df, method = "xgbTree", trControl = fitControl)
 
+# Instead of tree boosters, you can also fit a linear or logistic regression model using xgbLinear
+# model <- train(factor(Improved)~., data = df, method = "xgbLinear", trControl = fitControl)
+
 # See model results
-print(model)
\ No newline at end of file
+print(model)
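
For context, here is a minimal sketch of how the caret model fitted by this demo might be inspected after training. It assumes `model` and `df` exist exactly as defined in caret_wrapper.R; the inspection steps themselves are illustrative additions, not part of the patch.

```r
# Minimal sketch: inspecting the caret model trained in caret_wrapper.R.
# Assumes `model` and `df` exist as in the demo above.
library(caret)

# The hyper-parameter combination that random search selected
print(model$bestTune)

# Resampled performance for every candidate tried during tuning
print(model$results)

# Predict class labels; df is the training data, so this is for
# illustration only. In practice, predict on held-out data.
preds <- predict(model, newdata = df)

# Confusion matrix of predictions against the observed labels
print(confusionMatrix(preds, factor(df$Improved)))
```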