From 4591039ebace6cf5a3f859a00f0212b072585285 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan
Date: Tue, 11 Jun 2019 16:04:09 +0800
Subject: [PATCH] Remove remaining reg:linear. (#4544)

---
 include/xgboost/objective.h       |  2 +-
 python-package/xgboost/sklearn.py | 15 ++++++++-------
 tests/cli/machine.conf.in         |  2 +-
 3 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/include/xgboost/objective.h b/include/xgboost/objective.h
index 900a16836..2d531fd1b 100644
--- a/include/xgboost/objective.h
+++ b/include/xgboost/objective.h
@@ -108,7 +108,7 @@ struct ObjFunctionReg
  *
  * \code
  * // example of registering a objective
- * XGBOOST_REGISTER_OBJECTIVE(LinearRegression, "reg:linear")
+ * XGBOOST_REGISTER_OBJECTIVE(LinearRegression, "reg:squarederror")
  * .describe("Linear regression objective")
  * .set_body([]() {
  *     return new RegLossObj(LossType::kLinearSquare);
diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py
index b80a79fcf..dc1d73e3f 100644
--- a/python-package/xgboost/sklearn.py
+++ b/python-package/xgboost/sklearn.py
@@ -136,11 +136,12 @@ class XGBModel(XGBModelBase):
     """
 
     def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100,
-                 verbosity=1, silent=None, objective="reg:linear", booster='gbtree',
-                 n_jobs=1, nthread=None, gamma=0, min_child_weight=1,
-                 max_delta_step=0, subsample=1, colsample_bytree=1, colsample_bylevel=1,
-                 colsample_bynode=1, reg_alpha=0, reg_lambda=1, scale_pos_weight=1,
-                 base_score=0.5, random_state=0, seed=None, missing=None,
+                 verbosity=1, silent=None, objective="reg:squarederror",
+                 booster='gbtree', n_jobs=1, nthread=None, gamma=0,
+                 min_child_weight=1, max_delta_step=0, subsample=1,
+                 colsample_bytree=1, colsample_bylevel=1, colsample_bynode=1,
+                 reg_alpha=0, reg_lambda=1, scale_pos_weight=1, base_score=0.5,
+                 random_state=0, seed=None, missing=None,
                  importance_type="gain", **kwargs):
         if not SKLEARN_INSTALLED:
             raise XGBoostError('sklearn needs to be installed in order to use this module')
@@ -377,7 +378,7 @@ class XGBModel(XGBModelBase):
 
         if callable(self.objective):
             obj = _objective_decorator(self.objective)
-            params["objective"] = "reg:linear"
+            params["objective"] = "reg:squarederror"
         else:
             obj = None
 
@@ -928,7 +929,7 @@ class XGBRFRegressor(XGBRegressor):
     def __init__(self, max_depth=3, learning_rate=1,
                  n_estimators=100, verbosity=1, silent=None,
-                 objective="reg:linear", n_jobs=1, nthread=None, gamma=0,
+                 objective="reg:squarederror", n_jobs=1, nthread=None, gamma=0,
                  min_child_weight=1, max_delta_step=0, subsample=0.8,
                  colsample_bytree=1, colsample_bylevel=1, colsample_bynode=0.8,
                  reg_alpha=0, reg_lambda=1, scale_pos_weight=1,
                  base_score=0.5, random_state=0, seed=None,
diff --git a/tests/cli/machine.conf.in b/tests/cli/machine.conf.in
index acac74f93..0f12bfddf 100644
--- a/tests/cli/machine.conf.in
+++ b/tests/cli/machine.conf.in
@@ -1,6 +1,6 @@
 # Originally an example in demo/regression/
 booster = gbtree
-objective = reg:linear
+objective = reg:squarederror
 eta = 1.0
 gamma = 1.0
 seed = 0
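
Note for reviewers: a minimal sketch of what the new default means through the
sklearn wrapper. The toy data and the print check below are illustrative
assumptions, not part of this patch; on a build that includes this change, an
XGBRegressor constructed without an explicit objective trains with
reg:squarederror.

    # Minimal sketch (assumes an XGBoost build containing this patch).
    # XGBRegressor now defaults to objective="reg:squarederror"; the toy
    # data here is illustrative only.
    import numpy as np
    import xgboost as xgb

    X = np.random.rand(100, 4)  # 100 samples, 4 features
    y = np.random.rand(100)

    reg = xgb.XGBRegressor(n_estimators=10)
    reg.fit(X, y)

    # get_params() exposes the default set in XGBModel.__init__.
    print(reg.get_params()["objective"])  # -> reg:squarederror

Passing objective="reg:linear" explicitly still works upstream as a deprecated
alias of reg:squarederror in releases from this era; this patch only removes
the remaining in-tree uses of the old name.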