Remove remaining reg:linear. (#4544)
commit 4591039eba (parent 4e9965cb9d)
@@ -108,7 +108,7 @@ struct ObjFunctionReg
  *
  * \code
  * // example of registering a objective
- * XGBOOST_REGISTER_OBJECTIVE(LinearRegression, "reg:linear")
+ * XGBOOST_REGISTER_OBJECTIVE(LinearRegression, "reg:squarederror")
  * .describe("Linear regression objective")
  * .set_body([]() {
  *     return new RegLossObj(LossType::kLinearSquare);
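The string registered here is exactly what callers pass as the objective parameter, so the rename is user-visible. Below is a minimal training sketch with the low-level API, assuming xgboost >= 0.83 (where "reg:squarederror" exists) and synthetic data; it is an illustration, not part of this commit.

import numpy as np
import xgboost as xgb

# Synthetic regression data, purely for illustration.
X = np.random.rand(100, 5)
y = np.random.rand(100)

dtrain = xgb.DMatrix(X, label=y)
params = {"objective": "reg:squarederror", "eta": 0.1}
bst = xgb.train(params, dtrain, num_boost_round=10)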
@@ -136,11 +136,12 @@ class XGBModel(XGBModelBase):
     """
 
     def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100,
-                 verbosity=1, silent=None, objective="reg:linear", booster='gbtree',
-                 n_jobs=1, nthread=None, gamma=0, min_child_weight=1,
-                 max_delta_step=0, subsample=1, colsample_bytree=1, colsample_bylevel=1,
-                 colsample_bynode=1, reg_alpha=0, reg_lambda=1, scale_pos_weight=1,
-                 base_score=0.5, random_state=0, seed=None, missing=None,
+                 verbosity=1, silent=None, objective="reg:squarederror",
+                 booster='gbtree', n_jobs=1, nthread=None, gamma=0,
+                 min_child_weight=1, max_delta_step=0, subsample=1,
+                 colsample_bytree=1, colsample_bylevel=1, colsample_bynode=1,
+                 reg_alpha=0, reg_lambda=1, scale_pos_weight=1, base_score=0.5,
+                 random_state=0, seed=None, missing=None,
                  importance_type="gain", **kwargs):
         if not SKLEARN_INSTALLED:
             raise XGBoostError('sklearn needs to be installed in order to use this module')
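With this change, constructing the scikit-learn wrapper without an explicit objective now defaults to "reg:squarederror". A minimal sketch, assuming xgboost >= 0.83 with scikit-learn installed and synthetic data:

import numpy as np
import xgboost as xgb

X = np.random.rand(100, 5)
y = np.random.rand(100)

# objective defaults to "reg:squarederror" after this change.
model = xgb.XGBRegressor(n_estimators=10)
model.fit(X, y)
print(model.get_xgb_params()["objective"])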
@@ -377,7 +378,7 @@ class XGBModel(XGBModelBase):
 
         if callable(self.objective):
             obj = _objective_decorator(self.objective)
-            params["objective"] = "reg:linear"
+            params["objective"] = "reg:squarederror"
         else:
             obj = None
 
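This branch handles a user-supplied callable objective: the callable is wrapped by _objective_decorator, and the booster still needs a base objective string, which now defaults to "reg:squarederror". A sketch of that path, assuming the wrapper's (y_true, y_pred) -> (grad, hess) convention; the squared_error helper is a hypothetical example, not from this commit:

import numpy as np
import xgboost as xgb

def squared_error(y_true, y_pred):
    # Gradient and hessian of 0.5 * (y_pred - y_true)**2.
    grad = y_pred - y_true
    hess = np.ones_like(y_pred)
    return grad, hess

X = np.random.rand(50, 3)
y = np.random.rand(50)

# The wrapper decorates the callable and fills params["objective"]
# with the default string for the underlying booster.
model = xgb.XGBRegressor(objective=squared_error, n_estimators=5)
model.fit(X, y)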
@@ -928,7 +929,7 @@ class XGBRFRegressor(XGBRegressor):
 
     def __init__(self, max_depth=3, learning_rate=1, n_estimators=100,
                  verbosity=1, silent=None,
-                 objective="reg:linear", n_jobs=1, nthread=None, gamma=0,
+                 objective="reg:squarederror", n_jobs=1, nthread=None, gamma=0,
                  min_child_weight=1, max_delta_step=0, subsample=0.8, colsample_bytree=1,
                  colsample_bylevel=1, colsample_bynode=0.8, reg_alpha=0, reg_lambda=1,
                  scale_pos_weight=1, base_score=0.5, random_state=0, seed=None,
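XGBRFRegressor keeps its random-forest style defaults (learning_rate=1, subsample=0.8, colsample_bynode=0.8); only the objective string changes. A minimal usage sketch, assuming a version of xgboost that includes the RF wrappers and synthetic data:

import numpy as np
import xgboost as xgb

X = np.random.rand(100, 4)
y = np.random.rand(100)

# Defaults: learning_rate=1, subsample=0.8, colsample_bynode=0.8,
# and now objective="reg:squarederror".
rf = xgb.XGBRFRegressor(n_estimators=100)
rf.fit(X, y)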
@@ -1,6 +1,6 @@
 # Originally an example in demo/regression/
 booster = gbtree
-objective = reg:linear
+objective = reg:squarederror
 eta = 1.0
 gamma = 1.0
 seed = 0
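The file above is a configuration for the xgboost CLI (originally an example in demo/regression/). A rough Python equivalent of the same parameters, assuming the standard xgb.train API and substituting synthetic data for the demo's training file:

import numpy as np
import xgboost as xgb

params = {
    "booster": "gbtree",
    "objective": "reg:squarederror",
    "eta": 1.0,
    "gamma": 1.0,
    "seed": 0,
}

# Synthetic stand-in for the demo's training data.
dtrain = xgb.DMatrix(np.random.rand(100, 5), label=np.random.rand(100))
bst = xgb.train(params, dtrain, num_boost_round=10)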