From 422febd18e853d4b1a8bd50154280bd0f7b9cfbf Mon Sep 17 00:00:00 2001 From: Faron Date: Sun, 25 Oct 2015 10:58:07 +0100 Subject: [PATCH] added missing params --- python-package/xgboost/sklearn.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py index abfae6b4a..30a4ab70b 100644 --- a/python-package/xgboost/sklearn.py +++ b/python-package/xgboost/sklearn.py @@ -54,6 +54,14 @@ class XGBModel(XGBModelBase): Subsample ratio of the training instance. colsample_bytree : float Subsample ratio of columns when constructing each tree. + colsample_bylevel : float + Subsample ratio of columns for each split, in each level. + reg_alpha : float (xgb's alpha) + L1 regularization term on weights + reg_lambda : float (xgb's lambda) + L2 regularization term on weights + scale_pos_weight : float + Balancing of positive and negative weights. base_score: The initial prediction score of all instances, global bias. 
@@ -66,7 +74,7 @@ class XGBModel(XGBModelBase): def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True, objective="reg:linear", nthread=-1, gamma=0, min_child_weight=1, max_delta_step=0, - subsample=1, colsample_bytree=1, + subsample=1, colsample_bytree=1, colsample_bylevel=1, reg_alpha=0, reg_lambda=1, scale_pos_weight=1, base_score=0.5, seed=0, missing=None): if not SKLEARN_INSTALLED: raise XGBoostError('sklearn needs to be installed in order to use this module') @@ -82,6 +90,10 @@ class XGBModel(XGBModelBase): self.max_delta_step = max_delta_step self.subsample = subsample self.colsample_bytree = colsample_bytree + self.colsample_bylevel = colsample_bylevel + self.reg_alpha = reg_alpha + self.reg_lambda = reg_lambda + self.scale_pos_weight = scale_pos_weight self.base_score = base_score self.seed = seed @@ -251,14 +263,15 @@ class XGBClassifier(XGBModel, XGBClassifierBase): n_estimators=100, silent=True, objective="binary:logistic", nthread=-1, gamma=0, min_child_weight=1, - max_delta_step=0, subsample=1, colsample_bytree=1, + max_delta_step=0, subsample=1, colsample_bytree=1, colsample_bylevel=1, + reg_alpha=0, reg_lambda=1, scale_pos_weight=1, base_score=0.5, seed=0, missing=None): super(XGBClassifier, self).__init__(max_depth, learning_rate, n_estimators, silent, objective, nthread, gamma, min_child_weight, max_delta_step, subsample, - colsample_bytree, colsample_bylevel, reg_alpha, reg_lambda, - base_score, seed, missing) + colsample_bytree, colsample_bylevel, reg_alpha, reg_lambda, + scale_pos_weight, base_score, seed, missing) def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None, early_stopping_rounds=None, verbose=True):