From fcb833373b3d2c027ed2fb13f12b53d31e1a14f1 Mon Sep 17 00:00:00 2001
From: HangLi
Date: Thu, 23 Apr 2015 16:25:31 -0700
Subject: [PATCH] reorder parameters

---
 wrapper/xgboost.py | 35 +++++++++++++++++++----------------
 1 file changed, 19 insertions(+), 16 deletions(-)

diff --git a/wrapper/xgboost.py b/wrapper/xgboost.py
index fac4da186..5c8199c4d 100644
--- a/wrapper/xgboost.py
+++ b/wrapper/xgboost.py
@@ -753,8 +753,6 @@ class XGBModel(BaseEstimator):
     ----------
     max_depth : int
         Maximum tree depth for base learners.
-    min_child_weight : int
-        minimum sum of instance weight(hessian) needed in a child.
     learning_rate : float
         Boosting learning rate (xgb's "eta")
     n_estimators : int
@@ -763,33 +761,38 @@ class XGBModel(BaseEstimator):
         Whether to print messages while running boosting.
     objective : string
         Specify the learning task and the corresponding learning objective.
+
+    nthread : int
+        Number of parallel threads used to run xgboost.
+    min_child_weight : int
+        minimum sum of instance weight(hessian) needed in a child.
     subsample : float
         Subsample ratio of the training instance.
     colsample_bytree : float
         Subsample ratio of columns when constructing each tree.
     eval_metric : string
         Evaluation metrics for validation data.
-    nthread : int
-        Number of parallel threads used to run xgboost.
     seed : int
         Random number seed.
     """
-    def __init__(self, max_depth=3, min_child_weight=1, learning_rate=0.1, n_estimators=100,
-                 silent=True, objective="reg:linear", subsample=1, colsample_bytree=1, eval_metric='error',
-                 nthread=-1, seed=0):
+    def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True, objective="reg:linear",
+                 nthread=-1, min_child_weight=1, subsample=1, colsample_bytree=1,
+                 eval_metric='error', seed=0):
         if not SKLEARN_INSTALLED:
             raise Exception('sklearn needs to be installed in order to use this module')
         self.max_depth = max_depth
-        self.min_child_weight = min_child_weight
         self.learning_rate = learning_rate
-        self.silent = silent
         self.n_estimators = n_estimators
+        self.silent = silent
         self.objective = objective
+
+        self.nthread = nthread
+        self.min_child_weight = min_child_weight
         self.subsample = subsample
         self.colsample_bytree = colsample_bytree
         self.eval_metric = eval_metric
-        self.nthread = nthread
         self.seed = seed
+
         self._Booster = Booster()
 
     def get_xgb_params(self):
@@ -812,12 +815,12 @@ class XGBModel(BaseEstimator):
 
 
 class XGBClassifier(XGBModel, ClassifierMixin):
-    def __init__(self, max_depth=3, min_child_weight=1, learning_rate=0.1, n_estimators=100,
-                 silent=True, objective="binary:logistic", subsample=1, colsample_bytree=1, eval_metric='error',
-                 nthread=-1, seed=0):
-        super(XGBClassifier, self).__init__(max_depth, min_child_weight, learning_rate, n_estimators,
-                                            silent, objective, subsample, colsample_bytree,eval_metric,
-                                            nthread, seed)
+    def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True, objective="binary:logistic",
+                 nthread=-1, min_child_weight=1, subsample=1, colsample_bytree=1,
+                 eval_metric='error', seed=0):
+        super(XGBClassifier, self).__init__(max_depth, learning_rate, n_estimators, silent, objective,
+                                            nthread, min_child_weight, subsample, colsample_bytree,
+                                            eval_metric, seed)
 
     def fit(self, X, y, sample_weight=None):
         y_values = list(np.unique(y))
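
Note on this reorder: the __init__ parameters are part of the wrapper's de facto
API, and XGBClassifier.__init__ forwards everything to XGBModel.__init__
positionally, which is why the super() call has to change in the same patch.
Any third-party code that constructs these estimators positionally will likewise
re-bind silently after this change (for example, the second positional slot moves
from min_child_weight to learning_rate). Below is a minimal sketch of the calling
style that stays correct across the reorder; it assumes the patched
wrapper/xgboost.py is importable as xgboost and that sklearn is installed (the
constructor raises otherwise). The values shown are just the defaults, not tuning
advice.

    # Keyword arguments are robust to this parameter reorder; a positional
    # call such as XGBClassifier(3, 1, 0.1) previously meant
    # min_child_weight=1, learning_rate=0.1 but now binds
    # learning_rate=1, n_estimators=0.1.
    from xgboost import XGBClassifier

    clf = XGBClassifier(
        max_depth=3,
        learning_rate=0.1,
        n_estimators=100,
        silent=True,
        objective="binary:logistic",
        nthread=-1,           # moved forward in the signature by this patch
        min_child_weight=1,   # now follows nthread
        subsample=1,
        colsample_bytree=1,
        eval_metric='error',
        seed=0,
    )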