reorder parameters

HangLi 2015-04-23 16:25:31 -07:00
parent 29e76c7ac0
commit fcb833373b

@@ -753,8 +753,6 @@ class XGBModel(BaseEstimator):
     ----------
     max_depth : int
         Maximum tree depth for base learners.
-    min_child_weight : int
-        minimum sum of instance weight(hessian) needed in a child.
     learning_rate : float
         Boosting learning rate (xgb's "eta")
     n_estimators : int
@@ -763,33 +761,38 @@ class XGBModel(BaseEstimator):
         Whether to print messages while running boosting.
     objective : string
         Specify the learning task and the corresponding learning objective.
+    nthread : int
+        Number of parallel threads used to run xgboost.
+    min_child_weight : int
+        minimum sum of instance weight(hessian) needed in a child.
     subsample : float
         Subsample ratio of the training instance.
     colsample_bytree : float
         Subsample ratio of columns when constructing each tree.
     eval_metric : string
         Evaluation metrics for validation data.
-    nthread : int
-        Number of parallel threads used to run xgboost.
     seed : int
         Random number seed.
     """
-    def __init__(self, max_depth=3, min_child_weight=1, learning_rate=0.1, n_estimators=100,
-                 silent=True, objective="reg:linear", subsample=1, colsample_bytree=1, eval_metric='error',
-                 nthread=-1, seed=0):
+    def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True, objective="reg:linear",
+                 nthread=-1, min_child_weight=1, subsample=1, colsample_bytree=1,
+                 eval_metric='error', seed=0):
         if not SKLEARN_INSTALLED:
             raise Exception('sklearn needs to be installed in order to use this module')
         self.max_depth = max_depth
-        self.min_child_weight = min_child_weight
         self.learning_rate = learning_rate
-        self.silent = silent
         self.n_estimators = n_estimators
+        self.silent = silent
         self.objective = objective
+        self.nthread = nthread
+        self.min_child_weight = min_child_weight
         self.subsample = subsample
         self.colsample_bytree = colsample_bytree
         self.eval_metric = eval_metric
-        self.nthread = nthread
         self.seed = seed
         self._Booster = Booster()
     def get_xgb_params(self):
@@ -812,12 +815,12 @@ class XGBModel(BaseEstimator):
 class XGBClassifier(XGBModel, ClassifierMixin):
-    def __init__(self, max_depth=3, min_child_weight=1, learning_rate=0.1, n_estimators=100,
-                 silent=True, objective="binary:logistic", subsample=1, colsample_bytree=1, eval_metric='error',
-                 nthread=-1, seed=0):
+    def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True, objective="binary:logistic",
+                 nthread=-1, min_child_weight=1, subsample=1, colsample_bytree=1,
+                 eval_metric='error', seed=0):
-        super(XGBClassifier, self).__init__(max_depth, min_child_weight, learning_rate, n_estimators,
-                                            silent, objective, subsample, colsample_bytree,eval_metric,
-                                            nthread, seed)
+        super(XGBClassifier, self).__init__(max_depth, learning_rate, n_estimators, silent, objective,
+                                            nthread, min_child_weight, subsample, colsample_bytree,
+                                            eval_metric, seed)
     def fit(self, X, y, sample_weight=None):
         y_values = list(np.unique(y))
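
Because this commit changes the positional order of the constructor parameters, any caller that passed arguments positionally against the old signature would now bind values to different parameters. The sketch below illustrates construction with keyword arguments, which is unaffected by the reorder; it assumes the wrapper shown here is importable as xgboost and exports XGBClassifier, which this diff does not show.

# Minimal usage sketch, assuming the wrapper above is importable as `xgboost`
# and exports XGBClassifier (the import path is not part of this diff).
from xgboost import XGBClassifier

# Keyword arguments are unaffected by the positional reorder in this commit.
clf = XGBClassifier(
    max_depth=3,
    learning_rate=0.1,
    n_estimators=100,
    objective="binary:logistic",
    nthread=-1,
    min_child_weight=1,
    subsample=1,
    colsample_bytree=1,
    eval_metric="error",
    seed=0,
)

# A positional call written against the old order, e.g. XGBClassifier(3, 1, 0.1, 100),
# used to mean min_child_weight=1 and learning_rate=0.1; with the new order it
# binds 1 to learning_rate and 0.1 to n_estimators, so keyword arguments are the
# safer way to construct these estimators across versions.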