Fix indentation warnings reported by flake8

This commit is contained in:
white1033 2015-04-05 23:22:40 +08:00
parent 402e832ce5
commit 18cb8d7de2

View File

@ -26,7 +26,6 @@ except ImportError:
SKLEARN_INSTALLED = False SKLEARN_INSTALLED = False
__all__ = ['DMatrix', 'CVPack', 'Booster', 'aggcv', 'cv', 'mknfold', 'train'] __all__ = ['DMatrix', 'CVPack', 'Booster', 'aggcv', 'cv', 'mknfold', 'train']
if sys.version_info[0] == 3: if sys.version_info[0] == 3:
@ -619,7 +618,7 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None, ea
score = float(msg.rsplit(':', 1)[1]) score = float(msg.rsplit(':', 1)[1])
if (maximize_score and score > best_score) or \ if (maximize_score and score > best_score) or \
(not maximize_score and score < best_score): (not maximize_score and score < best_score):
best_score = score best_score = score
best_score_i = i best_score_i = i
best_msg = msg best_msg = msg
@ -632,7 +631,6 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None, ea
return bst return bst
class CVPack(object): class CVPack(object):
def __init__(self, dtrain, dtest, param): def __init__(self, dtrain, dtest, param):
self.dtrain = dtrain self.dtrain = dtrain
@ -778,6 +776,7 @@ class XGBModel(BaseEstimator):
'silent': True if self.silent == 1 else False, 'silent': True if self.silent == 1 else False,
'objective': self.objective 'objective': self.objective
} }
def get_xgb_params(self): def get_xgb_params(self):
return {'eta': self.eta, 'max_depth': self.max_depth, 'silent': self.silent, 'objective': self.objective} return {'eta': self.eta, 'max_depth': self.max_depth, 'silent': self.silent, 'objective': self.objective}
@ -790,6 +789,7 @@ class XGBModel(BaseEstimator):
testDmatrix = DMatrix(X) testDmatrix = DMatrix(X)
return self._Booster.predict(testDmatrix) return self._Booster.predict(testDmatrix)
class XGBClassifier(XGBModel, ClassifierMixin): class XGBClassifier(XGBModel, ClassifierMixin):
def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True): def __init__(self, max_depth=3, learning_rate=0.1, n_estimators=100, silent=True):
super(XGBClassifier, self).__init__(max_depth, learning_rate, n_estimators, silent, objective="binary:logistic") super(XGBClassifier, self).__init__(max_depth, learning_rate, n_estimators, silent, objective="binary:logistic")
@ -834,9 +834,8 @@ class XGBClassifier(XGBModel, ClassifierMixin):
else: else:
classone_probs = class_probs classone_probs = class_probs
classzero_probs = 1.0 - classone_probs classzero_probs = 1.0 - classone_probs
return np.vstack((classzero_probs,classone_probs)).transpose() return np.vstack((classzero_probs, classone_probs)).transpose()
class XGBRegressor(XGBModel, RegressorMixin): class XGBRegressor(XGBModel, RegressorMixin):
pass pass