Refactor Python tests (#3897)

* Deprecate nose tests.
* Format Python tests.
This commit is contained in:
@@ -2,24 +2,26 @@ import xgboost as xgb
|
||||
import testing as tm
|
||||
import numpy as np
|
||||
import unittest
|
||||
import pytest
|
||||
|
||||
# Module-level RNG with a fixed seed so randomized test data is reproducible
# across runs (consumers of `rng` are not visible in this chunk).
rng = np.random.RandomState(1994)
|
||||
|
||||
|
||||
class TestEarlyStopping(unittest.TestCase):
|
||||
|
||||
@pytest.mark.skipif(**tm.no_sklearn())
|
||||
def test_early_stopping_nonparallel(self):
|
||||
tm._skip_if_no_sklearn()
|
||||
from sklearn.datasets import load_digits
|
||||
try:
|
||||
from sklearn.model_selection import train_test_split
|
||||
except:
|
||||
except ImportError:
|
||||
from sklearn.cross_validation import train_test_split
|
||||
|
||||
digits = load_digits(2)
|
||||
X = digits['data']
|
||||
y = digits['target']
|
||||
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
|
||||
X_train, X_test, y_train, y_test = train_test_split(X, y,
|
||||
random_state=0)
|
||||
clf1 = xgb.XGBClassifier()
|
||||
clf1.fit(X_train, y_train, early_stopping_rounds=5, eval_metric="auc",
|
||||
eval_set=[(X_test, y_test)])
|
||||
@@ -35,36 +37,41 @@ class TestEarlyStopping(unittest.TestCase):
|
||||
eval_set=[(X_test, y_test)])
|
||||
assert clf3.best_score == 1
|
||||
|
||||
@pytest.mark.skipif(**tm.no_sklearn())
|
||||
def evalerror(self, preds, dtrain):
|
||||
tm._skip_if_no_sklearn()
|
||||
from sklearn.metrics import mean_squared_error
|
||||
|
||||
labels = dtrain.get_label()
|
||||
return 'rmse', mean_squared_error(labels, preds)
|
||||
|
||||
@pytest.mark.skipif(**tm.no_sklearn())
|
||||
def test_cv_early_stopping(self):
|
||||
tm._skip_if_no_sklearn()
|
||||
from sklearn.datasets import load_digits
|
||||
|
||||
digits = load_digits(2)
|
||||
X = digits['data']
|
||||
y = digits['target']
|
||||
dm = xgb.DMatrix(X, label=y)
|
||||
params = {'max_depth': 2, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic'}
|
||||
params = {'max_depth': 2, 'eta': 1, 'silent': 1,
|
||||
'objective': 'binary:logistic'}
|
||||
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, early_stopping_rounds=10)
|
||||
assert cv.shape[0] == 10
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, early_stopping_rounds=5)
|
||||
assert cv.shape[0] == 3
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, early_stopping_rounds=1)
|
||||
assert cv.shape[0] == 1
|
||||
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, feval=self.evalerror,
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
|
||||
early_stopping_rounds=10)
|
||||
assert cv.shape[0] == 10
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, feval=self.evalerror,
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
|
||||
early_stopping_rounds=5)
|
||||
assert cv.shape[0] == 3
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
|
||||
early_stopping_rounds=1)
|
||||
assert cv.shape[0] == 1
|
||||
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
|
||||
feval=self.evalerror, early_stopping_rounds=10)
|
||||
assert cv.shape[0] == 10
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
|
||||
feval=self.evalerror, early_stopping_rounds=1)
|
||||
assert cv.shape[0] == 5
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
|
||||
feval=self.evalerror, maximize=True,
|
||||
early_stopping_rounds=1)
|
||||
assert cv.shape[0] == 5
|
||||
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, feval=self.evalerror,
|
||||
maximize=True, early_stopping_rounds=1)
|
||||
assert cv.shape[0] == 1
|
||||
|
||||
Reference in New Issue
Block a user