Added tests for additional params in sklearn wrapper (+1 squashed commit)

Squashed commits:
[43892b9] Added tests for additional params in sklearn wrapper
terrytangyuan 2015-11-02 19:42:21 -06:00
parent 430be8d4bd
commit 166e878830
2 changed files with 55 additions and 43 deletions
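
The "additional params" here are the output_margin and ntree_limit keyword arguments that the sklearn wrapper's predict() forwards to the underlying Booster.predict (output_margin requests raw, untransformed margin scores; ntree_limit limits how many boosting rounds are used, with 0 meaning all of them). As a minimal standalone sketch of the same calls outside the test harness, assuming the 2015-era API used in this commit (ntree_limit has since been replaced by iteration_range in newer xgboost releases) and an illustrative dataset:

import xgboost as xgb
from sklearn.datasets import load_digits

# Binary subset of the digits data, matching the classification tests in this commit.
digits = load_digits(n_class=2)
X, y = digits['data'], digits['target']

model = xgb.XGBClassifier().fit(X, y)

preds = model.predict(X)                                             # default prediction
preds_margin = model.predict(X, output_margin=True, ntree_limit=3)   # extra kwargs are forwarded to Booster.predict
preds_all = model.predict(X, output_margin=True, ntree_limit=0)      # ntree_limit=0 means use all boosting rounds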

Changed file 1 of 2: project changelog

@@ -42,6 +42,7 @@ on going at master
* Python module now throw exception instead of crash terminal when a parameter error happens.
* Python module now has importance plot and tree plot functions.
* Python module now accepts different learning rates for each boosting round.
* Additional parameters added for sklearn wrapper
* Java api is ready for use
* Added more test cases and continuous integration to make each build more robust
* Improvements in sklearn compatible module

Changed file 2 of 2: scikit-learn wrapper test suite

@@ -4,9 +4,12 @@ from sklearn.cross_validation import KFold, train_test_split
from sklearn.metrics import mean_squared_error
from sklearn.grid_search import GridSearchCV
from sklearn.datasets import load_iris, load_digits, load_boston
import unittest

rng = np.random.RandomState(1994)

class TestSklearn(unittest.TestCase):

def test_binary_classification():
    digits = load_digits(2)
    y = digits['target']
@@ -27,6 +30,10 @@ def test_multiclass_classification():
    for train_index, test_index in kf:
        xgb_model = xgb.XGBClassifier().fit(X[train_index],y[train_index])
        preds = xgb_model.predict(X[test_index])
        # test other params in XGBClassifier().predict()
        preds2 = xgb_model.predict(X[test_index], output_margin=True, ntree_limit=3)
        preds3 = xgb_model.predict(X[test_index], output_margin=True, ntree_limit=0)
        preds4 = xgb_model.predict(X[test_index], output_margin=False, ntree_limit=3)
        labels = y[test_index]
        err = sum(1 for i in range(len(preds)) if int(preds[i]>0.5)!=labels[i]) / float(len(preds))
        assert err < 0.4
@@ -39,6 +46,10 @@ def test_boston_housing_regression():
    for train_index, test_index in kf:
        xgb_model = xgb.XGBRegressor().fit(X[train_index],y[train_index])
        preds = xgb_model.predict(X[test_index])
        # test other params in XGBRegressor().predict()
        preds2 = xgb_model.predict(X[test_index], output_margin=True, ntree_limit=3)
        preds3 = xgb_model.predict(X[test_index], output_margin=True, ntree_limit=0)
        preds4 = xgb_model.predict(X[test_index], output_margin=False, ntree_limit=3)
        labels = y[test_index]
        assert mean_squared_error(preds, labels) < 15
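
The added predict() calls above are smoke tests: they verify that the extra keyword arguments are accepted, but preds2/preds3/preds4 are never inspected. A natural follow-up (not part of this commit) would be to assert basic consistency of the outputs, for example that every variant returns one prediction per input row. A sketch under the same 2015-era signature; load_boston is used only to mirror the test and has since been removed from scikit-learn:

import xgboost as xgb
from sklearn.datasets import load_boston  # removed in modern scikit-learn; mirrors the original test

boston = load_boston()
X, y = boston['data'], boston['target']

model = xgb.XGBRegressor().fit(X, y)

preds = model.predict(X)
preds_margin3 = model.predict(X, output_margin=True, ntree_limit=3)
preds_all = model.predict(X, output_margin=True, ntree_limit=0)

# Each variant should produce one prediction per row, regardless of the
# output_margin / ntree_limit settings.
for p in (preds_margin3, preds_all):
    assert p.shape == preds.shape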