Use pytest conventions consistently (#6337)

* Do not derive from unittest.TestCase (not needed for pytest)

* assertRaises -> pytest.raises (see the first sketch after this list)

* Simplify test_empty_dmatrix with test parametrization (see the parametrization sketch after this list)

* setUpClass -> setup_class, tearDownClass -> teardown_class (see the third sketch after this list)

* Don't import unittest; import pytest

* Use plain assert (also covered in the first sketch after this list)

* Use parametrized tests in more places

* Fix test_gpu_with_sklearn.py

* Put back run_empty_dmatrix_reg / run_empty_dmatrix_cls

* Fix test_eta_decay_gpu_hist

* Add parametrized tests for monotone constraints

* Fix test names

* Remove test parametrization

* Revise test_slice so that it is not flaky
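
As a rough illustration of the two assertion bullets above, here is a minimal sketch of the conversion. parse_max_depth is a made-up helper standing in for whatever code a real test exercises:

    import pytest

    def parse_max_depth(value):
        # Hypothetical helper used only for this sketch.
        depth = int(value)
        if depth < 0:
            raise ValueError("max_depth must be non-negative")
        return depth

    def test_parse_max_depth():
        # unittest: self.assertEqual(parse_max_depth("6"), 6)
        assert parse_max_depth("6") == 6
        # unittest: self.assertRaises(ValueError, parse_max_depth, "-1")
        with pytest.raises(ValueError):
            parse_max_depth("-1")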
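The parametrization bullets follow the idiom sketched below; this example trains a throwaway classifier and is not code from this commit:

    import numpy as np
    import pytest
    import xgboost as xgb

    @pytest.mark.parametrize('tree_method', ['exact', 'approx', 'hist'])
    def test_train_with_tree_method(tree_method):
        # One parametrized test replaces three near-identical copies; pytest
        # reports each value as its own item, e.g.
        # test_train_with_tree_method[hist].
        X = np.random.RandomState(1994).randn(100, 4)
        y = np.arange(100) % 2
        clf = xgb.XGBClassifier(n_estimators=2, tree_method=tree_method)
        clf.fit(X, y)
        assert clf.predict(X).shape == (100,)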
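For the setup_class/teardown_class bullet, a self-contained sketch (TestWithTempDir is illustrative, not a class from the commit):

    import os
    import shutil
    import tempfile

    class TestWithTempDir:
        @classmethod
        def setup_class(cls):
            # pytest's counterpart to unittest's setUpClass:
            # runs once before any test in this class.
            cls.tmpdir = tempfile.mkdtemp()

        @classmethod
        def teardown_class(cls):
            # Counterpart to tearDownClass: runs once after the last test.
            shutil.rmtree(cls.tmpdir)

        def test_tmpdir_exists(self):
            assert os.path.isdir(self.tmpdir)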
Philip Hyunsu Cho authored 2020-11-19 17:00:15 -08:00; committed by GitHub
parent c763b50dd0
commit 9c9070aea2
34 changed files with 200 additions and 223 deletions


@@ -7,7 +7,6 @@ import tempfile
 import os
 import shutil
 import pytest
-import unittest
 import json
 
 rng = np.random.RandomState(1994)
@@ -1012,37 +1011,39 @@ def test_feature_weights():
     assert poly_decreasing[0] < -0.08
 
-class TestBoostFromPrediction(unittest.TestCase):
-    def run_boost_from_prediction(self, tree_method):
-        from sklearn.datasets import load_breast_cancer
-        X, y = load_breast_cancer(return_X_y=True)
-        model_0 = xgb.XGBClassifier(
-            learning_rate=0.3, random_state=0, n_estimators=4,
-            tree_method=tree_method)
-        model_0.fit(X=X, y=y)
-        margin = model_0.predict(X, output_margin=True)
+def run_boost_from_prediction(tree_method):
+    from sklearn.datasets import load_breast_cancer
+    X, y = load_breast_cancer(return_X_y=True)
+    model_0 = xgb.XGBClassifier(
+        learning_rate=0.3, random_state=0, n_estimators=4,
+        tree_method=tree_method)
+    model_0.fit(X=X, y=y)
+    margin = model_0.predict(X, output_margin=True)
 
-        model_1 = xgb.XGBClassifier(
-            learning_rate=0.3, random_state=0, n_estimators=4,
-            tree_method=tree_method)
-        model_1.fit(X=X, y=y, base_margin=margin)
-        predictions_1 = model_1.predict(X, base_margin=margin)
+    model_1 = xgb.XGBClassifier(
+        learning_rate=0.3, random_state=0, n_estimators=4,
+        tree_method=tree_method)
+    model_1.fit(X=X, y=y, base_margin=margin)
+    predictions_1 = model_1.predict(X, base_margin=margin)
 
-        cls_2 = xgb.XGBClassifier(
-            learning_rate=0.3, random_state=0, n_estimators=8,
-            tree_method=tree_method)
-        cls_2.fit(X=X, y=y)
-        predictions_2 = cls_2.predict(X)
-        assert np.all(predictions_1 == predictions_2)
+    cls_2 = xgb.XGBClassifier(
+        learning_rate=0.3, random_state=0, n_estimators=8,
+        tree_method=tree_method)
+    cls_2.fit(X=X, y=y)
+    predictions_2 = cls_2.predict(X)
+    assert np.all(predictions_1 == predictions_2)
 
-    @pytest.mark.skipif(**tm.no_sklearn())
-    def test_boost_from_prediction_hist(self):
-        self.run_boost_from_prediction('hist')
 
-    @pytest.mark.skipif(**tm.no_sklearn())
-    def test_boost_from_prediction_approx(self):
-        self.run_boost_from_prediction('approx')
+@pytest.mark.skipif(**tm.no_sklearn())
+def test_boost_from_prediction_hist():
+    run_boost_from_prediction('hist')
 
-    @pytest.mark.skipif(**tm.no_sklearn())
-    def test_boost_from_prediction_exact(self):
-        self.run_boost_from_prediction('exact')
+
+@pytest.mark.skipif(**tm.no_sklearn())
+def test_boost_from_prediction_approx():
+    run_boost_from_prediction('approx')
+
+
+@pytest.mark.skipif(**tm.no_sklearn())
+def test_boost_from_prediction_exact():
+    run_boost_from_prediction('exact')