Disable pytest-timeout for now. (#8348)

Jiaming Yuan authored on 2022-10-17 23:06:10 +08:00; committed by GitHub
parent fcddbc9264
commit 2176e511fc
13 changed files with 107 additions and 58 deletions

View File

@@ -2,7 +2,7 @@
 import socket
 from platform import system
-from typing import TypedDict
+from typing import Any, TypedDict

 PytestSkip = TypedDict("PytestSkip", {"condition": bool, "reason": str})
@@ -39,3 +39,26 @@ def has_ipv6() -> bool:
 def skip_ipv6() -> PytestSkip:
     """PyTest skip mark for IPv6."""
     return {"condition": not has_ipv6(), "reason": "IPv6 is required to be enabled."}
+
+
+def timeout(sec: int, *args: Any, enable: bool = False, **kwargs: Any) -> Any:
+    """Make a pytest mark for the `pytest-timeout` package.
+
+    Parameters
+    ----------
+    sec :
+        Timeout seconds.
+    enable :
+        Control whether timeout should be applied, used for debugging.
+
+    Returns
+    -------
+    pytest.mark.timeout
+    """
+    import pytest  # pylint: disable=import-error
+
+    # This is disabled for now due to regression caused by conflicts between federated
+    # learning build and the CI container environment.
+    if enable:
+        return pytest.mark.timeout(sec, *args, **kwargs)
+    return pytest.mark.timeout(None, *args, **kwargs)
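A minimal usage sketch, not part of this commit: a test module opts in through `pytestmark`, mirroring the `pytestmark = testing.timeout(N)` lines in the files below. The module and test names here are hypothetical.

# Hypothetical test module illustrating the helper added above.
from xgboost import testing

# With the default enable=False the mark expands to pytest.mark.timeout(None),
# so no timeout is enforced; pass enable=True locally while debugging a hang
# to restore the 30-second limit.
pytestmark = testing.timeout(30)


def test_example() -> None:
    assert 1 + 1 == 2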

View File

@@ -1,12 +1,15 @@
 import sys
-from hypothesis import strategies, given, settings, assume, note
 import pytest
+from hypothesis import assume, given, note, settings, strategies
 import xgboost as xgb
+from xgboost import testing

 sys.path.append("tests/python")
 import testing as tm

-pytestmark = pytest.mark.timeout(10)
+pytestmark = testing.timeout(10)

 parameter_strategy = strategies.fixed_dictionaries({
     'booster': strategies.just('gblinear'),

View File

@@ -1,21 +1,23 @@
 '''Test model IO with pickle.'''
-import pickle
-import numpy as np
-import subprocess
-import os
-import sys
 import json
+import os
+import pickle
+import subprocess
+import sys
+import numpy as np
 import pytest
 import xgboost as xgb
-from xgboost import XGBClassifier
+from xgboost import XGBClassifier, testing

 sys.path.append("tests/python")
 import testing as tm

 model_path = './model.pkl'

-pytestmark = pytest.mark.timeout(30)
+pytestmark = testing.timeout(30)

 def build_dataset():
     N = 10

View File

@@ -1,11 +1,12 @@
 import sys
-import pytest
 import numpy as np
-import xgboost as xgb
+import pytest
+from hypothesis import assume, given, settings, strategies
 from xgboost.compat import PANDAS_INSTALLED
-from hypothesis import given, strategies, assume, settings
+import xgboost as xgb
+from xgboost import testing

 if PANDAS_INSTALLED:
     from hypothesis.extra.pandas import column, data_frames, range_indexes
@@ -16,8 +17,8 @@ else:
 sys.path.append("tests/python")
 import testing as tm
+from test_predict import run_predict_leaf # noqa
 from test_predict import run_threaded_predict # noqa
-from test_predict import run_predict_leaf # noqa

 rng = np.random.RandomState(1994)
@@ -32,7 +33,8 @@ predict_parameter_strategy = strategies.fixed_dictionaries({
     'num_parallel_tree': strategies.sampled_from([1, 4]),
 })

-pytestmark = pytest.mark.timeout(20)
+pytestmark = testing.timeout(20)

 class TestGPUPredict:
     def test_predict(self):

View File

@@ -1,17 +1,21 @@
-import numpy as np
-import xgboost
-import os
 import itertools
+import os
 import shutil
+import sys
 import urllib.request
 import zipfile
-import sys
-import pytest
+import numpy as np
+import xgboost
+from xgboost import testing

 sys.path.append("tests/python")
 import testing as tm # noqa

-pytestmark = pytest.mark.timeout(10)
+pytestmark = testing.timeout(10)

 class TestRanking:
     @classmethod

View File

@@ -1,15 +1,18 @@
-from typing import Dict, Any
-import numpy as np
 import sys
+from typing import Any, Dict
+import numpy as np
 import pytest
+from hypothesis import assume, given, note, settings, strategies
 import xgboost as xgb
-from hypothesis import given, strategies, assume, settings, note
+from xgboost import testing

 sys.path.append("tests/python")
-import testing as tm
 import test_updaters as test_up
+import testing as tm

-pytestmark = pytest.mark.timeout(30)
+pytestmark = testing.timeout(30)

 parameter_strategy = strategies.fixed_dictionaries({
     'max_depth': strategies.integers(0, 11),

View File

@@ -1,12 +1,15 @@
-import xgboost as xgb
-from xgboost.data import SingleBatchInternalIter as SingleBatch
 import numpy as np
-from testing import IteratorForTest, non_increasing, make_batches
 import pytest
-from hypothesis import given, strategies, settings
+from hypothesis import given, settings, strategies
 from scipy.sparse import csr_matrix
+from testing import IteratorForTest, make_batches, non_increasing
+from xgboost.data import SingleBatchInternalIter as SingleBatch
+import xgboost as xgb
+from xgboost import testing

-pytestmark = pytest.mark.timeout(30)
+pytestmark = testing.timeout(30)

 def test_single_batch(tree_method: str = "approx") -> None:
     from sklearn.datasets import load_breast_cancer

View File

@@ -1,10 +1,13 @@
 import os
 import subprocess
-import pytest
-import testing as tm
 import sys
+import pytest
+import testing as tm
+from xgboost import testing

-pytestmark = pytest.mark.timeout(30)
+pytestmark = testing.timeout(30)

 ROOT_DIR = tm.PROJECT_ROOT
 DEMO_DIR = os.path.join(ROOT_DIR, 'demo')

View File

@@ -1,9 +1,11 @@
 import testing as tm
-import pytest
-from hypothesis import strategies, given, settings, note
+from hypothesis import given, note, settings, strategies
 import xgboost as xgb
+from xgboost import testing

-pytestmark = pytest.mark.timeout(10)
+pytestmark = testing.timeout(10)

 parameter_strategy = strategies.fixed_dictionaries({
     'booster': strategies.just('gblinear'),

View File

@@ -1,14 +1,16 @@
 import os
-import tempfile
 import subprocess
-import xgboost as xgb
+import tempfile
 import numpy as np
 import pytest
 import testing as tm
+import xgboost as xgb
+from xgboost import testing

-pytestmark = pytest.mark.timeout(10)
+pytestmark = testing.timeout(10)

 class TestOMP:
     def test_omp(self):

View File

@@ -9,6 +9,7 @@ import pytest
 import testing as tm
 import xgboost as xgb
+from xgboost import testing

 if tm.no_spark()["condition"]:
     pytest.skip(msg=tm.no_spark()["reason"], allow_module_level=True)
@@ -37,7 +38,7 @@ from .utils import SparkTestCase
 logging.getLogger("py4j").setLevel(logging.INFO)

-pytestmark = pytest.mark.timeout(60)
+pytestmark = testing.timeout(60)

 class XgboostLocalTest(SparkTestCase):

View File

@@ -29,6 +29,7 @@ from test_with_sklearn import run_data_initialization, run_feature_weights
 from xgboost.data import _is_cudf_df
 import xgboost as xgb
+from xgboost import testing

 if sys.platform.startswith("win"):
     pytest.skip("Skipping dask tests on Windows", allow_module_level=True)
@@ -44,7 +45,7 @@ from xgboost.dask import DaskDMatrix
 dask.config.set({"distributed.scheduler.allowed-failures": False})

-pytestmark = pytest.mark.timeout(30)
+pytestmark = testing.timeout(30)

 if hasattr(HealthCheck, 'function_scoped_fixture'):
     suppress = [HealthCheck.function_scoped_fixture]

View File

@@ -1,20 +1,20 @@
-from typing import Callable, Optional
 import collections
 import importlib.util
-import numpy as np
-import xgboost as xgb
-import testing as tm
-import tempfile
-import os
-import shutil
-import pytest
 import json
+import os
+import tempfile
+from typing import Callable, Optional
+import numpy as np
+import pytest
+import testing as tm
+from sklearn.utils.estimator_checks import parametrize_with_checks
+import xgboost as xgb
+from xgboost import testing

 rng = np.random.RandomState(1994)

-pytestmark = [pytest.mark.skipif(**tm.no_sklearn()), pytest.mark.timeout(30)]
+pytestmark = [pytest.mark.skipif(**tm.no_sklearn()), testing.timeout(30)]
-from sklearn.utils.estimator_checks import parametrize_with_checks

 def test_binary_classification():