[CI] Add timeout for distributed GPU tests. (#9917)

commit 6a5f6ba694 (parent b807f3e30c)
Jiaming Yuan, 2023-12-24 00:09:05 +08:00, committed by GitHub (GPG key ID: 4AEE18F83AFDEB23)
4 changed files with 17 additions and 7 deletions
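
The timeouts below are attached through each test module's pytestmark list via a tm.timeout(...) helper. The helper itself is not part of this diff; the sketch below shows what it presumably amounts to, assuming it is a thin wrapper over pytest.mark.timeout from the pytest-timeout plugin (the helper name comes from the diff, the body is an assumption):

    # Hypothetical sketch of a helper like xgboost.testing's timeout mark.
    # Assumption: it forwards to pytest.mark.timeout (pytest-timeout plugin),
    # so it can sit in a module-level `pytestmark` list next to skip marks.
    import pytest

    def timeout(seconds: int, *args, **kwargs):
        """Return a mark that fails a test running longer than `seconds` seconds."""
        return pytest.mark.timeout(seconds, *args, **kwargs)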

File 1 of 4:

@@ -5,9 +5,13 @@ import pytest
 from xgboost import testing as tm
+pytestmark = [
+    pytest.mark.skipif(**tm.no_dask()),
+    pytest.mark.skipif(**tm.no_dask_cuda()),
+    tm.timeout(60),
+]
-@pytest.mark.skipif(**tm.no_dask())
-@pytest.mark.skipif(**tm.no_dask_cuda())
 @pytest.mark.skipif(**tm.no_cupy())
 @pytest.mark.mgpu
 def test_dask_training():
@@ -16,8 +20,6 @@ def test_dask_training():
     subprocess.check_call(cmd)
-@pytest.mark.skipif(**tm.no_dask_cuda())
-@pytest.mark.skipif(**tm.no_dask())
 @pytest.mark.mgpu
 def test_dask_sklearn_demo():
     script = os.path.join(tm.demo_dir(__file__), "dask", "sklearn_gpu_training.py")

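The hunks above move the shared skip conditions into pytestmark and put every test in the module under a 60-second limit. A hedged illustration of the pytest behaviour this relies on (the module contents and test name below are made up): marks in the module-level pytestmark list are applied to each test in addition to its own decorators.

    # Illustration only, not part of the commit: pytest combines the marks in a
    # module-level `pytestmark` list with the decorators on each test function.
    import pytest

    pytestmark = [
        pytest.mark.skipif(False, reason="stand-in for a tm.no_dask()-style check"),
        pytest.mark.timeout(60),  # every test in this module must finish in 60s
    ]

    @pytest.mark.mgpu  # per-test mark, combined with the module-level ones
    def test_example() -> None:
        assert True
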
File 2 of 4:

@@ -1,4 +1,4 @@
-"""Copyright 2019-2022 XGBoost contributors"""
+"""Copyright 2019-2023, XGBoost contributors"""
 import asyncio
 import json
 from collections import OrderedDict
@@ -18,6 +18,7 @@ from xgboost.testing.params import hist_parameter_strategy
 pytestmark = [
     pytest.mark.skipif(**tm.no_dask()),
     pytest.mark.skipif(**tm.no_dask_cuda()),
+    tm.timeout(60),
 ]
 from ..test_with_dask.test_with_dask import generate_array
@@ -629,6 +630,7 @@ def test_nccl_load(local_cuda_client: Client, tree_method: str) -> None:
     def run(wid: int) -> None:
         # FIXME(jiamingy): https://github.com/dmlc/xgboost/issues/9147
         from xgboost.core import _LIB, _register_log_callback
+        _register_log_callback(_LIB)
         with CommunicatorContext(**args):

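The nested run helper above is executed in every dask worker process so that each worker re-registers the native log callback before entering the communicator. The dispatch is not shown in this hunk; a hedged sketch using dask's standard Client.run (the helper and argument names below are illustrative):

    # Hedged sketch, not code from this commit: run a per-worker check on every
    # dask worker with Client.run, mirroring the nested `run` helper in the diff.
    from dask.distributed import Client

    def worker_check(wid: int) -> None:
        # Executes inside the worker process; re-register the native log callback
        # there (workaround tracked in https://github.com/dmlc/xgboost/issues/9147).
        from xgboost.core import _LIB, _register_log_callback

        _register_log_callback(_LIB)

    def dispatch(client: Client) -> None:
        workers = list(client.scheduler_info()["workers"])
        # Client.run calls the function once in each listed worker process.
        client.run(worker_check, 0, workers=workers)
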
File 3 of 4:

@@ -2,7 +2,10 @@ import pytest
 from xgboost import testing as tm
-pytestmark = pytest.mark.skipif(**tm.no_spark())
+pytestmark = [
+    pytest.mark.skipif(**tm.no_spark()),
+    tm.timeout(120),
+]
 from ..test_with_spark.test_data import run_dmatrix_ctor

File 4 of 4:

@@ -8,7 +8,10 @@ import sklearn
 from xgboost import testing as tm
-pytestmark = pytest.mark.skipif(**tm.no_spark())
+pytestmark = [
+    pytest.mark.skipif(**tm.no_spark()),
+    tm.timeout(240),
+]
 from pyspark.ml.linalg import Vectors
 from pyspark.ml.tuning import CrossValidator, ParamGridBuilder
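
The Spark modules get larger budgets (120 and 240 seconds) than the dask ones. Assuming the marks resolve to the pytest-timeout plugin, the sketch below shows what such a limit does in practice (illustration only, the test names are made up); the limit can also be set or overridden globally with the plugin's --timeout command-line option.

    # Illustration only (requires the pytest-timeout plugin): a test that runs
    # past the module-level limit is aborted and reported as a failure.
    import time

    import pytest

    pytestmark = [pytest.mark.timeout(2)]  # 2-second budget for every test here

    def test_within_budget() -> None:
        time.sleep(0.1)  # finishes well under the limit

    def test_over_budget() -> None:
        time.sleep(5)  # exceeds the 2-second limit; pytest-timeout fails this test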