[SYCL] Add dask support for distributed (#10812)

Dmitry Razdoburdin
2024-09-21 20:01:57 +02:00
committed by GitHub
parent 2a37a8880c
commit d7599e095b
10 changed files with 219 additions and 6 deletions

@@ -306,11 +306,12 @@ def _check_distributed_params(kwargs: Dict[str, Any]) -> None:
         raise TypeError(msg)
 
     if device and device.find(":") != -1:
-        raise ValueError(
-            "Distributed training doesn't support selecting device ordinal as GPUs are"
-            " managed by the distributed frameworks. use `device=cuda` or `device=gpu`"
-            " instead."
-        )
+        if device != "sycl:gpu":
+            raise ValueError(
+                "Distributed training doesn't support selecting device ordinal as GPUs are"
+                " managed by the distributed frameworks. use `device=cuda` or `device=gpu`"
+                " instead."
+            )
 
     if kwargs.get("booster", None) == "gblinear":
         raise NotImplementedError(
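
The change carves out `sycl:gpu` as the one ordinal-style device string allowed in distributed training, which is what lets the dask interface target SYCL devices. Below is a minimal usage sketch, not code from the commit: it assumes an XGBoost build with the SYCL plugin and workers that expose a SYCL GPU. Before this commit, the same call would have been rejected by `_check_distributed_params` with a ValueError.

    import dask.array as da
    from distributed import Client, LocalCluster
    from xgboost import dask as dxgb

    # Illustrative only: assumes a SYCL-capable XGBoost build and workers
    # that can see a SYCL GPU. The cluster size and data are placeholders.
    if __name__ == "__main__":
        with LocalCluster(n_workers=2, threads_per_worker=1) as cluster:
            with Client(cluster) as client:
                X = da.random.random((1_000, 10), chunks=(250, 10))
                y = da.random.randint(0, 2, size=1_000, chunks=250)

                # device="sycl:gpu" now passes the distributed-params check;
                # previously any device string containing ":" was rejected.
                clf = dxgb.DaskXGBClassifier(n_estimators=10, device="sycl:gpu")
                clf.client = client
                clf.fit(X, y)
                print(clf.predict(X).compute()[:5])

Note that only the exact string `sycl:gpu` passes the new check; a string with an explicit ordinal such as `sycl:gpu:0` still contains ":" and fails the `device != "sycl:gpu"` comparison, so device selection remains the distributed framework's job.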