[pyspark] Sort workers by task ID. (#10220)
parent f355418186
commit 54754f29dd
@@ -55,7 +55,7 @@ def _start_tracker(context: BarrierTaskContext, n_workers: int) -> Dict[str, Any
     """Start Rabit tracker with n_workers"""
     env: Dict[str, Any] = {"DMLC_NUM_WORKER": n_workers}
     host = _get_host_ip(context)
-    rabit_context = RabitTracker(host_ip=host, n_workers=n_workers)
+    rabit_context = RabitTracker(host_ip=host, n_workers=n_workers, sortby="task")
     env.update(rabit_context.worker_envs())
     rabit_context.start(n_workers)
     thread = Thread(target=rabit_context.join)
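
For context, below is a minimal sketch of the patched helper. Only the lines shown in the hunk above are taken from the diff; the imports, the _get_host_ip stand-in, and the trailing lines (starting the daemon thread and returning the environment) are assumptions added for illustration, not the library's exact code.

from threading import Thread
from typing import Any, Dict

from pyspark import BarrierTaskContext
from xgboost.tracker import RabitTracker


def _get_host_ip(context: BarrierTaskContext) -> str:
    # Hypothetical stand-in for the module's helper: use the address of the
    # first task in the barrier stage as the tracker host.
    return context.getTaskInfos()[0].address.split(":")[0]


def _start_tracker(context: BarrierTaskContext, n_workers: int) -> Dict[str, Any]:
    """Start Rabit tracker with n_workers."""
    env: Dict[str, Any] = {"DMLC_NUM_WORKER": n_workers}
    host = _get_host_ip(context)
    # sortby="task" (the change in this commit) makes the tracker assign
    # worker ranks by task ID instead of by host name, so ranks follow the
    # Spark barrier task ordering.
    rabit_context = RabitTracker(host_ip=host, n_workers=n_workers, sortby="task")
    env.update(rabit_context.worker_envs())
    rabit_context.start(n_workers)
    # Assumed continuation; the diff hunk ends at the Thread construction.
    thread = Thread(target=rabit_context.join)
    thread.daemon = True
    thread.start()
    return env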