[BREAKING] prevent multi-gpu usage (#4749)

* prevent multi-gpu usage

* fix distributed test

* combine gpu predictor tests

* set upper bound on n_gpus
This commit is contained in:
Rong Ou
2019-08-12 14:11:35 -07:00
committed by Rory Mitchell
parent 198f3a6c4a
commit c5b229632d
14 changed files with 59 additions and 298 deletions

View File

@@ -40,10 +40,10 @@ struct GenericParameter : public dmlc::Parameter<GenericParameter> {
         .describe("The primary GPU device ordinal.");
     DMLC_DECLARE_FIELD(n_gpus)
         .set_default(0)
-        .set_lower_bound(-1)
-        .describe("Deprecated, please use distributed training with one "
-                  "process per GPU. "
-                  "Number of GPUs to use for multi-gpu algorithms.");
+        .set_range(0, 1)
+        .describe("Deprecated. Single process multi-GPU training is no longer supported. "
+                  "Please switch to distributed training with one process per GPU. "
+                  "This can be done using Dask or Spark.");
   }
 };
 } // namespace xgboost