Remove auto configuration of seed_per_iteration. (#7009)

* Remove auto configuration of seed_per_iteration.

This was related to model recovery via rabit, which has since been removed.

* Document.
This commit is contained in:
Jiaming Yuan 2021-10-17 15:58:57 +08:00 committed by GitHub
parent fb1a9e6bc5
commit fbb0dc4275
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 6 additions and 7 deletions

View File

@ -420,7 +420,7 @@ Specify the learning task and the corresponding learning objective. The objectiv
* ``seed_per_iteration`` [default=false] * ``seed_per_iteration`` [default=false]
- Seed PRNG determnisticly via iterator number, this option will be switched on automatically on distributed mode. - Seed PRNG determnisticly via iterator number.
*********************** ***********************
Command Line Parameters Command Line Parameters

View File

@ -49,10 +49,7 @@ struct GenericParameter : public XGBoostParameter<GenericParameter> {
DMLC_DECLARE_ALIAS(seed, random_state); DMLC_DECLARE_ALIAS(seed, random_state);
DMLC_DECLARE_FIELD(seed_per_iteration) DMLC_DECLARE_FIELD(seed_per_iteration)
.set_default(false) .set_default(false)
.describe( .describe("Seed PRNG determnisticly via iterator number.");
"Seed PRNG determnisticly via iterator number, "
"this option will be switched on automatically on distributed "
"mode.");
DMLC_DECLARE_FIELD(nthread).set_default(0).describe( DMLC_DECLARE_FIELD(nthread).set_default(0).describe(
"Number of threads to use."); "Number of threads to use.");
DMLC_DECLARE_ALIAS(nthread, n_jobs); DMLC_DECLARE_ALIAS(nthread, n_jobs);

View File

@ -1062,9 +1062,10 @@ class LearnerImpl : public LearnerIO {
monitor_.Start("UpdateOneIter"); monitor_.Start("UpdateOneIter");
TrainingObserver::Instance().Update(iter); TrainingObserver::Instance().Update(iter);
this->Configure(); this->Configure();
if (generic_parameters_.seed_per_iteration || rabit::IsDistributed()) { if (generic_parameters_.seed_per_iteration) {
common::GlobalRandom().seed(generic_parameters_.seed * kRandSeedMagic + iter); common::GlobalRandom().seed(generic_parameters_.seed * kRandSeedMagic + iter);
} }
this->CheckDataSplitMode(); this->CheckDataSplitMode();
this->ValidateDMatrix(train.get(), true); this->ValidateDMatrix(train.get(), true);
@ -1089,9 +1090,10 @@ class LearnerImpl : public LearnerIO {
HostDeviceVector<GradientPair>* in_gpair) override { HostDeviceVector<GradientPair>* in_gpair) override {
monitor_.Start("BoostOneIter"); monitor_.Start("BoostOneIter");
this->Configure(); this->Configure();
if (generic_parameters_.seed_per_iteration || rabit::IsDistributed()) { if (generic_parameters_.seed_per_iteration) {
common::GlobalRandom().seed(generic_parameters_.seed * kRandSeedMagic + iter); common::GlobalRandom().seed(generic_parameters_.seed * kRandSeedMagic + iter);
} }
this->CheckDataSplitMode(); this->CheckDataSplitMode();
this->ValidateDMatrix(train.get(), true); this->ValidateDMatrix(train.get(), true);
auto local_cache = this->GetPredictionCache(); auto local_cache = this->GetPredictionCache();