diff --git a/src/learner.cc b/src/learner.cc
index ca6704944..e2187c0ff 100644
--- a/src/learner.cc
+++ b/src/learner.cc
@@ -61,6 +61,8 @@
 #include "xgboost/predictor.h"  // for PredictionContainer, PredictionCacheEntry
 #include "xgboost/string_view.h"  // for operator<<, StringView
 #include "xgboost/task.h"  // for ObjInfo
+#include <exception>  // for std::exception (caught around GetGradient/DoBoost)
+#include <iostream>   // for std::cerr (temporary debug tracing)
 
 namespace {
 const char* kMaxDeltaStepDefaultValue = "0.7";
@@ -1262,34 +1264,59 @@
     return out_impl;
   }
 
-  void UpdateOneIter(int iter, std::shared_ptr<DMatrix> train) override {
-    monitor_.Start("UpdateOneIter");
-    TrainingObserver::Instance().Update(iter);
-    this->Configure();
-    this->InitBaseScore(train.get());
-
-    if (ctx_.seed_per_iteration) {
-      common::GlobalRandom().seed(ctx_.seed * kRandSeedMagic + iter);
-    }
-
-    this->ValidateDMatrix(train.get(), true);
-
-    auto& predt = prediction_container_.Cache(train, ctx_.Device());
-
-    monitor_.Start("PredictRaw");
-    this->PredictRaw(train.get(), &predt, true, 0, 0);
-    TrainingObserver::Instance().Observe(predt.predictions, "Predictions");
-    monitor_.Stop("PredictRaw");
-
-    monitor_.Start("GetGradient");
-    GetGradient(predt.predictions, train->Info(), iter, &gpair_);
-    monitor_.Stop("GetGradient");
-    TrainingObserver::Instance().Observe(*gpair_.Data(), "Gradients");
-
-    gbm_->DoBoost(train.get(), &gpair_, &predt, obj_.get());
-    monitor_.Stop("UpdateOneIter");
-  }
+  // NOTE(review): the std::cerr tracing below is temporary debug
+  // instrumentation; remove or route through the project logger before merge.
+  void UpdateOneIter(int iter, std::shared_ptr<DMatrix> train) override {
+    std::cerr << "Entering UpdateOneIter, iteration: " << iter << std::endl;
+    monitor_.Start("UpdateOneIter");
+    TrainingObserver::Instance().Update(iter);
+
+    std::cerr << "Configuring..." << std::endl;
+    this->Configure();
+
+    std::cerr << "Initializing base score..." << std::endl;
+    this->InitBaseScore(train.get());
+
+    if (ctx_.seed_per_iteration) {
+      std::cerr << "Setting seed for iteration..." << std::endl;
+      common::GlobalRandom().seed(ctx_.seed * kRandSeedMagic + iter);
+    }
+
+    std::cerr << "Validating DMatrix..." << std::endl;
+    this->ValidateDMatrix(train.get(), true);
+
+    std::cerr << "Caching predictions..." << std::endl;
+    auto& predt = prediction_container_.Cache(train, ctx_.Device());
+
+    monitor_.Start("PredictRaw");
+    std::cerr << "Predicting raw values..." << std::endl;
+    this->PredictRaw(train.get(), &predt, true, 0, 0);
+    TrainingObserver::Instance().Observe(predt.predictions, "Predictions");
+    monitor_.Stop("PredictRaw");
+
+    monitor_.Start("GetGradient");
+    std::cerr << "Getting gradients..." << std::endl;
+    try {
+      GetGradient(predt.predictions, train->Info(), iter, &gpair_);
+    } catch (const std::exception& e) {
+      std::cerr << "Exception in GetGradient: " << e.what() << std::endl;
+      throw;  // trace only; rethrow so callers still see the failure
+    }
+    monitor_.Stop("GetGradient");
+    TrainingObserver::Instance().Observe(*gpair_.Data(), "Gradients");
+
+    std::cerr << "Performing boosting..." << std::endl;
+    try {
+      gbm_->DoBoost(train.get(), &gpair_, &predt, obj_.get());
+    } catch (const std::exception& e) {
+      std::cerr << "Exception in DoBoost: " << e.what() << std::endl;
+      throw;  // trace only; rethrow so callers still see the failure
+    }
+
+    monitor_.Stop("UpdateOneIter");
+    std::cerr << "Exiting UpdateOneIter" << std::endl;
+  }
 
   void BoostOneIter(int iter, std::shared_ptr<DMatrix> train,
                     linalg::Matrix<GradientPair>* in_gpair) override {
     monitor_.Start("BoostOneIter");