[Breaking] Fix custom metric for multi output. (#5954)

* Set output margin to true for custom metric.  This fixes only the R and Python interfaces.
This commit is contained in:
Jiaming Yuan
2020-07-29 19:25:27 +08:00
committed by GitHub
parent 75b8c22b0b
commit 18349a7ccf
7 changed files with 41 additions and 13 deletions

View File

@@ -154,9 +154,9 @@ LearnerModelParam::LearnerModelParam(
struct LearnerTrainParam : public XGBoostParameter<LearnerTrainParam> {
// data split mode, can be row, col, or none.
DataSplitMode dsplit;
DataSplitMode dsplit {DataSplitMode::kAuto};
// flag to disable default metric
int disable_default_eval_metric;
bool disable_default_eval_metric {false};
// FIXME(trivialfis): The following parameters belong to model itself, but can be
// specified by users. Move them to model parameter once we can get rid of binary IO.
std::string booster;
@@ -171,7 +171,7 @@ struct LearnerTrainParam : public XGBoostParameter<LearnerTrainParam> {
.add_enum("row", DataSplitMode::kRow)
.describe("Data split mode for distributed training.");
DMLC_DECLARE_FIELD(disable_default_eval_metric)
.set_default(0)
.set_default(false)
.describe("Flag to disable default metric. Set to true to disable");
DMLC_DECLARE_FIELD(booster)
.set_default("gbtree")
@@ -253,7 +253,7 @@ class LearnerConfiguration : public Learner {
void Configure() override {
// Variant of double-checked locking
if (!this->need_configuration_) { return; }
std::lock_guard<std::mutex> gard(config_lock_);
std::lock_guard<std::mutex> guard(config_lock_);
if (!this->need_configuration_) { return; }
monitor_.Start("Configure");