Restore clang tidy test. (#8861)

This commit is contained in:
Jiaming Yuan
2023-03-04 05:47:04 +08:00
committed by GitHub
parent 2dc22e7aad
commit 4d665b3fb0
55 changed files with 216 additions and 205 deletions

View File

@@ -75,10 +75,7 @@ class GBLinear : public GradientBooster {
       : GradientBooster{ctx},
         learner_model_param_{learner_model_param},
         model_{learner_model_param},
-        previous_model_{learner_model_param},
-        sum_instance_weight_(0),
-        sum_weight_complete_(false),
-        is_converged_(false) {}
+        previous_model_{learner_model_param} {}
 
   void Configure(const Args& cfg) override {
     if (model_.weight.size() == 0) {
@@ -344,10 +341,10 @@ class GBLinear : public GradientBooster {
   GBLinearModel previous_model_;
   GBLinearTrainParam param_;
   std::unique_ptr<LinearUpdater> updater_;
-  double sum_instance_weight_;
-  bool sum_weight_complete_;
+  double sum_instance_weight_{};
+  bool sum_weight_complete_{false};
   common::Monitor monitor_;
-  bool is_converged_;
+  bool is_converged_{false};
 };
 // register the objective functions

View File

@@ -47,12 +47,12 @@ class GBLinearModel : public Model {
   DeprecatedGBLinearModelParam param_;
 
  public:
-  int32_t num_boosted_rounds;
+  int32_t num_boosted_rounds{0};
   LearnerModelParam const* learner_model_param;
 
  public:
-  explicit GBLinearModel(LearnerModelParam const* learner_model_param) :
-      num_boosted_rounds{0}, learner_model_param {learner_model_param} {}
+  explicit GBLinearModel(LearnerModelParam const *learner_model_param)
+      : learner_model_param{learner_model_param} {}
   void Configure(Args const &) { }
   // weight for each of feature, bias is the last one