Clang-tidy static analysis (#3222)

* Clang-tidy static analysis
* Modernise checks
* Google coding standard checks
* Identifier renaming according to Google style
Rory Mitchell
2018-04-19 18:57:13 +12:00
committed by GitHub
parent 3242b0a378
commit ccf80703ef
97 changed files with 3407 additions and 3354 deletions
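The identifier renames throughout this diff follow the Google C++ naming convention (presumably driven by clang-tidy's readability-identifier-naming check): data members gain a trailing underscore (num_col -> num_col_, labels -> labels_) and accessor methods switch to CamelCase (info() -> Info(), stat() -> Stat(), dh::raw() -> dh::Raw()). A minimal, hypothetical sketch of the pattern, with simplified declarations that are not the actual xgboost headers:

// Hypothetical sketch of the naming convention applied by this commit
// (simplified; not the actual xgboost declarations).
#include <cstdint>
#include <vector>

struct MetaInfo {
  // Data members now carry a trailing underscore (was: num_row, num_col, labels).
  uint64_t num_row_{0};
  uint64_t num_col_{0};
  std::vector<float> labels_;
};

class DMatrix {
 public:
  // Accessor methods now use CamelCase (was: info()).
  MetaInfo& Info() { return info_; }

 private:
  MetaInfo info_;
};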

View File

@@ -20,10 +20,10 @@ TEST(c_api, XGDMatrixCreateFromMat_omp) {
std::shared_ptr<xgboost::DMatrix> dmat =
*static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);
xgboost::MetaInfo &info = dmat->info();
ASSERT_EQ(info.num_col, num_cols);
ASSERT_EQ(info.num_row, row);
ASSERT_EQ(info.num_nonzero, num_cols * row - num_missing);
xgboost::MetaInfo &info = dmat->Info();
ASSERT_EQ(info.num_col_, num_cols);
ASSERT_EQ(info.num_row_, row);
ASSERT_EQ(info.num_nonzero_, num_cols * row - num_missing);
auto iter = dmat->RowIterator();
iter->BeforeFirst();

View File

@@ -38,7 +38,7 @@ void SpeedTest() {
xgboost::common::Timer t;
dh::TransformLbs(
0, &temp_memory, h_rows.size(), dh::raw(row_ptr), row_ptr.size() - 1,
0, &temp_memory, h_rows.size(), dh::Raw(row_ptr), row_ptr.size() - 1,
false,
[=] __device__(size_t idx, size_t ridx) { d_output_row[idx] = ridx; });
@@ -66,7 +66,7 @@ void TestLbs() {
thrust::device_vector<int> output_row(h_rows.size());
auto d_output_row = output_row.data();
dh::TransformLbs(0, &temp_memory, h_rows.size(), dh::raw(row_ptr),
dh::TransformLbs(0, &temp_memory, h_rows.size(), dh::Raw(row_ptr),
row_ptr.size() - 1, false,
[=] __device__(size_t idx, size_t ridx) {
d_output_row[idx] = ridx;
@@ -83,6 +83,6 @@ TEST(cub_lbs, Test) { TestLbs(); }
TEST(sumReduce, Test) {
thrust::device_vector<float> data(100, 1.0f);
dh::CubMemory temp;
auto sum = dh::sumReduction(temp, dh::raw(data), data.size());
auto sum = dh::SumReduction(temp, dh::Raw(data), data.size());
ASSERT_NEAR(sum, 100.0f, 1e-5);
}

View File

@@ -12,9 +12,9 @@ TEST(MetaInfo, GetSet) {
info.SetInfo("root_index", double2, xgboost::kDouble, 2);
EXPECT_EQ(info.GetRoot(1), 2.0f);
EXPECT_EQ(info.labels.size(), 0);
EXPECT_EQ(info.labels_.size(), 0);
info.SetInfo("label", double2, xgboost::kFloat32, 2);
EXPECT_EQ(info.labels.size(), 2);
EXPECT_EQ(info.labels_.size(), 2);
float float2[2] = {1.0f, 2.0f};
EXPECT_EQ(info.GetWeight(1), 1.0f)
@@ -23,26 +23,26 @@ TEST(MetaInfo, GetSet) {
EXPECT_EQ(info.GetWeight(1), 2.0f);
uint32_t uint32_t2[2] = {1U, 2U};
EXPECT_EQ(info.base_margin.size(), 0);
EXPECT_EQ(info.base_margin_.size(), 0);
info.SetInfo("base_margin", uint32_t2, xgboost::kUInt32, 2);
EXPECT_EQ(info.base_margin.size(), 2);
EXPECT_EQ(info.base_margin_.size(), 2);
uint64_t uint64_t2[2] = {1U, 2U};
EXPECT_EQ(info.group_ptr.size(), 0);
EXPECT_EQ(info.group_ptr_.size(), 0);
info.SetInfo("group", uint64_t2, xgboost::kUInt64, 2);
ASSERT_EQ(info.group_ptr.size(), 3);
EXPECT_EQ(info.group_ptr[2], 3);
ASSERT_EQ(info.group_ptr_.size(), 3);
EXPECT_EQ(info.group_ptr_[2], 3);
info.Clear();
ASSERT_EQ(info.group_ptr.size(), 0);
ASSERT_EQ(info.group_ptr_.size(), 0);
}
TEST(MetaInfo, SaveLoadBinary) {
xgboost::MetaInfo info;
double vals[2] = {1.0, 2.0};
info.SetInfo("label", vals, xgboost::kDouble, 2);
info.num_row = 2;
info.num_col = 1;
info.num_row_ = 2;
info.num_col_ = 1;
std::string tmp_file = TempFileName();
dmlc::Stream * fs = dmlc::Stream::Create(tmp_file.c_str(), "w");
@@ -55,9 +55,9 @@ TEST(MetaInfo, SaveLoadBinary) {
fs = dmlc::Stream::Create(tmp_file.c_str(), "r");
xgboost::MetaInfo inforead;
inforead.LoadBinary(fs);
EXPECT_EQ(inforead.labels, info.labels);
EXPECT_EQ(inforead.num_col, info.num_col);
EXPECT_EQ(inforead.num_row, info.num_row);
EXPECT_EQ(inforead.labels_, info.labels_);
EXPECT_EQ(inforead.num_col_, info.num_col_);
EXPECT_EQ(inforead.num_row_, info.num_row_);
std::remove(tmp_file.c_str());
}

View File

@@ -14,9 +14,9 @@ TEST(SimpleCSRSource, SaveLoadBinary) {
xgboost::DMatrix * dmat_read = xgboost::DMatrix::Load(tmp_binfile, true, false);
std::remove(tmp_binfile.c_str());
EXPECT_EQ(dmat->info().num_col, dmat_read->info().num_col);
EXPECT_EQ(dmat->info().num_row, dmat_read->info().num_row);
EXPECT_EQ(dmat->info().num_row, dmat_read->info().num_row);
EXPECT_EQ(dmat->Info().num_col_, dmat_read->Info().num_col_);
EXPECT_EQ(dmat->Info().num_row_, dmat_read->Info().num_row_);
EXPECT_EQ(dmat->Info().num_row_, dmat_read->Info().num_row_);
dmlc::DataIter<xgboost::RowBatch> * row_iter = dmat->RowIterator();
dmlc::DataIter<xgboost::RowBatch> * row_iter_read = dmat_read->RowIterator();

View File

@@ -10,10 +10,10 @@ TEST(SimpleDMatrix, MetaInfo) {
std::remove(tmp_file.c_str());
// Test the metadata that was parsed
EXPECT_EQ(dmat->info().num_row, 2);
EXPECT_EQ(dmat->info().num_col, 5);
EXPECT_EQ(dmat->info().num_nonzero, 6);
EXPECT_EQ(dmat->info().labels.size(), dmat->info().num_row);
EXPECT_EQ(dmat->Info().num_row_, 2);
EXPECT_EQ(dmat->Info().num_col_, 5);
EXPECT_EQ(dmat->Info().num_nonzero_, 6);
EXPECT_EQ(dmat->Info().labels_.size(), dmat->Info().num_row_);
}
TEST(SimpleDMatrix, RowAccess) {
@@ -26,7 +26,7 @@ TEST(SimpleDMatrix, RowAccess) {
long row_count = 0;
row_iter->BeforeFirst();
while (row_iter->Next()) row_count += row_iter->Value().size;
EXPECT_EQ(row_count, dmat->info().num_row);
EXPECT_EQ(row_count, dmat->Info().num_row_);
// Test the data read into the first row
row_iter->BeforeFirst();
row_iter->Next();
@@ -43,15 +43,15 @@ TEST(SimpleDMatrix, ColAccessWithoutBatches) {
std::remove(tmp_file.c_str());
// Unsorted column access
const std::vector<bool> enable(dmat->info().num_col, true);
const std::vector<bool> enable(dmat->Info().num_col_, true);
EXPECT_EQ(dmat->HaveColAccess(false), false);
dmat->InitColAccess(enable, 1, dmat->info().num_row, false);
dmat->InitColAccess(enable, 1, dmat->Info().num_row_, false);
dmat->InitColAccess(enable, 0, 0, false); // Calling it again should not change it
ASSERT_EQ(dmat->HaveColAccess(false), true);
// Sorted column access
EXPECT_EQ(dmat->HaveColAccess(true), false);
dmat->InitColAccess(enable, 1, dmat->info().num_row, true);
dmat->InitColAccess(enable, 1, dmat->Info().num_row_, true);
dmat->InitColAccess(enable, 0, 0, true); // Calling it again should not change it
ASSERT_EQ(dmat->HaveColAccess(true), true);
@@ -67,7 +67,7 @@ TEST(SimpleDMatrix, ColAccessWithoutBatches) {
col_iter->BeforeFirst();
while (col_iter->Next()) {
num_col_batch += 1;
EXPECT_EQ(col_iter->Value().size, dmat->info().num_col)
EXPECT_EQ(col_iter->Value().size, dmat->Info().num_col_)
<< "Expected batch size = number of cells as #batches is 1.";
for (int i = 0; i < static_cast<int>(col_iter->Value().size); ++i) {
EXPECT_EQ(col_iter->Value()[i].length, dmat->GetColSize(i))
@@ -94,7 +94,7 @@ TEST(SimpleDMatrix, ColAccessWithBatches) {
std::remove(tmp_file.c_str());
// Unsorted column access
const std::vector<bool> enable(dmat->info().num_col, true);
const std::vector<bool> enable(dmat->Info().num_col_, true);
EXPECT_EQ(dmat->HaveColAccess(false), false);
dmat->InitColAccess(enable, 1, 1, false);
dmat->InitColAccess(enable, 0, 0, false); // Calling it again should not change it
@@ -118,20 +118,20 @@ TEST(SimpleDMatrix, ColAccessWithBatches) {
col_iter->BeforeFirst();
while (col_iter->Next()) {
num_col_batch += 1;
EXPECT_EQ(col_iter->Value().size, dmat->info().num_col)
EXPECT_EQ(col_iter->Value().size, dmat->Info().num_col_)
<< "Expected batch size = num_cols as max_row_perbatch is 1.";
for (int i = 0; i < static_cast<int>(col_iter->Value().size); ++i) {
EXPECT_LE(col_iter->Value()[i].length, 1)
<< "Expected length of each colbatch <=1 as max_row_perbatch is 1.";
}
}
EXPECT_EQ(num_col_batch, dmat->info().num_row)
EXPECT_EQ(num_col_batch, dmat->Info().num_row_)
<< "Expected num batches = num_rows as max_row_perbatch is 1";
col_iter = nullptr;
// The iterator feats should ignore any numbers larger than the num_col
std::vector<xgboost::bst_uint> sub_feats = {
4, 3, static_cast<unsigned int>(dmat->info().num_col + 1)};
4, 3, static_cast<unsigned int>(dmat->Info().num_col_ + 1)};
dmlc::DataIter<xgboost::ColBatch> * sub_col_iter = dmat->ColIterator(sub_feats);
// Loop over the batches and assert the data is as expected
sub_col_iter->BeforeFirst();

View File

@@ -12,10 +12,10 @@ TEST(SparsePageDMatrix, MetaInfo) {
EXPECT_TRUE(FileExists(tmp_file + ".cache"));
// Test the metadata that was parsed
EXPECT_EQ(dmat->info().num_row, 2);
EXPECT_EQ(dmat->info().num_col, 5);
EXPECT_EQ(dmat->info().num_nonzero, 6);
EXPECT_EQ(dmat->info().labels.size(), dmat->info().num_row);
EXPECT_EQ(dmat->Info().num_row_, 2);
EXPECT_EQ(dmat->Info().num_col_, 5);
EXPECT_EQ(dmat->Info().num_nonzero_, 6);
EXPECT_EQ(dmat->Info().labels_.size(), dmat->Info().num_row_);
// Clean up of external memory files
std::remove((tmp_file + ".cache").c_str());
@@ -34,7 +34,7 @@ TEST(SparsePageDMatrix, RowAccess) {
long row_count = 0;
row_iter->BeforeFirst();
while (row_iter->Next()) row_count += row_iter->Value().size;
EXPECT_EQ(row_count, dmat->info().num_row);
EXPECT_EQ(row_count, dmat->Info().num_row_);
// Test the data read into the first row
row_iter->BeforeFirst();
row_iter->Next();
@@ -57,7 +57,7 @@ TEST(SparsePageDMatrix, ColAcess) {
EXPECT_FALSE(FileExists(tmp_file + ".cache.col.page"));
EXPECT_EQ(dmat->HaveColAccess(true), false);
const std::vector<bool> enable(dmat->info().num_col, true);
const std::vector<bool> enable(dmat->Info().num_col_, true);
dmat->InitColAccess(enable, 1, 1, true); // Max 1 row per patch
ASSERT_EQ(dmat->HaveColAccess(true), true);
EXPECT_TRUE(FileExists(tmp_file + ".cache.col.page"));
@@ -73,10 +73,10 @@ TEST(SparsePageDMatrix, ColAcess) {
col_iter->BeforeFirst();
while (col_iter->Next()) {
num_col_batch += 1;
EXPECT_EQ(col_iter->Value().size, dmat->info().num_col)
EXPECT_EQ(col_iter->Value().size, dmat->Info().num_col_)
<< "Expected batch size to be same as num_cols as max_row_perbatch is 1.";
}
EXPECT_EQ(num_col_batch, dmat->info().num_row)
EXPECT_EQ(num_col_batch, dmat->Info().num_row_)
<< "Expected num batches to be same as num_rows as max_row_perbatch is 1";
col_iter = nullptr;

View File

@@ -34,17 +34,17 @@ void CheckObjFunction(xgboost::ObjFunction * obj,
std::vector<xgboost::bst_float> out_grad,
std::vector<xgboost::bst_float> out_hess) {
xgboost::MetaInfo info;
info.num_row = labels.size();
info.labels = labels;
info.weights = weights;
info.num_row_ = labels.size();
info.labels_ = labels;
info.weights_ = weights;
xgboost::HostDeviceVector<xgboost::bst_float> in_preds(preds);
xgboost::HostDeviceVector<xgboost::bst_gpair> out_gpair;
xgboost::HostDeviceVector<xgboost::GradientPair> out_gpair;
obj->GetGradient(&in_preds, info, 1, &out_gpair);
std::vector<xgboost::bst_gpair>& gpair = out_gpair.data_h();
std::vector<xgboost::GradientPair>& gpair = out_gpair.HostVector();
ASSERT_EQ(gpair.size(), in_preds.size());
ASSERT_EQ(gpair.size(), in_preds.Size());
for (int i = 0; i < static_cast<int>(gpair.size()); ++i) {
EXPECT_NEAR(gpair[i].GetGrad(), out_grad[i], 0.01)
<< "Unexpected grad for pred=" << preds[i] << " label=" << labels[i]
@@ -60,9 +60,9 @@ xgboost::bst_float GetMetricEval(xgboost::Metric * metric,
std::vector<xgboost::bst_float> labels,
std::vector<xgboost::bst_float> weights) {
xgboost::MetaInfo info;
info.num_row = labels.size();
info.labels = labels;
info.weights = weights;
info.num_row_ = labels.size();
info.labels_ = labels;
info.weights_ = weights;
return metric->Eval(preds, info, false);
}

View File

@@ -8,15 +8,15 @@ typedef std::pair<std::string, std::string> arg;
TEST(Linear, shotgun) {
typedef std::pair<std::string, std::string> arg;
auto mat = CreateDMatrix(10, 10, 0);
std::vector<bool> enabled(mat->info().num_col, true);
std::vector<bool> enabled(mat->Info().num_col_, true);
mat->InitColAccess(enabled, 1.0f, 1 << 16, false);
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
xgboost::LinearUpdater::Create("shotgun"));
updater->Init({{"eta", "1."}});
std::vector<xgboost::bst_gpair> gpair(mat->info().num_row,
xgboost::bst_gpair(-5, 1.0));
std::vector<xgboost::GradientPair> gpair(mat->Info().num_row_,
xgboost::GradientPair(-5, 1.0));
xgboost::gbm::GBLinearModel model;
model.param.num_feature = mat->info().num_col;
model.param.num_feature = mat->Info().num_col_;
model.param.num_output_group = 1;
model.LazyInitModel();
updater->Update(&gpair, mat.get(), &model, gpair.size());
@@ -27,15 +27,15 @@ TEST(Linear, shotgun) {
TEST(Linear, coordinate) {
typedef std::pair<std::string, std::string> arg;
auto mat = CreateDMatrix(10, 10, 0);
std::vector<bool> enabled(mat->info().num_col, true);
std::vector<bool> enabled(mat->Info().num_col_, true);
mat->InitColAccess(enabled, 1.0f, 1 << 16, false);
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
xgboost::LinearUpdater::Create("coord_descent"));
updater->Init({});
std::vector<xgboost::bst_gpair> gpair(mat->info().num_row,
xgboost::bst_gpair(-5, 1.0));
std::vector<xgboost::GradientPair> gpair(mat->Info().num_row_,
xgboost::GradientPair(-5, 1.0));
xgboost::gbm::GBLinearModel model;
model.param.num_feature = mat->info().num_col;
model.param.num_feature = mat->Info().num_col_;
model.param.num_output_group = 1;
model.LazyInitModel();
updater->Update(&gpair, mat.get(), &model, gpair.size());

View File

@@ -49,8 +49,8 @@ TEST(Objective, LogisticRegressionBasic) {
xgboost::HostDeviceVector<xgboost::bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
std::vector<xgboost::bst_float> out_preds = {0.5f, 0.524f, 0.622f, 0.710f, 0.731f};
obj->PredTransform(&io_preds);
auto& preds = io_preds.data_h();
for (int i = 0; i < static_cast<int>(io_preds.size()); ++i) {
auto& preds = io_preds.HostVector();
for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
}
}
@@ -98,8 +98,8 @@ TEST(Objective, PoissonRegressionBasic) {
xgboost::HostDeviceVector<xgboost::bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
std::vector<xgboost::bst_float> out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f};
obj->PredTransform(&io_preds);
auto& preds = io_preds.data_h();
for (int i = 0; i < static_cast<int>(io_preds.size()); ++i) {
auto& preds = io_preds.HostVector();
for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
}
}
@@ -134,8 +134,8 @@ TEST(Objective, GammaRegressionBasic) {
xgboost::HostDeviceVector<xgboost::bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
std::vector<xgboost::bst_float> out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f};
obj->PredTransform(&io_preds);
auto& preds = io_preds.data_h();
for (int i = 0; i < static_cast<int>(io_preds.size()); ++i) {
auto& preds = io_preds.HostVector();
for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
}
}
@@ -171,8 +171,8 @@ TEST(Objective, TweedieRegressionBasic) {
xgboost::HostDeviceVector<xgboost::bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
std::vector<xgboost::bst_float> out_preds = {1, 1.10f, 1.64f, 2.45f, 2.71f};
obj->PredTransform(&io_preds);
auto& preds = io_preds.data_h();
for (int i = 0; i < static_cast<int>(io_preds.size()); ++i) {
auto& preds = io_preds.HostVector();
for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
}
}

View File

@@ -51,8 +51,8 @@ TEST(Objective, GPULogisticRegressionBasic) {
xgboost::HostDeviceVector<xgboost::bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1};
std::vector<xgboost::bst_float> out_preds = {0.5f, 0.524f, 0.622f, 0.710f, 0.731f};
obj->PredTransform(&io_preds);
auto& preds = io_preds.data_h();
for (int i = 0; i < static_cast<int>(io_preds.size()); ++i) {
auto& preds = io_preds.HostVector();
for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
}
}

View File

@@ -11,8 +11,8 @@ TEST(cpu_predictor, Test) {
std::vector<std::unique_ptr<RegTree>> trees;
trees.push_back(std::unique_ptr<RegTree>(new RegTree));
trees.back()->InitModel();
(*trees.back())[0].set_leaf(1.5f);
(*trees.back()).stat(0).sum_hess = 1.0f;
(*trees.back())[0].SetLeaf(1.5f);
(*trees.back()).Stat(0).sum_hess = 1.0f;
gbm::GBTreeModel model(0.5);
model.CommitModel(std::move(trees), 0);
model.param.num_output_group = 1;
@@ -26,8 +26,8 @@ TEST(cpu_predictor, Test) {
// Test predict batch
HostDeviceVector<float> out_predictions;
cpu_predictor->PredictBatch(dmat.get(), &out_predictions, model, 0);
std::vector<float>& out_predictions_h = out_predictions.data_h();
for (int i = 0; i < out_predictions.size(); i++) {
std::vector<float>& out_predictions_h = out_predictions.HostVector();
for (int i = 0; i < out_predictions.Size(); i++) {
ASSERT_EQ(out_predictions_h[i], 1.5);
}

View File

@@ -21,8 +21,8 @@ TEST(gpu_predictor, Test) {
std::vector<std::unique_ptr<RegTree>> trees;
trees.push_back(std::unique_ptr<RegTree>(new RegTree()));
trees.back()->InitModel();
(*trees.back())[0].set_leaf(1.5f);
(*trees.back()).stat(0).sum_hess = 1.0f;
(*trees.back())[0].SetLeaf(1.5f);
(*trees.back()).Stat(0).sum_hess = 1.0f;
gbm::GBTreeModel model(0.5);
model.CommitModel(std::move(trees), 0);
model.param.num_output_group = 1;
@@ -37,10 +37,10 @@ TEST(gpu_predictor, Test) {
HostDeviceVector<float> cpu_out_predictions;
gpu_predictor->PredictBatch(dmat.get(), &gpu_out_predictions, model, 0);
cpu_predictor->PredictBatch(dmat.get(), &cpu_out_predictions, model, 0);
std::vector<float>& gpu_out_predictions_h = gpu_out_predictions.data_h();
std::vector<float>& cpu_out_predictions_h = cpu_out_predictions.data_h();
std::vector<float>& gpu_out_predictions_h = gpu_out_predictions.HostVector();
std::vector<float>& cpu_out_predictions_h = cpu_out_predictions.HostVector();
float abs_tolerance = 0.001;
for (int i = 0; i < gpu_out_predictions.size(); i++) {
for (int i = 0; i < gpu_out_predictions.Size(); i++) {
ASSERT_LT(std::abs(gpu_out_predictions_h[i] - cpu_out_predictions_h[i]),
abs_tolerance);
}

View File

@@ -29,7 +29,7 @@ TEST(gpu_hist_experimental, TestSparseShard) {
ASSERT_LT(shard.row_stride, columns);
auto host_gidx_buffer = shard.gidx_buffer.as_vector();
auto host_gidx_buffer = shard.gidx_buffer.AsVector();
common::CompressedIterator<uint32_t> gidx(host_gidx_buffer.data(),
hmat.row_ptr.back() + 1);
@@ -64,7 +64,7 @@ TEST(gpu_hist_experimental, TestDenseShard) {
ASSERT_EQ(shard.row_stride, columns);
auto host_gidx_buffer = shard.gidx_buffer.as_vector();
auto host_gidx_buffer = shard.gidx_buffer.AsVector();
common::CompressedIterator<uint32_t> gidx(host_gidx_buffer.data(),
hmat.row_ptr.back() + 1);

View File

@@ -89,8 +89,8 @@ TEST(Param, SplitEntry) {
xgboost::tree::SplitEntry se3;
se3.Update(2, 101, 0, false);
xgboost::tree::SplitEntry::Reduce(se2, se3);
EXPECT_EQ(se2.split_index(), 101);
EXPECT_FALSE(se2.default_left());
EXPECT_EQ(se2.SplitIndex(), 101);
EXPECT_FALSE(se2.DefaultLeft());
EXPECT_TRUE(se1.NeedReplace(3, 1));
}