Add travis sanitizers tests. (#3557)

* Add travis sanitizers tests.
* Add gcc-7 in Travis.
* Add SANITIZER_PATH for CMake.
* Enable sanitizer tests in Travis.
* Fix memory leaks in tests.
* Fix all memory leaks reported by Address Sanitizer.
* tests/cpp/helpers.h/CreateDMatrix now returns a raw pointer.
Parent: 983cb0b374
Commit: cf2d86a4f6
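The change that touches most test files is the new contract of the test helper CreateDMatrix: it now returns a raw std::shared_ptr<xgboost::DMatrix>* instead of a shared_ptr by value, so call sites dereference through (*dmat)-> and must delete the pointer when done. A minimal sketch of the new calling convention follows; the signature matches the helpers.h hunk further below, while the test body itself is only illustrative.

    #include <memory>
    #include <gtest/gtest.h>
    #include <xgboost/data.h>
    #include "helpers.h"  // declares CreateDMatrix as in the helpers.h hunk below

    TEST(Example, RawPointerDMatrix) {
      // CreateDMatrix now returns a heap-allocated std::shared_ptr<xgboost::DMatrix>*.
      std::shared_ptr<xgboost::DMatrix> *dmat = CreateDMatrix(100, 10, 0.0);

      // Access the DMatrix through a double dereference: first the raw pointer,
      // then the shared_ptr it owns.
      ASSERT_EQ((*dmat)->Info().num_row_, 100);

      // The caller owns the outer pointer; delete it so AddressSanitizer
      // does not report the allocation as leaked.
      delete dmat;
    }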
@@ -28,6 +28,8 @@ env:
     - TASK=cpp_test
     # distributed test
     - TASK=distributed_test
+    # address sanitizer test
+    - TASK=sanitizer_test

 matrix:
   exclude:

@@ -43,6 +45,8 @@ matrix:
       env: TASK=cpp_test
     - os: osx
       env: TASK=distributed_test
+    - os: osx
+      env: TASK=sanitizer_test

 # dependent apt packages
 addons:

@@ -62,6 +66,8 @@ addons:
       - graphviz
       - gcc-4.8
       - g++-4.8
+      - gcc-7
+      - g++-7

 before_install:
   - source dmlc-core/scripts/travis/travis_setup_env.sh
@@ -14,9 +14,10 @@ option(USE_NCCL "Build using NCCL for multi-GPU. Also requires USE_CUDA")
 option(JVM_BINDINGS "Build JVM bindings" OFF)
 option(GOOGLE_TEST "Build google tests" OFF)
 option(R_LIB "Build shared library for R package" OFF)
-option(USE_SANITIZER "Use santizer flags" OFF)
 set(GPU_COMPUTE_VER "" CACHE STRING
   "Space separated list of compute versions to be built against, e.g. '35 61'")
+option(USE_SANITIZER "Use sanitizer flags" OFF)
+option(SANITIZER_PATH "Path to sanitizers.")
 set(ENABLED_SANITIZERS "address" "leak" CACHE STRING
   "Semicolon separated list of sanitizer names. E.g 'address;leak'. Supported sanitizers are
   address, leak and thread.")
@@ -1,8 +1,8 @@
 set(ASan_LIB_NAME ASan)

 find_library(ASan_LIBRARY
-    NAMES libasan.so libasan.so.4
-    PATHS /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib)
+    NAMES libasan.so libasan.so.4 libasan.so.3 libasan.so.2 libasan.so.1 libasan.so.0
+    PATHS ${SANITIZER_PATH} /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ${CMAKE_PREFIX_PATH}/lib)

 include(FindPackageHandleStandardArgs)
 find_package_handle_standard_args(ASan DEFAULT_MSG

@@ -2,7 +2,7 @@ set(LSan_LIB_NAME lsan)

 find_library(LSan_LIBRARY
     NAMES liblsan.so liblsan.so.0 liblsan.so.0.0.0
-    PATHS /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib)
+    PATHS ${SANITIZER_PATH} /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ${CMAKE_PREFIX_PATH}/lib)

 include(FindPackageHandleStandardArgs)
 find_package_handle_standard_args(LSan DEFAULT_MSG

@@ -2,7 +2,7 @@ set(TSan_LIB_NAME tsan)

 find_library(TSan_LIBRARY
     NAMES libtsan.so libtsan.so.0 libtsan.so.0.0.0
-    PATHS /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib)
+    PATHS ${SANITIZER_PATH} /usr/lib64 /usr/lib /usr/local/lib64 /usr/local/lib ${CMAKE_PREFIX_PATH}/lib)

 include(FindPackageHandleStandardArgs)
 find_package_handle_standard_args(TSan DEFAULT_MSG
@@ -149,6 +149,14 @@ sanitizer is not compatible with the other two sanitizers.

   cmake -DUSE_SANITIZER=ON -DENABLED_SANITIZERS="address;leak" /path/to/xgboost

+By default, CMake will search regular system paths for sanitizers; you can also
+supply a specific SANITIZER_PATH.
+
+.. code-block:: bash
+
+  cmake -DUSE_SANITIZER=ON -DENABLED_SANITIZERS="address;leak" \
+    -DSANITIZER_PATH=/path/to/sanitizers /path/to/xgboost
+
 How to use sanitizers with CUDA support
 =======================================
 Runing XGBoost on CUDA with address sanitizer (asan) will raise memory error.
@@ -13,14 +13,14 @@ TEST(c_api, XGDMatrixCreateFromMatDT) {
   DMatrixHandle handle;
   XGDMatrixCreateFromDT(data.data(), types.data(), 3, 2, &handle,
                         0);
-  std::shared_ptr<xgboost::DMatrix> dmat =
-      *static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);
-  xgboost::MetaInfo &info = dmat->Info();
+  std::shared_ptr<xgboost::DMatrix> *dmat =
+      static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);
+  xgboost::MetaInfo &info = (*dmat)->Info();
   ASSERT_EQ(info.num_col_, 2);
   ASSERT_EQ(info.num_row_, 3);
   ASSERT_EQ(info.num_nonzero_, 6);

-  auto iter = dmat->RowIterator();
+  auto iter = (*dmat)->RowIterator();
   iter->BeforeFirst();
   while (iter->Next()) {
     auto batch = iter->Value();

@@ -29,6 +29,8 @@ TEST(c_api, XGDMatrixCreateFromMatDT) {
     ASSERT_EQ(batch[2][0].fvalue, 3.0f);
     ASSERT_EQ(batch[2][1].fvalue, 0.0f);
   }
+
+  delete dmat;
 }

 TEST(c_api, XGDMatrixCreateFromMat_omp) {

@@ -46,14 +48,14 @@ TEST(c_api, XGDMatrixCreateFromMat_omp) {
                          std::numeric_limits<float>::quiet_NaN(), &handle,
                          0);

-  std::shared_ptr<xgboost::DMatrix> dmat =
-      *static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);
-  xgboost::MetaInfo &info = dmat->Info();
+  std::shared_ptr<xgboost::DMatrix> *dmat =
+      static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);
+  xgboost::MetaInfo &info = (*dmat)->Info();
   ASSERT_EQ(info.num_col_, num_cols);
   ASSERT_EQ(info.num_row_, row);
   ASSERT_EQ(info.num_nonzero_, num_cols * row - num_missing);

-  auto iter = dmat->RowIterator();
+  auto iter = (*dmat)->RowIterator();
   iter->BeforeFirst();
   while (iter->Next()) {
     auto batch = iter->Value();

@@ -64,5 +66,6 @@ TEST(c_api, XGDMatrixCreateFromMat_omp) {
       }
     }
   }
+  delete dmat;
 }
 }
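The casts above rely on the fact that a DMatrixHandle produced by the C API points at a heap-allocated std::shared_ptr<xgboost::DMatrix>. Keeping the cast result as a pointer, instead of copying the shared_ptr out of it as the old code did, lets the test release the handle's allocation with a plain delete, which is what the address sanitizer run checks for. A condensed sketch of that pattern, with placeholder data and no error handling:

    #include <memory>
    #include <vector>
    #include <xgboost/c_api.h>
    #include <xgboost/data.h>

    void HandleOwnershipSketch() {
      std::vector<float> data(6, 1.0f);  // placeholder 3x2 matrix
      DMatrixHandle handle;
      XGDMatrixCreateFromMat(data.data(), 3, 2, -1.0f, &handle);

      // The handle is really a std::shared_ptr<DMatrix>* on the heap.
      auto *dmat = static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);

      // Use the matrix through the shared_ptr: (*dmat)->Info(), (*dmat)->RowIterator(), ...

      // Release the heap allocation when done; XGDMatrixFree(handle) is the
      // C-API way to do the same thing.
      delete dmat;
    }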
@@ -7,23 +7,24 @@ namespace common {
 TEST(DenseColumn, Test) {
   auto dmat = CreateDMatrix(100, 10, 0.0);
   GHistIndexMatrix gmat;
-  gmat.Init(dmat.get(), 256);
+  gmat.Init((*dmat).get(), 256);
   ColumnMatrix column_matrix;
   column_matrix.Init(gmat, 0.2);

-  for (auto i = 0ull; i < dmat->Info().num_row_; i++) {
-    for (auto j = 0ull; j < dmat->Info().num_col_; j++) {
+  for (auto i = 0ull; i < (*dmat)->Info().num_row_; i++) {
+    for (auto j = 0ull; j < (*dmat)->Info().num_col_; j++) {
       auto col = column_matrix.GetColumn(j);
-      EXPECT_EQ(gmat.index[i * dmat->Info().num_col_ + j],
+      EXPECT_EQ(gmat.index[i * (*dmat)->Info().num_col_ + j],
                 col.GetGlobalBinIdx(i));
     }
   }
+  delete dmat;
 }

 TEST(SparseColumn, Test) {
   auto dmat = CreateDMatrix(100, 1, 0.85);
   GHistIndexMatrix gmat;
-  gmat.Init(dmat.get(), 256);
+  gmat.Init((*dmat).get(), 256);
   ColumnMatrix column_matrix;
   column_matrix.Init(gmat, 0.5);
   auto col = column_matrix.GetColumn(0);

@@ -32,12 +33,13 @@ TEST(SparseColumn, Test) {
     EXPECT_EQ(gmat.index[gmat.row_ptr[col.GetRowIdx(i)]],
               col.GetGlobalBinIdx(i));
   }
+  delete dmat;
 }

 TEST(DenseColumnWithMissing, Test) {
   auto dmat = CreateDMatrix(100, 1, 0.5);
   GHistIndexMatrix gmat;
-  gmat.Init(dmat.get(), 256);
+  gmat.Init((*dmat).get(), 256);
   ColumnMatrix column_matrix;
   column_matrix.Init(gmat, 0.2);
   auto col = column_matrix.GetColumn(0);

@@ -46,6 +48,7 @@ TEST(DenseColumnWithMissing, Test) {
     EXPECT_EQ(gmat.index[gmat.row_ptr[col.GetRowIdx(i)]],
               col.GetGlobalBinIdx(i));
   }
+  delete dmat;
 }
 }  // namespace common
 }  // namespace xgboost
@@ -22,7 +22,7 @@ TEST(gpu_hist_util, TestDeviceSketch) {
   DMatrixHandle dmat_handle;
   XGDMatrixCreateFromMat(test_data.data(), nrows, 1, -1,
                          &dmat_handle);
-  auto dmat = *static_cast<std::shared_ptr<xgboost::DMatrix> *>(dmat_handle);
+  auto dmat = static_cast<std::shared_ptr<xgboost::DMatrix> *>(dmat_handle);

   // parameters for finding quantiles
   tree::TrainParam p;

@@ -34,15 +34,15 @@ TEST(gpu_hist_util, TestDeviceSketch) {

   // find quantiles on the CPU
   HistCutMatrix hmat_cpu;
-  hmat_cpu.Init(dmat.get(), p.max_bin);
+  hmat_cpu.Init((*dmat).get(), p.max_bin);

   // find the cuts on the GPU
-  dmlc::DataIter<SparsePage>* iter = dmat->RowIterator();
+  dmlc::DataIter<SparsePage>* iter = (*dmat)->RowIterator();
   iter->BeforeFirst();
   CHECK(iter->Next());
   const SparsePage& batch = iter->Value();
   HistCutMatrix hmat_gpu;
-  DeviceSketch(batch, dmat->Info(), p, &hmat_gpu);
+  DeviceSketch(batch, (*dmat)->Info(), p, &hmat_gpu);
   CHECK(!iter->Next());

   // compare the cuts

@@ -54,6 +54,8 @@ TEST(gpu_hist_util, TestDeviceSketch) {
   for (int i = 0; i < hmat_gpu.cut.size(); ++i) {
     ASSERT_LT(fabs(hmat_cpu.cut[i] - hmat_gpu.cut[i]), eps * nrows);
   }
+
+  delete dmat;
 }

 }  // namespace common
@@ -64,6 +64,8 @@ TEST(MetaInfo, SaveLoadBinary) {
   EXPECT_EQ(inforead.num_row_, info.num_row_);

   std::remove(tmp_file.c_str());
+
+  delete fs;
 }

 TEST(MetaInfo, LoadQid) {
@@ -29,4 +29,7 @@ TEST(SimpleCSRSource, SaveLoadBinary) {
   EXPECT_EQ(first_row[2].index, first_row_read[2].index);
   EXPECT_EQ(first_row[2].fvalue, first_row_read[2].fvalue);
   row_iter = nullptr; row_iter_read = nullptr;
+
+  delete dmat;
+  delete dmat_read;
 }
@@ -14,6 +14,8 @@ TEST(SimpleDMatrix, MetaInfo) {
   EXPECT_EQ(dmat->Info().num_col_, 5);
   EXPECT_EQ(dmat->Info().num_nonzero_, 6);
   EXPECT_EQ(dmat->Info().labels_.size(), dmat->Info().num_row_);
+
+  delete dmat;
 }

 TEST(SimpleDMatrix, RowAccess) {

@@ -35,6 +37,8 @@ TEST(SimpleDMatrix, RowAccess) {
   EXPECT_EQ(first_row[2].index, 2);
   EXPECT_EQ(first_row[2].fvalue, 20);
   row_iter = nullptr;
+
+  delete dmat;
 }

 TEST(SimpleDMatrix, ColAccessWithoutBatches) {

@@ -76,4 +80,6 @@ TEST(SimpleDMatrix, ColAccessWithoutBatches) {
   }
   EXPECT_EQ(num_col_batch, 1) << "Expected number of batches to be 1";
   col_iter = nullptr;
+
+  delete dmat;
 }
@@ -21,6 +21,8 @@ TEST(SparsePageDMatrix, MetaInfo) {
   // Clean up of external memory files
   std::remove((tmp_file + ".cache").c_str());
   std::remove((tmp_file + ".cache.row.page").c_str());
+
+  delete dmat;
 }

 TEST(SparsePageDMatrix, RowAccess) {

@@ -48,6 +50,8 @@ TEST(SparsePageDMatrix, RowAccess) {
   // Clean up of external memory files
   std::remove((tmp_file + ".cache").c_str());
   std::remove((tmp_file + ".cache.row.page").c_str());
+
+  delete dmat;
 }

 TEST(SparsePageDMatrix, ColAcess) {

@@ -84,4 +88,6 @@ TEST(SparsePageDMatrix, ColAcess) {
   std::remove((tmp_file + ".cache").c_str());
   std::remove((tmp_file + ".cache.col.page").c_str());
   std::remove((tmp_file + ".cache.row.page").c_str());
+
+  delete dmat;
 }
@@ -107,7 +107,7 @@ xgboost::bst_float GetMetricEval(xgboost::Metric * metric,
   return metric->Eval(preds, info, false);
 }

-std::shared_ptr<xgboost::DMatrix> CreateDMatrix(int rows, int columns,
+std::shared_ptr<xgboost::DMatrix>* CreateDMatrix(int rows, int columns,
                                                 float sparsity, int seed) {
   const float missing_value = -1;
   std::vector<float> test_data(rows * columns);

@@ -124,5 +124,5 @@ std::shared_ptr<xgboost::DMatrix> CreateDMatrix(int rows, int columns,
   DMatrixHandle handle;
   XGDMatrixCreateFromMat(test_data.data(), rows, columns, missing_value,
                          &handle);
-  return *static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);
+  return static_cast<std::shared_ptr<xgboost::DMatrix> *>(handle);
 }
@@ -59,6 +59,6 @@ xgboost::bst_float GetMetricEval(
  * \return The new d matrix.
  */

-std::shared_ptr<xgboost::DMatrix> CreateDMatrix(int rows, int columns,
+std::shared_ptr<xgboost::DMatrix> *CreateDMatrix(int rows, int columns,
                                                 float sparsity, int seed = 0);
 #endif
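With the declaration above, every call site owns a naked pointer. The tests in this commit free it with an explicit delete; a small RAII wrapper would give the same sanitizer-clean behaviour without the manual calls. This is a hypothetical alternative for illustration only, not part of the commit:

    #include <memory>
    #include <xgboost/data.h>
    #include "helpers.h"  // for CreateDMatrix as declared above

    // Hypothetical convenience wrapper: owns the shared_ptr<DMatrix>* returned
    // by CreateDMatrix and deletes it when the test scope ends.
    using DMatrixPtr = std::unique_ptr<std::shared_ptr<xgboost::DMatrix>>;

    inline DMatrixPtr MakeDMatrix(int rows, int columns, float sparsity) {
      return DMatrixPtr(CreateDMatrix(rows, columns, sparsity));
    }

    // Usage inside a test body:
    //   auto dmat = MakeDMatrix(100, 10, 0.0);
    //   ASSERT_EQ((*dmat)->Info().num_row_, 100);
    //   // no delete needed; ~unique_ptr frees the outer pointer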
@@ -8,37 +8,41 @@ typedef std::pair<std::string, std::string> arg;
 TEST(Linear, shotgun) {
   typedef std::pair<std::string, std::string> arg;
   auto mat = CreateDMatrix(10, 10, 0);
-  std::vector<bool> enabled(mat->Info().num_col_, true);
-  mat->InitColAccess(1 << 16, false);
+  std::vector<bool> enabled((*mat)->Info().num_col_, true);
+  (*mat)->InitColAccess(1 << 16, false);
   auto updater = std::unique_ptr<xgboost::LinearUpdater>(
       xgboost::LinearUpdater::Create("shotgun"));
   updater->Init({{"eta", "1."}});
   xgboost::HostDeviceVector<xgboost::GradientPair> gpair(
-      mat->Info().num_row_, xgboost::GradientPair(-5, 1.0));
+      (*mat)->Info().num_row_, xgboost::GradientPair(-5, 1.0));
   xgboost::gbm::GBLinearModel model;
-  model.param.num_feature = mat->Info().num_col_;
+  model.param.num_feature = (*mat)->Info().num_col_;
   model.param.num_output_group = 1;
   model.LazyInitModel();
-  updater->Update(&gpair, mat.get(), &model, gpair.Size());
+  updater->Update(&gpair, (*mat).get(), &model, gpair.Size());

   ASSERT_EQ(model.bias()[0], 5.0f);
+
+  delete mat;
 }

 TEST(Linear, coordinate) {
   typedef std::pair<std::string, std::string> arg;
   auto mat = CreateDMatrix(10, 10, 0);
-  std::vector<bool> enabled(mat->Info().num_col_, true);
-  mat->InitColAccess(1 << 16, false);
+  std::vector<bool> enabled((*mat)->Info().num_col_, true);
+  (*mat)->InitColAccess(1 << 16, false);
   auto updater = std::unique_ptr<xgboost::LinearUpdater>(
       xgboost::LinearUpdater::Create("coord_descent"));
   updater->Init({{"eta", "1."}});
   xgboost::HostDeviceVector<xgboost::GradientPair> gpair(
-      mat->Info().num_row_, xgboost::GradientPair(-5, 1.0));
+      (*mat)->Info().num_row_, xgboost::GradientPair(-5, 1.0));
   xgboost::gbm::GBLinearModel model;
-  model.param.num_feature = mat->Info().num_col_;
+  model.param.num_feature = (*mat)->Info().num_col_;
   model.param.num_output_group = 1;
   model.LazyInitModel();
-  updater->Update(&gpair, mat.get(), &model, gpair.Size());
+  updater->Update(&gpair, (*mat).get(), &model, gpair.Size());

   ASSERT_EQ(model.bias()[0], 5.0f);
+
+  delete mat;
 }
@@ -11,6 +11,7 @@ TEST(Metric, RMSE) {
                             {0.1f, 0.9f, 0.1f, 0.9f},
                             { 0, 0, 1, 1}),
              0.6403f, 0.001f);
+  delete metric;
 }

 TEST(Metric, MAE) {

@@ -21,6 +22,7 @@ TEST(Metric, MAE) {
                             {0.1f, 0.9f, 0.1f, 0.9f},
                             { 0, 0, 1, 1}),
              0.5f, 0.001f);
+  delete metric;
 }

 TEST(Metric, LogLoss) {

@@ -31,6 +33,7 @@ TEST(Metric, LogLoss) {
                             {0.1f, 0.9f, 0.1f, 0.9f},
                             { 0, 0, 1, 1}),
              1.2039f, 0.001f);
+  delete metric;
 }

 TEST(Metric, Error) {

@@ -56,6 +59,7 @@ TEST(Metric, Error) {
                             {0.1f, 0.2f, 0.1f, 0.2f},
                             { 0, 0, 1, 1}),
              0.5f, 0.001f);
+  delete metric;
 }

 TEST(Metric, PoissionNegLogLik) {

@@ -66,4 +70,5 @@ TEST(Metric, PoissionNegLogLik) {
                             {0.1f, 0.2f, 0.1f, 0.2f},
                             { 0, 0, 1, 1}),
              1.1280f, 0.001f);
+  delete metric;
 }
@@ -4,8 +4,11 @@
 #include "../helpers.h"

 TEST(Metric, UnknownMetric) {
-  EXPECT_ANY_THROW(xgboost::Metric::Create("unknown_name"));
-  EXPECT_NO_THROW(xgboost::Metric::Create("rmse"));
-  EXPECT_ANY_THROW(xgboost::Metric::Create("unknown_name@1"));
-  EXPECT_NO_THROW(xgboost::Metric::Create("error@0.5f"));
+  xgboost::Metric * metric;
+  EXPECT_ANY_THROW(metric = xgboost::Metric::Create("unknown_name"));
+  EXPECT_NO_THROW(metric = xgboost::Metric::Create("rmse"));
+  delete metric;
+  EXPECT_ANY_THROW(metric = xgboost::Metric::Create("unknown_name@1"));
+  EXPECT_NO_THROW(metric = xgboost::Metric::Create("error@0.5f"));
+  delete metric;
 }
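The raw metric pointer plus explicit delete keeps the leak checker quiet, but it relies on the EXPECT_NO_THROW creations succeeding before each delete runs. A variant that avoids the manual bookkeeping would move each successfully created metric into a std::unique_ptr straight away. This is a hypothetical alternative sketch, not what the commit does:

    #include <memory>
    #include <gtest/gtest.h>
    #include <xgboost/metric.h>

    TEST(Metric, UnknownMetricNoManualDelete) {
      // Creating an unknown metric must still throw; nothing to free here.
      EXPECT_ANY_THROW(xgboost::Metric::Create("unknown_name"));

      // Hand the factory result to unique_ptr immediately so it is released
      // even if a later assertion fails.
      std::unique_ptr<xgboost::Metric> rmse(xgboost::Metric::Create("rmse"));
      EXPECT_NE(rmse, nullptr);

      EXPECT_ANY_THROW(xgboost::Metric::Create("unknown_name@1"));
      std::unique_ptr<xgboost::Metric> err(xgboost::Metric::Create("error@0.5f"));
      EXPECT_NE(err, nullptr);
    }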
@@ -13,6 +13,8 @@ TEST(Metric, MultiClassError) {
                             {0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f},
                             {0, 1, 2}),
              0.666f, 0.001f);
+
+  delete metric;
 }

 TEST(Metric, MultiClassLogLoss) {

@@ -25,4 +27,6 @@ TEST(Metric, MultiClassLogLoss) {
                             {0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f, 0.1f},
                             {0, 1, 2}),
              2.302f, 0.001f);
+
+  delete metric;
 }
@@ -17,6 +17,8 @@ TEST(Metric, AMS) {
   metric = xgboost::Metric::Create("ams@0");
   ASSERT_STREQ(metric->Name(), "ams@0");
   EXPECT_NEAR(GetMetricEval(metric, {0, 1}, {0, 1}), 0.311f, 0.001f);
+
+  delete metric;
 }

 TEST(Metric, AUC) {

@@ -29,6 +31,8 @@ TEST(Metric, AUC) {
              0.5f, 0.001f);
   EXPECT_ANY_THROW(GetMetricEval(metric, {0, 1}, {}));
   EXPECT_ANY_THROW(GetMetricEval(metric, {0, 0}, {0, 0}));
+
+  delete metric;
 }

 TEST(Metric, AUCPR) {

@@ -50,6 +54,8 @@ TEST(Metric, AUCPR) {
              0.2769199f, 0.001f);
   EXPECT_ANY_THROW(GetMetricEval(metric, {0, 1}, {}));
   EXPECT_ANY_THROW(GetMetricEval(metric, {0, 0}, {0, 0}));
+
+  delete metric;
 }

 TEST(Metric, Precision) {

@@ -74,6 +80,8 @@ TEST(Metric, Precision) {
              0.5f, 0.001f);

   EXPECT_ANY_THROW(GetMetricEval(metric, {0, 1}, {}));
+
+  delete metric;
 }

 TEST(Metric, NDCG) {

@@ -114,6 +122,8 @@ TEST(Metric, NDCG) {
                             {0.1f, 0.9f, 0.1f, 0.9f},
                             { 0, 0, 1, 1}),
              0.3868f, 0.001f);
+
+  delete metric;
 }

 TEST(Metric, MAP) {

@@ -139,4 +149,5 @@ TEST(Metric, MAP) {
                             {0.1f, 0.9f, 0.1f, 0.9f},
                             { 0, 0, 1, 1}),
              0.25f, 0.001f);
+  delete metric;
 }
@@ -17,4 +17,6 @@ TEST(Objective, HingeObj) {
                   { eps, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, 1.0f, eps });

   ASSERT_NO_THROW(obj->DefaultEvalMetric());
+
+  delete obj;
 }
@@ -4,6 +4,8 @@
 #include "../helpers.h"

 TEST(Objective, UnknownFunction) {
-  EXPECT_ANY_THROW(xgboost::ObjFunction::Create("unknown_name"));
-  EXPECT_NO_THROW(xgboost::ObjFunction::Create("reg:linear"));
+  xgboost::ObjFunction* obj;
+  EXPECT_ANY_THROW(obj = xgboost::ObjFunction::Create("unknown_name"));
+  EXPECT_NO_THROW(obj = xgboost::ObjFunction::Create("reg:linear"));
+  delete obj;
 }
@@ -25,4 +25,6 @@ TEST(Objective, PairwiseRankingGPair) {
                   {0.9975f, 0.9975f, 0.9975f, 0.9975f});

   ASSERT_NO_THROW(obj->DefaultEvalMetric());
+
+  delete obj;
 }
@@ -15,6 +15,8 @@ TEST(Objective, LinearRegressionGPair) {
                   {1, 1, 1, 1, 1, 1, 1, 1});

   ASSERT_NO_THROW(obj->DefaultEvalMetric());
+
+  delete obj;
 }

 TEST(Objective, LogisticRegressionGPair) {

@@ -27,6 +29,8 @@ TEST(Objective, LogisticRegressionGPair) {
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f},
                   {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f});
+
+  delete obj;
 }

 TEST(Objective, LogisticRegressionBasic) {

@@ -53,6 +57,8 @@ TEST(Objective, LogisticRegressionBasic) {
   for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
     EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
   }
+
+  delete obj;
 }

 TEST(Objective, LogisticRawGPair) {

@@ -65,6 +71,8 @@ TEST(Objective, LogisticRawGPair) {
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f},
                   {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f});
+
+  delete obj;
 }

 TEST(Objective, PoissonRegressionGPair) {

@@ -78,6 +86,8 @@ TEST(Objective, PoissonRegressionGPair) {
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f},
                   {1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f});
+
+  delete obj;
 }

 TEST(Objective, PoissonRegressionBasic) {

@@ -102,6 +112,8 @@ TEST(Objective, PoissonRegressionBasic) {
   for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
     EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
   }
+
+  delete obj;
 }

 TEST(Objective, GammaRegressionGPair) {

@@ -114,6 +126,8 @@ TEST(Objective, GammaRegressionGPair) {
                   {1, 1, 1, 1, 1, 1, 1, 1},
                   {1, 1, 1, 1, 0, 0.09f, 0.59f, 0.63f},
                   {0, 0, 0, 0, 1, 0.90f, 0.40f, 0.36f});
+
+  delete obj;
 }

 TEST(Objective, GammaRegressionBasic) {

@@ -138,6 +152,8 @@ TEST(Objective, GammaRegressionBasic) {
   for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
     EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
   }
+
+  delete obj;
 }

 TEST(Objective, TweedieRegressionGPair) {

@@ -151,6 +167,8 @@ TEST(Objective, TweedieRegressionGPair) {
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f},
                   {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f});
+
+  delete obj;
 }

 TEST(Objective, TweedieRegressionBasic) {

@@ -175,6 +193,8 @@ TEST(Objective, TweedieRegressionBasic) {
   for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
     EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
   }
+
+  delete obj;
 }

 TEST(Objective, CoxRegressionGPair) {

@@ -187,4 +207,6 @@ TEST(Objective, CoxRegressionGPair) {
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 0, 0, 0, -0.799f, -0.788f, -0.590f, 0.910f, 1.006f},
                   { 0, 0, 0, 0.160f, 0.186f, 0.348f, 0.610f, 0.639f});
+
+  delete obj;
 }
@@ -17,6 +17,8 @@ TEST(Objective, GPULinearRegressionGPair) {
                   {1, 1, 1, 1, 1, 1, 1, 1});

   ASSERT_NO_THROW(obj->DefaultEvalMetric());
+
+  delete obj;
 }

 TEST(Objective, GPULogisticRegressionGPair) {

@@ -29,6 +31,8 @@ TEST(Objective, GPULogisticRegressionGPair) {
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f},
                   {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f});
+
+  delete obj;
 }

 TEST(Objective, GPULogisticRegressionBasic) {

@@ -55,6 +59,8 @@ TEST(Objective, GPULogisticRegressionBasic) {
   for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) {
     EXPECT_NEAR(preds[i], out_preds[i], 0.01f);
   }
+
+  delete obj;
 }

 TEST(Objective, GPULogisticRawGPair) {

@@ -67,4 +73,6 @@ TEST(Objective, GPULogisticRawGPair) {
                   { 1, 1, 1, 1, 1, 1, 1, 1},
                   { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f},
                   {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f});
+
+  delete obj;
 }
@@ -25,14 +25,14 @@ TEST(cpu_predictor, Test) {

   // Test predict batch
   HostDeviceVector<float> out_predictions;
-  cpu_predictor->PredictBatch(dmat.get(), &out_predictions, model, 0);
+  cpu_predictor->PredictBatch((*dmat).get(), &out_predictions, model, 0);
   std::vector<float>& out_predictions_h = out_predictions.HostVector();
   for (int i = 0; i < out_predictions.Size(); i++) {
     ASSERT_EQ(out_predictions_h[i], 1.5);
   }

   // Test predict instance
-  auto batch = dmat->RowIterator()->Value();
+  auto batch = (*dmat)->RowIterator()->Value();
   for (int i = 0; i < batch.Size(); i++) {
     std::vector<float> instance_out_predictions;
     cpu_predictor->PredictInstance(batch[i], &instance_out_predictions, model);

@@ -41,22 +41,24 @@ TEST(cpu_predictor, Test) {

   // Test predict leaf
   std::vector<float> leaf_out_predictions;
-  cpu_predictor->PredictLeaf(dmat.get(), &leaf_out_predictions, model);
+  cpu_predictor->PredictLeaf((*dmat).get(), &leaf_out_predictions, model);
   for (int i = 0; i < leaf_out_predictions.size(); i++) {
     ASSERT_EQ(leaf_out_predictions[i], 0);
   }

   // Test predict contribution
   std::vector<float> out_contribution;
-  cpu_predictor->PredictContribution(dmat.get(), &out_contribution, model);
+  cpu_predictor->PredictContribution((*dmat).get(), &out_contribution, model);
   for (int i = 0; i < out_contribution.size(); i++) {
     ASSERT_EQ(out_contribution[i], 1.5);
   }

   // Test predict contribution (approximate method)
-  cpu_predictor->PredictContribution(dmat.get(), &out_contribution, model, true);
+  cpu_predictor->PredictContribution((*dmat).get(), &out_contribution, model, true);
   for (int i = 0; i < out_contribution.size(); i++) {
     ASSERT_EQ(out_contribution[i], 1.5);
   }

+  delete dmat;
 }
 }  // namespace xgboost
@@ -35,8 +35,8 @@ TEST(gpu_predictor, Test) {
   // Test predict batch
   HostDeviceVector<float> gpu_out_predictions;
   HostDeviceVector<float> cpu_out_predictions;
-  gpu_predictor->PredictBatch(dmat.get(), &gpu_out_predictions, model, 0);
-  cpu_predictor->PredictBatch(dmat.get(), &cpu_out_predictions, model, 0);
+  gpu_predictor->PredictBatch((*dmat).get(), &gpu_out_predictions, model, 0);
+  cpu_predictor->PredictBatch((*dmat).get(), &cpu_out_predictions, model, 0);
   std::vector<float>& gpu_out_predictions_h = gpu_out_predictions.HostVector();
   std::vector<float>& cpu_out_predictions_h = cpu_out_predictions.HostVector();
   float abs_tolerance = 0.001;

@@ -45,7 +45,7 @@ TEST(gpu_predictor, Test) {
                 abs_tolerance);
   }
   // Test predict instance
-  auto batch = dmat->RowIterator()->Value();
+  auto batch = (*dmat)->RowIterator()->Value();
   for (int i = 0; i < batch.Size(); i++) {
     std::vector<float> gpu_instance_out_predictions;
     std::vector<float> cpu_instance_out_predictions;

@@ -59,8 +59,8 @@ TEST(gpu_predictor, Test) {
   // Test predict leaf
   std::vector<float> gpu_leaf_out_predictions;
   std::vector<float> cpu_leaf_out_predictions;
-  cpu_predictor->PredictLeaf(dmat.get(), &cpu_leaf_out_predictions, model);
-  gpu_predictor->PredictLeaf(dmat.get(), &gpu_leaf_out_predictions, model);
+  cpu_predictor->PredictLeaf((*dmat).get(), &cpu_leaf_out_predictions, model);
+  gpu_predictor->PredictLeaf((*dmat).get(), &gpu_leaf_out_predictions, model);
   for (int i = 0; i < gpu_leaf_out_predictions.size(); i++) {
     ASSERT_EQ(gpu_leaf_out_predictions[i], cpu_leaf_out_predictions[i]);
   }

@@ -68,11 +68,13 @@ TEST(gpu_predictor, Test) {
   // Test predict contribution
   std::vector<float> gpu_out_contribution;
   std::vector<float> cpu_out_contribution;
-  cpu_predictor->PredictContribution(dmat.get(), &cpu_out_contribution, model);
-  gpu_predictor->PredictContribution(dmat.get(), &gpu_out_contribution, model);
+  cpu_predictor->PredictContribution((*dmat).get(), &cpu_out_contribution, model);
+  gpu_predictor->PredictContribution((*dmat).get(), &gpu_out_contribution, model);
   for (int i = 0; i < gpu_out_contribution.size(); i++) {
     ASSERT_EQ(gpu_out_contribution[i], cpu_out_contribution[i]);
   }

+  delete dmat;
 }
 }  // namespace predictor
 }  // namespace xgboost
@@ -1,5 +1,6 @@
 // Copyright by Contributors
 #include <gtest/gtest.h>
+#include <vector>
 #include "helpers.h"
 #include "xgboost/learner.h"

@@ -7,8 +8,11 @@ namespace xgboost {
 TEST(learner, Test) {
   typedef std::pair<std::string, std::string> arg;
   auto args = {arg("tree_method", "exact")};
-  auto mat = {CreateDMatrix(10, 10, 0)};
+  auto mat_ptr = CreateDMatrix(10, 10, 0);
+  std::vector<std::shared_ptr<xgboost::DMatrix>> mat = {*mat_ptr};
   auto learner = std::unique_ptr<Learner>(Learner::Create(mat));
   learner->Configure(args);
+
+  delete mat_ptr;
 }
 }  // namespace xgboost
@@ -19,11 +19,11 @@ TEST(gpu_hist_experimental, TestSparseShard) {
   int max_bins = 4;
   auto dmat = CreateDMatrix(rows, columns, 0.9f);
   common::GHistIndexMatrix gmat;
-  gmat.Init(dmat.get(),max_bins);
+  gmat.Init((*dmat).get(),max_bins);
   TrainParam p;
   p.max_depth = 6;

-  dmlc::DataIter<SparsePage>* iter = dmat->RowIterator();
+  dmlc::DataIter<SparsePage>* iter = (*dmat)->RowIterator();
   iter->BeforeFirst();
   CHECK(iter->Next());
   const SparsePage& batch = iter->Value();

@@ -50,6 +50,8 @@ TEST(gpu_hist_experimental, TestSparseShard) {
       ASSERT_EQ(gidx[i * shard.row_stride + row_offset], shard.null_gidx_value);
     }
   }
+
+  delete dmat;
 }

 TEST(gpu_hist_experimental, TestDenseShard) {

@@ -58,11 +60,11 @@ TEST(gpu_hist_experimental, TestDenseShard) {
   int max_bins = 4;
   auto dmat = CreateDMatrix(rows, columns, 0);
   common::GHistIndexMatrix gmat;
-  gmat.Init(dmat.get(),max_bins);
+  gmat.Init((*dmat).get(),max_bins);
   TrainParam p;
   p.max_depth = 6;

-  dmlc::DataIter<SparsePage>* iter = dmat->RowIterator();
+  dmlc::DataIter<SparsePage>* iter = (*dmat)->RowIterator();
   iter->BeforeFirst();
   CHECK(iter->Next());
   const SparsePage& batch = iter->Value();

@@ -82,6 +84,8 @@ TEST(gpu_hist_experimental, TestDenseShard) {
   for (int i = 0; i < gmat.index.size(); i++) {
     ASSERT_EQ(gidx[i], gmat.index[i]);
   }
+
+  delete dmat;
 }

 }  // namespace tree
@@ -149,9 +149,39 @@ if [ ${TASK} == "cpp_test" ]; then
     make cover
 fi

 if [ ${TASK} == "distributed_test" ]; then
     set -e
     make all || exit -1
     cd tests/distributed
     ./runtests.sh
 fi
+
+if [ ${TASK} == "sanitizer_test" ]; then
+    set -e
+    # Build gtest via cmake
+    wget -nc https://github.com/google/googletest/archive/release-1.7.0.zip
+    unzip -n release-1.7.0.zip
+    mv googletest-release-1.7.0 gtest && cd gtest
+    CC=gcc-7 CXX=g++-7 cmake -DCMAKE_CXX_FLAGS="-fuse-ld=gold" \
+      -DCMAKE_C_FLAGS="-fuse-ld=gold"
+    make
+    mkdir lib && mv libgtest.a lib
+    cd ..
+    rm -rf release-1.7.0.zip
+
+    mkdir build && cd build
+    CC=gcc-7 CXX=g++-7 cmake .. -DGOOGLE_TEST=ON -DGTEST_ROOT=$PWD/../gtest/ \
+      -DUSE_SANITIZER=ON -DENABLED_SANITIZERS="address" \
+      -DCMAKE_BUILD_TYPE=Debug \
+      -DSANITIZER_PATH=/usr/lib/x86_64-linux-gnu/ \
+      -DCMAKE_CXX_FLAGS="-fuse-ld=gold" \
+      -DCMAKE_C_FLAGS="-fuse-ld=gold"
+    make
+    cd ..
+
+    export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer)
+    ASAN_OPTIONS=symbolize=1 ./testxgboost
+    rm -rf build
+    exit 0
+fi