Use in-memory communicator to test quantile (#8710)
This commit is contained in:
@@ -40,20 +40,10 @@ void PushPage(HostSketchContainer* container, SparsePage const& page, MetaInfo c
|
||||
Span<float const> hessian) {
|
||||
container->PushRowPage(page, info, hessian);
|
||||
}
|
||||
} // anonymous namespace
|
||||
|
||||
template <bool use_column>
|
||||
void TestDistributedQuantile(size_t rows, size_t cols) {
|
||||
std::string msg {"Skipping AllReduce test"};
|
||||
int32_t constexpr kWorkers = 4;
|
||||
InitCommunicatorContext(msg, kWorkers);
|
||||
auto world = collective::GetWorldSize();
|
||||
if (world != 1) {
|
||||
ASSERT_EQ(world, kWorkers);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
void DoTestDistributedQuantile(size_t rows, size_t cols) {
|
||||
auto const world = collective::GetWorldSize();
|
||||
std::vector<MetaInfo> infos(2);
|
||||
auto& h_weights = infos.front().weights_.HostVector();
|
||||
h_weights.resize(rows);
|
||||
@@ -152,47 +142,36 @@ void TestDistributedQuantile(size_t rows, size_t cols) {
|
||||
}
|
||||
}
|
||||
|
||||
template <bool use_column>
|
||||
void TestDistributedQuantile(size_t const rows, size_t const cols) {
|
||||
auto constexpr kWorkers = 4;
|
||||
RunWithInMemoryCommunicator(kWorkers, DoTestDistributedQuantile<use_column>, rows, cols);
|
||||
}
|
||||
} // anonymous namespace
|
||||
|
||||
/// Smoke test: distributed quantile sketching on a tiny dense matrix.
TEST(Quantile, DistributedBasic) {
#if defined(__unix__)
  size_t constexpr kNumRows{10};
  size_t constexpr kNumCols{10};
  TestDistributedQuantile<false>(kNumRows, kNumCols);
#endif
}
|
||||
|
||||
/// Distributed quantile sketching on a larger matrix (row pages).
TEST(Quantile, Distributed) {
#if defined(__unix__)
  size_t constexpr kNumRows{4000};
  size_t constexpr kNumCols{200};
  TestDistributedQuantile<false>(kNumRows, kNumCols);
#endif
}
|
||||
|
||||
/// Smoke test: distributed quantile sketching over sorted column pages.
TEST(Quantile, SortedDistributedBasic) {
#if defined(__unix__)
  size_t constexpr kNumRows{10};
  size_t constexpr kNumCols{10};
  TestDistributedQuantile<true>(kNumRows, kNumCols);
#endif
}
|
||||
|
||||
/// Distributed quantile sketching over sorted column pages, larger matrix.
TEST(Quantile, SortedDistributed) {
#if defined(__unix__)
  size_t constexpr kNumRows{4000};
  size_t constexpr kNumCols{200};
  TestDistributedQuantile<true>(kNumRows, kNumCols);
#endif
}
|
||||
|
||||
TEST(Quantile, SameOnAllWorkers) {
|
||||
#if defined(__unix__)
|
||||
std::string msg{"Skipping Quantile AllreduceBasic test"};
|
||||
int32_t constexpr kWorkers = 4;
|
||||
InitCommunicatorContext(msg, kWorkers);
|
||||
auto world = collective::GetWorldSize();
|
||||
if (world != 1) {
|
||||
CHECK_EQ(world, kWorkers);
|
||||
} else {
|
||||
LOG(WARNING) << msg;
|
||||
return;
|
||||
}
|
||||
|
||||
namespace {
|
||||
void TestSameOnAllWorkers() {
|
||||
auto const world = collective::GetWorldSize();
|
||||
constexpr size_t kRows = 1000, kCols = 100;
|
||||
RunWithSeedsAndBins(
|
||||
kRows, [=](int32_t seed, size_t n_bins, MetaInfo const&) {
|
||||
@@ -256,8 +235,13 @@ TEST(Quantile, SameOnAllWorkers) {
|
||||
}
|
||||
}
|
||||
});
|
||||
collective::Finalize();
|
||||
#endif // defined(__unix__)
|
||||
}
|
||||
} // anonymous namespace
|
||||
|
||||
/// Verify that every worker produces identical quantile cuts.
TEST(Quantile, SameOnAllWorkers) {
  constexpr auto kWorldSize = 4;
  RunWithInMemoryCommunicator(kWorldSize, TestSameOnAllWorkers);
}
|
||||
|
||||
} // namespace common
|
||||
} // namespace xgboost
|
||||
|
||||
@@ -338,12 +338,9 @@ TEST(GPUQuantile, MultiMerge) {
|
||||
});
|
||||
}
|
||||
|
||||
TEST(GPUQuantile, AllReduceBasic) {
|
||||
// This test is supposed to run by a python test that setups the environment.
|
||||
std::string msg {"Skipping AllReduce test"};
|
||||
auto n_gpus = AllVisibleGPUs();
|
||||
InitCommunicatorContext(msg, n_gpus);
|
||||
auto world = collective::GetWorldSize();
|
||||
namespace {
|
||||
void TestAllReduceBasic(int32_t n_gpus) {
|
||||
auto const world = collective::GetWorldSize();
|
||||
if (world != 1) {
|
||||
ASSERT_EQ(world, n_gpus);
|
||||
} else {
|
||||
@@ -420,13 +417,16 @@ TEST(GPUQuantile, AllReduceBasic) {
|
||||
ASSERT_NEAR(single_node_data[i].wmin, distributed_data[i].wmin, Eps);
|
||||
}
|
||||
});
|
||||
collective::Finalize();
|
||||
}
|
||||
} // anonymous namespace
|
||||
|
||||
/// Run the GPU all-reduce sketch test with one worker per visible GPU.
TEST(GPUQuantile, AllReduceBasic) {
  auto const n_workers = AllVisibleGPUs();
  RunWithInMemoryCommunicator(n_workers, TestAllReduceBasic, n_workers);
}
|
||||
|
||||
TEST(GPUQuantile, SameOnAllWorkers) {
|
||||
std::string msg {"Skipping SameOnAllWorkers test"};
|
||||
auto n_gpus = AllVisibleGPUs();
|
||||
InitCommunicatorContext(msg, n_gpus);
|
||||
namespace {
|
||||
void TestSameOnAllWorkers(int32_t n_gpus) {
|
||||
auto world = collective::GetWorldSize();
|
||||
if (world != 1) {
|
||||
ASSERT_EQ(world, n_gpus);
|
||||
@@ -490,6 +490,12 @@ TEST(GPUQuantile, SameOnAllWorkers) {
|
||||
}
|
||||
});
|
||||
}
|
||||
} // anonymous namespace
|
||||
|
||||
/// Verify GPU workers agree on the merged sketch, one worker per visible GPU.
TEST(GPUQuantile, SameOnAllWorkers) {
  auto const n_workers = AllVisibleGPUs();
  RunWithInMemoryCommunicator(n_workers, TestSameOnAllWorkers, n_workers);
}
|
||||
|
||||
TEST(GPUQuantile, Push) {
|
||||
size_t constexpr kRows = 100;
|
||||
|
||||
@@ -10,31 +10,6 @@
|
||||
|
||||
namespace xgboost {
|
||||
namespace common {
|
||||
// Initialize the collective communicator from the DMLC tracker environment.
//
// Reads `DMLC_TRACKER_PORT` and `DMLC_TRACKER_URI`; when either is missing,
// logs `msg` with the reason and returns without initializing, so callers can
// skip distributed tests in environments without a tracker.
//
// `msg` is taken by const reference: it is only streamed into the log, so the
// by-value copy in the original signature was unnecessary
// (clang-tidy: performance-unnecessary-value-param).  Call sites are
// unaffected by this change.
inline void InitCommunicatorContext(std::string const& msg, int32_t n_workers) {
  auto const* port = std::getenv("DMLC_TRACKER_PORT");
  if (!port) {
    LOG(WARNING) << msg << " as `DMLC_TRACKER_PORT` is not set up.";
    return;
  }
  auto const* uri = std::getenv("DMLC_TRACKER_URI");
  if (!uri) {
    LOG(WARNING) << msg << " as `DMLC_TRACKER_URI` is not set up.";
    return;
  }

  Json config{JsonObject()};
  config["DMLC_TRACKER_PORT"] = std::string{port};
  config["DMLC_TRACKER_URI"] = std::string{uri};
  config["DMLC_NUM_WORKER"] = n_workers;
  collective::Init(config);
}
|
||||
|
||||
template <typename Fn> void RunWithSeedsAndBins(size_t rows, Fn fn) {
|
||||
std::vector<int32_t> seeds(2);
|
||||
SimpleLCG lcg;
|
||||
|
||||
Reference in New Issue
Block a user