De-duplicate GPU parameters. (#4454)
* Only define `gpu_id` and `n_gpus` in `LearnerTrainParam` * Pass LearnerTrainParam through XGBoost via factory method. * Disable all GPU usage when GPU related parameters are not specified (fixes XGBoost choosing GPU too aggressively). * Test learner train param io. * Fix gpu pickling.
This commit is contained in:
@@ -12,12 +12,10 @@ namespace tree {
|
||||
|
||||
TEST(GPUExact, Update) {
|
||||
using Arg = std::pair<std::string, std::string>;
|
||||
std::vector<Arg> args{
|
||||
{"n_gpus", "1"},
|
||||
{"gpu_id", "0"},
|
||||
{"max_depth", "1"}};
|
||||
auto lparam = CreateEmptyGenericParam(0, 1);
|
||||
std::vector<Arg> args{{"max_depth", "1"}};
|
||||
|
||||
auto* p_gpuexact_maker = TreeUpdater::Create("grow_gpu");
|
||||
auto* p_gpuexact_maker = TreeUpdater::Create("grow_gpu", &lparam);
|
||||
p_gpuexact_maker->Init(args);
|
||||
|
||||
size_t constexpr kNRows = 4;
|
||||
|
||||
@@ -86,7 +86,6 @@ TEST(GpuHist, BuildGidxDense) {
|
||||
int constexpr kNRows = 16, kNCols = 8;
|
||||
TrainParam param;
|
||||
param.max_depth = 1;
|
||||
param.n_gpus = 1;
|
||||
param.max_leaves = 0;
|
||||
|
||||
DeviceShard<GradientPairPrecise> shard(0, 0, 0, kNRows, param, kNCols);
|
||||
@@ -125,7 +124,6 @@ TEST(GpuHist, BuildGidxSparse) {
|
||||
int constexpr kNRows = 16, kNCols = 8;
|
||||
TrainParam param;
|
||||
param.max_depth = 1;
|
||||
param.n_gpus = 1;
|
||||
param.max_leaves = 0;
|
||||
|
||||
DeviceShard<GradientPairPrecise> shard(0, 0, 0, kNRows, param, kNCols);
|
||||
@@ -169,7 +167,6 @@ void TestBuildHist(GPUHistBuilderBase<GradientSumT>& builder) {
|
||||
|
||||
TrainParam param;
|
||||
param.max_depth = 6;
|
||||
param.n_gpus = 1;
|
||||
param.max_leaves = 0;
|
||||
|
||||
DeviceShard<GradientSumT> shard(0, 0, 0, kNRows, param, kNCols);
|
||||
@@ -264,7 +261,6 @@ TEST(GpuHist, EvaluateSplits) {
|
||||
|
||||
TrainParam param;
|
||||
param.max_depth = 1;
|
||||
param.n_gpus = 1;
|
||||
param.colsample_bynode = 1;
|
||||
param.colsample_bylevel = 1;
|
||||
param.colsample_bytree = 1;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*!
|
||||
* Copyright 2018 by Contributors
|
||||
* Copyright 2018-2019 by Contributors
|
||||
*/
|
||||
#include "../helpers.h"
|
||||
#include "../../../src/common/host_device_vector.h"
|
||||
@@ -29,12 +29,14 @@ TEST(Updater, Prune) {
|
||||
{0.25f, 0.24f}, {0.25f, 0.24f}, {0.25f, 0.24f}, {0.25f, 0.24f} };
|
||||
auto dmat = CreateDMatrix(32, 16, 0.4, 3);
|
||||
|
||||
auto lparam = CreateEmptyGenericParam(0, 0);
|
||||
|
||||
// prepare tree
|
||||
RegTree tree = RegTree();
|
||||
tree.param.InitAllowUnknown(cfg);
|
||||
std::vector<RegTree*> trees {&tree};
|
||||
// prepare pruner
|
||||
std::unique_ptr<TreeUpdater> pruner(TreeUpdater::Create("prune"));
|
||||
std::unique_ptr<TreeUpdater> pruner(TreeUpdater::Create("prune", &lparam));
|
||||
pruner->Init(cfg);
|
||||
|
||||
// loss_chg < min_split_loss;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*!
|
||||
* Copyright 2018 by Contributors
|
||||
* Copyright 2018-2019 by Contributors
|
||||
*/
|
||||
#include "../helpers.h"
|
||||
#include "../../../src/common/host_device_vector.h"
|
||||
@@ -25,9 +25,10 @@ TEST(Updater, Refresh) {
|
||||
{"reg_lambda", "1"}};
|
||||
|
||||
RegTree tree = RegTree();
|
||||
auto lparam = CreateEmptyGenericParam(0, 0);
|
||||
tree.param.InitAllowUnknown(cfg);
|
||||
std::vector<RegTree*> trees {&tree};
|
||||
std::unique_ptr<TreeUpdater> refresher(TreeUpdater::Create("refresh"));
|
||||
std::unique_ptr<TreeUpdater> refresher(TreeUpdater::Create("refresh", &lparam));
|
||||
|
||||
tree.ExpandNode(0, 2, 0.2f, false, 0.0, 0.2f, 0.8f, 0.0f, 0.0f);
|
||||
int cleft = tree[0].LeftChild();
|
||||
|
||||
Reference in New Issue
Block a user