Model IO in JSON. (#5110)

This commit is contained in:
Jiaming Yuan
2019-12-11 11:20:40 +08:00
committed by GitHub
parent c7cc657a4d
commit 208ab3b1ff
25 changed files with 667 additions and 165 deletions

View File

@@ -3,6 +3,10 @@
#include <xgboost/version_config.h>
#include <xgboost/c_api.h>
#include <xgboost/data.h>
#include <xgboost/learner.h>
#include "../helpers.h"
#include "../../../src/common/io.h"
TEST(c_api, XGDMatrixCreateFromMatDT) {
std::vector<int> col0 = {0, -1, 3};
@@ -65,8 +69,44 @@ TEST(c_api, XGDMatrixCreateFromMat_omp) {
}
}
namespace xgboost {
// Verify that XGBoostVersion reports the compile-time patch version and
// tolerates null pointers for the version components the caller skips.
TEST(c_api, Version) {
  int patch {0};
  // Use nullptr (not NULL) per modern C++ style; major/minor are optional.
  XGBoostVersion(nullptr, nullptr, &patch);
  ASSERT_EQ(patch, XGBOOST_VER_PATCH);
}
// Round-trip a trained booster through the JSON model file format:
// save -> load -> save must be the identity on the serialized text.
TEST(c_api, Json_ModelIO) {
  size_t constexpr kRows = 10;
  dmlc::TemporaryDirectory tempdir;

  auto pp_dmat = CreateDMatrix(kRows, 10, 0);
  auto p_dmat = *pp_dmat;
  std::vector<std::shared_ptr<DMatrix>> mat {p_dmat};
  std::vector<bst_float> labels(kRows);
  for (size_t i = 0; i < labels.size(); ++i) {
    labels[i] = i;
  }
  p_dmat->Info().labels_.HostVector() = labels;

  std::shared_ptr<Learner> learner { Learner::Create(mat) };

  learner->UpdateOneIter(0, p_dmat.get());
  BoosterHandle handle = learner.get();

  std::string modelfile_0 = tempdir.path + "/model_0.json";
  // The C API signals failure through a non-zero return; assert on it
  // instead of silently discarding it.
  ASSERT_EQ(XGBoosterSaveModel(handle, modelfile_0.c_str()), 0);
  ASSERT_EQ(XGBoosterLoadModel(handle, modelfile_0.c_str()), 0);

  std::string modelfile_1 = tempdir.path + "/model_1.json";
  ASSERT_EQ(XGBoosterSaveModel(handle, modelfile_1.c_str()), 0);

  auto model_str_0 = common::LoadSequentialFile(modelfile_0);
  auto model_str_1 = common::LoadSequentialFile(modelfile_1);

  // A JSON document must open with an object brace.
  ASSERT_EQ(model_str_0.front(), '{');
  ASSERT_EQ(model_str_0, model_str_1);

  delete pp_dmat;  // CreateDMatrix hands back an owning pointer.
}
} // namespace xgboost

View File

@@ -0,0 +1,57 @@
/*!
* Copyright 2019 by Contributors
*/
#include <gtest/gtest.h>
#include <memory>
#include <sstream>
#include "../helpers.h"
#include "xgboost/json.h"
#include "xgboost/logging.h"
#include "xgboost/gbm.h"
#include "xgboost/generic_parameters.h"
#include "xgboost/learner.h"
namespace xgboost {
namespace gbm {
// Serialize a trained gblinear booster to JSON, round-trip it through
// text, and check the weight vector has one entry per feature plus bias.
TEST(GBLinear, Json_IO) {
  size_t constexpr kRows = 16, kCols = 16;

  LearnerModelParam param;
  param.num_feature = kCols;
  param.num_output_group = 1;

  GenericParameter gparam;
  gparam.Init(Args{});

  std::unique_ptr<GradientBooster> gbm {
    CreateTrainedGBM("gblinear", Args{}, kRows, kCols, &param, &gparam) };

  Json model { Object() };
  gbm->SaveModel(&model);
  ASSERT_TRUE(IsA<Object>(model));

  // Round-trip through text to make sure the model survives serialization.
  std::string model_str;
  Json::Dump(model, &model_str);
  model = Json::Load({model_str.c_str(), model_str.size()});
  ASSERT_TRUE(IsA<Object>(model));

  model = model["model"];
  auto weights = get<Array>(model["weights"]);
  // One weight per feature plus the bias term.
  ASSERT_EQ(weights.size(), kCols + 1);
}
} // namespace gbm
} // namespace xgboost

View File

@@ -96,6 +96,71 @@ TEST(GBTree, ChoosePredictor) {
}
// data is not pulled back into host
ASSERT_FALSE(data.HostCanWrite());
delete pp_dmat;
}
#endif // XGBOOST_USE_CUDA
// Some other parts of test are in `Tree.Json_IO'.
// Save a trained gbtree booster into a JSON object, round-trip it through
// text, and check the booster name field is preserved.
TEST(GBTree, Json_IO) {
  size_t constexpr kRows = 16, kCols = 16;

  LearnerModelParam mparam;
  mparam.num_feature = kCols;
  mparam.num_output_group = 1;
  mparam.base_score = 0.5;

  GenericParameter gparam;
  gparam.Init(Args{});

  std::unique_ptr<GradientBooster> gbm {
    CreateTrainedGBM("gbtree", Args{}, kRows, kCols, &mparam, &gparam) };

  Json model {Object()};
  model["model"] = Object();
  auto& j_model = model["model"];
  gbm->SaveModel(&j_model);

  // Dump straight into a string, matching the sibling Dart/GBLinear tests,
  // instead of going through an intermediate std::stringstream.
  std::string model_str;
  Json::Dump(model, &model_str);

  model = Json::Load({model_str.c_str(), model_str.size()});
  ASSERT_EQ(get<String>(model["model"]["name"]), "gbtree");
}
// JSON round-trip for the dart booster: save, reload from text, and check
// that the dart-specific fields (wrapped gbtree, weight_drop) are present.
TEST(Dart, Json_IO) {
  size_t constexpr kRows = 16, kCols = 16;

  LearnerModelParam mparam;
  mparam.num_feature = kCols;
  mparam.base_score = 0.5;
  mparam.num_output_group = 1;

  GenericParameter gparam;
  gparam.Init(Args{});

  std::unique_ptr<GradientBooster> booster {
    CreateTrainedGBM("dart", Args{}, kRows, kCols, &mparam, &gparam) };

  Json model {Object()};
  model["model"] = Object();
  auto& j_booster = model["model"];
  model["parameters"] = Object();
  booster->SaveModel(&j_booster);

  std::string serialized;
  Json::Dump(model, &serialized);
  model = Json::Load({serialized.c_str(), serialized.size()});

  {
    auto const& j_gbtree = model["model"]["gbtree"];
    ASSERT_TRUE(IsA<Object>(j_gbtree));
    ASSERT_EQ(get<String>(model["model"]["name"]), "dart");
    // Dart keeps a per-tree drop weight; the array must not be empty.
    ASSERT_NE(get<Array>(model["model"]["weight_drop"]).size(), 0);
  }
}
#endif
} // namespace xgboost

View File

@@ -6,6 +6,8 @@
#include <xgboost/learner.h>
#include <xgboost/version_config.h>
#include "xgboost/json.h"
#include "../../src/common/io.h"
namespace xgboost {
@@ -112,83 +114,54 @@ TEST(Learner, Configuration) {
}
}
TEST(Learner, ObjectiveParameter) {
using Arg = std::pair<std::string, std::string>;
size_t constexpr kRows = 10;
TEST(Learner, Json_ModelIO) {
// Test of comparing JSON object directly.
size_t constexpr kRows = 8;
int32_t constexpr kIters = 4;
auto pp_dmat = CreateDMatrix(kRows, 10, 0);
auto p_dmat = *pp_dmat;
std::vector<bst_float> labels(kRows);
for (size_t i = 0; i < labels.size(); ++i) {
labels[i] = i;
}
p_dmat->Info().labels_.HostVector() = labels;
std::vector<std::shared_ptr<DMatrix>> mat {p_dmat};
std::unique_ptr<Learner> learner {Learner::Create(mat)};
learner->SetParams({Arg{"tree_method", "auto"},
Arg{"objective", "multi:softprob"},
Arg{"num_class", "10"}});
learner->UpdateOneIter(0, p_dmat.get());
auto attr_names = learner->GetConfigurationArguments();
ASSERT_EQ(attr_names.at("objective"), "multi:softprob");
dmlc::TemporaryDirectory tempdir;
const std::string fname = tempdir.path + "/model_para.bst";
std::shared_ptr<DMatrix> p_dmat {*pp_dmat};
p_dmat->Info().labels_.Resize(kRows);
{
// Create a scope to close the stream before next read.
std::unique_ptr<dmlc::Stream> fo(dmlc::Stream::Create(fname.c_str(), "w"));
learner->Save(fo.get());
std::unique_ptr<Learner> learner { Learner::Create({p_dmat}) };
learner->Configure();
Json out { Object() };
learner->SaveModel(&out);
learner->LoadModel(out);
learner->Configure();
Json new_in { Object() };
learner->SaveModel(&new_in);
ASSERT_EQ(new_in, out);
}
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(fname.c_str(), "r"));
std::unique_ptr<Learner> learner1 {Learner::Create(mat)};
learner1->Load(fi.get());
auto attr_names1 = learner1->GetConfigurationArguments();
ASSERT_EQ(attr_names1.at("objective"), "multi:softprob");
{
std::unique_ptr<Learner> learner { Learner::Create({p_dmat}) };
learner->SetParam("verbosity", "3");
for (int32_t iter = 0; iter < kIters; ++iter) {
learner->UpdateOneIter(iter, p_dmat.get());
}
learner->SetAttr("bset_score", "15.2");
Json out { Object() };
learner->SaveModel(&out);
learner->LoadModel(out);
Json new_in { Object() };
learner->Configure();
learner->SaveModel(&new_in);
ASSERT_TRUE(IsA<Object>(out["Learner"]["attributes"]));
ASSERT_EQ(get<Object>(out["Learner"]["attributes"]).size(), 1);
ASSERT_EQ(out, new_in);
}
delete pp_dmat;
}
#if defined(XGBOOST_USE_CUDA)
TEST(Learner, IO) {
using Arg = std::pair<std::string, std::string>;
size_t constexpr kRows = 10;
auto pp_dmat = CreateDMatrix(kRows, 10, 0);
auto p_dmat = *pp_dmat;
std::vector<bst_float> labels(kRows);
for (size_t i = 0; i < labels.size(); ++i) {
labels[i] = i;
}
p_dmat->Info().labels_.HostVector() = labels;
std::vector<std::shared_ptr<DMatrix>> mat {p_dmat};
std::unique_ptr<Learner> learner {Learner::Create(mat)};
learner->SetParams({Arg{"tree_method", "auto"},
Arg{"predictor", "gpu_predictor"},
Arg{"gpu_id", "0"}});
learner->UpdateOneIter(0, p_dmat.get());
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
dmlc::TemporaryDirectory tempdir;
const std::string fname = tempdir.path + "/model.bst";
{
// Create a scope to close the stream before next read.
std::unique_ptr<dmlc::Stream> fo(dmlc::Stream::Create(fname.c_str(), "w"));
learner->Save(fo.get());
}
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(fname.c_str(), "r"));
learner->Load(fi.get());
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
delete pp_dmat;
}
// Tests for automatic GPU configuration.
TEST(Learner, GPUConfiguration) {
using Arg = std::pair<std::string, std::string>;
@@ -242,6 +215,5 @@ TEST(Learner, GPUConfiguration) {
delete pp_dmat;
}
#endif // XGBOOST_USE_CUDA
#endif // defined(XGBOOST_USE_CUDA)
} // namespace xgboost

View File

@@ -3,6 +3,7 @@
#include <xgboost/tree_model.h>
#include "../helpers.h"
#include "dmlc/filesystem.h"
#include "xgboost/json_io.h"
namespace xgboost {
// Manually construct tree in binary format
@@ -77,7 +78,7 @@ TEST(Tree, Load) {
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(tmp_file.c_str(), "r"));
xgboost::RegTree tree;
tree.LoadModel(fi.get());
tree.Load(fi.get());
EXPECT_EQ(tree.GetDepth(1), 1);
EXPECT_EQ(tree[0].SplitCond(), 0.5f);
EXPECT_EQ(tree[0].SplitIndex(), 5);
@@ -218,4 +219,30 @@ TEST(Tree, DumpDot) {
str = tree.DumpModel(fmap, true, R"(dot:{"graph_attrs": {"bgcolor": "#FFFF00"}})");
ASSERT_NE(str.find(R"(graph [ bgcolor="#FFFF00" ])"), std::string::npos);
}
// Expand a fresh tree once (root + two leaves), save it to JSON, check the
// serialized fields, then load it back and verify the node count survives.
TEST(Tree, Json_IO) {
  RegTree tree;
  tree.ExpandNode(0, 0, 0.0f, false, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f);

  Json j_tree{Object()};
  tree.SaveModel(&j_tree);
  // (Removed a dead std::stringstream dump whose output was never read.)

  // Scalar tree parameters are serialized as strings in the JSON model.
  auto tparam = j_tree["tree_param"];
  ASSERT_EQ(get<String>(tparam["num_feature"]), "0");
  ASSERT_EQ(get<String>(tparam["num_nodes"]), "3");
  ASSERT_EQ(get<String>(tparam["size_leaf_vector"]), "0");

  // One ExpandNode call yields a root plus two children: 3 nodes total,
  // mirrored across all per-node arrays.
  ASSERT_EQ(get<Array const>(j_tree["left_children"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["right_children"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["parents"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["split_indices"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["split_conditions"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["default_left"]).size(), 3);

  RegTree loaded_tree;
  loaded_tree.LoadModel(j_tree);
  ASSERT_EQ(loaded_tree.param.num_nodes, 3);
}
} // namespace xgboost