Model IO in JSON. (#5110)

This commit is contained in:
Jiaming Yuan 2019-12-11 11:20:40 +08:00 committed by GitHub
parent c7cc657a4d
commit 208ab3b1ff
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 667 additions and 165 deletions

View File

@ -25,7 +25,9 @@
// gbms
#include "../src/gbm/gbm.cc"
#include "../src/gbm/gbtree.cc"
#include "../src/gbm/gbtree_model.cc"
#include "../src/gbm/gblinear.cc"
#include "../src/gbm/gblinear_model.cc"
// data
#include "../src/data/data.cc"
@ -44,8 +46,8 @@
#endif
// trees
#include "../src/tree/split_evaluator.cc"
#include "../src/tree/param.cc"
#include "../src/tree/split_evaluator.cc"
#include "../src/tree/tree_model.cc"
#include "../src/tree/tree_updater.cc"
#include "../src/tree/updater_colmaker.cc"

View File

@ -428,7 +428,7 @@ XGB_DLL int XGBoosterPredict(BoosterHandle handle,
const float **out_result);
/*!
* \brief load model from existing file
* \brief Load model from existing file
* \param handle handle
* \param fname file name
* \return 0 when success, -1 when failure happens
@ -436,7 +436,7 @@ XGB_DLL int XGBoosterPredict(BoosterHandle handle,
XGB_DLL int XGBoosterLoadModel(BoosterHandle handle,
const char *fname);
/*!
* \brief save model into existing file
* \brief Save model into existing file
* \param handle handle
* \param fname file name
* \return 0 when success, -1 when failure happens

View File

@ -32,7 +32,7 @@ struct LearnerModelParam;
/*!
* \brief interface of gradient boosting model.
*/
class GradientBooster {
class GradientBooster : public Model {
protected:
GenericParameter const* generic_param_;

View File

@ -21,7 +21,7 @@ class FixedPrecisionStreamContainer : public std::basic_stringstream<
char, std::char_traits<char>, Allocator> {
public:
FixedPrecisionStreamContainer() {
this->precision(std::numeric_limits<Number::Float>::max_digits10);
this->precision(std::numeric_limits<double>::max_digits10);
this->imbue(std::locale("C"));
this->setf(std::ios::scientific);
}

View File

@ -16,15 +16,15 @@ class Json;
struct Model {
/*!
* \brief Save the model to stream.
* \param fo output write stream
* \brief load the model from a json object
* \param in json object where to load the model from
*/
virtual void SaveModel(dmlc::Stream* fo) const = 0;
virtual void LoadModel(Json const& in) = 0;
/*!
* \brief Load the model from stream.
* \param fi input read stream
* \brief saves the model config to a json object
* \param out json container where to save the model to
*/
virtual void LoadModel(dmlc::Stream* fi) = 0;
virtual void SaveModel(Json* out) const = 0;
};
struct Configurable {

View File

@ -303,12 +303,15 @@ class RegTree : public Model {
* \brief load model from stream
* \param fi input stream
*/
void LoadModel(dmlc::Stream* fi) override;
void Load(dmlc::Stream* fi);
/*!
* \brief save model to stream
* \param fo output stream
*/
void SaveModel(dmlc::Stream* fo) const override;
void Save(dmlc::Stream* fo) const;
void LoadModel(Json const& in) override;
void SaveModel(Json* out) const override;
bool operator==(const RegTree& b) const {
return nodes_ == b.nodes_ && stats_ == b.stats_ &&

View File

@ -5,23 +5,25 @@
#include <cstdio>
#include <cstring>
#include <fstream>
#include <algorithm>
#include <vector>
#include <string>
#include <memory>
#include "xgboost/data.h"
#include "xgboost/learner.h"
#include "xgboost/c_api.h"
#include "xgboost/logging.h"
#include "xgboost/version_config.h"
#include "xgboost/json.h"
#include "c_api_error.h"
#include "../data/simple_csr_source.h"
#include "../common/io.h"
#include "../data/adapter.h"
namespace xgboost {
// declare the data callback.
XGB_EXTERN_C int XGBoostNativeDataIterSetData(
@ -569,23 +571,43 @@ XGB_DLL int XGBoosterPredict(BoosterHandle handle,
XGB_DLL int XGBoosterLoadModel(BoosterHandle handle, const char* fname) {
API_BEGIN();
CHECK_HANDLE();
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(fname, "r"));
static_cast<Learner*>(handle)->Load(fi.get());
if (common::FileExtension(fname) == "json") {
auto str = common::LoadSequentialFile(fname);
CHECK_GT(str.size(), 2);
CHECK_EQ(str[0], '{');
Json in { Json::Load({str.c_str(), str.size()}) };
static_cast<Learner*>(handle)->LoadModel(in);
} else {
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(fname, "r"));
static_cast<Learner*>(handle)->Load(fi.get());
}
API_END();
}
XGB_DLL int XGBoosterSaveModel(BoosterHandle handle, const char* fname) {
XGB_DLL int XGBoosterSaveModel(BoosterHandle handle, const char* c_fname) {
API_BEGIN();
CHECK_HANDLE();
std::unique_ptr<dmlc::Stream> fo(dmlc::Stream::Create(fname, "w"));
auto *bst = static_cast<Learner*>(handle);
bst->Save(fo.get());
std::unique_ptr<dmlc::Stream> fo(dmlc::Stream::Create(c_fname, "w"));
auto *learner = static_cast<Learner *>(handle);
learner->Configure();
if (common::FileExtension(c_fname) == "json") {
Json out { Object() };
learner->SaveModel(&out);
std::string str;
Json::Dump(out, &str);
fo->Write(str.c_str(), str.size());
} else {
auto *bst = static_cast<Learner*>(handle);
bst->Save(fo.get());
}
API_END();
}
// The following two functions are `Load` and `Save` for memory based serialization
// methods. E.g. Python pickle.
XGB_DLL int XGBoosterLoadModelFromBuffer(BoosterHandle handle,
const void* buf,
xgboost::bst_ulong len) {
const void* buf,
xgboost::bst_ulong len) {
API_BEGIN();
CHECK_HANDLE();
common::MemoryFixSizeBuffer fs((void*)buf, len); // NOLINT(*)
@ -594,16 +616,17 @@ XGB_DLL int XGBoosterLoadModelFromBuffer(BoosterHandle handle,
}
XGB_DLL int XGBoosterGetModelRaw(BoosterHandle handle,
xgboost::bst_ulong* out_len,
const char** out_dptr) {
xgboost::bst_ulong* out_len,
const char** out_dptr) {
std::string& raw_str = XGBAPIThreadLocalStore::Get()->ret_str;
raw_str.resize(0);
API_BEGIN();
CHECK_HANDLE();
common::MemoryBufferStream fo(&raw_str);
auto *bst = static_cast<Learner*>(handle);
bst->Save(&fo);
auto *learner = static_cast<Learner*>(handle);
learner->Configure();
learner->Save(&fo);
*out_dptr = dmlc::BeginPtr(raw_str);
*out_len = static_cast<xgboost::bst_ulong>(raw_str.length());
API_END();
@ -619,6 +642,7 @@ inline void XGBoostDumpModelImpl(
std::vector<std::string>& str_vecs = XGBAPIThreadLocalStore::Get()->ret_vec_str;
std::vector<const char*>& charp_vecs = XGBAPIThreadLocalStore::Get()->ret_vec_charp;
auto *bst = static_cast<Learner*>(handle);
bst->Configure();
str_vecs = bst->DumpModel(fmap, with_stats != 0, format);
charp_vecs.resize(str_vecs.size());
for (size_t i = 0; i < str_vecs.size(); ++i) {

View File

@ -115,7 +115,7 @@ std::string LoadSequentialFile(std::string fname) {
}
size_t f_size_bytes = fs.st_size;
buffer.resize(f_size_bytes+1);
buffer.resize(f_size_bytes + 1);
int32_t fd = open(fname.c_str(), O_RDONLY);
posix_fadvise(fd, 0, 0, POSIX_FADV_SEQUENTIAL);
ssize_t bytes_read = read(fd, &buffer[0], f_size_bytes);

View File

@ -85,6 +85,20 @@ class GBLinear : public GradientBooster {
model_.Save(fo);
}
void SaveModel(Json* p_out) const override {
  // JSON layout: {"name": "gblinear", "model": {...}}.  The "name" field lets
  // the loader verify it is handed the right booster type.
  Json& out = *p_out;
  out["name"] = String{"gblinear"};
  out["model"] = Object();
  model_.SaveModel(&out["model"]);
}
void LoadModel(Json const& in) override {
  // Refuse payloads that were produced by a different booster type.
  CHECK_EQ(get<String>(in["name"]), "gblinear");
  model_.LoadModel(in["model"]);
}
void DoBoost(DMatrix *p_fmat,
HostDeviceVector<GradientPair> *in_gpair,
ObjFunction* obj) override {

38
src/gbm/gblinear_model.cc Normal file
View File

@ -0,0 +1,38 @@
/*!
* Copyright 2019 by Contributors
*/
#include <utility>
#include <limits>
#include "xgboost/json.h"
#include "gblinear_model.h"
namespace xgboost {
namespace gbm {
void GBLinearModel::SaveModel(Json* p_out) const {
  // The JSON number type must hold a weight without narrowing, otherwise a
  // save/load round-trip would silently change the model.
  using WeightType = std::remove_reference<decltype(std::declval<decltype(weight)>().back())>::type;
  using JsonFloat = Number::Float;
  static_assert(std::is_same<WeightType, JsonFloat>::value,
                "Weight type should be of the same type with JSON float");

  Json& out = *p_out;
  size_t const n = weight.size();
  std::vector<Json> j_weights(n);
  for (size_t idx = 0; idx < n; ++idx) {
    j_weights[idx] = weight[idx];
  }
  out["weights"] = std::move(j_weights);
}
void GBLinearModel::LoadModel(Json const& in) {
  // Restore the flat weight vector written by SaveModel.
  auto const& j_weights = get<Array const>(in["weights"]);
  weight.resize(j_weights.size());
  for (size_t idx = 0; idx < weight.size(); ++idx) {
    weight[idx] = get<Number const>(j_weights[idx]);
  }
}
DMLC_REGISTER_PARAMETER(DeprecatedGBLinearModelParam);
} // namespace gbm
} // namespace xgboost

View File

@ -62,27 +62,21 @@ class GBLinearModel : public Model {
learner_model_param_->num_output_group);
std::fill(weight.begin(), weight.end(), 0.0f);
}
void SaveModel(Json *p_out) const override;
void LoadModel(Json const &in) override;
// save the model to file
inline void Save(dmlc::Stream* fo) const {
void Save(dmlc::Stream *fo) const {
fo->Write(&param, sizeof(param));
fo->Write(weight);
}
// load model from file
inline void Load(dmlc::Stream* fi) {
void Load(dmlc::Stream *fi) {
CHECK_EQ(fi->Read(&param, sizeof(param)), sizeof(param));
fi->Read(&weight);
}
void LoadModel(dmlc::Stream* fi) override {
// They are the same right now until we can split up the saved parameter from model.
this->Load(fi);
}
void SaveModel(dmlc::Stream* fo) const override {
// They are the same right now until we can split up the saved parameter from model.
this->Save(fo);
}
// model bias
inline bst_float *bias() {
return &weight[learner_model_param_->num_feature *

View File

@ -289,8 +289,19 @@ void GBTree::CommitModel(std::vector<std::vector<std::unique_ptr<RegTree>>>&& ne
monitor_.Stop("CommitModel");
}
void GBTree::LoadModel(Json const& in) {
  // Guard against loading a model that was saved by a different booster.
  auto const& booster_name = get<String>(in["name"]);
  CHECK_EQ(booster_name, "gbtree");
  model_.LoadModel(in["model"]);
}
void GBTree::SaveModel(Json* p_out) const {
  // JSON layout: {"name": "gbtree", "model": <GBTreeModel>}.
  Json& out = *p_out;
  out["name"] = String("gbtree");
  out["model"] = Object();
  model_.SaveModel(&out["model"]);
}
// dart
class Dart : public GBTree {
public:
explicit Dart(LearnerModelParam const* booster_config) :
@ -303,6 +314,30 @@ class Dart : public GBTree {
}
}
// Save dart as a gbtree payload plus the per-tree drop weights.
// JSON layout: {"name": "dart", "gbtree": <GBTree model>, "weight_drop": [...]}.
void SaveModel(Json *p_out) const override {
  auto &out = *p_out;
  out["name"] = String("dart");
  // Delegate the tree ensemble itself to the base class serializer.
  out["gbtree"] = Object();
  GBTree::SaveModel(&(out["gbtree"]));
  // One drop weight per tree.
  std::vector<Json> j_weight_drop(weight_drop_.size());
  for (size_t i = 0; i < weight_drop_.size(); ++i) {
    j_weight_drop[i] = Number(weight_drop_[i]);
  }
  out["weight_drop"] = Array(j_weight_drop);
}
// Inverse of SaveModel: restore the tree ensemble, then the drop weights.
void LoadModel(Json const& in) override {
  // Refuse payloads saved by a different booster type.
  CHECK_EQ(get<String>(in["name"]), "dart");
  auto const& gbtree = in["gbtree"];
  GBTree::LoadModel(gbtree);
  auto const& j_weight_drop = get<Array>(in["weight_drop"]);
  weight_drop_.resize(j_weight_drop.size());
  for (size_t i = 0; i < weight_drop_.size(); ++i) {
    weight_drop_[i] = get<Number const>(j_weight_drop[i]);
  }
}
void Load(dmlc::Stream* fi) override {
GBTree::Load(fi);
weight_drop_.resize(model_.param.num_trees);
@ -387,7 +422,7 @@ class Dart : public GBTree {
if (init_out_preds) {
size_t n = num_group * p_fmat->Info().num_row_;
const auto& base_margin =
p_fmat->Info().base_margin_.ConstHostVector();
p_fmat->Info().base_margin_.ConstHostVector();
out_preds->resize(n);
if (base_margin.size() != 0) {
CHECK_EQ(out_preds->size(), n);

View File

@ -192,6 +192,9 @@ class GBTree : public GradientBooster {
model_.Save(fo);
}
void SaveModel(Json* p_out) const override;
void LoadModel(Json const& in) override;
bool AllowLazyCheckPoint() const override {
return model_.learner_model_param_->num_output_group == 1 ||
tparam_.updater_seq.find("distcol") != std::string::npos;

85
src/gbm/gbtree_model.cc Normal file
View File

@ -0,0 +1,85 @@
/*!
* Copyright 2019 by Contributors
*/
#include "xgboost/json.h"
#include "xgboost/logging.h"
#include "gbtree_model.h"
namespace xgboost {
namespace gbm {
// Binary serialization: param header, then each tree, then the tree_info
// array.  The layout must stay byte-compatible with Load() below.
void GBTreeModel::Save(dmlc::Stream* fo) const {
  CHECK_EQ(param.num_trees, static_cast<int32_t>(trees.size()));
  fo->Write(&param, sizeof(param));
  for (const auto & tree : trees) {
    tree->Save(fo);
  }
  // tree_info maps each tree to its output group; omit the write entirely
  // when empty since Load sizes it from param.num_trees.
  if (tree_info.size() != 0) {
    fo->Write(dmlc::BeginPtr(tree_info), sizeof(int32_t) * tree_info.size());
  }
}
// Binary deserialization matching Save() above: param header, trees,
// then the tree_info array.
void GBTreeModel::Load(dmlc::Stream* fi) {
  CHECK_EQ(fi->Read(&param, sizeof(param)), sizeof(param))
      << "GBTree: invalid model file";
  // Drop any previously held trees before reading the new ensemble.
  trees.clear();
  trees_to_update.clear();
  for (int32_t i = 0; i < param.num_trees; ++i) {
    std::unique_ptr<RegTree> ptr(new RegTree());
    ptr->Load(fi);
    trees.push_back(std::move(ptr));
  }
  tree_info.resize(param.num_trees);
  if (param.num_trees != 0) {
    CHECK_EQ(
        fi->Read(dmlc::BeginPtr(tree_info), sizeof(int32_t) * param.num_trees),
        sizeof(int32_t) * param.num_trees);
  }
}
// JSON serialization of the tree ensemble.
// Layout: {"model_param": {...}, "trees": [...], "tree_info": [...]}.
void GBTreeModel::SaveModel(Json* p_out) const {
  auto& out = *p_out;
  CHECK_EQ(param.num_trees, static_cast<int>(trees.size()));
  out["model_param"] = toJson(param);

  std::vector<Json> trees_json;
  trees_json.reserve(trees.size());  // one allocation instead of repeated growth
  size_t t = 0;
  for (auto const& tree : trees) {
    Json tree_json{Object()};
    tree->SaveModel(&tree_json);
    // Stored as a string to match the model_param string convention.
    tree_json["id"] = std::to_string(t);
    trees_json.emplace_back(std::move(tree_json));  // move: tree_json is dead after this
    ++t;
  }

  // tree_info maps each tree to its output group.
  std::vector<Json> tree_info_json(tree_info.size());
  for (size_t i = 0; i < tree_info.size(); ++i) {
    tree_info_json[i] = Integer(tree_info[i]);
  }

  out["trees"] = Array(std::move(trees_json));
  out["tree_info"] = Array(std::move(tree_info_json));
}
// Inverse of SaveModel: restore the parameters, the trees, and tree_info.
void GBTreeModel::LoadModel(Json const& in) {
  fromJson(in["model_param"], &param);

  trees.clear();
  trees_to_update.clear();

  auto const& trees_json = get<Array const>(in["trees"]);
  trees.resize(trees_json.size());
  for (size_t t = 0; t < trees.size(); ++t) {
    trees[t].reset(new RegTree());
    trees[t]->LoadModel(trees_json[t]);
  }

  auto const& tree_info_json = get<Array const>(in["tree_info"]);
  // Guard the loop below against an inconsistent payload; without this an
  // undersized "tree_info" array would be indexed out of bounds.
  CHECK_EQ(static_cast<int32_t>(tree_info_json.size()), param.num_trees)
      << "`tree_info` and `num_trees` are inconsistent in the JSON model.";
  tree_info.resize(param.num_trees);
  for (int32_t i = 0; i < param.num_trees; ++i) {
    tree_info[i] = get<Integer const>(tree_info_json[i]);
  }
}
} // namespace gbm
} // namespace xgboost

View File

@ -84,43 +84,11 @@ struct GBTreeModel : public Model {
}
}
void LoadModel(dmlc::Stream* fi) override {
// They are the same right now until we can split up the saved parameter from model.
this->Load(fi);
}
void SaveModel(dmlc::Stream* fo) const override {
// They are the same right now until we can split up the saved parameter from model.
this->Save(fo);
}
void Load(dmlc::Stream* fi);
void Save(dmlc::Stream* fo) const;
void Load(dmlc::Stream* fi) {
CHECK_EQ(fi->Read(&param, sizeof(param)), sizeof(param))
<< "GBTree: invalid model file";
trees.clear();
trees_to_update.clear();
for (int i = 0; i < param.num_trees; ++i) {
std::unique_ptr<RegTree> ptr(new RegTree());
ptr->LoadModel(fi);
trees.push_back(std::move(ptr));
}
tree_info.resize(param.num_trees);
if (param.num_trees != 0) {
CHECK_EQ(
fi->Read(dmlc::BeginPtr(tree_info), sizeof(int) * param.num_trees),
sizeof(int) * param.num_trees);
}
}
void Save(dmlc::Stream* fo) const {
CHECK_EQ(param.num_trees, static_cast<int>(trees.size()));
fo->Write(&param, sizeof(param));
for (const auto & tree : trees) {
tree->SaveModel(fo);
}
if (tree_info.size() != 0) {
fo->Write(dmlc::BeginPtr(tree_info), sizeof(int) * tree_info.size());
}
}
void SaveModel(Json* p_out) const override;
void LoadModel(Json const& p_out) override;
std::vector<std::string> DumpModel(const FeatureMap& fmap, bool with_stats,
std::string format) const {

View File

@ -266,14 +266,61 @@ class LearnerImpl : public Learner {
}
}
void LoadModel(dmlc::Stream* fi) override {
// They are the same right now until we can split up the saved parameter from model.
this->Load(fi);
void LoadModel(Json const& in) override {
CHECK(IsA<Object>(in));
Version::Load(in, false);
auto const& learner = get<Object>(in["Learner"]);
mparam_.FromJson(learner.at("learner_model_param"));
auto const& objective_fn = learner.at("objective");
std::string name = get<String>(objective_fn["name"]);
tparam_.UpdateAllowUnknown(Args{{"objective", name}});
obj_.reset(ObjFunction::Create(name, &generic_parameters_));
obj_->LoadConfig(objective_fn);
auto const& gradient_booster = learner.at("gradient_booster");
name = get<String>(gradient_booster["name"]);
tparam_.UpdateAllowUnknown(Args{{"booster", name}});
gbm_.reset(GradientBooster::Create(tparam_.booster,
&generic_parameters_, &learner_model_param_,
cache_));
gbm_->LoadModel(gradient_booster);
learner_model_param_ = LearnerModelParam(mparam_,
obj_->ProbToMargin(mparam_.base_score));
auto const& j_attributes = get<Object const>(learner.at("attributes"));
attributes_.clear();
for (auto const& kv : j_attributes) {
attributes_[kv.first] = get<String const>(kv.second);
}
this->need_configuration_ = true;
}
void SaveModel(dmlc::Stream* fo) const override {
// They are the same right now until we can split up the saved parameter from model.
this->Save(fo);
void SaveModel(Json* p_out) const override {
CHECK(!this->need_configuration_) << "Call Configure before saving model.";
Version::Save(p_out);
Json& out { *p_out };
out["Learner"] = Object();
auto& learner = out["Learner"];
learner["learner_model_param"] = mparam_.ToJson();
learner["gradient_booster"] = Object();
auto& gradient_booster = learner["gradient_booster"];
gbm_->SaveModel(&gradient_booster);
learner["objective"] = Object();
auto& objective_fn = learner["objective"];
obj_->SaveConfig(&objective_fn);
learner["attributes"] = Object();
for (auto const& kv : attributes_) {
learner["attributes"][kv.first] = String(kv.second);
}
}
void Load(dmlc::Stream* fi) override {
@ -747,7 +794,6 @@ class LearnerImpl : public Learner {
LearnerTrainParam tparam_;
// configurations
std::map<std::string, std::string> cfg_;
// FIXME(trivialfis): Legacy field used to store extra attributes into binary model.
std::map<std::string, std::string> attributes_;
std::vector<std::string> metric_names_;
static std::string const kEvalMetric; // NOLINT

View File

@ -8,12 +8,15 @@
#include <xgboost/tree_model.h>
#include <xgboost/logging.h>
#include <xgboost/json.h>
#include <sstream>
#include <limits>
#include <cmath>
#include <iomanip>
#include "param.h"
#include "../common/common.h"
namespace xgboost {
// register tree parameter
@ -615,7 +618,7 @@ std::string RegTree::DumpModel(const FeatureMap& fmap,
return result;
}
void RegTree::LoadModel(dmlc::Stream* fi) {
void RegTree::Load(dmlc::Stream* fi) {
CHECK_EQ(fi->Read(&param, sizeof(TreeParam)), sizeof(TreeParam));
nodes_.resize(param.num_nodes);
stats_.resize(param.num_nodes);
@ -633,11 +636,7 @@ void RegTree::LoadModel(dmlc::Stream* fi) {
}
CHECK_EQ(static_cast<int>(deleted_nodes_.size()), param.num_deleted);
}
/*!
* \brief save model to stream
* \param fo output stream
*/
void RegTree::SaveModel(dmlc::Stream* fo) const {
void RegTree::Save(dmlc::Stream* fo) const {
CHECK_EQ(param.num_nodes, static_cast<int>(nodes_.size()));
CHECK_EQ(param.num_nodes, static_cast<int>(stats_.size()));
fo->Write(&param, sizeof(TreeParam));
@ -646,6 +645,114 @@ void RegTree::SaveModel(dmlc::Stream* fo) const {
fo->Write(dmlc::BeginPtr(stats_), sizeof(RTreeNodeStat) * nodes_.size());
}
// JSON deserialization of a single tree.  The payload stores the per-node
// fields as parallel arrays (column layout); every array must have exactly
// param.num_nodes entries.
void RegTree::LoadModel(Json const& in) {
  fromJson(in["tree_param"], &param);
  auto n_nodes = param.num_nodes;
  CHECK_NE(n_nodes, 0);
  // stats: one entry per node
  auto const& loss_changes = get<Array const>(in["loss_changes"]);
  CHECK_EQ(loss_changes.size(), n_nodes);
  auto const& sum_hessian = get<Array const>(in["sum_hessian"]);
  CHECK_EQ(sum_hessian.size(), n_nodes);
  auto const& base_weights = get<Array const>(in["base_weights"]);
  CHECK_EQ(base_weights.size(), n_nodes);
  auto const& leaf_child_counts = get<Array const>(in["leaf_child_counts"]);
  CHECK_EQ(leaf_child_counts.size(), n_nodes);
  // nodes: structural fields, one entry per node
  auto const& lefts = get<Array const>(in["left_children"]);
  CHECK_EQ(lefts.size(), n_nodes);
  auto const& rights = get<Array const>(in["right_children"]);
  CHECK_EQ(rights.size(), n_nodes);
  auto const& parents = get<Array const>(in["parents"]);
  CHECK_EQ(parents.size(), n_nodes);
  auto const& indices = get<Array const>(in["split_indices"]);
  CHECK_EQ(indices.size(), n_nodes);
  auto const& conds = get<Array const>(in["split_conditions"]);
  CHECK_EQ(conds.size(), n_nodes);
  auto const& default_left = get<Array const>(in["default_left"]);
  CHECK_EQ(default_left.size(), n_nodes);

  // Rebuild the node and stat arrays from the column payload.
  stats_.resize(n_nodes);
  nodes_.resize(n_nodes);
  for (int32_t i = 0; i < n_nodes; ++i) {
    auto& s = stats_[i];
    s.loss_chg = get<Number const>(loss_changes[i]);
    s.sum_hess = get<Number const>(sum_hessian[i]);
    s.base_weight = get<Number const>(base_weights[i]);
    s.leaf_child_cnt = get<Integer const>(leaf_child_counts[i]);

    auto& n = nodes_[i];
    auto left = get<Integer const>(lefts[i]);
    auto right = get<Integer const>(rights[i]);
    auto parent = get<Integer const>(parents[i]);
    auto ind = get<Integer const>(indices[i]);
    auto cond = get<Number const>(conds[i]);
    auto dft_left = get<Boolean const>(default_left[i]);
    n = Node(left, right, parent, ind, cond, dft_left);
  }

  // Recover the deleted-node free list (node 0 is the root and is never
  // deleted, so the scan starts at 1) and cross-check it against the
  // stored count.
  deleted_nodes_.resize(0);
  for (bst_node_t i = 1; i < param.num_nodes; ++i) {
    if (nodes_[i].IsDeleted()) {
      deleted_nodes_.push_back(i);
    }
  }
  CHECK_EQ(static_cast<bst_node_t>(deleted_nodes_.size()), param.num_deleted);
}
// JSON serialization of a single tree.  Per-node fields are written as
// parallel arrays (column layout), mirroring LoadModel above.
void RegTree::SaveModel(Json* p_out) const {
  auto& out = *p_out;
  CHECK_EQ(param.num_nodes, static_cast<int>(nodes_.size()));
  CHECK_EQ(param.num_nodes, static_cast<int>(stats_.size()));
  out["tree_param"] = toJson(param);
  // Sanity check: toJson must have round-tripped num_nodes (stored as string).
  CHECK_EQ(get<String>(out["tree_param"]["num_nodes"]), std::to_string(param.num_nodes));
  using I = Integer::Int;
  auto n_nodes = param.num_nodes;

  // stats columns
  std::vector<Json> loss_changes(n_nodes);
  std::vector<Json> sum_hessian(n_nodes);
  std::vector<Json> base_weights(n_nodes);
  std::vector<Json> leaf_child_counts(n_nodes);

  // node structure columns
  std::vector<Json> lefts(n_nodes);
  std::vector<Json> rights(n_nodes);
  std::vector<Json> parents(n_nodes);
  std::vector<Json> indices(n_nodes);
  std::vector<Json> conds(n_nodes);
  std::vector<Json> default_left(n_nodes);

  for (int32_t i = 0; i < n_nodes; ++i) {
    auto const& s = stats_[i];
    loss_changes[i] = s.loss_chg;
    sum_hessian[i] = s.sum_hess;
    base_weights[i] = s.base_weight;
    leaf_child_counts[i] = static_cast<I>(s.leaf_child_cnt);

    auto const& n = nodes_[i];
    lefts[i] = static_cast<I>(n.LeftChild());
    rights[i] = static_cast<I>(n.RightChild());
    parents[i] = static_cast<I>(n.Parent());
    indices[i] = static_cast<I>(n.SplitIndex());
    conds[i] = n.SplitCond();
    default_left[i] = n.DefaultLeft();
  }

  out["loss_changes"] = std::move(loss_changes);
  out["sum_hessian"] = std::move(sum_hessian);
  out["base_weights"] = std::move(base_weights);
  out["leaf_child_counts"] = std::move(leaf_child_counts);

  out["left_children"] = std::move(lefts);
  out["right_children"] = std::move(rights);
  out["parents"] = std::move(parents);
  out["split_indices"] = std::move(indices);
  out["split_conditions"] = std::move(conds);
  out["default_left"] = std::move(default_left);
}
void RegTree::FillNodeMeanValues() {
size_t num_nodes = this->param.num_nodes;
if (this->node_mean_values_.size() == num_nodes) {

View File

@ -1110,12 +1110,12 @@ class GPUHistMakerSpecialised {
common::MemoryBufferStream fs(&s_model);
int rank = rabit::GetRank();
if (rank == 0) {
local_tree->SaveModel(&fs);
local_tree->Save(&fs);
}
fs.Seek(0);
rabit::Broadcast(&s_model, 0);
RegTree reference_tree {}; // rank 0 tree
reference_tree.LoadModel(&fs);
reference_tree.Load(&fs);
CHECK(*local_tree == reference_tree);
}

View File

@ -40,13 +40,13 @@ class TreeSyncher: public TreeUpdater {
int rank = rabit::GetRank();
if (rank == 0) {
for (auto tree : trees) {
tree->SaveModel(&fs);
tree->Save(&fs);
}
}
fs.Seek(0);
rabit::Broadcast(&s_model, 0);
for (auto tree : trees) {
tree->LoadModel(&fs);
tree->Load(&fs);
}
}
};

View File

@ -3,6 +3,10 @@
#include <xgboost/version_config.h>
#include <xgboost/c_api.h>
#include <xgboost/data.h>
#include <xgboost/learner.h>
#include "../helpers.h"
#include "../../../src/common/io.h"
TEST(c_api, XGDMatrixCreateFromMatDT) {
std::vector<int> col0 = {0, -1, 3};
@ -65,8 +69,44 @@ TEST(c_api, XGDMatrixCreateFromMat_omp) {
}
}
namespace xgboost {
// The C API must report the same patch version the headers were built with.
TEST(c_api, Version) {
  int patch {0};
  XGBoostVersion(NULL, NULL, &patch);  // NOLINT
  ASSERT_EQ(patch, XGBOOST_VER_PATCH);
}
// Round-trip a model through the C API in JSON form: save, load, save again,
// and require the two JSON files to be byte-identical.
TEST(c_api, Json_ModelIO) {
  size_t constexpr kRows = 10;
  dmlc::TemporaryDirectory tempdir;

  auto pp_dmat = CreateDMatrix(kRows, 10, 0);
  auto p_dmat = *pp_dmat;
  std::vector<std::shared_ptr<DMatrix>> mat {p_dmat};
  std::vector<bst_float> labels(kRows);
  for (size_t i = 0; i < labels.size(); ++i) {
    labels[i] = i;
  }
  p_dmat->Info().labels_.HostVector() = labels;

  std::shared_ptr<Learner> learner { Learner::Create(mat) };
  learner->UpdateOneIter(0, p_dmat.get());
  BoosterHandle handle = learner.get();

  // The ".json" extension selects the JSON code path in the C API.
  // Check the return codes: 0 is success, -1 is failure.
  std::string modelfile_0 = tempdir.path + "/model_0.json";
  ASSERT_EQ(XGBoosterSaveModel(handle, modelfile_0.c_str()), 0);
  ASSERT_EQ(XGBoosterLoadModel(handle, modelfile_0.c_str()), 0);

  std::string modelfile_1 = tempdir.path + "/model_1.json";
  ASSERT_EQ(XGBoosterSaveModel(handle, modelfile_1.c_str()), 0);

  auto model_str_0 = common::LoadSequentialFile(modelfile_0);
  auto model_str_1 = common::LoadSequentialFile(modelfile_1);

  // A JSON document must start with an object, and the round-trip must be
  // lossless.
  ASSERT_EQ(model_str_0.front(), '{');
  ASSERT_EQ(model_str_0, model_str_1);

  delete pp_dmat;
}
} // namespace xgboost

View File

@ -0,0 +1,57 @@
/*!
* Copyright 2019 by Contributors
*/
#include <gtest/gtest.h>
#include <memory>
#include <sstream>
#include "../helpers.h"
#include "xgboost/json.h"
#include "xgboost/logging.h"
#include "xgboost/gbm.h"
#include "xgboost/generic_parameters.h"
#include "xgboost/learner.h"
namespace xgboost {
namespace gbm {
// Save a trained gblinear booster to JSON, dump/parse it, and verify the
// weight vector survives with the expected length.
TEST(GBLinear, Json_IO) {
  size_t constexpr kRows = 16, kCols = 16;

  LearnerModelParam param;
  param.num_feature = kCols;
  param.num_output_group = 1;

  GenericParameter gparam;
  gparam.Init(Args{});

  std::unique_ptr<GradientBooster> gbm {
    CreateTrainedGBM("gblinear", Args{}, kRows, kCols, &param, &gparam) };

  Json model { Object() };
  gbm->SaveModel(&model);
  ASSERT_TRUE(IsA<Object>(model));

  // Serialize to text and parse it back, so the text round-trip is covered
  // as well as the in-memory object.
  std::string model_str;
  Json::Dump(model, &model_str);

  model = Json::Load({model_str.c_str(), model_str.size()});
  ASSERT_TRUE(IsA<Object>(model));

  model = model["model"];

  {
    auto weights = get<Array>(model["weights"]);
    ASSERT_EQ(weights.size(), 17);
  }
  {
    // Re-parse from scratch to verify the parsed document is stable.
    model = Json::Load({model_str.c_str(), model_str.size()});
    model = model["model"];
    auto weights = get<Array>(model["weights"]);
    ASSERT_EQ(weights.size(), 17);  // 16 + 1 (bias)
  }
}
} // namespace gbm
} // namespace xgboost

View File

@ -96,6 +96,71 @@ TEST(GBTree, ChoosePredictor) {
}
// data is not pulled back into host
ASSERT_FALSE(data.HostCanWrite());
delete pp_dmat;
}
#endif // XGBOOST_USE_CUDA
// Some other parts of this test are in `Tree.Json_IO`.
// Save a trained gbtree booster to JSON and verify the booster name is
// preserved through a text round-trip.
TEST(GBTree, Json_IO) {
  size_t constexpr kRows = 16, kCols = 16;

  LearnerModelParam mparam;
  mparam.num_feature = kCols;
  mparam.num_output_group = 1;
  mparam.base_score = 0.5;

  GenericParameter gparam;
  gparam.Init(Args{});

  std::unique_ptr<GradientBooster> gbm {
    CreateTrainedGBM("gbtree", Args{}, kRows, kCols, &mparam, &gparam) };

  Json model {Object()};
  model["model"] = Object();
  auto& j_model = model["model"];

  gbm->SaveModel(&j_model);

  // Dump to text and parse back before asserting, covering the writer and
  // reader together.
  std::stringstream ss;
  Json::Dump(model, &ss);

  auto model_str = ss.str();
  model = Json::Load({model_str.c_str(), model_str.size()});
  ASSERT_EQ(get<String>(model["model"]["name"]), "gbtree");
}
// Save a trained dart booster to JSON and verify the nested gbtree payload
// and the drop-weight array are present after a text round-trip.
TEST(Dart, Json_IO) {
  size_t constexpr kRows = 16, kCols = 16;

  LearnerModelParam mparam;
  mparam.num_feature = kCols;
  mparam.base_score = 0.5;
  mparam.num_output_group = 1;

  GenericParameter gparam;
  gparam.Init(Args{});

  std::unique_ptr<GradientBooster> gbm {
    CreateTrainedGBM("dart", Args{}, kRows, kCols, &mparam, &gparam) };

  Json model {Object()};
  model["model"] = Object();
  auto& j_model = model["model"];
  model["parameters"] = Object();

  gbm->SaveModel(&j_model);

  std::string model_str;
  Json::Dump(model, &model_str);
  model = Json::Load({model_str.c_str(), model_str.size()});

  {
    // Dart wraps the gbtree payload under "gbtree" and adds "weight_drop".
    auto const& gbtree = model["model"]["gbtree"];
    ASSERT_TRUE(IsA<Object>(gbtree));
    ASSERT_EQ(get<String>(model["model"]["name"]), "dart");
    ASSERT_NE(get<Array>(model["model"]["weight_drop"]).size(), 0);
  }
}
#endif
} // namespace xgboost

View File

@ -6,6 +6,8 @@
#include <xgboost/learner.h>
#include <xgboost/version_config.h>
#include "xgboost/json.h"
#include "../../src/common/io.h"
namespace xgboost {
@ -112,83 +114,54 @@ TEST(Learner, Configuration) {
}
}
TEST(Learner, ObjectiveParameter) {
using Arg = std::pair<std::string, std::string>;
size_t constexpr kRows = 10;
TEST(Learner, Json_ModelIO) {
// Test of comparing JSON object directly.
size_t constexpr kRows = 8;
int32_t constexpr kIters = 4;
auto pp_dmat = CreateDMatrix(kRows, 10, 0);
auto p_dmat = *pp_dmat;
std::vector<bst_float> labels(kRows);
for (size_t i = 0; i < labels.size(); ++i) {
labels[i] = i;
}
p_dmat->Info().labels_.HostVector() = labels;
std::vector<std::shared_ptr<DMatrix>> mat {p_dmat};
std::unique_ptr<Learner> learner {Learner::Create(mat)};
learner->SetParams({Arg{"tree_method", "auto"},
Arg{"objective", "multi:softprob"},
Arg{"num_class", "10"}});
learner->UpdateOneIter(0, p_dmat.get());
auto attr_names = learner->GetConfigurationArguments();
ASSERT_EQ(attr_names.at("objective"), "multi:softprob");
dmlc::TemporaryDirectory tempdir;
const std::string fname = tempdir.path + "/model_para.bst";
std::shared_ptr<DMatrix> p_dmat {*pp_dmat};
p_dmat->Info().labels_.Resize(kRows);
{
// Create a scope to close the stream before next read.
std::unique_ptr<dmlc::Stream> fo(dmlc::Stream::Create(fname.c_str(), "w"));
learner->Save(fo.get());
std::unique_ptr<Learner> learner { Learner::Create({p_dmat}) };
learner->Configure();
Json out { Object() };
learner->SaveModel(&out);
learner->LoadModel(out);
learner->Configure();
Json new_in { Object() };
learner->SaveModel(&new_in);
ASSERT_EQ(new_in, out);
}
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(fname.c_str(), "r"));
std::unique_ptr<Learner> learner1 {Learner::Create(mat)};
learner1->Load(fi.get());
auto attr_names1 = learner1->GetConfigurationArguments();
ASSERT_EQ(attr_names1.at("objective"), "multi:softprob");
{
std::unique_ptr<Learner> learner { Learner::Create({p_dmat}) };
learner->SetParam("verbosity", "3");
for (int32_t iter = 0; iter < kIters; ++iter) {
learner->UpdateOneIter(iter, p_dmat.get());
}
learner->SetAttr("bset_score", "15.2");
Json out { Object() };
learner->SaveModel(&out);
learner->LoadModel(out);
Json new_in { Object() };
learner->Configure();
learner->SaveModel(&new_in);
ASSERT_TRUE(IsA<Object>(out["Learner"]["attributes"]));
ASSERT_EQ(get<Object>(out["Learner"]["attributes"]).size(), 1);
ASSERT_EQ(out, new_in);
}
delete pp_dmat;
}
#if defined(XGBOOST_USE_CUDA)
TEST(Learner, IO) {
using Arg = std::pair<std::string, std::string>;
size_t constexpr kRows = 10;
auto pp_dmat = CreateDMatrix(kRows, 10, 0);
auto p_dmat = *pp_dmat;
std::vector<bst_float> labels(kRows);
for (size_t i = 0; i < labels.size(); ++i) {
labels[i] = i;
}
p_dmat->Info().labels_.HostVector() = labels;
std::vector<std::shared_ptr<DMatrix>> mat {p_dmat};
std::unique_ptr<Learner> learner {Learner::Create(mat)};
learner->SetParams({Arg{"tree_method", "auto"},
Arg{"predictor", "gpu_predictor"},
Arg{"gpu_id", "0"}});
learner->UpdateOneIter(0, p_dmat.get());
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
dmlc::TemporaryDirectory tempdir;
const std::string fname = tempdir.path + "/model.bst";
{
// Create a scope to close the stream before next read.
std::unique_ptr<dmlc::Stream> fo(dmlc::Stream::Create(fname.c_str(), "w"));
learner->Save(fo.get());
}
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(fname.c_str(), "r"));
learner->Load(fi.get());
ASSERT_EQ(learner->GetGenericParameter().gpu_id, 0);
delete pp_dmat;
}
// Tests for automatic GPU configuration.
TEST(Learner, GPUConfiguration) {
using Arg = std::pair<std::string, std::string>;
@ -242,6 +215,5 @@ TEST(Learner, GPUConfiguration) {
delete pp_dmat;
}
#endif // XGBOOST_USE_CUDA
#endif // defined(XGBOOST_USE_CUDA)
} // namespace xgboost

View File

@ -3,6 +3,7 @@
#include <xgboost/tree_model.h>
#include "../helpers.h"
#include "dmlc/filesystem.h"
#include "xgboost/json_io.h"
namespace xgboost {
// Manually construct tree in binary format
@ -77,7 +78,7 @@ TEST(Tree, Load) {
std::unique_ptr<dmlc::Stream> fi(dmlc::Stream::Create(tmp_file.c_str(), "r"));
xgboost::RegTree tree;
tree.LoadModel(fi.get());
tree.Load(fi.get());
EXPECT_EQ(tree.GetDepth(1), 1);
EXPECT_EQ(tree[0].SplitCond(), 0.5f);
EXPECT_EQ(tree[0].SplitIndex(), 5);
@ -218,4 +219,30 @@ TEST(Tree, DumpDot) {
str = tree.DumpModel(fmap, true, R"(dot:{"graph_attrs": {"bgcolor": "#FFFF00"}})");
ASSERT_NE(str.find(R"(graph [ bgcolor="#FFFF00" ])"), std::string::npos);
}
// Serialize a minimal tree (root + two leaves after one expansion) to JSON
// and verify both the parameter fields and the per-node column arrays.
TEST(Tree, Json_IO) {
  RegTree tree;
  tree.ExpandNode(0, 0, 0.0f, false, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f);
  Json j_tree{Object()};
  tree.SaveModel(&j_tree);

  std::stringstream ss;
  Json::Dump(j_tree, &ss);

  // dmlc parameters are serialized as strings, hence the string comparisons.
  auto tparam = j_tree["tree_param"];
  ASSERT_EQ(get<String>(tparam["num_feature"]), "0");
  ASSERT_EQ(get<String>(tparam["num_nodes"]), "3");
  ASSERT_EQ(get<String>(tparam["size_leaf_vector"]), "0");

  // Each per-node column must have one entry per node (root + 2 children).
  ASSERT_EQ(get<Array const>(j_tree["left_children"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["right_children"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["parents"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["split_indices"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["split_conditions"]).size(), 3);
  ASSERT_EQ(get<Array const>(j_tree["default_left"]).size(), 3);

  RegTree loaded_tree;
  loaded_tree.LoadModel(j_tree);
  ASSERT_EQ(loaded_tree.param.num_nodes, 3);
}
} // namespace xgboost

View File

@ -1,6 +1,8 @@
import numpy as np
import xgboost as xgb
import unittest
import os
import json
dpath = 'demo/data/'
dtrain = xgb.DMatrix(dpath + 'agaricus.txt.train')
@ -200,3 +202,23 @@ class TestModels(unittest.TestCase):
bst.predict(dm2) # success
self.assertRaises(ValueError, bst.predict, dm1)
bst.predict(dm2) # success
def test_json_model_io(self):
    """Save a trained booster as JSON, verify the file parses as JSON with a
    top-level 'Learner' object, and check the same holds after loading the
    model back through the Booster constructor."""
    X = np.random.random((10, 3))
    y = np.random.randint(2, size=(10,))
    dm1 = xgb.DMatrix(X, y)
    bst = xgb.train({'tree_method': 'hist'}, dm1)
    # The ".json" extension selects JSON serialization.
    bst.save_model('./model.json')
    with open('./model.json', 'r') as fd:
        j_model = json.load(fd)
    assert isinstance(j_model['Learner'], dict)
    # Loading from the JSON file must succeed and leave the schema intact.
    bst = xgb.Booster(model_file='./model.json')
    with open('./model.json', 'r') as fd:
        j_model = json.load(fd)
    assert isinstance(j_model['Learner'], dict)
    os.remove('model.json')