Fix #3342 and h2oai/h2o4gpu#625: Save predictor parameters in model file (#3856)
* Fix #3342 and h2oai/h2o4gpu#625: Save predictor parameters in model file This allows pickled models to retain predictor attributes, such as 'predictor' (whether to use CPU or GPU) and 'n_gpu' (number of GPUs to use). Related: h2oai/h2o4gpu#625 Closes #3342. TODO. Write a test. * Fix lint * Do not load GPU predictor into CPU-only XGBoost * Add a test for pickling GPU predictors * Make sample data big enough to pass multi GPU test * Update test_gpu_predictor.cu
This commit is contained in:
committed by
GitHub
parent
e04ab56b57
commit
91537e7353
@@ -13,6 +13,7 @@
|
||||
#include <limits>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <ios>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
#include "./common/common.h"
|
||||
@@ -21,7 +22,6 @@
|
||||
#include "./common/random.h"
|
||||
#include "./common/enum_class_param.h"
|
||||
#include "./common/timer.h"
|
||||
#include "../tests/cpp/test_learner.h"
|
||||
|
||||
namespace {
|
||||
|
||||
@@ -36,6 +36,26 @@ enum class DataSplitMode : int {
|
||||
kAuto = 0, kCol = 1, kRow = 2
|
||||
};
|
||||
|
||||
// Returns true iff the entire string parses as a floating-point value
// (no leading whitespace, no trailing characters).
inline bool IsFloat(const std::string& str) {
  float parsed;
  std::stringstream stream(str);
  stream >> std::noskipws >> parsed;
  // A full parse leaves exactly eofbit set: extraction succeeded and
  // consumed every character of the input.
  return stream.rdstate() == std::ios_base::eofbit;
}
|
||||
|
||||
// Returns true iff the entire string parses as an integer
// (no leading whitespace, no trailing characters).
inline bool IsInt(const std::string& str) {
  int parsed;
  std::stringstream stream(str);
  stream >> std::noskipws >> parsed;
  // A full parse leaves exactly eofbit set: extraction succeeded and
  // consumed every character of the input.
  return stream.rdstate() == std::ios_base::eofbit;
}
|
||||
|
||||
// Renders a parameter value for display in a log message: numeric-looking
// values are emitted as-is, anything else is wrapped in single quotes.
inline std::string RenderParamVal(const std::string& str) {
  // Inline full-string numeric checks: a value counts as numeric when
  // stream extraction consumes the whole string (only eofbit remains set).
  float fval;
  std::stringstream fss(str);
  fss >> std::noskipws >> fval;
  const bool is_float = (fss.rdstate() == std::ios_base::eofbit);

  int ival;
  std::stringstream iss(str);
  iss >> std::noskipws >> ival;
  const bool is_int = (iss.rdstate() == std::ios_base::eofbit);

  if (is_float || is_int) {
    return str;
  }
  return std::string("'") + str + "'";
}
|
||||
|
||||
} // anonymous namespace
|
||||
|
||||
DECLARE_FIELD_ENUM_CLASS(TreeMethod);
|
||||
@@ -152,7 +172,7 @@ DMLC_REGISTER_PARAMETER(LearnerTrainParam);
|
||||
* \brief learner that performs gradient boosting for a specific objective
|
||||
* function. It does training and prediction.
|
||||
*/
|
||||
class LearnerImpl : public Learner, public LearnerTestHook {
|
||||
class LearnerImpl : public Learner {
|
||||
public:
|
||||
explicit LearnerImpl(std::vector<std::shared_ptr<DMatrix> > cache)
|
||||
: cache_(std::move(cache)) {
|
||||
@@ -330,6 +350,38 @@ class LearnerImpl : public Learner, public LearnerTestHook {
|
||||
if (mparam_.contain_extra_attrs != 0) {
|
||||
std::vector<std::pair<std::string, std::string> > attr;
|
||||
fi->Read(&attr);
|
||||
for (auto& kv : attr) {
|
||||
// Load `predictor`, `n_gpus`, `gpu_id` parameters from extra attributes
|
||||
const std::string prefix = "SAVED_PARAM_";
|
||||
if (kv.first.find(prefix) == 0) {
|
||||
const std::string saved_param = kv.first.substr(prefix.length());
|
||||
#ifdef XGBOOST_USE_CUDA
|
||||
if (saved_param == "predictor" || saved_param == "n_gpus"
|
||||
|| saved_param == "gpu_id") {
|
||||
cfg_[saved_param] = kv.second;
|
||||
LOG(INFO)
|
||||
<< "Parameter '" << saved_param << "' has been recovered from "
|
||||
<< "the saved model. It will be set to "
|
||||
<< RenderParamVal(kv.second) << " for prediction. To "
|
||||
<< "override the predictor behavior, explicitly set '"
|
||||
<< saved_param << "' parameter as follows:\n"
|
||||
<< " * Python package: bst.set_param('"
|
||||
<< saved_param << "', [new value])\n"
|
||||
<< " * R package: xgb.parameters(bst) <- list("
|
||||
<< saved_param << " = [new value])\n"
|
||||
<< " * JVM packages: bst.setParam(\""
|
||||
<< saved_param << "\", [new value])";
|
||||
}
|
||||
#else
|
||||
if (saved_param == "predictor" && kv.second == "gpu_predictor") {
|
||||
LOG(INFO) << "Parameter 'predictor' will be set to 'cpu_predictor' "
|
||||
<< "since XGBoost wasn't compiled with GPU support.";
|
||||
cfg_["predictor"] = "cpu_predictor";
|
||||
kv.second = "cpu_predictor";
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
attributes_ =
|
||||
std::map<std::string, std::string>(attr.begin(), attr.end());
|
||||
}
|
||||
@@ -364,15 +416,28 @@ class LearnerImpl : public Learner, public LearnerTestHook {
|
||||
extra_attr.emplace_back("count_poisson_max_delta_step", it->second);
|
||||
}
|
||||
}
|
||||
{
|
||||
// Write `predictor`, `n_gpus`, `gpu_id` parameters as extra attributes
|
||||
for (const auto& key : std::vector<std::string>{
|
||||
"predictor", "n_gpus", "gpu_id"}) {
|
||||
auto it = cfg_.find(key);
|
||||
if (it != cfg_.end()) {
|
||||
mparam.contain_extra_attrs = 1;
|
||||
extra_attr.emplace_back("SAVED_PARAM_" + key, it->second);
|
||||
}
|
||||
}
|
||||
}
|
||||
fo->Write(&mparam, sizeof(LearnerModelParam));
|
||||
fo->Write(name_obj_);
|
||||
fo->Write(name_gbm_);
|
||||
gbm_->Save(fo);
|
||||
if (mparam.contain_extra_attrs != 0) {
|
||||
std::vector<std::pair<std::string, std::string> > attr(
|
||||
attributes_.begin(), attributes_.end());
|
||||
attr.insert(attr.end(), extra_attr.begin(), extra_attr.end());
|
||||
fo->Write(attr);
|
||||
std::map<std::string, std::string> attr(attributes_);
|
||||
for (const auto& kv : extra_attr) {
|
||||
attr[kv.first] = kv.second;
|
||||
}
|
||||
fo->Write(std::vector<std::pair<std::string, std::string>>(
|
||||
attr.begin(), attr.end()));
|
||||
}
|
||||
if (name_obj_ == "count:poisson") {
|
||||
auto it = cfg_.find("max_delta_step");
|
||||
@@ -504,6 +569,10 @@ class LearnerImpl : public Learner, public LearnerTestHook {
|
||||
}
|
||||
}
|
||||
|
||||
const std::map<std::string, std::string>& GetConfigurationArguments() const override {
|
||||
return cfg_;
|
||||
}
|
||||
|
||||
protected:
|
||||
// Revise `tree_method` and `updater` parameters after seeing the training
|
||||
// data matrix
|
||||
@@ -664,11 +733,6 @@ class LearnerImpl : public Learner, public LearnerTestHook {
|
||||
std::vector<std::shared_ptr<DMatrix> > cache_;
|
||||
|
||||
common::Monitor monitor_;
|
||||
|
||||
// diagnostic method reserved for C++ test learner.SelectTreeMethod
|
||||
std::string GetUpdaterSequence() const override {
|
||||
return cfg_.at("updater");
|
||||
}
|
||||
};
|
||||
|
||||
Learner* Learner::Create(
|
||||
|
||||
Reference in New Issue
Block a user