[CLI] initial refactor of CLI
parent 0d95e863c9
commit cee148ed64

Makefile: 10 changed lines
@@ -57,7 +57,7 @@ endif
# specify tensor path
.PHONY: clean all lint clean_all

-all: lib/libxgboost.a lib/libxgboost.so
+all: lib/libxgboost.a lib/libxgboost.so xgboost

$(DMLC_CORE)/libdmlc.a:
	+ cd $(DMLC_CORE); make libdmlc.a config=$(ROOTDIR)/$(config); cd $(ROOTDIR)
@@ -66,9 +66,10 @@ $(RABIT)/lib/$(LIB_RABIT):
	+ cd $(RABIT); make lib/$(LIB_RABIT); cd $(ROOTDIR)

SRC = $(wildcard src/*.cc src/*/*.cc)
-OBJ = $(patsubst src/%.cc, build/%.o, $(SRC))
+ALL_OBJ = $(patsubst src/%.cc, build/%.o, $(SRC))
LIB_DEP = $(DMLC_CORE)/libdmlc.a $(RABIT)/lib/$(LIB_RABIT)
-ALL_DEP = $(OBJ) $(LIB_DEP)
+ALL_DEP = $(filter-out build/cli_main.o, $(ALL_OBJ)) $(LIB_DEP)
+CLI_OBJ = build/cli_main.o

build/%.o: src/%.cc
	@mkdir -p $(@D)
@@ -83,6 +84,9 @@ lib/libxgboost.so: $(ALL_DEP)
	@mkdir -p $(@D)
	$(CXX) $(CFLAGS) -shared -o $@ $(filter %.o %.a, $^) $(LDFLAGS)

+xgboost: lib/libxgboost.a $(CLI_OBJ) $(LIB_DEP)
+	$(CXX) $(CFLAGS) -o $@ $(filter %.o %.a, $^) $(LDFLAGS)
+
lint:
	python2 dmlc-core/scripts/lint.py xgboost ${LINT_LANG} include src

@@ -36,6 +36,13 @@ typedef void *BoosterHandle;
 */
XGB_DLL const char *XGBGetLastError();

+/*!
+ * \brief Entry point of CLI program.
+ * \param argc The number of arguments.
+ * \param argv The command line arguments.
+ */
+XGB_DLL int XGBoostCLIMain(int argc, char* argv[]);
+
/*!
 * \brief load a data matrix
 * \param fname the name of the file

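For illustration only (not part of the commit): a minimal sketch of how a host program might use the exported entry point declared in the C API hunk above, assuming it links against the xgboost library and the symbol is actually implemented there. The wrapper name and config file name below are hypothetical.

// Hypothetical host program; assumes libxgboost provides XGBoostCLIMain.
#include <xgboost/c_api.h>

int main(int argc, char* argv[]) {
  // Forward the command line unchanged, e.g. "./host train.conf num_round=20":
  // argv[1] is a config file, later key=value pairs override it.
  return XGBoostCLIMain(argc, argv);
}
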
@@ -9,6 +9,7 @@

#include <dmlc/base.h>
#include <dmlc/data.h>
#include <string>
#include <memory>
#include <vector>
#include "./base.h"
@@ -252,7 +253,7 @@ class DMatrix {
   * \param fname The file name to be saved.
   * \return The created DMatrix.
   */
-  virtual void SaveToLocalFile(const char* fname);
+  virtual void SaveToLocalFile(const std::string& fname);
  /*!
   * \brief Load DMatrix from URI.
   * \param uri The URI of input.
@@ -260,7 +261,7 @@ class DMatrix {
   * \param load_row_split Flag to read in part of rows, divided among the workers in distributed mode.
   * \return The created DMatrix.
   */
-  static DMatrix* Load(const char* uri,
+  static DMatrix* Load(const std::string& uri,
                        bool silent,
                        bool load_row_split);
  /*!

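As a usage sketch (not from the commit; the file names are hypothetical), the std::string overloads shown in the DMatrix hunks above can be called directly with string literals or std::string values:

#include <xgboost/data.h>
#include <memory>
#include <string>

int main() {
  const std::string uri = "train.libsvm";
  // Load a DMatrix from a URI; row splitting is disabled here.
  std::unique_ptr<xgboost::DMatrix> dtrain(
      xgboost::DMatrix::Load(uri, /*silent=*/true, /*load_row_split=*/false));
  // Persist the parsed matrix to a local file (hypothetical output name).
  dtrain->SaveToLocalFile("train.buffer");
  return 0;
}
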
@@ -1,335 +0,0 @@
// Copyright 2014 by Contributors
#define _CRT_SECURE_NO_WARNINGS
#define _CRT_SECURE_NO_DEPRECATE
#define NOMINMAX
#include <ctime>
#include <string>
#include <cstring>
#include <vector>
#include "./sync/sync.h"
#include "./io/io.h"
#include "./utils/utils.h"
#include "./utils/config.h"
#include "./learner/learner-inl.hpp"

namespace xgboost {
/*!
 * \brief wrapping the training process
 */
class BoostLearnTask {
 public:
  inline int Run(int argc, char *argv[]) {
    if (argc < 2) {
      printf("Usage: <config>\n");
      return 0;
    }
    utils::ConfigIterator itr(argv[1]);
    while (itr.Next()) {
      this->SetParam(itr.name(), itr.val());
    }
    for (int i = 2; i < argc; ++i) {
      char name[256], val[256];
      if (sscanf(argv[i], "%[^=]=%s", name, val) == 2) {
        this->SetParam(name, val);
      }
    }
    // do not save anything when save to stdout
    if (model_out == "stdout" || name_pred == "stdout") {
      this->SetParam("silent", "1");
      save_period = 0;
    }
    // initialized the result
    rabit::Init(argc, argv);
    if (rabit::IsDistributed()) {
      std::string pname = rabit::GetProcessorName();
      fprintf(stderr, "start %s:%d\n", pname.c_str(), rabit::GetRank());
    }
    if (rabit::IsDistributed() && data_split == "NONE") {
      this->SetParam("dsplit", "row");
    }
    if (rabit::GetRank() != 0) {
      this->SetParam("silent", "2");
    }
    this->InitData();

    if (task == "train") {
      // if task is training, will try recover from checkpoint
      this->TaskTrain();
      return 0;
    } else {
      this->InitLearner();
    }
    if (task == "dump") {
      this->TaskDump(); return 0;
    }
    if (task == "eval") {
      this->TaskEval(); return 0;
    }
    if (task == "pred") {
      this->TaskPred();
    }
    return 0;
  }
  inline void SetParam(const char *name, const char *val) {
    if (!strcmp("silent", name)) silent = atoi(val);
    if (!strcmp("use_buffer", name)) use_buffer = atoi(val);
    if (!strcmp("num_round", name)) num_round = atoi(val);
    if (!strcmp("pred_margin", name)) pred_margin = atoi(val);
    if (!strcmp("ntree_limit", name)) ntree_limit = atoi(val);
    if (!strcmp("save_period", name)) save_period = atoi(val);
    if (!strcmp("eval_train", name)) eval_train = atoi(val);
    if (!strcmp("task", name)) task = val;
    if (!strcmp("data", name)) train_path = val;
    if (!strcmp("test:data", name)) test_path = val;
    if (!strcmp("model_in", name)) model_in = val;
    if (!strcmp("model_out", name)) model_out = val;
    if (!strcmp("model_dir", name)) model_dir_path = val;
    if (!strcmp("fmap", name)) name_fmap = val;
    if (!strcmp("name_dump", name)) name_dump = val;
    if (!strcmp("name_pred", name)) name_pred = val;
    if (!strcmp("dsplit", name)) data_split = val;
    if (!strcmp("dump_stats", name)) dump_model_stats = atoi(val);
    if (!strcmp("save_pbuffer", name)) save_with_pbuffer = atoi(val);
    if (!strncmp("eval[", name, 5)) {
      char evname[256];
      utils::Assert(sscanf(name, "eval[%[^]]", evname) == 1,
                    "must specify evaluation name for display");
      eval_data_names.push_back(std::string(evname));
      eval_data_paths.push_back(std::string(val));
    }
    learner.SetParam(name, val);
  }

 public:
  BoostLearnTask(void) {
    // default parameters
    silent = 0;
    use_buffer = 1;
    num_round = 10;
    save_period = 0;
    eval_train = 0;
    pred_margin = 0;
    ntree_limit = 0;
    dump_model_stats = 0;
    task = "train";
    model_in = "NULL";
    model_out = "NULL";
    name_fmap = "NULL";
    name_pred = "pred.txt";
    name_dump = "dump.txt";
    model_dir_path = "./";
    data_split = "NONE";
    load_part = 0;
    save_with_pbuffer = 0;
    data = NULL;
  }
  ~BoostLearnTask(void) {
    for (size_t i = 0; i < deval.size(); i++) {
      delete deval[i];
    }
    if (data != NULL) delete data;
  }

 private:
  inline void InitData(void) {
    if (strchr(train_path.c_str(), '%') != NULL) {
      char s_tmp[256];
      utils::SPrintf(s_tmp, sizeof(s_tmp), train_path.c_str(), rabit::GetRank());
      train_path = s_tmp;
      load_part = 1;
    }
    bool loadsplit = data_split == "row";
    if (name_fmap != "NULL") fmap.LoadText(name_fmap.c_str());
    if (task == "dump") return;
    if (task == "pred") {
      data = io::LoadDataMatrix(test_path.c_str(), silent != 0, use_buffer != 0, loadsplit);
    } else {
      // training
      data = io::LoadDataMatrix(train_path.c_str(),
                                silent != 0 && load_part == 0,
                                use_buffer != 0, loadsplit);
      utils::Assert(eval_data_names.size() == eval_data_paths.size(), "BUG");
      for (size_t i = 0; i < eval_data_names.size(); ++i) {
        deval.push_back(io::LoadDataMatrix(eval_data_paths[i].c_str(),
                                           silent != 0,
                                           use_buffer != 0,
                                           loadsplit));
        devalall.push_back(deval.back());
      }

      std::vector<io::DataMatrix *> dcache(1, data);
      for (size_t i = 0; i < deval.size(); ++i) {
        dcache.push_back(deval[i]);
      }
      // set cache data to be all training and evaluation data
      learner.SetCacheData(dcache);

      // add training set to evaluation set if needed
      if (eval_train != 0) {
        devalall.push_back(data);
        eval_data_names.push_back(std::string("train"));
      }
    }
  }
  inline void InitLearner(void) {
    if (model_in != "NULL") {
      learner.LoadModel(model_in.c_str());
    } else {
      utils::Assert(task == "train", "model_in not specified");
      learner.InitModel();
    }
  }
  inline void TaskTrain(void) {
    int version = rabit::LoadCheckPoint(&learner);
    if (version == 0) this->InitLearner();
    const time_t start = time(NULL);
    unsigned long elapsed = 0;  // NOLINT(*)
    learner.CheckInit(data);

    bool allow_lazy = learner.AllowLazyCheckPoint();
    for (int i = version / 2; i < num_round; ++i) {
      elapsed = (unsigned long)(time(NULL) - start);  // NOLINT(*)
      if (version % 2 == 0) {
        if (!silent) printf("boosting round %d, %lu sec elapsed\n", i, elapsed);
        learner.UpdateOneIter(i, *data);
        if (allow_lazy) {
          rabit::LazyCheckPoint(&learner);
        } else {
          rabit::CheckPoint(&learner);
        }
        version += 1;
      }
      utils::Assert(version == rabit::VersionNumber(), "consistent check");
      std::string res = learner.EvalOneIter(i, devalall, eval_data_names);
      if (rabit::IsDistributed()) {
        if (rabit::GetRank() == 0) {
          rabit::TrackerPrintf("%s\n", res.c_str());
        }
      } else {
        if (silent < 2) {
          fprintf(stderr, "%s\n", res.c_str());
        }
      }
      if (save_period != 0 && (i + 1) % save_period == 0) {
        this->SaveModel(i);
      }
      if (allow_lazy) {
        rabit::LazyCheckPoint(&learner);
      } else {
        rabit::CheckPoint(&learner);
      }
      version += 1;
      utils::Assert(version == rabit::VersionNumber(), "consistent check");
      elapsed = (unsigned long)(time(NULL) - start);  // NOLINT(*)
    }
    // always save final round
    if ((save_period == 0 || num_round % save_period != 0) && model_out != "NONE") {
      if (model_out == "NULL") {
        this->SaveModel(num_round - 1);
      } else {
        this->SaveModel(model_out.c_str());
      }
    }
    if (!silent) {
      printf("\nupdating end, %lu sec in all\n", elapsed);
    }
  }
  inline void TaskEval(void) {
    learner.EvalOneIter(0, devalall, eval_data_names);
  }
  inline void TaskDump(void) {
    FILE *fo = utils::FopenCheck(name_dump.c_str(), "w");
    std::vector<std::string> dump = learner.DumpModel(fmap, dump_model_stats != 0);
    for (size_t i = 0; i < dump.size(); ++i) {
      fprintf(fo, "booster[%lu]:\n", i);
      fprintf(fo, "%s", dump[i].c_str());
    }
    fclose(fo);
  }
  inline void SaveModel(const char *fname) const {
    if (rabit::GetRank() != 0) return;
    learner.SaveModel(fname, save_with_pbuffer != 0);
  }
  inline void SaveModel(int i) const {
    char fname[256];
    utils::SPrintf(fname, sizeof(fname),
                   "%s/%04d.model", model_dir_path.c_str(), i + 1);
    this->SaveModel(fname);
  }
  inline void TaskPred(void) {
    std::vector<float> preds;
    if (!silent) printf("start prediction...\n");
    learner.Predict(*data, pred_margin != 0, &preds, ntree_limit);
    if (!silent) printf("writing prediction to %s\n", name_pred.c_str());
    FILE *fo;
    if (name_pred != "stdout") {
      fo = utils::FopenCheck(name_pred.c_str(), "w");
    } else {
      fo = stdout;
    }
    for (size_t i = 0; i < preds.size(); ++i) {
      fprintf(fo, "%g\n", preds[i]);
    }
    if (fo != stdout) fclose(fo);
  }

 private:
  /*! \brief whether silent */
  int silent;
  /*! \brief special load */
  int load_part;
  /*! \brief whether use auto binary buffer */
  int use_buffer;
  /*! \brief whether evaluate training statistics */
  int eval_train;
  /*! \brief number of boosting iterations */
  int num_round;
  /*! \brief the period to save the model, 0 means only save the final round model */
  int save_period;
  /*! \brief the path of training/test data set */
  std::string train_path, test_path;
  /*! \brief the path of test model file, or file to restart training */
  std::string model_in;
  /*! \brief the path of final model file, to be saved */
  std::string model_out;
  /*! \brief the path of directory containing the saved models */
  std::string model_dir_path;
  /*! \brief task to perform */
  std::string task;
  /*! \brief name of predict file */
  std::string name_pred;
  /*! \brief data split mode */
  std::string data_split;
  /*!\brief limit number of trees in prediction */
  int ntree_limit;
  /*!\brief whether to directly output margin value */
  int pred_margin;
  /*! \brief whether dump statistics along with model */
  int dump_model_stats;
  /*! \brief whether save prediction buffer */
  int save_with_pbuffer;
  /*! \brief name of feature map */
  std::string name_fmap;
  /*! \brief name of dump file */
  std::string name_dump;
  /*! \brief the paths of validation data sets */
  std::vector<std::string> eval_data_paths;
  /*! \brief the names of the evaluation data used in output log */
  std::vector<std::string> eval_data_names;

 private:
  io::DataMatrix* data;
  std::vector<io::DataMatrix*> deval;
  std::vector<const io::DataMatrix*> devalall;
  utils::FeatMap fmap;
  learner::BoostLearner learner;
};
}  // namespace xgboost

int main(int argc, char *argv[]) {
  xgboost::BoostLearnTask tsk;
  tsk.SetParam("seed", "0");
  int ret = tsk.Run(argc, argv);
  rabit::Finalize();
  return ret;
}
src/cli_main.cc (new file, 340 lines)

@@ -0,0 +1,340 @@
/*!
 * Copyright 2014 by Contributors
 * \file cli_main.cc
 * \brief The command line interface program of xgboost.
 *  This file is not included in dynamic library.
 */
// Copyright 2014 by Contributors
#define _CRT_SECURE_NO_WARNINGS
#define _CRT_SECURE_NO_DEPRECATE
#define NOMINMAX

#include <xgboost/learner.h>
#include <xgboost/data.h>
#include <dmlc/logging.h>
#include <dmlc/timer.h>
#include <ctime>
#include <string>
#include <cstdio>
#include <cstring>
#include <vector>
#include "./common/sync.h"
#include "./common/config.h"


namespace xgboost {

enum CLITask {
  kTrain = 0,
  kDump2Text = 1,
  kPredict = 2
};

struct CLIParam : public dmlc::Parameter<CLIParam> {
  /*! \brief the task name */
  int task;
  /*! \brief whether silent */
  int silent;
  /*! \brief whether evaluate training statistics */
  bool eval_train;
  /*! \brief number of boosting iterations */
  int num_round;
  /*! \brief the period to save the model, 0 means only save the final round model */
  int save_period;
  /*! \brief the path of training set */
  std::string train_path;
  /*! \brief path of test dataset */
  std::string test_path;
  /*! \brief the path of test model file, or file to restart training */
  std::string model_in;
  /*! \brief the path of final model file, to be saved */
  std::string model_out;
  /*! \brief the path of directory containing the saved models */
  std::string model_dir;
  /*! \brief name of predict file */
  std::string name_pred;
  /*! \brief data split mode */
  int dsplit;
  /*!\brief limit number of trees in prediction */
  int ntree_limit;
  /*!\brief whether to directly output margin value */
  bool pred_margin;
  /*! \brief whether dump statistics along with model */
  int dump_stats;
  /*! \brief name of feature map */
  std::string name_fmap;
  /*! \brief name of dump file */
  std::string name_dump;
  /*! \brief the paths of validation data sets */
  std::vector<std::string> eval_data_paths;
  /*! \brief the names of the evaluation data used in output log */
  std::vector<std::string> eval_data_names;
  /*! \brief all the configurations */
  std::vector<std::pair<std::string, std::string> > cfg;

  // declare parameters
  DMLC_DECLARE_PARAMETER(CLIParam) {
    // NOTE: declare everything except eval_data_paths.
    DMLC_DECLARE_FIELD(task).set_default(kTrain)
        .add_enum("train", kTrain)
        .add_enum("dump", kDump2Text)
        .add_enum("pred", kPredict)
        .describe("Task to be performed by the CLI program.");
    DMLC_DECLARE_FIELD(silent).set_default(0).set_range(0, 2)
        .describe("Silent level during the task.");
    DMLC_DECLARE_FIELD(eval_train).set_default(false)
        .describe("Whether evaluate on training data during training.");
    DMLC_DECLARE_FIELD(num_round).set_default(10).set_lower_bound(1)
        .describe("Number of boosting iterations");
    DMLC_DECLARE_FIELD(save_period).set_default(0).set_lower_bound(0)
        .describe("The period to save the model, 0 means only save final model.");
    DMLC_DECLARE_FIELD(train_path).set_default("NULL")
        .describe("Training data path.");
    DMLC_DECLARE_FIELD(test_path).set_default("NULL")
        .describe("Test data path.");
    DMLC_DECLARE_FIELD(model_in).set_default("NULL")
        .describe("Input model path, if any.");
    DMLC_DECLARE_FIELD(model_out).set_default("NULL")
        .describe("Output model path, if any.");
    DMLC_DECLARE_FIELD(model_dir).set_default("./")
        .describe("Output directory of period checkpoint.");
    DMLC_DECLARE_FIELD(name_pred).set_default("pred.txt")
        .describe("Name of the prediction file.");
    DMLC_DECLARE_FIELD(dsplit).set_default(0)
        .add_enum("auto", 0)
        .add_enum("col", 1)
        .add_enum("row", 2)
        .describe("Data split mode.");
    DMLC_DECLARE_FIELD(ntree_limit).set_default(0).set_lower_bound(0)
        .describe("Number of trees used for prediction, 0 means use all trees.");
    DMLC_DECLARE_FIELD(dump_stats).set_default(false)
        .describe("Whether dump the model statistics.");
    DMLC_DECLARE_FIELD(name_fmap).set_default("NULL")
        .describe("Name of the feature map file.");
    DMLC_DECLARE_FIELD(name_dump).set_default("dump.txt")
        .describe("Name of the output dump text file.");
    // alias
    DMLC_DECLARE_ALIAS(train_path, data);
    DMLC_DECLARE_ALIAS(test_path, "test:data");
  }
  // customized configure function of CLIParam
  inline void Configure(const std::vector<std::pair<std::string, std::string> >& cfg) {
    this->cfg = cfg;
    this->InitAllowUnknown(cfg);
    for (const auto& kv : cfg) {
      if (!strncmp("eval[", kv.first.c_str(), 5)) {
        char evname[256];
        CHECK_EQ(sscanf(kv.first.c_str(), "eval[%[^]]", evname), 1)
            << "must specify evaluation name for display";
        eval_data_names.push_back(std::string(evname));
        eval_data_paths.push_back(kv.second);
      }
    }
    // constraint.
    if (name_pred == "stdout") {
      save_period = 0;
      silent = 1;
    }
    if (dsplit == 0 && rabit::IsDistributed()) {
      dsplit = 2;
    }
    if (rabit::GetRank() != 0) {
      silent = 2;
    }
  }
};

DMLC_REGISTER_PARAMETER(CLIParam);

void CLITrain(const CLIParam& param) {
  if (rabit::IsDistributed()) {
    std::string pname = rabit::GetProcessorName();
    LOG(INFO) << "start " << pname << ":" << rabit::GetRank();
  }
  // load in data.
  std::unique_ptr<DMatrix> dtrain(
      DMatrix::Load(param.train_path, param.silent != 0, param.dsplit == 2));
  std::vector<std::unique_ptr<DMatrix> > deval;
  std::vector<DMatrix*> cache_mats, eval_datasets;
  cache_mats.push_back(dtrain.get());
  for (size_t i = 0; i < param.eval_data_names.size(); ++i) {
    deval.emplace_back(
        DMatrix::Load(param.eval_data_paths[i], param.silent != 0, param.dsplit == 2));
    eval_datasets.push_back(deval.back().get());
    cache_mats.push_back(deval.back().get());
  }
  std::vector<std::string> eval_data_names = param.eval_data_names;
  if (param.eval_train) {
    eval_datasets.push_back(dtrain.get());
    eval_data_names.push_back(std::string("train"));
  }
  // initialize the learner.
  std::unique_ptr<Learner> learner(Learner::Create(cache_mats));
  learner->Configure(param.cfg);
  int version = rabit::LoadCheckPoint(learner.get());
  if (version == 0) {
    // initialize the model if needed.
    if (param.model_in != "NULL") {
      std::unique_ptr<dmlc::Stream> fi(
          dmlc::Stream::Create(param.model_in.c_str(), "r"));
      learner->Load(fi.get());
    }
  }
  // start training.
  const double start = dmlc::GetTime();
  for (int i = version / 2; i < param.num_round; ++i) {
    double elapsed = dmlc::GetTime() - start;
    if (version % 2 == 0) {
      if (param.silent == 0) {
        LOG(INFO) << "boosting round " << i << ", " << elapsed << " sec elapsed";
      }
      learner->UpdateOneIter(i, dtrain.get());
      if (learner->AllowLazyCheckPoint()) {
        rabit::LazyCheckPoint(learner.get());
      } else {
        rabit::CheckPoint(learner.get());
      }
      version += 1;
    }
    CHECK_EQ(version, rabit::VersionNumber());
    std::string res = learner->EvalOneIter(i, eval_datasets, eval_data_names);
    if (rabit::IsDistributed()) {
      if (rabit::GetRank() == 0) {
        rabit::TrackerPrint(res + "\n");
      }
    } else {
      if (param.silent < 2) {
        LOG(INFO) << res;
      }
    }
    if (param.save_period != 0 && (i + 1) % param.save_period == 0) {
      std::ostringstream os;
      os << param.model_dir << '/' << i + 1 << ".model";
      std::unique_ptr<dmlc::Stream> fo(
          dmlc::Stream::Create(os.str().c_str(), "w"));
      learner->Save(fo.get());
    }

    if (learner->AllowLazyCheckPoint()) {
      rabit::LazyCheckPoint(learner.get());
    } else {
      rabit::CheckPoint(learner.get());
    }
    version += 1;
    CHECK_EQ(version, rabit::VersionNumber());
  }
  // always save final round
  if ((param.save_period == 0 || param.num_round % param.save_period != 0) &&
      param.model_out != "NONE") {
    std::ostringstream os;
    if (param.model_out == "NULL") {
      os << param.model_dir << '/' << param.num_round << ".model";
    } else {
      os << param.model_out;
    }
    std::unique_ptr<dmlc::Stream> fo(
        dmlc::Stream::Create(os.str().c_str(), "w"));
    learner->Save(fo.get());
  }

  if (param.silent == 0) {
    double elapsed = dmlc::GetTime() - start;
    LOG(INFO) << "update end, " << elapsed << " sec in all";
  }
}

void CLIDump2Text(const CLIParam& param) {
  FeatureMap fmap;
  if (param.name_fmap != "NULL") {
    std::unique_ptr<dmlc::Stream> fs(
        dmlc::Stream::Create(param.name_fmap.c_str(), "r"));
    dmlc::istream is(fs.get());
    fmap.LoadText(is);
  }
  // load model
  CHECK_NE(param.model_in, "NULL")
      << "Must specify model_in for dump";
  std::unique_ptr<Learner> learner(Learner::Create({}));
  std::unique_ptr<dmlc::Stream> fi(
      dmlc::Stream::Create(param.model_in.c_str(), "r"));
  learner->Load(fi.get());
  // dump data
  std::vector<std::string> dump = learner->Dump2Text(fmap, param.dump_stats);
  std::unique_ptr<dmlc::Stream> fo(
      dmlc::Stream::Create(param.name_dump.c_str(), "w"));
  dmlc::ostream os(fo.get());
  for (size_t i = 0; i < dump.size(); ++i) {
    os << "booster[" << i << "]:\n";
    os << dump[i];
  }
  // force flush before fo destruct.
  os.set_stream(nullptr);
}

void CLIPredict(const CLIParam& param) {
  // load data
  std::unique_ptr<DMatrix> dtest(
      DMatrix::Load(param.test_path, param.silent != 0, param.dsplit == 2));
  // load model
  CHECK_NE(param.model_in, "NULL")
      << "Must specify model_in for dump";
  std::unique_ptr<Learner> learner(Learner::Create({}));
  std::unique_ptr<dmlc::Stream> fi(
      dmlc::Stream::Create(param.model_in.c_str(), "r"));
  learner->Load(fi.get());

  if (param.silent == 0) {
    LOG(INFO) << "start prediction...";
  }
  std::vector<float> preds;
  learner->Predict(dtest.get(), param.pred_margin, &preds, param.ntree_limit);
  if (param.silent == 0) {
    LOG(INFO) << "writing prediction to " << param.name_pred;
  }
  std::unique_ptr<dmlc::Stream> fo(
      dmlc::Stream::Create(param.name_pred.c_str(), "w"));
  dmlc::ostream os(fo.get());
  for (float p : preds) {
    os << p << '\n';
  }
  // force flush before fo destruct.
  os.set_stream(nullptr);
}

int CLIRunTask(int argc, char *argv[]) {
  if (argc < 2) {
    printf("Usage: <config>\n");
    return 0;
  }

  std::vector<std::pair<std::string, std::string> > cfg;
  cfg.push_back(std::make_pair("seed", "0"));

  common::ConfigIterator itr(argv[1]);
  while (itr.Next()) {
    cfg.push_back(std::make_pair(std::string(itr.name()), std::string(itr.val())));
  }

  for (int i = 2; i < argc; ++i) {
    char name[256], val[256];
    if (sscanf(argv[i], "%[^=]=%s", name, val) == 2) {
      cfg.push_back(std::make_pair(std::string(name), std::string(val)));
    }
  }
  CLIParam param;
  param.Configure(cfg);

  rabit::Init(argc, argv);
  switch (param.task) {
    case kTrain: CLITrain(param); break;
    case kDump2Text: CLIDump2Text(param); break;
    case kPredict: CLIPredict(param); break;
  }
  rabit::Finalize();
  return 0;
}
}  // namespace xgboost

int main(int argc, char *argv[]) {
  return xgboost::CLIRunTask(argc, argv);
}