first version that reproduces the binary classification demo
This commit is contained in:
parent
c4acb4fe01
commit
2c969ecf14
Makefile | 7
@@ -3,15 +3,15 @@ export CXX = clang++
export CFLAGS = -Wall -O3 -msse2 -Wno-unknown-pragmas

# specify tensor path
-BIN = xgunity.exe
+BIN = xgboost
OBJ = io.o
.PHONY: clean all

all: $(BIN) $(OBJ)
export LDFLAGS= -pthread -lm

-xgunity.exe: src/xgunity.cpp
-io.o: src/io/io.cpp
+xgboost: src/xgboost_main.cpp io.o src/data.h src/tree/*.h src/tree/*.hpp src/gbm/*.h src/gbm/*.hpp src/utils/*.h src/learner/*.h src/learner/*.hpp
+io.o: src/io/io.cpp src/data.h src/utils/*.h

$(BIN) :
	$(CXX) $(CFLAGS) $(LDFLAGS) -o $@ $(filter %.cpp %.o %.c, $^)
@@ -24,4 +24,3 @@ install:

clean:
	$(RM) $(OBJ) $(BIN) *~ */*~ */*/*~
-
@@ -310,12 +310,11 @@ class FMatrixS : public FMatrixInterface<FMatrixS>{
      const size_t nbatch = std::min(batch.size, max_nrow - batch.base_rowid);
      for (size_t i = 0; i < nbatch; ++i, ++num_buffered_row_) {
        SparseBatch::Inst inst = batch[i];
-       for (bst_uint j = 0; j < batch.size; ++j) {
+       for (bst_uint j = 0; j < inst.length; ++j) {
          builder.AddBudget(inst[j].findex);
        }
      }
    }
-
    builder.InitStorage();

    iter_->BeforeFirst();
@@ -325,9 +324,9 @@ class FMatrixS : public FMatrixInterface<FMatrixS>{
      const size_t nbatch = std::min(batch.size, max_nrow - batch.base_rowid);
      for (size_t i = 0; i < nbatch; ++i) {
        SparseBatch::Inst inst = batch[i];
-       for (bst_uint j = 0; j < batch.size; ++j) {
+       for (bst_uint j = 0; j < inst.length; ++j) {
          builder.PushElem(inst[j].findex,
-                          Entry((bst_uint)(batch.base_rowid+j),
+                          Entry((bst_uint)(batch.base_rowid+i),
                                 inst[j].fvalue));
        }
      }
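Both hunks fix the same pair of bugs in the column-access build: the inner loop must run over the entries of the current instance (inst.length), not the number of rows in the batch (batch.size), and the row id stored with each entry must come from the outer row counter i, not the inner entry index j. Below is a standalone sketch of the two-pass row-to-column conversion this code performs; the types and layout are simplified stand-ins for illustration, not the actual builder classes used here.

#include <cstdio>
#include <utility>
#include <vector>

struct Entry { unsigned findex; float fvalue; };  // feature index + value, as in the diff

int main() {
  // three sparse rows (row-major input, standing in for one SparseBatch)
  std::vector<std::vector<Entry>> rows = {
      {{0, 1.0f}, {2, 3.0f}}, {{1, 2.0f}}, {{0, 4.0f}, {1, 5.0f}}};
  const unsigned ncol = 3;

  // pass 1 ("AddBudget"): count entries per column; iterate each row's own
  // entries (inst.length), never the number of rows in the batch (batch.size)
  std::vector<size_t> colptr(ncol + 1, 0);
  for (const std::vector<Entry> &row : rows)
    for (const Entry &e : row) ++colptr[e.findex + 1];
  for (unsigned c = 0; c < ncol; ++c) colptr[c + 1] += colptr[c];

  // pass 2 ("PushElem"): fill column-major storage; the stored row id is the
  // outer row counter (base_rowid + i in the diff), not the inner entry index j
  std::vector<std::pair<unsigned, float>> coldata(colptr[ncol]);
  std::vector<size_t> fill(colptr.begin(), colptr.end() - 1);
  for (size_t r = 0; r < rows.size(); ++r)
    for (const Entry &e : rows[r])
      coldata[fill[e.findex]++] = {static_cast<unsigned>(r), e.fvalue};

  for (unsigned c = 0; c < ncol; ++c)
    for (size_t k = colptr[c]; k < colptr[c + 1]; ++k)
      std::printf("col %u: row %u value %g\n", c, coldata[k].first, coldata[k].second);
  return 0;
}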
@@ -7,6 +7,7 @@
 */
#include <vector>
#include "../data.h"
+#include "../utils/fmap.h"

namespace xgboost {
/*! \brief namespace for gradient booster */
@@ -63,6 +64,13 @@ class IGradBooster {
                       int64_t buffer_offset,
                       const std::vector<unsigned> &root_index,
                       std::vector<float> *out_preds) = 0;
+  /*!
+   * \brief dump the model in text format
+   * \param fmap feature map that may help give interpretations of features
+   * \param option extra option for the model dump
+   * \return a vector of dumps, one per booster
+   */
+  virtual std::vector<std::string> DumpModel(const utils::FeatMap& fmap, int option) = 0;
  // destructor
  virtual ~IGradBooster(void){}
};
@@ -141,6 +141,13 @@ class GBTree : public IGradBooster<FMatrix> {
      }
    }
  }
+  virtual std::vector<std::string> DumpModel(const utils::FeatMap& fmap, int option) {
+    std::vector<std::string> dump;
+    for (size_t i = 0; i < trees.size(); i++) {
+      dump.push_back(trees[i]->DumpModel(fmap, option&1));
+    }
+    return dump;
+  }

 protected:
  // clear the model
@@ -7,9 +7,9 @@

namespace xgboost {
namespace io {
-DataMatrix* LoadDataMatrix(const char *fname) {
+DataMatrix* LoadDataMatrix(const char *fname, bool silent, bool savebuffer) {
  DMatrixSimple *dmat = new DMatrixSimple();
-  dmat->CacheLoad(fname);
+  dmat->CacheLoad(fname, silent, savebuffer);
  return dmat;
}
}  // namespace io
@@ -17,9 +17,11 @@ typedef learner::DMatrix<FMatrixS> DataMatrix;
/*!
 * \brief load DataMatrix from stream
 * \param fname file name to be loaded
+ * \param silent whether to print messages during loading
+ * \param savebuffer whether to temporarily buffer the file when it is in text format
 * \return a loaded DMatrix
 */
-DataMatrix* LoadDataMatrix(const char *fname);
+DataMatrix* LoadDataMatrix(const char *fname, bool silent = false, bool savebuffer = true);
/*!
 * \brief save DataMatrix into stream,
 * note: the saved dmatrix format may not be exactly the same as the input
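The new silent/savebuffer parameters default to the old behaviour, so existing one-argument callers keep compiling. A minimal caller sketch follows; it assumes it is compiled inside this source tree, and the data file names are placeholders.

#include <cstdio>
#include "io/io.h"

int main() {
  // defaults silent = false, savebuffer = true: same behaviour as the old one-argument call
  xgboost::io::DataMatrix *dtrain = xgboost::io::LoadDataMatrix("agaricus.txt.train");
  // quiet load that skips saving the binary buffer
  xgboost::io::DataMatrix *dtest = xgboost::io::LoadDataMatrix("agaricus.txt.test", true, false);
  std::printf("train rows: %u\n", static_cast<unsigned>(dtrain->info.num_row));
  delete dtrain;
  delete dtest;
  return 0;
}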
@@ -9,6 +9,7 @@
#include <utility>
#include <string>
#include <climits>
#include <cmath>
#include <algorithm>
#include "./evaluation.h"
#include "./helper_utils.h"
@@ -120,8 +120,8 @@ class BoostLearner {
  }
  inline void SaveModel(utils::IStream &fo) const {
    fo.Write(&mparam, sizeof(ModelParam));
-    fo.Write(&name_obj_);
-    fo.Write(&name_gbm_);
+    fo.Write(name_obj_);
+    fo.Write(name_gbm_);
    gbm_->SaveModel(fo);
  }
  /*!
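Writing &name_obj_ serializes the std::string object itself (a pointer plus bookkeeping fields) rather than its characters; passing the string selects the overload that writes the actual contents, presumably length-prefixed. A standalone illustration of length-prefixed string serialization is below; the actual utils::IStream interface may differ, and the sample values are only examples.

#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

// append a 64-bit length followed by the raw characters
static void WriteString(std::vector<char> *buf, const std::string &s) {
  std::uint64_t len = s.size();
  const char *p = reinterpret_cast<const char *>(&len);
  buf->insert(buf->end(), p, p + sizeof(len));
  buf->insert(buf->end(), s.begin(), s.end());
}

int main() {
  std::vector<char> buf;
  WriteString(&buf, "binary:logistic");  // e.g. an objective name
  WriteString(&buf, "gbtree");           // e.g. a booster name
  std::printf("serialized %zu bytes\n", buf.size());  // 8 + 15 + 8 + 6 = 37
  return 0;
}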
@@ -139,7 +139,7 @@ class BoostLearner {
   * \param p_train pointer to the data matrix
   */
  inline void UpdateOneIter(int iter, DMatrix<FMatrix> *p_train) {
-    this->PredictRaw(preds_, *p_train);
+    this->PredictRaw(*p_train, &preds_);
    obj_->GetGradient(preds_, p_train->info, iter, &gpair_);
    gbm_->DoBoost(gpair_, p_train->fmat, p_train->info.root_index);
  }
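UpdateOneIter is one full boosting round: predict with the current model, ask the objective for first- and second-order gradient statistics, then grow trees against those statistics. For a binary classification demo with a logistic objective, the gradient pair per example looks like the standalone sketch below; this is an illustration of the math, not the code of obj_->GetGradient.

#include <cmath>
#include <cstdio>
#include <vector>

// gradient/hessian of the logistic loss w.r.t. the raw (margin) prediction
int main() {
  std::vector<float> preds = {-1.2f, 0.3f, 2.0f};   // raw scores, as from PredictRaw
  std::vector<float> labels = {0.0f, 1.0f, 1.0f};
  for (size_t i = 0; i < preds.size(); ++i) {
    float p = 1.0f / (1.0f + std::exp(-preds[i]));  // sigmoid transform
    float grad = p - labels[i];                     // first-order statistic
    float hess = p * (1.0f - p);                    // second-order statistic
    std::printf("i=%zu grad=%.4f hess=%.4f\n", i, grad, hess);
  }
  return 0;
}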
@@ -189,6 +189,10 @@ class BoostLearner {
    this->PredictRaw(data, out_preds);
    obj_->PredTransform(out_preds);
  }
+  /*! \brief dump model out */
+  inline std::vector<std::string> DumpModel(const utils::FeatMap& fmap, int option) {
+    return gbm_->DumpModel(fmap, option);
+  }

 protected:
  /*!
@@ -212,9 +216,9 @@ class BoostLearner {
   * \param out_preds output vector that stores the prediction
   */
  inline void PredictRaw(const DMatrix<FMatrix> &data,
-                        std::vector<float> *out_preds) {
+                        std::vector<float> *out_preds) const {
    gbm_->Predict(data.fmat, this->FindBufferOffset(data),
-                 data.info, out_preds);
+                 data.info.root_index, out_preds);
  }

  /*! \brief training parameter for regression */
@@ -280,7 +284,7 @@ class BoostLearner {
  inline int64_t FindBufferOffset(const DMatrix<FMatrix> &mat) const {
    for (size_t i = 0; i < cache_.size(); ++i) {
      if (cache_[i].mat_ == &mat && mat.cache_learner_ptr_ == this) {
-        if (cache_[i].num_row_ == mat.num_row) {
+        if (cache_[i].num_row_ == mat.info.num_row) {
          return cache_[i].buffer_offset_;
        }
      }
@@ -6,6 +6,7 @@
 * \author Tianqi Chen, Kailong Chen
 */
#include <vector>
#include <cmath>
#include "./objective.h"

namespace xgboost {
@@ -27,7 +27,6 @@ class ColMaker: public IUpdater<FMatrix> {
                      const FMatrix &fmat,
                      const std::vector<unsigned> &root_index,
                      const std::vector<RegTree*> &trees) {
-
    for (size_t i = 0; i < trees.size(); ++i) {
      Builder builder(param);
      builder.Update(gpair, fmat, root_index, trees[i]);
@@ -132,7 +131,9 @@ class ColMaker: public IUpdater<FMatrix> {
      // initialize feature index
      unsigned ncol = static_cast<unsigned>(fmat.NumCol());
      for (unsigned i = 0; i < ncol; ++i) {
-        if (fmat.GetColSize(i) != 0) feat_index.push_back(i);
+        if (fmat.GetColSize(i) != 0) {
+          feat_index.push_back(i);
+        }
      }
      unsigned n = static_cast<unsigned>(param.colsample_bytree * feat_index.size());
      random::Shuffle(feat_index);
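Column subsampling per tree works by collecting the non-empty columns, shuffling them, and keeping only the first n for this tree. A standalone sketch of the same shuffle-and-truncate subsampling, with std::shuffle standing in for random::Shuffle and every column assumed non-empty:

#include <algorithm>
#include <cstdio>
#include <random>
#include <vector>

int main() {
  const unsigned ncol = 10;
  const float colsample_bytree = 0.5f;

  // keep only non-empty columns (GetColSize(i) != 0 in the hunk above)
  std::vector<unsigned> feat_index;
  for (unsigned i = 0; i < ncol; ++i) feat_index.push_back(i);

  // shuffle, then use only the first n columns when building this tree
  unsigned n = static_cast<unsigned>(colsample_bytree * feat_index.size());
  std::mt19937 rng(0);
  std::shuffle(feat_index.begin(), feat_index.end(), rng);
  feat_index.resize(n);

  for (unsigned f : feat_index) std::printf("use column %u\n", f);
  return 0;
}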
src/xgboost_main.cpp | 244 (new file)
@@ -0,0 +1,244 @@
#define _CRT_SECURE_NO_WARNINGS
#define _CRT_SECURE_NO_DEPRECATE

#include <ctime>
#include <string>
#include <cstring>
#include "io/io.h"
#include "utils/utils.h"
#include "utils/config.h"
#include "learner/learner-inl.hpp"

namespace xgboost {
/*!
 * \brief wrapping the training process
 */
class BoostLearnTask{
 public:
  inline int Run(int argc, char *argv[]) {
    if (argc < 2) {
      printf("Usage: <config>\n");
      return 0;
    }
    utils::ConfigIterator itr(argv[1]);
    while (itr.Next()) {
      this->SetParam(itr.name(), itr.val());
    }
    for (int i = 2; i < argc; ++i) {
      char name[256], val[256];
      if (sscanf(argv[i], "%[^=]=%s", name, val) == 2) {
        this->SetParam(name, val);
      }
    }
    this->InitData();
    this->InitLearner();
    if (task == "dump") {
      this->TaskDump(); return 0;
    }
    if (task == "eval") {
      this->TaskEval(); return 0;
    }
    if (task == "pred") {
      this->TaskPred();
    } else {
      this->TaskTrain();
    }
    return 0;
  }
  inline void SetParam(const char *name, const char *val) {
    if (!strcmp("silent", name)) silent = atoi(val);
    if (!strcmp("use_buffer", name)) use_buffer = atoi(val);
    if (!strcmp("seed", name)) random::Seed(atoi(val));
    if (!strcmp("num_round", name)) num_round = atoi(val);
    if (!strcmp("save_period", name)) save_period = atoi(val);
    if (!strcmp("eval_train", name)) eval_train = atoi(val);
    if (!strcmp("task", name)) task = val;
    if (!strcmp("data", name)) train_path = val;
    if (!strcmp("test:data", name)) test_path = val;
    if (!strcmp("model_in", name)) model_in = val;
    if (!strcmp("model_out", name)) model_out = val;
    if (!strcmp("model_dir", name)) model_dir_path = val;
    if (!strcmp("fmap", name)) name_fmap = val;
    if (!strcmp("name_dump", name)) name_dump = val;
    if (!strcmp("name_pred", name)) name_pred = val;
    if (!strcmp("dump_stats", name)) dump_model_stats = atoi(val);
    if (!strncmp("eval[", name, 5)) {
      char evname[256];
      utils::Assert(sscanf(name, "eval[%[^]]", evname) == 1, "must specify evaluation name for display");
      eval_data_names.push_back(std::string(evname));
      eval_data_paths.push_back(std::string(val));
    }
    learner.SetParam(name, val);
  }

 public:
  BoostLearnTask(void) {
    // default parameters
    silent = 0;
    use_buffer = 1;
    num_round = 10;
    save_period = 0;
    eval_train = 0;
    dump_model_stats = 0;
    task = "train";
    model_in = "NULL";
    model_out = "NULL";
    name_fmap = "NULL";
    name_pred = "pred.txt";
    name_dump = "dump.txt";
    model_dir_path = "./";
    data = NULL;
  }
  ~BoostLearnTask(void){
    for (size_t i = 0; i < deval.size(); i++){
      delete deval[i];
    }
    if (data != NULL) delete data;
  }

 private:
  inline void InitData(void) {
    if (name_fmap != "NULL") fmap.LoadText(name_fmap.c_str());
    if (task == "dump") return;
    if (task == "pred") {
      data = io::LoadDataMatrix(test_path.c_str(), silent != 0, use_buffer != 0);
    } else {
      // training
      data = io::LoadDataMatrix(train_path.c_str(), silent != 0, use_buffer != 0);
      {  // initialize column access
        data->fmat.InitColAccess();
      }
      utils::Assert(eval_data_names.size() == eval_data_paths.size(), "BUG");
      for (size_t i = 0; i < eval_data_names.size(); ++i) {
        deval.push_back(io::LoadDataMatrix(eval_data_paths[i].c_str(), silent != 0, use_buffer != 0));
        devalall.push_back(deval.back());
      }

      std::vector<io::DataMatrix *> dcache(1, data);
      for (size_t i = 0; i < deval.size(); ++i) {
        dcache.push_back(deval[i]);
      }
      // set cache data to be all training and evaluation data
      learner.SetCacheData(dcache);

      // add training set to evaluation set if needed
      if (eval_train != 0) {
        devalall.push_back(data);
        eval_data_names.push_back(std::string("train"));
      }
    }
  }
  inline void InitLearner(void) {
    if (model_in != "NULL"){
      utils::FileStream fi(utils::FopenCheck(model_in.c_str(), "rb"));
      learner.LoadModel(fi);
      fi.Close();
    } else {
      utils::Assert(task == "train", "model_in not specified");
      learner.InitModel();
    }
  }
  inline void TaskTrain(void) {
    const time_t start = time(NULL);
    unsigned long elapsed = 0;
    for (int i = 0; i < num_round; ++i) {
      elapsed = (unsigned long)(time(NULL) - start);
      if (!silent) printf("boosting round %d, %lu sec elapsed\n", i, elapsed);
      learner.UpdateOneIter(i, data);
      std::string res = learner.EvalOneIter(i, devalall, eval_data_names);
      fprintf(stderr, "%s\n", res.c_str());
      if (save_period != 0 && (i + 1) % save_period == 0) {
        this->SaveModel(i);
      }
      elapsed = (unsigned long)(time(NULL) - start);
    }
    // always save final round
    if ((save_period == 0 || num_round % save_period != 0) && model_out != "NONE") {
      if (model_out == "NULL"){
        this->SaveModel(num_round - 1);
      } else {
        this->SaveModel(model_out.c_str());
      }
    }
    if (!silent){
      printf("\nupdating end, %lu sec in all\n", elapsed);
    }
  }
  inline void TaskEval(void) {
    learner.EvalOneIter(0, devalall, eval_data_names);
  }
  inline void TaskDump(void){
    FILE *fo = utils::FopenCheck(name_dump.c_str(), "w");
    std::vector<std::string> dump = learner.DumpModel(fmap, dump_model_stats != 0);
    for (size_t i = 0; i < dump.size(); ++i) {
      fprintf(fo, "booster[%lu]:\n", i);
      fprintf(fo, "%s", dump[i].c_str());
    }
    fclose(fo);
  }
  inline void SaveModel(const char *fname) const {
    utils::FileStream fo(utils::FopenCheck(fname, "wb"));
    learner.SaveModel(fo);
    fo.Close();
  }
  inline void SaveModel(int i) const {
    char fname[256];
    sprintf(fname, "%s/%04d.model", model_dir_path.c_str(), i + 1);
    this->SaveModel(fname);
  }
  inline void TaskPred(void) {
    std::vector<float> preds;
    if (!silent) printf("start prediction...\n");
    learner.Predict(*data, &preds);
    if (!silent) printf("writing prediction to %s\n", name_pred.c_str());
    FILE *fo = utils::FopenCheck(name_pred.c_str(), "w");
    for (size_t i = 0; i < preds.size(); i++) {
      fprintf(fo, "%f\n", preds[i]);
    }
    fclose(fo);
  }

 private:
  /* \brief whether silent */
  int silent;
  /* \brief whether use auto binary buffer */
  int use_buffer;
  /* \brief whether evaluate training statistics */
  int eval_train;
  /* \brief number of boosting iterations */
  int num_round;
  /* \brief the period to save the model, 0 means only save the final round model */
  int save_period;
  /* \brief the path of training/test data set */
  std::string train_path, test_path;
  /* \brief the path of test model file, or file to restart training */
  std::string model_in;
  /* \brief the path of final model file, to be saved */
  std::string model_out;
  /* \brief the path of directory containing the saved models */
  std::string model_dir_path;
  /* \brief task to perform */
  std::string task;
  /* \brief name of predict file */
  std::string name_pred;
  /* \brief whether dump statistics along with model */
  int dump_model_stats;
  /* \brief name of feature map */
  std::string name_fmap;
  /* \brief name of dump file */
  std::string name_dump;
  /* \brief the paths of validation data sets */
  std::vector<std::string> eval_data_paths;
  /* \brief the names of the evaluation data used in output log */
  std::vector<std::string> eval_data_names;

 private:
  io::DataMatrix* data;
  std::vector<io::DataMatrix*> deval;
  std::vector<const io::DataMatrix*> devalall;
  utils::FeatMap fmap;
  learner::BoostLearner<FMatrixS> learner;
};
}  // namespace xgboost

int main(int argc, char *argv[]){
  xgboost::random::Seed(0);
  xgboost::BoostLearnTask tsk;
  return tsk.Run(argc, argv);
}
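Run() reads the config file given as argv[1] and then treats every later argument of the form name=val as an override, parsed with sscanf("%[^=]=%s") before being handed to SetParam. A standalone check of that parsing pattern; the argument values are made up for illustration.

#include <cstdio>

int main() {
  const char *args[] = {"num_round=20", "eval[test]=agaricus.txt.test", "not-a-pair"};
  for (const char *arg : args) {
    char name[256], val[256];
    if (std::sscanf(arg, "%[^=]=%s", name, val) == 2) {
      std::printf("override: %s -> %s\n", name, val);  // would be passed to SetParam(name, val)
    } else {
      std::printf("ignored: %s\n", arg);               // not in name=val form
    }
  }
  return 0;
}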