make regression module compatible with rank loss, now support weighted loss
This commit is contained in:
302
regrank/xgboost_regrank.h
Normal file
302
regrank/xgboost_regrank.h
Normal file
@@ -0,0 +1,302 @@
|
||||
#ifndef XGBOOST_REGRANK_H
|
||||
#define XGBOOST_REGRANK_H
|
||||
/*!
|
||||
* \file xgboost_regrank.h
|
||||
* \brief class for gradient boosted regression and ranking
|
||||
* \author Kailong Chen: chenkl198812@gmail.com, Tianqi Chen: tianqi.tchen@gmail.com
|
||||
*/
|
||||
#include <cmath>
|
||||
#include <cstdlib>
|
||||
#include <cstring>
|
||||
#include "xgboost_regrank_data.h"
|
||||
#include "xgboost_regrank_eval.h"
|
||||
#include "xgboost_regrank_obj.h"
|
||||
#include "../utils/xgboost_omp.h"
|
||||
#include "../booster/xgboost_gbmbase.h"
|
||||
#include "../utils/xgboost_utils.h"
|
||||
#include "../utils/xgboost_stream.h"
|
||||
|
||||
namespace xgboost{
|
||||
namespace regrank{
|
||||
/*! \brief class for gradient boosted regression and ranking */
|
||||
class RegRankBoostLearner{
|
||||
public:
|
||||
/*! \brief constructor */
|
||||
RegRankBoostLearner(void){
|
||||
silent = 0;
|
||||
obj_ = NULL;
|
||||
name_obj_ = "reg";
|
||||
}
|
||||
/*!
|
||||
* \brief a regression booter associated with training and evaluating data
|
||||
* \param train pointer to the training data
|
||||
* \param evals array of evaluating data
|
||||
* \param evname name of evaluation data, used print statistics
|
||||
*/
|
||||
RegRankBoostLearner(const DMatrix *train,
|
||||
const std::vector<DMatrix *> &evals,
|
||||
const std::vector<std::string> &evname){
|
||||
silent = 0;
|
||||
this->SetData(train, evals, evname);
|
||||
}
|
||||
|
||||
/*!
|
||||
* \brief associate regression booster with training and evaluating data
|
||||
* \param train pointer to the training data
|
||||
* \param evals array of evaluating data
|
||||
* \param evname name of evaluation data, used print statistics
|
||||
*/
|
||||
inline void SetData(const DMatrix *train,
|
||||
const std::vector<DMatrix *> &evals,
|
||||
const std::vector<std::string> &evname){
|
||||
this->train_ = train;
|
||||
this->evals_ = evals;
|
||||
this->evname_ = evname;
|
||||
// estimate feature bound
|
||||
int num_feature = (int)(train->data.NumCol());
|
||||
// assign buffer index
|
||||
unsigned buffer_size = static_cast<unsigned>(train->Size());
|
||||
|
||||
for (size_t i = 0; i < evals.size(); ++i){
|
||||
buffer_size += static_cast<unsigned>(evals[i]->Size());
|
||||
num_feature = std::max(num_feature, (int)(evals[i]->data.NumCol()));
|
||||
}
|
||||
|
||||
char str_temp[25];
|
||||
if (num_feature > mparam.num_feature){
|
||||
mparam.num_feature = num_feature;
|
||||
sprintf(str_temp, "%d", num_feature);
|
||||
base_gbm.SetParam("bst:num_feature", str_temp);
|
||||
}
|
||||
|
||||
sprintf(str_temp, "%u", buffer_size);
|
||||
base_gbm.SetParam("num_pbuffer", str_temp);
|
||||
if (!silent){
|
||||
printf("buffer_size=%u\n", buffer_size);
|
||||
}
|
||||
|
||||
// set eval_preds tmp sapce
|
||||
this->eval_preds_.resize(evals.size(), std::vector<float>());
|
||||
}
|
||||
/*!
|
||||
* \brief set parameters from outside
|
||||
* \param name name of the parameter
|
||||
* \param val value of the parameter
|
||||
*/
|
||||
inline void SetParam(const char *name, const char *val){
|
||||
if (!strcmp(name, "silent")) silent = atoi(val);
|
||||
if (!strcmp(name, "eval_metric")) evaluator_.AddEval(val);
|
||||
if (!strcmp(name, "objective") ) name_obj_ = val;
|
||||
mparam.SetParam(name, val);
|
||||
base_gbm.SetParam(name, val);
|
||||
cfg_.push_back( std::make_pair( std::string(name), std::string(val) ) );
|
||||
}
|
||||
/*!
|
||||
* \brief initialize solver before training, called before training
|
||||
* this function is reserved for solver to allocate necessary space and do other preparation
|
||||
*/
|
||||
inline void InitTrainer(void){
|
||||
base_gbm.InitTrainer();
|
||||
obj_ = CreateObjFunction( name_obj_.c_str() );
|
||||
for( size_t i = 0; i < cfg_.size(); ++ i ){
|
||||
obj_->SetParam( cfg_[i].first.c_str(), cfg_[i].second.c_str() );
|
||||
}
|
||||
evaluator_.AddEval( obj_->DefaultEvalMetric() );
|
||||
}
|
||||
/*!
|
||||
* \brief initialize the current data storage for model, if the model is used first time, call this function
|
||||
*/
|
||||
inline void InitModel(void){
|
||||
base_gbm.InitModel();
|
||||
mparam.AdjustBase();
|
||||
}
|
||||
/*!
|
||||
* \brief load model from stream
|
||||
* \param fi input stream
|
||||
*/
|
||||
inline void LoadModel(utils::IStream &fi){
|
||||
base_gbm.LoadModel(fi);
|
||||
utils::Assert(fi.Read(&mparam, sizeof(ModelParam)) != 0);
|
||||
}
|
||||
/*!
|
||||
* \brief DumpModel
|
||||
* \param fo text file
|
||||
* \param fmap feature map that may help give interpretations of feature
|
||||
* \param with_stats whether print statistics as well
|
||||
*/
|
||||
inline void DumpModel(FILE *fo, const utils::FeatMap& fmap, bool with_stats){
|
||||
base_gbm.DumpModel(fo, fmap, with_stats);
|
||||
}
|
||||
/*!
|
||||
* \brief Dump path of all trees
|
||||
* \param fo text file
|
||||
* \param data input data
|
||||
*/
|
||||
inline void DumpPath(FILE *fo, const DMatrix &data){
|
||||
base_gbm.DumpPath(fo, data.data);
|
||||
}
|
||||
/*!
|
||||
* \brief save model to stream
|
||||
* \param fo output stream
|
||||
*/
|
||||
inline void SaveModel(utils::IStream &fo) const{
|
||||
base_gbm.SaveModel(fo);
|
||||
fo.Write(&mparam, sizeof(ModelParam));
|
||||
}
|
||||
/*!
|
||||
* \brief update the model for one iteration
|
||||
* \param iteration iteration number
|
||||
*/
|
||||
inline void UpdateOneIter(int iter){
|
||||
this->PredictBuffer(preds_, *train_, 0);
|
||||
obj_->GetGradient(preds_, train_->info, grad_, hess_);
|
||||
std::vector<unsigned> root_index;
|
||||
base_gbm.DoBoost(grad_, hess_, train_->data, root_index);
|
||||
}
|
||||
/*!
|
||||
* \brief evaluate the model for specific iteration
|
||||
* \param iter iteration number
|
||||
* \param fo file to output log
|
||||
*/
|
||||
inline void EvalOneIter(int iter, FILE *fo = stderr){
|
||||
fprintf(fo, "[%d]", iter);
|
||||
int buffer_offset = static_cast<int>(train_->Size());
|
||||
|
||||
for (size_t i = 0; i < evals_.size(); ++i){
|
||||
std::vector<float> &preds = this->eval_preds_[i];
|
||||
this->PredictBuffer(preds, *evals_[i], buffer_offset);
|
||||
obj_->PredTransform(preds);
|
||||
evaluator_.Eval(fo, evname_[i].c_str(), preds, evals_[i]->info);
|
||||
buffer_offset += static_cast<int>(evals_[i]->Size());
|
||||
}
|
||||
fprintf(fo, "\n");
|
||||
fflush(fo);
|
||||
}
|
||||
/*! \brief get prediction, without buffering */
|
||||
inline void Predict(std::vector<float> &preds, const DMatrix &data){
|
||||
preds.resize(data.Size());
|
||||
const unsigned ndata = static_cast<unsigned>(data.Size());
|
||||
#pragma omp parallel for schedule( static )
|
||||
for (unsigned j = 0; j < ndata; ++j){
|
||||
preds[j] = mparam.base_score + base_gbm.Predict(data.data, j, -1);
|
||||
}
|
||||
obj_->PredTransform( preds );
|
||||
}
|
||||
public:
|
||||
/*!
|
||||
* \brief interactive update
|
||||
* \param action action type
|
||||
*/
|
||||
inline void UpdateInteract(std::string action){
|
||||
this->InteractPredict(preds_, *train_, 0);
|
||||
|
||||
int buffer_offset = static_cast<int>(train_->Size());
|
||||
for (size_t i = 0; i < evals_.size(); ++i){
|
||||
std::vector<float> &preds = this->eval_preds_[i];
|
||||
this->InteractPredict(preds, *evals_[i], buffer_offset);
|
||||
buffer_offset += static_cast<int>(evals_[i]->Size());
|
||||
}
|
||||
|
||||
if (action == "remove"){
|
||||
base_gbm.DelteBooster(); return;
|
||||
}
|
||||
|
||||
obj_->GetGradient(preds_, train_->info, grad_, hess_);
|
||||
std::vector<unsigned> root_index;
|
||||
base_gbm.DoBoost(grad_, hess_, train_->data, root_index);
|
||||
|
||||
this->InteractRePredict(*train_, 0);
|
||||
buffer_offset = static_cast<int>(train_->Size());
|
||||
for (size_t i = 0; i < evals_.size(); ++i){
|
||||
this->InteractRePredict(*evals_[i], buffer_offset);
|
||||
buffer_offset += static_cast<int>(evals_[i]->Size());
|
||||
}
|
||||
}
|
||||
private:
|
||||
/*! \brief get the transformed predictions, given data */
|
||||
inline void InteractPredict(std::vector<float> &preds, const DMatrix &data, unsigned buffer_offset){
|
||||
preds.resize(data.Size());
|
||||
const unsigned ndata = static_cast<unsigned>(data.Size());
|
||||
#pragma omp parallel for schedule( static )
|
||||
for (unsigned j = 0; j < ndata; ++j){
|
||||
preds[j] = mparam.base_score + base_gbm.InteractPredict(data.data, j, buffer_offset + j);
|
||||
}
|
||||
obj_->PredTransform( preds );
|
||||
}
|
||||
/*! \brief repredict trial */
|
||||
inline void InteractRePredict(const DMatrix &data, unsigned buffer_offset){
|
||||
const unsigned ndata = static_cast<unsigned>(data.Size());
|
||||
#pragma omp parallel for schedule( static )
|
||||
for (unsigned j = 0; j < ndata; ++j){
|
||||
base_gbm.InteractRePredict(data.data, j, buffer_offset + j);
|
||||
}
|
||||
}
|
||||
private:
|
||||
/*! \brief get the transformed predictions, given data */
|
||||
inline void PredictBuffer(std::vector<float> &preds, const DMatrix &data, unsigned buffer_offset){
|
||||
preds.resize(data.Size());
|
||||
const unsigned ndata = static_cast<unsigned>(data.Size());
|
||||
#pragma omp parallel for schedule( static )
|
||||
for (unsigned j = 0; j < ndata; ++j){
|
||||
preds[j] = mparam.base_score + base_gbm.Predict(data.data, j, buffer_offset + j);
|
||||
}
|
||||
}
|
||||
private:
|
||||
/*! \brief training parameter for regression */
|
||||
struct ModelParam{
|
||||
/* \brief global bias */
|
||||
float base_score;
|
||||
/* \brief type of loss function */
|
||||
int loss_type;
|
||||
/* \brief number of features */
|
||||
int num_feature;
|
||||
/*! \brief reserved field */
|
||||
int reserved[16];
|
||||
/*! \brief constructor */
|
||||
ModelParam(void){
|
||||
base_score = 0.5f;
|
||||
loss_type = 0;
|
||||
memset(reserved, 0, sizeof(reserved));
|
||||
}
|
||||
/*!
|
||||
* \brief set parameters from outside
|
||||
* \param name name of the parameter
|
||||
* \param val value of the parameter
|
||||
*/
|
||||
inline void SetParam(const char *name, const char *val){
|
||||
if (!strcmp("base_score", name)) base_score = (float)atof(val);
|
||||
if (!strcmp("loss_type", name)) loss_type = atoi(val);
|
||||
if (!strcmp("bst:num_feature", name)) num_feature = atoi(val);
|
||||
}
|
||||
/*!
|
||||
* \brief adjust base_score
|
||||
*/
|
||||
inline void AdjustBase(void){
|
||||
if (loss_type == 1 || loss_type == 2){
|
||||
utils::Assert(base_score > 0.0f && base_score < 1.0f, "sigmoid range constrain");
|
||||
base_score = -logf(1.0f / base_score - 1.0f);
|
||||
}
|
||||
}
|
||||
};
|
||||
private:
|
||||
int silent;
|
||||
EvalSet evaluator_;
|
||||
booster::GBMBase base_gbm;
|
||||
ModelParam mparam;
|
||||
const DMatrix *train_;
|
||||
std::vector<DMatrix *> evals_;
|
||||
std::vector<std::string> evname_;
|
||||
std::vector<unsigned> buffer_index_;
|
||||
// objective fnction
|
||||
IObjFunction *obj_;
|
||||
// name of objective function
|
||||
std::string name_obj_;
|
||||
std::vector< std::pair<std::string, std::string> > cfg_;
|
||||
private:
|
||||
std::vector<float> grad_, hess_, preds_;
|
||||
std::vector< std::vector<float> > eval_preds_;
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
#endif
|
||||
205
regrank/xgboost_regrank_data.h
Normal file
205
regrank/xgboost_regrank_data.h
Normal file
@@ -0,0 +1,205 @@
|
||||
#ifndef XGBOOST_REGRANK_DATA_H
|
||||
#define XGBOOST_REGRANK_DATA_H
|
||||
|
||||
/*!
|
||||
* \file xgboost_regrank_data.h
|
||||
* \brief input data structure for regression, binary classification, and rankning.
|
||||
* Format:
|
||||
* The data should contain each data instance in each line.
|
||||
* The format of line data is as below:
|
||||
* label <nonzero feature dimension> [feature index:feature value]+
|
||||
* When using rank, an addtional group file with suffix group must be provided, giving the number of instances in each group
|
||||
* When using weighted aware classification(regression), an addtional weight file must be provided, giving the weight of each instance
|
||||
*
|
||||
* \author Kailong Chen: chenkl198812@gmail.com, Tianqi Chen: tianqi.tchen@gmail.com
|
||||
*/
|
||||
#include <cstdio>
|
||||
#include <vector>
|
||||
#include "../booster/xgboost_data.h"
|
||||
#include "../utils/xgboost_utils.h"
|
||||
#include "../utils/xgboost_stream.h"
|
||||
|
||||
namespace xgboost{
|
||||
/*! \brief namespace to handle regression and rank */
|
||||
namespace regrank{
|
||||
/*! \brief data matrix for regression content */
|
||||
struct DMatrix{
|
||||
public:
|
||||
/*! \brief data information besides the features */
|
||||
struct Info{
|
||||
/*! \brief label of each instance */
|
||||
std::vector<float> labels;
|
||||
/*! \brief the index of begin and end of a groupneeded when the learning task is ranking */
|
||||
std::vector<unsigned> group_ptr;
|
||||
/*! \brief weights of each instance, optional */
|
||||
std::vector<float> weights;
|
||||
/*! \brief get weight of each instances */
|
||||
inline float GetWeight( size_t i ) const{
|
||||
if( weights.size() != 0 ) return weights[i];
|
||||
else return 1.0f;
|
||||
}
|
||||
};
|
||||
public:
|
||||
/*! \brief feature data content */
|
||||
booster::FMatrixS data;
|
||||
/*! \brief information fields */
|
||||
Info info;
|
||||
public:
|
||||
/*! \brief default constructor */
|
||||
DMatrix(void){}
|
||||
/*! \brief get the number of instances */
|
||||
inline size_t Size() const{
|
||||
return info.labels.size();
|
||||
}
|
||||
/*!
|
||||
* \brief load from text file
|
||||
* \param fname name of text data
|
||||
* \param silent whether print information or not
|
||||
*/
|
||||
inline void LoadText(const char* fname, bool silent = false){
|
||||
data.Clear();
|
||||
FILE* file = utils::FopenCheck(fname, "r");
|
||||
float label; bool init = true;
|
||||
char tmp[1024];
|
||||
std::vector<booster::bst_uint> findex;
|
||||
std::vector<booster::bst_float> fvalue;
|
||||
|
||||
while (fscanf(file, "%s", tmp) == 1){
|
||||
unsigned index; float value;
|
||||
if (sscanf(tmp, "%u:%f", &index, &value) == 2){
|
||||
findex.push_back(index); fvalue.push_back(value);
|
||||
}
|
||||
else{
|
||||
if (!init){
|
||||
info.labels.push_back(label);
|
||||
data.AddRow(findex, fvalue);
|
||||
}
|
||||
findex.clear(); fvalue.clear();
|
||||
utils::Assert(sscanf(tmp, "%f", &label) == 1, "invalid format");
|
||||
init = false;
|
||||
}
|
||||
}
|
||||
|
||||
info.labels.push_back(label);
|
||||
data.AddRow(findex, fvalue);
|
||||
// initialize column support as well
|
||||
data.InitData();
|
||||
|
||||
if (!silent){
|
||||
printf("%ux%u matrix with %lu entries is loaded from %s\n",
|
||||
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname);
|
||||
}
|
||||
fclose(file);
|
||||
this->TryLoadGroup(fname, silent);
|
||||
this->TryLoadWeight(fname, silent);
|
||||
}
|
||||
/*!
|
||||
* \brief load from binary file
|
||||
* \param fname name of binary data
|
||||
* \param silent whether print information or not
|
||||
* \return whether loading is success
|
||||
*/
|
||||
inline bool LoadBinary(const char* fname, bool silent = false){
|
||||
FILE *fp = fopen64(fname, "rb");
|
||||
if (fp == NULL) return false;
|
||||
utils::FileStream fs(fp);
|
||||
data.LoadBinary(fs);
|
||||
info.labels.resize(data.NumRow());
|
||||
utils::Assert(fs.Read(&info.labels[0], sizeof(float)* data.NumRow()) != 0, "DMatrix LoadBinary");
|
||||
fs.Close();
|
||||
// initialize column support as well
|
||||
data.InitData();
|
||||
|
||||
if (!silent){
|
||||
printf("%ux%u matrix with %lu entries is loaded from %s\n",
|
||||
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname);
|
||||
}
|
||||
this->TryLoadGroup(fname, silent);
|
||||
this->TryLoadWeight(fname, silent);
|
||||
return true;
|
||||
}
|
||||
/*!
|
||||
* \brief save to binary file
|
||||
* \param fname name of binary data
|
||||
* \param silent whether print information or not
|
||||
*/
|
||||
inline void SaveBinary(const char* fname, bool silent = false){
|
||||
// initialize column support as well
|
||||
data.InitData();
|
||||
|
||||
utils::FileStream fs(utils::FopenCheck(fname, "wb"));
|
||||
data.SaveBinary(fs);
|
||||
fs.Write(&info.labels[0], sizeof(float)* data.NumRow());
|
||||
fs.Close();
|
||||
if (!silent){
|
||||
printf("%ux%u matrix with %lu entries is saved to %s\n",
|
||||
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname);
|
||||
}
|
||||
}
|
||||
/*!
|
||||
* \brief cache load data given a file name, if filename ends with .buffer, direct load binary
|
||||
* otherwise the function will first check if fname + '.buffer' exists,
|
||||
* if binary buffer exists, it will reads from binary buffer, otherwise, it will load from text file,
|
||||
* and try to create a buffer file
|
||||
* \param fname name of binary data
|
||||
* \param silent whether print information or not
|
||||
* \param savebuffer whether do save binary buffer if it is text
|
||||
*/
|
||||
inline void CacheLoad(const char *fname, bool silent = false, bool savebuffer = true){
|
||||
int len = strlen(fname);
|
||||
if (len > 8 && !strcmp(fname + len - 7, ".buffer")){
|
||||
this->LoadBinary(fname, silent); return;
|
||||
}
|
||||
char bname[1024];
|
||||
sprintf(bname, "%s.buffer", fname);
|
||||
if (!this->LoadBinary(bname, silent)){
|
||||
this->LoadText(fname, silent);
|
||||
if (savebuffer) this->SaveBinary(bname, silent);
|
||||
}
|
||||
}
|
||||
private:
|
||||
inline bool TryLoadGroup(const char* fname, bool silent = false){
|
||||
std::string name = fname;
|
||||
if (name.length() > 8 && !strcmp(fname + name.length() - 7, ".buffer")){
|
||||
name.resize( name.length() - 7 );
|
||||
}
|
||||
name += ".group";
|
||||
//if exists group data load it in
|
||||
FILE *fi = fopen64(name.c_str(), "r");
|
||||
if (fi == NULL) return false;
|
||||
info.group_ptr.push_back(0);
|
||||
unsigned nline;
|
||||
while (fscanf(fi, "%u", &nline) == 1){
|
||||
info.group_ptr.push_back(info.group_ptr.back()+nline);
|
||||
}
|
||||
if(!silent){
|
||||
printf("%lu groups are loaded from %s\n", info.group_ptr.size()-1, name.c_str());
|
||||
}
|
||||
fclose(fi);
|
||||
utils::Assert( info.group_ptr.back() == data.NumRow(), "DMatrix: group data does not match the number of rows in feature matrix" );
|
||||
return true;
|
||||
}
|
||||
inline bool TryLoadWeight(const char* fname, bool silent = false){
|
||||
std::string name = fname;
|
||||
if (name.length() > 8 && !strcmp(fname + name.length() - 7, ".buffer")){
|
||||
name.resize( name.length() - 7 );
|
||||
}
|
||||
name += ".weight";
|
||||
//if exists group data load it in
|
||||
FILE *fi = fopen64(name.c_str(), "r");
|
||||
if (fi == NULL) return false;
|
||||
float wt;
|
||||
while (fscanf(fi, "%f", &wt) == 1){
|
||||
info.weights.push_back( wt );
|
||||
}
|
||||
if(!silent){
|
||||
printf("loading weight from %s\n", name.c_str());
|
||||
}
|
||||
fclose(fi);
|
||||
utils::Assert( info.weights.size() == data.NumRow(), "DMatrix: weight data does not match the number of rows in feature matrix" );
|
||||
return true;
|
||||
}
|
||||
};
|
||||
};
|
||||
};
|
||||
#endif
|
||||
163
regrank/xgboost_regrank_eval.h
Normal file
163
regrank/xgboost_regrank_eval.h
Normal file
@@ -0,0 +1,163 @@
|
||||
#ifndef XGBOOST_REGRANK_EVAL_H
|
||||
#define XGBOOST_REGRANK_EVAL_H
|
||||
/*!
|
||||
* \file xgboost_regrank_eval.h
|
||||
* \brief evaluation metrics for regression and classification and rank
|
||||
* \author Kailong Chen: chenkl198812@gmail.com, Tianqi Chen: tianqi.tchen@gmail.com
|
||||
*/
|
||||
|
||||
#include <cmath>
|
||||
#include <vector>
|
||||
#include <algorithm>
|
||||
#include "../utils/xgboost_utils.h"
|
||||
#include "../utils/xgboost_omp.h"
|
||||
#include "../utils/xgboost_random.h"
|
||||
#include "xgboost_regrank_data.h"
|
||||
|
||||
namespace xgboost{
|
||||
namespace regrank{
|
||||
/*! \brief evaluator that evaluates the loss metrics */
|
||||
/*! \brief abstract interface for an evaluation metric over a whole data set */
struct IEvaluator{
    /*!
     * \brief evaluate a specific metric
     * \param preds prediction for each instance
     * \param info information of the data set, including label, weight, group
     * \return the metric value over the whole data set
     */
    virtual float Eval(const std::vector<float> &preds,
                       const DMatrix::Info &info ) const = 0;
    /*! \return name of metric, used as the key in output logs */
    virtual const char *Name(void) const = 0;
};
|
||||
|
||||
/*! \brief RMSE */
|
||||
struct EvalRMSE : public IEvaluator{
|
||||
virtual float Eval(const std::vector<float> &preds,
|
||||
const DMatrix::Info &info ) const {
|
||||
const unsigned ndata = static_cast<unsigned>(preds.size());
|
||||
float sum = 0.0, wsum = 0.0;
|
||||
#pragma omp parallel for reduction(+:sum,wsum) schedule( static )
|
||||
for (unsigned i = 0; i < ndata; ++i){
|
||||
const float wt = info.GetWeight(i);
|
||||
const float diff = info.labels[i] - preds[i];
|
||||
sum += diff*diff * wt;
|
||||
wsum += wt;
|
||||
}
|
||||
return sqrtf(sum / wsum);
|
||||
}
|
||||
virtual const char *Name(void) const{
|
||||
return "rmse";
|
||||
}
|
||||
};
|
||||
|
||||
/*! \brief Error */
|
||||
/*! \brief weighted negative log-likelihood for binary labels in [0,1] */
struct EvalLogLoss : public IEvaluator{
    virtual float Eval(const std::vector<float> &preds,
                       const DMatrix::Info &info ) const {
        const unsigned ndata = static_cast<unsigned>(preds.size());
        // weighted sum of -loglik and total weight
        float sum = 0.0f, wsum = 0.0f;
        #pragma omp parallel for reduction(+:sum,wsum) schedule( static )
        for (unsigned i = 0; i < ndata; ++i){
            const float y = info.labels[i];
            const float py = preds[i];
            const float wt = info.GetWeight(i);
            // NOTE(review): py == 0 or py == 1 yields log(0) = -inf here;
            // assumes predictions are strictly inside (0,1) -- confirm that
            // the objective's PredTransform guarantees this
            sum -= wt * ( y * std::log(py) + (1.0f - y)*std::log(1 - py) );
            wsum+= wt;
        }
        return sum / wsum;
    }
    virtual const char *Name(void) const{
        // NOTE(review): the display name is "negllik" although this metric is
        // registered under the config key "logloss"; callers matching on the
        // displayed name should be aware of the mismatch
        return "negllik";
    }
};
|
||||
|
||||
/*! \brief Error */
|
||||
struct EvalError : public IEvaluator{
|
||||
virtual float Eval(const std::vector<float> &preds,
|
||||
const DMatrix::Info &info ) const {
|
||||
const unsigned ndata = static_cast<unsigned>(preds.size());
|
||||
float sum = 0.0f, wsum = 0.0f;
|
||||
#pragma omp parallel for reduction(+:sum,wsum) schedule( static )
|
||||
for (unsigned i = 0; i < ndata; ++i){
|
||||
const float wt = info.GetWeight(i);
|
||||
if (preds[i] > 0.5f){
|
||||
if (info.labels[i] < 0.5f) sum += wt;
|
||||
}
|
||||
else{
|
||||
if (info.labels[i] >= 0.5f) sum += wt;
|
||||
}
|
||||
wsum += wt;
|
||||
}
|
||||
return sum / wsum;
|
||||
}
|
||||
virtual const char *Name(void) const{
|
||||
return "error";
|
||||
}
|
||||
};
|
||||
|
||||
/*! \brief Area under curve */
|
||||
struct EvalAuc : public IEvaluator{
|
||||
inline static bool CmpFirst( const std::pair<float,float> &a, const std::pair<float,float> &b ){
|
||||
return a.first > b.first;
|
||||
}
|
||||
virtual float Eval( const std::vector<float> &preds,
|
||||
const DMatrix::Info &info ) const {
|
||||
const std::vector<float> &labels = info.labels;
|
||||
const unsigned ndata = static_cast<unsigned>( preds.size() );
|
||||
std::vector< std::pair<float, float> > rec;
|
||||
for( unsigned i = 0; i < ndata; ++ i ){
|
||||
rec.push_back( std::make_pair( preds[i], labels[i]) );
|
||||
}
|
||||
random::Shuffle( rec );
|
||||
std::sort( rec.begin(), rec.end(), CmpFirst );
|
||||
|
||||
long npos = 0, nhit = 0;
|
||||
for( unsigned i = 0; i < ndata; ++ i ){
|
||||
if( rec[i].second > 0.5f ) {
|
||||
++ npos;
|
||||
}else{
|
||||
// this is the number of correct pairs
|
||||
nhit += npos;
|
||||
}
|
||||
}
|
||||
long nneg = ndata - npos;
|
||||
utils::Assert( nneg > 0, "the dataset only contains pos samples" );
|
||||
return static_cast<float>(nhit) / nneg / npos;
|
||||
}
|
||||
virtual const char *Name( void ) const{
|
||||
return "auc";
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
namespace regrank{
|
||||
/*! \brief a set of evaluators */
|
||||
struct EvalSet{
|
||||
public:
|
||||
inline void AddEval(const char *name){
|
||||
for( size_t i = 0; i < evals_.size(); ++ i ){
|
||||
if(!strcmp(name, evals_[i]->Name())) return;
|
||||
}
|
||||
if (!strcmp(name, "rmse")) evals_.push_back( new EvalRMSE() );
|
||||
if (!strcmp(name, "error")) evals_.push_back( new EvalError() );
|
||||
if (!strcmp(name, "logloss")) evals_.push_back( new EvalLogLoss() );
|
||||
if (!strcmp( name, "auc")) evals_.push_back( new EvalAuc() );
|
||||
}
|
||||
~EvalSet(){
|
||||
for( size_t i = 0; i < evals_.size(); ++ i ){
|
||||
delete evals_[i];
|
||||
}
|
||||
}
|
||||
inline void Eval(FILE *fo, const char *evname,
|
||||
const std::vector<float> &preds,
|
||||
const DMatrix::Info &info ) const{
|
||||
for (size_t i = 0; i < evals_.size(); ++i){
|
||||
float res = evals_[i]->Eval(preds, info);
|
||||
fprintf(fo, "\t%s-%s:%f", evname, evals_[i]->Name(), res);
|
||||
}
|
||||
}
|
||||
private:
|
||||
std::vector<const IEvaluator*> evals_;
|
||||
};
|
||||
};
|
||||
};
|
||||
#endif
|
||||
285
regrank/xgboost_regrank_main.cpp
Normal file
285
regrank/xgboost_regrank_main.cpp
Normal file
@@ -0,0 +1,285 @@
|
||||
#define _CRT_SECURE_NO_WARNINGS
|
||||
#define _CRT_SECURE_NO_DEPRECATE
|
||||
|
||||
#include <ctime>
|
||||
#include <string>
|
||||
#include <cstring>
|
||||
#include "xgboost_regrank.h"
|
||||
#include "../utils/xgboost_fmap.h"
|
||||
#include "../utils/xgboost_random.h"
|
||||
#include "../utils/xgboost_config.h"
|
||||
|
||||
namespace xgboost{
|
||||
namespace regrank{
|
||||
/*!
|
||||
* \brief wrapping the training process of the gradient boosting regression model,
|
||||
* given the configuation
|
||||
* \author Kailong Chen: chenkl198812@gmail.com, Tianqi Chen: tianqi.chen@gmail.com
|
||||
*/
|
||||
class RegBoostTask{
|
||||
public:
|
||||
/*!
 * \brief program driver: read the config file, apply command-line
 *        overrides, initialize data and learner, then dispatch by task
 * \param argc argument count
 * \param argv argv[1] is the config file path; later arguments are
 *        name=value pairs that override settings from the config file
 * \return process exit code (always 0; prints usage when no config given)
 */
inline int Run(int argc, char *argv[]){
    if (argc < 2){
        printf("Usage: <config>\n");
        return 0;
    }
    utils::ConfigIterator itr(argv[1]);
    while (itr.Next()){
        this->SetParam(itr.name(), itr.val());
    }
    // command-line overrides are applied after the config file, so they win
    for (int i = 2; i < argc; i++){
        char name[256], val[256];
        if (sscanf(argv[i], "%[^=]=%s", name, val) == 2){
            this->SetParam(name, val);
        }
    }
    this->InitData();
    this->InitLearner();
    if (task == "dump"){
        this->TaskDump();
        return 0;
    }
    if (task == "interact"){
        this->TaskInteractive(); return 0;
    }
    if (task == "dumppath"){
        this->TaskDumpPath(); return 0;
    }
    if (task == "eval"){
        this->TaskEval(); return 0;
    }
    if (task == "pred"){
        this->TaskPred();
    }
    else{
        // any other task value (including the default "train") trains the model
        this->TaskTrain();
    }
    return 0;
}
|
||||
/*!
 * \brief set a configuration parameter of the task
 * \param name name of the parameter; "batch:*" entries are queued for
 *        interactive mode, "eval[name]" entries register evaluation data sets
 * \param val value of the parameter
 */
inline void SetParam(const char *name, const char *val){
    if (!strcmp("silent", name)) silent = atoi(val);
    if (!strcmp("use_buffer", name)) use_buffer = atoi(val);
    if (!strcmp("seed", name)) random::Seed(atoi(val));
    if (!strcmp("num_round", name)) num_round = atoi(val);
    if (!strcmp("save_period", name)) save_period = atoi(val);
    if (!strcmp("task", name)) task = val;
    if (!strcmp("data", name)) train_path = val;
    if (!strcmp("test:data", name)) test_path = val;
    if (!strcmp("model_in", name)) model_in = val;
    if (!strcmp("model_out", name)) model_out = val;
    if (!strcmp("model_dir", name)) model_dir_path = val;
    if (!strcmp("fmap", name)) name_fmap = val;
    if (!strcmp("name_dump", name)) name_dump = val;
    if (!strcmp("name_dumppath", name)) name_dumppath = val;
    if (!strcmp("name_pred", name)) name_pred = val;
    if (!strcmp("dump_stats", name)) dump_model_stats = atoi(val);
    if (!strcmp("interact:action", name)) interact_action = val;
    // "batch:" prefixed settings are stored for interactive-mode replay
    if (!strncmp("batch:", name, 6)){
        cfg_batch.PushBack(name + 6, val);
    }
    // "eval[displayname]=path" registers an evaluation data set
    if (!strncmp("eval[", name, 5)) {
        char evname[256];
        utils::Assert(sscanf(name, "eval[%[^]]", evname) == 1, "must specify evaluation name for display");
        eval_data_names.push_back(std::string(evname));
        eval_data_paths.push_back(std::string(val));
    }
    // every setting is also recorded for the learner (forwarded in InitLearner)
    cfg.PushBack(name, val);
}
|
||||
public:
|
||||
/*! \brief constructor: set default values for every configurable field */
RegBoostTask(void){
    // default parameters; "NULL" is the sentinel for "not specified"
    silent = 0;
    use_buffer = 1;
    num_round = 10;
    save_period = 0;
    dump_model_stats = 0;
    task = "train";
    model_in = "NULL";
    model_out = "NULL";
    name_fmap = "NULL";
    name_pred = "pred.txt";
    name_dump = "dump.txt";
    name_dumppath = "dump.path.txt";
    model_dir_path = "./";
    interact_action = "update";
}
|
||||
/*! \brief destructor: free the evaluation data sets allocated in InitData */
~RegBoostTask(void){
    for (size_t i = 0; i < deval.size(); i++){
        delete deval[i];
    }
}
|
||||
private:
|
||||
/*!
 * \brief load the data sets required by the current task and bind them
 *        to the learner; "dump" needs no data at all
 */
inline void InitData(void){
    if (name_fmap != "NULL") fmap.LoadText(name_fmap.c_str());
    if (task == "dump") return;
    if (task == "pred" || task == "dumppath"){
        data.CacheLoad(test_path.c_str(), silent != 0, use_buffer != 0);
    }
    else{
        // training: load the training set plus every registered eval set
        data.CacheLoad(train_path.c_str(), silent != 0, use_buffer != 0);
        utils::Assert(eval_data_names.size() == eval_data_paths.size());
        for (size_t i = 0; i < eval_data_names.size(); ++i){
            deval.push_back(new DMatrix());
            deval.back()->CacheLoad(eval_data_paths[i].c_str(), silent != 0, use_buffer != 0);
        }
    }
    learner.SetData(&data, deval, eval_data_names);
}
|
||||
/*!
 * \brief configure the learner with all recorded settings, then either
 *        load an existing model (model_in) or initialize a fresh one
 */
inline void InitLearner(void){
    cfg.BeforeFirst();
    while (cfg.Next()){
        learner.SetParam(cfg.name(), cfg.val());
    }
    if (model_in != "NULL"){
        utils::FileStream fi(utils::FopenCheck(model_in.c_str(), "rb"));
        learner.LoadModel(fi);
        fi.Close();
    }
    else{
        // only training may start from scratch; every other task needs a model
        utils::Assert(task == "train", "model_in not specified");
        learner.InitModel();
    }
    learner.InitTrainer();
}
|
||||
/*!
 * \brief run the training loop: one boosting update plus one evaluation
 *        pass per round, with periodic model checkpoints
 */
inline void TaskTrain(void){
    const time_t start = time(NULL);
    unsigned long elapsed = 0;
    for (int i = 0; i < num_round; ++i){
        elapsed = (unsigned long)(time(NULL) - start);
        if (!silent) printf("boosting round %d, %lu sec elapsed\n", i, elapsed);
        learner.UpdateOneIter(i);
        learner.EvalOneIter(i);
        // checkpoint every save_period rounds (0 disables periodic saving)
        if (save_period != 0 && (i + 1) % save_period == 0){
            this->SaveModel(i);
        }
        elapsed = (unsigned long)(time(NULL) - start);
    }
    // always save final round, unless model_out is the sentinel "NONE";
    // "NULL" (the default) falls back to the numbered checkpoint name
    if ((save_period == 0 || num_round % save_period != 0) && model_out != "NONE"){
        if (model_out == "NULL"){
            this->SaveModel(num_round - 1);
        }
        else{
            this->SaveModel(model_out.c_str());
        }
    }
    if (!silent){
        printf("\nupdating end, %lu sec in all\n", elapsed);
    }
}
|
||||
/*! \brief evaluation-only task: run one evaluation pass over the eval sets */
inline void TaskEval(void){
    learner.EvalOneIter(0);
}
|
||||
/*!
 * \brief interactive task: replay the queued "batch:" settings, performing
 *        one interactive update per "run" entry (or a single update when no
 *        batch actions are queued), then save the model
 */
inline void TaskInteractive(void){
    const time_t start = time(NULL);
    unsigned long elapsed = 0;
    int batch_action = 0;

    cfg_batch.BeforeFirst();
    while (cfg_batch.Next()){
        if (!strcmp(cfg_batch.name(), "run")){
            learner.UpdateInteract(interact_action);
            batch_action += 1;
        }
        else{
            // non-"run" batch entries reconfigure the learner between updates
            learner.SetParam(cfg_batch.name(), cfg_batch.val());
        }
    }

    if (batch_action == 0){
        learner.UpdateInteract(interact_action);
    }
    utils::Assert(model_out != "NULL", "interactive mode must specify model_out");
    this->SaveModel(model_out.c_str());
    elapsed = (unsigned long)(time(NULL) - start);

    if (!silent){
        printf("\ninteractive update, %d batch actions, %lu sec in all\n", batch_action, elapsed);
    }
}
|
||||
|
||||
/*! \brief write a readable text dump of the learned model to name_dump */
inline void TaskDump(void){
    FILE *fout = utils::FopenCheck(name_dump.c_str(), "w");
    learner.DumpModel(fout, fmap, dump_model_stats != 0);
    fclose(fout);
}
|
||||
/*! \brief write the tree paths taken by the loaded data to name_dumppath */
inline void TaskDumpPath(void){
    FILE *fout = utils::FopenCheck(name_dumppath.c_str(), "w");
    learner.DumpPath(fout, data);
    fclose(fout);
}
|
||||
/*!
 * \brief save the current model to a binary file
 * \param fname destination file path
 */
inline void SaveModel(const char *fname) const{
    utils::FileStream fo(utils::FopenCheck(fname, "wb"));
    learner.SaveModel(fo);
    fo.Close();
}
|
||||
/*!
 * \brief save the model checkpoint for a given boosting round into model_dir_path
 * \param i zero-based round index; the file is named "%04d.model" using i + 1
 */
inline void SaveModel(int i) const{
    char fname[256];
    // fix: snprintf bounds the write; sprintf overflowed the 256-byte
    // buffer when model_dir_path was long
    snprintf(fname, sizeof(fname), "%s/%04d.model", model_dir_path.c_str(), i + 1);
    this->SaveModel(fname);
}
|
||||
inline void TaskPred(void){
|
||||
std::vector<float> preds;
|
||||
if (!silent) printf("start prediction...\n");
|
||||
learner.Predict(preds, data);
|
||||
if (!silent) printf("writing prediction to %s\n", name_pred.c_str());
|
||||
FILE *fo = utils::FopenCheck(name_pred.c_str(), "w");
|
||||
for (size_t i = 0; i < preds.size(); i++){
|
||||
fprintf(fo, "%f\n", preds[i]);
|
||||
}
|
||||
fclose(fo);
|
||||
}
|
||||
private:
    /*! \brief whether to run in silent mode (nonzero suppresses progress output) */
    int silent;
    /*! \brief whether to use the automatic binary buffer */
    int use_buffer;
    /*! \brief number of boosting iterations */
    int num_round;
    /*! \brief the period to save the model, 0 means only save the final round model */
    int save_period;
    /*! \brief interact action */
    std::string interact_action;
    /*! \brief the path of training/test data set */
    std::string train_path, test_path;
    /*! \brief the path of test model file, or file to restart training */
    std::string model_in;
    /*! \brief the path of final model file, to be saved */
    std::string model_out;
    /*! \brief the path of directory containing the saved models */
    std::string model_dir_path;
    /*! \brief task to perform */
    std::string task;
    /*! \brief name of predict file */
    std::string name_pred;
    /*! \brief whether dump statistics along with model */
    int dump_model_stats;
    /*! \brief name of feature map */
    std::string name_fmap;
    /*! \brief name of dump file */
    std::string name_dump;
    /*! \brief name of dump path file */
    std::string name_dumppath;
    /*! \brief the paths of validation data sets */
    std::vector<std::string> eval_data_paths;
    /*! \brief the names of the evaluation data used in output log */
    std::vector<std::string> eval_data_names;
    /*! \brief saves configurations */
    utils::ConfigSaver cfg;
    /*! \brief batch configurations */
    utils::ConfigSaver cfg_batch;
private:
    // training/prediction data matrix
    DMatrix data;
    // evaluation data matrices, aligned with eval_data_names
    std::vector<DMatrix*> deval;
    // feature map used when dumping the model in text form
    utils::FeatMap fmap;
    // the boosting learner driving all tasks
    RegRankBoostLearner learner;
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
int main( int argc, char *argv[] ){
    // fixed seed for reproducible runs
    xgboost::random::Seed( 0 );
    xgboost::regrank::RegBoostTask tsk;
    return tsk.Run( argc, argv );
}
|
||||
112
regrank/xgboost_regrank_obj.h
Normal file
112
regrank/xgboost_regrank_obj.h
Normal file
@@ -0,0 +1,112 @@
|
||||
#ifndef XGBOOST_REGRANK_OBJ_H
|
||||
#define XGBOOST_REGRANK_OBJ_H
|
||||
/*!
|
||||
* \file xgboost_regrank_obj.h
|
||||
* \brief defines objective function interface used in xgboost for regression and rank
|
||||
* \author Tianqi Chen, Kailong Chen
|
||||
*/
|
||||
#include "xgboost_regrank_data.h"
|
||||
|
||||
namespace xgboost{
|
||||
namespace regrank{
|
||||
/*!
 * \brief interface of an objective function: supplies the gradient
 *  statistics consumed by the gradient booster, plus the prediction
 *  transformation and the default evaluation metric of the problem
 */
class IObjFunction{
public:
    /*! \brief virtual destructor */
    virtual ~IObjFunction(void){}
    /*!
     * \brief set parameters from outside
     * \param name name of the parameter
     * \param val value of the parameter
     */
    virtual void SetParam(const char *name, const char *val) = 0;
    /*!
     * \brief get gradient over each of predictions, given existing information
     * \param preds prediction of current round
     * \param info information about labels, weights, groups in rank
     * \param grad output: gradient over each of preds
     * \param hess output: second order gradient over each of preds
     */
    virtual void GetGradient(const std::vector<float>& preds,
                             const DMatrix::Info &info,
                             std::vector<float> &grad,
                             std::vector<float> &hess ) = 0;
    /*! \return the default evaluation metric for the problem */
    virtual const char* DefaultEvalMetric(void) = 0;
    /*!
     * \brief transform prediction values, this is only called when Prediction is called
     * \param preds prediction values, saves to this vector as well
     */
    virtual void PredTransform(std::vector<float> &preds){}
};
|
||||
};
|
||||
|
||||
namespace regrank{
|
||||
/*! \brief defines functions to calculate some commonly used functions */
|
||||
struct LossType{
|
||||
public:
|
||||
const static int kLinearSquare = 0;
|
||||
const static int kLogisticNeglik = 1;
|
||||
const static int kLogisticClassify = 2;
|
||||
public:
|
||||
/*! \brief indicate which type we are using */
|
||||
int loss_type;
|
||||
public:
|
||||
/*!
|
||||
* \brief transform the linear sum to prediction
|
||||
* \param x linear sum of boosting ensemble
|
||||
* \return transformed prediction
|
||||
*/
|
||||
inline float PredTransform(float x){
|
||||
switch (loss_type){
|
||||
case kLinearSquare: return x;
|
||||
case kLogisticClassify:
|
||||
case kLogisticNeglik: return 1.0f / (1.0f + expf(-x));
|
||||
default: utils::Error("unknown loss_type"); return 0.0f;
|
||||
}
|
||||
}
|
||||
|
||||
/*!
|
||||
* \brief calculate first order gradient of loss, given transformed prediction
|
||||
* \param predt transformed prediction
|
||||
* \param label true label
|
||||
* \return first order gradient
|
||||
*/
|
||||
inline float FirstOrderGradient(float predt, float label) const{
|
||||
switch (loss_type){
|
||||
case kLinearSquare: return predt - label;
|
||||
case kLogisticClassify:
|
||||
case kLogisticNeglik: return predt - label;
|
||||
default: utils::Error("unknown loss_type"); return 0.0f;
|
||||
}
|
||||
}
|
||||
/*!
|
||||
* \brief calculate second order gradient of loss, given transformed prediction
|
||||
* \param predt transformed prediction
|
||||
* \param label true label
|
||||
* \return second order gradient
|
||||
*/
|
||||
inline float SecondOrderGradient(float predt, float label) const{
|
||||
switch (loss_type){
|
||||
case kLinearSquare: return 1.0f;
|
||||
case kLogisticClassify:
|
||||
case kLogisticNeglik: return predt * (1 - predt);
|
||||
default: utils::Error("unknown loss_type"); return 0.0f;
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
#include "xgboost_regrank_obj.hpp"
|
||||
|
||||
namespace xgboost{
|
||||
namespace regrank{
|
||||
/*!
 * \brief factory: create an objective function from its registered name
 * \param name name of the objective, e.g. "reg"
 * \return newly allocated objective; caller takes ownership
 */
// 'inline' is required: this definition lives in a header and would
// otherwise violate the one definition rule once the header is included
// from more than one translation unit
inline IObjFunction* CreateObjFunction( const char *name ){
    if( !strcmp("reg", name ) ) return new RegressionObj();
    utils::Error("unknown objective function type");
    return NULL;
}
|
||||
};
|
||||
};
|
||||
#endif
|
||||
52
regrank/xgboost_regrank_obj.hpp
Normal file
52
regrank/xgboost_regrank_obj.hpp
Normal file
@@ -0,0 +1,52 @@
|
||||
#ifndef XGBOOST_REGRANK_OBJ_HPP
|
||||
#define XGBOOST_REGRANK_OBJ_HPP
|
||||
/*!
|
||||
 * \file xgboost_regrank_obj.hpp
|
||||
* \brief implementation of objective functions
|
||||
* \author Tianqi Chen, Kailong Chen
|
||||
*/
|
||||
namespace xgboost{
|
||||
namespace regrank{
|
||||
class RegressionObj : public IObjFunction{
|
||||
public:
|
||||
RegressionObj(void){
|
||||
loss.loss_type = LossType::kLinearSquare;
|
||||
}
|
||||
virtual ~RegressionObj(){}
|
||||
virtual void SetParam(const char *name, const char *val){
|
||||
if( !strcmp( "loss_type", name ) ) loss.loss_type = atoi( val );
|
||||
}
|
||||
virtual void GetGradient(const std::vector<float>& preds,
|
||||
const DMatrix::Info &info,
|
||||
std::vector<float> &grad,
|
||||
std::vector<float> &hess ) {
|
||||
grad.resize(preds.size()); hess.resize(preds.size());
|
||||
|
||||
const unsigned ndata = static_cast<unsigned>(preds.size());
|
||||
#pragma omp parallel for schedule( static )
|
||||
for (unsigned j = 0; j < ndata; ++j){
|
||||
grad[j] = loss.FirstOrderGradient(preds[j], info.labels[j]) * info.GetWeight(j);
|
||||
hess[j] = loss.SecondOrderGradient(preds[j], info.labels[j]) * info.GetWeight(j);
|
||||
}
|
||||
}
|
||||
virtual const char* DefaultEvalMetric(void) {
|
||||
if( loss.loss_type == LossType::kLogisticClassify ) return "error";
|
||||
else return "rmse";
|
||||
}
|
||||
virtual void PredTransform(std::vector<float> &preds){
|
||||
const unsigned ndata = static_cast<unsigned>(preds.size());
|
||||
#pragma omp parallel for schedule( static )
|
||||
for (unsigned j = 0; j < ndata; ++j){
|
||||
preds[j] = loss.PredTransform( preds[j] );
|
||||
}
|
||||
}
|
||||
private:
|
||||
LossType loss;
|
||||
};
|
||||
};
|
||||
|
||||
namespace regrank{
|
||||
// TODO rank objective
|
||||
};
|
||||
};
|
||||
#endif
|
||||
Reference in New Issue
Block a user