Merge branch 'master' of ssh://github.com/tqchen/xgboost

Conflicts:
	regression/xgboost_reg_data.h
This commit is contained in:
tqchen
2014-04-18 17:46:44 -07:00
32 changed files with 2456 additions and 2167 deletions

View File

@@ -21,239 +21,240 @@ namespace xgboost{
class RegBoostLearner{
public:
/*! \brief constructor */
RegBoostLearner( void ){
silent = 0;
RegBoostLearner(void){
silent = 0;
}
/*!
* \brief a regression booter associated with training and evaluating data
/*!
* \brief a regression booter associated with training and evaluating data
* \param train pointer to the training data
* \param evals array of evaluating data
* \param evname name of evaluation data, used print statistics
*/
RegBoostLearner( const DMatrix *train,
const std::vector<DMatrix *> &evals,
const std::vector<std::string> &evname ){
RegBoostLearner(const DMatrix *train,
const std::vector<DMatrix *> &evals,
const std::vector<std::string> &evname){
silent = 0;
this->SetData(train,evals,evname);
this->SetData(train, evals, evname);
}
/*!
* \brief associate regression booster with training and evaluating data
/*!
* \brief associate regression booster with training and evaluating data
* \param train pointer to the training data
* \param evals array of evaluating data
* \param evname name of evaluation data, used print statistics
*/
inline void SetData( const DMatrix *train,
const std::vector<DMatrix *> &evals,
const std::vector<std::string> &evname ){
inline void SetData(const DMatrix *train,
const std::vector<DMatrix *> &evals,
const std::vector<std::string> &evname){
this->train_ = train;
this->evals_ = evals;
this->evname_ = evname;
this->evname_ = evname;
// estimate feature bound
int num_feature = (int)(train->data.NumCol());
// assign buffer index
unsigned buffer_size = static_cast<unsigned>( train->Size() );
for( size_t i = 0; i < evals.size(); ++ i ){
buffer_size += static_cast<unsigned>( evals[i]->Size() );
num_feature = std::max( num_feature, (int)(evals[i]->data.NumCol()) );
unsigned buffer_size = static_cast<unsigned>(train->Size());
for (size_t i = 0; i < evals.size(); ++i){
buffer_size += static_cast<unsigned>(evals[i]->Size());
num_feature = std::max(num_feature, (int)(evals[i]->data.NumCol()));
}
char str_temp[25];
if( num_feature > mparam.num_feature ){
if (num_feature > mparam.num_feature){
mparam.num_feature = num_feature;
sprintf( str_temp, "%d", num_feature );
base_gbm.SetParam( "bst:num_feature", str_temp );
sprintf(str_temp, "%d", num_feature);
base_gbm.SetParam("bst:num_feature", str_temp);
}
sprintf( str_temp, "%u", buffer_size );
base_gbm.SetParam( "num_pbuffer", str_temp );
if( !silent ){
printf( "buffer_size=%u\n", buffer_size );
sprintf(str_temp, "%u", buffer_size);
base_gbm.SetParam("num_pbuffer", str_temp);
if (!silent){
printf("buffer_size=%u\n", buffer_size);
}
// set eval_preds tmp sapce
this->eval_preds_.resize( evals.size(), std::vector<float>() );
this->eval_preds_.resize(evals.size(), std::vector<float>());
}
/*!
* \brief set parameters from outside
/*!
* \brief set parameters from outside
* \param name name of the parameter
* \param val value of the parameter
*/
inline void SetParam( const char *name, const char *val ){
if( !strcmp( name, "silent") ) silent = atoi( val );
if( !strcmp( name, "eval_metric") ) evaluator_.AddEval( val );
mparam.SetParam( name, val );
base_gbm.SetParam( name, val );
inline void SetParam(const char *name, const char *val){
if (!strcmp(name, "silent")) silent = atoi(val);
if (!strcmp(name, "eval_metric")) evaluator_.AddEval(val);
mparam.SetParam(name, val);
base_gbm.SetParam(name, val);
}
/*!
* \brief initialize solver before training, called before training
* this function is reserved for solver to allocate necessary space and do other preparation
* this function is reserved for solver to allocate necessary space and do other preparation
*/
inline void InitTrainer( void ){
inline void InitTrainer(void){
base_gbm.InitTrainer();
if( mparam.loss_type == kLogisticClassify ){
evaluator_.AddEval( "error" );
}else{
evaluator_.AddEval( "rmse" );
if (mparam.loss_type == kLogisticClassify){
evaluator_.AddEval("error");
}
else{
evaluator_.AddEval("rmse");
}
evaluator_.Init();
}
}
/*!
* \brief initialize the current data storage for model, if the model is used first time, call this function
*/
inline void InitModel( void ){
inline void InitModel(void){
base_gbm.InitModel();
mparam.AdjustBase();
}
/*!
/*!
* \brief load model from stream
* \param fi input stream
*/
inline void LoadModel( utils::IStream &fi ){
base_gbm.LoadModel( fi );
utils::Assert( fi.Read( &mparam, sizeof(ModelParam) ) != 0 );
*/
inline void LoadModel(utils::IStream &fi){
base_gbm.LoadModel(fi);
utils::Assert(fi.Read(&mparam, sizeof(ModelParam)) != 0);
}
/*!
/*!
* \brief DumpModel
* \param fo text file
* \param fmap feature map that may help give interpretations of feature
* \param with_stats whether print statistics as well
*/
inline void DumpModel( FILE *fo, const utils::FeatMap& fmap, bool with_stats ){
base_gbm.DumpModel( fo, fmap, with_stats );
* \param fo text file
* \param fmap feature map that may help give interpretations of feature
* \param with_stats whether print statistics as well
*/
inline void DumpModel(FILE *fo, const utils::FeatMap& fmap, bool with_stats){
base_gbm.DumpModel(fo, fmap, with_stats);
}
/*!
/*!
* \brief Dump path of all trees
* \param fo text file
* \param fo text file
* \param data input data
*/
inline void DumpPath( FILE *fo, const DMatrix &data ){
base_gbm.DumpPath( fo, data.data );
inline void DumpPath(FILE *fo, const DMatrix &data){
base_gbm.DumpPath(fo, data.data);
}
/*!
/*!
* \brief save model to stream
* \param fo output stream
*/
inline void SaveModel( utils::IStream &fo ) const{
base_gbm.SaveModel( fo );
fo.Write( &mparam, sizeof(ModelParam) );
}
/*!
inline void SaveModel(utils::IStream &fo) const{
base_gbm.SaveModel(fo);
fo.Write(&mparam, sizeof(ModelParam));
}
/*!
* \brief update the model for one iteration
* \param iteration iteration number
*/
inline void UpdateOneIter( int iter ){
this->PredictBuffer( preds_, *train_, 0 );
this->GetGradient( preds_, train_->labels, grad_, hess_ );
inline void UpdateOneIter(int iter){
this->PredictBuffer(preds_, *train_, 0);
this->GetGradient(preds_, train_->labels, grad_, hess_);
std::vector<unsigned> root_index;
base_gbm.DoBoost( grad_, hess_, train_->data, root_index );
base_gbm.DoBoost(grad_, hess_, train_->data, root_index);
}
/*!
/*!
* \brief evaluate the model for specific iteration
* \param iter iteration number
* \param fo file to output log
*/
inline void EvalOneIter( int iter, FILE *fo = stderr ){
fprintf( fo, "[%d]", iter );
int buffer_offset = static_cast<int>( train_->Size() );
for( size_t i = 0; i < evals_.size(); ++i ){
std::vector<float> &preds = this->eval_preds_[ i ];
this->PredictBuffer( preds, *evals_[i], buffer_offset);
evaluator_.Eval( fo, evname_[i].c_str(), preds, (*evals_[i]).labels );
buffer_offset += static_cast<int>( evals_[i]->Size() );
*/
inline void EvalOneIter(int iter, FILE *fo = stderr){
fprintf(fo, "[%d]", iter);
int buffer_offset = static_cast<int>(train_->Size());
for (size_t i = 0; i < evals_.size(); ++i){
std::vector<float> &preds = this->eval_preds_[i];
this->PredictBuffer(preds, *evals_[i], buffer_offset);
evaluator_.Eval(fo, evname_[i].c_str(), preds, (*evals_[i]).labels);
buffer_offset += static_cast<int>(evals_[i]->Size());
}
fprintf( fo,"\n" );
fprintf(fo, "\n");
}
/*! \brief get prediction, without buffering */
inline void Predict( std::vector<float> &preds, const DMatrix &data ){
preds.resize( data.Size() );
inline void Predict(std::vector<float> &preds, const DMatrix &data){
preds.resize(data.Size());
const unsigned ndata = static_cast<unsigned>( data.Size() );
#pragma omp parallel for schedule( static )
for( unsigned j = 0; j < ndata; ++ j ){
const unsigned ndata = static_cast<unsigned>(data.Size());
#pragma omp parallel for schedule( static )
for (unsigned j = 0; j < ndata; ++j){
preds[j] = mparam.PredTransform
( mparam.base_score + base_gbm.Predict( data.data, j, -1 ) );
(mparam.base_score + base_gbm.Predict(data.data, j, -1));
}
}
public:
/*!
/*!
* \brief update the model for one iteration
* \param iteration iteration number
*/
inline void UpdateInteract( std::string action ){
this->InteractPredict( preds_, *train_, 0 );
inline void UpdateInteract(std::string action){
this->InteractPredict(preds_, *train_, 0);
int buffer_offset = static_cast<int>( train_->Size() );
for( size_t i = 0; i < evals_.size(); ++i ){
std::vector<float> &preds = this->eval_preds_[ i ];
this->InteractPredict( preds, *evals_[i], buffer_offset );
buffer_offset += static_cast<int>( evals_[i]->Size() );
int buffer_offset = static_cast<int>(train_->Size());
for (size_t i = 0; i < evals_.size(); ++i){
std::vector<float> &preds = this->eval_preds_[i];
this->InteractPredict(preds, *evals_[i], buffer_offset);
buffer_offset += static_cast<int>(evals_[i]->Size());
}
if( action == "remove" ){
if (action == "remove"){
base_gbm.DelteBooster(); return;
}
this->GetGradient( preds_, train_->labels, grad_, hess_ );
std::vector<unsigned> root_index;
base_gbm.DoBoost( grad_, hess_, train_->data, root_index );
this->InteractRePredict( *train_, 0 );
buffer_offset = static_cast<int>( train_->Size() );
for( size_t i = 0; i < evals_.size(); ++i ){
this->InteractRePredict( *evals_[i], buffer_offset );
buffer_offset += static_cast<int>( evals_[i]->Size() );
this->GetGradient(preds_, train_->labels, grad_, hess_);
std::vector<unsigned> root_index;
base_gbm.DoBoost(grad_, hess_, train_->data, root_index);
this->InteractRePredict(*train_, 0);
buffer_offset = static_cast<int>(train_->Size());
for (size_t i = 0; i < evals_.size(); ++i){
this->InteractRePredict(*evals_[i], buffer_offset);
buffer_offset += static_cast<int>(evals_[i]->Size());
}
}
private:
/*! \brief get the transformed predictions, given data */
inline void InteractPredict( std::vector<float> &preds, const DMatrix &data, unsigned buffer_offset ){
preds.resize( data.Size() );
const unsigned ndata = static_cast<unsigned>( data.Size() );
#pragma omp parallel for schedule( static )
for( unsigned j = 0; j < ndata; ++ j ){
inline void InteractPredict(std::vector<float> &preds, const DMatrix &data, unsigned buffer_offset){
preds.resize(data.Size());
const unsigned ndata = static_cast<unsigned>(data.Size());
#pragma omp parallel for schedule( static )
for (unsigned j = 0; j < ndata; ++j){
preds[j] = mparam.PredTransform
( mparam.base_score + base_gbm.InteractPredict( data.data, j, buffer_offset + j ) );
(mparam.base_score + base_gbm.InteractPredict(data.data, j, buffer_offset + j));
}
}
/*! \brief repredict trial */
inline void InteractRePredict( const DMatrix &data, unsigned buffer_offset ){
const unsigned ndata = static_cast<unsigned>( data.Size() );
#pragma omp parallel for schedule( static )
for( unsigned j = 0; j < ndata; ++ j ){
base_gbm.InteractRePredict( data.data, j, buffer_offset + j );
inline void InteractRePredict(const DMatrix &data, unsigned buffer_offset){
const unsigned ndata = static_cast<unsigned>(data.Size());
#pragma omp parallel for schedule( static )
for (unsigned j = 0; j < ndata; ++j){
base_gbm.InteractRePredict(data.data, j, buffer_offset + j);
}
}
private:
/*! \brief get the transformed predictions, given data */
inline void PredictBuffer( std::vector<float> &preds, const DMatrix &data, unsigned buffer_offset ){
preds.resize( data.Size() );
inline void PredictBuffer(std::vector<float> &preds, const DMatrix &data, unsigned buffer_offset){
preds.resize(data.Size());
const unsigned ndata = static_cast<unsigned>( data.Size() );
#pragma omp parallel for schedule( static )
for( unsigned j = 0; j < ndata; ++ j ){
const unsigned ndata = static_cast<unsigned>(data.Size());
#pragma omp parallel for schedule( static )
for (unsigned j = 0; j < ndata; ++j){
preds[j] = mparam.PredTransform
( mparam.base_score + base_gbm.Predict( data.data, j, buffer_offset + j ) );
(mparam.base_score + base_gbm.Predict(data.data, j, buffer_offset + j));
}
}
/*! \brief get the first order and second order gradient, given the transformed predictions and labels */
inline void GetGradient( const std::vector<float> &preds,
const std::vector<float> &labels,
std::vector<float> &grad,
std::vector<float> &hess ){
grad.resize( preds.size() ); hess.resize( preds.size() );
inline void GetGradient(const std::vector<float> &preds,
const std::vector<float> &labels,
std::vector<float> &grad,
std::vector<float> &hess){
grad.resize(preds.size()); hess.resize(preds.size());
const unsigned ndata = static_cast<unsigned>( preds.size() );
#pragma omp parallel for schedule( static )
for( unsigned j = 0; j < ndata; ++ j ){
grad[j] = mparam.FirstOrderGradient( preds[j], labels[j] );
hess[j] = mparam.SecondOrderGradient( preds[j], labels[j] );
const unsigned ndata = static_cast<unsigned>(preds.size());
#pragma omp parallel for schedule( static )
for (unsigned j = 0; j < ndata; ++j){
grad[j] = mparam.FirstOrderGradient(preds[j], labels[j]);
hess[j] = mparam.SecondOrderGradient(preds[j], labels[j]);
}
}
private:
enum LossType{
kLinearSquare = 0,
@@ -270,73 +271,73 @@ namespace xgboost{
/* \brief number of features */
int num_feature;
/*! \brief reserved field */
int reserved[ 16 ];
int reserved[16];
/*! \brief constructor */
ModelParam( void ){
ModelParam(void){
base_score = 0.5f;
loss_type = 0;
loss_type = 0;
num_feature = 0;
memset( reserved, 0, sizeof( reserved ) );
memset(reserved, 0, sizeof(reserved));
}
/*!
* \brief set parameters from outside
/*!
* \brief set parameters from outside
* \param name name of the parameter
* \param val value of the parameter
*/
inline void SetParam( const char *name, const char *val ){
if( !strcmp("base_score", name ) ) base_score = (float)atof( val );
if( !strcmp("loss_type", name ) ) loss_type = atoi( val );
if( !strcmp("bst:num_feature", name ) ) num_feature = atoi( val );
inline void SetParam(const char *name, const char *val){
if (!strcmp("base_score", name)) base_score = (float)atof(val);
if (!strcmp("loss_type", name)) loss_type = atoi(val);
if (!strcmp("bst:num_feature", name)) num_feature = atoi(val);
}
/*!
/*!
* \brief adjust base_score
*/
inline void AdjustBase( void ){
if( loss_type == 1 || loss_type == 2 ){
utils::Assert( base_score > 0.0f && base_score < 1.0f, "sigmoid range constrain" );
base_score = - logf( 1.0f / base_score - 1.0f );
*/
inline void AdjustBase(void){
if (loss_type == 1 || loss_type == 2){
utils::Assert(base_score > 0.0f && base_score < 1.0f, "sigmoid range constrain");
base_score = -logf(1.0f / base_score - 1.0f);
}
}
/*!
* \brief transform the linear sum to prediction
/*!
* \brief transform the linear sum to prediction
* \param x linear sum of boosting ensemble
* \return transformed prediction
*/
inline float PredTransform( float x ){
switch( loss_type ){
inline float PredTransform(float x){
switch (loss_type){
case kLinearSquare: return x;
case kLogisticClassify:
case kLogisticNeglik: return 1.0f/(1.0f + expf(-x));
case kLogisticNeglik: return 1.0f / (1.0f + expf(-x));
default: utils::Error("unknown loss_type"); return 0.0f;
}
}
/*!
/*!
* \brief calculate first order gradient of loss, given transformed prediction
* \param predt transformed prediction
* \param label true label
* \return first order gradient
*/
inline float FirstOrderGradient( float predt, float label ) const{
switch( loss_type ){
inline float FirstOrderGradient(float predt, float label) const{
switch (loss_type){
case kLinearSquare: return predt - label;
case kLogisticClassify:
case kLogisticNeglik: return predt - label;
default: utils::Error("unknown loss_type"); return 0.0f;
}
}
/*!
/*!
* \brief calculate second order gradient of loss, given transformed prediction
* \param predt transformed prediction
* \param label true label
* \return second order gradient
*/
inline float SecondOrderGradient( float predt, float label ) const{
switch( loss_type ){
inline float SecondOrderGradient(float predt, float label) const{
switch (loss_type){
case kLinearSquare: return 1.0f;
case kLogisticClassify:
case kLogisticNeglik: return predt * ( 1 - predt );
case kLogisticNeglik: return predt * (1 - predt);
default: utils::Error("unknown loss_type"); return 0.0f;
}
}
@@ -348,10 +349,10 @@ namespace xgboost{
* \return the specified loss
*/
inline float Loss(const std::vector<float> &preds, const std::vector<float> &labels) const{
switch( loss_type ){
case kLinearSquare: return SquareLoss(preds,labels);
case kLogisticNeglik:
case kLogisticClassify: return NegLoglikelihoodLoss(preds,labels);
switch (loss_type){
case kLinearSquare: return SquareLoss(preds, labels);
case kLogisticNeglik:
case kLogisticClassify: return NegLoglikelihoodLoss(preds, labels);
default: utils::Error("unknown loss_type"); return 0.0f;
}
}
@@ -364,7 +365,7 @@ namespace xgboost{
*/
inline float SquareLoss(const std::vector<float> &preds, const std::vector<float> &labels) const{
float ans = 0.0;
for(size_t i = 0; i < preds.size(); i++){
for (size_t i = 0; i < preds.size(); i++){
float dif = preds[i] - labels[i];
ans += dif * dif;
}
@@ -379,8 +380,8 @@ namespace xgboost{
*/
inline float NegLoglikelihoodLoss(const std::vector<float> &preds, const std::vector<float> &labels) const{
float ans = 0.0;
for(size_t i = 0; i < preds.size(); i++)
ans -= labels[i] * logf(preds[i]) + ( 1 - labels[i] ) * logf(1 - preds[i]);
for (size_t i = 0; i < preds.size(); i++)
ans -= labels[i] * logf(preds[i]) + (1 - labels[i]) * logf(1 - preds[i]);
return ans;
}
};

View File

@@ -27,111 +27,112 @@ namespace xgboost{
std::vector<float> labels;
public:
/*! \brief default constructor */
DMatrix( void ){}
DMatrix(void){}
/*! \brief get the number of instances */
inline size_t Size() const{
return labels.size();
}
/*!
* \brief load from text file
/*!
* \brief load from text file
* \param fname name of text data
* \param silent whether print information or not
*/
inline void LoadText( const char* fname, bool silent = false ){
*/
inline void LoadText(const char* fname, bool silent = false){
data.Clear();
FILE* file = utils::FopenCheck( fname, "r" );
FILE* file = utils::FopenCheck(fname, "r");
float label; bool init = true;
char tmp[ 1024 ];
char tmp[1024];
std::vector<booster::bst_uint> findex;
std::vector<booster::bst_float> fvalue;
while( fscanf( file, "%s", tmp ) == 1 ){
while (fscanf(file, "%s", tmp) == 1){
unsigned index; float value;
if( sscanf( tmp, "%u:%f", &index, &value ) == 2 ){
findex.push_back( index ); fvalue.push_back( value );
}else{
if( !init ){
labels.push_back( label );
data.AddRow( findex, fvalue );
if (sscanf(tmp, "%u:%f", &index, &value) == 2){
findex.push_back(index); fvalue.push_back(value);
}
else{
if (!init){
labels.push_back(label);
data.AddRow(findex, fvalue);
}
findex.clear(); fvalue.clear();
utils::Assert( sscanf( tmp, "%f", &label ) == 1, "invalid format" );
utils::Assert(sscanf(tmp, "%f", &label) == 1, "invalid format");
init = false;
}
}
labels.push_back( label );
data.AddRow( findex, fvalue );
labels.push_back(label);
data.AddRow(findex, fvalue);
// initialize column support as well
data.InitData();
if( !silent ){
printf("%ux%u matrix with %lu entries is loaded from %s\n",
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname );
if (!silent){
printf("%ux%u matrix with %lu entries is loaded from %s\n",
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname);
}
fclose(file);
}
/*!
* \brief load from binary file
/*!
* \brief load from binary file
* \param fname name of binary data
* \param silent whether print information or not
* \return whether loading is success
*/
inline bool LoadBinary( const char* fname, bool silent = false ){
FILE *fp = fopen64( fname, "rb" );
if( fp == NULL ) return false;
utils::FileStream fs( fp );
data.LoadBinary( fs );
labels.resize( data.NumRow() );
utils::Assert( fs.Read( &labels[0], sizeof(float) * data.NumRow() ) != 0, "DMatrix LoadBinary" );
inline bool LoadBinary(const char* fname, bool silent = false){
FILE *fp = fopen64(fname, "rb");
if (fp == NULL) return false;
utils::FileStream fs(fp);
data.LoadBinary(fs);
labels.resize(data.NumRow());
utils::Assert(fs.Read(&labels[0], sizeof(float)* data.NumRow()) != 0, "DMatrix LoadBinary");
fs.Close();
// initialize column support as well
data.InitData();
if( !silent ){
printf("%ux%u matrix with %lu entries is loaded from %s\n",
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname );
if (!silent){
printf("%ux%u matrix with %lu entries is loaded from %s\n",
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname);
}
return true;
}
/*!
/*!
* \brief save to binary file
* \param fname name of binary data
* \param silent whether print information or not
*/
inline void SaveBinary( const char* fname, bool silent = false ){
inline void SaveBinary(const char* fname, bool silent = false){
// initialize column support as well
data.InitData();
utils::FileStream fs( utils::FopenCheck( fname, "wb" ) );
data.SaveBinary( fs );
fs.Write( &labels[0], sizeof(float) * data.NumRow() );
utils::FileStream fs(utils::FopenCheck(fname, "wb"));
data.SaveBinary(fs);
fs.Write(&labels[0], sizeof(float)* data.NumRow());
fs.Close();
if( !silent ){
printf("%ux%u matrix with %lu entries is saved to %s\n",
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname );
if (!silent){
printf("%ux%u matrix with %lu entries is saved to %s\n",
(unsigned)data.NumRow(), (unsigned)data.NumCol(), (unsigned long)data.NumEntry(), fname);
}
}
/*!
/*!
* \brief cache load data given a file name, if filename ends with .buffer, direct load binary
* otherwise the function will first check if fname + '.buffer' exists,
* if binary buffer exists, it will reads from binary buffer, otherwise, it will load from text file,
* and try to create a buffer file
* and try to create a buffer file
* \param fname name of binary data
* \param silent whether print information or not
* \param savebuffer whether do save binary buffer if it is text
*/
inline void CacheLoad( const char *fname, bool silent = false, bool savebuffer = true ){
int len = strlen( fname );
if( len > 8 && !strcmp( fname + len - 7, ".buffer") ){
this->LoadBinary( fname, silent ); return;
inline void CacheLoad(const char *fname, bool silent = false, bool savebuffer = true){
int len = strlen(fname);
if (len > 8 && !strcmp(fname + len - 7, ".buffer")){
this->LoadBinary(fname, silent); return;
}
char bname[ 1024 ];
sprintf( bname, "%s.buffer", fname );
if( !this->LoadBinary( bname, silent ) ){
this->LoadText( fname, silent );
if( savebuffer ) this->SaveBinary( bname, silent );
char bname[1024];
sprintf(bname, "%s.buffer", fname);
if (!this->LoadBinary(bname, silent)){
this->LoadText(fname, silent);
if (savebuffer) this->SaveBinary(bname, silent);
}
}
};

View File

@@ -16,72 +16,73 @@ namespace xgboost{
namespace regression{
/*! \brief evaluator that evaluates the loss metrics */
struct IEvaluator{
/*!
* \brief evaluate a specific metric
/*!
* \brief evaluate a specific metric
* \param preds prediction
* \param labels label
*/
virtual float Eval( const std::vector<float> &preds,
const std::vector<float> &labels ) const= 0;
virtual float Eval(const std::vector<float> &preds,
const std::vector<float> &labels) const = 0;
/*! \return name of metric */
virtual const char *Name( void ) const= 0;
virtual const char *Name(void) const = 0;
};
/*! \brief RMSE */
struct EvalRMSE : public IEvaluator{
virtual float Eval( const std::vector<float> &preds,
const std::vector<float> &labels ) const{
const unsigned ndata = static_cast<unsigned>( preds.size() );
struct EvalRMSE : public IEvaluator{
virtual float Eval(const std::vector<float> &preds,
const std::vector<float> &labels) const{
const unsigned ndata = static_cast<unsigned>(preds.size());
float sum = 0.0;
#pragma omp parallel for reduction(+:sum) schedule( static )
for( unsigned i = 0; i < ndata; ++ i ){
#pragma omp parallel for reduction(+:sum) schedule( static )
for (unsigned i = 0; i < ndata; ++i){
float diff = preds[i] - labels[i];
sum += diff * diff;
}
return sqrtf( sum / ndata );
}
return sqrtf(sum / ndata);
}
virtual const char *Name( void ) const{
virtual const char *Name(void) const{
return "rmse";
}
};
/*! \brief Error */
struct EvalError : public IEvaluator{
virtual float Eval( const std::vector<float> &preds,
const std::vector<float> &labels ) const{
const unsigned ndata = static_cast<unsigned>( preds.size() );
struct EvalError : public IEvaluator{
virtual float Eval(const std::vector<float> &preds,
const std::vector<float> &labels) const{
const unsigned ndata = static_cast<unsigned>(preds.size());
unsigned nerr = 0;
#pragma omp parallel for reduction(+:nerr) schedule( static )
for( unsigned i = 0; i < ndata; ++ i ){
if( preds[i] > 0.5f ){
if( labels[i] < 0.5f ) nerr += 1;
}else{
if( labels[i] > 0.5f ) nerr += 1;
#pragma omp parallel for reduction(+:nerr) schedule( static )
for (unsigned i = 0; i < ndata; ++i){
if (preds[i] > 0.5f){
if (labels[i] < 0.5f) nerr += 1;
}
}
else{
if (labels[i] > 0.5f) nerr += 1;
}
}
return static_cast<float>(nerr) / ndata;
}
virtual const char *Name( void ) const{
virtual const char *Name(void) const{
return "error";
}
};
/*! \brief Error */
struct EvalLogLoss : public IEvaluator{
virtual float Eval( const std::vector<float> &preds,
const std::vector<float> &labels ) const{
const unsigned ndata = static_cast<unsigned>( preds.size() );
struct EvalLogLoss : public IEvaluator{
virtual float Eval(const std::vector<float> &preds,
const std::vector<float> &labels) const{
const unsigned ndata = static_cast<unsigned>(preds.size());
unsigned nerr = 0;
#pragma omp parallel for reduction(+:nerr) schedule( static )
for( unsigned i = 0; i < ndata; ++ i ){
#pragma omp parallel for reduction(+:nerr) schedule( static )
for (unsigned i = 0; i < ndata; ++i){
const float y = labels[i];
const float py = preds[i];
nerr -= y * std::log(py) + (1.0f-y)*std::log(1-py);
}
nerr -= y * std::log(py) + (1.0f - y)*std::log(1 - py);
}
return static_cast<float>(nerr) / ndata;
}
virtual const char *Name( void ) const{
virtual const char *Name(void) const{
return "negllik";
}
};
@@ -91,28 +92,28 @@ namespace xgboost{
/*! \brief a set of evaluators */
struct EvalSet{
public:
inline void AddEval( const char *name ){
if( !strcmp( name, "rmse") ) evals_.push_back( &rmse_ );
if( !strcmp( name, "error") ) evals_.push_back( &error_ );
if( !strcmp( name, "logloss") ) evals_.push_back( &logloss_ );
inline void AddEval(const char *name){
if (!strcmp(name, "rmse")) evals_.push_back(&rmse_);
if (!strcmp(name, "error")) evals_.push_back(&error_);
if (!strcmp(name, "logloss")) evals_.push_back(&logloss_);
}
inline void Init( void ){
std::sort( evals_.begin(), evals_.end() );
evals_.resize( std::unique( evals_.begin(), evals_.end() ) - evals_.begin() );
inline void Init(void){
std::sort(evals_.begin(), evals_.end());
evals_.resize(std::unique(evals_.begin(), evals_.end()) - evals_.begin());
}
inline void Eval( FILE *fo, const char *evname,
const std::vector<float> &preds,
const std::vector<float> &labels ) const{
for( size_t i = 0; i < evals_.size(); ++ i ){
float res = evals_[i]->Eval( preds, labels );
fprintf( fo, "\t%s-%s:%f", evname, evals_[i]->Name(), res );
}
inline void Eval(FILE *fo, const char *evname,
const std::vector<float> &preds,
const std::vector<float> &labels) const{
for (size_t i = 0; i < evals_.size(); ++i){
float res = evals_[i]->Eval(preds, labels);
fprintf(fo, "\t%s-%s:%f", evname, evals_[i]->Name(), res);
}
}
private:
EvalRMSE rmse_;
EvalError error_;
EvalLogLoss logloss_;
std::vector<const IEvaluator*> evals_;
std::vector<const IEvaluator*> evals_;
};
};
};

View File

@@ -16,83 +16,84 @@ namespace xgboost{
* given the configuation
* \author Kailong Chen: chenkl198812@gmail.com, Tianqi Chen: tianqi.chen@gmail.com
*/
class RegBoostTask{
class RegBoostTask{
public:
inline int Run( int argc, char *argv[] ){
if( argc < 2 ){
printf("Usage: <config>\n");
inline int Run(int argc, char *argv[]){
if (argc < 2){
printf("Usage: <config>\n");
return 0;
}
utils::ConfigIterator itr( argv[1] );
while( itr.Next() ){
this->SetParam( itr.name(), itr.val() );
utils::ConfigIterator itr(argv[1]);
while (itr.Next()){
this->SetParam(itr.name(), itr.val());
}
for( int i = 2; i < argc; i ++ ){
for (int i = 2; i < argc; i++){
char name[256], val[256];
if( sscanf( argv[i], "%[^=]=%s", name, val ) == 2 ){
this->SetParam( name, val );
if (sscanf(argv[i], "%[^=]=%s", name, val) == 2){
this->SetParam(name, val);
}
}
this->InitData();
this->InitLearner();
if( task == "dump" ){
if (task == "dump"){
this->TaskDump();
return 0;
}
if( task == "interact" ){
if (task == "interact"){
this->TaskInteractive(); return 0;
}
if( task == "dumppath" ){
if (task == "dumppath"){
this->TaskDumpPath(); return 0;
}
if( task == "eval" ){
if (task == "eval"){
this->TaskEval(); return 0;
}
if( task == "pred" ){
if (task == "pred"){
this->TaskPred();
}else{
}
else{
this->TaskTrain();
}
return 0;
}
inline void SetParam( const char *name, const char *val ){
if( !strcmp("silent", name ) ) silent = atoi( val );
if( !strcmp("use_buffer", name ) ) use_buffer = atoi( val );
if( !strcmp("seed", name ) ) random::Seed( atoi(val) );
if( !strcmp("num_round", name ) ) num_round = atoi( val );
if( !strcmp("save_period", name ) ) save_period = atoi( val );
if( !strcmp("task", name ) ) task = val;
if( !strcmp("data", name ) ) train_path = val;
if( !strcmp("test:data", name ) ) test_path = val;
if( !strcmp("model_in", name ) ) model_in = val;
if( !strcmp("model_out", name ) ) model_out = val;
if( !strcmp("model_dir", name ) ) model_dir_path = val;
if( !strcmp("fmap", name ) ) name_fmap = val;
if( !strcmp("name_dump", name ) ) name_dump = val;
if( !strcmp("name_dumppath", name ) ) name_dumppath = val;
if( !strcmp("name_pred", name ) ) name_pred = val;
if( !strcmp("dump_stats", name ) ) dump_model_stats = atoi( val );
if( !strcmp("interact:action", name ) ) interact_action = val;
if( !strncmp("batch:", name, 6 ) ){
cfg_batch.PushBack( name + 6, val );
inline void SetParam(const char *name, const char *val){
if (!strcmp("silent", name)) silent = atoi(val);
if (!strcmp("use_buffer", name)) use_buffer = atoi(val);
if (!strcmp("seed", name)) random::Seed(atoi(val));
if (!strcmp("num_round", name)) num_round = atoi(val);
if (!strcmp("save_period", name)) save_period = atoi(val);
if (!strcmp("task", name)) task = val;
if (!strcmp("data", name)) train_path = val;
if (!strcmp("test:data", name)) test_path = val;
if (!strcmp("model_in", name)) model_in = val;
if (!strcmp("model_out", name)) model_out = val;
if (!strcmp("model_dir", name)) model_dir_path = val;
if (!strcmp("fmap", name)) name_fmap = val;
if (!strcmp("name_dump", name)) name_dump = val;
if (!strcmp("name_dumppath", name)) name_dumppath = val;
if (!strcmp("name_pred", name)) name_pred = val;
if (!strcmp("dump_stats", name)) dump_model_stats = atoi(val);
if (!strcmp("interact:action", name)) interact_action = val;
if (!strncmp("batch:", name, 6)){
cfg_batch.PushBack(name + 6, val);
}
if( !strncmp("eval[", name, 5 ) ) {
char evname[ 256 ];
utils::Assert( sscanf( name, "eval[%[^]]", evname ) == 1, "must specify evaluation name for display");
eval_data_names.push_back( std::string( evname ) );
eval_data_paths.push_back( std::string( val ) );
if (!strncmp("eval[", name, 5)) {
char evname[256];
utils::Assert(sscanf(name, "eval[%[^]]", evname) == 1, "must specify evaluation name for display");
eval_data_names.push_back(std::string(evname));
eval_data_paths.push_back(std::string(val));
}
cfg.PushBack( name, val );
cfg.PushBack(name, val);
}
public:
RegBoostTask( void ){
RegBoostTask(void){
// default parameters
silent = 0;
use_buffer = 1;
num_round = 10;
save_period = 0;
dump_model_stats = 0;
task = "train";
task = "train";
model_in = "NULL";
model_out = "NULL";
name_fmap = "NULL";
@@ -102,128 +103,132 @@ namespace xgboost{
model_dir_path = "./";
interact_action = "update";
}
~RegBoostTask( void ){
for( size_t i = 0; i < deval.size(); i ++ ){
~RegBoostTask(void){
for (size_t i = 0; i < deval.size(); i++){
delete deval[i];
}
}
private:
inline void InitData( void ){
if( name_fmap != "NULL" ) fmap.LoadText( name_fmap.c_str() );
if( task == "dump" ) return;
if( task == "pred" || task == "dumppath" ){
data.CacheLoad( test_path.c_str(), silent!=0, use_buffer!=0 );
}else{
inline void InitData(void){
if (name_fmap != "NULL") fmap.LoadText(name_fmap.c_str());
if (task == "dump") return;
if (task == "pred" || task == "dumppath"){
data.CacheLoad(test_path.c_str(), silent != 0, use_buffer != 0);
}
else{
// training
data.CacheLoad( train_path.c_str(), silent!=0, use_buffer!=0 );
utils::Assert( eval_data_names.size() == eval_data_paths.size() );
for( size_t i = 0; i < eval_data_names.size(); ++ i ){
deval.push_back( new DMatrix() );
deval.back()->CacheLoad( eval_data_paths[i].c_str(), silent!=0, use_buffer!=0 );
data.CacheLoad(train_path.c_str(), silent != 0, use_buffer != 0);
utils::Assert(eval_data_names.size() == eval_data_paths.size());
for (size_t i = 0; i < eval_data_names.size(); ++i){
deval.push_back(new DMatrix());
deval.back()->CacheLoad(eval_data_paths[i].c_str(), silent != 0, use_buffer != 0);
}
}
learner.SetData( &data, deval, eval_data_names );
learner.SetData(&data, deval, eval_data_names);
}
inline void InitLearner( void ){
inline void InitLearner(void){
cfg.BeforeFirst();
while( cfg.Next() ){
learner.SetParam( cfg.name(), cfg.val() );
while (cfg.Next()){
learner.SetParam(cfg.name(), cfg.val());
}
if( model_in != "NULL" ){
utils::FileStream fi( utils::FopenCheck( model_in.c_str(), "rb") );
learner.LoadModel( fi );
if (model_in != "NULL"){
utils::FileStream fi(utils::FopenCheck(model_in.c_str(), "rb"));
learner.LoadModel(fi);
fi.Close();
}else{
utils::Assert( task == "train", "model_in not specified" );
}
else{
utils::Assert(task == "train", "model_in not specified");
learner.InitModel();
}
learner.InitTrainer();
}
inline void TaskTrain( void ){
const time_t start = time( NULL );
inline void TaskTrain(void){
const time_t start = time(NULL);
unsigned long elapsed = 0;
for( int i = 0; i < num_round; ++ i ){
elapsed = (unsigned long)(time(NULL) - start);
if( !silent ) printf("boosting round %d, %lu sec elapsed\n", i , elapsed );
learner.UpdateOneIter( i );
learner.EvalOneIter( i );
if( save_period != 0 && (i+1) % save_period == 0 ){
this->SaveModel( i );
for (int i = 0; i < num_round; ++i){
elapsed = (unsigned long)(time(NULL) - start);
if (!silent) printf("boosting round %d, %lu sec elapsed\n", i, elapsed);
learner.UpdateOneIter(i);
learner.EvalOneIter(i);
if (save_period != 0 && (i + 1) % save_period == 0){
this->SaveModel(i);
}
elapsed = (unsigned long)(time(NULL) - start);
elapsed = (unsigned long)(time(NULL) - start);
}
// always save final round
if( save_period == 0 || num_round % save_period != 0 ){
if( model_out == "NULL" ){
this->SaveModel( num_round - 1 );
}else{
this->SaveModel( model_out.c_str() );
if (save_period == 0 || num_round % save_period != 0){
if (model_out == "NULL"){
this->SaveModel(num_round - 1);
}
else{
this->SaveModel(model_out.c_str());
}
}
if( !silent ){
printf("\nupdating end, %lu sec in all\n", elapsed );
if (!silent){
printf("\nupdating end, %lu sec in all\n", elapsed);
}
}
inline void TaskEval( void ){
learner.EvalOneIter( 0 );
inline void TaskEval(void){
learner.EvalOneIter(0);
}
inline void TaskInteractive( void ){
const time_t start = time( NULL );
inline void TaskInteractive(void){
const time_t start = time(NULL);
unsigned long elapsed = 0;
int batch_action = 0;
cfg_batch.BeforeFirst();
while( cfg_batch.Next() ){
if( !strcmp( cfg_batch.name(), "run" ) ){
learner.UpdateInteract( interact_action );
while (cfg_batch.Next()){
if (!strcmp(cfg_batch.name(), "run")){
learner.UpdateInteract(interact_action);
batch_action += 1;
} else{
learner.SetParam( cfg_batch.name(), cfg_batch.val() );
}
else{
learner.SetParam(cfg_batch.name(), cfg_batch.val());
}
}
if( batch_action == 0 ){
learner.UpdateInteract( interact_action );
if (batch_action == 0){
learner.UpdateInteract(interact_action);
}
utils::Assert( model_out != "NULL", "interactive mode must specify model_out" );
this->SaveModel( model_out.c_str() );
elapsed = (unsigned long)(time(NULL) - start);
utils::Assert(model_out != "NULL", "interactive mode must specify model_out");
this->SaveModel(model_out.c_str());
elapsed = (unsigned long)(time(NULL) - start);
if( !silent ){
printf("\ninteractive update, %d batch actions, %lu sec in all\n", batch_action, elapsed );
if (!silent){
printf("\ninteractive update, %d batch actions, %lu sec in all\n", batch_action, elapsed);
}
}
inline void TaskDump( void ){
FILE *fo = utils::FopenCheck( name_dump.c_str(), "w" );
learner.DumpModel( fo, fmap, dump_model_stats != 0 );
fclose( fo );
inline void TaskDump(void){
FILE *fo = utils::FopenCheck(name_dump.c_str(), "w");
learner.DumpModel(fo, fmap, dump_model_stats != 0);
fclose(fo);
}
inline void TaskDumpPath( void ){
FILE *fo = utils::FopenCheck( name_dumppath.c_str(), "w" );
learner.DumpPath( fo, data );
fclose( fo );
inline void TaskDumpPath(void){
FILE *fo = utils::FopenCheck(name_dumppath.c_str(), "w");
learner.DumpPath(fo, data);
fclose(fo);
}
inline void SaveModel( const char *fname ) const{
utils::FileStream fo( utils::FopenCheck( fname, "wb" ) );
learner.SaveModel( fo );
inline void SaveModel(const char *fname) const{
utils::FileStream fo(utils::FopenCheck(fname, "wb"));
learner.SaveModel(fo);
fo.Close();
}
inline void SaveModel( int i ) const{
inline void SaveModel(int i) const{
char fname[256];
sprintf( fname ,"%s/%04d.model", model_dir_path.c_str(), i+1 );
this->SaveModel( fname );
sprintf(fname, "%s/%04d.model", model_dir_path.c_str(), i + 1);
this->SaveModel(fname);
}
inline void TaskPred( void ){
inline void TaskPred(void){
std::vector<float> preds;
if( !silent ) printf("start prediction...\n");
learner.Predict( preds, data );
if( !silent ) printf("writing prediction to %s\n", name_pred.c_str() );
FILE *fo = utils::FopenCheck( name_pred.c_str(), "w" );
for( size_t i = 0; i < preds.size(); i ++ ){
fprintf( fo, "%f\n", preds[i] );
if (!silent) printf("start prediction...\n");
learner.Predict(preds, data);
if (!silent) printf("writing prediction to %s\n", name_pred.c_str());
FILE *fo = utils::FopenCheck(name_pred.c_str(), "w");
for (size_t i = 0; i < preds.size(); i++){
fprintf(fo, "%f\n", preds[i]);
}
fclose( fo );
fclose(fo);
}
private:
/* \brief whether silent */
@@ -231,7 +236,7 @@ namespace xgboost{
/* \brief whether use auto binary buffer */
int use_buffer;
/* \brief number of boosting iterations */
int num_round;
int num_round;
/* \brief the period to save the model, 0 means only save the final round model */
int save_period;
/*! \brief interfact action */
@@ -257,9 +262,9 @@ namespace xgboost{
/* \brief name of dump path file */
std::string name_dumppath;
/* \brief the paths of validation data sets */
std::vector<std::string> eval_data_paths;
std::vector<std::string> eval_data_paths;
/* \brief the names of the evaluation data used in output log */
std::vector<std::string> eval_data_names;
std::vector<std::string> eval_data_names;
/*! \brief saves configurations */
utils::ConfigSaver cfg;
/*! \brief batch configurations */
@@ -274,7 +279,7 @@ namespace xgboost{
};
/*!
 * \brief program entry: seed the RNG deterministically, then delegate
 *  all argument/config handling to RegBoostTask
 */
int main(int argc, char *argv[]){
    xgboost::random::Seed(0);
    xgboost::regression::RegBoostTask tsk;
    return tsk.Run(argc, argv);
}