diff --git a/README.md b/README.md
index b05655f52..b1dee9fdd 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 xgboost: eXtreme Gradient Boosting
 =======
-An efficient general purpose gradient boosting (tree) library.
+A General purpose gradient boosting (tree) library.
 
 Creater: Tianqi Chen
 
@@ -32,8 +32,9 @@ Planned components
 - matrix factorization
 - structured prediction
 
-File extension convention:
-(1) .h are interface, utils and data structures, with detailed comment;
-(2) .cpp are implementations that will be compiled, with less comment;
-(3) .hpp are implementations that will be included by .cpp, with less comment
+File extension convention
+=======
+* .h are interface, utils and data structures, with detailed comment;
+* .cpp are implementations that will be compiled, with less comment;
+* .hpp are implementations that will be included by .cpp, with less comment
 
diff --git a/demo/binary_classification/README b/demo/binary_classification/README
index 64ef214df..86cd68992 100644
--- a/demo/binary_classification/README
+++ b/demo/binary_classification/README
@@ -9,3 +9,5 @@ Format of featmap.txt:
 q means continuous quantities, i means indicator features.
 Feature id must be from 0 to num_features, in sorted order.
 
+
+Detailed explaination: https://github.com/tqchen/xgboost/wiki/Binary-Classification
diff --git a/demo/binary_classification/runexp.sh b/demo/binary_classification/runexp.sh
index bd4354104..68c3e6fb9 100755
--- a/demo/binary_classification/runexp.sh
+++ b/demo/binary_classification/runexp.sh
@@ -6,9 +6,10 @@ python mknfold.py agaricus.txt 1
 # training and output the models
 ../../xgboost mushroom.conf
 # output prediction task=pred
-../../xgboost mushroom.conf task=pred model_in=0003.model
-# print the boosters of 00003.model in dump.raw.txt
-../../xgboost mushroom.conf task=dump model_in=0003.model name_dump=dump.raw.txt
+../../xgboost mushroom.conf task=pred model_in=0002.model
+# print the boosters of 00002.model in dump.raw.txt
+../../xgboost mushroom.conf task=dump model_in=0002.model name_dump=dump.raw.txt
 # use the feature map in printing for better visualization
-../../xgboost mushroom.conf task=dump model_in=0003.model fmap=featmap.txt name_dump=dump.nice.txt
+../../xgboost mushroom.conf task=dump model_in=0002.model fmap=featmap.txt name_dump=dump.nice.txt
 cat dump.nice.txt
+
diff --git a/regression/xgboost_reg_main.cpp b/regression/xgboost_reg_main.cpp
index f8180ccc9..9d43c22fb 100644
--- a/regression/xgboost_reg_main.cpp
+++ b/regression/xgboost_reg_main.cpp
@@ -111,7 +111,7 @@ namespace xgboost{
         inline void InitData( void ){
             if( name_fmap != "NULL" ) fmap.LoadText( name_fmap.c_str() );
             if( task == "dump" ) return;
-            if( task == "test" || task == "dumppath" ){
+            if( task == "pred" || task == "dumppath" ){
                 data.CacheLoad( test_path.c_str(), silent!=0, use_buffer!=0 );
             }else{
                 // training
@@ -155,7 +155,7 @@ namespace xgboost{
            // always save final round
            if( save_period == 0 || num_round % save_period != 0 ){
                if( model_out == "NULL" ){
-                   this->SaveModel( num_round );
+                   this->SaveModel( num_round - 1 );
                }else{
                    this->SaveModel( model_out.c_str() );
                }
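
Note on the 0003.model to 0002.model renaming: the xgboost_reg_main.cpp hunk now saves the final model under index num_round - 1 instead of num_round, which implies boosting rounds are numbered from zero, so the last model written after num_round rounds carries index num_round - 1. Assuming mushroom.conf sets num_round = 3 (that value is not shown in this diff), the final model file is 0002.model, which is what the updated runexp.sh commands reference. A minimal sketch of the updated prediction and dump steps under that assumption:

    # predict with the final model written by SaveModel( num_round - 1 )
    ../../xgboost mushroom.conf task=pred model_in=0002.model
    # dump the boosted trees, mapping feature ids to names via featmap.txt
    ../../xgboost mushroom.conf task=dump model_in=0002.model fmap=featmap.txt name_dump=dump.nice.txt
    cat dump.nice.txt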