chg all settings to obj
commit 98e507451c
parent 213375baca
.gitignore (vendored): 5 changes
@@ -18,3 +18,8 @@
 *model
 xgboost
 *pyc
+*train
+*test
+*group
+*rar
+*vali
@@ -2,7 +2,7 @@
 # choose the tree booster, 0: tree, 1: linear
 booster_type = 0
 # choose logistic regression loss function for binary classification
-loss_type = 2
+objective = binary:logistic
 
 # Tree Booster Parameters
 # step size shrinkage
@@ -31,8 +31,9 @@ xgmat = xgb.DMatrix( data, label=label, missing = -999.0, weight=weight )
 
 # setup parameters for xgboost
 param = {}
-# use logistic regression loss
-param['loss_type'] = 3
+# use logistic regression loss, use raw prediction before logistic transformation
+# since we only need the rank
+param['objective'] = 'binary:logitraw'
 # scale weight of positive examples
 param['scale_pos_weight'] = sum_wneg/sum_wpos
 param['bst:eta'] = 0.1
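The hunk above drops the numeric loss_type code in favor of a named objective. As a minimal end-to-end sketch of the new-style parameters (the arrays and num_round here are made-up stand-ins for what the demo loads from file; only the param keys come from the diff):

import numpy as np
import xgboost as xgb

# stand-in data; the real demo loads labels and per-instance weights from file
data = np.random.rand(100, 10)
label = np.random.randint(2, size=100)
weight = np.ones(100)
sum_wpos = weight[label == 1].sum()
sum_wneg = weight[label == 0].sum()

xgmat = xgb.DMatrix(data, label=label, missing=-999.0, weight=weight)

param = {}
# named objective replaces the old numeric loss_type
param['objective'] = 'binary:logitraw'
param['scale_pos_weight'] = sum_wneg / sum_wpos
param['bst:eta'] = 0.1

num_round = 10  # assumed value, not part of this commit
bst = xgb.train(param, xgmat, num_round)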
@@ -33,7 +33,7 @@ xgmat = xgb.DMatrix( data, label=label, missing = -999.0, weight=weight )
 # setup parameters for xgboost
 param = {}
 # use logistic regression loss
-param['loss_type'] = 1
+param['objective'] = 'binary:logitraw'
 # scale weight of positive examples
 param['scale_pos_weight'] = sum_wneg/sum_wpos
 param['bst:eta'] = 0.1
@@ -3,6 +3,5 @@ python trans_data.py test.txt mq2008.test mq2008.test.group
 python trans_data.py vali.txt mq2008.vali mq2008.vali.group
 
 ../../xgboost mq2008.conf
-
-../../xgboost mq2008.conf task=pred model_in=0002.model
+../../xgboost mq2008.conf task=pred model_in=0004.model
 
@@ -1,9 +1,9 @@
 # General Parameters, see comment for each definition
 # choose the tree booster, 0: tree, 1: linear
 booster_type = 0
-# this is the only difference with classification, use 0: linear regression
-# when labels are in [0,1] we can also use 1: logistic regression
-loss_type = 0
+# this is the only difference with classification, use reg:linear to do linear classification
+# when labels are in [0,1] we can also use reg:logistic
+objective = reg:linear
 
 # Tree Booster Parameters
 # step size shrinkage
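The renamed regression objectives work from the Python wrapper as well; a hedged sketch using made-up data (only the objective strings come from the diff above):

import numpy as np
import xgboost as xgb

X = np.random.rand(50, 5)
y = np.random.rand(50)  # continuous targets
dtrain = xgb.DMatrix(X, label=y)

# reg:linear replaces loss_type = 0; with labels in [0,1],
# reg:logistic replaces loss_type = 1
param = {'objective': 'reg:linear'}
bst = xgb.train(param, dtrain, 5)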
@@ -25,7 +25,7 @@ namespace xgboost{
         RegRankBoostLearner(void){
             silent = 0;
             obj_ = NULL;
-            name_obj_ = "reg";
+            name_obj_ = "reg:linear";
         }
         /*!
          * \brief a regression booster associated with training and evaluating data
@@ -129,9 +129,11 @@ namespace xgboost{
             if( fs.Read(&nwt, sizeof(unsigned) ) != 0 ){
                 utils::Assert( nwt == 0 || nwt == data.NumRow(), "invalid weight" );
                 info.weights.resize( nwt );
+                if( nwt != 0 ){
                 utils::Assert( fs.Read(&info.weights[0], sizeof(unsigned) * nwt) != 0, "Load weight file");
+                }
             }
         }
         fs.Close();
 
         if (!silent){
@@ -109,12 +109,13 @@
 namespace xgboost{
     namespace regrank{
         inline IObjFunction* CreateObjFunction( const char *name ){
-            if( !strcmp("reg", name ) ) return new RegressionObj();
+            if( !strcmp("reg:linear", name ) ) return new RegressionObj( LossType::kLinearSquare );
+            if( !strcmp("reg:logistic", name ) ) return new RegressionObj( LossType::kLogisticNeglik );
+            if( !strcmp("binary:logistic", name ) ) return new RegressionObj( LossType::kLogisticClassify );
+            if( !strcmp("binary:logitraw", name ) ) return new RegressionObj( LossType::kLogisticRaw );
+            if( !strcmp("multi:softmax", name ) ) return new SoftmaxMultiClassObj();
             if( !strcmp("rank:pairwise", name ) ) return new PairwiseRankObj();
             if( !strcmp("rank:softmax", name ) ) return new SoftmaxRankObj();
-            if( !strcmp("softmax", name ) ) return new SoftmaxMultiClassObj();
-            // if (!strcmp("lambdarank:map", name)) return new LambdaRankObj_MAP();
-            // if (!strcmp("lambdarank:ndcg", name)) return new LambdaRankObj_NDCG();
             utils::Error("unknown objective function type");
             return NULL;
         }
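Reading the hunks of this commit together, the old numeric codes map onto the new objective strings roughly as below; this summary is inferred from the diffs, not copied from the source tree:

# old loss_type code -> new objective string (per the diffs in this commit)
LOSS_TYPE_TO_OBJECTIVE = {
    0: 'reg:linear',       # squared-error regression
    1: 'reg:logistic',     # per the conf comments; one demo switches to binary:logitraw instead
    2: 'binary:logistic',  # binary classification, probability output
    3: 'binary:logitraw',  # binary classification, raw score before the logistic transform
}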
@@ -14,8 +14,8 @@ namespace xgboost{
     namespace regrank{
         class RegressionObj : public IObjFunction{
         public:
-            RegressionObj(void){
-                loss.loss_type = LossType::kLinearSquare;
+            RegressionObj( int loss_type ){
+                loss.loss_type = loss_type;
             }
             virtual ~RegressionObj(){}
             virtual void SetParam(const char *name, const char *val){