tqchen 2014-05-15 18:56:28 -07:00
parent e2d13db24e
commit c22df2b31a
3 changed files with 46 additions and 36 deletions

View File

@@ -10,7 +10,7 @@ OBJ =
 all: $(BIN) $(OBJ)
 export LDFLAGS= -pthread -lm
-xgboost: regrank/xgboost_regrank_main.cpp regrank/*.h booster/*.h booster/*/*.hpp booster/*.hpp
+xgboost: regrank/xgboost_regrank_main.cpp regrank/*.h regrank/*.hpp booster/*.h booster/*/*.hpp booster/*.hpp
 $(BIN) :

View File

@@ -2,9 +2,9 @@
 # choose the tree booster, 0: tree, 1: linear
 booster_type = 0
-objective="rank:pairwise"
+#objective="rank:pairwise"
 #objective="rank:softmax"
-#objective="lambdarank:map"
+objective="lambdarank:map"
 #objective="lambdarank:ndcg"
 # Tree Booster Parameters
@@ -13,7 +13,7 @@ bst:eta = 0.1
 # minimum loss reduction required to make a further partition
 bst:gamma = 1.0
 # minimum sum of instance weight(hessian) needed in a child
-bst:min_child_weight = 1
+bst:min_child_weight = 0.1
 # maximum depth of a tree
 bst:max_depth = 6
 eval_metric = "ndcg"
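
A hedged aside on the bst:min_child_weight change (editor's illustration, not part of the commit): the parameter bounds the hessian sum a leaf must retain, and the LambdaRankObj constructor in this commit defaults its loss_type to a raw logistic loss, whose per-instance hessian p*(1-p) never exceeds 0.25. A leaf covering only a few instances therefore easily falls below the old threshold of 1, which may be why the threshold drops to 0.1. A minimal C++ sketch of the arithmetic (the margin values are made up):

#include <cmath>
#include <cstdio>

int main() {
    // hessian of the logistic loss at a few illustrative margins
    const float margins[] = {0.0f, 1.0f, 2.0f, 4.0f};
    float sum = 0.0f;
    for (int i = 0; i < 4; ++i) {
        float p = 1.0f / (1.0f + std::exp(-margins[i]));  // sigmoid
        float h = p * (1.0f - p);                         // per-instance hessian, <= 0.25
        sum += h;
        std::printf("margin=%g  p=%.3f  hessian=%.4f\n", margins[i], p, h);
    }
    // four instances together still sum to well under 1.0
    std::printf("hessian sum = %.4f\n", sum);
    return 0;
}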

View File

@@ -261,7 +261,7 @@ namespace xgboost{
             }
         }
         virtual const char* DefaultEvalMetric(void) {
-            return "ndcg";
+            return "map";
         }
     private:
         inline void AddGradient( unsigned pid, unsigned nid,
@@ -285,23 +285,36 @@ namespace xgboost{
         };
     };
     namespace regrank{
         class LambdaRankObj : public IObjFunction{
         public:
-            LambdaRankObj(void){}
+            LambdaRankObj(void){
+                loss_.loss_type = LossType::kLogisticRaw;
+            }
             virtual ~LambdaRankObj(){}
             virtual void SetParam(const char *name, const char *val){
                 if( !strcmp( "loss_type", name ) ) loss_.loss_type = atoi( val );
                 if( !strcmp( "fix_list_weight", name ) ) fix_list_weight_ = (float)atof( val );
             }
         private:
             LossType loss_;
             float fix_list_weight_;
         protected:
+            /*! \brief helper information in a list */
+            struct ListEntry{
+                /*! \brief the predicted score of the entry */
+                float pred;
+                /*! \brief the actual label of the entry */
+                float label;
+                /*! \brief row index in the data matrix */
+                unsigned rindex;
+                // constructor
+                ListEntry(float pred, float label, unsigned rindex): pred(pred), label(label), rindex(rindex){}
+                // comparator by prediction: inverted so that sorting puts larger predictions first
+                inline bool operator<(const ListEntry &p) const{
+                    return pred > p.pred;
+                }
+            };
             class Triple{
             public:
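
An aside on the ListEntry comparator added above (editor's sketch, not code from the commit): operator< compares with >, so a plain ascending std::sort leaves a list ordered by descending prediction, i.e. best-ranked entries first. A standalone snippet reproducing the struct to show the effect:

#include <algorithm>
#include <cstdio>
#include <vector>

struct ListEntry {
    float pred, label;
    unsigned rindex;
    ListEntry(float pred, float label, unsigned rindex)
        : pred(pred), label(label), rindex(rindex) {}
    // inverted comparison: "smaller" means higher prediction
    inline bool operator<(const ListEntry &p) const { return pred > p.pred; }
};

int main() {
    std::vector<ListEntry> lst;
    lst.push_back(ListEntry(0.2f, 0.0f, 0));
    lst.push_back(ListEntry(0.9f, 1.0f, 1));
    lst.push_back(ListEntry(0.5f, 0.0f, 2));
    std::sort(lst.begin(), lst.end());
    // prints rindex 1, then 2, then 0: descending by prediction
    for (size_t i = 0; i < lst.size(); ++i)
        std::printf("rindex=%u pred=%g\n", lst[i].rindex, lst[i].pred);
    return 0;
}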
@@ -431,8 +444,6 @@ namespace xgboost{
                     i = j;
                 }
             }
         public:
             virtual void GetGradient(const std::vector<float>& preds,
                                      const DMatrix::Info &info,
@@ -449,7 +460,6 @@ namespace xgboost{
                 GetGroupGradient(preds, info.labels, group_index, grad, hess, pairs, i);
             }
         }
         virtual const char* DefaultEvalMetric(void) {
             return "auc";
         }