[REFACTOR] Add alias, allow missing variables, init gbm interface

tqchen
2016-01-02 04:40:49 -08:00
parent 4f26d98150
commit e4567bbc47
16 changed files with 145 additions and 96 deletions

View File

@@ -12,7 +12,7 @@
namespace xgboost {
namespace common {
/*!
-* \brief Random Engine
+* \brief Define mt19937 as default type Random Engine.
*/
typedef std::mt19937 RandomEngine;
/*!

View File

@@ -6,6 +6,7 @@
#include <xgboost/objective.h>
#include <xgboost/metric.h>
#include <xgboost/tree_updater.h>
+#include <xgboost/gbm.h>
#include "./common/random.h"
#include "./common/base64.h"
@@ -13,6 +14,7 @@ namespace dmlc {
DMLC_REGISTRY_ENABLE(::xgboost::ObjFunctionReg);
DMLC_REGISTRY_ENABLE(::xgboost::MetricReg);
DMLC_REGISTRY_ENABLE(::xgboost::TreeUpdaterReg);
+DMLC_REGISTRY_ENABLE(::xgboost::GradientBoosterReg);
} // namespace dmlc
namespace xgboost {
@@ -45,7 +47,6 @@ Metric* Metric::Create(const char* name) {
}
}
// implement factory functions
TreeUpdater* TreeUpdater::Create(const char* name) {
auto *e = ::dmlc::Registry< ::xgboost::TreeUpdaterReg>::Get()->Find(name);
if (e == nullptr) {
@@ -54,6 +55,14 @@ TreeUpdater* TreeUpdater::Create(const char* name) {
return (e->body)();
}
+GradientBooster* GradientBooster::Create(const char* name) {
+auto *e = ::dmlc::Registry< ::xgboost::GradientBoosterReg>::Get()->Find(name);
+if (e == nullptr) {
+LOG(FATAL) << "Unknown gbm type " << name;
+}
+return (e->body)();
+}
namespace common {
RandomEngine& GlobalRandom() {
static RandomEngine inst;
@@ -61,4 +70,3 @@ RandomEngine& GlobalRandom() {
}
}
} // namespace xgboost
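
The new GradientBooster::Create follows the same factory pattern as the ObjFunction, Metric, and TreeUpdater factories above it: each implementation registers a construction closure under a string name, and Create resolves that name through the registry at runtime. Below is a self-contained sketch of the lookup pattern using a simplified toy registry, not the actual dmlc-core API; Booster, Registry, Create, and GBTreeLike are made-up names.

#include <functional>
#include <iostream>
#include <map>
#include <string>

struct Booster {
  virtual ~Booster() = default;
  virtual const char* name() const = 0;
};

// Toy registry: maps a gbm name to a factory closure.
std::map<std::string, std::function<Booster*()> >& Registry() {
  static std::map<std::string, std::function<Booster*()> > inst;
  return inst;
}

// Analogue of GradientBooster::Create: look up the name, fail loudly if unknown.
Booster* Create(const std::string& name) {
  auto it = Registry().find(name);
  if (it == Registry().end()) {
    std::cerr << "Unknown gbm type " << name << '\n';  // the real code uses LOG(FATAL)
    return nullptr;
  }
  return it->second();
}

struct GBTreeLike : public Booster {
  const char* name() const override { return "gbtree"; }
};

int main() {
  Registry()["gbtree"] = []() { return new GBTreeLike(); };
  Booster* gbm = Create("gbtree");
  std::cout << gbm->name() << '\n';  // prints: gbtree
  delete gbm;
  return 0;
}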

View File

@@ -30,7 +30,7 @@ class SoftmaxMultiClassObj : public ObjFunction {
: output_prob_(output_prob) {
}
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param_.Init(args);
+param_.InitAllowUnknown(args);
}
void GetGradient(const std::vector<float>& preds,
const MetaInfo& info,

View File

@@ -33,7 +33,7 @@ struct LambdaRankParam : public dmlc::Parameter<LambdaRankParam> {
class LambdaRankObj : public ObjFunction {
public:
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param_.Init(args);
+param_.InitAllowUnknown(args);
}
void GetGradient(const std::vector<float>& preds,
const MetaInfo& info,

View File

@@ -77,7 +77,7 @@ template<typename Loss>
class RegLossObj : public ObjFunction {
public:
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param_.Init(args);
+param_.InitAllowUnknown(args);
}
void GetGradient(const std::vector<float> &preds,
const MetaInfo &info,
@@ -156,7 +156,7 @@ class PoissonRegression : public ObjFunction {
public:
// declare functions
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param_.Init(args);
+param_.InitAllowUnknown(args);
}
void GetGradient(const std::vector<float> &preds,
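
All of the objectives above switch from param_.Init(args) to param_.InitAllowUnknown(args). dmlc::Parameter::Init raises an error on the first unrecognized key, while InitAllowUnknown skips unknown keys (and returns them), so the learner can pass its full argument list to every component without each one rejecting keys meant for others. A minimal sketch of the difference, assuming dmlc-core is on the include path; MyParam, scale, and num_round are made-up names.

#include <dmlc/parameter.h>
#include <string>
#include <utility>
#include <vector>

struct MyParam : public dmlc::Parameter<MyParam> {
  float scale;
  DMLC_DECLARE_PARAMETER(MyParam) {
    DMLC_DECLARE_FIELD(scale).set_default(1.0f);
  }
};
DMLC_REGISTER_PARAMETER(MyParam);

int main() {
  std::vector<std::pair<std::string, std::string> > args = {
    {"scale", "0.5"},     // recognized: sets MyParam::scale
    {"num_round", "10"}   // unknown to MyParam
  };
  MyParam p;
  // p.Init(args);            // would fail on the unknown key "num_round"
  p.InitAllowUnknown(args);   // sets scale, ignores num_round
  return 0;
}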

View File

@@ -16,9 +16,9 @@ namespace tree {
/*! \brief training parameters for regression tree */
struct TrainParam : public dmlc::Parameter<TrainParam> {
// learning step size for a time
-float eta;
+float learning_rate;
// minimum loss change required for a split
-float gamma;
+float min_split_loss;
// maximum depth of a tree
int max_depth;
//----- the rest parameters are less important ----
@@ -59,9 +59,9 @@ struct TrainParam : public dmlc::Parameter<TrainParam> {
bool silent;
// declare the parameters
DMLC_DECLARE_PARAMETER(TrainParam) {
-DMLC_DECLARE_FIELD(eta).set_lower_bound(0.0f).set_default(0.3f)
+DMLC_DECLARE_FIELD(learning_rate).set_lower_bound(0.0f).set_default(0.3f)
.describe("Learning rate(step size) of update.");
-DMLC_DECLARE_FIELD(gamma).set_lower_bound(0.0f).set_default(0.0f)
+DMLC_DECLARE_FIELD(min_split_loss).set_lower_bound(0.0f).set_default(0.0f)
.describe("Minimum loss reduction required to make a further partition.");
DMLC_DECLARE_FIELD(max_depth).set_lower_bound(0).set_default(6)
.describe("Maximum depth of the tree.");
@@ -101,6 +101,11 @@ struct TrainParam : public dmlc::Parameter<TrainParam> {
.describe("Number of threads used for training.");
DMLC_DECLARE_FIELD(silent).set_default(false)
.describe("Not print information during trainig.");
+// add alias of parameters
+DMLC_DECLARE_ALIAS(reg_lambda, lambda);
+DMLC_DECLARE_ALIAS(reg_alpha, alpha);
+DMLC_DECLARE_ALIAS(min_split_loss, gamma);
+DMLC_DECLARE_ALIAS(learning_rate, eta);
}
// calculate the cost of loss function
@@ -159,7 +164,7 @@ struct TrainParam : public dmlc::Parameter<TrainParam> {
}
/*! \brief given the loss change, whether we need to invoke pruning */
inline bool need_prune(double loss_chg, int depth) const {
-return loss_chg < this->gamma;
+return loss_chg < this->min_split_loss;
}
/*! \brief whether we can split with current hessian */
inline bool cannot_split(double sum_hess, int depth) const {
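
The alias declarations keep the old external names usable after the field renames: DMLC_DECLARE_ALIAS(field, alias) makes an argument arriving under either name land in the same field, so configurations that still say eta or gamma map onto learning_rate and min_split_loss. A minimal sketch of the mechanism, assuming dmlc-core; RateParam is a made-up name.

#include <dmlc/parameter.h>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

struct RateParam : public dmlc::Parameter<RateParam> {
  float learning_rate;
  DMLC_DECLARE_PARAMETER(RateParam) {
    DMLC_DECLARE_FIELD(learning_rate).set_lower_bound(0.0f).set_default(0.3f)
        .describe("Learning rate (step size) of update.");
    DMLC_DECLARE_ALIAS(learning_rate, eta);  // old name still accepted
  }
};
DMLC_REGISTER_PARAMETER(RateParam);

int main() {
  RateParam p;
  std::vector<std::pair<std::string, std::string> > args = {{"eta", "0.1"}};
  p.Init(args);  // "eta" resolves to the learning_rate field
  std::cout << p.learning_rate << '\n';  // prints: 0.1
  return 0;
}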

View File

@@ -29,7 +29,7 @@ namespace tree {
class BaseMaker: public TreeUpdater {
public:
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param.Init(args);
+param.InitAllowUnknown(args);
}
protected:

View File

@@ -20,7 +20,7 @@ template<typename TStats>
class ColMaker: public TreeUpdater {
public:
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param.Init(args);
+param.InitAllowUnknown(args);
}
void Update(const std::vector<bst_gpair> &gpair,
@@ -28,14 +28,14 @@ class ColMaker: public TreeUpdater {
const std::vector<RegTree*> &trees) override {
TStats::CheckInfo(dmat->info());
// rescale learning rate according to size of trees
-float lr = param.eta;
-param.eta = lr / trees.size();
+float lr = param.learning_rate;
+param.learning_rate = lr / trees.size();
// build tree
for (size_t i = 0; i < trees.size(); ++i) {
Builder builder(param);
builder.Update(gpair, dmat, trees[i]);
}
-param.eta = lr;
+param.learning_rate = lr;
}
protected:
@@ -95,7 +95,7 @@ class ColMaker: public TreeUpdater {
// set all the rest expanding nodes to leaf
for (size_t i = 0; i < qexpand_.size(); ++i) {
const int nid = qexpand_[i];
-(*p_tree)[nid].set_leaf(snode[nid].weight * param.eta);
+(*p_tree)[nid].set_leaf(snode[nid].weight * param.learning_rate);
}
// remember auxiliary statistics in the tree node
for (int nid = 0; nid < p_tree->param.num_nodes; ++nid) {
@@ -606,7 +606,7 @@ class ColMaker: public TreeUpdater {
(*p_tree)[(*p_tree)[nid].cleft()].set_leaf(0.0f, 0);
(*p_tree)[(*p_tree)[nid].cright()].set_leaf(0.0f, 0);
} else {
-(*p_tree)[nid].set_leaf(e.weight * param.eta);
+(*p_tree)[nid].set_leaf(e.weight * param.learning_rate);
}
}
}
@@ -732,7 +732,7 @@ class DistColMaker : public ColMaker<TStats> {
pruner.reset(TreeUpdater::Create("prune"));
}
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param.Init(args);
+param.InitAllowUnknown(args);
pruner->Init(args);
}
void Update(const std::vector<bst_gpair> &gpair,
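
The updaters in this and the following files share one idiom around the renamed field: when a boosting iteration emits several trees, learning_rate is temporarily divided by trees.size() so the new trees jointly contribute a single learning-rate step, then restored afterwards. A self-contained sketch of the save/scale/restore pattern; TrainParamLike and UpdateTrees are made-up names standing in for TrainParam and the updaters' Update methods.

#include <cstddef>

struct TrainParamLike {
  float learning_rate = 0.3f;
};

void UpdateTrees(TrainParamLike& param, std::size_t num_trees) {
  float lr = param.learning_rate;         // save the configured rate
  param.learning_rate = lr / num_trees;   // each tree gets an equal share
  for (std::size_t i = 0; i < num_trees; ++i) {
    // ... build or refresh tree i using the scaled rate ...
  }
  param.learning_rate = lr;               // restore before returning
}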

View File

@@ -23,13 +23,13 @@ class HistMaker: public BaseMaker {
const std::vector<RegTree*> &trees) override {
TStats::CheckInfo(p_fmat->info());
// rescale learning rate according to size of trees
-float lr = param.eta;
-param.eta = lr / trees.size();
+float lr = param.learning_rate;
+param.learning_rate = lr / trees.size();
// build tree
for (size_t i = 0; i < trees.size(); ++i) {
this->Update(gpair, p_fmat, trees[i]);
}
-param.eta = lr;
+param.learning_rate = lr;
}
protected:
@@ -139,7 +139,7 @@ class HistMaker: public BaseMaker {
}
for (size_t i = 0; i < qexpand.size(); ++i) {
const int nid = qexpand[i];
-(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.eta);
+(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.learning_rate);
}
}
// this function does two jobs
@@ -246,7 +246,7 @@ class HistMaker: public BaseMaker {
this->SetStats(p_tree, (*p_tree)[nid].cleft(), left_sum[wid]);
this->SetStats(p_tree, (*p_tree)[nid].cright(), right_sum);
} else {
-(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.eta);
+(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.learning_rate);
}
}
}

View File

@@ -22,7 +22,7 @@ class TreePruner: public TreeUpdater {
}
// set training parameter
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param.Init(args);
+param.InitAllowUnknown(args);
syncher->Init(args);
}
// update the tree, do pruning
@@ -30,12 +30,12 @@ class TreePruner: public TreeUpdater {
DMatrix *p_fmat,
const std::vector<RegTree*> &trees) override {
// rescale learning rate according to size of trees
-float lr = param.eta;
-param.eta = lr / trees.size();
+float lr = param.learning_rate;
+param.learning_rate = lr / trees.size();
for (size_t i = 0; i < trees.size(); ++i) {
this->DoPrune(*trees[i]);
}
-param.eta = lr;
+param.learning_rate = lr;
syncher->Update(gpair, p_fmat, trees);
}
@@ -48,7 +48,7 @@ class TreePruner: public TreeUpdater {
++s.leaf_child_cnt;
if (s.leaf_child_cnt >= 2 && param.need_prune(s.loss_chg, depth - 1)) {
// need to be pruned
-tree.ChangeToLeaf(pid, param.eta * s.base_weight);
+tree.ChangeToLeaf(pid, param.learning_rate * s.base_weight);
// tail recursion
return this->TryPruneLeaf(tree, pid, depth - 1, npruned + 2);
} else {

View File

@@ -19,7 +19,7 @@ template<typename TStats>
class TreeRefresher: public TreeUpdater {
public:
void Init(const std::vector<std::pair<std::string, std::string> >& args) override {
-param.Init(args);
+param.InitAllowUnknown(args);
}
// update the tree, do pruning
void Update(const std::vector<bst_gpair> &gpair,
@@ -94,8 +94,8 @@ class TreeRefresher: public TreeUpdater {
reducer.Allreduce(dmlc::BeginPtr(stemp[0]), stemp[0].size());
#endif
// rescale learning rate according to size of trees
-float lr = param.eta;
-param.eta = lr / trees.size();
+float lr = param.learning_rate;
+param.learning_rate = lr / trees.size();
int offset = 0;
for (size_t i = 0; i < trees.size(); ++i) {
for (int rid = 0; rid < trees[i]->param.num_roots; ++rid) {
@@ -104,7 +104,7 @@ class TreeRefresher: public TreeUpdater {
offset += trees[i]->param.num_nodes;
}
// set learning rate back
-param.eta = lr;
+param.learning_rate = lr;
}
private:
@@ -131,7 +131,7 @@ class TreeRefresher: public TreeUpdater {
tree.stat(nid).sum_hess = static_cast<float>(gstats[nid].sum_hess);
gstats[nid].SetLeafVec(param, tree.leafvec(nid));
if (tree[nid].is_leaf()) {
-tree[nid].set_leaf(tree.stat(nid).base_weight * param.eta);
+tree[nid].set_leaf(tree.stat(nid).base_weight * param.learning_rate);
} else {
tree.stat(nid).loss_chg = static_cast<float>(
gstats[tree[nid].cleft()].CalcGain(param) +

View File

@@ -24,13 +24,13 @@ class SketchMaker: public BaseMaker {
DMatrix *p_fmat,
const std::vector<RegTree*> &trees) override {
// rescale learning rate according to size of trees
-float lr = param.eta;
-param.eta = lr / trees.size();
+float lr = param.learning_rate;
+param.learning_rate = lr / trees.size();
// build tree
for (size_t i = 0; i < trees.size(); ++i) {
this->Update(gpair, p_fmat, trees[i]);
}
-param.eta = lr;
+param.learning_rate = lr;
}
protected:
@@ -67,7 +67,7 @@ class SketchMaker: public BaseMaker {
// set left leaves
for (size_t i = 0; i < qexpand.size(); ++i) {
const int nid = qexpand[i];
-(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.eta);
+(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.learning_rate);
}
}
// define the sketch we want to use
@@ -302,7 +302,7 @@ class SketchMaker: public BaseMaker {
(*p_tree)[(*p_tree)[nid].cleft()].set_leaf(0.0f, 0);
(*p_tree)[(*p_tree)[nid].cright()].set_leaf(0.0f, 0);
} else {
-(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.eta);
+(*p_tree)[nid].set_leaf(p_tree->stat(nid).base_weight * param.learning_rate);
}
}
}