lint learner finish

tqchen 2015-07-03 19:20:45 -07:00
parent 1581de08da
commit aba41d07cd
8 changed files with 127 additions and 112 deletions

dmatrix.h

@@ -1,11 +1,13 @@
-#ifndef XGBOOST_LEARNER_DMATRIX_H_
-#define XGBOOST_LEARNER_DMATRIX_H_
 /*!
+ * Copyright 2014 by Contributors
  * \file dmatrix.h
  * \brief meta data and template data structure
  *   used for regression/classification/ranking
  * \author Tianqi Chen
  */
+#ifndef XGBOOST_LEARNER_DMATRIX_H_
+#define XGBOOST_LEARNER_DMATRIX_H_
 #include <vector>
 #include <cstring>
 #include "../data.h"
@@ -66,7 +68,7 @@ struct MetaInfo {
       return 1.0f;
     }
   }
-  inline void SaveBinary(utils::IStream &fo) const {
+  inline void SaveBinary(utils::IStream &fo) const { // NOLINT(*)
     int version = kVersion;
     fo.Write(&version, sizeof(version));
     fo.Write(&info.num_row, sizeof(info.num_row));
@@ -77,7 +79,7 @@ struct MetaInfo {
     fo.Write(info.root_index);
     fo.Write(base_margin);
   }
-  inline void LoadBinary(utils::IStream &fi) {
+  inline void LoadBinary(utils::IStream &fi) { // NOLINT(*)
     int version;
     utils::Check(fi.Read(&version, sizeof(version)) != 0, "MetaInfo: invalid format");
     utils::Check(fi.Read(&info.num_row, sizeof(info.num_row)) != 0, "MetaInfo: invalid format");
@@ -114,7 +116,7 @@ struct MetaInfo {
     return labels;
   }
   inline const std::vector<float>& GetFloatInfo(const char *field) const {
-    return ((MetaInfo*)this)->GetFloatInfo(field);
+    return ((MetaInfo*)this)->GetFloatInfo(field); // NOLINT(*)
   }
   inline std::vector<unsigned> &GetUIntInfo(const char *field) {
     using namespace std;
@@ -124,7 +126,7 @@ struct MetaInfo {
     return info.root_index;
   }
   inline const std::vector<unsigned> &GetUIntInfo(const char *field) const {
-    return ((MetaInfo*)this)->GetUIntInfo(field);
+    return ((MetaInfo*)this)->GetUIntInfo(field); // NOLINT(*)
   }
   // try to load weight information from file, if exists
   inline bool TryLoadFloatInfo(const char *field, const char* fname, bool silent = false) {
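Note on the // NOLINT(*) markers added above: cpplint's runtime/references rule flags mutable reference parameters (it prefers const references or pointers), but the serialization interface here intentionally takes utils::IStream &, so the warning is suppressed per line instead of changing the API. A minimal sketch of the same pattern, using a hypothetical Stream type rather than xgboost's utils::IStream:

#include <cstddef>
#include <cstdio>

// Hypothetical stand-in for a binary output stream interface.
struct Stream {
  void Write(const void *data, std::size_t size) {
    std::fwrite(data, 1, size, stdout);
  }
};

struct Meta {
  int num_row;
  // cpplint would normally flag the non-const reference parameter;
  // the trailing comment suppresses all lint categories for this line only.
  void SaveBinary(Stream &fo) const {  // NOLINT(*)
    fo.Write(&num_row, sizeof(num_row));
  }
};

int main() {
  Stream fo;
  Meta m = {42};
  m.SaveBinary(fo);  // writes the raw bytes of num_row to stdout
  return 0;
}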

evaluation-inl.hpp

@@ -1,10 +1,12 @@
-#ifndef XGBOOST_LEARNER_EVALUATION_INL_HPP_
-#define XGBOOST_LEARNER_EVALUATION_INL_HPP_
 /*!
+ * Copyright 2014 by Contributors
  * \file xgboost_evaluation-inl.hpp
  * \brief evaluation metrics for regression and classification and rank
  * \author Kailong Chen, Tianqi Chen
  */
+#ifndef XGBOOST_LEARNER_EVALUATION_INL_HPP_
+#define XGBOOST_LEARNER_EVALUATION_INL_HPP_
 #include <vector>
 #include <utility>
 #include <string>
@@ -344,7 +346,8 @@ struct EvalPrecisionRatio : public IEvaluator{
   }
  protected:
-  inline double CalcPRatio(const std::vector< std::pair<float, unsigned> >& rec, const MetaInfo &info) const {
+  inline double CalcPRatio(const std::vector< std::pair<float, unsigned> >& rec,
+                           const MetaInfo &info) const {
     size_t cutoff = static_cast<size_t>(ratio_ * rec.size());
     double wt_hit = 0.0, wsum = 0.0, wt_sum = 0.0;
     for (size_t j = 0; j < cutoff; ++j) {
@@ -489,7 +492,7 @@ struct EvalRankList : public IEvaluator {
     }
   }
   /*! \return evaluation metric, given the pair_sort record, (pred,label) */
-  virtual float EvalMetric(std::vector< std::pair<float, unsigned> > &pair_sort) const = 0;
+  virtual float EvalMetric(std::vector< std::pair<float, unsigned> > &pair_sort) const = 0; // NOLINT(*)
  protected:
   unsigned topn_;
@@ -530,7 +533,7 @@ struct EvalNDCG : public EvalRankList{
     }
     return static_cast<float>(sumdcg);
   }
-  virtual float EvalMetric(std::vector< std::pair<float, unsigned> > &rec) const {
+  virtual float EvalMetric(std::vector< std::pair<float, unsigned> > &rec) const { // NOLINT(*)
     std::stable_sort(rec.begin(), rec.end(), CmpFirst);
     float dcg = this->CalcDCG(rec);
     std::stable_sort(rec.begin(), rec.end(), CmpSecond);
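The CalcPRatio change above is purely a layout fix: cpplint's whitespace/line_length check caps line length (80 columns by default, sometimes relaxed to 100), so the long parameter list is split and the continuation aligned under the first argument. A small self-contained sketch of the same wrapping style, with made-up names:

#include <cstddef>
#include <cstdio>
#include <utility>
#include <vector>

// A long signature wrapped so that every line stays within the column limit;
// the second parameter is aligned under the first one.
inline double HitRatio(const std::vector< std::pair<float, unsigned> > &rec,
                       double cutoff_ratio) {
  std::size_t cutoff = static_cast<std::size_t>(cutoff_ratio * rec.size());
  double hits = 0.0;
  for (std::size_t j = 0; j < cutoff; ++j) {
    hits += rec[j].second;  // second holds a 0/1 relevance label
  }
  return rec.empty() ? 0.0 : hits / static_cast<double>(rec.size());
}

int main() {
  std::vector< std::pair<float, unsigned> > rec(4, std::make_pair(0.5f, 1u));
  std::printf("%g\n", HitRatio(rec, 0.5));  // prints 0.5
  return 0;
}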

evaluation.h

@@ -1,10 +1,12 @@
-#ifndef XGBOOST_LEARNER_EVALUATION_H_
-#define XGBOOST_LEARNER_EVALUATION_H_
 /*!
+ * Copyright 2014 by Contributors
  * \file evaluation.h
  * \brief interface of evaluation function supported in xgboost
  * \author Tianqi Chen, Kailong Chen
  */
+#ifndef XGBOOST_LEARNER_EVALUATION_H_
+#define XGBOOST_LEARNER_EVALUATION_H_
 #include <string>
 #include <vector>
 #include <cstdio>

helper_utils.h

@@ -1,10 +1,12 @@
-#ifndef XGBOOST_LEARNER_HELPER_UTILS_H_
-#define XGBOOST_LEARNER_HELPER_UTILS_H_
 /*!
+ * Copyright 2014 by Contributors
  * \file helper_utils.h
  * \brief useful helper functions
  * \author Tianqi Chen, Kailong Chen
  */
+#ifndef XGBOOST_LEARNER_HELPER_UTILS_H_
+#define XGBOOST_LEARNER_HELPER_UTILS_H_
 #include <utility>
 #include <vector>
 #include <cmath>

learner-inl.hpp

@@ -1,10 +1,12 @@
-#ifndef XGBOOST_LEARNER_LEARNER_INL_HPP_
-#define XGBOOST_LEARNER_LEARNER_INL_HPP_
 /*!
+ * Copyright 2014 by Contributors
  * \file learner-inl.hpp
  * \brief learning algorithm
  * \author Tianqi Chen
  */
+#ifndef XGBOOST_LEARNER_LEARNER_INL_HPP_
+#define XGBOOST_LEARNER_LEARNER_INL_HPP_
 #include <algorithm>
 #include <vector>
 #include <utility>
@@ -68,7 +70,7 @@ class BoostLearner : public rabit::Serializable {
     }
     char str_temp[25];
     utils::SPrintf(str_temp, sizeof(str_temp), "%lu",
-                   static_cast<unsigned long>(buffer_size));
+                   static_cast<unsigned long>(buffer_size)); // NOLINT(*)
     this->SetParam("num_pbuffer", str_temp);
     this->pred_buffer_size = buffer_size;
   }
@@ -161,7 +163,7 @@ class BoostLearner : public rabit::Serializable {
    * \param fi input stream
    * \param calc_num_feature whether call InitTrainer with calc_num_feature
    */
-  inline void LoadModel(utils::IStream &fi,
+  inline void LoadModel(utils::IStream &fi, // NOLINT(*)
                         bool calc_num_feature = true) {
     utils::Check(fi.Read(&mparam, sizeof(ModelParam)) != 0,
                  "BoostLearner: wrong model format");
@@ -228,7 +230,7 @@ class BoostLearner : public rabit::Serializable {
     }
     delete fi;
   }
-  inline void SaveModel(utils::IStream &fo, bool with_pbuffer) const {
+  inline void SaveModel(utils::IStream &fo, bool with_pbuffer) const { // NOLINT(*)
     ModelParam p = mparam;
     p.saved_with_pbuffer = static_cast<int>(with_pbuffer);
     fo.Write(&p, sizeof(ModelParam));
@@ -345,8 +347,7 @@ class BoostLearner : public rabit::Serializable {
                       bool output_margin,
                       std::vector<float> *out_preds,
                       unsigned ntree_limit = 0,
-                      bool pred_leaf = false
-                      ) const {
+                      bool pred_leaf = false) const {
     if (pred_leaf) {
       gbm_->PredictLeaf(data.fmat(), data.info.info, out_preds, ntree_limit);
     } else {
@@ -517,7 +518,7 @@ class BoostLearner : public rabit::Serializable {
  protected:
   // magic number to transform random seed
-  const static int kRandSeedMagic = 127;
+  static const int kRandSeedMagic = 127;
   // cache entry object that helps handle feature caching
   struct CacheEntry {
     const DMatrix *mat_;
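The kRandSeedMagic edit reflects cpplint's build/storage_class rule: storage-class specifiers such as static should come before type qualifiers, so "const static int" becomes "static const int". Both forms declare the same class-wide constant; a minimal illustration (the class name is made up):

class SeedMixer {
 public:
  static int Mix(int seed) { return seed * kSeedMagic; }

 protected:
  // cpplint accepts "static const"; "const static" would be flagged because
  // the storage-class specifier is expected to appear first.
  static const int kSeedMagic = 127;
};

int main() { return SeedMixer::Mix(1) == 127 ? 0 : 1; }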

objective-inl.hpp

@@ -1,10 +1,12 @@
-#ifndef XGBOOST_LEARNER_OBJECTIVE_INL_HPP_
-#define XGBOOST_LEARNER_OBJECTIVE_INL_HPP_
 /*!
+ * Copyright 2014 by Contributors
  * \file objective-inl.hpp
  * \brief objective function implementations
  * \author Tianqi Chen, Kailong Chen
  */
+#ifndef XGBOOST_LEARNER_OBJECTIVE_INL_HPP_
+#define XGBOOST_LEARNER_OBJECTIVE_INL_HPP_
 #include <vector>
 #include <algorithm>
 #include <utility>
@@ -176,7 +178,7 @@ class RegLossObj : public IObjFunction {
 // poisson regression for count
 class PoissonRegression : public IObjFunction {
  public:
-  explicit PoissonRegression(void) {
+  PoissonRegression(void) {
     max_delta_step = 0.0f;
   }
   virtual ~PoissonRegression(void) {}
@@ -201,9 +203,9 @@ class PoissonRegression : public IObjFunction {
     // check if label in range
     bool label_correct = true;
     // start calculating gradient
-    const long ndata = static_cast<bst_omp_uint>(preds.size());
+    const long ndata = static_cast<bst_omp_uint>(preds.size()); // NOLINT(*)
     #pragma omp parallel for schedule(static)
-    for (long i = 0; i < ndata; ++i) {
+    for (long i = 0; i < ndata; ++i) { // NOLINT(*)
       float p = preds[i];
       float w = info.GetWeight(i);
       float y = info.labels[i];
@@ -219,9 +221,9 @@ class PoissonRegression : public IObjFunction {
   }
   virtual void PredTransform(std::vector<float> *io_preds) {
     std::vector<float> &preds = *io_preds;
-    const long ndata = static_cast<long>(preds.size());
+    const long ndata = static_cast<long>(preds.size()); // NOLINT(*)
     #pragma omp parallel for schedule(static)
-    for (long j = 0; j < ndata; ++j) {
+    for (long j = 0; j < ndata; ++j) { // NOLINT(*)
       preds[j] = std::exp(preds[j]);
     }
   }
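The loops above keep plain long indices because the OpenMP runtimes xgboost targeted at the time (notably MSVC's OpenMP 2.0) require a signed integral loop variable, while cpplint's runtime/int rule discourages bare long in favor of fixed-width types; the NOLINT comments resolve the conflict line by line instead of changing the loop type. A standalone sketch of the same pattern (not xgboost code; it compiles with or without OpenMP enabled):

#include <cmath>
#include <cstdio>
#include <vector>

int main() {
  std::vector<float> preds(8, 1.0f);
  // A signed index type keeps older OpenMP implementations happy; the trailing
  // comments tell cpplint to ignore the bare "long" on these lines.
  const long ndata = static_cast<long>(preds.size());  // NOLINT(*)
  #pragma omp parallel for schedule(static)
  for (long i = 0; i < ndata; ++i) {  // NOLINT(*)
    preds[i] = std::exp(preds[i]);
  }
  std::printf("%f\n", preds[0]);  // exp(1) = 2.718282
  return 0;
}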

objective.h

@@ -1,11 +1,14 @@
-#ifndef XGBOOST_LEARNER_OBJECTIVE_H_
-#define XGBOOST_LEARNER_OBJECTIVE_H_
 /*!
+ * Copyright 2014 by Contributors
  * \file objective.h
  * \brief interface of objective function used for gradient boosting
  * \author Tianqi Chen, Kailong Chen
  */
-#include "dmatrix.h"
+#ifndef XGBOOST_LEARNER_OBJECTIVE_H_
+#define XGBOOST_LEARNER_OBJECTIVE_H_
+#include <vector>
+#include "./dmatrix.h"
 namespace xgboost {
 namespace learner {
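Besides relocating the header guard, this hunk adds #include <vector> and spells the project-relative include as "./dmatrix.h". The first addresses cpplint's build/include_what_you_use check (the header's own interface mentions std::vector, so it must include <vector> itself); the second presumably makes the same-directory include explicit for cpplint's include checks. A small illustrative header in the same spirit (names are invented, not xgboost's):

#ifndef MYPROJ_LEARNER_GRADIENT_H_
#define MYPROJ_LEARNER_GRADIENT_H_
#include <cstddef>
#include <vector>

namespace myproj {
// Gradient/hessian pair produced per training instance.
struct GradientPair {
  float grad;
  float hess;
};

// The interface uses std::vector directly, so <vector> is included above
// rather than relying on some other header to pull it in.
inline std::vector<GradientPair> ZeroGradient(std::size_t n) {
  return std::vector<GradientPair>(n, GradientPair());
}
}  // namespace myproj
#endif  // MYPROJ_LEARNER_GRADIENT_H_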

sync.h

@@ -1,13 +1,13 @@
-#ifndef XGBOOST_SYNC_H_
-#define XGBOOST_SYNC_H_
 /*!
+ * Copyright 2014 by Contributors
  * \file sync.h
  * \brief the synchronization module of rabit
  *   redirects to subtree rabit header
  * \author Tianqi Chen
  */
+#ifndef XGBOOST_SYNC_SYNC_H_
+#define XGBOOST_SYNC_SYNC_H_
 #include "../../subtree/rabit/include/rabit.h"
 #include "../../subtree/rabit/include/rabit/timer.h"
-#endif // XGBOOST_SYNC_H_
+#endif // XGBOOST_SYNC_SYNC_H_
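The guard rename here follows cpplint's build/header_guard rule, which derives the expected macro from the header's path relative to the configured root, so a file at sync/sync.h is expected to use XGBOOST_SYNC_SYNC_H_ rather than the shorter XGBOOST_SYNC_H_. A generic sketch of the convention, with an invented project and path:

// For a header at <root>/foo/bar.h in a project whose guards are prefixed
// with MYPROJ, the expected guard macro is MYPROJ_FOO_BAR_H_.
#ifndef MYPROJ_FOO_BAR_H_
#define MYPROJ_FOO_BAR_H_

// ... declarations go here ...

#endif  // MYPROJ_FOO_BAR_H_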