Merge branch 'master' into unity
commit 9e5788a47c

.gitignore (vendored) | 12
@@ -16,7 +16,6 @@
*conf
*buffer
*model
xgboost
*pyc
*train
*test
@@ -24,3 +23,14 @@ xgboost
*rar
*vali
*data
+*sdf
+Release
+*exe*
+*exp
+ipch
+*.filters
+*.user
+*log
+Debug
+*suo
@@ -103,7 +103,7 @@ struct SparseBatch {
const Entry *data_ptr;
/*! \brief get i-th row from the batch */
inline Inst operator[](size_t i) const {
-return Inst(data_ptr + row_ptr[i], row_ptr[i+1] - row_ptr[i]);
+return Inst(data_ptr + row_ptr[i], static_cast<bst_uint>(row_ptr[i+1] - row_ptr[i]));
}
};
@@ -341,7 +341,7 @@ class FMatrixS : public FMatrixInterface<FMatrixS>{
const SparseBatch &batch = iter_->Value();
for (size_t i = 0; i < batch.size; ++i) {
if (pkeep == 1.0f || random::SampleBinary(pkeep)) {
-buffered_rowset_.push_back(batch.base_rowid+i);
+buffered_rowset_.push_back(static_cast<bst_uint>(batch.base_rowid+i));
SparseBatch::Inst inst = batch[i];
for (bst_uint j = 0; j < inst.length; ++j) {
builder.AddBudget(inst[j].findex);
@@ -60,7 +60,7 @@ class GBLinear : public IGradBooster<FMatrix> {
}
}
// remove bias effect
-double dw = param.learning_rate * param.CalcDeltaBias(sum_grad, sum_hess, model.bias()[gid]);
+bst_float dw = static_cast<bst_float>(param.learning_rate * param.CalcDeltaBias(sum_grad, sum_hess, model.bias()[gid]));
model.bias()[gid] += dw;
// update grad value
#pragma omp parallel for schedule(static)
@@ -86,7 +86,7 @@ class GBLinear : public IGradBooster<FMatrix> {
sum_hess += p.hess * v * v;
}
float &w = model[fid][gid];
-double dw = param.learning_rate * param.CalcDelta(sum_grad, sum_hess, w);
+bst_float dw = static_cast<bst_float>(param.learning_rate * param.CalcDelta(sum_grad, sum_hess, w));
w += dw;
// update grad value
for (typename FMatrix::ColIter it = fmat.GetSortedCol(fid); it.Next();) {
@@ -130,8 +130,8 @@ class GBTree : public IGradBooster<FMatrix> {
for (unsigned i = 0; i < nsize; ++i) {
const int tid = omp_get_thread_num();
tree::RegTree::FVec &feats = thread_temp[tid];
-const size_t ridx = batch.base_rowid + i;
-utils::Assert(ridx < info.num_row, "data row index exceed bound");
+int64_t ridx = static_cast<int64_t>(batch.base_rowid + i);
+utils::Assert(static_cast<size_t>(ridx) < info.num_row, "data row index exceed bound");
// loop over output groups
for (int gid = 0; gid < mparam.num_output_group; ++gid) {
this->Pred(batch[i],
@@ -168,15 +168,15 @@ class GBTree : public IGradBooster<FMatrix> {
}
updaters.clear();
std::string tval = tparam.updater_seq;
-char *saveptr, *pstr;
-pstr = strtok_r(&tval[0], ",", &saveptr);
+char *pstr;
+pstr = strtok(&tval[0], ",");
while (pstr != NULL) {
updaters.push_back(tree::CreateUpdater<FMatrix>(pstr));
for (size_t j = 0; j < cfg.size(); ++j) {
// set parameters
updaters.back()->SetParam(cfg[j].first.c_str(), cfg[j].second.c_str());
}
-pstr = strtok_r(NULL, ",", &saveptr);
+pstr = strtok(NULL, ",");
}
tparam.updater_initialized = 1;
}
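Note on the strtok_r to strtok change above: strtok_r is POSIX and unavailable under MSVC, while strtok keeps hidden global state and is not reentrant. A minimal sketch of a portable, reentrant alternative for splitting the updater sequence on commas, using only standard C++ (illustration only, not part of this commit):

#include <sstream>
#include <string>
#include <vector>

// Illustration only: split a comma-separated updater list without strtok/strtok_r.
inline std::vector<std::string> SplitUpdaterSeq(const std::string &seq) {
  std::vector<std::string> parts;
  std::istringstream iss(seq);
  std::string tok;
  while (std::getline(iss, tok, ',')) {
    if (!tok.empty()) parts.push_back(tok);
  }
  return parts;
}
// Usage sketch: for each token, create the updater and apply the configured parameters.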
@@ -217,7 +217,7 @@ class GBTree : public IGradBooster<FMatrix> {
float psum = 0.0f;
// sum of leaf vector
std::vector<float> vec_psum(mparam.size_leaf_vector, 0.0f);
-const int bid = mparam.BufferOffset(buffer_index, bst_group);
+const int64_t bid = mparam.BufferOffset(buffer_index, bst_group);
// load buffered results if any
if (bid >= 0) {
itop = pred_counter[bid];
@@ -336,7 +336,7 @@ class GBTree : public IGradBooster<FMatrix> {
 * \brief get the buffer offset given a buffer index and group id
 * \return calculated buffer offset
 */
-inline size_t BufferOffset(int64_t buffer_index, int bst_group) const {
+inline int64_t BufferOffset(int64_t buffer_index, int bst_group) const {
if (buffer_index < 0) return -1;
utils::Check(buffer_index < num_pbuffer, "buffer_index exceed num_pbuffer");
return (buffer_index + num_pbuffer * bst_group) * (size_leaf_vector + 1);
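The signature change above matters because BufferOffset signals "no buffer" by returning -1: with a size_t return type the -1 wraps to the largest unsigned value, so the caller's "bid >= 0" test in the hunk at -217,7 could never reject it. A standalone sketch of the difference (illustration only):

#include <cstdint>
#include <cstdio>

// Illustration only: why returning -1 from an unsigned function breaks the caller's check.
size_t OffsetUnsigned(int64_t buffer_index) {
  if (buffer_index < 0) return -1;  // wraps to SIZE_MAX
  return static_cast<size_t>(buffer_index);
}
int64_t OffsetSigned(int64_t buffer_index) {
  if (buffer_index < 0) return -1;  // stays -1, so "bid >= 0" can fail as intended
  return buffer_index;
}
int main() {
  std::printf("unsigned result: %zu\n", OffsetUnsigned(-1));
  std::printf("signed result  : %lld\n", static_cast<long long>(OffsetSigned(-1)));
  return 0;
}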
@@ -179,7 +179,7 @@ class DMatrixSimple : public DataMatrix {
 * \param savebuffer whether do save binary buffer if it is text
 */
inline void CacheLoad(const char *fname, bool silent = false, bool savebuffer = true) {
-int len = strlen(fname);
+size_t len = strlen(fname);
if (len > 8 && !strcmp(fname + len - 7, ".buffer")) {
if (!this->LoadBinary(fname, silent)) {
utils::Error("can not open file \"%s\"", fname);
@@ -78,9 +78,9 @@ struct MetaInfo {
}
inline void LoadBinary(utils::IStream &fi) {
int version;
-utils::Check(fi.Read(&version, sizeof(version)), "MetaInfo: invalid format");
-utils::Check(fi.Read(&info.num_row, sizeof(info.num_row)), "MetaInfo: invalid format");
-utils::Check(fi.Read(&info.num_col, sizeof(info.num_col)), "MetaInfo: invalid format");
+utils::Check(fi.Read(&version, sizeof(version)) != 0, "MetaInfo: invalid format");
+utils::Check(fi.Read(&info.num_row, sizeof(info.num_row)) != 0, "MetaInfo: invalid format");
+utils::Check(fi.Read(&info.num_col, sizeof(info.num_col)) != 0, "MetaInfo: invalid format");
utils::Check(fi.Read(&labels), "MetaInfo: invalid format");
utils::Check(fi.Read(&group_ptr), "MetaInfo: invalid format");
utils::Check(fi.Read(&weights), "MetaInfo: invalid format");
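The added "!= 0" comparisons above are most likely there because Read() returns a byte count rather than a bool, and passing an integer where utils::Check expects a bool draws MSVC's forcing-value-to-bool warning; the explicit comparison keeps the meaning and silences it. A trivial sketch under that assumption (the Check helper below is illustrative, not the library's):

#include <cstddef>
#include <cstdio>
#include <cstdlib>

// Illustrative stand-in for utils::Check.
inline void Check(bool cond, const char *msg) {
  if (!cond) { std::fprintf(stderr, "%s\n", msg); std::exit(1); }
}

int main() {
  std::size_t nread = sizeof(int);                // pretend Read() returned 4 bytes
  // Check(nread, "...") would convert size_t to bool implicitly and warn on MSVC.
  Check(nread != 0, "MetaInfo: invalid format");  // explicit and warning-free
  std::puts("header read ok");
  return 0;
}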
@@ -172,7 +172,7 @@ struct EvalAMS : public IEvaluator {
b_fp += wt;
}
if (rec[i].first != rec[i+1].first) {
-double ams = sqrtf(2*((s_tp+b_fp+br) * log(1.0 + s_tp/(b_fp+br)) - s_tp));
+double ams = sqrt(2*((s_tp+b_fp+br) * log(1.0 + s_tp/(b_fp+br)) - s_tp));
if (tams < ams) {
thresindex = i;
tams = ams;
@@ -181,9 +181,9 @@ struct EvalAMS : public IEvaluator {
}
if (ntop == ndata) {
fprintf(stderr, "\tams-ratio=%g", static_cast<float>(thresindex) / ndata);
-return tams;
+return static_cast<float>(tams);
} else {
-return sqrtf(2*((s_tp+b_fp+br) * log(1.0 + s_tp/(b_fp+br)) - s_tp));
+return static_cast<float>(sqrt(2*((s_tp+b_fp+br) * log(1.0 + s_tp/(b_fp+br)) - s_tp)));
}
}
virtual const char *Name(void) const {
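For reference, the quantity computed in both branches above is the approximate median significance (AMS) used in the Higgs challenge, with s_tp the weighted true positives, b_fp the weighted false positives, and br a small regularization constant. A self-contained sketch of the same formula (names and the example constants are illustrative):

#include <cmath>
#include <cstdio>

// AMS = sqrt(2 * ((s + b + br) * ln(1 + s / (b + br)) - s))
double CalcAMS(double s, double b, double br) {
  return std::sqrt(2.0 * ((s + b + br) * std::log(1.0 + s / (b + br)) - s));
}

int main() {
  std::printf("AMS(s=100, b=1000, br=10) = %g\n", CalcAMS(100.0, 1000.0, 10.0));
  return 0;
}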
@@ -211,7 +211,7 @@ struct EvalPrecisionRatio : public IEvaluator{
utils::Assert(preds.size() == info.labels.size(), "label size predict size not match");
std::vector< std::pair<float, unsigned> > rec;
for (size_t j = 0; j < preds.size(); ++j) {
-rec.push_back(std::make_pair(preds[j], j));
+rec.push_back(std::make_pair(preds[j], static_cast<unsigned>(j)));
}
std::sort(rec.begin(), rec.end(), CmpFirst);
double pratio = CalcPRatio(rec, info);
@@ -246,11 +246,12 @@
struct EvalAuc : public IEvaluator {
virtual float Eval(const std::vector<float> &preds,
const MetaInfo &info) const {
utils::Check(info.labels.size() != 0, "label set cannot be empty");
utils::Check(preds.size() % info.labels.size() == 0,
"label size predict size not match");
-std::vector<unsigned> tgptr(2, 0); tgptr[1] = info.labels.size();
+std::vector<unsigned> tgptr(2, 0);
+tgptr[1] = static_cast<unsigned>(info.labels.size());
const std::vector<unsigned> &gptr = info.group_ptr.size() == 0 ? tgptr : info.group_ptr;
utils::Check(gptr.back() == info.labels.size(),
"EvalAuc: group structure must match number of prediction");
@@ -307,7 +308,7 @@ struct EvalRankList : public IEvaluator {
utils::Check(preds.size() == info.labels.size(),
"label size predict size not match");
// quick consistency when group is not available
-std::vector<unsigned> tgptr(2, 0); tgptr[1] = preds.size();
+std::vector<unsigned> tgptr(2, 0); tgptr[1] = static_cast<unsigned>(preds.size());
const std::vector<unsigned> &gptr = info.group_ptr.size() == 0 ? tgptr : info.group_ptr;
utils::Assert(gptr.size() != 0, "must specify group when constructing rank file");
utils::Assert(gptr.back() == preds.size(),
@@ -382,7 +383,7 @@ struct EvalNDCG : public EvalRankList{
for (size_t i = 0; i < rec.size() && i < this->topn_; ++i) {
const unsigned rel = rec[i].second;
if (rel != 0) {
-sumdcg += ((1 << rel) - 1) / logf(i + 2);
+sumdcg += ((1 << rel) - 1) / log(i + 2.0);
}
}
return static_cast<float>(sumdcg);
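The DCG loop above accumulates (2^rel - 1) / ln(i + 2) for each ranked item; the change simply spells out the floating-point promotion, presumably so MSVC does not warn about the integer argument to the float overload of log. A small standalone version of the same accumulation (illustration only):

#include <cmath>
#include <cstdio>
#include <vector>

// Illustration only: DCG of a ranked list of relevance labels, truncated at topn.
double CalcDCG(const std::vector<unsigned> &rels, size_t topn) {
  double sumdcg = 0.0;
  for (size_t i = 0; i < rels.size() && i < topn; ++i) {
    if (rels[i] != 0) {
      sumdcg += ((1u << rels[i]) - 1) / std::log(i + 2.0);
    }
  }
  return sumdcg;
}

int main() {
  std::vector<unsigned> rels;
  rels.push_back(3); rels.push_back(2); rels.push_back(0); rels.push_back(1);
  std::printf("DCG@4 = %g\n", CalcDCG(rels, 4));
  return 0;
}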
@@ -7,6 +7,7 @@
 */
#include <string>
#include <vector>
+#include <cstdio>
#include "../utils/utils.h"
#include "./dmatrix.h"
@@ -330,7 +330,7 @@ class BoostLearner {
for (size_t i = 0; i < cache_.size(); ++i) {
if (cache_[i].mat_ == &mat && mat.cache_learner_ptr_ == this) {
if (cache_[i].num_row_ == mat.info.num_row()) {
-return cache_[i].buffer_offset_;
+return static_cast<int64_t>(cache_[i].buffer_offset_);
}
}
}
@@ -221,7 +221,7 @@ class SoftmaxMultiClassObj : public IObjFunction {
rec[k] = preds[j * nclass + k];
}
if (prob == 0) {
-tmp[j] = FindMaxIndex(rec);
+tmp[j] = static_cast<float>(FindMaxIndex(rec));
} else {
Softmax(&rec);
for (int k = 0; k < nclass; ++k) {
@@ -259,7 +259,7 @@ class LambdaRankObj : public IObjFunction {
std::vector<bst_gpair> &gpair = *out_gpair;
gpair.resize(preds.size());
// quick consistency when group is not available
-std::vector<unsigned> tgptr(2, 0); tgptr[1] = info.labels.size();
+std::vector<unsigned> tgptr(2, 0); tgptr[1] = static_cast<unsigned>(info.labels.size());
const std::vector<unsigned> &gptr = info.group_ptr.size() == 0 ? tgptr : info.group_ptr;
utils::Check(gptr.size() != 0 && gptr.back() == info.labels.size(),
"group structure not consistent with #rows");
@@ -290,7 +290,7 @@ class LambdaRankObj : public IObjFunction {
unsigned j = i + 1;
while (j < rec.size() && rec[j].first == rec[i].first) ++j;
// bucket in [i,j), get a sample outside bucket
-unsigned nleft = i, nright = rec.size() - j;
+unsigned nleft = i, nright = static_cast<unsigned>(rec.size() - j);
if (nleft + nright != 0) {
int nsample = num_pairsample;
while (nsample --) {
@@ -436,9 +436,9 @@ class LambdaRankObjNDCG : public LambdaRankObj {
inline static float CalcDCG(const std::vector<float> &labels) {
double sumdcg = 0.0;
for (size_t i = 0; i < labels.size(); ++i) {
-const unsigned rel = labels[i];
+const unsigned rel = static_cast<unsigned>(labels[i]);
if (rel != 0) {
-sumdcg += ((1 << rel) - 1) / logf(i + 2);
+sumdcg += ((1 << rel) - 1) / logf(static_cast<float>(i + 2));
}
}
return static_cast<float>(sumdcg);
@@ -203,8 +203,8 @@ class ColMaker: public IUpdater<FMatrix> {
}
// update node statistics
snode[nid].stats = stats;
-snode[nid].root_gain = stats.CalcGain(param);
-snode[nid].weight = stats.CalcWeight(param);
+snode[nid].root_gain = static_cast<float>(stats.CalcGain(param));
+snode[nid].weight = static_cast<float>(stats.CalcWeight(param));
}
}
/*! \brief update queue expand add in new leaves */
@@ -251,7 +251,7 @@ class ColMaker: public IUpdater<FMatrix> {
if (fabsf(fvalue - e.last_fvalue) > rt_2eps && e.stats.sum_hess >= param.min_child_weight) {
c.SetSubstract(snode[nid].stats, e.stats);
if (c.sum_hess >= param.min_child_weight) {
-double loss_chg = e.stats.CalcGain(param) + c.CalcGain(param) - snode[nid].root_gain;
+bst_float loss_chg = static_cast<bst_float>(e.stats.CalcGain(param) + c.CalcGain(param) - snode[nid].root_gain);
e.best.Update(loss_chg, fid, (fvalue + e.last_fvalue) * 0.5f, !is_forward_search);
}
}
@@ -266,7 +266,7 @@ class ColMaker: public IUpdater<FMatrix> {
ThreadEntry &e = temp[nid];
c.SetSubstract(snode[nid].stats, e.stats);
if (e.stats.sum_hess >= param.min_child_weight && c.sum_hess >= param.min_child_weight) {
-const double loss_chg = e.stats.CalcGain(param) + c.CalcGain(param) - snode[nid].root_gain;
+bst_float loss_chg = static_cast<bst_float>(e.stats.CalcGain(param) + c.CalcGain(param) - snode[nid].root_gain);
const float delta = is_forward_search ? rt_eps : -rt_eps;
e.best.Update(loss_chg, fid, e.last_fvalue + delta, !is_forward_search);
}
@@ -61,7 +61,7 @@ class TreeRefresher: public IUpdater<FMatrix> {
for (unsigned i = 0; i < nbatch; ++i) {
SparseBatch::Inst inst = batch[i];
const int tid = omp_get_thread_num();
-const size_t ridx = batch.base_rowid + i;
+const bst_uint ridx = static_cast<bst_uint>(batch.base_rowid + i);
RegTree::FVec &feats = fvec_temp[tid];
feats.Fill(inst);
for (size_t j = 0; j < trees.size(); ++j) {
@@ -112,16 +112,16 @@ class TreeRefresher: public IUpdater<FMatrix> {
inline void Refresh(const std::vector<TStats> &gstats,
int nid, RegTree *p_tree) {
RegTree &tree = *p_tree;
-tree.stat(nid).base_weight = gstats[nid].CalcWeight(param);
+tree.stat(nid).base_weight = static_cast<float>(gstats[nid].CalcWeight(param));
tree.stat(nid).sum_hess = static_cast<float>(gstats[nid].sum_hess);
gstats[nid].SetLeafVec(param, tree.leafvec(nid));
if (tree[nid].is_leaf()) {
tree[nid].set_leaf(tree.stat(nid).base_weight * param.learning_rate);
} else {
-tree.stat(nid).loss_chg =
+tree.stat(nid).loss_chg = static_cast<float>(
gstats[tree[nid].cleft()].CalcGain(param) +
gstats[tree[nid].cright()].CalcGain(param) -
-gstats[nid].CalcGain(param);
+gstats[nid].CalcGain(param));
this->Refresh(gstats, tree[nid].cleft(), p_tree);
this->Refresh(gstats, tree[nid].cright(), p_tree);
}
@@ -40,7 +40,7 @@ class IStream {
 */
template<typename T>
inline void Write(const std::vector<T> &vec) {
-uint64_t sz = vec.size();
+uint64_t sz = static_cast<uint64_t>(vec.size());
this->Write(&sz, sizeof(sz));
if (sz != 0) {
this->Write(&vec[0], sizeof(T) * sz);
@@ -66,7 +66,7 @@ class IStream {
 * \param str the string to be serialized
 */
inline void Write(const std::string &str) {
-uint64_t sz = str.length();
+uint64_t sz = static_cast<uint64_t>(str.length());
this->Write(&sz, sizeof(sz));
if (sz != 0) {
this->Write(&str[0], sizeof(char) * sz);
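Both Write overloads above emit a fixed-width uint64_t length prefix followed by the raw bytes, which is why the size is cast explicitly instead of letting a platform-dependent size_t land in the stream. A sketch of that layout using plain stdio rather than utils::IStream (illustration only):

#include <cstdint>
#include <cstdio>
#include <string>

// Illustration only: the length-prefixed layout produced by the Write overloads.
void WriteString(std::FILE *fp, const std::string &str) {
  uint64_t sz = static_cast<uint64_t>(str.length());
  std::fwrite(&sz, sizeof(sz), 1, fp);
  if (sz != 0) std::fwrite(str.data(), 1, static_cast<size_t>(sz), fp);
}

bool ReadString(std::FILE *fp, std::string *out) {
  uint64_t sz = 0;
  if (std::fread(&sz, sizeof(sz), 1, fp) != 1) return false;
  out->resize(static_cast<size_t>(sz));
  if (sz != 0 &&
      std::fread(&(*out)[0], 1, static_cast<size_t>(sz), fp) != static_cast<size_t>(sz)) {
    return false;
  }
  return true;
}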
@@ -9,7 +9,11 @@
#include <omp.h>
#else
#ifndef DISABLE_OPENMP
+#ifndef _MSC_VER
#warning "OpenMP is not available, compile to single thread code"
+#else
+// TODO add warning for msvc
+#endif
#endif
inline int omp_get_thread_num() { return 0; }
inline int omp_get_num_threads() { return 1; }
@@ -88,11 +88,18 @@ inline void Shuffle(std::vector<T> &data) {
struct Random{
/*! \brief set random number seed */
inline void Seed(unsigned sd) {
-this->rseed = sd;
+this->rseed = sd;
+#ifdef _MSC_VER
+srand(rseed);
+#endif
}
/*! \brief return a real number uniform in [0,1) */
inline double RandDouble(void) {
-return static_cast<double>( rand_r( &rseed ) ) / (static_cast<double>( RAND_MAX )+1.0);
+#ifndef _MSC_VER
+return static_cast<double>(rand_r(&rseed)) / (static_cast<double>(RAND_MAX) + 1.0);
+#else
+return static_cast<double>(rand()) / (static_cast<double>(RAND_MAX) + 1.0);
+#endif
}
// random number seed
unsigned rseed;
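rand_r is POSIX-only, hence the fallback to srand/rand under _MSC_VER above; the trade-off is that rand() uses shared global state, so the seed is per process rather than per Random object. A self-contained alternative sketch using C++11 <random> (not what this commit does, and the code base of the time may predate C++11):

#include <cstdio>
#include <random>

// Illustration only: an explicitly seeded engine avoids both rand_r (POSIX)
// and the shared global state behind rand().
struct PortableRandom {
  explicit PortableRandom(unsigned seed) : eng(seed) {}
  void Seed(unsigned seed) { eng.seed(seed); }
  double RandDouble() {  // uniform in [0, 1)
    return std::uniform_real_distribution<double>(0.0, 1.0)(eng);
  }
  std::mt19937 eng;
};

int main() {
  PortableRandom rnd(42);
  std::printf("%f\n", rnd.RandDouble());
  return 0;
}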
@@ -6,8 +6,19 @@
 * \author Tianqi Chen
 */
+#define _CRT_SECURE_NO_WARNINGS
#include <cstdio>
+#include <cstdarg>
#include <cstdlib>
+#ifdef _MSC_VER
+#define fopen64 fopen
+// temporal solution for MSVC
+inline int snprintf(char *ptr, size_t sz, const char *fmt, ...) {
+va_list args;
+va_start(args, fmt);
+int ret = vsprintf(ptr, fmt, args);
+va_end(args);
+return ret;
+}
#else
#ifdef _FILE_OFFSET_BITS
#if _FILE_OFFSET_BITS == 32
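One caveat about the MSVC snprintf shim above: it forwards to vsprintf, so the sz argument is accepted but never enforced, and an over-long format can still overrun the buffer. A hedged sketch of a bounded variant, assuming _vsnprintf (older MSVC) or C99 vsnprintf is available (illustration only, not part of this commit):

#include <cstdarg>
#include <cstddef>
#include <cstdio>

// Illustration only: a size-respecting replacement for the shim above.
inline int snprintf_bounded(char *ptr, size_t sz, const char *fmt, ...) {
  va_list args;
  va_start(args, fmt);
#ifdef _MSC_VER
  int ret = _vsnprintf(ptr, sz, fmt, args);  // may not NUL-terminate on truncation
  if (sz != 0) ptr[sz - 1] = '\0';
#else
  int ret = vsnprintf(ptr, sz, fmt, args);
#endif
  va_end(args);
  return ret;
}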
@@ -36,11 +47,6 @@ typedef long int64_t;
#include <inttypes.h>
#endif

-#include <cstdio>
-#include <cstdarg>
-#include <cstdlib>

namespace xgboost {
/*! \brief namespace for helper utils of the project */
namespace utils {
windows/README.md (new file) | 1
@@ -0,0 +1 @@
This is a test for minimal files needed for windows version
windows/xgboost.sln (new file) | 26
@@ -0,0 +1,26 @@

Microsoft Visual Studio Solution File, Format Version 11.00
# Visual Studio 2010
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "xgboost", "xgboost\xgboost.vcxproj", "{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Win32 = Debug|Win32
Debug|x64 = Debug|x64
Release|Win32 = Release|Win32
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Debug|Win32.ActiveCfg = Debug|Win32
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Debug|Win32.Build.0 = Debug|Win32
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Debug|x64.ActiveCfg = Debug|x64
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Debug|x64.Build.0 = Debug|x64
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Release|Win32.ActiveCfg = Release|Win32
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Release|Win32.Build.0 = Release|Win32
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Release|x64.ActiveCfg = Release|x64
{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
windows/xgboost/xgboost.vcxproj (new file) | 116
@@ -0,0 +1,116 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{1D6A56A5-5557-4D20-9D50-3DE4C30BE00C}</ProjectGuid>
<RootNamespace>xgboost</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup />
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
</ClCompile>
<Link>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\src\io\io.cpp" />
<ClCompile Include="..\..\src\xgboost_main.cpp" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>