Fix R CRAN failures. (#7404)
* Remove hist builder dtor.
* Initialize values.
* Tolerance.
* Remove the use of nthread in col maker.
Parent: 55ee272ea8
Commit: b0015fda96
@@ -228,7 +228,7 @@ if (grepl('Windows', Sys.info()[['sysname']]) ||
   X <- 10^runif(100, -20, 20)
   if (capabilities('long.double')) {
     X2X <- as.numeric(format(X, digits = 17))
-    expect_identical(X, X2X)
+    expect_equal(X, X2X, tolerance = float_tolerance)
   }
   # retrieved attributes to be the same as written
   for (x in X) {
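Note on the hunk above: `expect_identical` demands bit-exact equality after the `format(x, digits = 17)` / `as.numeric` round-trip, which is not guaranteed on every platform CRAN checks (the commit message only says "Tolerance"); `expect_equal` with a tolerance accepts a last-ulp difference. The same idea in C++ terms, as an illustrative sketch only (none of these names come from the XGBoost code base):

#include <cassert>
#include <cmath>
#include <cstdio>
#include <cstdlib>

// Round-trip a double through decimal text and compare with a relative
// tolerance instead of exact equality, mirroring the R test change.
int main() {
  double x = std::pow(10.0, 17.3);               // an arbitrary large value
  char buf[64];
  std::snprintf(buf, sizeof(buf), "%.17g", x);   // 17 significant digits
  double x2 = std::strtod(buf, nullptr);         // parse it back

  double tol = 1e-12;                            // hypothetical tolerance
  assert(std::abs(x - x2) <= tol * std::abs(x)); // tolerance-based check
  return 0;
}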
@@ -213,7 +213,7 @@ struct Entry {
  */
 struct BatchParam {
   /*! \brief The GPU device to use. */
-  int gpu_id;
+  int gpu_id {-1};
   /*! \brief Maximum number of bins per feature for histograms. */
   int max_bin{0};
   /*! \brief Hessian, used for sketching with future approx implementation. */
@@ -49,10 +49,10 @@ class SimpleDMatrix : public DMatrix {
   MetaInfo info_;
   // Primary storage type
   std::shared_ptr<SparsePage> sparse_page_ = std::make_shared<SparsePage>();
-  std::shared_ptr<CSCPage> column_page_;
-  std::shared_ptr<SortedCSCPage> sorted_column_page_;
-  std::shared_ptr<EllpackPage> ellpack_page_;
-  std::shared_ptr<GHistIndexMatrix> gradient_index_;
+  std::shared_ptr<CSCPage> column_page_{nullptr};
+  std::shared_ptr<SortedCSCPage> sorted_column_page_{nullptr};
+  std::shared_ptr<EllpackPage> ellpack_page_{nullptr};
+  std::shared_ptr<GHistIndexMatrix> gradient_index_{nullptr};
   BatchParam batch_param_;
 
   bool EllpackExists() const override {
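The two hunks above are the "Initialize values" part of the commit: the scalar member `gpu_id` gets an in-class default initializer so a default-initialized `BatchParam` never carries an indeterminate value (the kind of uninitialized read that valgrind-style checks flag), and the `shared_ptr` members spell out `{nullptr}`. A minimal sketch of the difference, with hypothetical names rather than the actual XGBoost types:

#include <memory>

// Without an in-class initializer, a default-initialized instance has an
// indeterminate scalar member; reading it is undefined behaviour and shows
// up as an "uninitialised value" under memory checkers.
struct ParamBefore {
  int gpu_id;                          // indeterminate for `ParamBefore p;`
  std::shared_ptr<int> page;           // shared_ptr is always empty by default
};

// With brace initializers every construction path yields a known value.
struct ParamAfter {
  int gpu_id{-1};                      // always -1 unless set explicitly
  int max_bin{0};                      // always 0
  std::shared_ptr<int> page{nullptr};  // explicit, though equivalent to the default
};

int main() {
  ParamBefore before;                  // before.gpu_id is indeterminate here
  ParamAfter after;                    // after.gpu_id == -1, after.max_bin == 0
  (void)before;
  return after.gpu_id == -1 ? 0 : 1;
}

For the `shared_ptr` members the `{nullptr}` is documentation only, since the default constructor already produces an empty pointer; the behavioural change is in the scalar `gpu_id`.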
@@ -109,10 +109,9 @@ class ColMaker: public TreeUpdater {
     interaction_constraints_.Configure(param_, dmat->Info().num_row_);
     // build tree
     for (auto tree : trees) {
-      Builder builder(
-          param_,
-          colmaker_param_,
-          interaction_constraints_, column_densities_);
+      CHECK(tparam_);
+      Builder builder(param_, colmaker_param_, interaction_constraints_, tparam_,
+                      column_densities_);
       builder.Update(gpair->ConstHostVector(), dmat, tree);
     }
     param_.learning_rate = lr;
@@ -154,12 +153,12 @@ class ColMaker: public TreeUpdater {
   class Builder {
    public:
     // constructor
-    explicit Builder(const TrainParam& param,
-                     const ColMakerTrainParam& colmaker_train_param,
+    explicit Builder(const TrainParam &param, const ColMakerTrainParam &colmaker_train_param,
                      FeatureInteractionConstraintHost _interaction_constraints,
-                     const std::vector<float> &column_densities)
-        : param_(param), colmaker_train_param_{colmaker_train_param},
-          nthread_(omp_get_max_threads()),
+                     GenericParameter const *ctx, const std::vector<float> &column_densities)
+        : param_(param),
+          colmaker_train_param_{colmaker_train_param},
+          ctx_{ctx},
           tree_evaluator_(param_, column_densities.size(), GenericParameter::kCpuId),
           interaction_constraints_{std::move(_interaction_constraints)},
           column_densities_(column_densities) {}
@@ -237,7 +236,7 @@ class ColMaker: public TreeUpdater {
       // setup temp space for each thread
       // reserve a small space
       stemp_.clear();
-      stemp_.resize(this->nthread_, std::vector<ThreadEntry>());
+      stemp_.resize(this->ctx_->Threads(), std::vector<ThreadEntry>());
       for (auto& i : stemp_) {
         i.clear(); i.reserve(256);
       }
@@ -450,8 +449,9 @@ class ColMaker: public TreeUpdater {
       // start enumeration
       const auto num_features = static_cast<bst_omp_uint>(feat_set.size());
 #if defined(_OPENMP)
+      CHECK(this->ctx_);
       const int batch_size =  // NOLINT
-          std::max(static_cast<int>(num_features / this->nthread_ / 32), 1);
+          std::max(static_cast<int>(num_features / this->ctx_->Threads() / 32), 1);
 #endif  // defined(_OPENMP)
       {
         auto page = batch.GetView();
@@ -552,7 +552,8 @@ class ColMaker: public TreeUpdater {
     virtual void SyncBestSolution(const std::vector<int> &qexpand) {
       for (int nid : qexpand) {
         NodeEntry &e = snode_[nid];
-        for (int tid = 0; tid < this->nthread_; ++tid) {
+        CHECK(this->ctx_);
+        for (int tid = 0; tid < this->ctx_->Threads(); ++tid) {
           e.best.Update(stemp_[tid][nid].best);
         }
       }
@@ -608,7 +609,7 @@ class ColMaker: public TreeUpdater {
     const TrainParam& param_;
     const ColMakerTrainParam& colmaker_train_param_;
     // number of omp thread used during training
-    const int nthread_;
+    GenericParameter const* ctx_;
     common::ColumnSampler column_sampler_;
     // Instance Data: current node position in the tree of each instance
     std::vector<int> position_;
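The colmaker hunks above are the "Remove the use of nthread in col maker" part: the cached `nthread_` taken from `omp_get_max_threads()` is replaced by a pointer to the shared `GenericParameter` context and its `Threads()` accessor, so the builder uses the configured thread count rather than whatever OpenMP reports at construction time. A minimal sketch of the pattern, with stand-in classes rather than the actual XGBoost ones:

#include <algorithm>
#include <iostream>
#include <thread>
#include <vector>

// A shared context that owns the configured thread count, standing in for
// xgboost's GenericParameter in this sketch.
struct Context {
  int nthread{0};  // 0 means "use the hardware default"
  int Threads() const {
    return nthread > 0 ? nthread
                       : static_cast<int>(std::max(1u, std::thread::hardware_concurrency()));
  }
};

// A component that receives the context instead of asking OpenMP directly,
// so every component agrees on one thread count.
class Builder {
 public:
  explicit Builder(Context const* ctx) : ctx_{ctx} {}
  void Update() {
    // size per-thread scratch space from the shared context
    std::vector<std::vector<double>> stemp(ctx_->Threads());
    std::cout << "using " << stemp.size() << " worker buffers\n";
  }

 private:
  Context const* ctx_;
};

int main() {
  Context ctx;
  ctx.nthread = 2;          // e.g. the limit a user or a check environment configured
  Builder{&ctx}.Update();
  return 0;
}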
@@ -115,9 +115,6 @@ bool QuantileHistMaker::UpdatePredictionCache(
   }
 }
 
-template <typename GradientSumT>
-QuantileHistMaker::Builder<GradientSumT>::~Builder() = default;
-
 template <typename GradientSumT>
 template <bool any_missing>
@@ -165,7 +165,6 @@ class QuantileHistMaker: public TreeUpdater {
           task_{task} {
       builder_monitor_.Init("Quantile::Builder");
     }
-    ~Builder();
     // update one tree, growing
     virtual void Update(const GHistIndexMatrix& gmat,
                         const ColumnMatrix& column_matrix,
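The last two hunks are the "Remove hist builder dtor" part: the `~Builder()` declaration in the header and its out-of-line `= default` definition in the .cc file are dropped, leaving the implicitly generated destructor (the rule of zero). A minimal before/after sketch of that shape, collapsed into one file and using hypothetical names:

#include <vector>

// Before: the destructor is user-declared in the class template and defined
// out of line as `= default` (in the diff above, header vs. .cc file).
template <typename T>
class BuilderBefore {
 public:
  ~BuilderBefore();               // user-declared, defined below
 private:
  std::vector<T> hist_;
};
template <typename T>
BuilderBefore<T>::~BuilderBefore() = default;

// After: no user-declared destructor at all; the compiler generates it and
// the class template needs no out-of-line special members.
template <typename T>
class BuilderAfter {
 private:
  std::vector<T> hist_;
};

int main() {
  BuilderBefore<float> b1;
  BuilderAfter<double> b2;
  (void)b1; (void)b2;
  return 0;
}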