Fix compiler warnings. (#7974)
- Remove unused parameters. There are still many warnings that are not yet addressed; currently, the warnings in dmlc-core dominate the error log. - Remove the `distributed` parameter from metric. - Fix some warnings about signed comparison.
This commit is contained in:
@@ -116,7 +116,7 @@ class RowPartitioner {
|
||||
Segment segment = ridx_segments_.at(nidx); // rows belongs to node nidx
|
||||
auto d_ridx = ridx_.CurrentSpan();
|
||||
auto d_position = position_.CurrentSpan();
|
||||
if (left_counts_.size() <= nidx) {
|
||||
if (left_counts_.size() <= static_cast<size_t>(nidx)) {
|
||||
left_counts_.resize((nidx * 2) + 1);
|
||||
thrust::fill(left_counts_.begin(), left_counts_.end(), 0);
|
||||
}
|
||||
|
||||
@@ -203,8 +203,8 @@ class HistEvaluator {
|
||||
// Returns the sum of gradients corresponding to the data points that contains
|
||||
// a non-missing value for the particular feature fid.
|
||||
template <int d_step>
|
||||
GradStats EnumerateSplit(common::HistogramCuts const &cut, common::Span<size_t const> sorted_idx,
|
||||
const common::GHistRow &hist, bst_feature_t fidx, bst_node_t nidx,
|
||||
GradStats EnumerateSplit(common::HistogramCuts const &cut, const common::GHistRow &hist,
|
||||
bst_feature_t fidx, bst_node_t nidx,
|
||||
TreeEvaluator::SplitEvaluator<TrainParam> const &evaluator,
|
||||
SplitEntry *p_best) const {
|
||||
static_assert(d_step == +1 || d_step == -1, "Invalid step.");
|
||||
@@ -333,9 +333,9 @@ class HistEvaluator {
|
||||
EnumeratePart<-1>(cut, sorted_idx, histogram, fidx, nidx, evaluator, best);
|
||||
}
|
||||
} else {
|
||||
auto grad_stats = EnumerateSplit<+1>(cut, {}, histogram, fidx, nidx, evaluator, best);
|
||||
auto grad_stats = EnumerateSplit<+1>(cut, histogram, fidx, nidx, evaluator, best);
|
||||
if (SplitContainsMissingValues(grad_stats, snode_[nidx])) {
|
||||
EnumerateSplit<-1>(cut, {}, histogram, fidx, nidx, evaluator, best);
|
||||
EnumerateSplit<-1>(cut, histogram, fidx, nidx, evaluator, best);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -440,7 +440,7 @@ template <typename Partitioner, typename ExpandEntry>
|
||||
void UpdatePredictionCacheImpl(GenericParameter const *ctx, RegTree const *p_last_tree,
|
||||
std::vector<Partitioner> const &partitioner,
|
||||
HistEvaluator<ExpandEntry> const &hist_evaluator,
|
||||
TrainParam const ¶m, linalg::VectorView<float> out_preds) {
|
||||
linalg::VectorView<float> out_preds) {
|
||||
CHECK_GT(out_preds.Size(), 0U);
|
||||
|
||||
CHECK(p_last_tree);
|
||||
|
||||
@@ -116,7 +116,7 @@ class GloablApproxBuilder {
|
||||
// Caching prediction seems redundant for approx tree method, as sketching takes up
|
||||
// majority of training time.
|
||||
CHECK_EQ(out_preds.Size(), data->Info().num_row_);
|
||||
UpdatePredictionCacheImpl(ctx_, p_last_tree_, partitioner_, evaluator_, param_, out_preds);
|
||||
UpdatePredictionCacheImpl(ctx_, p_last_tree_, partitioner_, evaluator_, out_preds);
|
||||
monitor_->Stop(__func__);
|
||||
}
|
||||
|
||||
|
||||
@@ -83,7 +83,7 @@ class ApproxRowPartitioner {
|
||||
const size_t task_id = partition_builder_.GetTaskIdx(node_in_set, r.begin());
|
||||
partition_builder_.AllocateForTask(task_id);
|
||||
partition_builder_.PartitionRange(
|
||||
node_in_set, nid, r, fidx, &row_set_collection_, [&](size_t row_id) {
|
||||
node_in_set, nid, r, &row_set_collection_, [&](size_t row_id) {
|
||||
auto cut_value = SearchCutValue(row_id, fidx, index, cut_ptrs, cut_values);
|
||||
if (std::isnan(cut_value)) {
|
||||
return candidate.split.DefaultLeft();
|
||||
|
||||
@@ -563,7 +563,7 @@ struct GPUHistMakerDevice {
|
||||
// when processing a large batch
|
||||
this->AllReduceHist(hist_nidx.at(0), reducer, hist_nidx.size());
|
||||
|
||||
for (int i = 0; i < subtraction_nidx.size(); i++) {
|
||||
for (size_t i = 0; i < subtraction_nidx.size(); i++) {
|
||||
auto build_hist_nidx = hist_nidx.at(i);
|
||||
auto subtraction_trick_nidx = subtraction_nidx.at(i);
|
||||
auto parent_nidx = candidates.at(i).nid;
|
||||
|
||||
@@ -257,7 +257,7 @@ bool QuantileHistMaker::Builder::UpdatePredictionCache(DMatrix const *data,
|
||||
}
|
||||
monitor_->Start(__func__);
|
||||
CHECK_EQ(out_preds.Size(), data->Info().num_row_);
|
||||
UpdatePredictionCacheImpl(ctx_, p_last_tree_, partitioner_, *evaluator_, param_, out_preds);
|
||||
UpdatePredictionCacheImpl(ctx_, p_last_tree_, partitioner_, *evaluator_, out_preds);
|
||||
monitor_->Stop(__func__);
|
||||
return true;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user