Run training with empty DMatrix. (#4990)

This makes GPU Hist robust in distributed environment as some workers might not
be associated with any data in either training or evaluation.

* Disable rabit mock test for now: See #5012 .

* Disable dask-cudf test at prediction for now: See #5003

* Launch the dask job on all workers, even though some of them might not have any data.
* Check 0 rows in elementwise evaluation metrics.

   Using AUC and AUC-PR still throws an error.  See #4663 for a robust fix.

* Add tests for edge cases.
* Add `LaunchKernel` wrapper handling zero sized grid.
* Move some parts of allreducer into a cu file.
* Don't validate feature names when the booster is empty.

* Sync number of columns in DMatrix.

  As num_feature is required to be the same across all workers in data split
  mode.

* Filtering in the dask interface now by default syncs every booster that is not
empty, instead of always using the one from rank 0.

* Fix Jenkins' GPU tests.

* Install dask-cuda from source in Jenkins' test.

  Now all tests are actually running.

* Restore GPU Hist tree synchronization test.

* Check UUID of running devices.

  The check is only performed on CUDA version >= 10.x, as 9.x doesn't have the UUID field.

* Fix CMake policy and project variables.

  Use xgboost_SOURCE_DIR uniformly, add policy for CMake >= 3.13.

* Fix copying data to CPU

* Fix race condition in cpu predictor.

* Fix duplicated DMatrix construction.

* Don't download extra nccl in CI script.
This commit is contained in:
Jiaming Yuan
2019-11-06 16:13:13 +08:00
committed by GitHub
parent 807a244517
commit 7663de956c
44 changed files with 603 additions and 272 deletions

View File

@@ -3,6 +3,8 @@
* \file elementwise_metric.cc
* \brief evaluation metrics for elementwise binary or regression.
* \author Kailong Chen, Tianqi Chen
*
* Expressions like wsum == 0 ? esum : esum / wsum are used to handle empty datasets.
*/
#include <rabit/rabit.h>
#include <xgboost/metric.h>
@@ -142,7 +144,7 @@ struct EvalRowRMSE {
return diff * diff;
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return std::sqrt(esum / wsum);
return wsum == 0 ? std::sqrt(esum) : std::sqrt(esum / wsum);
}
};
@@ -150,12 +152,13 @@ struct EvalRowRMSLE {
char const* Name() const {
return "rmsle";
}
XGBOOST_DEVICE bst_float EvalRow(bst_float label, bst_float pred) const {
bst_float diff = std::log1p(label) - std::log1p(pred);
return diff * diff;
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return std::sqrt(esum / wsum);
return wsum == 0 ? std::sqrt(esum) : std::sqrt(esum / wsum);
}
};
@@ -168,7 +171,7 @@ struct EvalRowMAE {
return std::abs(label - pred);
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return esum / wsum;
return wsum == 0 ? esum : esum / wsum;
}
};
@@ -190,7 +193,7 @@ struct EvalRowLogLoss {
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return esum / wsum;
return wsum == 0 ? esum : esum / wsum;
}
};
@@ -225,7 +228,7 @@ struct EvalError {
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return esum / wsum;
return wsum == 0 ? esum : esum / wsum;
}
private:
@@ -245,7 +248,7 @@ struct EvalPoissonNegLogLik {
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return esum / wsum;
return wsum == 0 ? esum : esum / wsum;
}
};
@@ -278,7 +281,7 @@ struct EvalGammaNLogLik {
return -((y * theta - b) / a + c);
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return esum / wsum;
return wsum == 0 ? esum : esum / wsum;
}
};
@@ -304,7 +307,7 @@ struct EvalTweedieNLogLik {
return -a + b;
}
static bst_float GetFinal(bst_float esum, bst_float wsum) {
return esum / wsum;
return wsum == 0 ? esum : esum / wsum;
}
protected:
@@ -323,7 +326,9 @@ struct EvalEWiseBase : public Metric {
bst_float Eval(const HostDeviceVector<bst_float>& preds,
const MetaInfo& info,
bool distributed) override {
CHECK_NE(info.labels_.Size(), 0U) << "label set cannot be empty";
if (info.labels_.Size() == 0) {
LOG(WARNING) << "label set is empty";
}
CHECK_EQ(preds.Size(), info.labels_.Size())
<< "label and prediction size not match, "
<< "hint: use merror or mlogloss for multi-class classification";
@@ -333,6 +338,7 @@ struct EvalEWiseBase : public Metric {
reducer_.Reduce(*tparam_, device, info.weights_, info.labels_, preds);
double dat[2] { result.Residue(), result.Weights() };
if (distributed) {
rabit::Allreduce<rabit::op::Sum>(dat, 2);
}