Add Accelerated Failure Time loss for survival analysis task (#4763)
* [WIP] Add lower and upper bounds on the label for survival analysis * Update test MetaInfo.SaveLoadBinary to account for extra two fields * Don't clear qids_ for version 2 of MetaInfo * Add SetInfo() and GetInfo() method for lower and upper bounds * changes to aft * Add parameter class for AFT; use enum's to represent distribution and event type * Add AFT metric * changes to neg grad to grad * changes to binomial loss * changes to overflow * changes to eps * changes to code refactoring * changes to code refactoring * changes to code refactoring * Re-factor survival analysis * Remove aft namespace * Move function bodies out of AFTNormal and AFTLogistic, to reduce clutter * Move function bodies out of AFTLoss, to reduce clutter * Use smart pointer to store AFTDistribution and AFTLoss * Rename AFTNoiseDistribution enum to AFTDistributionType for clarity The enum class was not a distribution itself but a distribution type * Add AFTDistribution::Create() method for convenience * changes to extreme distribution * changes to extreme distribution * changes to extreme * changes to extreme distribution * changes to left censored * deleted cout * changes to x,mu and sd and code refactoring * changes to print * changes to hessian formula in censored and uncensored * changes to variable names and pow * changes to Logistic Pdf * changes to parameter * Expose lower and upper bound labels to R package * Use example weights; normalize log likelihood metric * changes to CHECK * changes to logistic hessian to standard formula * changes to logistic formula * Comply with coding style guideline * Revert back Rabit submodule * Revert dmlc-core submodule * Comply with coding style guideline (clang-tidy) * Fix an error in AFTLoss::Gradient() * Add missing files to amalgamation * Address @RAMitchell's comment: minimize future change in MetaInfo interface * Fix lint * Fix compilation error on 32-bit target, when size_t == bst_uint * Allocate sufficient memory to hold extra label info * Use 
OpenMP to speed up * Fix compilation on Windows * Address reviewer's feedback * Add unit tests for probability distributions * Make Metric subclass of Configurable * Address reviewer's feedback: Configure() AFT metric * Add a dummy test for AFT metric configuration * Complete AFT configuration test; remove debugging print * Rename AFT parameters * Clarify test comment * Add a dummy test for AFT loss for uncensored case * Fix a bug in AFT loss for uncensored labels * Complete unit test for AFT loss metric * Simplify unit tests for AFT metric * Add unit test to verify aggregate output from AFT metric * Use EXPECT_* instead of ASSERT_*, so that we run all unit tests * Use aft_loss_param when serializing AFTObj This is to be consistent with AFT metric * Add unit tests for AFT Objective * Fix OpenMP bug; clarify semantics for shared variables used in OpenMP loops * Add comments * Remove AFT prefix from probability distribution; put probability distribution in separate source file * Add comments * Define kPI and kEulerMascheroni in probability_distribution.h * Add probability_distribution.cc to amalgamation * Remove unnecessary diff * Address reviewer's feedback: define variables where they're used * Eliminate all INFs and NANs from AFT loss and gradient * Add demo * Add tutorial * Fix lint * Use 'survival:aft' to be consistent with 'survival:cox' * Move sample data to demo/data * Add visual demo with 1D toy data * Add Python tests Co-authored-by: Philip Cho <chohyu01@cs.washington.edu>
This commit is contained in:
106
src/metric/survival_metric.cc
Normal file
106
src/metric/survival_metric.cc
Normal file
@@ -0,0 +1,106 @@
|
||||
/*!
|
||||
* Copyright 2019 by Contributors
|
||||
* \file survival_metric.cc
|
||||
* \brief Metrics for survival analysis
|
||||
* \author Avinash Barnwal, Hyunsu Cho and Toby Hocking
|
||||
*/
|
||||
|
||||
#include <rabit/rabit.h>
|
||||
#include <xgboost/metric.h>
|
||||
#include <xgboost/host_device_vector.h>
|
||||
#include <dmlc/registry.h>
|
||||
#include <cmath>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
#include <limits>
|
||||
|
||||
#include "xgboost/json.h"
|
||||
|
||||
#include "../common/math.h"
|
||||
#include "../common/survival_util.h"
|
||||
|
||||
using AFTParam = xgboost::common::AFTParam;
|
||||
using AFTLoss = xgboost::common::AFTLoss;
|
||||
|
||||
namespace xgboost {
|
||||
namespace metric {
|
||||
// tag the this file, used by force static link later.
|
||||
DMLC_REGISTRY_FILE_TAG(survival_metric);
|
||||
|
||||
/*! \brief Negative log likelihood of Accelerated Failure Time model */
|
||||
struct EvalAFT : public Metric {
|
||||
public:
|
||||
explicit EvalAFT(const char* param) {}
|
||||
|
||||
void Configure(const Args& args) override {
|
||||
param_.UpdateAllowUnknown(args);
|
||||
loss_.reset(new AFTLoss(param_.aft_loss_distribution));
|
||||
}
|
||||
|
||||
void SaveConfig(Json* p_out) const override {
|
||||
auto& out = *p_out;
|
||||
out["name"] = String(this->Name());
|
||||
out["aft_loss_param"] = toJson(param_);
|
||||
}
|
||||
|
||||
void LoadConfig(Json const& in) override {
|
||||
fromJson(in["aft_loss_param"], ¶m_);
|
||||
}
|
||||
|
||||
bst_float Eval(const HostDeviceVector<bst_float> &preds,
|
||||
const MetaInfo &info,
|
||||
bool distributed) override {
|
||||
CHECK_NE(info.labels_lower_bound_.Size(), 0U)
|
||||
<< "y_lower cannot be empty";
|
||||
CHECK_NE(info.labels_upper_bound_.Size(), 0U)
|
||||
<< "y_higher cannot be empty";
|
||||
CHECK_EQ(preds.Size(), info.labels_lower_bound_.Size());
|
||||
CHECK_EQ(preds.Size(), info.labels_upper_bound_.Size());
|
||||
|
||||
/* Compute negative log likelihood for each data point and compute weighted average */
|
||||
const auto& yhat = preds.HostVector();
|
||||
const auto& y_lower = info.labels_lower_bound_.HostVector();
|
||||
const auto& y_upper = info.labels_upper_bound_.HostVector();
|
||||
const auto& weights = info.weights_.HostVector();
|
||||
const bool is_null_weight = weights.empty();
|
||||
const float aft_loss_distribution_scale = param_.aft_loss_distribution_scale;
|
||||
CHECK_LE(yhat.size(), static_cast<size_t>(std::numeric_limits<omp_ulong>::max()))
|
||||
<< "yhat is too big";
|
||||
const omp_ulong nsize = static_cast<omp_ulong>(yhat.size());
|
||||
|
||||
double nloglik_sum = 0.0;
|
||||
double weight_sum = 0.0;
|
||||
#pragma omp parallel for default(none) \
|
||||
firstprivate(nsize, is_null_weight, aft_loss_distribution_scale) \
|
||||
shared(weights, y_lower, y_upper, yhat) reduction(+:nloglik_sum, weight_sum)
|
||||
for (omp_ulong i = 0; i < nsize; ++i) {
|
||||
// If weights are empty, data is unweighted so we use 1.0 everywhere
|
||||
const double w = is_null_weight ? 1.0 : weights[i];
|
||||
const double loss
|
||||
= loss_->Loss(y_lower[i], y_upper[i], yhat[i], aft_loss_distribution_scale);
|
||||
nloglik_sum += loss;
|
||||
weight_sum += w;
|
||||
}
|
||||
|
||||
double dat[2]{nloglik_sum, weight_sum};
|
||||
if (distributed) {
|
||||
rabit::Allreduce<rabit::op::Sum>(dat, 2);
|
||||
}
|
||||
return static_cast<bst_float>(dat[0] / dat[1]);
|
||||
}
|
||||
|
||||
const char* Name() const override {
|
||||
return "aft-nloglik";
|
||||
}
|
||||
|
||||
private:
|
||||
AFTParam param_;
|
||||
std::unique_ptr<AFTLoss> loss_;
|
||||
};
|
||||
|
||||
// Register the AFT metric with the global metric registry so it can be
// instantiated by its canonical name "aft-nloglik".
XGBOOST_REGISTER_METRIC(AFT, "aft-nloglik")
.describe("Negative log likelihood of Accelerated Failure Time model.")
.set_body([](const char* create_param) {
  return new EvalAFT(create_param);
});
|
||||
|
||||
} // namespace metric
|
||||
} // namespace xgboost
|
||||
Reference in New Issue
Block a user