Add rmsle metric and reg:squaredlogerror objective (#4541)
This commit is contained in:
@@ -153,6 +153,19 @@ struct EvalRowRMSE {
|
||||
}
|
||||
};
|
||||
|
||||
struct EvalRowRMSLE {
|
||||
char const* Name() const {
|
||||
return "rmsle";
|
||||
}
|
||||
XGBOOST_DEVICE bst_float EvalRow(bst_float label, bst_float pred) const {
|
||||
bst_float diff = std::log1p(label) - std::log1p(pred);
|
||||
return diff * diff;
|
||||
}
|
||||
static bst_float GetFinal(bst_float esum, bst_float wsum) {
|
||||
return std::sqrt(esum / wsum);
|
||||
}
|
||||
};
|
||||
|
||||
struct EvalRowMAE {
|
||||
const char *Name() const {
|
||||
return "mae";
|
||||
@@ -349,6 +362,10 @@ XGBOOST_REGISTER_METRIC(RMSE, "rmse")
|
||||
.describe("Rooted mean square error.")
|
||||
.set_body([](const char* param) { return new EvalEWiseBase<EvalRowRMSE>(); });
|
||||
|
||||
// Register the "rmsle" evaluation metric: per-row squared-log-error from
// EvalRowRMSLE, aggregated element-wise by EvalEWiseBase and finalized as
// sqrt(error_sum / weight_sum).
XGBOOST_REGISTER_METRIC(RMSLE, "rmsle")
.describe("Rooted mean square log error.")
.set_body([](const char* param) { return new EvalEWiseBase<EvalRowRMSLE>(); });
|
||||
|
||||
// Register the "mae" evaluation metric, backed by the element-wise
// EvalRowMAE policy (per-row error definition lives in that struct).
XGBOOST_REGISTER_METRIC(MAE, "mae")
.describe("Mean absolute error.")
.set_body([](const char* param) { return new EvalEWiseBase<EvalRowMAE>(); });
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*!
|
||||
* Copyright 2017 XGBoost contributors
|
||||
* Copyright 2017-2019 XGBoost contributors
|
||||
*/
|
||||
#ifndef XGBOOST_OBJECTIVE_REGRESSION_LOSS_H_
|
||||
#define XGBOOST_OBJECTIVE_REGRESSION_LOSS_H_
|
||||
@@ -36,6 +36,29 @@ struct LinearSquareLoss {
|
||||
static const char* DefaultEvalMetric() { return "rmse"; }
|
||||
};
|
||||
|
||||
struct SquaredLogError {
|
||||
XGBOOST_DEVICE static bst_float PredTransform(bst_float x) { return x; }
|
||||
XGBOOST_DEVICE static bool CheckLabel(bst_float label) {
|
||||
return label > -1;
|
||||
}
|
||||
XGBOOST_DEVICE static bst_float FirstOrderGradient(bst_float predt, bst_float label) {
|
||||
predt = fmaxf(predt, -1 + 1e-6); // ensure correct value for log1p
|
||||
return (std::log1p(predt) - std::log1p(label)) / (predt + 1);
|
||||
}
|
||||
XGBOOST_DEVICE static bst_float SecondOrderGradient(bst_float predt, bst_float label) {
|
||||
predt = fmaxf(predt, -1 + 1e-6);
|
||||
float res = (-std::log1p(predt) + std::log1p(label) + 1) /
|
||||
std::pow(predt + 1, 2);
|
||||
res = fmaxf(res, 1e-6f);
|
||||
return res;
|
||||
}
|
||||
static bst_float ProbToMargin(bst_float base_score) { return base_score; }
|
||||
static const char* LabelErrorMsg() {
|
||||
return "label must be greater than -1 for rmsle so that log(label + 1) can be valid.";
|
||||
}
|
||||
static const char* DefaultEvalMetric() { return "rmsle"; }
|
||||
};
|
||||
|
||||
// logistic loss for probability regression task
|
||||
struct LogisticRegression {
|
||||
// duplication is necessary, as __device__ specifier
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*!
|
||||
* Copyright 2015-2018 by Contributors
|
||||
* Copyright 2015-2019 by Contributors
|
||||
* \file regression_obj.cu
|
||||
* \brief Definition of single-value regression and classification objectives.
|
||||
* \author Tianqi Chen, Kailong Chen
|
||||
@@ -124,6 +124,10 @@ XGBOOST_REGISTER_OBJECTIVE(SquaredLossRegression, "reg:squarederror")
|
||||
.describe("Regression with squared error.")
|
||||
.set_body([]() { return new RegLossObj<LinearSquareLoss>(); });
|
||||
|
||||
// Register the reg:squaredlogerror objective, wiring the SquaredLogError loss
// policy (log1p-based gradients, default metric "rmsle") into RegLossObj.
XGBOOST_REGISTER_OBJECTIVE(SquareLogError, "reg:squaredlogerror")
.describe("Regression with root mean squared logarithmic error.")
.set_body([]() { return new RegLossObj<SquaredLogError>(); });
|
||||
|
||||
// Register the reg:logistic objective backed by the LogisticRegression loss
// policy (its gradient/transform definitions live in that struct).
XGBOOST_REGISTER_OBJECTIVE(LogisticRegression, "reg:logistic")
.describe("Logistic regression for probability regression task.")
.set_body([]() { return new RegLossObj<LogisticRegression>(); });
|
||||
|
||||
Reference in New Issue
Block a user