Implement transform to reduce CPU/GPU code duplication. (#3643)

* Implement Transform class.
* Add tests for softmax.
* Use Transform in regression, softmax and hinge objectives, except for Cox.
* Mark old gpu objective functions deprecated.
* static_assert for softmax.
* Split up multi-gpu tests.
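
The core idea behind the Transform change: each objective's per-element gradient math is written once as an ordinary functor, and a small dispatcher decides whether that functor runs on the host or on the device, so the CPU and GPU objectives no longer carry duplicated kernels. A minimal host-only sketch of that pattern (the names SquaredErrorGrad and RunTransform are illustrative, not the API introduced by this commit):

#include <cstddef>
#include <iostream>
#include <vector>

// One definition of the element-wise gradient, shared by every backend.
struct SquaredErrorGrad {
  float operator()(float pred, float label) const { return pred - label; }
};

// Sketch of a dispatcher: in a real CPU/GPU build the `use_gpu` branch would
// launch a CUDA kernel that applies `fn` on the device; here both paths run
// the same host loop, and the functor is reused unchanged.
template <typename Functor>
void RunTransform(bool use_gpu, Functor fn,
                  const std::vector<float>& preds,
                  const std::vector<float>& labels,
                  std::vector<float>* out_grad) {
  out_grad->resize(preds.size());
  for (std::size_t i = 0; i < preds.size(); ++i) {
    (*out_grad)[i] = fn(preds[i], labels[i]);
  }
  (void)use_gpu;  // device dispatch elided in this sketch
}

int main() {
  std::vector<float> preds{0.0f, 0.1f, 0.9f, 1.0f};
  std::vector<float> labels{0.0f, 0.0f, 1.0f, 1.0f};
  std::vector<float> grad;
  RunTransform(false, SquaredErrorGrad{}, preds, labels, &grad);
  for (float g : grad) std::cout << g << ' ';  // prints: 0 0.1 -0.1 0
  std::cout << '\n';
}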
Author:    trivialfis (2018-10-02 15:06:21 +13:00)
Committer: Rory Mitchell
Parent:    87aca8c244
Commit:    d594b11f35
31 changed files with 1514 additions and 997 deletions


@@ -1,9 +1,11 @@
-// Copyright by Contributors
+/*!
+ * Copyright 2017-2018 XGBoost contributors
+ */
 #include <xgboost/objective.h>
 #include "../helpers.h"
 
-TEST(Objective, LinearRegressionGPair) {
+TEST(Objective, DeclareUnifiedTest(LinearRegressionGPair)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:linear");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
@@ -13,27 +15,32 @@ TEST(Objective, LinearRegressionGPair) {
                    {1, 1, 1, 1, 1, 1, 1, 1},
                    {0, 0.1f, 0.9f, 1.0f, -1.0f, -0.9f, -0.1f, 0},
                    {1, 1, 1, 1, 1, 1, 1, 1});
+  CheckObjFunction(obj,
+                   {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},
+                   {0, 0, 0, 0, 1, 1, 1, 1},
+                   {},  // empty weight
+                   {0, 0.1f, 0.9f, 1.0f, -1.0f, -0.9f, -0.1f, 0},
+                   {1, 1, 1, 1, 1, 1, 1, 1});
   ASSERT_NO_THROW(obj->DefaultEvalMetric());
   delete obj;
 }
 
-TEST(Objective, LogisticRegressionGPair) {
+TEST(Objective, DeclareUnifiedTest(LogisticRegressionGPair)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:logistic");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
   CheckObjFunction(obj,
-                   { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},
-                   { 0, 0, 0, 0, 1, 1, 1, 1},
-                   { 1, 1, 1, 1, 1, 1, 1, 1},
-                   { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f},
-                   {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f});
+                   { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},  // preds
+                   { 0, 0, 0, 0, 1, 1, 1, 1},  // labels
+                   { 1, 1, 1, 1, 1, 1, 1, 1},  // weights
+                   { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f},  // out_grad
+                   {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f});  // out_hess
   delete obj;
 }
 
-TEST(Objective, LogisticRegressionBasic) {
+TEST(Objective, DeclareUnifiedTest(LogisticRegressionBasic)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:logistic");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
@@ -61,7 +68,7 @@ TEST(Objective, LogisticRegressionBasic) {
   delete obj;
 }
 
-TEST(Objective, LogisticRawGPair) {
+TEST(Objective, DeclareUnifiedTest(LogisticRawGPair)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("binary:logitraw");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
@@ -75,7 +82,7 @@ TEST(Objective, LogisticRawGPair) {
   delete obj;
 }
 
-TEST(Objective, PoissonRegressionGPair) {
+TEST(Objective, DeclareUnifiedTest(PoissonRegressionGPair)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("count:poisson");
   std::vector<std::pair<std::string, std::string> > args;
   args.push_back(std::make_pair("max_delta_step", "0.1f"));
@@ -86,11 +93,16 @@ TEST(Objective, PoissonRegressionGPair) {
                    { 1, 1, 1, 1, 1, 1, 1, 1},
                    { 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f},
                    {1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f});
+  CheckObjFunction(obj,
+                   { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},
+                   { 0, 0, 0, 0, 1, 1, 1, 1},
+                   {},  // Empty weight
+                   { 1, 1.10f, 2.45f, 2.71f, 0, 0.10f, 1.45f, 1.71f},
+                   {1.10f, 1.22f, 2.71f, 3.00f, 1.10f, 1.22f, 2.71f, 3.00f});
   delete obj;
 }
 
-TEST(Objective, PoissonRegressionBasic) {
+TEST(Objective, DeclareUnifiedTest(PoissonRegressionBasic)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("count:poisson");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
@@ -116,7 +128,7 @@ TEST(Objective, PoissonRegressionBasic) {
   delete obj;
 }
 
-TEST(Objective, GammaRegressionGPair) {
+TEST(Objective, DeclareUnifiedTest(GammaRegressionGPair)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:gamma");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
@@ -126,11 +138,16 @@ TEST(Objective, GammaRegressionGPair) {
                    {1, 1, 1, 1, 1, 1, 1, 1},
                    {1, 1, 1, 1, 0, 0.09f, 0.59f, 0.63f},
                    {0, 0, 0, 0, 1, 0.90f, 0.40f, 0.36f});
+  CheckObjFunction(obj,
+                   {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},
+                   {0, 0, 0, 0, 1, 1, 1, 1},
+                   {},  // Empty weight
+                   {1, 1, 1, 1, 0, 0.09f, 0.59f, 0.63f},
+                   {0, 0, 0, 0, 1, 0.90f, 0.40f, 0.36f});
   delete obj;
 }
 
-TEST(Objective, GammaRegressionBasic) {
+TEST(Objective, DeclareUnifiedTest(GammaRegressionBasic)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:gamma");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
@@ -156,7 +173,7 @@ TEST(Objective, GammaRegressionBasic) {
   delete obj;
 }
 
-TEST(Objective, TweedieRegressionGPair) {
+TEST(Objective, DeclareUnifiedTest(TweedieRegressionGPair)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:tweedie");
   std::vector<std::pair<std::string, std::string> > args;
   args.push_back(std::make_pair("tweedie_variance_power", "1.1f"));
@@ -167,11 +184,17 @@ TEST(Objective, TweedieRegressionGPair) {
                    { 1, 1, 1, 1, 1, 1, 1, 1},
                    { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f},
                    {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f});
+  CheckObjFunction(obj,
+                   { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1},
+                   { 0, 0, 0, 0, 1, 1, 1, 1},
+                   {},  // Empty weight.
+                   { 1, 1.09f, 2.24f, 2.45f, 0, 0.10f, 1.33f, 1.55f},
+                   {0.89f, 0.98f, 2.02f, 2.21f, 1, 1.08f, 2.11f, 2.30f});
   delete obj;
 }
 
-TEST(Objective, TweedieRegressionBasic) {
+TEST(Objective, DeclareUnifiedTest(TweedieRegressionBasic)) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("reg:tweedie");
   std::vector<std::pair<std::string, std::string> > args;
   obj->Configure(args);
@@ -197,6 +220,9 @@ TEST(Objective, TweedieRegressionBasic) {
   delete obj;
 }
 
+// CoxRegression not implemented in GPU code, no need for testing.
+#if !defined(__CUDACC__)
 TEST(Objective, CoxRegressionGPair) {
   xgboost::ObjFunction * obj = xgboost::ObjFunction::Create("survival:cox");
   std::vector<std::pair<std::string, std::string> > args;
@@ -210,3 +236,4 @@ TEST(Objective, CoxRegressionGPair) {
   delete obj;
 }
+#endif
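
The DeclareUnifiedTest wrapper seen throughout this file is what lets one test source cover both backends: the idea is that the same test body can also be compiled by nvcc, and the macro gives the GPU build a distinct test name. A plausible shape for such a macro, assuming the real definition lives in the shared test helpers and may differ in detail:

// Illustrative guess at a unified-test name macro; not copied from the commit.
#if defined(__CUDACC__)
#define DeclareUnifiedTest(name) GPU_##name   // nvcc build: Objective.GPU_<name>
#else
#define DeclareUnifiedTest(name) name         // host build: Objective.<name>
#endif

With a guard like this, TEST(Objective, DeclareUnifiedTest(LinearRegressionGPair)) registers as Objective.LinearRegressionGPair in the CPU build and under a GPU-prefixed name when the file is built for CUDA, while the #if !defined(__CUDACC__) block above keeps the Cox test CPU-only.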