Additional improvements for gblinear (#3134)

* fix rebase conflict

* [core] additional gblinear improvements

* [R] callback for gblinear coefficients history

* force eta=1 for gblinear python tests

* add top_k to GreedyFeatureSelector

* set eta=1 in shotgun test

* [core] fix SparsePage processing in gblinear; col-wise multithreading in greedy updater

* set sorted flag within TryInitColData

* gblinear tests: use scale, add external memory test

* fix multiclass for greedy updater

* fix whitespace

* fix typo
This commit is contained in:
Vadim Khotilovich
2018-03-13 01:27:13 -05:00
committed by GitHub
parent a1b48afa41
commit 706be4e5d4
18 changed files with 750 additions and 260 deletions

View File

@@ -12,7 +12,7 @@ TEST(Linear, shotgun) {
mat->InitColAccess(enabled, 1.0f, 1 << 16, false);
auto updater = std::unique_ptr<xgboost::LinearUpdater>(
xgboost::LinearUpdater::Create("shotgun"));
updater->Init({});
updater->Init({{"eta", "1."}});
std::vector<xgboost::bst_gpair> gpair(mat->info().num_row,
xgboost::bst_gpair(-5, 1.0));
xgboost::gbm::GBLinearModel model;

View File

@@ -3,9 +3,17 @@ from __future__ import print_function
import itertools as it
import numpy as np
import sys
import os
import glob
import testing as tm
import unittest
import xgboost as xgb
# sklearn is an optional test dependency: if it is missing, the imports fail
# and the tests below are skipped via tm._skip_if_no_sklearn().
try:
    from sklearn import metrics, datasets
    from sklearn.linear_model import ElasticNet
    from sklearn.preprocessing import scale
except ImportError:
    # Use the conventional no-op statement (the original used a bare `None`
    # expression, which works but is unidiomatic).
    pass

# Fixed seed so dataset generation and shuffling are reproducible.
rng = np.random.RandomState(199)
@@ -21,39 +29,35 @@ def is_float(s):
def xgb_get_weights(bst):
    """Extract the linear model coefficients from a booster's text dump.

    The gblinear dump is a whitespace-separated text blob; keep only the
    tokens that parse as floats (the bias followed by the per-feature
    weights), in dump order, as a numpy array so callers can slice it.
    """
    return np.array([float(s) for s in bst.get_dump()[0].split() if is_float(s)])
# Check gradient/subgradient = 0
def check_least_squares_solution(X, y, pred, tol, reg_alpha, reg_lambda, weights):
    """Assert that `weights` satisfy the elastic-net KKT optimality conditions.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
        Training features.
    y, pred : array-like, shape (n_samples,)
        True targets and model predictions.
    tol : float
        Relative/absolute tolerance for the nonzero-weight gradient check.
    reg_alpha, reg_lambda : float
        Per-sample L1 and L2 regularization strengths.
    weights : array-like, shape (n_features,)
        Fitted coefficients (bias excluded).

    Raises
    ------
    AssertionError
        If any coordinate violates the (sub)gradient optimality condition.
    """
    # The regularization parameters are given per-sample; scale them up to
    # the unnormalized least-squares objective.
    reg_alpha = reg_alpha * len(y)
    reg_lambda = reg_lambda * len(y)
    r = np.subtract(y, pred)
    g = np.subtract(X.T.dot(r), np.multiply(reg_lambda, weights))
    for i, w in enumerate(weights):
        if w == 0.0:
            # Subgradient condition: |g_i| <= alpha when the weight is zero.
            assert abs(g[i]) <= reg_alpha
        else:
            # Stationarity: g_i == sign(w_i) * alpha at the optimum.
            assert np.isclose(g[i], np.sign(w) * reg_alpha, rtol=tol, atol=tol)
def check_ElasticNet(X, y, pred, tol, reg_alpha, reg_lambda, weights):
    """Cross-check an xgboost linear fit against sklearn's ElasticNet.

    Fits sklearn's ElasticNet on the same data and asserts that both the
    coefficients and the in-sample predictions agree within `tol`.
    """
    # sklearn parameterizes elastic net by total strength plus an L1 ratio,
    # whereas the caller supplies separate L1/L2 strengths.
    total = reg_alpha + reg_lambda
    reference = ElasticNet(alpha=total, l1_ratio=reg_alpha / total)
    reference.fit(X, y)
    reference_pred = reference.predict(X)
    assert np.isclose(weights, reference.coef_, rtol=tol, atol=tol).all()
    assert np.isclose(reference_pred, pred, rtol=tol, atol=tol).all()
def train_diabetes(param_in):
    """Train gblinear on the scaled diabetes regression set and verify the
    solution against sklearn's ElasticNet."""
    from sklearn import datasets
    data = datasets.load_diabetes()
    # Standardize features so regularization acts uniformly across columns.
    X = scale(data.data)
    dtrain = xgb.DMatrix(X, label=data.target)
    param = {}
    param.update(param_in)
    bst = xgb.train(param, dtrain, num_rounds)
    xgb_pred = bst.predict(dtrain)
    # Drop the leading bias term before comparing coefficients.
    check_ElasticNet(X, data.target, xgb_pred, 1e-2,
                     param['alpha'], param['lambda'],
                     xgb_get_weights(bst)[1:])
def train_breast_cancer(param_in):
from sklearn import metrics, datasets
data = datasets.load_breast_cancer()
dtrain = xgb.DMatrix(data.data, label=data.target)
X = scale(data.data)
dtrain = xgb.DMatrix(X, label=data.target)
param = {'objective': 'binary:logistic'}
param.update(param_in)
bst = xgb.train(param, dtrain, num_rounds)
@@ -63,9 +67,8 @@ def train_breast_cancer(param_in):
def train_classification(param_in):
from sklearn import metrics, datasets
X, y = datasets.make_classification(random_state=rng,
scale=100) # Scale is necessary otherwise regularisation parameters will force all coefficients to 0
X, y = datasets.make_classification(random_state=rng)
X = scale(X)
dtrain = xgb.DMatrix(X, label=y)
param = {'objective': 'binary:logistic'}
param.update(param_in)
@@ -76,10 +79,11 @@ def train_classification(param_in):
def train_classification_multi(param_in):
from sklearn import metrics, datasets
num_class = 3
X, y = datasets.make_classification(n_samples=10, random_state=rng, scale=100, n_classes=num_class, n_informative=4,
X, y = datasets.make_classification(n_samples=100, random_state=rng,
n_classes=num_class, n_informative=4,
n_features=4, n_redundant=0)
X = scale(X)
dtrain = xgb.DMatrix(X, label=y)
param = {'objective': 'multi:softmax', 'num_class': num_class}
param.update(param_in)
@@ -90,20 +94,42 @@ def train_classification_multi(param_in):
def train_boston(param_in):
    """Train gblinear on the scaled boston housing set and verify the
    solution against sklearn's ElasticNet."""
    from sklearn import datasets
    data = datasets.load_boston()
    # Standardize features so regularization acts uniformly across columns.
    X = scale(data.data)
    dtrain = xgb.DMatrix(X, label=data.target)
    param = {}
    param.update(param_in)
    bst = xgb.train(param, dtrain, num_rounds)
    xgb_pred = bst.predict(dtrain)
    # Drop the leading bias term before comparing coefficients.
    check_ElasticNet(X, data.target, xgb_pred, 1e-2,
                     param['alpha'], param['lambda'],
                     xgb_get_weights(bst)[1:])
def train_external_mem(param_in):
    """Train on boston data twice -- in-core and via the external-memory CSV
    path -- and check that the two prediction vectors agree closely."""
    data = datasets.load_boston()
    X = scale(data.data)
    y = data.target
    param = {}
    param.update(param_in)
    # In-core baseline fit.
    dtrain = xgb.DMatrix(X, label=y)
    bst = xgb.train(param, dtrain, num_rounds)
    xgb_pred = bst.predict(dtrain)
    # Round-trip the same data through a CSV file loaded with an
    # external-memory cache prefix ('#tmptmp_').
    np.savetxt('tmptmp_1234.csv', np.hstack((y.reshape(len(y), 1), X)),
               delimiter=',', fmt='%10.9f')
    dtrain = xgb.DMatrix('tmptmp_1234.csv?format=csv&label_column=0#tmptmp_')
    bst = xgb.train(param, dtrain, num_rounds)
    xgb_pred_ext = bst.predict(dtrain)
    assert np.abs(xgb_pred_ext - xgb_pred).max() < 1e-3
    # Release the DMatrix before deleting its cache files from disk.
    del dtrain, bst
    for leftover in glob.glob("tmptmp_*"):
        os.remove(leftover)
# Enumerates all permutations of variable parameters
def assert_updater_accuracy(linear_updater, variable_param):
param = {'booster': 'gblinear', 'updater': linear_updater, 'tolerance': 1e-8}
param = {'booster': 'gblinear', 'updater': linear_updater, 'eta': 1.,
'top_k': 10, 'tolerance': 1e-5, 'nthread': 2}
names = sorted(variable_param)
combinations = it.product(*(variable_param[Name] for Name in names))
@@ -118,16 +144,17 @@ def assert_updater_accuracy(linear_updater, variable_param):
train_classification(param_tmp)
train_classification_multi(param_tmp)
train_breast_cancer(param_tmp)
train_external_mem(param_tmp)
class TestLinear(unittest.TestCase):
    """End-to-end accuracy tests for the gblinear updaters."""

    def test_coordinate(self):
        tm._skip_if_no_sklearn()
        # Small regularization values keep most coefficients away from zero.
        variable_param = {'alpha': [.005, .1], 'lambda': [.005],
                          'feature_selector': ['cyclic', 'shuffle', 'greedy', 'thrifty']}
        assert_updater_accuracy('coord_descent', variable_param)

    def test_shotgun(self):
        tm._skip_if_no_sklearn()
        variable_param = {'alpha': [.005, .1], 'lambda': [.005, .1]}
        assert_updater_accuracy('shotgun', variable_param)