Updated sklearn_examples.py for soon-to-be-deprecated modules (#2117)

commit f6f5003f79
parent e65564ba59
@@ -8,12 +8,8 @@ import pickle
 import xgboost as xgb
 
 import numpy as np
-try:
-    from sklearn.model_selection import KFold, train_test_split
-except:
-    from sklearn.cross_validation import KFold, train_test_split
+from sklearn.model_selection import KFold, train_test_split, GridSearchCV
 from sklearn.metrics import confusion_matrix, mean_squared_error
-from sklearn.grid_search import GridSearchCV
 from sklearn.datasets import load_iris, load_digits, load_boston
 
 rng = np.random.RandomState(31337)
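Note on the import hunk above: sklearn.cross_validation and sklearn.grid_search are the soon-to-be-deprecated modules; KFold, train_test_split and GridSearchCV all now live in sklearn.model_selection, so the try/except fallback is dropped. A minimal sketch of using the relocated GridSearchCV with an XGBoost estimator, assuming the same Boston Housing data as this script (the parameter grid here is illustrative only, not taken from this commit):

import xgboost as xgb
from sklearn.model_selection import GridSearchCV
from sklearn.datasets import load_boston

boston = load_boston()
# Illustrative search space; the grid used in sklearn_examples.py may differ.
clf = GridSearchCV(xgb.XGBRegressor(),
                   {'max_depth': [2, 4], 'n_estimators': [50, 100]},
                   verbose=1)
clf.fit(boston.data, boston.target)
print(clf.best_score_, clf.best_params_)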
@@ -22,8 +18,8 @@ print("Zeros and Ones from the Digits dataset: binary classification")
 digits = load_digits(2)
 y = digits['target']
 X = digits['data']
-kf = KFold(y.shape[0], n_folds=2, shuffle=True, random_state=rng)
-for train_index, test_index in kf:
+kf = KFold(n_splits=2, shuffle=True, random_state=rng)
+for train_index, test_index in kf.split(X):
     xgb_model = xgb.XGBClassifier().fit(X[train_index],y[train_index])
     predictions = xgb_model.predict(X[test_index])
     actuals = y[test_index]
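The KFold change in this hunk reflects the sklearn.model_selection API: the splitter no longer takes the sample count and n_folds in its constructor and is no longer directly iterable; instead n_splits is passed at construction and index pairs come from kf.split(X). A minimal standalone sketch of the new pattern, assuming the same binary digits data as above:

import numpy as np
from sklearn.datasets import load_digits
from sklearn.model_selection import KFold

rng = np.random.RandomState(31337)
digits = load_digits(2)                  # only the digits 0 and 1 -> binary task
X, y = digits['data'], digits['target']

kf = KFold(n_splits=2, shuffle=True, random_state=rng)   # no sample count needed
for train_index, test_index in kf.split(X):              # indices come from split()
    print(train_index.shape, test_index.shape)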
@@ -33,8 +29,8 @@ print("Iris: multiclass classification")
 iris = load_iris()
 y = iris['target']
 X = iris['data']
-kf = KFold(y.shape[0], n_folds=2, shuffle=True, random_state=rng)
-for train_index, test_index in kf:
+kf = KFold(n_splits=2, shuffle=True, random_state=rng)
+for train_index, test_index in kf.split(X):
     xgb_model = xgb.XGBClassifier().fit(X[train_index],y[train_index])
     predictions = xgb_model.predict(X[test_index])
     actuals = y[test_index]
@ -44,8 +40,8 @@ print("Boston Housing: regression")
|
|||||||
boston = load_boston()
|
boston = load_boston()
|
||||||
y = boston['target']
|
y = boston['target']
|
||||||
X = boston['data']
|
X = boston['data']
|
||||||
kf = KFold(y.shape[0], n_folds=2, shuffle=True, random_state=rng)
|
kf = KFold(n_splits=2, shuffle=True, random_state=rng)
|
||||||
for train_index, test_index in kf:
|
for train_index, test_index in kf.split(X):
|
||||||
xgb_model = xgb.XGBRegressor().fit(X[train_index],y[train_index])
|
xgb_model = xgb.XGBRegressor().fit(X[train_index],y[train_index])
|
||||||
predictions = xgb_model.predict(X[test_index])
|
predictions = xgb_model.predict(X[test_index])
|
||||||
actuals = y[test_index]
|
actuals = y[test_index]
|
||||||
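Each fold in the loops above ends with predictions and actuals, and the retained confusion_matrix and mean_squared_error imports suggest these are scored per fold. A hedged sketch of that scoring step for the regression loop, not taken verbatim from this file:

import numpy as np
import xgboost as xgb
from sklearn.datasets import load_boston
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import KFold

rng = np.random.RandomState(31337)
boston = load_boston()
X, y = boston['data'], boston['target']

kf = KFold(n_splits=2, shuffle=True, random_state=rng)
for train_index, test_index in kf.split(X):
    xgb_model = xgb.XGBRegressor().fit(X[train_index], y[train_index])
    predictions = xgb_model.predict(X[test_index])
    actuals = y[test_index]
    # Per-fold error; the original script presumably prints something similar
    # (confusion_matrix would play the same role in the classification loops).
    print(mean_squared_error(actuals, predictions))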