* Revert "Add scikit-learn as dependency for doc build (#3677)" This reverts commit 308f664ade0547242608e21f6198c895415f03da. * Revert "Add scikit-learn tests (#3674)" This reverts commit d176a0fbc8165e3afe3e42ff464ab7b253211555.
parent 8dac0d1009
commit 5a8bbb39a1
@@ -41,7 +41,7 @@ sys.path.insert(0, curr_path)
 # -- mock out modules
 import mock
-MOCK_MODULES = ['scipy', 'scipy.sparse', 'pandas']
+MOCK_MODULES = ['scipy', 'scipy.sparse', 'sklearn', 'pandas']
 for mod_name in MOCK_MODULES:
     sys.modules[mod_name] = mock.Mock()
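The hunk above restores 'sklearn' to the list of modules that the Sphinx conf.py replaces with mock objects, so the documentation can be built without scikit-learn installed. A minimal, self-contained sketch of how that mocking pattern works (it uses unittest.mock for portability; the conf.py hunk above imports the external `mock` package, which behaves the same here):

# Sketch only: registering Mock objects in sys.modules lets code that does
# `import sklearn` be imported even though scikit-learn is not installed.
import sys
from unittest import mock

for mod_name in ['scipy', 'scipy.sparse', 'sklearn', 'pandas']:
    sys.modules[mod_name] = mock.Mock()

import sklearn          # resolves to the Mock registered above
print(type(sklearn))    # a Mock instance, not the real package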
@@ -6,4 +6,3 @@ sh>=1.12.14
 matplotlib>=2.1
 graphviz
 numpy
-scikit-learn
@@ -9,6 +9,7 @@ import ctypes
 import os
 import re
 import sys

 import numpy as np
 import scipy.sparse
@@ -373,15 +374,11 @@ class DMatrix(object):
         if label is not None:
             if isinstance(label, np.ndarray):
                 self.set_label_npy2d(label)
-            elif getattr(label, '__array__', None) is not None:
-                self.set_label_npy2d(label.__array__())
             else:
                 self.set_label(label)
         if weight is not None:
             if isinstance(weight, np.ndarray):
                 self.set_weight_npy2d(weight)
-            elif getattr(weight, '__array__', None) is not None:
-                self.set_weight_npy2d(weight.__array__())
             else:
                 self.set_weight(weight)
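With the elif branches above removed, only genuine numpy.ndarray labels and weights go through set_label_npy2d/set_weight_npy2d; an object that merely exposes __array__ falls back to set_label/set_weight. A hedged sketch of converting such an object explicitly before building the DMatrix (the WrappedLabels class is purely illustrative):

import numpy as np
import xgboost as xgb  # assumes the xgboost Python package is importable

class WrappedLabels(object):
    """Illustrative stand-in for an object that only exposes __array__."""
    def __init__(self, values):
        self._values = np.asarray(values, dtype=np.float32)
    def __array__(self):
        return self._values

X = np.random.rand(10, 3)
y = WrappedLabels([0, 1, 0, 1, 1, 0, 1, 0, 0, 1])

# After the revert, DMatrix no longer special-cases __array__, so convert up front:
dtrain = xgb.DMatrix(X, label=np.asarray(y))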
@@ -431,7 +428,7 @@ class DMatrix(object):
         and type if memory use is a concern.
         """
         if len(mat.shape) != 2:
-            raise ValueError('Input numpy.ndarray must be 2 dimensional. Reshape your data.')
+            raise ValueError('Input numpy.ndarray must be 2 dimensional')
         # flatten the array by rows and ensure it is float32.
         # we try to avoid data copies if possible (reshape returns a view when possible
         # and we explicitly tell np.array to try and avoid copying)
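Only the wording of the error changes above; the fix on the caller's side stays the same: a 1-D array must be reshaped to 2-D before it reaches the DMatrix constructor. A small illustrative sketch (array contents are arbitrary):

import numpy as np
import xgboost as xgb  # assumes the xgboost Python package is importable

flat = np.arange(12, dtype=np.float32)   # shape (12,), would raise the ValueError above
mat = flat.reshape(4, 3)                 # shape (4, 3); reshape returns a view when possible
dtrain = xgb.DMatrix(mat, label=np.array([0, 1, 0, 1], dtype=np.float32))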
@@ -1,12 +1,10 @@
 # coding: utf-8
-# pylint: disable=too-many-arguments, too-many-locals, invalid-name, fixme, E0012, R0912, C0302
+# pylint: disable=too-many-arguments, too-many-locals, invalid-name, fixme, E0012, R0912
 """Scikit-Learn Wrapper interface for XGBoost."""
 from __future__ import absolute_import

 import numpy as np
-import warnings
-from sklearn.exceptions import NotFittedError
-from sklearn.exceptions import DataConversionWarning
 from .core import Booster, DMatrix, XGBoostError
 from .training import train
@@ -16,16 +14,6 @@ from .compat import (SKLEARN_INSTALLED, XGBModelBase,
                      XGBClassifierBase, XGBRegressorBase, XGBLabelEncoder)


-def _check_label_1d(label):
-    """Produce warning if label is not 1D array"""
-    label = np.array(label, copy=False, dtype=np.float32)
-    if len(label.shape) == 2 and label.shape[1] == 1:
-        warnings.warn('A column-vector y was passed when a 1d array was'
-                      ' expected. Please change the shape of y to '
-                      '(n_samples, ), for example using ravel().',
-                      DataConversionWarning, stacklevel=2)
-
-
 def _objective_decorator(func):
     """Decorate an objective function
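The removed helper only warned when a column-vector y was passed; with it gone, reshaping is left to the caller. A minimal sketch of the ravel() call the warning used to suggest:

import numpy as np

y_col = np.array([[0.0], [1.0], [1.0]])  # column vector, shape (3, 1)
y = y_col.ravel()                        # shape (3,), the layout the estimators expect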
@@ -190,7 +178,7 @@ class XGBModel(XGBModelBase):
        booster : a xgboost booster of underlying model
        """
        if self._Booster is None:
-            raise NotFittedError('need to call fit or load_model beforehand')
+            raise XGBoostError('need to call fit or load_model beforehand')
        return self._Booster

    def get_params(self, deep=False):
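After this change, code that detected an unfitted model by catching sklearn.exceptions.NotFittedError needs to catch XGBoostError instead. A hedged sketch (XGBRegressor is just an example estimator):

import xgboost as xgb  # assumes the xgboost Python package is importable

model = xgb.XGBRegressor()
try:
    model.get_booster()               # not fitted yet
except xgb.core.XGBoostError as err:
    print('model is not fitted:', err)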
@@ -298,7 +286,6 @@ class XGBModel(XGBModelBase):
            file name of stored xgb model or 'Booster' instance Xgb model to be
            loaded before training (allows training continuation).
        """
-        _check_label_1d(label=y)
        if sample_weight is not None:
            trainDmatrix = DMatrix(X, label=y, weight=sample_weight,
                                   missing=self.missing, nthread=self.n_jobs)
@@ -549,7 +536,6 @@ class XGBClassifier(XGBModel, XGBClassifierBase):
            file name of stored xgb model or 'Booster' instance Xgb model to be
            loaded before training (allows training continuation).
        """
-        _check_label_1d(label=y)
        evals_result = {}
        self.classes_ = np.unique(y)
        self.n_classes_ = len(self.classes_)
@@ -926,7 +912,6 @@ class XGBRanker(XGBModel):
            file name of stored xgb model or 'Booster' instance Xgb model to be
            loaded before training (allows training continuation).
        """
-        _check_label_1d(label=y)
        # check if group information is provided
        if group is None:
            raise ValueError("group is required for ranking task")
@@ -203,18 +203,6 @@ def train(params, dtrain, num_boost_round=10, evals=(), obj=None, feval=None,
                      DeprecationWarning)
        callbacks.append(callback.reset_learning_rate(learning_rates))

-    nrow = dtrain.num_row()
-    ncol = dtrain.num_col()
-    if nrow <= 0:
-        raise ValueError('{} row(s) (shape=({}, {})) while a minimum of 1 is required.'
-                         .format(nrow, nrow, ncol))
-    if ncol <= 0:
-        raise ValueError('{} feature(s) (shape=({}, {})) while a minimum of 1 is required.'
-                         .format(ncol, nrow, ncol))
-    label = dtrain.get_label()
-    if nrow != len(label):
-        raise ValueError('Label must have same length as the number of data rows')
-
    return _train_internal(params, dtrain,
                           num_boost_round=num_boost_round,
                           evals=evals,
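The validation removed above can still be done in user code before calling train(); a hedged sketch under the assumption that the standard xgboost API is available (data and parameter values are illustrative):

import numpy as np
import xgboost as xgb

X = np.random.rand(8, 3)
y = np.array([0, 1, 0, 1, 1, 0, 1, 0], dtype=np.float32)
dtrain = xgb.DMatrix(X, label=y)

# Equivalent of the checks deleted from train():
assert dtrain.num_row() > 0 and dtrain.num_col() > 0, 'DMatrix must be non-empty'
assert dtrain.num_row() == len(dtrain.get_label()), 'label length must match row count'

booster = xgb.train({'objective': 'binary:logistic'}, dtrain, num_boost_round=2)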