Fix checking booster. (#5505)

* Use `get_params()` instead of `getattr` intrinsic.
Jiaming Yuan 2020-04-10 12:21:21 +08:00 committed by GitHub
parent 6671b42dd4
commit dc2950fd90
2 changed files with 28 additions and 12 deletions

python-package/xgboost/sklearn.py

@@ -307,7 +307,7 @@ class XGBModel(XGBModelBase):
                                 np.iinfo(np.int32).max)
 
         def parse_parameter(value):
-            for t in (int, float):
+            for t in (int, float, str):
                 try:
                     ret = t(value)
                     return ret
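The hunk above adds `str` to the conversion chain, so configuration values that are neither integers nor floats (a booster name, for instance) are now returned as strings instead of falling through. A standalone sketch of the patched helper, for illustration only and not the library's public API:

# Standalone copy of the patched helper (illustration only):
def parse_parameter(value):
    # Try progressively looser conversions; str now guarantees a match
    # for non-numeric settings such as 'gbtree'.
    for t in (int, float, str):
        try:
            return t(value)
        except ValueError:
            continue
    return None

print(parse_parameter('10'))      # 10
print(parse_parameter('0.5'))     # 0.5
print(parse_parameter('gbtree'))  # gbtree (previously fell through)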
@@ -678,9 +678,9 @@ class XGBModel(XGBModelBase):
         feature_importances_ : array of shape ``[n_features]``
 
         """
-        if getattr(self, 'booster', None) is not None and self.booster not in {
-                'gbtree', 'dart'}:
-            raise AttributeError('Feature importance is not defined for Booster type {}'
-                                 .format(self.booster))
+        if self.get_params()['booster'] not in {'gbtree', 'dart'}:
+            raise AttributeError(
+                'Feature importance is not defined for Booster type {}'
+                .format(self.booster))
         b = self.get_booster()
         score = b.get_score(importance_type=self.importance_type)
@@ -703,11 +703,13 @@ class XGBModel(XGBModelBase):
         -------
         coef_ : array of shape ``[n_features]`` or ``[n_classes, n_features]``
         """
-        if getattr(self, 'booster', None) is not None and self.booster != 'gblinear':
-            raise AttributeError('Coefficients are not defined for Booster type {}'
-                                 .format(self.booster))
+        if self.get_params()['booster'] != 'gblinear':
+            raise AttributeError(
+                'Coefficients are not defined for Booster type {}'
+                .format(self.booster))
         b = self.get_booster()
-        coef = np.array(json.loads(b.get_dump(dump_format='json')[0])['weight'])
+        coef = np.array(json.loads(
+            b.get_dump(dump_format='json')[0])['weight'])
         # Logic for multiclass classification
         n_classes = getattr(self, 'n_classes_', None)
         if n_classes is not None:
@@ -732,8 +734,9 @@ class XGBModel(XGBModelBase):
         -------
         intercept_ : array of shape ``(1,)`` or ``[n_classes]``
         """
-        if getattr(self, 'booster', None) is not None and self.booster != 'gblinear':
-            raise AttributeError('Intercept (bias) is not defined for Booster type {}'
-                                 .format(self.booster))
+        if self.get_params()['booster'] != 'gblinear':
+            raise AttributeError(
+                'Intercept (bias) is not defined for Booster type {}'
+                .format(self.booster))
         b = self.get_booster()
         return np.array(json.loads(b.get_dump(dump_format='json')[0])['bias'])
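Taken together, the three hunks make `feature_importances_`, `coef_`, and `intercept_` consult the configured `booster` parameter through `get_params()` instead of probing the instance with `getattr`. A minimal sketch of the resulting behaviour, using random data chosen purely for illustration:

import numpy as np
import xgboost as xgb

X = np.random.rand(100, 4)
y = np.random.randint(2, size=100)

linear = xgb.XGBClassifier(booster='gblinear').fit(X, y)
print(linear.coef_.shape)   # linear boosters expose weights...
print(linear.intercept_)    # ...and a bias term

tree = xgb.XGBClassifier(booster='gbtree').fit(X, y)
print(tree.feature_importances_.shape)  # tree boosters expose importances
try:
    tree.coef_              # but no linear coefficients
except AttributeError as err:
    print(err)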

tests/python/test_with_sklearn.py

@@ -193,6 +193,19 @@ def test_feature_importances_gain():
     np.testing.assert_almost_equal(xgb_model.feature_importances_, exp)
 
 
+def test_select_feature():
+    from sklearn.datasets import load_digits
+    from sklearn.feature_selection import SelectFromModel
+    digits = load_digits(2)
+    y = digits['target']
+    X = digits['data']
+    cls = xgb.XGBClassifier()
+    cls.fit(X, y)
+    selector = SelectFromModel(cls, prefit=True, max_features=1)
+    X_selected = selector.transform(X)
+    assert X_selected.shape[1] == 1
+
+
 def test_num_parallel_tree():
     from sklearn.datasets import load_boston
     reg = xgb.XGBRegressor(n_estimators=4, num_parallel_tree=4,
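The new `test_select_feature` exercises the patched property through scikit-learn's `SelectFromModel`, a downstream consumer of `feature_importances_`. A hedged sketch of the same pattern outside the test suite; the feature count and `threshold=-np.inf` (which forces exactly `max_features` features to be kept) are arbitrary choices here:

import numpy as np
import xgboost as xgb
from sklearn.datasets import load_digits
from sklearn.feature_selection import SelectFromModel

# Binary subset of the digits data, as in the test above.
X, y = load_digits(n_class=2, return_X_y=True)

model = xgb.XGBClassifier().fit(X, y)

# Keep only the three highest-importance pixels.
selector = SelectFromModel(model, prefit=True, max_features=3,
                           threshold=-np.inf)
print(selector.transform(X).shape)  # (n_samples, 3)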