add feature_importances_ property for XGBClassifier

Pavel Gladkov 2016-02-08 23:15:18 +03:00
parent 72961d914b
commit 31c0408cb4


@@ -326,6 +326,8 @@ class XGBClassifier(XGBModel, XGBClassifierBase):
         else:
             evals = ()
 
+        self._features_count = X.shape[1]
+
         self._le = LabelEncoder().fit(y)
         training_labels = self._le.transform(y)
@@ -414,6 +416,22 @@ class XGBClassifier(XGBModel, XGBClassifierBase):
         return evals_result
 
+    @property
+    def feature_importances_(self):
+        """
+        Returns
+        -------
+        feature_importances_ : array of shape = [n_features]
+        """
+        # f-scores are keyed by booster feature names such as 'f0', 'f12'
+        fs = self.booster().get_fscore()
+        keys = [int(k.replace('f', '')) for k in fs.keys()]
+        fs_dict = dict(zip(keys, fs.values()))
+        # features that never appear in a split keep a default importance of 0
+        all_features_dict = dict.fromkeys(range(0, self._features_count), 0)
+        all_features_dict.update(fs_dict)
+        return np.array(list(all_features_dict.values()))
+
 
 class XGBRegressor(XGBModel, XGBRegressorBase):
     # pylint: disable=missing-docstring
     __doc__ = """Implementation of the scikit-learn API for XGBoost regression.