From bdb291f1c216025d75b2155b29e2c5396195741c Mon Sep 17 00:00:00 2001 From: Jiaming Yuan Date: Thu, 11 Aug 2022 00:30:42 +0800 Subject: [PATCH] [doc] Clarification for feature importance. (#8151) --- python-package/xgboost/sklearn.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py index 4d7e5a624..98ad43af6 100644 --- a/python-package/xgboost/sklearn.py +++ b/python-package/xgboost/sklearn.py @@ -1259,8 +1259,12 @@ class XGBModel(XGBModelBase): @property def feature_importances_(self) -> np.ndarray: - """ - Feature importances property, return depends on `importance_type` parameter. + """Feature importances property, return depends on `importance_type` + parameter. When the model is trained with a multi-class/multi-label/multi-target dataset, + the feature importance is "averaged" over all targets. The "average" is defined + based on the importance type. For instance, if the importance type is + "total_gain", then the score is the sum of the loss changes for each split from all + trees. Returns -------