Implement feature score in GBTree. (#7041)
* Categorical data support.
* Eliminate text parsing during feature score computation.
@@ -1098,5 +1098,47 @@ XGB_DLL int XGBoosterGetStrFeatureInfo(BoosterHandle handle, const char *field,
  API_END();
}

XGB_DLL int XGBoosterFeatureScore(BoosterHandle handle,
                                  const char *json_config,
                                  xgboost::bst_ulong* out_length,
                                  const char ***out_features,
                                  float **out_scores) {
  API_BEGIN();
  CHECK_HANDLE();
  auto *learner = static_cast<Learner *>(handle);
  auto config = Json::Load(StringView{json_config});
  auto importance = get<String const>(config["importance_type"]);
  std::string feature_map_uri;
  if (!IsA<Null>(config["feature_map"])) {
    feature_map_uri = get<String const>(config["feature_map"]);
  }
  FeatureMap feature_map = LoadFeatureMap(feature_map_uri);

  auto& scores = learner->GetThreadLocal().ret_vec_float;
  std::vector<bst_feature_t> features;
  learner->CalcFeatureScore(importance, &features, &scores);

  auto n_features = learner->GetNumFeature();
  GenerateFeatureMap(learner, n_features, &feature_map);
  CHECK_LE(features.size(), n_features);

  auto& feature_names = learner->GetThreadLocal().ret_vec_str;
  feature_names.resize(features.size());
  auto& feature_names_c = learner->GetThreadLocal().ret_vec_charp;
  feature_names_c.resize(features.size());

  for (bst_feature_t i = 0; i < features.size(); ++i) {
    feature_names[i] = feature_map.Name(features[i]);
    feature_names_c[i] = feature_names[i].data();
  }

  CHECK_EQ(scores.size(), features.size());
  CHECK_EQ(scores.size(), feature_names.size());
  *out_length = scores.size();
  *out_scores = scores.data();
  *out_features = dmlc::BeginPtr(feature_names_c);
  API_END();
}

// force link rabit
static DMLC_ATTRIBUTE_UNUSED int XGBOOST_LINK_RABIT_C_API_ = RabitLinkTag();
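A minimal caller-side sketch, not part of the patch: the JSON keys mirror the ones parsed above, and `booster` is assumed to be a valid, trained handle obtained through the usual C API. Error checking is elided.

#include <cstdio>
#include <xgboost/c_api.h>

void PrintFeatureScore(BoosterHandle booster) {
  // feature_map is null, so names fall back to the booster's own metadata,
  // or to the generated defaults f0, f1, ...
  char const *config = R"({"importance_type": "gain", "feature_map": null})";
  bst_ulong n = 0;
  char const **names = nullptr;
  float *scores = nullptr;
  XGBoosterFeatureScore(booster, config, &n, &names, &scores);
  for (bst_ulong i = 0; i < n; ++i) {
    std::printf("%s: %f\n", names[i], scores[i]);
  }
}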
@@ -7,6 +7,8 @@
#include <algorithm>
#include <functional>
#include <vector>
#include <memory>
#include <string>

#include "xgboost/logging.h"
#include "xgboost/json.h"
@@ -181,5 +183,45 @@ class XGBoostAPIGuard {
    RestoreGPUAttribute();
  }
};

inline FeatureMap LoadFeatureMap(std::string const& uri) {
  FeatureMap feat;
  if (uri.size() != 0) {
    std::unique_ptr<dmlc::Stream> fs(dmlc::Stream::Create(uri.c_str(), "r"));
    dmlc::istream is(fs.get());
    feat.LoadText(is);
  }
  return feat;
}
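For reference, the text consumed by FeatureMap::LoadText here is the long-standing fmap format: one feature per line, whitespace-separated `<id> <name> <type>`, with `q` for quantitative, `i` for binary indicator, `int` for integer, and `float` for floating-point features. The column names below are made up for illustration:

0 age q
1 has_job i
2 num_children int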
// FIXME(jiamingy): Use this for model dump.
inline void GenerateFeatureMap(Learner const *learner,
                               size_t n_features, FeatureMap *out_feature_map) {
  auto &feature_map = *out_feature_map;
  auto maybe = [&](std::vector<std::string> const &values, size_t i,
                   std::string const &dft) {
    return values.empty() ? dft : values[i];
  };
  if (feature_map.Size() == 0) {
    // Use the feature names and types from booster.
    std::vector<std::string> feature_names;
    learner->GetFeatureNames(&feature_names);
    if (!feature_names.empty()) {
      CHECK_EQ(feature_names.size(), n_features) << "Incorrect number of feature names.";
    }
    std::vector<std::string> feature_types;
    learner->GetFeatureTypes(&feature_types);
    if (!feature_types.empty()) {
      CHECK_EQ(feature_types.size(), n_features) << "Incorrect number of feature types.";
    }
    for (size_t i = 0; i < n_features; ++i) {
      feature_map.PushBack(
          i,
          maybe(feature_names, i, "f" + std::to_string(i)).data(),
          maybe(feature_types, i, "q").data());
    }
  }
  CHECK_EQ(feature_map.Size(), n_features);
}
}  // namespace xgboost
#endif  // XGBOOST_C_API_C_API_UTILS_H_
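When no fmap URI is supplied and the booster carries no feature names or types, the fallback in GenerateFeatureMap produces the default map; for a three-feature booster that is:

0 f0 q
1 f1 q
2 f2 q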
@@ -9,6 +9,7 @@
#include <dmlc/omp.h>

#include <algorithm>
#include <vector>
#include <map>
#include <memory>
@@ -299,6 +300,58 @@ class GBTree : public GradientBooster {
    }
  }

  void FeatureScore(std::string const &importance_type,
                    std::vector<bst_feature_t> *features,
                    std::vector<float> *scores) const override {
    // Features with no importance don't appear in the return value, so we set
    // up another pair of dense vectors to hold the values during computation.
    std::vector<size_t> split_counts(this->model_.learner_model_param->num_feature, 0);
    std::vector<float> gain_map(this->model_.learner_model_param->num_feature, 0);
    auto add_score = [&](auto fn) {
      for (auto const &p_tree : model_.trees) {
        p_tree->WalkTree([&](bst_node_t nidx) {
          auto const &node = (*p_tree)[nidx];
          if (!node.IsLeaf()) {
            split_counts[node.SplitIndex()]++;
            fn(p_tree, nidx, node.SplitIndex());
          }
          return true;
        });
      }
    };

    if (importance_type == "weight") {
      // The score is simply the number of splits on the feature.
      add_score([&](auto const &p_tree, bst_node_t, bst_feature_t split) {
        gain_map[split] = split_counts[split];
      });
    }
    if (importance_type == "gain" || importance_type == "total_gain") {
      add_score([&](auto const &p_tree, bst_node_t nidx, bst_feature_t split) {
        gain_map[split] += p_tree->Stat(nidx).loss_chg;
      });
    }
    if (importance_type == "cover" || importance_type == "total_cover") {
      add_score([&](auto const &p_tree, bst_node_t nidx, bst_feature_t split) {
        gain_map[split] += p_tree->Stat(nidx).sum_hess;
      });
    }
    if (importance_type == "gain" || importance_type == "cover") {
      // Average the accumulated total over the number of splits per feature.
      for (size_t i = 0; i < gain_map.size(); ++i) {
        gain_map[i] /= std::max(1.0f, static_cast<float>(split_counts[i]));
      }
    }

    features->clear();
    scores->clear();
    for (size_t i = 0; i < split_counts.size(); ++i) {
      if (split_counts[i] != 0) {
        features->push_back(i);
        scores->push_back(gain_map[i]);
      }
    }
  }

  void PredictInstance(const SparsePage::Inst& inst,
                       std::vector<bst_float>* out_preds,
                       uint32_t layer_begin, uint32_t layer_end) override {
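As a concrete check of how the averaged and total variants relate: a feature used in 4 splits whose loss_chg values sum to 8.0 has weight 4, total_gain 8.0, and gain 8.0 / 4 = 2.0; cover and total_cover relate the same way through sum_hess.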
@@ -1193,6 +1193,30 @@ class LearnerImpl : public LearnerIO {
    *out_preds = &out_predictions.predictions;
  }

  void CalcFeatureScore(std::string const &importance_type,
                        std::vector<bst_feature_t> *features,
                        std::vector<float> *scores) override {
    this->Configure();
    std::vector<std::string> allowed_importance_type = {
      "weight", "total_gain", "total_cover", "gain", "cover"
    };
    if (std::find(allowed_importance_type.begin(),
                  allowed_importance_type.end(),
                  importance_type) == allowed_importance_type.end()) {
      std::stringstream ss;
      ss << "importance_type mismatch, got: `" << importance_type
         << "`, expected one of ";
      for (size_t i = 0; i < allowed_importance_type.size(); ++i) {
        ss << "`" << allowed_importance_type[i] << "`";
        if (i != allowed_importance_type.size() - 1) {
          ss << ", ";
        }
      }
      LOG(FATAL) << ss.str();
    }
    gbm_->FeatureScore(importance_type, features, scores);
  }

  const std::map<std::string, std::string>& GetConfigurationArguments() const override {
    return cfg_;
  }
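Passing an unknown type such as `foo` therefore fails with: importance_type mismatch, got: `foo`, expected one of `weight`, `total_gain`, `total_cover`, `gain`, `cover`.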