Restore functionality of max_depth=0 in hist (#7551)
* Restore functionality of max_depth=0 in hist
* Add test case
parent 2db808021d
commit 20c0d60ac7
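In xgboost, setting max_depth=0 together with tree_method="hist" and grow_policy="lossguide" is meant to disable the depth limit and let max_leaves bound tree growth instead. With the old check `depth < param_.max_depth`, a max_depth of 0 could never be satisfied, so nodes were never expanded. Below is a minimal sketch of the user-facing scenario, assuming the sklearn wrapper; the parameter values are illustrative and mirror the new test added by this commit.

import numpy as np
import xgboost as xgb

# Four distinct training points: a single tree can only fit them exactly if
# it is allowed to keep splitting (depth is unbounded when max_depth=0).
x = np.array([[0], [1], [2], [3]])
y = np.array([0, 1, 2, 3])

model = xgb.XGBRegressor(
    n_estimators=1,           # one tree is enough for this illustration
    eta=1,                    # no shrinkage, so one round can fit the targets
    tree_method="hist",
    grow_policy="lossguide",  # tree size is bounded by max_leaves instead
    reg_lambda=0,
    max_leaves=128,
    max_depth=0,              # 0 = no depth limit
).fit(x, y)

# With the fix, the single tree reproduces the training targets.
print(model.predict(x))  # expected to be (approximately) [0, 1, 2, 3]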
@@ -263,7 +263,7 @@ void QuantileHistMaker::Builder<GradientSumT>::ExpandTree(
     ApplySplit<any_missing>(nodes_for_apply_split, gmat, column_matrix, p_tree);
     SplitSiblings(nodes_for_apply_split, &nodes_to_evaluate, p_tree);
 
-    if (depth < param_.max_depth) {
+    if (param_.max_depth == 0 || depth < param_.max_depth) {
       size_t i = 0;
       for (auto const &gidx : p_fmat->GetBatches<GHistIndexMatrix>(
                {GenericParameter::kCpuId, param_.max_bin})) {
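The added clause treats max_depth == 0 as "no limit" while preserving the previous behaviour for positive values. A small Python sketch of the guard's semantics (illustrative only, not part of the xgboost sources):

def can_expand(depth, max_depth):
    # max_depth == 0 means depth is unbounded; otherwise expand only while
    # the node is strictly shallower than the configured limit.
    return max_depth == 0 or depth < max_depth

assert can_expand(depth=5, max_depth=0)       # unlimited: always allowed
assert can_expand(depth=3, max_depth=6)       # below the limit
assert not can_expand(depth=6, max_depth=6)   # at the limit: stop expanding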
@@ -60,3 +60,18 @@ class TestTreeRegularization:
         # sum_hess = 1.0
         # 0.7 = 0.5 - (sum_grad - alpha * sgn(sum_grad)) / (sum_hess + lambda)
         assert_approx_equal(preds[0], 0.7)
+
+    def test_unlimited_depth(self):
+        x = np.array([[0], [1], [2], [3]])
+        y = np.array([0, 1, 2, 3])
+
+        model = xgb.XGBRegressor(
+            n_estimators=1,
+            eta=1,
+            tree_method="hist",
+            grow_policy="lossguide",
+            reg_lambda=0,
+            max_leaves=128,
+            max_depth=0,
+        ).fit(x, y)
+        assert np.array_equal(model.predict(x), y)