diff --git a/src/tree/split_evaluator.cc b/src/tree/split_evaluator.cc
index 9254757e3..c48415ec8 100644
--- a/src/tree/split_evaluator.cc
+++ b/src/tree/split_evaluator.cc
@@ -499,8 +499,12 @@ class InteractionConstraint final : public SplitEvaluator {
   // permissible in a given node; returns false otherwise
   inline bool CheckInteractionConstraint(bst_uint featureid, bst_uint nodeid) const {
     // short-circuit if no constraint is specified
-    return (params_.interaction_constraints.empty()
-            || int_cont_[nodeid].count(featureid) > 0);
+    if (params_.interaction_constraints.empty()) {
+      return true;
+    }
+    CHECK_LT(nodeid, int_cont_.size()) << "Invariant violated: nodeid = "
+      << nodeid << ", int_cont_.size() = " << int_cont_.size();
+    return (int_cont_[nodeid].count(featureid) > 0);
   }
 };
 
diff --git a/tests/python/test_interaction_constraints.py b/tests/python/test_interaction_constraints.py
index c46cb2b7f..7cd50bf8b 100644
--- a/tests/python/test_interaction_constraints.py
+++ b/tests/python/test_interaction_constraints.py
@@ -2,6 +2,7 @@
 import numpy as np
 import xgboost
 import unittest
+from sklearn.metrics import accuracy_score
 
 dpath = 'demo/data/'
 rng = np.random.RandomState(1994)
@@ -19,7 +20,7 @@ class TestInteractionConstraints(unittest.TestCase):
         dtrain = xgboost.DMatrix(X, label=y)
 
         params = {'max_depth': 3, 'eta': 0.1, 'nthread': 2, 'verbosity': 0,
-                  'interaction_constraints': '[[0, 1]]'}
+                  'interaction_constraints': '[[0, 1]]', 'tree_method': 'hist'}
         num_boost_round = 100
         # Fit a model that only allows interaction between x1 and x2
         bst = xgboost.train(params, dtrain, num_boost_round, evals=[(dtrain, 'train')])
@@ -38,3 +39,20 @@
         assert np.all(np.abs(diff1 - diff1[0]) < 1e-4)
         diff2 = preds[2] - preds[1]
         assert np.all(np.abs(diff2 - diff2[0]) < 1e-4)
+
+    def test_training_accuracy(self):
+        dtrain = xgboost.DMatrix(dpath + 'agaricus.txt.train?indexing_mode=1')
+        dtest = xgboost.DMatrix(dpath + 'agaricus.txt.test?indexing_mode=1')
+        params = {'eta': 1, 'max_depth': 6, 'objective': 'binary:logistic',
+                  'tree_method': 'hist', 'interaction_constraints': '[[1,2],[2,3,4]]'}
+        num_boost_round = 5
+
+        params['grow_policy'] = 'lossguide'
+        bst = xgboost.train(params, dtrain, num_boost_round)
+        pred_dtest = (bst.predict(dtest) < 0.5)
+        assert accuracy_score(dtest.get_label(), pred_dtest) < 0.1
+
+        params['grow_policy'] = 'depthwise'
+        bst = xgboost.train(params, dtrain, num_boost_round)
+        pred_dtest = (bst.predict(dtest) < 0.5)
+        assert accuracy_score(dtest.get_label(), pred_dtest) < 0.1
diff --git a/tests/python/test_monotone_constraints.py b/tests/python/test_monotone_constraints.py
index aa2b0c9a4..22df36f63 100644
--- a/tests/python/test_monotone_constraints.py
+++ b/tests/python/test_monotone_constraints.py
@@ -1,7 +1,9 @@
 import numpy as np
 import xgboost as xgb
 import unittest
+from sklearn.metrics import accuracy_score
 
+dpath = 'demo/data/'
 
 def is_increasing(y):
     return np.count_nonzero(np.diff(y) < 0.0) == 0
@@ -97,3 +99,20 @@ class TestMonotoneConstraints(unittest.TestCase):
         )
 
         assert is_correctly_constrained(constrained_hist_method)
+
+    def test_training_accuracy(self):
+        dtrain = xgb.DMatrix(dpath + 'agaricus.txt.train?indexing_mode=1')
+        dtest = xgb.DMatrix(dpath + 'agaricus.txt.test?indexing_mode=1')
+        params = {'eta': 1, 'max_depth': 6, 'objective': 'binary:logistic',
+                  'tree_method': 'hist', 'monotone_constraints': '(1, 0)'}
+        num_boost_round = 5
+
+        params['grow_policy'] = 'lossguide'
+        bst = xgb.train(params, dtrain, num_boost_round)
+        pred_dtest = (bst.predict(dtest) < 0.5)
+        assert accuracy_score(dtest.get_label(), pred_dtest) < 0.1
+
+        params['grow_policy'] = 'depthwise'
+        bst = xgb.train(params, dtrain, num_boost_round)
+        pred_dtest = (bst.predict(dtest) < 0.5)
+        assert accuracy_score(dtest.get_label(), pred_dtest) < 0.1
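
Note on the new tests: `bst.predict()` returns probabilities, so `(bst.predict(dtest) < 0.5)` yields the *inverted* hard labels; `accuracy_score` of the true labels against those inverted labels is therefore the model's error rate, and asserting `< 0.1` demands at least 90% test accuracy even with the constraints active. Below is a minimal standalone sketch of that pattern, not part of the patch; it assumes the agaricus demo files are available under `demo/data/`, that scikit-learn is installed, and `pred_inverted` is just an illustrative name.

```python
import xgboost
from sklearn.metrics import accuracy_score

dpath = 'demo/data/'
dtrain = xgboost.DMatrix(dpath + 'agaricus.txt.train?indexing_mode=1')
dtest = xgboost.DMatrix(dpath + 'agaricus.txt.test?indexing_mode=1')

params = {'eta': 1, 'max_depth': 6, 'objective': 'binary:logistic',
          'tree_method': 'hist',
          # Only features within {1, 2} or {2, 3, 4} may interact in a tree.
          'interaction_constraints': '[[1,2],[2,3,4]]'}
bst = xgboost.train(params, dtrain, num_boost_round=5)

# Probabilities compared with "< 0.5" give inverted 0/1 labels, so this
# accuracy_score equals the error rate of the real predictions.
pred_inverted = (bst.predict(dtest) < 0.5)
assert accuracy_score(dtest.get_label(), pred_inverted) < 0.1
```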