diff --git a/doc/python/python_intro.rst b/doc/python/python_intro.rst
index f9c50da91..4255cba4e 100644
--- a/doc/python/python_intro.rst
+++ b/doc/python/python_intro.rst
@@ -161,6 +161,10 @@ A saved model can be loaded as follows:
 
   bst = xgb.Booster({'nthread': 4})  # init model
   bst.load_model('model.bin')  # load data
 
+Methods including ``update`` and ``boost`` from ``xgboost.Booster`` are designed
+for internal usage only. The wrapper function ``xgboost.train`` performs the
+necessary pre-configuration, including setting up caches and other parameters.
+
 Early Stopping
 --------------
 If you have a validation set, you can use early stopping to find the optimal number of boosting rounds.
@@ -215,4 +219,3 @@ When you use ``IPython``, you can use the :py:meth:`xgboost.to_graphviz` functio
 .. code-block:: python
 
   xgb.to_graphviz(bst, num_trees=2)
-
diff --git a/python-package/xgboost/core.py b/python-package/xgboost/core.py
index 3bb7299ea..4693cc419 100644
--- a/python-package/xgboost/core.py
+++ b/python-package/xgboost/core.py
@@ -1041,8 +1041,8 @@ class Booster(object):
             _check_call(_LIB.XGBoosterSetParam(self.handle, c_str(key), c_str(str(val))))
 
     def update(self, dtrain, iteration, fobj=None):
-        """
-        Update for one iteration, with objective function calculated internally.
+        """Update for one iteration, with objective function calculated
+        internally. This function should not be called directly by users.
 
         Parameters
         ----------
@@ -1052,6 +1052,7 @@
             Current iteration number.
         fobj : function
             Customized objective function.
+
         """
         if not isinstance(dtrain, DMatrix):
             raise TypeError('invalid training matrix: {}'.format(type(dtrain).__name__))
@@ -1066,8 +1067,9 @@
             self.boost(dtrain, grad, hess)
 
     def boost(self, dtrain, grad, hess):
-        """
-        Boost the booster for one iteration, with customized gradient statistics.
+        """Boost the booster for one iteration, with customized gradient
+        statistics. Like :func:`xgboost.core.Booster.update`, this
+        function should not be called directly by users.
 
         Parameters
         ----------
@@ -1077,6 +1079,7 @@
             The first order of gradient.
         hess : list
             The second order of gradient.
+
         """
         if len(grad) != len(hess):
            raise ValueError('grad / hess length mismatch: {} / {}'.format(len(grad), len(hess)))
diff --git a/src/learner.cc b/src/learner.cc
index 468beee30..16f0b73a5 100644
--- a/src/learner.cc
+++ b/src/learner.cc
@@ -703,6 +703,8 @@ class LearnerImpl : public Learner {
     if (num_feature > mparam_.num_feature) {
       mparam_.num_feature = num_feature;
     }
+    CHECK_NE(mparam_.num_feature, 0)
+        << "0 features are supplied. Are you using the raw Booster interface?";
     // setup
     cfg_["num_feature"] = common::ToString(mparam_.num_feature);
     CHECK(obj_ == nullptr && gbm_ == nullptr);
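
For reference, the entry point the new documentation paragraph points to is ``xgboost.train``. A minimal sketch of the recommended path; the toy data and parameter values here are illustrative assumptions, not part of the patch:

.. code-block:: python

  import numpy as np
  import xgboost as xgb

  # Illustrative toy data -- any labeled matrix works.
  X = np.random.rand(100, 10)
  y = np.random.randint(2, size=100)
  dtrain = xgb.DMatrix(X, label=y)

  # xgboost.train constructs the Booster, registers dtrain as a cache
  # (which is how the learner obtains its feature count), and calls
  # Booster.update internally once per boosting round.
  params = {'objective': 'binary:logistic', 'nthread': 4}
  bst = xgb.train(params, dtrain, num_boost_round=10)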
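The ``fobj`` parameter documented on ``update`` corresponds to the custom objective that ``xgboost.train`` accepts: ``update`` evaluates it to obtain first- and second-order gradients, then delegates to ``boost``. A sketch of that flow using a logistic objective; the function body follows the standard custom-objective demo pattern and is an assumption, not taken from this patch:

.. code-block:: python

  import numpy as np
  import xgboost as xgb

  def logregobj(preds, dtrain):
      # Gradient pair for logistic loss: what update() computes via fobj
      # before handing grad/hess to boost().
      labels = dtrain.get_label()
      preds = 1.0 / (1.0 + np.exp(-preds))  # sigmoid of the raw margin
      grad = preds - labels                 # first-order gradient
      hess = preds * (1.0 - preds)          # second-order gradient
      return grad, hess

  X = np.random.rand(100, 10)
  y = np.random.randint(2, size=100)
  dtrain = xgb.DMatrix(X, label=y)

  # Passing obj= is the supported route; xgboost.train forwards it to
  # Booster.update as fobj, and update calls Booster.boost with the result.
  bst = xgb.train({'nthread': 4}, dtrain, num_boost_round=10, obj=logregobj)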
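The new ``CHECK_NE`` in ``learner.cc`` targets exactly the raw-Booster misuse the documentation warns about: a ``Booster`` configured with no cached ``DMatrix`` never learns a feature count, so ``mparam_.num_feature`` stays 0. A sketch of the failing and working patterns; that the check fires on the first ``update`` call is my reading of the configure path, so treat it as an assumption:

.. code-block:: python

  import numpy as np
  import xgboost as xgb

  X = np.random.rand(100, 10)
  y = np.random.randint(2, size=100)
  dtrain = xgb.DMatrix(X, label=y)

  bad = xgb.Booster({'nthread': 4})    # no cache: num_feature stays 0,
  # bad.update(dtrain, 0)              # so this should trip the new check

  good = xgb.Booster({'nthread': 4}, [dtrain])  # cache supplies the feature count
  good.update(dtrain, 0)               # works, though xgboost.train is preferred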