From 185e3f19169d79b0910b848de5fe8055434e675e Mon Sep 17 00:00:00 2001 From: Jiaming Yuan Date: Wed, 16 Oct 2019 05:54:09 -0400 Subject: [PATCH] Update GPU doc. (#4953) --- doc/gpu/index.rst | 15 +++++++++++---- python-package/xgboost/sklearn.py | 10 ++++++---- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/doc/gpu/index.rst b/doc/gpu/index.rst index 5c335e66a..1f8d2d9e2 100644 --- a/doc/gpu/index.rst +++ b/doc/gpu/index.rst @@ -46,6 +46,8 @@ Supported parameters +--------------------------------+--------------+ | ``max_bin`` | |tick| | +--------------------------------+--------------+ +| ``gamma`` | |tick| | ++--------------------------------+--------------+ | ``gpu_id`` | |tick| | +--------------------------------+--------------+ | ``n_gpus`` (deprecated) | |tick| | @@ -75,9 +77,13 @@ The GPU algorithms currently work with CLI, Python and R packages. See :doc:`/bu :caption: Python example param['gpu_id'] = 0 - param['max_bin'] = 16 param['tree_method'] = 'gpu_hist' +.. code-block:: python + :caption: With Scikit-Learn interface + + XGBRegressor(tree_method='gpu_hist', gpu_id=0) + Single Node Multi-GPU ===================== @@ -85,9 +91,10 @@ Single Node Multi-GPU Multi-node Multi-GPU Training ============================= -XGBoost supports fully distributed GPU training using `Dask -`_. See Python documentation :ref:`dask_api` and worked examples `here -`_. +XGBoost supports fully distributed GPU training using `Dask `_. To +get started, see our tutorial :doc:`/tutorials/dask` and worked examples `here +`_, as well as the Python documentation +:ref:`dask_api` for a complete reference. Objective functions diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py index 74d52393e..66efb01fa 100644 --- a/python-package/xgboost/sklearn.py +++ b/python-package/xgboost/sklearn.py @@ -101,8 +101,8 @@ class XGBModel(XGBModelBase): .. note:: - Using gblinear booster with shotgun updater is - nondeterministic as it uses Hogwild algorithm.
+ Using gblinear booster with shotgun updater is nondeterministic as + it uses Hogwild algorithm. missing : float, optional Value in the data which needs to be present as a missing value. If @@ -960,8 +960,10 @@ class XGBRanker(XGBModel): random_state : int Random number seed. - .. note:: Using gblinear booster with shotgun updater is - nondeterministic as it uses Hogwild algorithm. + .. note:: + + Using gblinear booster with shotgun updater is nondeterministic as + it uses Hogwild algorithm. missing : float, optional Value in the data which needs to be present as a missing value. If