From 19b59938b7bf5bcc29b200268a5dd44e470b0705 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan
Date: Tue, 15 Aug 2023 02:27:58 +0800
Subject: [PATCH] Convert input to str for hypothesis note. (#9480)

---
 tests/python-gpu/test_gpu_updaters.py        |  8 ++++----
 tests/python/test_updaters.py                | 12 ++++++------
 .../test_gpu_with_dask/test_gpu_with_dask.py |  2 +-
 .../test_with_dask/test_with_dask.py         |  2 +-
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/tests/python-gpu/test_gpu_updaters.py b/tests/python-gpu/test_gpu_updaters.py
index 653a99f3a..587210cf2 100644
--- a/tests/python-gpu/test_gpu_updaters.py
+++ b/tests/python-gpu/test_gpu_updaters.py
@@ -36,7 +36,7 @@ class TestGPUUpdatersMulti:
         param["tree_method"] = "gpu_hist"
         param = dataset.set_params(param)
         result = train_result(param, dataset.get_dmat(), num_rounds)
-        note(result)
+        note(str(result))
         assert tm.non_increasing(result["train"][dataset.metric])


@@ -90,12 +90,12 @@ class TestGPUUpdaters:
     def test_sparse(self, dataset):
         param = {"tree_method": "hist", "max_bin": 64}
         hist_result = train_result(param, dataset.get_dmat(), 16)
-        note(hist_result)
+        note(str(hist_result))
         assert tm.non_increasing(hist_result["train"][dataset.metric])

         param = {"tree_method": "gpu_hist", "max_bin": 64}
         gpu_hist_result = train_result(param, dataset.get_dmat(), 16)
-        note(gpu_hist_result)
+        note(str(gpu_hist_result))
         assert tm.non_increasing(gpu_hist_result["train"][dataset.metric])

         np.testing.assert_allclose(
@@ -221,7 +221,7 @@ class TestGPUUpdaters:
             dataset.get_device_dmat(max_bin=param.get("max_bin", None)),
             num_rounds,
         )
-        note(result)
+        note(str(result))
         assert tm.non_increasing(result["train"][dataset.metric], tolerance=1e-3)

     @given(
diff --git a/tests/python/test_updaters.py b/tests/python/test_updaters.py
index 3fa32660d..c4c0de032 100644
--- a/tests/python/test_updaters.py
+++ b/tests/python/test_updaters.py
@@ -58,7 +58,7 @@ class TestTreeMethodMulti:
         param.update(hist_param)
         param.update(cache_param)
         result = train_result(param, dataset.get_dmat(), num_rounds)
-        note(result)
+        note(str(result))
         assert tm.non_increasing(result["train"][dataset.metric])

     @given(
@@ -84,7 +84,7 @@ class TestTreeMethodMulti:
         param.update(hist_param)
         param.update(cache_param)
         result = train_result(param, dataset.get_dmat(), num_rounds)
-        note(result)
+        note(str(result))
         assert tm.non_increasing(result["train"][dataset.metric])


@@ -125,7 +125,7 @@ class TestTreeMethod:
         param.update(hist_param)
         param.update(cache_param)
         result = train_result(param, dataset.get_dmat(), num_rounds)
-        note(result)
+        note(str(result))
         assert tm.non_increasing(result["train"][dataset.metric])

     @pytest.mark.skipif(**tm.no_sklearn())
@@ -172,7 +172,7 @@ class TestTreeMethod:
         param.update(hist_param)
         param.update(cache_param)
         result = train_result(param, dataset.get_dmat(), num_rounds)
-        note(result)
+        note(str(result))
         assert tm.non_increasing(result["train"][dataset.metric])

     def test_hist_categorical(self):
@@ -224,12 +224,12 @@ class TestTreeMethod:
     def test_sparse(self, dataset):
         param = {"tree_method": "hist", "max_bin": 64}
         hist_result = train_result(param, dataset.get_dmat(), 16)
-        note(hist_result)
+        note(str(hist_result))
         assert tm.non_increasing(hist_result['train'][dataset.metric])

         param = {"tree_method": "approx", "max_bin": 64}
         approx_result = train_result(param, dataset.get_dmat(), 16)
-        note(approx_result)
+        note(str(approx_result))
         assert tm.non_increasing(approx_result['train'][dataset.metric])

         np.testing.assert_allclose(
diff --git a/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py b/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
index 4cc934579..893582ee1 100644
--- a/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
+++ b/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
@@ -187,7 +187,7 @@ def run_gpu_hist(
         num_boost_round=num_rounds,
         evals=[(m, "train")],
     )["history"]["train"][dataset.metric]
-    note(history)
+    note(str(history))

     # See note on `ObjFunction::UpdateTreeLeaf`.
     update_leaf = dataset.name.endswith("-l1")
diff --git a/tests/test_distributed/test_with_dask/test_with_dask.py b/tests/test_distributed/test_with_dask/test_with_dask.py
index 5630e5f3e..664c0b89c 100644
--- a/tests/test_distributed/test_with_dask/test_with_dask.py
+++ b/tests/test_distributed/test_with_dask/test_with_dask.py
@@ -1484,7 +1484,7 @@ class TestWithDask:
             num_boost_round=num_rounds,
             evals=[(m, "train")],
         )["history"]
-        note(history)
+        note(str(history))
         history = history["train"][dataset.metric]

         def is_stump():
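For context on the motivation: Hypothesis' `note()` attaches a message to the falsifying example that is printed when a property-based test fails, and its documented parameter is a string, so passing dict-valued training results directly can trigger warnings or errors depending on the Hypothesis version; hence the `str()` wrapping above. The following standalone sketch is not part of the XGBoost test suite (the helper `fake_history` and the property it checks are illustrative assumptions) and only demonstrates the same `note(str(...))` pattern in isolation.

# Standalone sketch of the pattern applied in this patch (illustrative only).
from hypothesis import given, note, strategies as st


def fake_history(num_rounds: int) -> dict:
    # Stand-in for the dict returned by `train_result(...)` in the real tests:
    # a non-increasing "rmse" curve stored under ["train"].
    return {"train": {"rmse": [1.0 / (i + 1) for i in range(num_rounds)]}}


@given(st.integers(min_value=1, max_value=20))
def test_history_is_non_increasing(num_rounds: int) -> None:
    history = fake_history(num_rounds)
    # `note()` records this message alongside any failing example; converting
    # the dict with `str()` keeps the argument a plain string.
    note(str(history))
    rmse = history["train"]["rmse"]
    assert all(a >= b for a, b in zip(rmse, rmse[1:]))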