Specify src path for isort. (#8867)

parent 4d665b3fb0
commit 6a892ce281
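The change is mechanical: passing --src to isort marks the repository's python-package directory as a source root, so xgboost is classified as a first-party package and every "import xgboost ..." statement moves out of the third-party block into its own group. A minimal sketch of the resulting layout, using the imports from the first demo below (dask and xgboost are assumed to be installed):

# Import grouping produced once python-package/ is registered via --src:
# standard library first, then third-party, then first-party (xgboost).
import os

import dask.dataframe as dd
from dask.distributed import Client, LocalCluster

import xgboost as xgb
from xgboost.dask import DaskDMatrix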
@@ -8,9 +8,9 @@ import os
 
 import dask.dataframe as dd
 from dask.distributed import Client, LocalCluster
-from xgboost.dask import DaskDMatrix
 
 import xgboost as xgb
+from xgboost.dask import DaskDMatrix
 
 
 def main(client):
@@ -5,9 +5,9 @@ Example of training with Dask on CPU
 """
 from dask import array as da
 from dask.distributed import Client, LocalCluster
-from xgboost.dask import DaskDMatrix
 
 import xgboost as xgb
+from xgboost.dask import DaskDMatrix
 
 
 def main(client):
@@ -6,9 +6,9 @@ import numpy as np
 from dask.distributed import Client, LocalCluster
 from dask_ml.datasets import make_regression
 from dask_ml.model_selection import train_test_split
-from xgboost.dask import DaskDMatrix
 
 import xgboost as xgb
+from xgboost.dask import DaskDMatrix
 
 
 def probability_for_going_backward(epoch):
@@ -7,10 +7,10 @@ from dask import array as da
 from dask import dataframe as dd
 from dask.distributed import Client
 from dask_cuda import LocalCUDACluster
-from xgboost.dask import DaskDMatrix
 
 import xgboost as xgb
 from xgboost import dask as dxgb
+from xgboost.dask import DaskDMatrix
 
 
 def using_dask_matrix(client: Client, X, y):
@@ -10,6 +10,7 @@ from pyspark.ml.linalg import Vectors
 from pyspark.sql import SparkSession
 from pyspark.sql.functions import rand
 from sklearn.model_selection import train_test_split
+
 from xgboost.spark import SparkXGBClassifier, SparkXGBRegressor
 
 spark = SparkSession.builder.master("local[*]").getOrCreate()
@@ -4,7 +4,6 @@ Example of training controller with NVFlare
 """
 import multiprocessing
 
-import xgboost.federated
 from nvflare.apis.client import Client
 from nvflare.apis.fl_context import FLContext
 from nvflare.apis.impl.controller import Controller, Task
@@ -12,6 +11,8 @@ from nvflare.apis.shareable import Shareable
 from nvflare.apis.signal import Signal
 from trainer import SupportedTasks
 
+import xgboost.federated
+
 
 class XGBoostController(Controller):
     def __init__(self, port: int, world_size: int, server_key_path: str,
@@ -34,12 +34,12 @@ from pyspark.sql.types import (
     ShortType,
 )
 from scipy.special import expit, softmax  # pylint: disable=no-name-in-module
-from xgboost.compat import is_cudf_available
-from xgboost.core import Booster
-from xgboost.training import train as worker_train
 
 import xgboost
 from xgboost import XGBClassifier, XGBRanker, XGBRegressor
+from xgboost.compat import is_cudf_available
+from xgboost.core import Booster
+from xgboost.training import train as worker_train
 
 from .data import (
     _read_csr_matrix_from_unwrapped_spark_vec,
@@ -6,9 +6,9 @@ from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tupl
 import numpy as np
 import pandas as pd
 from scipy.sparse import csr_matrix
-from xgboost.compat import concat
 
 from xgboost import DataIter, DMatrix, QuantileDMatrix, XGBModel
+from xgboost.compat import concat
 
 from .._typing import ArrayLike
 from ..core import _convert_ntree_limit
@@ -8,6 +8,7 @@ import uuid
 from pyspark import SparkFiles, cloudpickle
 from pyspark.ml.util import DefaultParamsReader, DefaultParamsWriter, MLReader, MLWriter
 from pyspark.sql import SparkSession
+
 from xgboost.core import Booster
 
 from .utils import get_class_name, get_logger
@@ -8,9 +8,9 @@ from typing import Any, Callable, Dict, Set, Type
 import pyspark
 from pyspark import BarrierTaskContext, SparkContext
 from pyspark.sql.session import SparkSession
-from xgboost.tracker import RabitTracker
 
 from xgboost import collective
+from xgboost.tracker import RabitTracker
 
 
 def get_class_name(cls: Type) -> str:
@@ -33,10 +33,10 @@ from urllib import request
 import numpy as np
 import pytest
 from scipy import sparse
-from xgboost.core import ArrayLike
-from xgboost.sklearn import SklObjective
 
 import xgboost as xgb
+from xgboost.core import ArrayLike
+from xgboost.sklearn import SklObjective
 
 hypothesis = pytest.importorskip("hypothesis")
 
@@ -2,9 +2,9 @@
 import numpy as np
 from dask import array as da
 from distributed import Client
-from xgboost.testing.updater import get_basescore
 
 import xgboost as xgb
+from xgboost.testing.updater import get_basescore
 
 
 def check_init_estimation_clf(tree_method: str, client: Client) -> None:
@@ -2,6 +2,7 @@
 from typing import Any, Generator, Tuple, Union
 
 import numpy as np
+
 from xgboost.data import pandas_pyarrow_mapper
 
 
@@ -8,9 +8,9 @@ import tempfile
 from typing import Any, Callable, Dict, Type
 
 import numpy as np
-from xgboost._typing import ArrayLike
 
 import xgboost as xgb
+from xgboost._typing import ArrayLike
 
 
 def validate_leaf_output(leaf: np.ndarray, num_parallel_tree: int) -> None:
@@ -4,9 +4,9 @@ from functools import partial, update_wrapper
 from typing import Dict
 
 import numpy as np
-import xgboost.testing as tm
 
 import xgboost as xgb
+import xgboost.testing as tm
 
 
 def get_basescore(model: xgb.XGBModel) -> float:
@@ -78,6 +78,7 @@ def check_quantile_loss(tree_method: str, weighted: bool) -> None:
     """Test for quantile loss."""
     from sklearn.datasets import make_regression
     from sklearn.metrics import mean_pinball_loss
+
    from xgboost.sklearn import _metric_decorator
 
     n_samples = 4096
@@ -3,12 +3,15 @@ import os
 import subprocess
 import sys
 from multiprocessing import Pool, cpu_count
-from typing import Dict, Tuple
+from typing import Dict, Optional, Tuple
 
 from pylint import epylint
 from test_utils import PY_PACKAGE, ROOT, cd, print_time, record_time
 
 CURDIR = os.path.normpath(os.path.abspath(os.path.dirname(__file__)))
+SRCPATH = os.path.normpath(
+    os.path.join(CURDIR, os.path.pardir, os.path.pardir, "python-package")
+)
 
 
 @record_time
@@ -29,7 +32,7 @@ Please run the following command on your machine to address the formatting error
 
 @record_time
 def run_isort(rel_path: str) -> bool:
-    cmd = ["isort", "--check", "--profile=black", rel_path]
+    cmd = ["isort", f"--src={SRCPATH}", "--check", "--profile=black", rel_path]
     ret = subprocess.run(cmd).returncode
     if ret != 0:
         subprocess.run(["isort", "--version"])
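In the lint script above, SRCPATH resolves to the repository's python-package directory and is forwarded to isort as --src, which is what flips xgboost from a third-party to a first-party package during the check. A small sketch of the same effect through isort's Python API (not part of this commit; Config and place_module are isort 5 public API, and the placement reported without the src path depends on whether xgboost is installed in the environment):

# Sketch: how registering a source root changes isort's classification of the
# xgboost package. Run from the repository root so python-package/xgboost exists.
import isort

default_cfg = isort.Config(profile="black")
src_cfg = isort.Config(profile="black", src_paths=["python-package"])

print(isort.place_module("xgboost", config=default_cfg))  # typically THIRDPARTY
print(isort.place_module("xgboost", config=src_cfg))      # FIRSTPARTY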
@@ -2,6 +2,7 @@ import sys
 
 import pytest
 from hypothesis import given, settings, strategies
+
 from xgboost.testing import no_cupy
 
 sys.path.append("tests/python")
@@ -1,10 +1,10 @@
 import sys
 
 import pytest
-from xgboost.testing.metrics import check_quantile_error
 
 import xgboost
 from xgboost import testing as tm
+from xgboost.testing.metrics import check_quantile_error
 
 sys.path.append("tests/python")
 import test_eval_metrics as test_em  # noqa
@@ -3,10 +3,10 @@ import sys
 import numpy as np
 import pytest
 from hypothesis import assume, given, settings, strategies
-from xgboost.compat import PANDAS_INSTALLED
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.compat import PANDAS_INSTALLED
 
 if PANDAS_INSTALLED:
     from hypothesis.extra.pandas import column, data_frames, range_indexes
@@ -4,11 +4,11 @@ from typing import Any, Dict
 import numpy as np
 import pytest
 from hypothesis import assume, given, note, settings, strategies
-from xgboost.testing.params import cat_parameter_strategy, hist_parameter_strategy
-from xgboost.testing.updater import check_init_estimation, check_quantile_loss
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.params import cat_parameter_strategy, hist_parameter_strategy
+from xgboost.testing.updater import check_init_estimation, check_quantile_loss
 
 sys.path.append("tests/python")
 import test_updaters as test_up
@@ -4,11 +4,11 @@ import numpy as np
 import pytest
 from hypothesis import given, settings, strategies
 from scipy.sparse import csr_matrix
-from xgboost.data import SingleBatchInternalIter as SingleBatch
-from xgboost.testing import IteratorForTest, make_batches, non_increasing
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.data import SingleBatchInternalIter as SingleBatch
+from xgboost.testing import IteratorForTest, make_batches, non_increasing
 
 pytestmark = tm.timeout(30)
 
@@ -6,10 +6,10 @@ import pytest
 import scipy.sparse
 from hypothesis import given, settings, strategies
 from scipy.sparse import csr_matrix, rand
-from xgboost.testing.data import np_dtypes
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.data import np_dtypes
 
 rng = np.random.RandomState(1)
 
@@ -1,9 +1,9 @@
 import numpy as np
 import pytest
-from xgboost.testing.updater import get_basescore
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.updater import get_basescore
 
 rng = np.random.RandomState(1994)
 
@@ -1,9 +1,9 @@
 import numpy as np
 import pytest
-from xgboost.testing.metrics import check_quantile_error
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.metrics import check_quantile_error
 
 rng = np.random.RandomState(1337)
 
@@ -5,11 +5,11 @@ import numpy as np
 import pandas as pd
 import pytest
 from scipy import sparse
-from xgboost.testing.data import np_dtypes, pd_dtypes
-from xgboost.testing.shared import validate_leaf_output
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.data import np_dtypes, pd_dtypes
+from xgboost.testing.shared import validate_leaf_output
 
 
 def run_threaded_predict(X, rows, predict_func):
@@ -4,6 +4,8 @@ import numpy as np
 import pytest
 from hypothesis import given, settings, strategies
 from scipy import sparse
+
+import xgboost as xgb
 from xgboost.testing import (
     IteratorForTest,
     make_batches,
@@ -15,8 +17,6 @@ from xgboost.testing import (
 )
 from xgboost.testing.data import np_dtypes
 
-import xgboost as xgb
-
 
 class TestQuantileDMatrix:
     def test_basic(self) -> None:
@@ -5,6 +5,9 @@ from typing import Any, Dict, List
 import numpy as np
 import pytest
 from hypothesis import given, note, settings, strategies
+
+import xgboost as xgb
+from xgboost import testing as tm
 from xgboost.testing.params import (
     cat_parameter_strategy,
     exact_parameter_strategy,
@@ -12,9 +15,6 @@ from xgboost.testing.params import (
 )
 from xgboost.testing.updater import check_init_estimation, check_quantile_loss
 
-import xgboost as xgb
-from xgboost import testing as tm
-
 
 def train_result(param, dmat, num_rounds):
     result = {}
@@ -3,10 +3,10 @@ from typing import Type
 import numpy as np
 import pytest
 from test_dmatrix import set_base_margin_info
-from xgboost.testing.data import pd_arrow_dtypes, pd_dtypes
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.data import pd_arrow_dtypes, pd_dtypes
 
 try:
     import pandas as pd
@@ -8,11 +8,11 @@ from typing import Callable, Optional
 import numpy as np
 import pytest
 from sklearn.utils.estimator_checks import parametrize_with_checks
-from xgboost.testing.shared import get_feature_weights, validate_data_initialization
-from xgboost.testing.updater import get_basescore
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.shared import get_feature_weights, validate_data_initialization
+from xgboost.testing.updater import get_basescore
 
 rng = np.random.RandomState(1994)
 pytestmark = [pytest.mark.skipif(**tm.no_sklearn()), tm.timeout(30)]
@@ -3,9 +3,8 @@ import multiprocessing
 import sys
 import time
 
-import xgboost.federated
-
 import xgboost as xgb
+import xgboost.federated
 
 SERVER_KEY = 'server-key.pem'
 SERVER_CERT = 'server-cert.pem'
@@ -10,10 +10,10 @@ import numpy as np
 import pytest
 from hypothesis import given, note, settings, strategies
 from hypothesis._settings import duration
-from xgboost.testing.params import hist_parameter_strategy
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.testing.params import hist_parameter_strategy
 
 pytestmark = [
     pytest.mark.skipif(**tm.no_dask()),
@@ -42,9 +42,9 @@ try:
     from dask import array as da
     from dask.distributed import Client
     from dask_cuda import LocalCUDACluster
-    from xgboost.testing.dask import check_init_estimation
 
     from xgboost import dask as dxgb
+    from xgboost.testing.dask import check_init_estimation
 except ImportError:
     pass
 
@@ -12,6 +12,7 @@ pytestmark = pytest.mark.skipif(**tm.no_spark())
 from pyspark.ml.linalg import Vectors
 from pyspark.ml.tuning import CrossValidator, ParamGridBuilder
 from pyspark.sql import SparkSession
+
 from xgboost.spark import SparkXGBClassifier, SparkXGBRegressor
 
 gpu_discovery_script_path = "tests/test_distributed/test_gpu_with_spark/discover_gpu.sh"
@@ -21,6 +21,9 @@ import scipy
 import sklearn
 from hypothesis import HealthCheck, given, note, settings
 from sklearn.datasets import make_classification, make_regression
+
+import xgboost as xgb
+from xgboost import testing as tm
 from xgboost.data import _is_cudf_df
 from xgboost.testing.params import hist_parameter_strategy
 from xgboost.testing.shared import (
@@ -29,9 +32,6 @@ from xgboost.testing.shared import (
     validate_leaf_output,
 )
 
-import xgboost as xgb
-from xgboost import testing as tm
-
 pytestmark = [tm.timeout(60), pytest.mark.skipif(**tm.no_dask())]
 
 import dask
@@ -39,6 +39,7 @@ import dask.array as da
 import dask.dataframe as dd
 from distributed import Client, LocalCluster
 from toolz import sliding_window  # dependency of dask
+
 from xgboost.dask import DaskDMatrix
 from xgboost.testing.dask import check_init_estimation
 
@@ -8,6 +8,7 @@ from xgboost import testing as tm
 
 pytestmark = [pytest.mark.skipif(**tm.no_spark())]
 
+from xgboost import DMatrix, QuantileDMatrix
 from xgboost.spark.data import (
     _read_csr_matrix_from_unwrapped_spark_vec,
     alias,
@@ -15,8 +16,6 @@ from xgboost.spark.data import (
     stack_series,
 )
 
-from xgboost import DMatrix, QuantileDMatrix
-
 
 def test_stack() -> None:
     a = pd.DataFrame({"a": [[1, 2], [3, 4]]})
@@ -8,10 +8,10 @@ from typing import Generator, Sequence, Type
 
 import numpy as np
 import pytest
-from xgboost.spark.data import pred_contribs
 
 import xgboost as xgb
 from xgboost import testing as tm
+from xgboost.spark.data import pred_contribs
 
 pytestmark = [tm.timeout(60), pytest.mark.skipif(**tm.no_spark())]
 
@@ -23,6 +23,8 @@ from pyspark.ml.linalg import Vectors
 from pyspark.ml.tuning import CrossValidator, ParamGridBuilder
 from pyspark.sql import SparkSession
 from pyspark.sql import functions as spark_sql_func
+
+from xgboost import XGBClassifier, XGBModel, XGBRegressor
 from xgboost.spark import (
     SparkXGBClassifier,
     SparkXGBClassifierModel,
@@ -32,8 +34,6 @@ from xgboost.spark import (
 )
 from xgboost.spark.core import _non_booster_params
 
-from xgboost import XGBClassifier, XGBModel, XGBRegressor
-
 from .utils import SparkTestCase
 
 logging.getLogger("py4j").setLevel(logging.INFO)
@@ -11,6 +11,7 @@ from xgboost import testing as tm
 pytestmark = pytest.mark.skipif(**tm.no_spark())
 
 from pyspark.ml.linalg import Vectors
+
 from xgboost.spark import SparkXGBClassifier, SparkXGBRegressor
 from xgboost.spark.utils import _get_max_num_concurrent_tasks
 
@@ -13,6 +13,7 @@ from xgboost import testing as tm
 pytestmark = [pytest.mark.skipif(**tm.no_spark())]
 
 from pyspark.sql import SparkSession
+
 from xgboost.spark.utils import _get_default_params_from_func
 