Fix mypy errors. (#8444)

This commit is contained in:
Jiaming Yuan 2022-11-09 13:19:11 +08:00 committed by GitHub
parent 0252d504d8
commit 9dd8d70f0e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 34 additions and 25 deletions

View File

@@ -135,7 +135,7 @@ class CallbackContainer:
def __init__( def __init__(
self, self,
callbacks: Sequence[TrainingCallback], callbacks: Sequence[TrainingCallback],
metric: Callable = None, metric: Optional[Callable] = None,
output_margin: bool = True, output_margin: bool = True,
is_cv: bool = False is_cv: bool = False
) -> None: ) -> None:
@@ -391,8 +391,6 @@ class EarlyStopping(TrainingCallback):
else: else:
improve_op = minimize improve_op = minimize
assert improve_op
if not self.stopping_history: # First round if not self.stopping_history: # First round
self.current_rounds = 0 self.current_rounds = 0
self.stopping_history[name] = {} self.stopping_history[name] = {}

View File

@@ -288,10 +288,10 @@ class DaskDMatrix:
*, *,
weight: Optional[_DaskCollection] = None, weight: Optional[_DaskCollection] = None,
base_margin: Optional[_DaskCollection] = None, base_margin: Optional[_DaskCollection] = None,
missing: float = None, missing: Optional[float] = None,
silent: bool = False, # pylint: disable=unused-argument silent: bool = False, # pylint: disable=unused-argument
feature_names: Optional[FeatureNames] = None, feature_names: Optional[FeatureNames] = None,
feature_types: FeatureTypes = None, feature_types: Optional[FeatureTypes] = None,
group: Optional[_DaskCollection] = None, group: Optional[_DaskCollection] = None,
qid: Optional[_DaskCollection] = None, qid: Optional[_DaskCollection] = None,
label_lower_bound: Optional[_DaskCollection] = None, label_lower_bound: Optional[_DaskCollection] = None,
@@ -304,7 +304,7 @@ class DaskDMatrix:
self.feature_names = feature_names self.feature_names = feature_names
self.feature_types = feature_types self.feature_types = feature_types
self.missing = missing self.missing = missing if missing is not None else numpy.nan
self.enable_categorical = enable_categorical self.enable_categorical = enable_categorical
if qid is not None and weight is not None: if qid is not None and weight is not None:
@@ -651,7 +651,7 @@ class DaskQuantileDMatrix(DaskDMatrix):
*, *,
weight: Optional[_DaskCollection] = None, weight: Optional[_DaskCollection] = None,
base_margin: Optional[_DaskCollection] = None, base_margin: Optional[_DaskCollection] = None,
missing: float = None, missing: Optional[float] = None,
silent: bool = False, # disable=unused-argument silent: bool = False, # disable=unused-argument
feature_names: Optional[FeatureNames] = None, feature_names: Optional[FeatureNames] = None,
feature_types: Optional[Union[Any, List[Any]]] = None, feature_types: Optional[Union[Any, List[Any]]] = None,
@@ -2129,7 +2129,7 @@ class DaskXGBRanker(DaskScikitLearnBase, XGBRankerMixIn):
eval_group: Optional[Sequence[_DaskCollection]] = None, eval_group: Optional[Sequence[_DaskCollection]] = None,
eval_qid: Optional[Sequence[_DaskCollection]] = None, eval_qid: Optional[Sequence[_DaskCollection]] = None,
eval_metric: Optional[Union[str, Sequence[str], Callable]] = None, eval_metric: Optional[Union[str, Sequence[str], Callable]] = None,
early_stopping_rounds: int = None, early_stopping_rounds: Optional[int] = None,
verbose: Union[int, bool] = False, verbose: Union[int, bool] = False,
xgb_model: Optional[Union[XGBModel, Booster]] = None, xgb_model: Optional[Union[XGBModel, Booster]] = None,
sample_weight_eval_set: Optional[Sequence[_DaskCollection]] = None, sample_weight_eval_set: Optional[Sequence[_DaskCollection]] = None,

View File

@@ -152,7 +152,7 @@ def version_number() -> int:
class RabitContext: class RabitContext:
"""A context controlling rabit initialization and finalization.""" """A context controlling rabit initialization and finalization."""
def __init__(self, args: List[bytes] = None) -> None: def __init__(self, args: Optional[List[bytes]] = None) -> None:
if args is None: if args is None:
args = [] args = []
self.args = args self.args = args

View File

@@ -233,7 +233,7 @@ __model_doc = f"""
should be used to specify categorical data type. Also, JSON/UBJSON should be used to specify categorical data type. Also, JSON/UBJSON
serialization format is required. serialization format is required.
feature_types : FeatureTypes feature_types : Optional[FeatureTypes]
.. versionadded:: 1.7.0 .. versionadded:: 1.7.0
@@ -572,7 +572,7 @@ class XGBModel(XGBModelBase):
validate_parameters: Optional[bool] = None, validate_parameters: Optional[bool] = None,
predictor: Optional[str] = None, predictor: Optional[str] = None,
enable_categorical: bool = False, enable_categorical: bool = False,
feature_types: FeatureTypes = None, feature_types: Optional[FeatureTypes] = None,
max_cat_to_onehot: Optional[int] = None, max_cat_to_onehot: Optional[int] = None,
max_cat_threshold: Optional[int] = None, max_cat_threshold: Optional[int] = None,
eval_metric: Optional[Union[str, List[str], Callable]] = None, eval_metric: Optional[Union[str, List[str], Callable]] = None,

View File

@@ -1,4 +1,3 @@
# coding: utf-8
# pylint: disable=too-many-locals, too-many-arguments, invalid-name # pylint: disable=too-many-locals, too-many-arguments, invalid-name
# pylint: disable=too-many-branches, too-many-statements # pylint: disable=too-many-branches, too-many-statements
"""Training Library containing training routines.""" """Training Library containing training routines."""
@@ -71,7 +70,7 @@ def train(
feval: Optional[Metric] = None, feval: Optional[Metric] = None,
maximize: Optional[bool] = None, maximize: Optional[bool] = None,
early_stopping_rounds: Optional[int] = None, early_stopping_rounds: Optional[int] = None,
evals_result: TrainingCallback.EvalsLog = None, evals_result: Optional[TrainingCallback.EvalsLog] = None,
verbose_eval: Optional[Union[bool, int]] = True, verbose_eval: Optional[Union[bool, int]] = True,
xgb_model: Optional[Union[str, os.PathLike, Booster, bytearray]] = None, xgb_model: Optional[Union[str, os.PathLike, Booster, bytearray]] = None,
callbacks: Optional[Sequence[TrainingCallback]] = None, callbacks: Optional[Sequence[TrainingCallback]] = None,
@@ -285,15 +284,20 @@ def groups_to_rows(groups: List[np.ndarray], boundaries: np.ndarray) -> np.ndarr
return np.concatenate([np.arange(boundaries[g], boundaries[g+1]) for g in groups]) return np.concatenate([np.arange(boundaries[g], boundaries[g+1]) for g in groups])
def mkgroupfold(dall: DMatrix, nfold: int, param: BoosterParam, def mkgroupfold(
evals: Sequence[str] = (), fpreproc: FPreProcCallable = None, dall: DMatrix,
shuffle: bool = True) -> List[CVPack]: nfold: int,
param: BoosterParam,
evals: Sequence[str] = (),
fpreproc: Optional[FPreProcCallable] = None,
shuffle: bool = True,
) -> List[CVPack]:
""" """
Make n folds for cross-validation maintaining groups Make n folds for cross-validation maintaining groups
:return: cross-validation folds :return: cross-validation folds
""" """
# we have groups for pairwise ranking... get a list of the group indexes # we have groups for pairwise ranking... get a list of the group indexes
group_boundaries = dall.get_uint_info('group_ptr') group_boundaries = dall.get_uint_info("group_ptr")
group_sizes = np.diff(group_boundaries) group_sizes = np.diff(group_boundaries)
if shuffle is True: if shuffle is True:
@@ -327,10 +331,17 @@ def mkgroupfold(dall: DMatrix, nfold: int, param: BoosterParam,
return ret return ret
def mknfold(dall: DMatrix, nfold: int, param: BoosterParam, seed: int, def mknfold(
evals: Sequence[str] = (), fpreproc: FPreProcCallable = None, dall: DMatrix,
stratified: bool = False, folds: XGBStratifiedKFold = None, shuffle: bool = True nfold: int,
) -> List[CVPack]: param: BoosterParam,
seed: int,
evals: Sequence[str] = (),
fpreproc: Optional[FPreProcCallable] = None,
stratified: Optional[bool] = False,
folds: Optional[XGBStratifiedKFold] = None,
shuffle: bool = True,
) -> List[CVPack]:
""" """
Make an n-fold list of CVPack from random indices. Make an n-fold list of CVPack from random indices.
""" """
@@ -393,14 +404,14 @@ def cv(
metrics: Sequence[str] = (), metrics: Sequence[str] = (),
obj: Optional[Objective] = None, obj: Optional[Objective] = None,
feval: Optional[Metric] = None, feval: Optional[Metric] = None,
maximize: bool = None, maximize: Optional[bool] = None,
early_stopping_rounds: int = None, early_stopping_rounds: Optional[int] = None,
fpreproc: FPreProcCallable = None, fpreproc: Optional[FPreProcCallable] = None,
as_pandas: bool = True, as_pandas: bool = True,
verbose_eval: Optional[Union[int, bool]] = None, verbose_eval: Optional[Union[int, bool]] = None,
show_stdv: bool = True, show_stdv: bool = True,
seed: int = 0, seed: int = 0,
callbacks: Sequence[TrainingCallback] = None, callbacks: Optional[Sequence[TrainingCallback]] = None,
shuffle: bool = True, shuffle: bool = True,
custom_metric: Optional[Metric] = None, custom_metric: Optional[Metric] = None,
) -> Union[Dict[str, float], DataFrame]: ) -> Union[Dict[str, float], DataFrame]: