Fixed all lint errors

terrytangyuan 2015-12-11 18:46:15 -06:00
parent a7e79e089b
commit 0eb6240fd0
6 changed files with 7 additions and 7 deletions

View File

@@ -2,7 +2,7 @@
 ignore=tests
-unexpected-special-method-signature,too-many-nested-blocks,consider-using-enumerate
+unexpected-special-method-signature,too-many-nested-blocks
 dummy-variables-rgx=(unused|)_.*
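
Note on scope: a check listed under disable= in .pylintrc is suppressed for the whole project, while a # pylint: disable=... comment only covers the block it appears in, so dropping consider-using-enumerate here pushes the suppression down to the one method that still needs it (the Booster.dump_model hunk further down). A generic sketch of the scoped form, using invalid-name purely as an example and a made-up function not taken from the xgboost sources:

def load_config():
    # pylint: disable=invalid-name
    # Scoped: only names inside load_config() skip the check; the rest of
    # the module is still linted.
    DB = {'host': 'localhost'}   # would be flagged as invalid-name elsewhere
    return DB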

View File

@@ -11,7 +11,7 @@ sys.path.insert(0, '.')
 #it builds xgboost code on the fly and packs for pip
 #please don't use this file for installing from github
-if not os.name == 'nt': #if not windows, compile and install
+if os.name != 'nt': #if not windows, compile and install
     os.system('sh ./xgboost/build-python.sh')
 else:
     print('Windows users please use github installation.')
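
The rewritten condition is behaviorally identical: os.name is 'nt' only on Windows, so not os.name == 'nt' and os.name != 'nt' select the same branch. Pylint simply prefers the direct inequality over a negated equality (its unneeded-not check reports the old form). A tiny sketch, assuming nothing beyond the standard library:

import os

# 'nt' on Windows, 'posix' on Linux/macOS; the two tests are equivalent.
old_style = not os.name == 'nt'   # form removed here, reported by unneeded-not
new_style = os.name != 'nt'       # form kept
assert old_style == new_style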

View File

@@ -1,5 +1,5 @@
 # coding: utf-8
-# pylint: disable=unused-import, invalid-name
+# pylint: disable=unused-import, invalid-name, wrong-import-position
 """For compatibility"""
 from __future__ import absolute_import
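
wrong-import-position (pylint C0413) is reported when a module-level import appears after other executable statements, which a compatibility shim like this module typically does on purpose (version checks, try/except fallbacks), so the file-level disable comment is widened. A self-contained illustration, not the actual compatibility module:

# pylint: disable=wrong-import-position
"""Illustration only."""
import sys

PY3 = sys.version_info[0] == 3   # executable code before the next import

import pickle                    # reported as wrong-import-position without the disable above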

View File

@@ -890,6 +890,7 @@ class Booster(object):
         _check_call(_LIB.XGBoosterLoadModelFromBuffer(self.handle, ptr, length))
     def dump_model(self, fout, fmap='', with_stats=False):
+        # pylint: disable=consider-using-enumerate
         """
         Dump model into a text file.
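
consider-using-enumerate fires on loops written as for i in range(len(seq)) whose body indexes seq with i; dump_model presumably walks the dumped trees by index, and the commit keeps that loop and silences the check for this one method instead (matching the .pylintrc change above). A sketch of the pattern and the rewrite pylint suggests, using made-up stand-in data rather than the real dump_model body:

trees = ['first tree dump', 'second tree dump']   # hypothetical stand-in data

# Loop shape the check reports:
for i in range(len(trees)):
    print('booster[%d]:' % i)
    print(trees[i])

# Rewrite pylint suggests:
for i, dump in enumerate(trees):
    print('booster[%d]:' % i)
    print(dump)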

View File

@@ -5,12 +5,11 @@
 from __future__ import absolute_import
 import re
+from io import BytesIO
 import numpy as np
 from .core import Booster
 from .sklearn import XGBModel
-from io import BytesIO
 def plot_importance(booster, ax=None, height=0.2,
                     xlim=None, ylim=None, title='Feature importance',
                     xlabel='F score', ylabel='Features',
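
Moving from io import BytesIO up next to import re restores the grouping pylint's import-order checks (and PEP 8) expect: standard library first, then third-party, then local packages; leaving a stdlib import after the relative xgboost imports is presumably what triggered the warning in this module. The intended grouping, shown with the same names as the diff (the relative imports only resolve inside the xgboost package):

# standard library
import re
from io import BytesIO

# third-party
import numpy as np

# local package
from .core import Booster
from .sklearn import XGBModel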

View File

@@ -130,7 +130,7 @@ class XGBModel(XGBModelBase):
     def fit(self, X, y, eval_set=None, eval_metric=None,
             early_stopping_rounds=None, verbose=True):
-        # pylint: disable=missing-docstring,invalid-name,attribute-defined-outside-init
+        # pylint: disable=missing-docstring,invalid-name,attribute-defined-outside-init, redefined-variable-type
         """
         Fit the gradient boosting model
@@ -265,7 +265,7 @@ class XGBClassifier(XGBModel, XGBClassifierBase):
     def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
             early_stopping_rounds=None, verbose=True):
-        # pylint: disable = attribute-defined-outside-init,arguments-differ
+        # pylint: disable = attribute-defined-outside-init,arguments-differ, redefined-variable-type
         """
         Fit gradient boosting classifier
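
redefined-variable-type (pylint's R0204, present in the pylint releases current at the time of this commit) is reported when the same variable is bound to values of clearly different types on different assignments; the two fit() methods presumably rebind a local along those lines, and the commit silences the check per method rather than restructure the code. A made-up illustration of the pattern, not the actual fit() body:

def summarize(values):
    # pylint: disable=redefined-variable-type
    # 'result' is first a list and then a dict, the rebinding pattern the
    # check would report without the disable above.
    result = [v * 2 for v in values]
    result = {i: v for i, v in enumerate(result)}
    return result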