pylint: upgrade to 1.4.1
This is largely a bugfix release, so should be much easier to transition.
BUG=chromium:431514
TEST=ran on some code bases and checked output
Review URL: https://codereview.chromium.org/876793002
git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@293806 0039d316-1c4b-4281-b951-d872f2087c98
diff --git a/third_party/logilab/astroid/README.chromium b/third_party/logilab/astroid/README.chromium
index 8649b95..01387b8 100644
--- a/third_party/logilab/astroid/README.chromium
+++ b/third_party/logilab/astroid/README.chromium
@@ -1,5 +1,5 @@
URL: http://www.logilab.org/project/logilab-astng
-Version: 1.3.2
+Version: 1.3.4
License: GPL
License File: LICENSE.txt
diff --git a/third_party/logilab/astroid/__pkginfo__.py b/third_party/logilab/astroid/__pkginfo__.py
index 77a6694..0c92414 100644
--- a/third_party/logilab/astroid/__pkginfo__.py
+++ b/third_party/logilab/astroid/__pkginfo__.py
@@ -20,7 +20,7 @@
modname = 'astroid'
-numversion = (1, 3, 2)
+numversion = (1, 3, 4)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.60.0', 'six']
diff --git a/third_party/logilab/astroid/brain/builtin_inference.py b/third_party/logilab/astroid/brain/builtin_inference.py
new file mode 100644
index 0000000..f60e791
--- /dev/null
+++ b/third_party/logilab/astroid/brain/builtin_inference.py
@@ -0,0 +1,245 @@
+"""Astroid hooks for various builtins."""
+
+import sys
+from functools import partial
+from textwrap import dedent
+
+import six
+from astroid import (MANAGER, UseInferenceDefault,
+ inference_tip, YES, InferenceError, UnresolvableName)
+from astroid import nodes
+from astroid.builder import AstroidBuilder
+
+
+def _extend_str(class_node, rvalue):
+ """function to extend builtin str/unicode class"""
+ # TODO(cpopa): this approach will make astroid to believe
+ # that some arguments can be passed by keyword, but
+ # unfortunately, strings and bytes don't accept keyword arguments.
+ code = dedent('''
+ class whatever(object):
+ def join(self, iterable):
+ return {rvalue}
+ def replace(self, old, new, count=None):
+ return {rvalue}
+ def format(self, *args, **kwargs):
+ return {rvalue}
+ def encode(self, encoding='ascii', errors=None):
+ return ''
+ def decode(self, encoding='ascii', errors=None):
+ return u''
+ def capitalize(self):
+ return {rvalue}
+ def title(self):
+ return {rvalue}
+ def lower(self):
+ return {rvalue}
+ def upper(self):
+ return {rvalue}
+ def swapcase(self):
+ return {rvalue}
+ def index(self, sub, start=None, end=None):
+ return 0
+ def find(self, sub, start=None, end=None):
+ return 0
+ def count(self, sub, start=None, end=None):
+ return 0
+ def strip(self, chars=None):
+ return {rvalue}
+ def lstrip(self, chars=None):
+ return {rvalue}
+ def rstrip(self, chars=None):
+ return {rvalue}
+ def rjust(self, width, fillchar=None):
+ return {rvalue}
+ def center(self, width, fillchar=None):
+ return {rvalue}
+ def ljust(self, width, fillchar=None):
+ return {rvalue}
+ ''')
+ code = code.format(rvalue=rvalue)
+ fake = AstroidBuilder(MANAGER).string_build(code)['whatever']
+ for method in fake.mymethods():
+ class_node.locals[method.name] = [method]
+ method.parent = class_node
+
+def extend_builtins(class_transforms):
+ from astroid.bases import BUILTINS
+ builtin_ast = MANAGER.astroid_cache[BUILTINS]
+ for class_name, transform in class_transforms.items():
+ transform(builtin_ast[class_name])
+
+if sys.version_info > (3, 0):
+ extend_builtins({'bytes': partial(_extend_str, rvalue="b''"),
+ 'str': partial(_extend_str, rvalue="''")})
+else:
+ extend_builtins({'str': partial(_extend_str, rvalue="''"),
+ 'unicode': partial(_extend_str, rvalue="u''")})
+
+
+def register_builtin_transform(transform, builtin_name):
+ """Register a new transform function for the given *builtin_name*.
+
+ The transform function must accept two parameters, a node and
+ an optional context.
+ """
+ def _transform_wrapper(node, context=None):
+ result = transform(node, context=context)
+ if result:
+ result.parent = node
+ result.lineno = node.lineno
+ result.col_offset = node.col_offset
+ return iter([result])
+
+ MANAGER.register_transform(nodes.CallFunc,
+ inference_tip(_transform_wrapper),
+ lambda n: (isinstance(n.func, nodes.Name) and
+ n.func.name == builtin_name))
+
+
+def _generic_inference(node, context, node_type, transform):
+ args = node.args
+ if not args:
+ return node_type()
+ if len(node.args) > 1:
+ raise UseInferenceDefault()
+
+ arg, = args
+ transformed = transform(arg)
+ if not transformed:
+ try:
+ infered = next(arg.infer(context=context))
+ except (InferenceError, StopIteration):
+ raise UseInferenceDefault()
+ if infered is YES:
+ raise UseInferenceDefault()
+ transformed = transform(infered)
+ if not transformed or transformed is YES:
+ raise UseInferenceDefault()
+ return transformed
+
+
+def _generic_transform(arg, klass, iterables, build_elts):
+ if isinstance(arg, klass):
+ return arg
+ elif isinstance(arg, iterables):
+ if not all(isinstance(elt, nodes.Const)
+ for elt in arg.elts):
+            # TODO(cpopa): Don't support heterogeneous elements.
+ # Not yet, though.
+ raise UseInferenceDefault()
+ elts = [elt.value for elt in arg.elts]
+ elif isinstance(arg, nodes.Dict):
+ if not all(isinstance(elt[0], nodes.Const)
+ for elt in arg.items):
+ raise UseInferenceDefault()
+ elts = [item[0].value for item in arg.items]
+ elif (isinstance(arg, nodes.Const) and
+ isinstance(arg.value, (six.string_types, six.binary_type))):
+ elts = arg.value
+ else:
+ return
+ return klass(elts=build_elts(elts))
+
+
+def _infer_builtin(node, context,
+ klass=None, iterables=None,
+ build_elts=None):
+ transform_func = partial(
+ _generic_transform,
+ klass=klass,
+ iterables=iterables,
+ build_elts=build_elts)
+
+ return _generic_inference(node, context, klass, transform_func)
+
+# pylint: disable=invalid-name
+infer_tuple = partial(
+ _infer_builtin,
+ klass=nodes.Tuple,
+ iterables=(nodes.List, nodes.Set),
+ build_elts=tuple)
+
+infer_list = partial(
+ _infer_builtin,
+ klass=nodes.List,
+ iterables=(nodes.Tuple, nodes.Set),
+ build_elts=list)
+
+infer_set = partial(
+ _infer_builtin,
+ klass=nodes.Set,
+ iterables=(nodes.List, nodes.Tuple),
+ build_elts=set)
+
+
+def _get_elts(arg, context):
+ is_iterable = lambda n: isinstance(n,
+ (nodes.List, nodes.Tuple, nodes.Set))
+ try:
+ infered = next(arg.infer(context))
+ except (InferenceError, UnresolvableName):
+ raise UseInferenceDefault()
+ if isinstance(infered, nodes.Dict):
+ items = infered.items
+ elif is_iterable(infered):
+ items = []
+ for elt in infered.elts:
+ # If an item is not a pair of two items,
+ # then fallback to the default inference.
+                # Also, take into consideration only hashable items,
+ # tuples and consts. We are choosing Names as well.
+ if not is_iterable(elt):
+ raise UseInferenceDefault()
+ if len(elt.elts) != 2:
+ raise UseInferenceDefault()
+ if not isinstance(elt.elts[0],
+ (nodes.Tuple, nodes.Const, nodes.Name)):
+ raise UseInferenceDefault()
+ items.append(tuple(elt.elts))
+ else:
+ raise UseInferenceDefault()
+ return items
+
+def infer_dict(node, context=None):
+ """Try to infer a dict call to a Dict node.
+
+ The function treats the following cases:
+
+ * dict()
+ * dict(mapping)
+ * dict(iterable)
+ * dict(iterable, **kwargs)
+ * dict(mapping, **kwargs)
+ * dict(**kwargs)
+
+    If a case can't be inferred, we'll fall back to default inference.
+ """
+ has_keywords = lambda args: all(isinstance(arg, nodes.Keyword)
+ for arg in args)
+ if not node.args and not node.kwargs:
+ # dict()
+ return nodes.Dict()
+ elif has_keywords(node.args) and node.args:
+ # dict(a=1, b=2, c=4)
+ items = [(nodes.Const(arg.arg), arg.value) for arg in node.args]
+ elif (len(node.args) >= 2 and
+ has_keywords(node.args[1:])):
+ # dict(some_iterable, b=2, c=4)
+ elts = _get_elts(node.args[0], context)
+ keys = [(nodes.Const(arg.arg), arg.value) for arg in node.args[1:]]
+ items = elts + keys
+ elif len(node.args) == 1:
+ items = _get_elts(node.args[0], context)
+ else:
+ raise UseInferenceDefault()
+
+ empty = nodes.Dict()
+ empty.items = items
+ return empty
+
+# Builtins inference
+register_builtin_transform(infer_tuple, 'tuple')
+register_builtin_transform(infer_set, 'set')
+register_builtin_transform(infer_list, 'list')
+register_builtin_transform(infer_dict, 'dict')
diff --git a/third_party/logilab/astroid/brain/py2stdlib.py b/third_party/logilab/astroid/brain/py2stdlib.py
index 973c3ad..2bfcbcd 100644
--- a/third_party/logilab/astroid/brain/py2stdlib.py
+++ b/third_party/logilab/astroid/brain/py2stdlib.py
@@ -1,3 +1,4 @@
+
"""Astroid hooks for the Python 2 standard library.
Currently help understanding of :
@@ -6,6 +7,7 @@
"""
import sys
+from functools import partial
from textwrap import dedent
from astroid import (
@@ -104,6 +106,12 @@
@property
def name(self):
return %(name)r
+ @property
+ def block_size(self):
+ return 1
+ @property
+ def digest_size(self):
+ return 1
'''
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
classes = "".join(
@@ -322,5 +330,5 @@
MANAGER.register_transform(nodes.Class, infer_enum_class)
register_module_extender(MANAGER, 'hashlib', hashlib_transform)
register_module_extender(MANAGER, 'collections', collections_transform)
-register_module_extender(MANAGER, 'pkg_resourcds', pkg_resources_transform)
+register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
register_module_extender(MANAGER, 'subprocess', subprocess_transform)
diff --git a/third_party/logilab/astroid/brain/pynose.py b/third_party/logilab/astroid/brain/pynose.py
new file mode 100644
index 0000000..6315a34
--- /dev/null
+++ b/third_party/logilab/astroid/brain/pynose.py
@@ -0,0 +1,56 @@
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 2.1 of the License, or (at your
+# option) any later version.
+#
+# astroid is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+# for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+
+"""Hooks for nose library."""
+
+import re
+import unittest
+
+from astroid import List, MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def _pep8(name, caps=re.compile('([A-Z])')):
+ return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
+
+
+def nose_transform():
+ """Custom transform for the nose.tools module."""
+
+ builder = AstroidBuilder(MANAGER)
+ stub = AstroidBuilder(MANAGER).string_build('''__all__ = []''')
+ unittest_module = builder.module_build(unittest.case)
+ case = unittest_module['TestCase']
+ all_entries = ['ok_', 'eq_']
+
+ for method_name, method in case.locals.items():
+ if method_name.startswith('assert') and '_' not in method_name:
+ pep8_name = _pep8(method_name)
+ all_entries.append(pep8_name)
+ stub[pep8_name] = method[0]
+
+ # Update the __all__ variable, since nose.tools
+ # does this manually with .append.
+ all_assign = stub['__all__'].parent
+ all_object = List(all_entries)
+ all_object.parent = all_assign
+ all_assign.value = all_object
+ return stub
+
+
+register_module_extender(MANAGER, 'nose.tools.trivial', nose_transform)
diff --git a/third_party/logilab/astroid/brain/pysix_moves.py b/third_party/logilab/astroid/brain/pysix_moves.py
new file mode 100644
index 0000000..5648278
--- /dev/null
+++ b/third_party/logilab/astroid/brain/pysix_moves.py
@@ -0,0 +1,225 @@
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of astroid.
+#
+# astroid is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option) any
+# later version.
+#
+# astroid is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with astroid. If not, see <http://www.gnu.org/licenses/>.
+
+"""Astroid hooks for six.moves."""
+
+import sys
+from textwrap import dedent
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def six_moves_transform_py2():
+ return AstroidBuilder(MANAGER).string_build(dedent('''
+ import urllib as _urllib
+ import urllib2 as _urllib2
+ import urlparse as _urlparse
+
+ class Moves(object):
+ import BaseHTTPServer
+ import CGIHTTPServer
+ import SimpleHTTPServer
+
+ from StringIO import StringIO
+ from cStringIO import StringIO as cStringIO
+ from UserDict import UserDict
+ from UserList import UserList
+ from UserString import UserString
+
+ import __builtin__ as builtins
+ import thread as _thread
+ import dummy_thread as _dummy_thread
+ import ConfigParser as configparser
+ import copy_reg as copyreg
+ from itertools import (imap as map,
+ ifilter as filter,
+ ifilterfalse as filterfalse,
+ izip_longest as zip_longest,
+ izip as zip)
+ import htmlentitydefs as html_entities
+ import HTMLParser as html_parser
+ import httplib as http_client
+ import cookielib as http_cookiejar
+ import Cookie as http_cookies
+ import Queue as queue
+ import repr as reprlib
+ from pipes import quote as shlex_quote
+ import SocketServer as socketserver
+ import SimpleXMLRPCServer as xmlrpc_server
+ import xmlrpclib as xmlrpc_client
+ import _winreg as winreg
+ import robotparser as urllib_robotparser
+
+ input = raw_input
+ intern = intern
+ range = xrange
+ xrange = xrange
+ reduce = reduce
+ reload_module = reload
+
+ class UrllibParse(object):
+ ParseResult = _urlparse.ParseResult
+ SplitResult = _urlparse.SplitResult
+ parse_qs = _urlparse.parse_qs
+ parse_qsl = _urlparse.parse_qsl
+ urldefrag = _urlparse.urldefrag
+ urljoin = _urlparse.urljoin
+ urlparse = _urlparse.urlparse
+ urlsplit = _urlparse.urlsplit
+ urlunparse = _urlparse.urlunparse
+ urlunsplit = _urlparse.urlunsplit
+ quote = _urllib.quote
+ quote_plus = _urllib.quote_plus
+ unquote = _urllib.unquote
+ unquote_plus = _urllib.unquote_plus
+ urlencode = _urllib.urlencode
+ splitquery = _urllib.splitquery
+ splittag = _urllib.splittag
+ splituser = _urllib.splituser
+ uses_fragment = _urlparse.uses_fragment
+ uses_netloc = _urlparse.uses_netloc
+ uses_params = _urlparse.uses_params
+ uses_query = _urlparse.uses_query
+ uses_relative = _urlparse.uses_relative
+
+ class UrllibError(object):
+ URLError = _urllib2.URLError
+ HTTPError = _urllib2.HTTPError
+ ContentTooShortError = _urllib.ContentTooShortError
+
+ class DummyModule(object):
+ pass
+
+ class UrllibRequest(object):
+ urlopen = _urllib2.urlopen
+ install_opener = _urllib2.install_opener
+ build_opener = _urllib2.build_opener
+ pathname2url = _urllib.pathname2url
+ url2pathname = _urllib.url2pathname
+ getproxies = _urllib.getproxies
+ Request = _urllib2.Request
+ OpenerDirector = _urllib2.OpenerDirector
+ HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
+ HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
+ HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
+ ProxyHandler = _urllib2.ProxyHandler
+ BaseHandler = _urllib2.BaseHandler
+ HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
+ HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
+ AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
+ HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
+ ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
+ AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
+ HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
+ ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
+ HTTPHandler = _urllib2.HTTPHandler
+ HTTPSHandler = _urllib2.HTTPSHandler
+ FileHandler = _urllib2.FileHandler
+ FTPHandler = _urllib2.FTPHandler
+ CacheFTPHandler = _urllib2.CacheFTPHandler
+ UnknownHandler = _urllib2.UnknownHandler
+ HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
+ urlretrieve = _urllib.urlretrieve
+ urlcleanup = _urllib.urlcleanup
+ proxy_bypass = _urllib.proxy_bypass
+
+ urllib_parse = UrllibParse()
+ urllib_error = UrllibError()
+ urllib = DummyModule()
+ urllib.request = UrllibRequest()
+ urllib.parse = UrllibParse()
+ urllib.error = UrllibError()
+
+ moves = Moves()
+
+ '''))
+
+
+def six_moves_transform_py3():
+ return AstroidBuilder(MANAGER).string_build(dedent('''
+ class Moves(object):
+ import _io
+ cStringIO = _io.StringIO
+ filter = filter
+ from itertools import filterfalse
+ input = input
+ from sys import intern
+ map = map
+ range = range
+ from imp import reload as reload_module
+ from functools import reduce
+ from shlex import quote as shlex_quote
+ from io import StringIO
+ from collections import UserDict, UserList, UserString
+ xrange = range
+ zip = zip
+ from itertools import zip_longest
+ import builtins
+ import configparser
+ import copyreg
+ import _dummy_thread
+ import http.cookiejar as http_cookiejar
+ import http.cookies as http_cookies
+ import html.entities as html_entities
+ import html.parser as html_parser
+ import http.client as http_client
+ import http.server
+ BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
+ import pickle as cPickle
+ import queue
+ import reprlib
+ import socketserver
+ import _thread
+ import winreg
+ import xmlrpc.server as xmlrpc_server
+ import xmlrpc.client as xmlrpc_client
+ import urllib.robotparser as urllib_robotparser
+ import email.mime.multipart as email_mime_multipart
+ import email.mime.nonmultipart as email_mime_nonmultipart
+ import email.mime.text as email_mime_text
+ import email.mime.base as email_mime_base
+ import urllib.parse as urllib_parse
+ import urllib.error as urllib_error
+ import tkinter
+ import tkinter.dialog as tkinter_dialog
+ import tkinter.filedialog as tkinter_filedialog
+ import tkinter.scrolledtext as tkinter_scrolledtext
+ import tkinter.simpledialog as tkinder_simpledialog
+ import tkinter.tix as tkinter_tix
+ import tkinter.ttk as tkinter_ttk
+ import tkinter.constants as tkinter_constants
+ import tkinter.dnd as tkinter_dnd
+ import tkinter.colorchooser as tkinter_colorchooser
+ import tkinter.commondialog as tkinter_commondialog
+ import tkinter.filedialog as tkinter_tkfiledialog
+ import tkinter.font as tkinter_font
+ import tkinter.messagebox as tkinter_messagebox
+ import urllib.request
+ import urllib.robotparser as urllib_robotparser
+ import urllib.parse as urllib_parse
+ import urllib.error as urllib_error
+ moves = Moves()
+ '''))
+
+if sys.version_info[0] == 2:
+ TRANSFORM = six_moves_transform_py2
+else:
+ TRANSFORM = six_moves_transform_py3
+
+register_module_extender(MANAGER, 'six', TRANSFORM)
diff --git a/third_party/logilab/astroid/manager.py b/third_party/logilab/astroid/manager.py
index f5f1d79..fe78713 100644
--- a/third_party/logilab/astroid/manager.py
+++ b/third_party/logilab/astroid/manager.py
@@ -144,7 +144,7 @@
if module is not None:
return module
elif mp_type in (imp.C_BUILTIN, imp.C_EXTENSION):
- if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname):
+ if mp_type == imp.C_EXTENSION and not self._can_load_extension(modname):
return self._build_stub_module(modname)
try:
module = modutils.load_module_from_name(modname)
@@ -311,7 +311,7 @@
self.transforms[node_class].remove((transform, predicate))
def register_failed_import_hook(self, hook):
- """"Registers a hook to resolve imports that cannot be found otherwise.
+ """Registers a hook to resolve imports that cannot be found otherwise.
`hook` must be a function that accepts a single argument `modname` which
contains the name of the module or package that could not be imported.
@@ -348,15 +348,16 @@
"""Cache a module if no module with the same name is known yet."""
self.astroid_cache.setdefault(module.name, module)
- def clear_cache(self):
+ def clear_cache(self, astroid_builtin=None):
# XXX clear transforms
self.astroid_cache.clear()
# force bootstrap again, else we may ends up with cache inconsistency
# between the manager and CONST_PROXY, making
# unittest_lookup.LookupTC.test_builtin_lookup fail depending on the
# test order
- from astroid.raw_building import astroid_bootstrapping
- astroid_bootstrapping()
+ import astroid.raw_building
+ astroid.raw_building._astroid_bootstrapping(
+ astroid_builtin=astroid_builtin)
class Project(object):
diff --git a/third_party/logilab/astroid/modutils.py b/third_party/logilab/astroid/modutils.py
index 03cd876..68a2086 100644
--- a/third_party/logilab/astroid/modutils.py
+++ b/third_party/logilab/astroid/modutils.py
@@ -20,7 +20,7 @@
:type PY_SOURCE_EXTS: tuple(str)
:var PY_SOURCE_EXTS: list of possible python source file extension
-:type STD_LIB_DIRS: list of str
+:type STD_LIB_DIRS: set of str
:var STD_LIB_DIRS: directories where standard modules are located
:type BUILTIN_MODULES: dict
@@ -54,31 +54,33 @@
PY_COMPILED_EXTS = ('so',)
# Notes about STD_LIB_DIRS
-# Consider arch-specific installation for STD_LIB_DIR definition
+# Consider arch-specific installation for STD_LIB_DIRS definition
# :mod:`distutils.sysconfig` contains to much hardcoded values to rely on
#
# :see: `Problems with /usr/lib64 builds <http://bugs.python.org/issue1294959>`_
# :see: `FHS <http://www.pathname.com/fhs/pub/fhs-2.3.html#LIBLTQUALGTALTERNATEFORMATESSENTIAL>`_
try:
- # The explicit prefix is to work around a patch in virtualenv that
+ # The explicit sys.prefix is to work around a patch in virtualenv that
# replaces the 'real' sys.prefix (i.e. the location of the binary)
# with the prefix from which the virtualenv was created. This throws
# off the detection logic for standard library modules, thus the
# workaround.
- STD_LIB_DIRS = [
+ STD_LIB_DIRS = {
get_python_lib(standard_lib=True, prefix=sys.prefix),
- get_python_lib(standard_lib=True)]
+ # Take care of installations where exec_prefix != prefix.
+ get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
+ get_python_lib(standard_lib=True)}
if os.name == 'nt':
- STD_LIB_DIRS.append(os.path.join(sys.prefix, 'dlls'))
+ STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls'))
try:
# real_prefix is defined when running inside virtualenv.
- STD_LIB_DIRS.append(os.path.join(sys.real_prefix, 'dlls'))
+ STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls'))
except AttributeError:
pass
# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
# non-valid path, see https://bugs.pypy.org/issue1164
except DistutilsPlatformError:
- STD_LIB_DIRS = []
+ STD_LIB_DIRS = set()
EXT_LIB_DIR = get_python_lib()
diff --git a/third_party/logilab/astroid/raw_building.py b/third_party/logilab/astroid/raw_building.py
index 08f6af6..d5e8b3d 100644
--- a/third_party/logilab/astroid/raw_building.py
+++ b/third_party/logilab/astroid/raw_building.py
@@ -332,12 +332,14 @@
Astroid_BUILDER = InspectBuilder()
_CONST_PROXY = {}
-def astroid_bootstrapping():
+def _astroid_bootstrapping(astroid_builtin=None):
"""astroid boot strapping the builtins module"""
# this boot strapping is necessary since we need the Const nodes to
# inspect_build builtins, and then we can proxy Const
- from logilab.common.compat import builtins
- astroid_builtin = Astroid_BUILDER.inspect_build(builtins)
+ if astroid_builtin is None:
+ from logilab.common.compat import builtins
+ astroid_builtin = Astroid_BUILDER.inspect_build(builtins)
+
for cls, node_cls in CONST_CLS.items():
if cls is type(None):
proxy = build_class('NoneType')
@@ -349,7 +351,7 @@
else:
_CONST_PROXY[cls] = proxy
-astroid_bootstrapping()
+_astroid_bootstrapping()
# TODO : find a nicer way to handle this situation;
# However __proxied introduced an
diff --git a/third_party/logilab/astroid/rebuilder.py b/third_party/logilab/astroid/rebuilder.py
index 3f85251..14c606e 100644
--- a/third_party/logilab/astroid/rebuilder.py
+++ b/third_party/logilab/astroid/rebuilder.py
@@ -23,7 +23,7 @@
from _ast import (
Expr as Discard, Str,
# binary operators
- Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor,
+ Add, Div, FloorDiv, Mod, Mult, Pow, Sub, BitAnd, BitOr, BitXor,
LShift, RShift,
# logical operators
And, Or,
diff --git a/third_party/logilab/astroid/scoped_nodes.py b/third_party/logilab/astroid/scoped_nodes.py
index f9ec7b7..db39b8b 100644
--- a/third_party/logilab/astroid/scoped_nodes.py
+++ b/third_party/logilab/astroid/scoped_nodes.py
@@ -24,6 +24,7 @@
__doctype__ = "restructuredtext en"
import sys
+import warnings
from itertools import chain
try:
from io import BytesIO
@@ -35,7 +36,7 @@
from logilab.common.decorators import cached, cachedproperty
from astroid.exceptions import NotFoundError, \
- AstroidBuildingException, InferenceError
+ AstroidBuildingException, InferenceError, ResolveError
from astroid.node_classes import Const, DelName, DelAttr, \
Dict, From, List, Pass, Raise, Return, Tuple, Yield, YieldFrom, \
LookupMixIn, const_factory as cf, unpack_infer, Name, CallFunc
@@ -49,6 +50,46 @@
ITER_METHODS = ('__iter__', '__getitem__')
PY3K = sys.version_info >= (3, 0)
+def _c3_merge(sequences):
+ """Merges MROs in *sequences* to a single MRO using the C3 algorithm.
+
+ Adapted from http://www.python.org/download/releases/2.3/mro/.
+
+ """
+ result = []
+ while True:
+ sequences = [s for s in sequences if s] # purge empty sequences
+ if not sequences:
+ return result
+ for s1 in sequences: # find merge candidates among seq heads
+ candidate = s1[0]
+ for s2 in sequences:
+ if candidate in s2[1:]:
+ candidate = None
+ break # reject the current head, it appears later
+ else:
+ break
+ if not candidate:
+ # Show all the remaining bases, which were considered as
+ # candidates for the next mro sequence.
+ bases = ["({})".format(", ".join(base.name
+ for base in subsequence))
+ for subsequence in sequences]
+ raise ResolveError("Cannot create a consistent method resolution "
+ "order for bases %s" % ", ".join(bases))
+ result.append(candidate)
+ # remove the chosen candidate
+ for seq in sequences:
+ if seq[0] == candidate:
+ del seq[0]
+
+
+def _verify_duplicates_mro(sequences):
+ for sequence in sequences:
+ names = [node.qname() for node in sequence]
+ if len(names) != len(set(names)):
+ raise ResolveError('Duplicates found in the mro.')
+
def remove_nodes(func, cls):
def wrapper(*args, **kwargs):
@@ -257,14 +298,37 @@
self.body = []
self.future_imports = set()
- @cachedproperty
- def file_stream(self):
+ def _get_stream(self):
if self.file_bytes is not None:
return BytesIO(self.file_bytes)
if self.file is not None:
- return open(self.file, 'rb')
+ stream = open(self.file, 'rb')
+ return stream
return None
+ @property
+ def file_stream(self):
+ warnings.warn("file_stream property is deprecated and "
+ "it is slated for removal in astroid 1.6."
+ "Use the new method 'stream' instead.",
+ PendingDeprecationWarning,
+ stacklevel=2)
+ return self._get_stream()
+
+ def stream(self):
+ """Get a stream to the underlying file or bytes."""
+ return self._get_stream()
+
+ def close(self):
+ """Close the underlying file streams."""
+ warnings.warn("close method is deprecated and it is "
+ "slated for removal in astroid 1.6, along "
+ "with 'file_stream' property. "
+ "Its behaviour is replaced by managing each "
+ "file stream returned by the 'stream' method.",
+ PendingDeprecationWarning,
+ stacklevel=2)
+
def block_range(self, lineno):
"""return block line numbers.
@@ -505,50 +569,28 @@
# Function ###################################################################
def _infer_decorator_callchain(node):
- """ Detect decorator call chaining and see if the
- end result is a static or a classmethod.
+ """Detect decorator call chaining and see if the end result is a
+ static or a classmethod.
"""
- current = node
- while True:
- if isinstance(current, CallFunc):
- try:
- current = next(current.func.infer())
- except InferenceError:
- return
- elif isinstance(current, Function):
- if not current.parent:
- return
- try:
- # TODO: We don't handle multiple inference results right now,
- # because there's no flow to reason when the return
- # is what we are looking for, a static or a class method.
- result = next(current.infer_call_result(current.parent))
- if current is result:
- # This will lead to an infinite loop, where a decorator
- # returns itself.
- return
- except (StopIteration, InferenceError):
- return
- if isinstance(result, (Function, CallFunc)):
- current = result
- else:
- if isinstance(result, Instance):
- result = result._proxied
- if isinstance(result, Class):
- if (result.name == 'classmethod' and
- result.root().name == BUILTINS):
- return 'classmethod'
- elif (result.name == 'staticmethod' and
- result.root().name == BUILTINS):
- return 'staticmethod'
- else:
- return
- else:
- # We aren't interested in anything else returned,
- # so go back to the function type inference.
- return
- else:
- return
+ if not isinstance(node, Function):
+ return
+ if not node.parent:
+ return
+ try:
+ # TODO: We don't handle multiple inference results right now,
+ # because there's no flow to reason when the return
+ # is what we are looking for, a static or a class method.
+ result = next(node.infer_call_result(node.parent))
+ except (StopIteration, InferenceError):
+ return
+ if isinstance(result, Instance):
+ result = result._proxied
+ if isinstance(result, Class):
+ if result.is_subtype_of('%s.classmethod' % BUILTINS):
+ return 'classmethod'
+ if result.is_subtype_of('%s.staticmethod' % BUILTINS):
+ return 'staticmethod'
+
def _function_type(self):
"""
@@ -561,25 +603,34 @@
if self.decorators:
for node in self.decorators.nodes:
if isinstance(node, CallFunc):
- _type = _infer_decorator_callchain(node)
- if _type is None:
+ # Handle the following case:
+ # @some_decorator(arg1, arg2)
+ # def func(...)
+ #
+ try:
+ current = next(node.func.infer())
+ except InferenceError:
continue
- else:
+ _type = _infer_decorator_callchain(current)
+ if _type is not None:
return _type
- if not isinstance(node, Name):
- continue
+
try:
for infered in node.infer():
+ # Check to see if this returns a static or a class method.
+ _type = _infer_decorator_callchain(infered)
+ if _type is not None:
+ return _type
+
if not isinstance(infered, Class):
continue
for ancestor in infered.ancestors():
- if isinstance(ancestor, Class):
- if (ancestor.name == 'classmethod' and
- ancestor.root().name == BUILTINS):
- return 'classmethod'
- elif (ancestor.name == 'staticmethod' and
- ancestor.root().name == BUILTINS):
- return 'staticmethod'
+ if not isinstance(ancestor, Class):
+ continue
+ if ancestor.is_subtype_of('%s.classmethod' % BUILTINS):
+ return 'classmethod'
+ elif ancestor.is_subtype_of('%s.staticmethod' % BUILTINS):
+ return 'staticmethod'
except InferenceError:
pass
return self._type
@@ -763,8 +814,8 @@
# but does not contribute to the inheritance structure itself. We inject
# a fake class into the hierarchy here for several well-known metaclass
# generators, and filter it out later.
- if (self.name == 'with_metaclass' and
- len(self.args.args) == 1 and
+ if (self.name == 'with_metaclass' and
+ len(self.args.args) == 1 and
self.args.vararg is not None):
metaclass = next(caller.args[0].infer(context))
if isinstance(metaclass, Class):
@@ -1328,7 +1379,8 @@
if infered is YES:
continue
if (not isinstance(infered, Const) or
- not isinstance(infered.value, str)):
+ not isinstance(infered.value,
+ six.string_types)):
continue
if not infered.value:
continue
@@ -1339,5 +1391,69 @@
# Cached, because inferring them all the time is expensive
@cached
def slots(self):
- """ Return all the slots for this node. """
- return list(self._islots())
+ """Get all the slots for this node.
+
+ If the class doesn't define any slot, through `__slots__`
+ variable, then this function will return a None.
+ Also, it will return None in the case the slots weren't inferred.
+ Otherwise, it will return a list of slot names.
+ """
+ slots = self._islots()
+ try:
+ first = next(slots)
+ except StopIteration:
+ # The class doesn't have a __slots__ definition.
+ return None
+ return [first] + list(slots)
+
+ def _inferred_bases(self, recurs=True, context=None):
+ # TODO(cpopa): really similar with .ancestors,
+ # but the difference is when one base is inferred,
+ # only the first object is wanted. That's because
+ # we aren't interested in superclasses, as in the following
+ # example:
+ #
+ # class SomeSuperClass(object): pass
+ # class SomeClass(SomeSuperClass): pass
+ # class Test(SomeClass): pass
+ #
+ # Inferring SomeClass from the Test's bases will give
+ # us both SomeClass and SomeSuperClass, but we are interested
+ # only in SomeClass.
+
+ if context is None:
+ context = InferenceContext()
+ if sys.version_info[0] >= 3:
+ if not self.bases and self.qname() != 'builtins.object':
+ yield builtin_lookup("object")[1][0]
+ return
+
+ for stmt in self.bases:
+ try:
+ baseobj = next(stmt.infer(context=context))
+ except InferenceError:
+ # XXX log error ?
+ continue
+ if isinstance(baseobj, Instance):
+ baseobj = baseobj._proxied
+ if not isinstance(baseobj, Class):
+ continue
+ if not baseobj.hide:
+ yield baseobj
+
+ def mro(self, context=None):
+ """Get the method resolution order, using C3 linearization.
+
+ It returns the list of ancestors sorted by the mro.
+ This will raise `NotImplementedError` for old-style classes, since
+ they don't have the concept of MRO.
+ """
+ if not self.newstyle:
+ raise NotImplementedError(
+ "Could not obtain mro for old-style classes.")
+
+ bases = list(self._inferred_bases(context=context))
+ unmerged_mro = [[self]] + [base.mro() for base in bases] + [bases]
+
+ _verify_duplicates_mro(unmerged_mro)
+ return _c3_merge(unmerged_mro)
diff --git a/third_party/logilab/astroid/test_utils.py b/third_party/logilab/astroid/test_utils.py
new file mode 100644
index 0000000..19bd7b9
--- /dev/null
+++ b/third_party/logilab/astroid/test_utils.py
@@ -0,0 +1,218 @@
+"""Utility functions for test code that uses astroid ASTs as input."""
+import functools
+import sys
+import textwrap
+
+from astroid import nodes
+from astroid import builder
+# The name of the transient function that is used to
+# wrap expressions to be extracted when calling
+# extract_node.
+_TRANSIENT_FUNCTION = '__'
+
+# The comment used to select a statement to be extracted
+# when calling extract_node.
+_STATEMENT_SELECTOR = '#@'
+
+
+def _extract_expressions(node):
+ """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
+
+ The function walks the AST recursively to search for expressions that
+ are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
+ expression, it completely removes the function call node from the tree,
+ replacing it by the wrapped expression inside the parent.
+
+ :param node: An astroid node.
+ :type node: astroid.bases.NodeNG
+ :yields: The sequence of wrapped expressions found in the
+ modified tree.
+ """
+ if (isinstance(node, nodes.CallFunc)
+ and isinstance(node.func, nodes.Name)
+ and node.func.name == _TRANSIENT_FUNCTION):
+ real_expr = node.args[0]
+ real_expr.parent = node.parent
+ # Search for node in all _astng_fields (the fields checked when
+ # get_children is called) of its parent. Some of those fields may
+ # be lists or tuples, in which case the elements need to be checked.
+ # When we find it, replace it by real_expr, so that the AST looks
+ # like no call to _TRANSIENT_FUNCTION ever took place.
+ for name in node.parent._astroid_fields:
+ child = getattr(node.parent, name)
+ if isinstance(child, (list, tuple)):
+ for idx, compound_child in enumerate(child):
+ if compound_child is node:
+ child[idx] = real_expr
+ elif child is node:
+ setattr(node.parent, name, real_expr)
+ yield real_expr
+ else:
+ for child in node.get_children():
+ for result in _extract_expressions(child):
+ yield result
+
+
+def _find_statement_by_line(node, line):
+ """Extracts the statement on a specific line from an AST.
+
+ If the line number of node matches line, it will be returned;
+ otherwise its children are iterated and the function is called
+ recursively.
+
+ :param node: An astroid node.
+ :type node: astroid.bases.NodeNG
+ :param line: The line number of the statement to extract.
+ :type line: int
+ :returns: The statement on the line, or None if no statement for the line
+ can be found.
+ :rtype: astroid.bases.NodeNG or None
+ """
+ if isinstance(node, (nodes.Class, nodes.Function)):
+ # This is an inaccuracy in the AST: the nodes that can be
+ # decorated do not carry explicit information on which line
+ # the actual definition (class/def), but .fromline seems to
+ # be close enough.
+ node_line = node.fromlineno
+ else:
+ node_line = node.lineno
+
+ if node_line == line:
+ return node
+
+ for child in node.get_children():
+ result = _find_statement_by_line(child, line)
+ if result:
+ return result
+
+ return None
+
+def extract_node(code, module_name=''):
+ """Parses some Python code as a module and extracts a designated AST node.
+
+ Statements:
+ To extract one or more statement nodes, append #@ to the end of the line
+
+ Examples:
+ >>> def x():
+ >>> def y():
+ >>> return 1 #@
+
+ The return statement will be extracted.
+
+ >>> class X(object):
+ >>> def meth(self): #@
+ >>> pass
+
+ The function object 'meth' will be extracted.
+
+ Expressions:
+ To extract arbitrary expressions, surround them with the fake
+ function call __(...). After parsing, the surrounded expression
+ will be returned and the whole AST (accessible via the returned
+ node's parent attribute) will look like the function call was
+ never there in the first place.
+
+ Examples:
+ >>> a = __(1)
+
+ The const node will be extracted.
+
+ >>> def x(d=__(foo.bar)): pass
+
+ The node containing the default argument will be extracted.
+
+ >>> def foo(a, b):
+ >>> return 0 < __(len(a)) < b
+
+ The node containing the function call 'len' will be extracted.
+
+ If no statements or expressions are selected, the last toplevel
+ statement will be returned.
+
+ If the selected statement is a discard statement, (i.e. an expression
+ turned into a statement), the wrapped expression is returned instead.
+
+ For convenience, singleton lists are unpacked.
+
+ :param str code: A piece of Python code that is parsed as
+ a module. Will be passed through textwrap.dedent first.
+ :param str module_name: The name of the module.
+ :returns: The designated node from the parse tree, or a list of nodes.
+ :rtype: astroid.bases.NodeNG, or a list of nodes.
+ """
+ def _extract(node):
+ if isinstance(node, nodes.Discard):
+ return node.value
+ else:
+ return node
+
+ requested_lines = []
+ for idx, line in enumerate(code.splitlines()):
+ if line.strip().endswith(_STATEMENT_SELECTOR):
+ requested_lines.append(idx + 1)
+
+ tree = build_module(code, module_name=module_name)
+ extracted = []
+ if requested_lines:
+ for line in requested_lines:
+ extracted.append(_find_statement_by_line(tree, line))
+
+ # Modifies the tree.
+ extracted.extend(_extract_expressions(tree))
+
+ if not extracted:
+ extracted.append(tree.body[-1])
+
+ extracted = [_extract(node) for node in extracted]
+ if len(extracted) == 1:
+ return extracted[0]
+ else:
+ return extracted
+
+
+def build_module(code, module_name='', path=None):
+ """Parses a string module with a builder.
+ :param code: The code for the module.
+ :type code: str
+ :param module_name: The name for the module
+ :type module_name: str
+ :param path: The path for the module
+ :type path: str
+ :returns: The module AST.
+ :rtype: astroid.bases.NodeNG
+ """
+ code = textwrap.dedent(code)
+ return builder.AstroidBuilder(None).string_build(code, modname=module_name, path=path)
+
+
+def require_version(minver=None, maxver=None):
+ """ Compare version of python interpreter to the given one. Skip the test
+ if older.
+ """
+ def parse(string, default=None):
+ string = string or default
+ try:
+ return tuple(int(v) for v in string.split('.'))
+ except ValueError:
+ raise ValueError('%s is not a correct version : should be X.Y[.Z].' % version)
+
+ def check_require_version(f):
+ current = sys.version_info[:3]
+ if parse(minver, "0") < current <= parse(maxver, "4"):
+ return f
+ else:
+ str_version = '.'.join(str(v) for v in sys.version_info)
+ @functools.wraps(f)
+ def new_f(self, *args, **kwargs):
+ if minver is not None:
+ self.skipTest('Needs Python > %s. Current version is %s.' % (minver, str_version))
+ elif maxver is not None:
+ self.skipTest('Needs Python <= %s. Current version is %s.' % (maxver, str_version))
+ return new_f
+
+
+ return check_require_version
+
+def get_name_node(start_from, name, index=0):
+ return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index]
diff --git a/third_party/pylint/README.chromium b/third_party/pylint/README.chromium
index c8b2203..2c3ecd9 100644
--- a/third_party/pylint/README.chromium
+++ b/third_party/pylint/README.chromium
@@ -1,5 +1,5 @@
URL: http://www.pylint.org/
-Version: 1.4.0
+Version: 1.4.1
License: GPL
License File: LICENSE.txt
diff --git a/third_party/pylint/__pkginfo__.py b/third_party/pylint/__pkginfo__.py
index acca3b7..6ed331a 100644
--- a/third_party/pylint/__pkginfo__.py
+++ b/third_party/pylint/__pkginfo__.py
@@ -19,10 +19,10 @@
modname = distname = 'pylint'
-numversion = (1, 4, 0)
+numversion = (1, 4, 1)
version = '.'.join([str(num) for num in numversion])
-install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.3.2', 'six']
+install_requires = ['logilab-common >= 0.53.0', 'astroid >= 1.3.3', 'six']
license = 'GPL'
description = "python code static checker"
diff --git a/third_party/pylint/checkers/base.py b/third_party/pylint/checkers/base.py
index 8198d16..750d661 100644
--- a/third_party/pylint/checkers/base.py
+++ b/third_party/pylint/checkers/base.py
@@ -45,6 +45,7 @@
has_known_bases,
NoSuchArgumentError,
is_import_error,
+ unimplemented_abstract_methods,
)
@@ -148,8 +149,7 @@
PROPERTY_CLASSES = set(('__builtin__.property', 'abc.abstractproperty'))
else:
PROPERTY_CLASSES = set(('builtins.property', 'abc.abstractproperty'))
-ABC_METHODS = set(('abc.abstractproperty', 'abc.abstractmethod',
- 'abc.abstractclassmethod', 'abc.abstractstaticmethod'))
+
def _determine_function_name_type(node):
"""Determine the name type whose regex the a function's name should match.
@@ -179,26 +179,17 @@
return 'attr'
return 'method'
-def decorated_with_abc(func):
- """ Determine if the `func` node is decorated
- with `abc` decorators (abstractmethod et co.)
- """
- if func.decorators:
- for node in func.decorators.nodes:
- try:
- infered = next(node.infer())
- except InferenceError:
- continue
- if infered and infered.qname() in ABC_METHODS:
- return True
-def has_abstract_methods(node):
+
+def _has_abstract_methods(node):
"""
- Determine if the given `node` has
- abstract methods, defined with `abc` module.
+ Determine if the given `node` has abstract methods.
+
+ The methods should be made abstract by decorating them
+ with `abc` decorators.
"""
- return any(decorated_with_abc(meth)
- for meth in node.methods())
+ return len(unimplemented_abstract_methods(node)) > 0
+
def report_by_type_stats(sect, stats, old_stats):
"""make a report of
@@ -298,7 +289,7 @@
'duplicate-argument-name',
'Duplicate argument names in function definitions are syntax'
' errors.'),
- 'E0110': ('Abstract class with abstract methods instantiated',
+ 'E0110': ('Abstract class %r with abstract methods instantiated',
'abstract-class-instantiated',
'Used when an abstract class with `abc.ABCMeta` as metaclass '
'has abstract methods and is instantiated.'),
@@ -398,17 +389,21 @@
return
# __init__ was called
metaclass = infered.metaclass()
- abstract_methods = has_abstract_methods(infered)
+ abstract_methods = _has_abstract_methods(infered)
if metaclass is None:
# Python 3.4 has `abc.ABC`, which won't be detected
# by ClassNode.metaclass()
for ancestor in infered.ancestors():
if ancestor.qname() == 'abc.ABC' and abstract_methods:
- self.add_message('abstract-class-instantiated', node=node)
+ self.add_message('abstract-class-instantiated',
+ args=(infered.name, ),
+ node=node)
break
return
if metaclass.qname() == 'abc.ABCMeta' and abstract_methods:
- self.add_message('abstract-class-instantiated', node=node)
+ self.add_message('abstract-class-instantiated',
+ args=(infered.name, ),
+ node=node)
def _check_else_on_loop(self, node):
"""Check that any loop with an else clause has a break statement."""
@@ -676,7 +671,13 @@
variable names, max locals
"""
self.stats[node.is_method() and 'method' or 'function'] += 1
+ self._check_dangerous_default(node)
+
+ def _check_dangerous_default(self, node):
# check for dangerous default values as arguments
+ is_iterable = lambda n: isinstance(n, (astroid.List,
+ astroid.Set,
+ astroid.Dict))
for default in node.args.defaults:
try:
value = next(default.infer())
@@ -685,21 +686,30 @@
if (isinstance(value, astroid.Instance) and
value.qname() in DEFAULT_ARGUMENT_SYMBOLS):
+
if value is default:
msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
- elif type(value) is astroid.Instance:
- if isinstance(default, astroid.CallFunc):
- # this argument is direct call to list() or dict() etc
+ elif type(value) is astroid.Instance or is_iterable(value):
+ # We are here in the following situation(s):
+ # * a dict/set/list/tuple call which wasn't inferred
+ # to a syntax node ({}, () etc.). This can happen
+ # when the arguments are invalid or unknown to
+ # the inference.
+ # * a variable from somewhere else, which turns out to be a list
+ # or a dict.
+ if is_iterable(default):
+ msg = value.pytype()
+ elif isinstance(default, astroid.CallFunc):
msg = '%s() (%s)' % (value.name, value.qname())
else:
- # this argument is a variable from somewhere else which turns
- # out to be a list or dict
msg = '%s (%s)' % (default.as_string(), value.qname())
else:
# this argument is a name
msg = '%s (%s)' % (default.as_string(),
DEFAULT_ARGUMENT_SYMBOLS[value.qname()])
- self.add_message('dangerous-default-value', node=node, args=(msg,))
+ self.add_message('dangerous-default-value',
+ node=node,
+ args=(msg, ))
@check_messages('unreachable', 'lost-exception')
def visit_return(self, node):
diff --git a/third_party/pylint/checkers/classes.py b/third_party/pylint/checkers/classes.py
index eeaf689..1a10c35 100644
--- a/third_party/pylint/checkers/classes.py
+++ b/third_party/pylint/checkers/classes.py
@@ -30,7 +30,7 @@
from pylint.checkers.utils import (
PYMETHODS, overrides_a_method, check_messages, is_attr_private,
is_attr_protected, node_frame_class, safe_infer, is_builtin_object,
- decorated_with_property)
+ decorated_with_property, unimplemented_abstract_methods)
import six
if sys.version_info >= (3, 0):
@@ -179,11 +179,11 @@
'missing-interface-method',
'Used when a method declared in an interface is missing from a \
class implementing this interface'),
- 'W0221': ('Arguments number differs from %s method',
+ 'W0221': ('Arguments number differs from %s %r method',
'arguments-differ',
'Used when a method has a different number of arguments than in \
the implemented interface or in an overridden method.'),
- 'W0222': ('Signature differs from %s method',
+ 'W0222': ('Signature differs from %s %r method',
'signature-differs',
'Used when a method signature is different than in the \
implemented interface or in an overridden method.'),
@@ -496,7 +496,7 @@
if infered is YES:
continue
if (not isinstance(infered, astroid.Const) or
- not isinstance(infered.value, str)):
+ not isinstance(infered.value, six.string_types)):
self.add_message('invalid-slots-object',
args=infered.as_string(),
node=elt)
@@ -585,6 +585,8 @@
return
slots = klass.slots()
+ if slots is None:
+ return
# If any ancestor doesn't use slots, the slots
# defined for this class are superfluous.
if any('__slots__' not in ancestor.locals and
@@ -798,21 +800,28 @@
"""check that the given class node implements abstract methods from
base classes
"""
+ def is_abstract(method):
+ return method.is_abstract(pass_is_abstract=False)
+
# check if this class abstract
if class_is_abstract(node):
return
- for method in node.methods():
+
+ methods = sorted(
+ unimplemented_abstract_methods(node, is_abstract).items(),
+ key=lambda item: item[0],
+ )
+ for name, method in methods:
owner = method.parent.frame()
if owner is node:
continue
# owner is not this class, it must be a parent class
# check that the ancestor's method is not abstract
- if method.name in node.locals:
+ if name in node.locals:
# it is redefined as an attribute or with a descriptor
continue
- if method.is_abstract(pass_is_abstract=False):
- self.add_message('abstract-method', node=node,
- args=(method.name, owner.name))
+ self.add_message('abstract-method', node=node,
+ args=(name, owner.name))
def _check_interfaces(self, node):
"""check that the given class node really implements declared
@@ -930,9 +939,13 @@
if is_attr_private(method1.name):
return
if len(method1.args.args) != len(refmethod.args.args):
- self.add_message('arguments-differ', args=class_type, node=method1)
+ self.add_message('arguments-differ',
+ args=(class_type, method1.name),
+ node=method1)
elif len(method1.args.defaults) < len(refmethod.args.defaults):
- self.add_message('signature-differs', args=class_type, node=method1)
+ self.add_message('signature-differs',
+ args=(class_type, method1.name),
+ node=method1)
def is_first_attr(self, node):
"""Check that attribute lookup name use first attribute variable name
diff --git a/third_party/pylint/checkers/exceptions.py b/third_party/pylint/checkers/exceptions.py
index e8e5a54..88a8f22 100644
--- a/third_party/pylint/checkers/exceptions.py
+++ b/third_party/pylint/checkers/exceptions.py
@@ -16,18 +16,22 @@
"""
import sys
-from logilab.common.compat import builtins
-BUILTINS_NAME = builtins.__name__
import astroid
from astroid import YES, Instance, unpack_infer, List, Tuple
+from logilab.common.compat import builtins
from pylint.checkers import BaseChecker
from pylint.checkers.utils import (
- is_empty, is_raising,
- check_messages, inherit_from_std_ex,
- EXCEPTIONS_MODULE, has_known_bases)
+ is_empty,
+ is_raising,
+ check_messages,
+ inherit_from_std_ex,
+ EXCEPTIONS_MODULE,
+ has_known_bases,
+ safe_infer)
from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE
+
def _annotated_unpack_infer(stmt, context=None):
"""
Recursively generate nodes inferred by the given statement.
@@ -35,33 +39,21 @@
Returns an iterator which yields tuples in the format
('original node', 'infered node').
"""
- # TODO: the same code as unpack_infer, except for the annotated
- # return. We need this type of annotation only here and
- # there is no point in complicating the API for unpack_infer.
- # If the need arises, this behaviour can be promoted to unpack_infer
- # as well.
if isinstance(stmt, (List, Tuple)):
for elt in stmt.elts:
- for infered_elt in unpack_infer(elt, context):
- yield elt, infered_elt
+ inferred = safe_infer(elt)
+ if inferred and inferred is not YES:
+ yield elt, inferred
return
- # if infered is a final node, return it and stop
- infered = next(stmt.infer(context))
- if infered is stmt:
- yield stmt, infered
- return
- # else, infer recursivly, except YES object that should be returned as is
for infered in stmt.infer(context):
if infered is YES:
- yield stmt, infered
- else:
- for inf_inf in unpack_infer(infered, context):
- yield stmt, inf_inf
+ continue
+ yield stmt, infered
PY3K = sys.version_info >= (3, 0)
OVERGENERAL_EXCEPTIONS = ('Exception',)
-
+BUILTINS_NAME = builtins.__name__
MSGS = {
'E0701': ('Bad except clauses order (%s)',
'bad-except-order',
@@ -145,21 +137,8 @@
if node.exc is None:
return
if PY3K and node.cause:
- try:
- cause = next(node.cause.infer())
- except astroid.InferenceError:
- pass
- else:
- if cause is YES:
- return
- if isinstance(cause, astroid.Const):
- if cause.value is not None:
- self.add_message('bad-exception-context',
- node=node)
- elif (not isinstance(cause, astroid.Class) and
- not inherit_from_std_ex(cause)):
- self.add_message('bad-exception-context',
- node=node)
+ self._check_bad_exception_context(node)
+
expr = node.exc
if self._check_raise_value(node, expr):
return
@@ -170,23 +149,59 @@
return
self._check_raise_value(node, value)
+ def _check_bad_exception_context(self, node):
+ """Verify that the exception context is properly set.
+
+ An exception context can be only `None` or an exception.
+ """
+ cause = safe_infer(node.cause)
+ if cause in (YES, None):
+ return
+ if isinstance(cause, astroid.Const):
+ if cause.value is not None:
+ self.add_message('bad-exception-context',
+ node=node)
+ elif (not isinstance(cause, astroid.Class) and
+ not inherit_from_std_ex(cause)):
+ self.add_message('bad-exception-context',
+ node=node)
+
def _check_raise_value(self, node, expr):
"""check for bad values, string exception and class inheritance
"""
value_found = True
if isinstance(expr, astroid.Const):
value = expr.value
- if isinstance(value, str):
+ if not isinstance(value, str):
# raising-string will be emitted from python3 porting checker.
- pass
- else:
self.add_message('raising-bad-type', node=node,
args=value.__class__.__name__)
- elif (isinstance(expr, astroid.Name) and \
- expr.name in ('None', 'True', 'False')) or \
- isinstance(expr, (astroid.List, astroid.Dict, astroid.Tuple,
- astroid.Module, astroid.Function)):
- self.add_message('raising-bad-type', node=node, args=expr.name)
+ elif ((isinstance(expr, astroid.Name) and
+ expr.name in ('None', 'True', 'False')) or
+ isinstance(expr, (astroid.List, astroid.Dict, astroid.Tuple,
+ astroid.Module, astroid.Function))):
+ emit = True
+ if not PY3K and isinstance(expr, astroid.Tuple):
+ # On Python 2, using the following is not an error:
+ # raise (ZeroDivisionError, None)
+ # raise (ZeroDivisionError, )
+ # What's left to do is to check that the first
+ # argument is indeed an exception.
+ # Verifying the other arguments is not
+ # the scope of this check.
+ first = expr.elts[0]
+ inferred = safe_infer(first)
+ if isinstance(inferred, Instance):
+ # pylint: disable=protected-access
+ inferred = inferred._proxied
+ if (inferred is YES or
+ isinstance(inferred, astroid.Class)
+ and inherit_from_std_ex(inferred)):
+ emit = False
+ if emit:
+ self.add_message('raising-bad-type',
+ node=node,
+ args=expr.name)
elif ((isinstance(expr, astroid.Name) and expr.name == 'NotImplemented')
or (isinstance(expr, astroid.CallFunc) and
isinstance(expr.func, astroid.Name) and
@@ -194,22 +209,65 @@
self.add_message('notimplemented-raised', node=node)
elif isinstance(expr, (Instance, astroid.Class)):
if isinstance(expr, Instance):
+ # pylint: disable=protected-access
expr = expr._proxied
if (isinstance(expr, astroid.Class) and
- not inherit_from_std_ex(expr) and
- expr.root().name != BUILTINS_NAME):
+ not inherit_from_std_ex(expr)):
if expr.newstyle:
self.add_message('raising-non-exception', node=node)
else:
+ if has_known_bases(expr):
+ confidence = INFERENCE
+ else:
+ confidence = INFERENCE_FAILURE
self.add_message(
'nonstandard-exception', node=node,
- confidence=INFERENCE if has_known_bases(expr) else INFERENCE_FAILURE)
+ confidence=confidence)
else:
value_found = False
else:
value_found = False
return value_found
+ def _check_catching_non_exception(self, handler, exc, part):
+ if isinstance(exc, astroid.Tuple):
+ # Check if it is a tuple of exceptions.
+ inferred = [safe_infer(elt) for elt in exc.elts]
+ if any(node is astroid.YES for node in inferred):
+ # Don't emit if we don't know every component.
+ return
+ if all(node and inherit_from_std_ex(node)
+ for node in inferred):
+ return
+
+ if not isinstance(exc, astroid.Class):
+ # Don't emit the warning if the infered stmt
+ # is None, but the exception handler is something else,
+ # maybe it was redefined.
+ if (isinstance(exc, astroid.Const) and
+ exc.value is None):
+ if ((isinstance(handler.type, astroid.Const) and
+ handler.type.value is None) or
+ handler.type.parent_of(exc)):
+ # If the exception handler catches None or
+ # the exception component, which is None, is
+ # defined by the entire exception handler, then
+ # emit a warning.
+ self.add_message('catching-non-exception',
+ node=handler.type,
+ args=(part.as_string(), ))
+ else:
+ self.add_message('catching-non-exception',
+ node=handler.type,
+ args=(part.as_string(), ))
+ return
+ if (not inherit_from_std_ex(exc) and
+ exc.root().name != BUILTINS_NAME):
+ if has_known_bases(exc):
+ self.add_message('catching-non-exception',
+ node=handler.type,
+ args=(exc.name, ))
+
@check_messages('bare-except', 'broad-except', 'pointless-except',
'binary-op-exception', 'bad-except-order',
'catching-non-exception')
@@ -242,28 +300,14 @@
for part, exc in excs:
if exc is YES:
continue
- if isinstance(exc, astroid.Instance) and inherit_from_std_ex(exc):
+ if (isinstance(exc, astroid.Instance)
+ and inherit_from_std_ex(exc)):
+ # pylint: disable=protected-access
exc = exc._proxied
+
+ self._check_catching_non_exception(handler, exc, part)
+
if not isinstance(exc, astroid.Class):
- # Don't emit the warning if the infered stmt
- # is None, but the exception handler is something else,
- # maybe it was redefined.
- if (isinstance(exc, astroid.Const) and
- exc.value is None):
- if ((isinstance(handler.type, astroid.Const) and
- handler.type.value is None) or
- handler.type.parent_of(exc)):
- # If the exception handler catches None or
- # the exception component, which is None, is
- # defined by the entire exception handler, then
- # emit a warning.
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(part.as_string(), ))
- else:
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(part.as_string(), ))
continue
exc_ancestors = [anc for anc in exc.ancestors()
@@ -280,13 +324,6 @@
self.add_message('broad-except',
args=exc.name, node=handler.type)
- if (not inherit_from_std_ex(exc) and
- exc.root().name != BUILTINS_NAME):
- if has_known_bases(exc):
- self.add_message('catching-non-exception',
- node=handler.type,
- args=(exc.name, ))
-
exceptions_classes += [exc for _, exc in excs]
diff --git a/third_party/pylint/checkers/misc.py b/third_party/pylint/checkers/misc.py
index d4adfd7..7fbe70b 100644
--- a/third_party/pylint/checkers/misc.py
+++ b/third_party/pylint/checkers/misc.py
@@ -82,8 +82,6 @@
"""inspect the source file to find encoding problem or fixmes like
notes
"""
- stream = module.file_stream
- stream.seek(0) # XXX may be removed with astroid > 0.23
if self.config.notes:
notes = re.compile(
r'.*?#\s*(%s)(:*\s*.+)' % "|".join(self.config.notes))
@@ -94,10 +92,11 @@
else:
encoding = 'ascii'
- for lineno, line in enumerate(stream):
- line = self._check_encoding(lineno + 1, line, encoding)
- if line is not None and notes:
- self._check_note(notes, lineno + 1, line)
+ with module.stream() as stream:
+ for lineno, line in enumerate(stream):
+ line = self._check_encoding(lineno + 1, line, encoding)
+ if line is not None and notes:
+ self._check_note(notes, lineno + 1, line)
def register(linter):
diff --git a/third_party/pylint/checkers/python3.py b/third_party/pylint/checkers/python3.py
index 940a158..59c37bf 100644
--- a/third_party/pylint/checkers/python3.py
+++ b/third_party/pylint/checkers/python3.py
@@ -252,9 +252,19 @@
'map is a generator and must be evaluated. '
'Prefer a for-loop as alternative.',
{'maxversion': (3, 0)}),
+ 'W1632': ('input built-in referenced',
+ 'input-builtin',
+ 'Used when the input built-in is referenced '
+ '(backwards-incompatible semantics in Python 3)',
+ {'maxversion': (3, 0)}),
+ 'W1633': ('round built-in referenced',
+ 'round-builtin',
+ 'Used when the round built-in is referenced '
+ '(backwards-incompatible semantics in Python 3)',
+ {'maxversion': (3, 0)}),
}
- _missing_builtins = frozenset([
+ _bad_builtins = frozenset([
'apply',
'basestring',
'buffer',
@@ -262,9 +272,11 @@
'coerce',
'execfile',
'file',
+ 'input', # Not missing, but incompatible semantics
'long',
'raw_input',
'reduce',
+ 'round', # Not missing, but incompatible semantics
'StandardError',
'unicode',
'xrange',
@@ -310,10 +322,10 @@
self.add_message('implicit-map-evaluation', node=node)
def visit_name(self, node):
- """Detect when a built-in that is missing in Python 3 is referenced."""
+ """Detect when a "bad" built-in is referenced."""
found_node = node.lookup(node.name)[0]
if getattr(found_node, 'name', None) == '__builtin__':
- if node.name in self._missing_builtins:
+ if node.name in self._bad_builtins:
message = node.name.lower() + '-builtin'
self.add_message(message, node=node)
diff --git a/third_party/pylint/checkers/similar.py b/third_party/pylint/checkers/similar.py
index 013f1b0..9542077 100644
--- a/third_party/pylint/checkers/similar.py
+++ b/third_party/pylint/checkers/similar.py
@@ -42,7 +42,6 @@
def append_stream(self, streamid, stream, encoding=None):
"""append a file to search for similarities"""
- stream.seek(0) # XXX may be removed with astroid > 0.23
if encoding is None:
readlines = stream.readlines
else:
@@ -300,7 +299,10 @@
stream must implement the readlines method
"""
- self.append_stream(self.linter.current_name, node.file_stream, node.file_encoding)
+ with node.stream() as stream:
+ self.append_stream(self.linter.current_name,
+ stream,
+ node.file_encoding)
def close(self):
"""compute and display similarities on closing (i.e. end of parsing)"""
@@ -361,7 +363,8 @@
usage(1)
sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
for filename in args:
- sim.append_stream(filename, open(filename))
+ with open(filename) as stream:
+ sim.append_stream(filename, stream)
sim.run()
sys.exit(0)
diff --git a/third_party/pylint/checkers/spelling.py b/third_party/pylint/checkers/spelling.py
index 3990633..6cc604a 100644
--- a/third_party/pylint/checkers/spelling.py
+++ b/third_party/pylint/checkers/spelling.py
@@ -97,7 +97,7 @@
if not dict_name:
return
- self.ignore_list = self.config.spelling_ignore_words.split(",")
+ self.ignore_list = [w.strip() for w in self.config.spelling_ignore_words.split(",")]
# "param" appears in docstring in param description and
# "pylint" appears in comments in pylint pragmas.
self.ignore_list.extend(["param", "pylint"])
diff --git a/third_party/pylint/checkers/stdlib.py b/third_party/pylint/checkers/stdlib.py
index d8b5fde..b6b8026 100644
--- a/third_party/pylint/checkers/stdlib.py
+++ b/third_party/pylint/checkers/stdlib.py
@@ -16,6 +16,7 @@
"""Checkers for various standard library functions."""
import re
+import six
import sys
import astroid
@@ -25,13 +26,56 @@
from pylint.checkers import BaseChecker
from pylint.checkers import utils
-_VALID_OPEN_MODE_REGEX = re.compile(r'^(r?U|[rwa]\+?b?)$')
if sys.version_info >= (3, 0):
OPEN_MODULE = '_io'
else:
OPEN_MODULE = '__builtin__'
+
+def _check_mode_str(mode):
+ # check type
+ if not isinstance(mode, six.string_types):
+ return False
+ # check syntax
+ modes = set(mode)
+ _mode = "rwatb+U"
+ creating = False
+ if six.PY3:
+ _mode += "x"
+ creating = "x" in modes
+ if modes - set(_mode) or len(mode) > len(modes):
+ return False
+ # check logic
+ reading = "r" in modes
+ writing = "w" in modes
+ appending = "a" in modes
+ updating = "+" in modes
+ text = "t" in modes
+ binary = "b" in modes
+ if "U" in modes:
+ if writing or appending or creating and six.PY3:
+ return False
+ reading = True
+ if not six.PY3:
+ binary = True
+ if text and binary:
+ return False
+ total = reading + writing + appending + (creating if six.PY3 else 0)
+ if total > 1:
+ return False
+ if not (reading or writing or appending or creating and six.PY3):
+ return False
+ # other 2.x constraints
+ if not six.PY3:
+ if "U" in mode:
+ mode = mode.replace("U", "")
+ if "r" not in mode:
+ mode = "r" + mode
+ return mode[0] in ("r", "w", "a", "U")
+ return True
+
+
class StdlibChecker(BaseChecker):
__implements__ = (IAstroidChecker,)
name = 'stdlib'
@@ -39,7 +83,8 @@
msgs = {
'W1501': ('"%s" is not a valid mode for open.',
'bad-open-mode',
- 'Python supports: r, w, a modes with b, +, and U options. '
+ 'Python supports: r, w, a[, x] modes with b, +, '
+ 'and U (only with r) options. '
'See http://docs.python.org/2/library/functions.html#open'),
'W1502': ('Using datetime.time in a boolean context.',
'boolean-datetime',
@@ -48,16 +93,26 @@
'midnight UTC. This behaviour was fixed in Python 3.5. '
'See http://bugs.python.org/issue13936 for reference.',
{'maxversion': (3, 5)}),
- }
+ 'W1503': ('Redundant use of %s with constant '
+ 'value %r',
+ 'redundant-unittest-assert',
+ 'The first argument of assertTrue and assertFalse is '
+ 'a condition. If a constant is passed as parameter, that '
+ 'condition will be always true. In this case a warning '
+ 'should be emitted.')
+ }
- @utils.check_messages('bad-open-mode')
+ @utils.check_messages('bad-open-mode', 'redundant-unittest-assert')
def visit_callfunc(self, node):
"""Visit a CallFunc node."""
if hasattr(node, 'func'):
infer = utils.safe_infer(node.func)
- if infer and infer.root().name == OPEN_MODULE:
- if getattr(node.func, 'name', None) in ('open', 'file'):
- self._check_open_mode(node)
+ if infer:
+ if infer.root().name == OPEN_MODULE:
+ if getattr(node.func, 'name', None) in ('open', 'file'):
+ self._check_open_mode(node)
+ if infer.root().name == 'unittest.case':
+ self._check_redundant_assert(node, infer)
@utils.check_messages('boolean-datetime')
def visit_unaryop(self, node):
@@ -77,6 +132,14 @@
for value in node.values:
self._check_datetime(value)
+ def _check_redundant_assert(self, node, infer):
+ if (isinstance(infer, astroid.BoundMethod) and
+ node.args and isinstance(node.args[0], astroid.Const) and
+ infer.name in ['assertTrue', 'assertFalse']):
+ self.add_message('redundant-unittest-assert',
+ args=(infer.name, node.args[0].value, ),
+ node=node)
+
def _check_datetime(self, node):
""" Check that a datetime was infered.
If so, emit boolean-datetime warning.
@@ -89,20 +152,22 @@
infered.qname() == 'datetime.time'):
self.add_message('boolean-datetime', node=node)
+
def _check_open_mode(self, node):
"""Check that the mode argument of an open or file call is valid."""
try:
- mode_arg = utils.get_argument_from_call(node, position=1, keyword='mode')
- if mode_arg:
- mode_arg = utils.safe_infer(mode_arg)
- if (isinstance(mode_arg, astroid.Const)
- and not _VALID_OPEN_MODE_REGEX.match(mode_arg.value)):
- self.add_message('bad-open-mode', node=node,
- args=(mode_arg.value))
- except (utils.NoSuchArgumentError, TypeError):
- pass
+ mode_arg = utils.get_argument_from_call(node, position=1,
+ keyword='mode')
+ except utils.NoSuchArgumentError:
+ return
+ if mode_arg:
+ mode_arg = utils.safe_infer(mode_arg)
+ if (isinstance(mode_arg, astroid.Const)
+ and not _check_mode_str(mode_arg.value)):
+ self.add_message('bad-open-mode', node=node,
+ args=mode_arg.value)
+
def register(linter):
"""required method to auto register this checker """
linter.register_checker(StdlibChecker(linter))
-
diff --git a/third_party/pylint/checkers/typecheck.py b/third_party/pylint/checkers/typecheck.py
index 10b9f86..9f074ae 100644
--- a/third_party/pylint/checkers/typecheck.py
+++ b/third_party/pylint/checkers/typecheck.py
@@ -344,11 +344,16 @@
if not isinstance(attr, astroid.Function):
continue
- # Decorated, see if it is decorated with a property
+ # Decorated, see if it is decorated with a property.
+ # Also, check the returns and see if they are callable.
if decorated_with_property(attr):
- self.add_message('not-callable', node=node,
- args=node.func.as_string())
- break
+ if all(return_node.callable()
+ for return_node in attr.infer_call_result(node)):
+ continue
+ else:
+ self.add_message('not-callable', node=node,
+ args=node.func.as_string())
+ break
@check_messages(*(list(MSGS.keys())))
def visit_callfunc(self, node):
diff --git a/third_party/pylint/checkers/utils.py b/third_party/pylint/checkers/utils.py
index f3a7d17..2cb01d5 100644
--- a/third_party/pylint/checkers/utils.py
+++ b/third_party/pylint/checkers/utils.py
@@ -34,6 +34,8 @@
EXCEPTIONS_MODULE = "exceptions"
else:
EXCEPTIONS_MODULE = "builtins"
+ABC_METHODS = set(('abc.abstractproperty', 'abc.abstractmethod',
+ 'abc.abstractclassmethod', 'abc.abstractstaticmethod'))
class NoSuchArgumentError(Exception):
@@ -499,3 +501,64 @@
return True
except astroid.InferenceError:
pass
+
+
+def decorated_with_abc(func):
+ """Determine if the `func` node is decorated with `abc` decorators."""
+ if func.decorators:
+ for node in func.decorators.nodes:
+ try:
+ infered = next(node.infer())
+ except astroid.InferenceError:
+ continue
+ if infered and infered.qname() in ABC_METHODS:
+ return True
+
+
+def unimplemented_abstract_methods(node, is_abstract_cb=decorated_with_abc):
+ """
+ Get the unimplemented abstract methods for the given *node*.
+
+ A method can be considered abstract if the callback *is_abstract_cb*
+ returns a ``True`` value. The check defaults to verifying that
+ a method is decorated with abstract methods.
+ The function will work only for new-style classes. For old-style
+ classes, it will simply return an empty dictionary.
+ For the rest of them, it will return a dictionary of abstract method
+ names and their inferred objects.
+ """
+ visited = {}
+ try:
+ mro = reversed(node.mro())
+ except NotImplementedError:
+ # Old style class, it will not have a mro.
+ return {}
+ except astroid.ResolveError:
+ # Probably inconsistent hierarchy, don't try
+ # to figure this out here.
+ return {}
+ for ancestor in mro:
+ for obj in ancestor.values():
+ infered = obj
+ if isinstance(obj, astroid.AssName):
+ infered = safe_infer(obj)
+ if not infered:
+ continue
+ if not isinstance(infered, astroid.Function):
+ if obj.name in visited:
+ del visited[obj.name]
+ if isinstance(infered, astroid.Function):
+ # It's critical to use the original name,
+ # since after inferring, an object can be something
+ # else than expected, as in the case of the
+ # following assignment.
+ #
+ # class A:
+ # def keys(self): pass
+ # __iter__ = keys
+ abstract = is_abstract_cb(infered)
+ if abstract:
+ visited[obj.name] = infered
+ elif not abstract and obj.name in visited:
+ del visited[obj.name]
+ return visited
diff --git a/third_party/pylint/gui.py b/third_party/pylint/gui.py
index b3edd28..9c9b138 100644
--- a/third_party/pylint/gui.py
+++ b/third_party/pylint/gui.py
@@ -25,7 +25,7 @@
from six.moves.tkinter import (
Tk, Frame, Listbox, Entry, Label, Button, Scrollbar,
- Checkbutton, Radiobutton, IntVar, StringVar,
+ Checkbutton, Radiobutton, IntVar, StringVar, PanedWindow,
TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH, SUNKEN, W,
HORIZONTAL, DISABLED, NORMAL, W,
)
@@ -150,23 +150,34 @@
def init_gui(self):
"""init helper"""
+
+ window = PanedWindow(self.root, orient="vertical")
+ window.pack(side=TOP, fill=BOTH, expand=True)
+
+ top_pane = Frame(window)
+ window.add(top_pane)
+ mid_pane = Frame(window)
+ window.add(mid_pane)
+ bottom_pane = Frame(window)
+ window.add(bottom_pane)
+
#setting up frames
- top_frame = Frame(self.root)
- mid_frame = Frame(self.root)
- radio_frame = Frame(self.root)
- res_frame = Frame(self.root)
- msg_frame = Frame(self.root)
- check_frame = Frame(self.root)
- history_frame = Frame(self.root)
- btn_frame = Frame(self.root)
- rating_frame = Frame(self.root)
+ top_frame = Frame(top_pane)
+ mid_frame = Frame(top_pane)
+ history_frame = Frame(top_pane)
+ radio_frame = Frame(mid_pane)
+ rating_frame = Frame(mid_pane)
+ res_frame = Frame(mid_pane)
+ check_frame = Frame(bottom_pane)
+ msg_frame = Frame(bottom_pane)
+ btn_frame = Frame(bottom_pane)
top_frame.pack(side=TOP, fill=X)
mid_frame.pack(side=TOP, fill=X)
history_frame.pack(side=TOP, fill=BOTH, expand=True)
- radio_frame.pack(side=TOP, fill=BOTH, expand=True)
- rating_frame.pack(side=TOP, fill=BOTH, expand=True)
+ radio_frame.pack(side=TOP, fill=X)
+ rating_frame.pack(side=TOP, fill=X)
res_frame.pack(side=TOP, fill=BOTH, expand=True)
- check_frame.pack(side=TOP, fill=BOTH, expand=True)
+ check_frame.pack(side=TOP, fill=X)
msg_frame.pack(side=TOP, fill=BOTH, expand=True)
btn_frame.pack(side=TOP, fill=X)
diff --git a/third_party/pylint/interfaces.py b/third_party/pylint/interfaces.py
index 067aaa6..64f5a95 100644
--- a/third_party/pylint/interfaces.py
+++ b/third_party/pylint/interfaces.py
@@ -46,7 +46,7 @@
def process_module(self, astroid):
""" process a module
- the module's content is accessible via astroid.file_stream
+ the module's content is accessible via astroid.stream
"""
diff --git a/third_party/pylint/lint.py b/third_party/pylint/lint.py
index 6f762cc..e10ae56 100644
--- a/third_party/pylint/lint.py
+++ b/third_party/pylint/lint.py
@@ -27,48 +27,40 @@
"""
from __future__ import print_function
-# import this first to avoid builtin namespace pollution
-from pylint.checkers import utils #pylint: disable=unused-import
-
-import sys
+import collections
+import contextlib
+import itertools
+import operator
import os
-import tokenize
-from collections import defaultdict
-from contextlib import contextmanager
-from operator import attrgetter
-from warnings import warn
-from itertools import chain
try:
import multiprocessing
except ImportError:
multiprocessing = None
+import sys
+import tokenize
+import warnings
-import six
-from logilab.common.configuration import (
- UnsupportedAction, OptionsManagerMixIn)
-from logilab.common.optik_ext import check_csv
-from logilab.common.interface import implements
-from logilab.common.textutils import splitstrip, unquote
-from logilab.common.ureports import Table, Text, Section
-from logilab.common.__pkginfo__ import version as common_version
-from astroid import MANAGER, AstroidBuildingException
+import astroid
from astroid.__pkginfo__ import version as astroid_version
-from astroid.modutils import load_module_from_name, get_module_part
+from astroid import modutils
+from logilab.common import configuration
+from logilab.common import optik_ext
+from logilab.common import interface
+from logilab.common import textutils
+from logilab.common import ureports
+from logilab.common.__pkginfo__ import version as common_version
+import six
-from pylint.utils import (
- MSG_TYPES, OPTION_RGX,
- PyLintASTWalker, UnknownMessage, MessagesHandlerMixIn, ReportsHandlerMixIn,
- MessagesStore, FileState, EmptyReport,
- expand_modules, tokenize_module, Message)
-from pylint.interfaces import IRawChecker, ITokenChecker, IAstroidChecker, CONFIDENCE_LEVELS
-from pylint.checkers import (BaseTokenChecker,
- table_lines_from_stats,
- initialize as checkers_initialize)
-from pylint.reporters import initialize as reporters_initialize, CollectingReporter
+from pylint import checkers
+from pylint import interfaces
+from pylint import reporters
+from pylint import utils
from pylint import config
from pylint.__pkginfo__ import version
+MANAGER = astroid.MANAGER
+
def _get_new_args(message):
location = (
message.abspath,
@@ -195,10 +187,13 @@
if multiprocessing is not None:
class ChildLinter(multiprocessing.Process): # pylint: disable=no-member
def run(self):
- tasks_queue, results_queue, config = self._args # pylint: disable=no-member
+ tasks_queue, results_queue, self._config = self._args # pylint: disable=no-member
+ self._config["jobs"] = 1 # Child does not parallelize any further.
+
+ # Run linter for received files/modules.
for file_or_module in iter(tasks_queue.get, 'STOP'):
- result = self._run_linter(config, file_or_module[0])
+ result = self._run_linter(file_or_module[0])
try:
results_queue.put(result)
except Exception as ex:
@@ -206,7 +201,7 @@
print(ex, file=sys.stderr)
results_queue.put({})
- def _run_linter(self, config, file_or_module):
+ def _run_linter(self, file_or_module):
linter = PyLinter()
# Register standard checkers.
@@ -214,26 +209,8 @@
# Load command line plugins.
# TODO linter.load_plugin_modules(self._plugins)
- linter.disable('pointless-except')
- linter.disable('suppressed-message')
- linter.disable('useless-suppression')
-
- # TODO(cpopa): the sub-linters will not know all the options
- # because they are not available here, as they are patches to
- # PyLinter options. The following is just a hack to handle
- # just a part of the options available in the Run class.
-
- if 'disable_msg' in config:
- # Disable everything again. We don't have access
- # to the original linter though.
- for msgid in config['disable_msg']:
- linter.disable(msgid)
- for key in set(config) - set(dict(linter.options)):
- del config[key]
-
- config['jobs'] = 1 # Child does not parallelize any further.
- linter.load_configuration(**config)
- linter.set_reporter(CollectingReporter())
+ linter.load_configuration(**self._config)
+ linter.set_reporter(reporters.CollectingReporter())
# Run the checks.
linter.check(file_or_module)
@@ -243,8 +220,10 @@
msgs, linter.stats, linter.msg_status)
-class PyLinter(OptionsManagerMixIn, MessagesHandlerMixIn, ReportsHandlerMixIn,
- BaseTokenChecker):
+class PyLinter(configuration.OptionsManagerMixIn,
+ utils.MessagesHandlerMixIn,
+ utils.ReportsHandlerMixIn,
+ checkers.BaseTokenChecker):
"""lint Python modules using external checkers.
This is the main checker controlling the other ones and the reports
@@ -258,7 +237,7 @@
to ensure the latest code version is actually checked.
"""
- __implements__ = (ITokenChecker,)
+ __implements__ = (interfaces.ITokenChecker, )
name = 'master'
priority = 0
@@ -330,11 +309,11 @@
('confidence',
{'type' : 'multiple_choice', 'metavar': '<levels>',
'default': '',
- 'choices': [c.name for c in CONFIDENCE_LEVELS],
+ 'choices': [c.name for c in interfaces.CONFIDENCE_LEVELS],
'group': 'Messages control',
'help' : 'Only show warnings with the listed confidence levels.'
' Leave empty to show all. Valid levels: %s' % (
- ', '.join(c.name for c in CONFIDENCE_LEVELS),)}),
+ ', '.join(c.name for c in interfaces.CONFIDENCE_LEVELS),)}),
('enable',
{'type' : 'csv', 'metavar': '<msg ids>',
@@ -406,19 +385,20 @@
# some stuff has to be done before ancestors initialization...
#
# messages store / checkers / reporter / astroid manager
- self.msgs_store = MessagesStore()
+ self.msgs_store = utils.MessagesStore()
self.reporter = None
self._reporter_name = None
self._reporters = {}
- self._checkers = defaultdict(list)
+ self._checkers = collections.defaultdict(list)
self._pragma_lineno = {}
self._ignore_file = False
# visit variables
- self.file_state = FileState()
+ self.file_state = utils.FileState()
self.current_name = None
self.current_file = None
self.stats = None
# init options
+ self._external_opts = options
self.options = options + PyLinter.make_options()
self.option_groups = option_groups + PyLinter.option_groups
self._options_methods = {
@@ -428,12 +408,13 @@
'enable-msg': self.enable}
full_version = '%%prog %s, \nastroid %s, common %s\nPython %s' % (
version, astroid_version, common_version, sys.version)
- OptionsManagerMixIn.__init__(self, usage=__doc__,
- version=full_version,
- config_file=pylintrc or config.PYLINTRC)
- MessagesHandlerMixIn.__init__(self)
- ReportsHandlerMixIn.__init__(self)
- BaseTokenChecker.__init__(self)
+ configuration.OptionsManagerMixIn.__init__(
+ self, usage=__doc__,
+ version=full_version,
+ config_file=pylintrc or config.PYLINTRC)
+ utils.MessagesHandlerMixIn.__init__(self)
+ utils.ReportsHandlerMixIn.__init__(self)
+ checkers.BaseTokenChecker.__init__(self)
# provided reports
self.reports = (('RP0001', 'Messages by category',
report_total_messages_stats),
@@ -451,8 +432,8 @@
self.set_reporter(reporter)
def load_default_plugins(self):
- checkers_initialize(self)
- reporters_initialize(self)
+ checkers.initialize(self)
+ reporters.initialize(self)
# Make sure to load the default reporter, because
# the option has been set before the plugins had been loaded.
if not self.reporter:
@@ -466,7 +447,7 @@
if modname in self._dynamic_plugins:
continue
self._dynamic_plugins.add(modname)
- module = load_module_from_name(modname)
+ module = modutils.load_module_from_name(modname)
module.register(self)
def _load_reporter(self):
@@ -475,7 +456,8 @@
self.set_reporter(self._reporters[name]())
else:
qname = self._reporter_name
- module = load_module_from_name(get_module_part(qname))
+ module = modutils.load_module_from_name(
+ modutils.get_module_part(qname))
class_name = qname.split('.')[-1]
reporter_class = getattr(module, class_name)
self.set_reporter(reporter_class())
@@ -496,23 +478,27 @@
meth = self._options_methods[optname]
except KeyError:
meth = self._bw_options_methods[optname]
- warn('%s is deprecated, replace it by %s' % (
- optname, optname.split('-')[0]), DeprecationWarning)
- value = check_csv(None, optname, value)
+ warnings.warn('%s is deprecated, replace it by %s' % (
+ optname, optname.split('-')[0]),
+ DeprecationWarning)
+ value = optik_ext.check_csv(None, optname, value)
if isinstance(value, (list, tuple)):
for _id in value:
meth(_id, ignore_unknown=True)
else:
meth(value)
+ return # no need to call set_option, disable/enable methods do it
elif optname == 'output-format':
self._reporter_name = value
# If the reporters are already available, load
# the reporter class.
if self._reporters:
self._load_reporter()
+
try:
- BaseTokenChecker.set_option(self, optname, value, action, optdict)
- except UnsupportedAction:
+ checkers.BaseTokenChecker.set_option(self, optname,
+ value, action, optdict)
+ except configuration.UnsupportedAction:
print('option %s can\'t be read from config file' % \
optname, file=sys.stderr)
@@ -564,7 +550,7 @@
def disable_reporters(self):
"""disable all reporters"""
for reporters in six.itervalues(self._reports):
- for report_id, _title, _cb in reporters:
+ for report_id, _, _ in reporters:
self.disable_report(report_id)
def error_mode(self):
@@ -586,7 +572,7 @@
for (tok_type, content, start, _, _) in tokens:
if tok_type != tokenize.COMMENT:
continue
- match = OPTION_RGX.search(content)
+ match = utils.OPTION_RGX.search(content)
if match is None:
continue
if match.group(1).strip() == "disable-all" or \
@@ -611,7 +597,7 @@
meth = self._bw_options_methods[opt]
# found a "(dis|en)able-msg" pragma deprecated suppresssion
self.add_message('deprecated-pragma', line=start[0], args=(opt, opt.replace('-msg', '')))
- for msgid in splitstrip(value):
+ for msgid in textutils.splitstrip(value):
# Add the line where a control pragma was encountered.
if opt in control_pragmas:
self._pragma_lineno[msgid] = start[0]
@@ -623,7 +609,7 @@
self._ignore_file = True
return
meth(msgid, 'module', start[0])
- except UnknownMessage:
+ except utils.UnknownMessage:
self.add_message('bad-option-value', args=msgid, line=start[0])
else:
self.add_message('unrecognized-inline-option', args=opt, line=start[0])
@@ -650,7 +636,8 @@
any(self.report_is_enabled(r[0]) for r in checker.reports)):
neededcheckers.append(checker)
# Sort checkers by priority
- neededcheckers = sorted(neededcheckers, key=attrgetter('priority'),
+ neededcheckers = sorted(neededcheckers,
+ key=operator.attrgetter('priority'),
reverse=True)
return neededcheckers
@@ -687,11 +674,11 @@
self._do_check(files_or_modules)
else:
# Hack that permits running pylint, on Windows, with -m switch
- # and with --jobs, as in 'py -2 -m pylint .. --jobs'.
+ # and with --jobs, as in 'python -2 -m pylint .. --jobs'.
# For more details why this is needed,
# see Python issue http://bugs.python.org/issue10845.
- mock_main = six.PY2 and __name__ != '__main__' # -m switch
+ mock_main = __name__ != '__main__' # -m switch
if mock_main:
sys.modules['__main__'] = sys.modules[__name__]
try:
@@ -702,7 +689,14 @@
def _parallel_task(self, files_or_modules):
# Prepare configuration for child linters.
- config = vars(self.config)
+ filter_options = {'symbols', 'include-ids', 'long-help'}
+ filter_options.update([opt_name for opt_name, _ in self._external_opts])
+ config = {}
+ for opt_providers in six.itervalues(self._all_options):
+ for optname, optdict, val in opt_providers.options_and_values():
+ if optname not in filter_options:
+ config[optname] = configuration.format_option_value(optdict, val)
+
childs = []
manager = multiprocessing.Manager() # pylint: disable=no-member
tasks_queue = manager.Queue() # pylint: disable=no-member
@@ -759,14 +753,14 @@
last_module = module
for msg in messages:
- msg = Message(*msg)
+ msg = utils.Message(*msg)
self.set_current_module(module)
self.reporter.handle_message(msg)
all_stats.append(stats)
self.msg_status |= msg_status
- self.stats = _merge_stats(chain(all_stats, [self.stats]))
+ self.stats = _merge_stats(itertools.chain(all_stats, [self.stats]))
self.current_name = last_module
# Insert stats data to local checkers.
@@ -775,15 +769,17 @@
checker.stats = self.stats
def _do_check(self, files_or_modules):
- walker = PyLintASTWalker(self)
+ walker = utils.PyLintASTWalker(self)
checkers = self.prepare_checkers()
- tokencheckers = [c for c in checkers if implements(c, ITokenChecker)
+ tokencheckers = [c for c in checkers
+ if interface.implements(c, interfaces.ITokenChecker)
and c is not self]
- rawcheckers = [c for c in checkers if implements(c, IRawChecker)]
+ rawcheckers = [c for c in checkers
+ if interface.implements(c, interfaces.IRawChecker)]
# notify global begin
for checker in checkers:
checker.open()
- if implements(checker, IAstroidChecker):
+ if interface.implements(checker, interfaces.IAstroidChecker):
walker.add_checker(checker)
# build ast and check modules or packages
for descr in self.expand_files(files_or_modules):
@@ -795,18 +791,18 @@
self.reporter.set_output(open(reportfile, 'w'))
self.set_current_module(modname, filepath)
# get the module representation
- astroid = self.get_ast(filepath, modname)
- if astroid is None:
+ ast_node = self.get_ast(filepath, modname)
+ if ast_node is None:
continue
# XXX to be correct we need to keep module_msgs_state for every
# analyzed module (the problem stands with localized messages which
# are only detected in the .close step)
- self.file_state = FileState(descr['basename'])
+ self.file_state = utils.FileState(descr['basename'])
self._ignore_file = False
# fix the current file (if the source file was not available or
# if it's actually a c extension)
- self.current_file = astroid.file # pylint: disable=maybe-no-member
- self.check_astroid_module(astroid, walker, rawcheckers, tokencheckers)
+ self.current_file = ast_node.file # pylint: disable=maybe-no-member
+ self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers)
# warn about spurious inline messages handling
for msgid, line, args in self.file_state.iter_spurious_suppression_messages(self.msgs_store):
self.add_message(msgid, line, None, args)
@@ -819,7 +815,7 @@
def expand_files(self, modules):
"""get modules and errors from a list of modules and handle errors
"""
- result, errors = expand_modules(modules, self.config.black_list)
+ result, errors = utils.expand_modules(modules, self.config.black_list)
for error in errors:
message = modname = error["mod"]
key = error["key"]
@@ -840,7 +836,7 @@
self.current_file = filepath or modname
self.stats['by_module'][modname] = {}
self.stats['by_module'][modname]['statement'] = 0
- for msg_cat in six.itervalues(MSG_TYPES):
+ for msg_cat in six.itervalues(utils.MSG_TYPES):
self.stats['by_module'][modname][msg_cat] = 0
def get_ast(self, filepath, modname):
@@ -849,38 +845,24 @@
return MANAGER.ast_from_file(filepath, modname, source=True)
except SyntaxError as ex:
self.add_message('syntax-error', line=ex.lineno, args=ex.msg)
- except AstroidBuildingException as ex:
+ except astroid.AstroidBuildingException as ex:
self.add_message('parse-error', args=ex)
except Exception as ex: # pylint: disable=broad-except
import traceback
traceback.print_exc()
self.add_message('astroid-error', args=(ex.__class__, ex))
- def check_astroid_module(self, astroid, walker, rawcheckers, tokencheckers):
- """check a module from its astroid representation, real work"""
+ def check_astroid_module(self, ast_node, walker,
+ rawcheckers, tokencheckers):
+ """Check a module from its astroid representation."""
try:
- return self._check_astroid_module(astroid, walker,
- rawcheckers, tokencheckers)
- finally:
- # Close file_stream, if opened, to avoid to open many files.
- if astroid.file_stream:
- astroid.file_stream.close()
- # TODO(cpopa): This is an implementation detail, but it will
- # be moved in astroid at some point.
- # We invalidate the cached property, to let the others
- # modules which relies on this one to get a new file stream.
- del astroid.file_stream
-
- def _check_astroid_module(self, astroid, walker, rawcheckers, tokencheckers):
- # call raw checkers if possible
- try:
- tokens = tokenize_module(astroid)
+ tokens = utils.tokenize_module(ast_node)
except tokenize.TokenError as ex:
self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0])
return
- if not astroid.pure_python:
- self.add_message('raw-checker-failed', args=astroid.name)
+ if not ast_node.pure_python:
+ self.add_message('raw-checker-failed', args=ast_node.name)
else:
#assert astroid.file.endswith('.py')
# invoke ITokenChecker interface on self to fetch module/block
@@ -889,14 +871,14 @@
if self._ignore_file:
return False
# walk ast to collect line numbers
- self.file_state.collect_block_lines(self.msgs_store, astroid)
+ self.file_state.collect_block_lines(self.msgs_store, ast_node)
# run raw and tokens checkers
for checker in rawcheckers:
- checker.process_module(astroid)
+ checker.process_module(ast_node)
for checker in tokencheckers:
checker.process_tokens(tokens)
# generate events to astroid checkers
- walker.walk(astroid)
+ walker.walk(ast_node)
return True
# IAstroidChecker interface #################################################
@@ -907,8 +889,9 @@
'by_msg' : {},
}
MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
- MANAGER.extension_package_whitelist.update(self.config.extension_pkg_whitelist)
- for msg_cat in six.itervalues(MSG_TYPES):
+ MANAGER.extension_package_whitelist.update(
+ self.config.extension_pkg_whitelist)
+ for msg_cat in six.itervalues(utils.MSG_TYPES):
self.stats[msg_cat] = 0
def generate_reports(self):
@@ -927,7 +910,7 @@
filename = 'pylint_global.' + self.reporter.extension
self.reporter.set_output(open(filename, 'w'))
else:
- sect = Section()
+ sect = ureports.Section()
if self.config.reports or self.config.output_format == 'html':
self.reporter.display_results(sect)
# save results if persistent run
@@ -938,7 +921,7 @@
# No output will be emitted for the html
# reporter if the file doesn't exist, so emit
# the results here.
- self.reporter.display_results(Section())
+ self.reporter.display_results(ureports.Section())
self.reporter.on_close(self.stats, {})
# specific reports ########################################################
@@ -948,7 +931,7 @@
# check with at least check 1 statements (usually 0 when there is a
# syntax error preventing pylint from further processing)
if stats['statement'] == 0:
- raise EmptyReport()
+ raise utils.EmptyReport()
# get a global note for the code
evaluation = self.config.evaluation
try:
@@ -963,23 +946,23 @@
msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote)
if self.config.comment:
msg = '%s\n%s' % (msg, config.get_note_message(note))
- sect.append(Text(msg))
+ sect.append(ureports.Text(msg))
# some reporting functions ####################################################
def report_total_messages_stats(sect, stats, previous_stats):
"""make total errors / warnings report"""
lines = ['type', 'number', 'previous', 'difference']
- lines += table_lines_from_stats(stats, previous_stats,
- ('convention', 'refactor',
- 'warning', 'error'))
- sect.append(Table(children=lines, cols=4, rheaders=1))
+ lines += checkers.table_lines_from_stats(stats, previous_stats,
+ ('convention', 'refactor',
+ 'warning', 'error'))
+ sect.append(ureports.Table(children=lines, cols=4, rheaders=1))
def report_messages_stats(sect, stats, _):
"""make messages type report"""
if not stats['by_msg']:
# don't print this report when we didn't detected any errors
- raise EmptyReport()
+ raise utils.EmptyReport()
in_order = sorted([(value, msg_id)
for msg_id, value in six.iteritems(stats['by_msg'])
if not msg_id.startswith('I')])
@@ -987,14 +970,14 @@
lines = ('message id', 'occurrences')
for value, msg_id in in_order:
lines += (msg_id, str(value))
- sect.append(Table(children=lines, cols=2, rheaders=1))
+ sect.append(ureports.Table(children=lines, cols=2, rheaders=1))
def report_messages_by_module_stats(sect, stats, _):
"""make errors / warnings by modules report"""
if len(stats['by_module']) == 1:
# don't print this report when we are analysing a single module
- raise EmptyReport()
- by_mod = defaultdict(dict)
+ raise utils.EmptyReport()
+ by_mod = collections.defaultdict(dict)
for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'):
total = stats[m_type]
for module in six.iterkeys(stats['by_module']):
@@ -1022,8 +1005,8 @@
for val in line[:-1]:
lines.append('%.2f' % val)
if len(lines) == 5:
- raise EmptyReport()
- sect.append(Table(children=lines, cols=5, rheaders=1))
+ raise utils.EmptyReport()
+ sect.append(ureports.Table(children=lines, cols=5, rheaders=1))
# utilities ###################################################################
@@ -1068,7 +1051,7 @@
i += 1
-@contextmanager
+@contextlib.contextmanager
def fix_import_path(args):
"""Prepare sys.path for running the linter checks.
@@ -1229,10 +1212,12 @@
# run init hook, if present, before loading plugins
if config_parser.has_option('MASTER', 'init-hook'):
cb_init_hook('init-hook',
- unquote(config_parser.get('MASTER', 'init-hook')))
+ textutils.unquote(config_parser.get('MASTER',
+ 'init-hook')))
# is there some additional plugins in the file configuration, in
if config_parser.has_option('MASTER', 'load-plugins'):
- plugins = splitstrip(config_parser.get('MASTER', 'load-plugins'))
+ plugins = textutils.splitstrip(
+ config_parser.get('MASTER', 'load-plugins'))
linter.load_plugin_modules(plugins)
# now we can load file config and command line, plugins (which can
# provide options) have been registered
@@ -1289,7 +1274,7 @@
def cb_add_plugins(self, name, value):
"""callback for option preprocessing (i.e. before option parsing)"""
- self._plugins.extend(splitstrip(value))
+ self._plugins.extend(textutils.splitstrip(value))
def cb_error_mode(self, *args, **kwargs):
"""error mode:
@@ -1314,7 +1299,7 @@
def cb_help_message(self, option, optname, value, parser):
"""optik callback for printing some help about a particular message"""
- self.linter.msgs_store.help_message(splitstrip(value))
+ self.linter.msgs_store.help_message(textutils.splitstrip(value))
sys.exit(0)
def cb_full_documentation(self, option, optname, value, parser):
@@ -1334,7 +1319,7 @@
def cb_list_confidence_levels(option, optname, value, parser):
- for level in CONFIDENCE_LEVELS:
+ for level in interfaces.CONFIDENCE_LEVELS:
print('%-18s: %s' % level)
sys.exit(0)
diff --git a/third_party/pylint/reporters/json.py b/third_party/pylint/reporters/json.py
new file mode 100644
index 0000000..7dba52b
--- /dev/null
+++ b/third_party/pylint/reporters/json.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2003-2014 LOGILAB S.A. (Paris, FRANCE).
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""JSON reporter"""
+from __future__ import absolute_import, print_function
+
+import json
+import sys
+from cgi import escape
+
+from pylint.interfaces import IReporter
+from pylint.reporters import BaseReporter
+
+
+class JSONReporter(BaseReporter):
+ """Report messages and layouts in JSON."""
+
+ __implements__ = IReporter
+ name = 'json'
+ extension = 'json'
+
+ def __init__(self, output=sys.stdout):
+ BaseReporter.__init__(self, output)
+ self.messages = []
+
+ def handle_message(self, message):
+ """Manage message of different type and in the context of path."""
+
+ self.messages.append({
+ 'type': message.category,
+ 'module': message.module,
+ 'obj': message.obj,
+ 'line': message.line,
+ 'column': message.column,
+ 'path': message.path,
+ 'symbol': message.symbol,
+ 'message': escape(message.msg or ''),
+ })
+
+ def _display(self, layout):
+ """Launch layouts display"""
+ if self.messages:
+ print(json.dumps(self.messages, indent=4), file=self.out)
+
+
+def register(linter):
+ """Register the reporter classes with the linter."""
+ linter.register_reporter(JSONReporter)
diff --git a/third_party/pylint/reporters/text.py b/third_party/pylint/reporters/text.py
index bc86313..53c4a8d 100644
--- a/third_party/pylint/reporters/text.py
+++ b/third_party/pylint/reporters/text.py
@@ -77,7 +77,8 @@
def __init__(self, output=None):
warnings.warn('%s output format is deprecated. This is equivalent '
- 'to --msg-template=%s' % (self.name, self.line_format))
+ 'to --msg-template=%s' % (self.name, self.line_format),
+ DeprecationWarning)
TextReporter.__init__(self, output)
diff --git a/third_party/pylint/utils.py b/third_party/pylint/utils.py
index fcc7904..6685c4a 100644
--- a/third_party/pylint/utils.py
+++ b/third_party/pylint/utils.py
@@ -128,16 +128,19 @@
return MSG_TYPES_LONG.get(cid)
+def _decoding_readline(stream, module):
+ return lambda: stream.readline().decode(module.file_encoding,
+ 'replace')
+
+
def tokenize_module(module):
- stream = module.file_stream
- stream.seek(0)
- readline = stream.readline
- if sys.version_info < (3, 0):
- if module.file_encoding is not None:
- readline = lambda: stream.readline().decode(module.file_encoding,
- 'replace')
- return list(tokenize.generate_tokens(readline))
- return list(tokenize.tokenize(readline))
+ with module.stream() as stream:
+ readline = stream.readline
+ if sys.version_info < (3, 0):
+ if module.file_encoding is not None:
+ readline = _decoding_readline(stream, module)
+ return list(tokenize.generate_tokens(readline))
+ return list(tokenize.tokenize(readline))
def build_message_def(checker, msgid, msg_tuple):
if implements(checker, (IRawChecker, ITokenChecker)):
@@ -267,8 +270,8 @@
msgs = self._msgs_state
msgs[msg.msgid] = False
# sync configuration object
- self.config.disable_msg = [mid for mid, val in six.iteritems(msgs)
- if not val]
+ self.config.disable = [mid for mid, val in six.iteritems(msgs)
+ if not val]
def enable(self, msgid, scope='package', line=None, ignore_unknown=False):
"""reenable message of the given id"""