summaryrefslogtreecommitdiff
path: root/venv/Lib/site-packages/pylint/checkers
diff options
context:
space:
mode:
Diffstat (limited to 'venv/Lib/site-packages/pylint/checkers')
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__init__.py64
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pycbin0 -> 1580 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pycbin0 -> 2722 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pycbin0 -> 61785 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pycbin0 -> 6481 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pycbin0 -> 44537 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pycbin0 -> 11667 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pycbin0 -> 15668 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pycbin0 -> 31580 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pycbin0 -> 25427 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pycbin0 -> 10919 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pycbin0 -> 4597 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pycbin0 -> 2422 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pycbin0 -> 34941 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pycbin0 -> 3254 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pycbin0 -> 45321 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pycbin0 -> 12304 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pycbin0 -> 9755 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pycbin0 -> 12738 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pycbin0 -> 17427 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pycbin0 -> 40274 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pycbin0 -> 31460 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pycbin0 -> 44587 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/async.py89
-rw-r--r--venv/Lib/site-packages/pylint/checkers/base.py2333
-rw-r--r--venv/Lib/site-packages/pylint/checkers/base_checker.py187
-rw-r--r--venv/Lib/site-packages/pylint/checkers/classes.py1844
-rw-r--r--venv/Lib/site-packages/pylint/checkers/design_analysis.py496
-rw-r--r--venv/Lib/site-packages/pylint/checkers/exceptions.py546
-rw-r--r--venv/Lib/site-packages/pylint/checkers/format.py1332
-rw-r--r--venv/Lib/site-packages/pylint/checkers/imports.py981
-rw-r--r--venv/Lib/site-packages/pylint/checkers/logging.py384
-rw-r--r--venv/Lib/site-packages/pylint/checkers/misc.py171
-rw-r--r--venv/Lib/site-packages/pylint/checkers/newstyle.py127
-rw-r--r--venv/Lib/site-packages/pylint/checkers/python3.py1398
-rw-r--r--venv/Lib/site-packages/pylint/checkers/raw_metrics.py119
-rw-r--r--venv/Lib/site-packages/pylint/checkers/refactoring.py1510
-rw-r--r--venv/Lib/site-packages/pylint/checkers/similar.py452
-rw-r--r--venv/Lib/site-packages/pylint/checkers/spelling.py411
-rw-r--r--venv/Lib/site-packages/pylint/checkers/stdlib.py452
-rw-r--r--venv/Lib/site-packages/pylint/checkers/strings.py755
-rw-r--r--venv/Lib/site-packages/pylint/checkers/typecheck.py1770
-rw-r--r--venv/Lib/site-packages/pylint/checkers/utils.py1253
-rw-r--r--venv/Lib/site-packages/pylint/checkers/variables.py1987
44 files changed, 18661 insertions, 0 deletions
diff --git a/venv/Lib/site-packages/pylint/checkers/__init__.py b/venv/Lib/site-packages/pylint/checkers/__init__.py
new file mode 100644
index 0000000..9c6306f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__init__.py
@@ -0,0 +1,64 @@
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
+# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""utilities methods and classes for checkers
+
+Base id of standard checkers (used in msg and report ids):
+01: base
+02: classes
+03: format
+04: import
+05: misc
+06: variables
+07: exceptions
+08: similar
+09: design_analysis
+10: newstyle
+11: typecheck
+12: logging
+13: string_format
+14: string_constant
+15: stdlib
+16: python3
+17: refactoring
+18-50: not yet used: reserved for future internal checkers.
+51-99: perhaps used: reserved for external checkers
+
+The raw_metrics checker has no number associated since it doesn't emit any
+messages nor reports. XXX not true, emit a 07 report !
+
+"""
+
+from pylint.checkers.base_checker import BaseChecker, BaseTokenChecker
+from pylint.utils import register_plugins
+
+
def table_lines_from_stats(stats, _, columns):
    """Build a flat list of cell values for a ureport.Table.

    For every metric name in *columns*, four cells are produced: the metric
    label (underscores turned into spaces), its current value taken from
    *stats* (floats formatted to three decimals), and two "NC" (not
    computed) placeholders.  The second positional argument (historically
    the old stats) is unused.
    """
    cells = []
    for metric in columns:
        value = stats[metric]
        if isinstance(value, float):
            formatted = "%.3f" % value
        else:
            formatted = str(value)
        cells.extend((metric.replace("_", " "), formatted, "NC", "NC"))
    return cells
+
+
def initialize(linter):
    """Initialize *linter* with the checkers shipped in this package.

    register_plugins scans this package's directory (``__path__[0]``) and
    registers every checker module it finds with the linter.
    """
    register_plugins(linter, __path__[0])
+
+
+__all__ = ("BaseChecker", "BaseTokenChecker", "initialize")
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..3782086
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pyc
new file mode 100644
index 0000000..ea14658
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pyc
new file mode 100644
index 0000000..aaa3e51
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pyc
new file mode 100644
index 0000000..e4f8221
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pyc
new file mode 100644
index 0000000..d0f58b4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pyc
new file mode 100644
index 0000000..647b5aa
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 0000000..5371c29
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pyc
new file mode 100644
index 0000000..8a6a0c0
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pyc
new file mode 100644
index 0000000..f8b924d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pyc
new file mode 100644
index 0000000..90cc06e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pyc
new file mode 100644
index 0000000..9f449d4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pyc
new file mode 100644
index 0000000..e409591
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pyc
new file mode 100644
index 0000000..b405dd3
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pyc
new file mode 100644
index 0000000..fdf16f6
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pyc
new file mode 100644
index 0000000..f65c6b5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pyc
new file mode 100644
index 0000000..09b77e5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pyc
new file mode 100644
index 0000000..dbf748c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pyc
new file mode 100644
index 0000000..97576df
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pyc
new file mode 100644
index 0000000..0aab77c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pyc
new file mode 100644
index 0000000..cc0c9b4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 0000000..90e8ff1
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pyc
new file mode 100644
index 0000000..943ffbd
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/async.py b/venv/Lib/site-packages/pylint/checkers/async.py
new file mode 100644
index 0000000..c33071e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/async.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for anything related to the async protocol (PEP 492)."""
+
+import sys
+
+import astroid
+from astroid import bases, exceptions
+
+from pylint import checkers, interfaces, utils
+from pylint.checkers import utils as checker_utils
+from pylint.checkers.utils import decorated_with
+
+
class AsyncChecker(checkers.BaseChecker):
    # Checker for the async protocol (PEP 492); reports under the checker
    # name "async" with message ids E17xx.  No class docstring on purpose:
    # __doc__ is surfaced in pylint's generated checker documentation.
    __implements__ = interfaces.IAstroidChecker
    name = "async"
    # message-id -> (template, symbol, description, options)
    msgs = {
        "E1700": (
            "Yield inside async function",
            "yield-inside-async-function",
            "Used when an `yield` or `yield from` statement is "
            "found inside an async function.",
            {"minversion": (3, 5)},
        ),
        "E1701": (
            "Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
            "not-async-context-manager",
            "Used when an async context manager is used with an object "
            "that does not implement the async context management protocol.",
            {"minversion": (3, 5)},
        ),
    }

    def open(self):
        """Cache configuration-derived state before a run starts."""
        # Respect the global ignore-mixin-members option when deciding
        # whether to flag instances whose class looks like a mixin.
        self._ignore_mixin_members = utils.get_global_option(
            self, "ignore-mixin-members"
        )
        # Decorators that turn an async generator function into a valid
        # async context manager.
        self._async_generators = ["contextlib.asynccontextmanager"]

    @checker_utils.check_messages("yield-inside-async-function")
    def visit_asyncfunctiondef(self, node):
        """Flag yields directly inside an async function where invalid."""
        for child in node.nodes_of_class(astroid.Yield):
            # Only yields whose scope is this very function count.  On
            # Python 3.5 every yield is flagged; on later versions only
            # `yield from` is (plain yield makes an async generator).
            if child.scope() is node and (
                sys.version_info[:2] == (3, 5) or isinstance(child, astroid.YieldFrom)
            ):
                self.add_message("yield-inside-async-function", node=child)

    @checker_utils.check_messages("not-async-context-manager")
    def visit_asyncwith(self, node):
        """Flag `async with` managers lacking __aenter__/__aexit__."""
        for ctx_mgr, _ in node.items:
            inferred = checker_utils.safe_infer(ctx_mgr)
            if inferred is None or inferred is astroid.Uninferable:
                # Inference failed: do not guess, skip this item.
                continue

            if isinstance(inferred, bases.AsyncGenerator):
                # Check if we are dealing with a function decorated
                # with contextlib.asynccontextmanager.
                if decorated_with(inferred.parent, self._async_generators):
                    continue
            else:
                try:
                    inferred.getattr("__aenter__")
                    inferred.getattr("__aexit__")
                except exceptions.NotFoundError:
                    if isinstance(inferred, astroid.Instance):
                        # If we do not know the bases of this class,
                        # just skip it.
                        if not checker_utils.has_known_bases(inferred):
                            continue
                        # Just ignore mixin classes.
                        if self._ignore_mixin_members:
                            if inferred.name[-5:].lower() == "mixin":
                                continue
                else:
                    # Both protocol methods exist: valid manager, no message.
                    continue

            self.add_message(
                "not-async-context-manager", node=node, args=(inferred.name,)
            )
+
+
def register(linter):
    """Required entry point: auto-register this module's checker."""
    checker = AsyncChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/base.py b/venv/Lib/site-packages/pylint/checkers/base.py
new file mode 100644
index 0000000..c94676e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/base.py
@@ -0,0 +1,2333 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2016 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Nick Bastin <nick.bastin@gmail.com>
+# Copyright (c) 2015 Michael Kefeder <oss@multiwave.ch>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Stephane Wirtel <stephane@wirtel.be>
+# Copyright (c) 2015 Cosmin Poieana <cmin@ropython.org>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Elias Dorneles <eliasdorneles@gmail.com>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016 Yannack <yannack@users.noreply.github.com>
+# Copyright (c) 2016 Alex Jurkiewicz <alex@jurkiewi.cz>
+# Copyright (c) 2017 Jacques Kvam <jwkvam@gmail.com>
+# Copyright (c) 2017 ttenhoeve-aa <ttenhoeve@appannie.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+# Copyright (c) 2018 Steven M. Vascellaro <svascellaro@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Chris Lamb <chris@chris-lamb.co.uk>
+# Copyright (c) 2018 glmdgrielson <32415403+glmdgrielson@users.noreply.github.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""basic checker for Python code"""
+
+import builtins
+import collections
+import itertools
+import re
+import sys
+from typing import Pattern
+
+import astroid
+import astroid.bases
+import astroid.scoped_nodes
+from astroid.arguments import CallSite
+
+import pylint.utils as lint_utils
+from pylint import checkers, exceptions, interfaces
+from pylint.checkers import utils
+from pylint.checkers.utils import is_property_setter_or_deleter
+from pylint.reporters.ureports import nodes as reporter_nodes
+
+
class NamingStyle:
    # It may seem counterintuitive that a single naming style exposes
    # several "accepted" regular expressions, but things like dunder
    # names inside method names need to be special-cased.
    CLASS_NAME_RGX = None  # type: Pattern[str]
    MOD_NAME_RGX = None  # type: Pattern[str]
    CONST_NAME_RGX = None  # type: Pattern[str]
    COMP_VAR_RGX = None  # type: Pattern[str]
    DEFAULT_NAME_RGX = None  # type: Pattern[str]
    CLASS_ATTRIBUTE_RGX = None  # type: Pattern[str]

    # Which class attribute backs each name category; getattr on `cls`
    # lets subclasses supply their own compiled patterns.
    _ATTRIBUTE_BY_NAME_TYPE = {
        "module": "MOD_NAME_RGX",
        "const": "CONST_NAME_RGX",
        "class": "CLASS_NAME_RGX",
        "function": "DEFAULT_NAME_RGX",
        "method": "DEFAULT_NAME_RGX",
        "attr": "DEFAULT_NAME_RGX",
        "argument": "DEFAULT_NAME_RGX",
        "variable": "DEFAULT_NAME_RGX",
        "class_attribute": "CLASS_ATTRIBUTE_RGX",
        "inlinevar": "COMP_VAR_RGX",
    }

    @classmethod
    def get_regex(cls, name_type):
        """Return the regex enforcing this style for *name_type*.

        Raises KeyError for an unknown name category, exactly like a
        direct dictionary lookup would.
        """
        return getattr(cls, cls._ATTRIBUTE_BY_NAME_TYPE[name_type])
+
+
class SnakeCaseStyle(NamingStyle):
    """Regex rules for snake_case naming style."""

    # Under this style even class names must be lowercase snake_case.
    CLASS_NAME_RGX = re.compile("[a-z_][a-z0-9_]+$")
    MOD_NAME_RGX = re.compile("([a-z_][a-z0-9_]*)$")
    # Constants: snake_case names or dunders (__x__).
    CONST_NAME_RGX = re.compile("(([a-z_][a-z0-9_]*)|(__.*__))$")
    COMP_VAR_RGX = re.compile("[a-z_][a-z0-9_]*$")
    # Default names: snake_case of >= 3 chars, _private of any length,
    # or dunder method names.
    DEFAULT_NAME_RGX = re.compile(
        "(([a-z_][a-z0-9_]{2,})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$"
    )
    CLASS_ATTRIBUTE_RGX = re.compile(r"(([a-z_][a-z0-9_]{2,}|(__.*__)))$")
+
+
class CamelCaseStyle(NamingStyle):
    """Regex rules for camelCase naming style."""

    # All categories start lowercase (or underscore); no further
    # underscores allowed except inside dunder alternatives.
    CLASS_NAME_RGX = re.compile("[a-z_][a-zA-Z0-9]+$")
    MOD_NAME_RGX = re.compile("([a-z_][a-zA-Z0-9]*)$")
    # Constants: camelCase names or dunders (__x__).
    CONST_NAME_RGX = re.compile("(([a-z_][A-Za-z0-9]*)|(__.*__))$")
    COMP_VAR_RGX = re.compile("[a-z_][A-Za-z0-9]*$")
    # Default names: camelCase of >= 3 chars, or dunder method names.
    DEFAULT_NAME_RGX = re.compile("(([a-z_][a-zA-Z0-9]{2,})|(__[a-z][a-zA-Z0-9_]+__))$")
    CLASS_ATTRIBUTE_RGX = re.compile(r"([a-z_][A-Za-z0-9]{2,}|(__.*__))$")
+
+
class PascalCaseStyle(NamingStyle):
    """Regex rules for PascalCase naming style."""

    # All categories start with an uppercase letter (or underscore).
    CLASS_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
    MOD_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
    # Constants: PascalCase names or dunders (__x__).
    CONST_NAME_RGX = re.compile("(([A-Z_][A-Za-z0-9]*)|(__.*__))$")
    COMP_VAR_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
    # Default names: PascalCase of >= 3 chars, or dunder method names.
    DEFAULT_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]{2,}$|(__[a-z][a-zA-Z0-9_]+__)$")
    CLASS_ATTRIBUTE_RGX = re.compile("[A-Z_][a-zA-Z0-9]{2,}$")
+
+
class UpperCaseStyle(NamingStyle):
    """Regex rules for UPPER_CASE naming style."""

    # All categories are uppercase with optional underscores/digits.
    CLASS_NAME_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
    MOD_NAME_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
    # Constants: UPPER_CASE names or dunders (__x__).
    CONST_NAME_RGX = re.compile("(([A-Z_][A-Z0-9_]*)|(__.*__))$")
    COMP_VAR_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
    # Default names: UPPER_CASE of >= 3 chars, or dunder method names.
    DEFAULT_NAME_RGX = re.compile("([A-Z_][A-Z0-9_]{2,})|(__[a-z][a-zA-Z0-9_]+__)$")
    CLASS_ATTRIBUTE_RGX = re.compile("[A-Z_][A-Z0-9_]{2,}$")
+
+
class AnyStyle(NamingStyle):
    """Naming style that accepts any name for every category."""

    @classmethod
    def get_regex(cls, name_type):
        """Return a match-anything pattern regardless of *name_type*."""
        return re.compile(".*")
+
+
# Style identifiers (as used in the *-naming-style options) mapped to the
# NamingStyle subclass implementing them.
NAMING_STYLES = {
    "snake_case": SnakeCaseStyle,
    "camelCase": CamelCaseStyle,
    "PascalCase": PascalCaseStyle,
    "UPPER_CASE": UpperCaseStyle,
    "any": AnyStyle,
}

# do not require a doc string on private/system methods
NO_REQUIRED_DOC_RGX = re.compile("^_")
REVERSED_PROTOCOL_METHOD = "__reversed__"
SEQUENCE_PROTOCOL_METHODS = ("__getitem__", "__len__")
# Either of these method sets makes an object usable with reversed().
REVERSED_METHODS = (SEQUENCE_PROTOCOL_METHODS, (REVERSED_PROTOCOL_METHOD,))
TYPECHECK_COMPARISON_OPERATORS = frozenset(("is", "is not", "==", "!=", "in", "not in"))
LITERAL_NODE_TYPES = (astroid.Const, astroid.Dict, astroid.List, astroid.Set)
UNITTEST_CASE = "unittest.case"
BUILTINS = builtins.__name__
TYPE_QNAME = "%s.type" % BUILTINS
ABC_METACLASSES = {"_py_abc.ABCMeta", "abc.ABCMeta"}  # Python 3.7+,

# Name categories that are always consistent with all naming conventions.
EXEMPT_NAME_CATEGORIES = {"exempt", "ignore"}

# A mapping from builtin-qname -> symbol, to be used when generating messages
# about dangerous default values as arguments
DEFAULT_ARGUMENT_SYMBOLS = dict(
    zip(
        [".".join([BUILTINS, x]) for x in ("set", "dict", "list")],
        ["set()", "{}", "[]"],
    )
)
# Each comparison operator mapped to its mirror image (operands swapped).
REVERSED_COMPS = {"<": ">", "<=": ">=", ">": "<", ">=": "<="}
COMPARISON_OPERATORS = frozenset(("==", "!=", "<", ">", "<=", ">="))
# List of methods which can be redefined
REDEFINABLE_METHODS = frozenset(("__module__",))
TYPING_FORWARD_REF_QNAME = "typing.ForwardRef"
+
+
def _redefines_import(node):
    """Tell whether *node* (an AssignName) shadows an import from a try body.

    The node must live inside an except handler; when that handler catches
    ImportError, any assignment whose target matches a name (or its alias)
    imported in the corresponding try block counts as a redefinition.
    Returns True for such a redefinition, False otherwise.
    """
    handler_child = node
    while handler_child and not isinstance(
        handler_child.parent, astroid.ExceptHandler
    ):
        handler_child = handler_child.parent
    if not handler_child or not utils.error_of_type(
        handler_child.parent, ImportError
    ):
        return False
    try_node = handler_child.parent.parent
    for import_node in try_node.nodes_of_class((astroid.ImportFrom, astroid.Import)):
        for name, alias in import_node.names:
            # `alias or name` is the identifier the import actually binds.
            if (alias or name) == node.name:
                return True
    return False
+
+
def in_loop(node):
    """Return True if *node* sits inside a for loop or a comprehension."""
    looping_constructs = (
        astroid.For,
        astroid.ListComp,
        astroid.SetComp,
        astroid.DictComp,
        astroid.GeneratorExp,
    )
    ancestor = node.parent
    while ancestor is not None:
        if isinstance(ancestor, looping_constructs):
            return True
        ancestor = ancestor.parent
    return False
+
+
def in_nested_list(nested_list, obj):
    """Return True if *obj* equals an element of *nested_list* or of any
    list/tuple nested inside it, at any depth.
    """
    return any(
        in_nested_list(element, obj)
        if isinstance(element, (list, tuple))
        else element == obj
        for element in nested_list
    )
+
+
def _get_break_loop_node(break_node):
    """
    Returns the loop node that holds the break node in arguments.

    Args:
        break_node (astroid.Break): the break node of interest.

    Returns:
        astroid.For or astroid.While: the loop node holding the break node.
    """
    loop_nodes = (astroid.For, astroid.While)
    parent = break_node.parent
    # Climb the tree until we hit a loop that owns the break.  A loop whose
    # `orelse` clause contains the break does not count: a break inside an
    # else clause targets an enclosing loop, not this one.
    while not isinstance(parent, loop_nodes) or break_node in getattr(
        parent, "orelse", []
    ):
        break_node = parent
        parent = parent.parent
        # Reached the module root without finding a loop: return None.
        if parent is None:
            break
    return parent
+
+
def _loop_exits_early(loop):
    """Tell whether *loop* (For/While) may terminate through a break.

    Breaks belonging to loops nested inside this one are ignored, as is
    anything inside nested function or class definitions; only a break
    that targets *loop* itself counts.

    Returns:
        bool: True if the loop may end with a break statement.
    """
    loop_nodes = (astroid.For, astroid.While)
    definition_nodes = (astroid.FunctionDef, astroid.ClassDef)
    nested_loops = [
        inner
        for inner in loop.nodes_of_class(loop_nodes, skip_klass=definition_nodes)
        if inner != loop
    ]
    for break_stmt in loop.nodes_of_class(astroid.Break, skip_klass=definition_nodes):
        if _get_break_loop_node(break_stmt) not in nested_loops:
            return True
    return False
+
+
def _is_multi_naming_match(match, node_type, confidence):
    """Tell whether a multi-regex *match* should count as a naming hit.

    Exempt name categories never count, and method names are skipped when
    the confidence is only INFERENCE_FAILURE.
    """
    if match is None or match.lastgroup is None:
        return False
    if match.lastgroup in EXEMPT_NAME_CATEGORIES:
        return False
    return node_type != "method" or confidence != interfaces.INFERENCE_FAILURE
+
+
+BUILTIN_PROPERTY = "builtins.property"
+
+
+def _get_properties(config):
+ """Returns a tuple of property classes and names.
+
+ Property classes are fully qualified, such as 'abc.abstractproperty' and
+ property names are the actual names, such as 'abstract_property'.
+ """
+ property_classes = {BUILTIN_PROPERTY}
+ property_names = set() # Not returning 'property', it has its own check.
+ if config is not None:
+ property_classes.update(config.property_classes)
+ property_names.update(
+ (prop.rsplit(".", 1)[-1] for prop in config.property_classes)
+ )
+ return property_classes, property_names
+
+
def _determine_function_name_type(node, config=None):
    """Determine the name type whose regex a function's name should match.

    :param node: A function node.
    :type node: astroid.node_classes.NodeNG
    :param config: Configuration from which to pull additional property classes.
    :type config: :class:`optparse.Values`

    :returns: One of ('function', 'method', 'attr')
    :rtype: str
    """
    prop_classes, prop_names = _get_properties(config)
    if not node.is_method():
        return "function"

    if is_property_setter_or_deleter(node):
        # prop_method.{setter,getter,deleter}-decorated methods are named
        # like attributes.
        return "attr"

    decorators = node.decorators.nodes if node.decorators else []
    for decorator in decorators:
        # A property (decorated with @property or @abc.abstractproperty)
        # is also named like an attribute.
        if isinstance(decorator, astroid.Name) or (
            isinstance(decorator, astroid.Attribute)
            and decorator.attrname in prop_names
        ):
            inferred = utils.safe_infer(decorator)
            if inferred and inferred.qname() in prop_classes:
                return "attr"
    return "method"
+
+
def _has_abstract_methods(node):
    """
    Determine if the given `node` has abstract methods.

    The methods should be made abstract by decorating them
    with `abc` decorators.
    """
    return len(utils.unimplemented_abstract_methods(node)) > 0
+
+
def report_by_type_stats(sect, stats, _):
    """Append a table to *sect* summarising, for each kind of node
    (module / class / method / function):

    * how many were found
    * what percentage is documented
    * what percentage has a bad name

    The unused third argument historically carried the previous run's
    stats ("old number" / "difference" columns now read "NC").
    """
    node_kinds = ("module", "class", "method", "function")
    summary = {}
    for kind in node_kinds:
        try:
            total = stats[kind]
        except KeyError:
            # No count recorded at all for this kind: nothing to report.
            raise exceptions.EmptyReportError()
        summary[kind] = {}
        if total == 0:
            continue
        try:
            documented = total - stats["undocumented_" + kind]
            summary[kind]["percent_documented"] = "%.2f" % (
                (documented * 100.0) / total
            )
        except KeyError:
            summary[kind]["percent_documented"] = "NC"
        try:
            summary[kind]["percent_badname"] = "%.2f" % (
                (stats["badname_" + kind] * 100.0) / total
            )
        except KeyError:
            summary[kind]["percent_badname"] = "NC"
    cells = ["type", "number", "old number", "difference", "%documented", "%badname"]
    for kind in node_kinds:
        cells.extend(
            (
                kind,
                str(stats[kind]),
                "NC",
                "NC",
                summary[kind].get("percent_documented", "0"),
                summary[kind].get("percent_badname", "0"),
            )
        )
    sect.append(reporter_nodes.Table(children=cells, cols=6, rheaders=1))
+
+
def redefined_by_decorator(node):
    """Tell whether *node* is a method redefined through a decorator whose
    expression carries the method's own name.

    For example:
        @property
        def x(self): return self._x
        @x.setter
        def x(self, value): self._x = value
    """
    if not node.decorators:
        return False
    return any(
        isinstance(decorator, astroid.Attribute)
        and getattr(decorator.expr, "name", None) == node.name
        for decorator in node.decorators.nodes
    )
+
+
class _BasicChecker(checkers.BaseChecker):
    # Shared base for the "basic" checkers: all work on the astroid AST and
    # report under the common checker name "basic".  No docstring on
    # purpose: __doc__ is surfaced in pylint's checker documentation.
    __implements__ = interfaces.IAstroidChecker
    name = "basic"
+
+
+class BasicErrorChecker(_BasicChecker):
    # Message definitions: message-id -> (template, symbol, description,
    # and optionally an options dict such as min/max Python version).
    msgs = {
        "E0100": (
            "__init__ method is a generator",
            "init-is-generator",
            "Used when the special class method __init__ is turned into a "
            "generator by a yield in its body.",
        ),
        "E0101": (
            "Explicit return in __init__",
            "return-in-init",
            "Used when the special class method __init__ has an explicit "
            "return value.",
        ),
        "E0102": (
            "%s already defined line %s",
            "function-redefined",
            "Used when a function / class / method is redefined.",
        ),
        "E0103": (
            "%r not properly in loop",
            "not-in-loop",
            "Used when break or continue keywords are used outside a loop.",
        ),
        "E0104": (
            "Return outside function",
            "return-outside-function",
            'Used when a "return" statement is found outside a function or method.',
        ),
        "E0105": (
            "Yield outside function",
            "yield-outside-function",
            'Used when a "yield" statement is found outside a function or method.',
        ),
        "E0106": (
            "Return with argument inside generator",
            "return-arg-in-generator",
            'Used when a "return" statement with an argument is found '
            "outside in a generator function or method (e.g. with some "
            '"yield" statements).',
            {"maxversion": (3, 3)},
        ),
        "E0107": (
            "Use of the non-existent %s operator",
            "nonexistent-operator",
            "Used when you attempt to use the C-style pre-increment or "
            "pre-decrement operator -- and ++, which doesn't exist in Python.",
        ),
        "E0108": (
            "Duplicate argument name %s in function definition",
            "duplicate-argument-name",
            "Duplicate argument names in function definitions are syntax errors.",
        ),
        "E0110": (
            "Abstract class %r with abstract methods instantiated",
            "abstract-class-instantiated",
            "Used when an abstract class with `abc.ABCMeta` as metaclass "
            "has abstract methods and is instantiated.",
        ),
        "W0120": (
            "Else clause on loop without a break statement",
            "useless-else-on-loop",
            "Loops should only have an else clause if they can exit early "
            "with a break statement, otherwise the statements under else "
            "should be on the same scope as the loop itself.",
        ),
        "E0112": (
            "More than one starred expression in assignment",
            "too-many-star-expressions",
            "Emitted when there are more than one starred "
            "expressions (`*x`) in an assignment. This is a SyntaxError.",
        ),
        "E0113": (
            "Starred assignment target must be in a list or tuple",
            "invalid-star-assignment-target",
            "Emitted when a star expression is used as a starred assignment target.",
        ),
        "E0114": (
            "Can use starred expression only in assignment target",
            "star-needs-assignment-target",
            "Emitted when a star expression is not used in an assignment target.",
        ),
        "E0115": (
            "Name %r is nonlocal and global",
            "nonlocal-and-global",
            "Emitted when a name is both nonlocal and global.",
        ),
        "E0116": (
            "'continue' not supported inside 'finally' clause",
            "continue-in-finally",
            "Emitted when the `continue` keyword is found "
            "inside a finally clause, which is a SyntaxError.",
        ),
        "E0117": (
            "nonlocal name %s found without binding",
            "nonlocal-without-binding",
            "Emitted when a nonlocal variable does not have an attached "
            "name somewhere in the parent scopes",
        ),
        "E0118": (
            "Name %r is used prior to global declaration",
            "used-prior-global-declaration",
            "Emitted when a name is used prior a global declaration, "
            "which results in an error since Python 3.6.",
            {"minversion": (3, 6)},
        ),
    }
+
+ @utils.check_messages("function-redefined")
+ def visit_classdef(self, node):
+ self._check_redefinition("class", node)
+
+ def _too_many_starred_for_tuple(self, assign_tuple):
+ starred_count = 0
+ for elem in assign_tuple.itered():
+ if isinstance(elem, astroid.Tuple):
+ return self._too_many_starred_for_tuple(elem)
+ if isinstance(elem, astroid.Starred):
+ starred_count += 1
+ return starred_count > 1
+
+ @utils.check_messages("too-many-star-expressions", "invalid-star-assignment-target")
+ def visit_assign(self, node):
+ # Check *a, *b = ...
+ assign_target = node.targets[0]
+ # Check *a = b
+ if isinstance(node.targets[0], astroid.Starred):
+ self.add_message("invalid-star-assignment-target", node=node)
+
+ if not isinstance(assign_target, astroid.Tuple):
+ return
+ if self._too_many_starred_for_tuple(assign_target):
+ self.add_message("too-many-star-expressions", node=node)
+
+ @utils.check_messages("star-needs-assignment-target")
+ def visit_starred(self, node):
+ """Check that a Starred expression is used in an assignment target."""
+ if isinstance(node.parent, astroid.Call):
+ # f(*args) is converted to Call(args=[Starred]), so ignore
+ # them for this check.
+ return
+ if isinstance(
+ node.parent, (astroid.List, astroid.Tuple, astroid.Set, astroid.Dict)
+ ):
+ # PEP 448 unpacking.
+ return
+
+ stmt = node.statement()
+ if not isinstance(stmt, astroid.Assign):
+ return
+
+ if stmt.value is node or stmt.value.parent_of(node):
+ self.add_message("star-needs-assignment-target", node=node)
+
+ @utils.check_messages(
+ "init-is-generator",
+ "return-in-init",
+ "function-redefined",
+ "return-arg-in-generator",
+ "duplicate-argument-name",
+ "nonlocal-and-global",
+ "used-prior-global-declaration",
+ )
+ def visit_functiondef(self, node):
+ self._check_nonlocal_and_global(node)
+ self._check_name_used_prior_global(node)
+ if not redefined_by_decorator(
+ node
+ ) and not utils.is_registered_in_singledispatch_function(node):
+ self._check_redefinition(node.is_method() and "method" or "function", node)
+ # checks for max returns, branch, return in __init__
+ returns = node.nodes_of_class(
+ astroid.Return, skip_klass=(astroid.FunctionDef, astroid.ClassDef)
+ )
+ if node.is_method() and node.name == "__init__":
+ if node.is_generator():
+ self.add_message("init-is-generator", node=node)
+ else:
+ values = [r.value for r in returns]
+ # Are we returning anything but None from constructors
+ if any(v for v in values if not utils.is_none(v)):
+ self.add_message("return-in-init", node=node)
+ # Check for duplicate names by clustering args with same name for detailed report
+ arg_clusters = collections.defaultdict(list)
+ arguments = filter(None, [node.args.args, node.args.kwonlyargs])
+
+ for arg in itertools.chain.from_iterable(arguments):
+ arg_clusters[arg.name].append(arg)
+
+ # provide detailed report about each repeated argument
+ for argument_duplicates in arg_clusters.values():
+ if len(argument_duplicates) != 1:
+ for argument in argument_duplicates:
+ self.add_message(
+ "duplicate-argument-name",
+ line=argument.lineno,
+ node=argument,
+ args=(argument.name,),
+ )
+
+ visit_asyncfunctiondef = visit_functiondef
+
+ def _check_name_used_prior_global(self, node):
+
+ scope_globals = {
+ name: child
+ for child in node.nodes_of_class(astroid.Global)
+ for name in child.names
+ if child.scope() is node
+ }
+
+ if not scope_globals:
+ return
+
+ for node_name in node.nodes_of_class(astroid.Name):
+ if node_name.scope() is not node:
+ continue
+
+ name = node_name.name
+ corresponding_global = scope_globals.get(name)
+ if not corresponding_global:
+ continue
+
+ global_lineno = corresponding_global.fromlineno
+ if global_lineno and global_lineno > node_name.fromlineno:
+ self.add_message(
+ "used-prior-global-declaration", node=node_name, args=(name,)
+ )
+
+ def _check_nonlocal_and_global(self, node):
+ """Check that a name is both nonlocal and global."""
+
+ def same_scope(current):
+ return current.scope() is node
+
+ from_iter = itertools.chain.from_iterable
+ nonlocals = set(
+ from_iter(
+ child.names
+ for child in node.nodes_of_class(astroid.Nonlocal)
+ if same_scope(child)
+ )
+ )
+
+ if not nonlocals:
+ return
+
+ global_vars = set(
+ from_iter(
+ child.names
+ for child in node.nodes_of_class(astroid.Global)
+ if same_scope(child)
+ )
+ )
+ for name in nonlocals.intersection(global_vars):
+ self.add_message("nonlocal-and-global", args=(name,), node=node)
+
+ @utils.check_messages("return-outside-function")
+ def visit_return(self, node):
+ if not isinstance(node.frame(), astroid.FunctionDef):
+ self.add_message("return-outside-function", node=node)
+
+ @utils.check_messages("yield-outside-function")
+ def visit_yield(self, node):
+ self._check_yield_outside_func(node)
+
+ @utils.check_messages("yield-outside-function")
+ def visit_yieldfrom(self, node):
+ self._check_yield_outside_func(node)
+
+ @utils.check_messages("not-in-loop", "continue-in-finally")
+ def visit_continue(self, node):
+ self._check_in_loop(node, "continue")
+
+ @utils.check_messages("not-in-loop")
+ def visit_break(self, node):
+ self._check_in_loop(node, "break")
+
+ @utils.check_messages("useless-else-on-loop")
+ def visit_for(self, node):
+ self._check_else_on_loop(node)
+
+ @utils.check_messages("useless-else-on-loop")
+ def visit_while(self, node):
+ self._check_else_on_loop(node)
+
+ @utils.check_messages("nonexistent-operator")
+ def visit_unaryop(self, node):
+ """check use of the non-existent ++ and -- operator operator"""
+ if (
+ (node.op in "+-")
+ and isinstance(node.operand, astroid.UnaryOp)
+ and (node.operand.op == node.op)
+ ):
+ self.add_message("nonexistent-operator", node=node, args=node.op * 2)
+
+ def _check_nonlocal_without_binding(self, node, name):
+ current_scope = node.scope()
+ while True:
+ if current_scope.parent is None:
+ break
+
+ if not isinstance(current_scope, (astroid.ClassDef, astroid.FunctionDef)):
+ self.add_message("nonlocal-without-binding", args=(name,), node=node)
+ return
+
+ if name not in current_scope.locals:
+ current_scope = current_scope.parent.scope()
+ continue
+
+ # Okay, found it.
+ return
+
+ if not isinstance(current_scope, astroid.FunctionDef):
+ self.add_message("nonlocal-without-binding", args=(name,), node=node)
+
+ @utils.check_messages("nonlocal-without-binding")
+ def visit_nonlocal(self, node):
+ for name in node.names:
+ self._check_nonlocal_without_binding(node, name)
+
+ @utils.check_messages("abstract-class-instantiated")
+ def visit_call(self, node):
+ """ Check instantiating abstract class with
+ abc.ABCMeta as metaclass.
+ """
+ try:
+ for inferred in node.func.infer():
+ self._check_inferred_class_is_abstract(inferred, node)
+ except astroid.InferenceError:
+ return
+
+ def _check_inferred_class_is_abstract(self, inferred, node):
+ if not isinstance(inferred, astroid.ClassDef):
+ return
+
+ klass = utils.node_frame_class(node)
+ if klass is inferred:
+ # Don't emit the warning if the class is instantiated
+ # in its own body or if the call is not an instance
+ # creation. If the class is instantiated into its own
+ # body, we're expecting that it knows what it is doing.
+ return
+
+ # __init__ was called
+ abstract_methods = _has_abstract_methods(inferred)
+
+ if not abstract_methods:
+ return
+
+ metaclass = inferred.metaclass()
+
+ if metaclass is None:
+ # Python 3.4 has `abc.ABC`, which won't be detected
+ # by ClassNode.metaclass()
+ for ancestor in inferred.ancestors():
+ if ancestor.qname() == "abc.ABC":
+ self.add_message(
+ "abstract-class-instantiated", args=(inferred.name,), node=node
+ )
+ break
+
+ return
+
+ if metaclass.qname() in ABC_METACLASSES:
+ self.add_message(
+ "abstract-class-instantiated", args=(inferred.name,), node=node
+ )
+
+ def _check_yield_outside_func(self, node):
+ if not isinstance(node.frame(), (astroid.FunctionDef, astroid.Lambda)):
+ self.add_message("yield-outside-function", node=node)
+
+ def _check_else_on_loop(self, node):
+ """Check that any loop with an else clause has a break statement."""
+ if node.orelse and not _loop_exits_early(node):
+ self.add_message(
+ "useless-else-on-loop",
+ node=node,
+ # This is not optimal, but the line previous
+ # to the first statement in the else clause
+ # will usually be the one that contains the else:.
+ line=node.orelse[0].lineno - 1,
+ )
+
+ def _check_in_loop(self, node, node_name):
+ """check that a node is inside a for or while loop"""
+ _node = node.parent
+ while _node:
+ if isinstance(_node, (astroid.For, astroid.While)):
+ if node not in _node.orelse:
+ return
+
+ if isinstance(_node, (astroid.ClassDef, astroid.FunctionDef)):
+ break
+ if (
+ isinstance(_node, astroid.TryFinally)
+ and node in _node.finalbody
+ and isinstance(node, astroid.Continue)
+ ):
+ self.add_message("continue-in-finally", node=node)
+
+ _node = _node.parent
+
+ self.add_message("not-in-loop", node=node, args=node_name)
+
+ def _check_redefinition(self, redeftype, node):
+ """check for redefinition of a function / method / class name"""
+ parent_frame = node.parent.frame()
+
+ # Ignore function stubs created for type information
+ redefinitions = parent_frame.locals[node.name]
+ defined_self = next(
+ (local for local in redefinitions if not utils.is_overload_stub(local)),
+ node,
+ )
+ if defined_self is not node and not astroid.are_exclusive(node, defined_self):
+
+ # Additional checks for methods which are not considered
+ # redefined, since they are already part of the base API.
+ if (
+ isinstance(parent_frame, astroid.ClassDef)
+ and node.name in REDEFINABLE_METHODS
+ ):
+ return
+
+ if utils.is_overload_stub(node):
+ return
+
+ # Check if we have forward references for this node.
+ try:
+ redefinition_index = redefinitions.index(node)
+ except ValueError:
+ pass
+ else:
+ for redefinition in redefinitions[:redefinition_index]:
+ inferred = utils.safe_infer(redefinition)
+ if (
+ inferred
+ and isinstance(inferred, astroid.Instance)
+ and inferred.qname() == TYPING_FORWARD_REF_QNAME
+ ):
+ return
+
+ dummy_variables_rgx = lint_utils.get_global_option(
+ self, "dummy-variables-rgx", default=None
+ )
+ if dummy_variables_rgx and dummy_variables_rgx.match(node.name):
+ return
+ self.add_message(
+ "function-redefined",
+ node=node,
+ args=(redeftype, defined_self.fromlineno),
+ )
+
+
class BasicChecker(_BasicChecker):
    """Checker for generally-suspicious constructs, including:

    * unreachable code and break/return in finally clauses
    * dangerous (mutable) default argument values
    * statements and string statements with no effect
    * unnecessary lambdas, duplicate dict keys, assert on a tuple
    * use of exec/eval and misplaced str.format calls
    * conditional statements with constant tests
    * self-assignment and redeclared names in assignments
    * non-sequence arguments to reversed()
    """

    __implements__ = interfaces.IAstroidChecker

    name = "basic"
    # message-id -> (template, symbol, description)
    msgs = {
        "W0101": (
            "Unreachable code",
            "unreachable",
            'Used when there is some code behind a "return" or "raise" '
            "statement, which will never be accessed.",
        ),
        "W0102": (
            "Dangerous default value %s as argument",
            "dangerous-default-value",
            "Used when a mutable value as list or dictionary is detected in "
            "a default value for an argument.",
        ),
        "W0104": (
            "Statement seems to have no effect",
            "pointless-statement",
            "Used when a statement doesn't have (or at least seems to) any effect.",
        ),
        "W0105": (
            "String statement has no effect",
            "pointless-string-statement",
            "Used when a string is used as a statement (which of course "
            "has no effect). This is a particular case of W0104 with its "
            "own message so you can easily disable it if you're using "
            "those strings as documentation, instead of comments.",
        ),
        "W0106": (
            'Expression "%s" is assigned to nothing',
            "expression-not-assigned",
            "Used when an expression that is not a function call is assigned "
            "to nothing. Probably something else was intended.",
        ),
        "W0108": (
            "Lambda may not be necessary",
            "unnecessary-lambda",
            "Used when the body of a lambda expression is a function call "
            "on the same argument list as the lambda itself; such lambda "
            "expressions are in all but a few cases replaceable with the "
            "function being called in the body of the lambda.",
        ),
        "W0109": (
            "Duplicate key %r in dictionary",
            "duplicate-key",
            "Used when a dictionary expression binds the same key multiple times.",
        ),
        "W0122": (
            "Use of exec",
            "exec-used",
            'Used when you use the "exec" statement (function for Python '
            "3), to discourage its usage. That doesn't "
            "mean you cannot use it !",
        ),
        "W0123": (
            "Use of eval",
            "eval-used",
            'Used when you use the "eval" function, to discourage its '
            "usage. Consider using `ast.literal_eval` for safely evaluating "
            "strings containing Python expressions "
            "from untrusted sources. ",
        ),
        "W0150": (
            "%s statement in finally block may swallow exception",
            "lost-exception",
            "Used when a break or a return statement is found inside the "
            "finally clause of a try...finally block: the exceptions raised "
            "in the try clause will be silently swallowed instead of being "
            "re-raised.",
        ),
        "W0199": (
            "Assert called on a 2-item-tuple. Did you mean 'assert x,y'?",
            "assert-on-tuple",
            "A call of assert on a tuple will always evaluate to true if "
            "the tuple is not empty, and will always evaluate to false if "
            "it is.",
        ),
        "W0124": (
            'Following "as" with another context manager looks like a tuple.',
            "confusing-with-statement",
            "Emitted when a `with` statement component returns multiple values "
            "and uses name binding with `as` only for a part of those values, "
            "as in with ctx() as a, b. This can be misleading, since it's not "
            "clear if the context manager returns a tuple or if the node without "
            "a name binding is another context manager.",
        ),
        "W0125": (
            "Using a conditional statement with a constant value",
            "using-constant-test",
            "Emitted when a conditional statement (If or ternary if) "
            "uses a constant value for its test. This might not be what "
            "the user intended to do.",
        ),
        "W0126": (
            "Using a conditional statement with potentially wrong function or method call due to missing parentheses",
            "missing-parentheses-for-call-in-test",
            "Emitted when a conditional statement (If or ternary if) "
            "seems to wrongly call a function due to missing parentheses",
        ),
        "W0127": (
            "Assigning the same variable %r to itself",
            "self-assigning-variable",
            "Emitted when we detect that a variable is assigned to itself",
        ),
        "W0128": (
            "Redeclared variable %r in assignment",
            "redeclared-assigned-name",
            "Emitted when we detect that a variable was redeclared in the same assignment.",
        ),
        "E0111": (
            "The first reversed() argument is not a sequence",
            "bad-reversed-sequence",
            "Used when the first argument to reversed() builtin "
            "isn't a sequence (does not implement __reversed__, "
            "nor __getitem__ and __len__",
        ),
        "E0119": (
            "format function is not called on str",
            "misplaced-format-function",
            "Emitted when format function is not called on str object. "
            'e.g doing print("value: {}").format(123) instead of '
            'print("value: {}".format(123)). This might not be what the user '
            "intended to do.",
        ),
    }

    reports = (("RP0101", "Statistics by type", report_by_type_stats),)
+
+ def __init__(self, linter):
+ _BasicChecker.__init__(self, linter)
+ self.stats = None
+ self._tryfinallys = None
+
    def open(self):
        """initialize visit variables and statistics

        Called once before walking a module: resets the try/finally stack
        and registers the per-type statistic counters on the linter.
        """
        self._tryfinallys = []
        self.stats = self.linter.add_stats(module=0, function=0, method=0, class_=0)
+
    @utils.check_messages("using-constant-test", "missing-parentheses-for-call-in-test")
    def visit_if(self, node):
        """Check an `if` statement's test for a constant value."""
        self._check_using_constant_test(node, node.test)
+
    @utils.check_messages("using-constant-test", "missing-parentheses-for-call-in-test")
    def visit_ifexp(self, node):
        """Check a ternary expression's test for a constant value."""
        self._check_using_constant_test(node, node.test)
+
+ @utils.check_messages("using-constant-test", "missing-parentheses-for-call-in-test")
+ def visit_comprehension(self, node):
+ if node.ifs:
+ for if_test in node.ifs:
+ self._check_using_constant_test(node, if_test)
+
    def _check_using_constant_test(self, node, test):
        """Emit using-constant-test (or missing-parentheses-for-call-in-test)
        when *test* is a constant or inferred-constant value.

        *node* is the statement/expression to attach the message to; *test*
        is the condition being evaluated.
        """
        # Node types that are constant for the purpose of a test.
        const_nodes = (
            astroid.Module,
            astroid.scoped_nodes.GeneratorExp,
            astroid.Lambda,
            astroid.FunctionDef,
            astroid.ClassDef,
            astroid.bases.Generator,
            astroid.UnboundMethod,
            astroid.BoundMethod,
            astroid.Module,
        )
        structs = (astroid.Dict, astroid.Tuple, astroid.Set)

        # These nodes are excepted, since they are not constant
        # values, requiring a computation to happen.
        except_nodes = (
            astroid.Call,
            astroid.BinOp,
            astroid.BoolOp,
            astroid.UnaryOp,
            astroid.Subscript,
        )
        inferred = None
        emit = isinstance(test, (astroid.Const,) + structs + const_nodes)
        if not isinstance(test, except_nodes):
            inferred = utils.safe_infer(test)

        if emit:
            # The test is syntactically a constant.
            self.add_message("using-constant-test", node=node)
        elif isinstance(inferred, const_nodes):
            # If the constant node is a FunctionDef or Lambda then
            #  it may be a illicit function call due to missing parentheses
            call_inferred = None
            if isinstance(inferred, astroid.FunctionDef):
                call_inferred = inferred.infer_call_result()
            elif isinstance(inferred, astroid.Lambda):
                call_inferred = inferred.infer_call_result(node)
            if call_inferred:
                try:
                    for inf_call in call_inferred:
                        if inf_call != astroid.Uninferable:
                            self.add_message(
                                "missing-parentheses-for-call-in-test", node=node
                            )
                            break
                except astroid.InferenceError:
                    pass
            self.add_message("using-constant-test", node=node)
+
    def visit_module(self, _):
        """Increment the per-run module statistics counter.

        The module node itself is unused; only the count matters here.
        """
        self.stats["module"] += 1
+
    def visit_classdef(self, node):  # pylint: disable=unused-argument
        """Increment the per-run class statistics counter.

        NOTE(review): open() registers this counter via add_stats(class_=0)
        while the key read here is "class" — presumably add_stats normalizes
        the trailing underscore; confirm against the linter implementation.
        """
        self.stats["class"] += 1
+
    @utils.check_messages(
        "pointless-statement", "pointless-string-statement", "expression-not-assigned"
    )
    def visit_expr(self, node):
        """Check for various kind of statements without effect"""
        expr = node.value
        if isinstance(expr, astroid.Const) and isinstance(expr.value, str):
            # treat string statement in a separated message
            # Handle PEP-257 attribute docstrings.
            # An attribute docstring is defined as being a string right after
            # an assignment at the module level, class level or __init__ level.
            scope = expr.scope()
            if isinstance(
                scope, (astroid.ClassDef, astroid.Module, astroid.FunctionDef)
            ):
                if isinstance(scope, astroid.FunctionDef) and scope.name != "__init__":
                    # Strings in ordinary function bodies are handled below
                    # as plain pointless-string-statement.
                    pass
                else:
                    sibling = expr.previous_sibling()
                    if (
                        sibling is not None
                        and sibling.scope() is scope
                        and isinstance(sibling, (astroid.Assign, astroid.AnnAssign))
                    ):
                        # Attribute docstring: no warning.
                        return
            self.add_message("pointless-string-statement", node=node)
            return

        # Ignore if this is :
        # * a direct function call
        # * the unique child of a try/except body
        # * a yield statement
        # * an ellipsis (which can be used on Python 3 instead of pass)
        # warn W0106 if we have any underlying function call (we can't predict
        # side effects), else pointless-statement
        if (
            isinstance(
                expr, (astroid.Yield, astroid.Await, astroid.Ellipsis, astroid.Call)
            )
            or (
                isinstance(node.parent, astroid.TryExcept)
                and node.parent.body == [node]
            )
            or (isinstance(expr, astroid.Const) and expr.value is Ellipsis)
        ):
            return
        if any(expr.nodes_of_class(astroid.Call)):
            self.add_message(
                "expression-not-assigned", node=node, args=expr.as_string()
            )
        else:
            self.add_message("pointless-statement", node=node)
+
+ @staticmethod
+ def _filter_vararg(node, call_args):
+ # Return the arguments for the given call which are
+ # not passed as vararg.
+ for arg in call_args:
+ if isinstance(arg, astroid.Starred):
+ if (
+ isinstance(arg.value, astroid.Name)
+ and arg.value.name != node.args.vararg
+ ):
+ yield arg
+ else:
+ yield arg
+
+ @staticmethod
+ def _has_variadic_argument(args, variadic_name):
+ if not args:
+ return True
+ for arg in args:
+ if isinstance(arg.value, astroid.Name):
+ if arg.value.name != variadic_name:
+ return True
+ else:
+ return True
+ return False
+
    @utils.check_messages("unnecessary-lambda")
    def visit_lambda(self, node):
        """check whether or not the lambda is suspicious
        """
        # if the body of the lambda is a call expression with the same
        # argument list as the lambda itself, then the lambda is
        # possibly unnecessary and at least suspicious.
        if node.args.defaults:
            # If the arguments of the lambda include defaults, then a
            # judgment cannot be made because there is no way to check
            # that the defaults defined by the lambda are the same as
            # the defaults defined by the function called in the body
            # of the lambda.
            return
        call = node.body
        if not isinstance(call, astroid.Call):
            # The body of the lambda must be a function call expression
            # for the lambda to be unnecessary.
            return
        if isinstance(node.body.func, astroid.Attribute) and isinstance(
            node.body.func.expr, astroid.Call
        ):
            # Chained call, the intermediate call might
            # return something else (but we don't check that, yet).
            return

        call_site = CallSite.from_call(call)
        ordinary_args = list(node.args.args)
        new_call_args = list(self._filter_vararg(node, call.args))
        if node.args.kwarg:
            if self._has_variadic_argument(call.kwargs, node.args.kwarg):
                return

        if node.args.vararg:
            if self._has_variadic_argument(call.starargs, node.args.vararg):
                return
        elif call.starargs:
            return

        if call.keywords:
            # Look for additional keyword arguments that are not part
            # of the lambda's signature
            # NOTE(review): node.args.defaults holds default-value
            # expressions, which do not generally have a .name attribute —
            # presumably this was meant to collect keyword-only argument
            # names; confirm (defaults is always empty here anyway due to
            # the early return above).
            lambda_kwargs = {keyword.name for keyword in node.args.defaults}
            if len(lambda_kwargs) != len(call_site.keyword_arguments):
                # Different lengths, so probably not identical
                return
            if set(call_site.keyword_arguments).difference(lambda_kwargs):
                return

        # The "ordinary" arguments must be in a correspondence such that:
        # ordinary_args[i].name == call.args[i].name.
        if len(ordinary_args) != len(new_call_args):
            return
        for arg, passed_arg in zip(ordinary_args, new_call_args):
            if not isinstance(passed_arg, astroid.Name):
                return
            if arg.name != passed_arg.name:
                return

        self.add_message("unnecessary-lambda", line=node.fromlineno, node=node)
+
    @utils.check_messages("dangerous-default-value")
    def visit_functiondef(self, node):
        """Update function/method statistics and check for dangerous
        (mutable) default argument values.
        """
        self.stats["method" if node.is_method() else "function"] += 1
        self._check_dangerous_default(node)

    # Async functions get the same treatment.
    visit_asyncfunctiondef = visit_functiondef
+
+ def _check_dangerous_default(self, node):
+ # check for dangerous default values as arguments
+ is_iterable = lambda n: isinstance(n, (astroid.List, astroid.Set, astroid.Dict))
+ for default in node.args.defaults:
+ try:
+ value = next(default.infer())
+ except astroid.InferenceError:
+ continue
+
+ if (
+ isinstance(value, astroid.Instance)
+ and value.qname() in DEFAULT_ARGUMENT_SYMBOLS
+ ):
+
+ if value is default:
+ msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
+ elif isinstance(value, astroid.Instance) or is_iterable(value):
+ # We are here in the following situation(s):
+ # * a dict/set/list/tuple call which wasn't inferred
+ # to a syntax node ({}, () etc.). This can happen
+ # when the arguments are invalid or unknown to
+ # the inference.
+ # * a variable from somewhere else, which turns out to be a list
+ # or a dict.
+ if is_iterable(default):
+ msg = value.pytype()
+ elif isinstance(default, astroid.Call):
+ msg = "%s() (%s)" % (value.name, value.qname())
+ else:
+ msg = "%s (%s)" % (default.as_string(), value.qname())
+ else:
+ # this argument is a name
+ msg = "%s (%s)" % (
+ default.as_string(),
+ DEFAULT_ARGUMENT_SYMBOLS[value.qname()],
+ )
+ self.add_message("dangerous-default-value", node=node, args=(msg,))
+
    @utils.check_messages("unreachable", "lost-exception")
    def visit_return(self, node):
        """1 - check if the node has a right sibling (if so, that's some
        unreachable code)
        2 - check if the node is inside the finally clause of a try...finally
        block
        """
        self._check_unreachable(node)
        # Is it inside final body of a try...finally bloc ?
        self._check_not_in_finally(node, "return", (astroid.FunctionDef,))
+
    @utils.check_messages("unreachable")
    def visit_continue(self, node):
        """check if the node has a right sibling (if so, that's some unreachable
        code)
        """
        self._check_unreachable(node)
+
    @utils.check_messages("unreachable", "lost-exception")
    def visit_break(self, node):
        """1 - check if the node has a right sibling (if so, that's some
        unreachable code)
        2 - check if the node is inside the finally clause of a try...finally
        block
        """
        # 1 - Is it right sibling ?
        self._check_unreachable(node)
        # 2 - Is it inside final body of a try...finally bloc ?
        self._check_not_in_finally(node, "break", (astroid.For, astroid.While))
+
    @utils.check_messages("unreachable")
    def visit_raise(self, node):
        """check if the node has a right sibling (if so, that's some unreachable
        code)
        """
        self._check_unreachable(node)
+
    @utils.check_messages("exec-used")
    def visit_exec(self, node):
        """just print a warning on exec statements"""
        self.add_message("exec-used", node=node)
+
    def _check_misplaced_format_function(self, call_node):
        """Emit misplaced-format-function for ``print(...).format(...)``,
        i.e. a .format() call applied to a print() call's result instead of
        to the string itself.
        """
        if not isinstance(call_node.func, astroid.Attribute):
            return
        if call_node.func.attrname != "format":
            return

        expr = utils.safe_infer(call_node.func.expr)
        if expr is astroid.Uninferable:
            return
        if not expr:
            # we are doubtful on inferred type of node, so here just check if format
            # was called on print()
            call_expr = call_node.func.expr
            if not isinstance(call_expr, astroid.Call):
                return
            if (
                isinstance(call_expr.func, astroid.Name)
                and call_expr.func.name == "print"
            ):
                self.add_message("misplaced-format-function", node=call_node)
+
    @utils.check_messages(
        "eval-used", "exec-used", "bad-reversed-sequence", "misplaced-format-function"
    )
    def visit_call(self, node):
        """visit a Call node -> check if this is not a blacklisted builtin
        call and check for * or ** use
        """
        self._check_misplaced_format_function(node)
        if isinstance(node.func, astroid.Name):
            name = node.func.name
            # ignore the name if it's not a builtin (i.e. not defined in the
            # locals nor globals scope)
            if not (name in node.frame() or name in node.root()):
                if name == "exec":
                    self.add_message("exec-used", node=node)
                elif name == "reversed":
                    self._check_reversed(node)
                elif name == "eval":
                    self.add_message("eval-used", node=node)
+
+ @utils.check_messages("assert-on-tuple")
+ def visit_assert(self, node):
+ """check the use of an assert statement on a tuple."""
+ if (
+ node.fail is None
+ and isinstance(node.test, astroid.Tuple)
+ and len(node.test.elts) == 2
+ ):
+ self.add_message("assert-on-tuple", node=node)
+
+ @utils.check_messages("duplicate-key")
+ def visit_dict(self, node):
+ """check duplicate key in dictionary"""
+ keys = set()
+ for k, _ in node.items:
+ if isinstance(k, astroid.Const):
+ key = k.value
+ if key in keys:
+ self.add_message("duplicate-key", node=node, args=key)
+ keys.add(key)
+
    def visit_tryfinally(self, node):
        """update try...finally flag

        Pushes the node on the stack consumed by _check_not_in_finally().
        """
        self._tryfinallys.append(node)
+
    def leave_tryfinally(self, node):  # pylint: disable=unused-argument
        """update try...finally flag

        Pops the node pushed by visit_tryfinally().
        """
        self._tryfinallys.pop()
+
+ def _check_unreachable(self, node):
+ """check unreachable code"""
+ unreach_stmt = node.next_sibling()
+ if unreach_stmt is not None:
+ self.add_message("unreachable", node=unreach_stmt)
+
    def _check_not_in_finally(self, node, node_name, breaker_classes=()):
        """check that a node is not inside a finally clause of a
        try...finally statement.
        If we found before a try...finally bloc a parent which its type is
        in breaker_classes, we skip the whole check."""
        # if self._tryfinallys is empty, we're not an in try...finally block
        if not self._tryfinallys:
            return
        # the node could be a grand-grand...-children of the try...finally
        # Walk upwards, remembering the child we came from so membership
        # in finalbody can be tested at each level.
        _parent = node.parent
        _node = node
        while _parent and not isinstance(_parent, breaker_classes):
            if hasattr(_parent, "finalbody") and _node in _parent.finalbody:
                self.add_message("lost-exception", node=node, args=node_name)
                return
            _node = _parent
            _parent = _node.parent
+
    def _check_reversed(self, node):
        """ check that the argument to `reversed` is a sequence """
        try:
            argument = utils.safe_infer(utils.get_argument_from_call(node, position=0))
        except utils.NoSuchArgumentError:
            # reversed() called without a positional argument: nothing to do.
            pass
        else:
            if argument is astroid.Uninferable:
                return
            if argument is None:
                # Nothing was inferred.
                # Try to see if we have iter().
                if isinstance(node.args[0], astroid.Call):
                    try:
                        func = next(node.args[0].func.infer())
                    except astroid.InferenceError:
                        return
                    if getattr(
                        func, "name", None
                    ) == "iter" and utils.is_builtin_object(func):
                        # iter() yields an iterator, not a sequence.
                        self.add_message("bad-reversed-sequence", node=node)
                return

            if isinstance(argument, (astroid.List, astroid.Tuple)):
                # Literal sequences are always fine.
                return

            if isinstance(argument, astroid.Instance):
                if argument._proxied.name == "dict" and utils.is_builtin_object(
                    argument._proxied
                ):
                    self.add_message("bad-reversed-sequence", node=node)
                    return
                if any(
                    ancestor.name == "dict" and utils.is_builtin_object(ancestor)
                    for ancestor in argument._proxied.ancestors()
                ):
                    # Mappings aren't accepted by reversed(), unless
                    # they provide explicitly a __reversed__ method.
                    try:
                        argument.locals[REVERSED_PROTOCOL_METHOD]
                    except KeyError:
                        self.add_message("bad-reversed-sequence", node=node)
                    return

                if hasattr(argument, "getattr"):
                    # everything else is not a proper sequence for reversed()
                    # Accept the argument if at least one protocol-method
                    # group in REVERSED_METHODS is fully present:
                    # a missing method breaks the inner loop and moves on to
                    # the next group; a complete group breaks the outer loop
                    # (no message); if no group is complete, the outer else
                    # emits the message.
                    for methods in REVERSED_METHODS:
                        for meth in methods:
                            try:
                                argument.getattr(meth)
                            except astroid.NotFoundError:
                                break
                        else:
                            break
                    else:
                        self.add_message("bad-reversed-sequence", node=node)
            else:
                self.add_message("bad-reversed-sequence", node=node)
+
+ @utils.check_messages("confusing-with-statement")
+ def visit_with(self, node):
+ # a "with" statement with multiple managers coresponds
+ # to one AST "With" node with multiple items
+ pairs = node.items
+ if pairs:
+ for prev_pair, pair in zip(pairs, pairs[1:]):
+ if isinstance(prev_pair[1], astroid.AssignName) and (
+ pair[1] is None and not isinstance(pair[0], astroid.Call)
+ ):
+ # Don't emit a message if the second is a function call
+ # there's no way that can be mistaken for a name assignment.
+ # If the line number doesn't match
+ # we assume it's a nested "with".
+ self.add_message("confusing-with-statement", node=node)
+
+ def _check_self_assigning_variable(self, node):
+ # Detect assigning to the same variable.
+
+ scope = node.scope()
+ scope_locals = scope.locals
+
+ rhs_names = []
+ targets = node.targets
+ if isinstance(targets[0], astroid.Tuple):
+ if len(targets) != 1:
+ # A complex assignment, so bail out early.
+ return
+ targets = targets[0].elts
+
+ if isinstance(node.value, astroid.Name):
+ if len(targets) != 1:
+ return
+ rhs_names = [node.value]
+ elif isinstance(node.value, astroid.Tuple):
+ rhs_count = len(node.value.elts)
+ if len(targets) != rhs_count or rhs_count == 1:
+ return
+ rhs_names = node.value.elts
+
+ for target, lhs_name in zip(targets, rhs_names):
+ if not isinstance(lhs_name, astroid.Name):
+ continue
+ if not isinstance(target, astroid.AssignName):
+ continue
+ if isinstance(scope, astroid.ClassDef) and target.name in scope_locals:
+ # Check that the scope is different than a class level, which is usually
+ # a pattern to expose module level attributes as class level ones.
+ continue
+ if target.name == lhs_name.name:
+ self.add_message(
+ "self-assigning-variable", args=(target.name,), node=target
+ )
+
+ def _check_redeclared_assign_name(self, targets):
+ for target in targets:
+ if not isinstance(target, astroid.Tuple):
+ continue
+
+ found_names = []
+ for element in target.elts:
+ if isinstance(element, astroid.Tuple):
+ self._check_redeclared_assign_name([element])
+ elif isinstance(element, astroid.AssignName) and element.name != "_":
+ found_names.append(element.name)
+
+ names = collections.Counter(found_names)
+ for name, count in names.most_common():
+ if count > 1:
+ self.add_message(
+ "redeclared-assigned-name", args=(name,), node=target
+ )
+
    @utils.check_messages("self-assigning-variable", "redeclared-assigned-name")
    def visit_assign(self, node):
        """Check an assignment for self-assignment and duplicated tuple names."""
        self._check_self_assigning_variable(node)
        self._check_redeclared_assign_name(node.targets)
+
    @utils.check_messages("redeclared-assigned-name")
    def visit_for(self, node):
        """Check a ``for`` loop target for duplicated names."""
        self._check_redeclared_assign_name([node.target])
+
+
# Human-readable label used in messages for each name category.  The set of
# known categories is derived from this mapping so the two can never drift
# apart.
HUMAN_READABLE_TYPES = {
    "module": "module",
    "const": "constant",
    "class": "class",
    "function": "function",
    "method": "method",
    "attr": "attribute",
    "argument": "argument",
    "variable": "variable",
    "class_attribute": "class attribute",
    "inlinevar": "inline iteration",
}

# All name categories understood by the name checker.
KNOWN_NAME_TYPES = set(HUMAN_READABLE_TYPES)

# Naming style applied to each category when no explicit regex is configured.
DEFAULT_NAMING_STYLES = {
    "module": "snake_case",
    "const": "UPPER_CASE",
    "class": "PascalCase",
    "function": "snake_case",
    "method": "snake_case",
    "attr": "snake_case",
    "argument": "snake_case",
    "variable": "snake_case",
    "class_attribute": "any",
    "inlinevar": "any",
}
+
+
def _create_naming_options():
    """Build the ``(name, optdict)`` option tuples for every name type.

    For each known name type, taken in alphabetical order, two options are
    produced: ``<type>-naming-style`` selecting one of the predefined naming
    styles, and ``<type>-rgx`` allowing a custom regular expression that
    overrides the style.
    """
    name_options = []
    for name_type in sorted(KNOWN_NAME_TYPES):
        human_readable_name = HUMAN_READABLE_TYPES[name_type]
        default_style = DEFAULT_NAMING_STYLES[name_type]
        option_name = name_type.replace("_", "-")
        style_option = (
            "%s-naming-style" % (option_name,),
            {
                "default": default_style,
                "type": "choice",
                "choices": list(NAMING_STYLES.keys()),
                "metavar": "<style>",
                "help": "Naming style matching correct %s names."
                % (human_readable_name,),
            },
        )
        regex_option = (
            "%s-rgx" % (option_name,),
            {
                "default": None,
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "Regular expression matching correct %s names. Overrides %s-naming-style."
                % (human_readable_name, option_name),
            },
        )
        name_options.append(style_option)
        name_options.append(regex_option)
    return tuple(name_options)
+
+
class NameChecker(_BasicChecker):
    """Checker for naming conventions.

    Verifies that names conform to the configured naming styles (or custom
    regular expressions), that blacklisted names are not used, and that names
    which become keywords in a later Python version are not assigned to.
    """

    msgs = {
        "C0102": (
            'Black listed name "%s"',
            "blacklisted-name",
            "Used when the name is listed in the black list (unauthorized names).",
        ),
        "C0103": (
            '%s name "%s" doesn\'t conform to %s',
            "invalid-name",
            "Used when the name doesn't conform to naming rules "
            "associated to its type (constant, variable, class...).",
        ),
        "W0111": (
            "Name %s will become a keyword in Python %s",
            "assign-to-new-keyword",
            "Used when assignment will become invalid in future "
            "Python release due to introducing new keyword.",
        ),
    }

    options = (
        (
            "good-names",
            {
                "default": ("i", "j", "k", "ex", "Run", "_"),
                "type": "csv",
                "metavar": "<names>",
                "help": "Good variable names which should always be accepted,"
                " separated by a comma.",
            },
        ),
        (
            "bad-names",
            {
                "default": ("foo", "bar", "baz", "toto", "tutu", "tata"),
                "type": "csv",
                "metavar": "<names>",
                "help": "Bad variable names which should always be refused, "
                "separated by a comma.",
            },
        ),
        (
            "name-group",
            {
                "default": (),
                "type": "csv",
                "metavar": "<name1:name2>",
                "help": (
                    "Colon-delimited sets of names that determine each"
                    " other's naming style when the name regexes"
                    " allow several styles."
                ),
            },
        ),
        (
            "include-naming-hint",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Include a hint for the correct naming format with invalid-name.",
            },
        ),
        (
            "property-classes",
            {
                "default": ("abc.abstractproperty",),
                "type": "csv",
                "metavar": "<decorator names>",
                "help": "List of decorators that produce properties, such as "
                "abc.abstractproperty. Add to this list to register "
                "other decorators that produce valid properties. "
                "These decorators are taken in consideration only for invalid-name.",
            },
        ),
    ) + _create_naming_options()

    # Python version -> names that become keywords in that version.
    KEYWORD_ONSET = {(3, 7): {"async", "await"}}

    def __init__(self, linter):
        """Initialize the per-run bookkeeping structures."""
        _BasicChecker.__init__(self, linter)
        self._name_category = {}
        # name type -> group label, filled from the name-group option in open()
        self._name_group = {}
        # name group -> regex group name -> deferred invalid-name warnings
        self._bad_names = {}
        self._name_regexps = {}
        self._name_hints = {}

    def open(self):
        """Reset counters and build the naming rules at the start of a run."""
        self.stats = self.linter.add_stats(
            badname_module=0,
            badname_class=0,
            badname_function=0,
            badname_method=0,
            badname_attr=0,
            badname_const=0,
            badname_variable=0,
            badname_inlinevar=0,
            badname_argument=0,
            badname_class_attribute=0,
        )
        # Each name-group entry is a colon-delimited set of name types that
        # must share a naming style, e.g. "function:method".
        for group in self.config.name_group:
            for name_type in group.split(":"):
                self._name_group[name_type] = "group_%s" % (group,)

        regexps, hints = self._create_naming_rules()
        self._name_regexps = regexps
        self._name_hints = hints

    def _create_naming_rules(self):
        """Build the regex and message hint for every known name type.

        A custom ``<type>-rgx`` option, when configured, overrides the regex
        derived from the ``<type>-naming-style`` option, and the hint then
        shows the custom pattern instead of the style name.
        """
        regexps = {}
        hints = {}

        for name_type in KNOWN_NAME_TYPES:
            naming_style_option_name = "%s_naming_style" % (name_type,)
            naming_style_name = getattr(self.config, naming_style_option_name)

            regexps[name_type] = NAMING_STYLES[naming_style_name].get_regex(name_type)

            custom_regex_setting_name = "%s_rgx" % (name_type,)
            custom_regex = getattr(self.config, custom_regex_setting_name, None)
            if custom_regex is not None:
                regexps[name_type] = custom_regex

            if custom_regex is not None:
                hints[name_type] = "%r pattern" % custom_regex.pattern
            else:
                hints[name_type] = "%s naming style" % naming_style_name

        return regexps, hints

    @utils.check_messages("blacklisted-name", "invalid-name")
    def visit_module(self, node):
        """Check the module name and reset the deferred multi-style warnings."""
        self._check_name("module", node.name.split(".")[-1], node)
        self._bad_names = {}

    def leave_module(self, node):  # pylint: disable=unused-argument
        """Emit the deferred invalid-name warnings for multi-style name groups.

        Warnings collected by _check_name are bucketed per matched regex
        group.  The dominant bucket(s) stay silent: if a single bucket is
        smallest it is emitted; when several buckets tie for smallest, all of
        them but the one appearing first in the file are emitted.
        """
        for all_groups in self._bad_names.values():
            if len(all_groups) < 2:
                continue
            groups = collections.defaultdict(list)
            min_warnings = sys.maxsize
            for group in all_groups.values():
                groups[len(group)].append(group)
                min_warnings = min(len(group), min_warnings)
            if len(groups[min_warnings]) > 1:
                by_line = sorted(
                    groups[min_warnings],
                    key=lambda group: min(warning[0].lineno for warning in group),
                )
                warnings = itertools.chain(*by_line[1:])
            else:
                warnings = groups[min_warnings][0]
            for args in warnings:
                self._raise_name_warning(*args)

    @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
    def visit_classdef(self, node):
        """Check the class name and the names of newly-introduced instance
        attributes (those not already defined by an ancestor)."""
        self._check_assign_to_new_keyword_violation(node.name, node)
        self._check_name("class", node.name, node)
        for attr, anodes in node.instance_attrs.items():
            if not any(node.instance_attr_ancestors(attr)):
                self._check_name("attr", attr, anodes[0])

    @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
    def visit_functiondef(self, node):
        """Check the function/method name and its argument names."""
        # Do not emit any warnings if the method is just an implementation
        # of a base class method.
        self._check_assign_to_new_keyword_violation(node.name, node)
        confidence = interfaces.HIGH
        if node.is_method():
            if utils.overrides_a_method(node.parent.frame(), node.name):
                return
            # Confidence is lowered when the class has unknown bases, since
            # an override could then go undetected.
            confidence = (
                interfaces.INFERENCE
                if utils.has_known_bases(node.parent.frame())
                else interfaces.INFERENCE_FAILURE
            )

        self._check_name(
            _determine_function_name_type(node, config=self.config),
            node.name,
            node,
            confidence,
        )
        # Check argument names
        args = node.args.args
        if args is not None:
            self._recursive_check_names(args, node)

    visit_asyncfunctiondef = visit_functiondef

    @utils.check_messages("blacklisted-name", "invalid-name")
    def visit_global(self, node):
        """Names declared ``global`` are checked against the const rules."""
        for name in node.names:
            self._check_name("const", name, node)

    @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
    def visit_assignname(self, node):
        """Check an assigned name, dispatching on the kind of assignment
        (comprehension, module level, except handler, function or class)."""
        self._check_assign_to_new_keyword_violation(node.name, node)
        frame = node.frame()
        assign_type = node.assign_type()
        if isinstance(assign_type, astroid.Comprehension):
            self._check_name("inlinevar", node.name, node)
        elif isinstance(frame, astroid.Module):
            if isinstance(assign_type, astroid.Assign) and not in_loop(assign_type):
                # A module-level name bound to a class is checked as a class
                # name (e.g. an alias), otherwise as a constant.
                if isinstance(utils.safe_infer(assign_type.value), astroid.ClassDef):
                    self._check_name("class", node.name, node)
                else:
                    if not _redefines_import(node):
                        # Don't emit if the name redefines an import
                        # in an ImportError except handler.
                        self._check_name("const", node.name, node)
            elif isinstance(assign_type, astroid.ExceptHandler):
                self._check_name("variable", node.name, node)
        elif isinstance(frame, astroid.FunctionDef):
            # global introduced variable aren't in the function locals
            if node.name in frame and node.name not in frame.argnames():
                if not _redefines_import(node):
                    self._check_name("variable", node.name, node)
        elif isinstance(frame, astroid.ClassDef):
            if not list(frame.local_attr_ancestors(node.name)):
                self._check_name("class_attribute", node.name, node)

    def _recursive_check_names(self, args, node):
        """check names in a possibly recursive list <arg>"""
        for arg in args:
            if isinstance(arg, astroid.AssignName):
                self._check_name("argument", arg.name, node)
            else:
                self._recursive_check_names(arg.elts, node)

    def _find_name_group(self, node_type):
        """Return the configured group for a name type, defaulting to the
        type itself when it belongs to no group."""
        return self._name_group.get(node_type, node_type)

    def _raise_name_warning(self, node, node_type, name, confidence):
        """Emit invalid-name for *name* and bump the matching badname stat."""
        type_label = HUMAN_READABLE_TYPES[node_type]
        hint = self._name_hints[node_type]
        if self.config.include_naming_hint:
            hint += " (%r pattern)" % self._name_regexps[node_type].pattern
        args = (type_label.capitalize(), name, hint)

        self.add_message("invalid-name", node=node, args=args, confidence=confidence)
        self.stats["badname_" + node_type] += 1

    def _check_name(self, node_type, name, node, confidence=interfaces.HIGH):
        """check for a name using the type's regexp"""

        def _should_exempt_from_invalid_name(node):
            # A "variable" bound to a class (an alias) is exempt.
            if node_type == "variable":
                inferred = utils.safe_infer(node)
                if isinstance(inferred, astroid.ClassDef):
                    return True
            return False

        if utils.is_inside_except(node):
            clobbering, _ = utils.clobber_in_except(node)
            if clobbering:
                return
        if name in self.config.good_names:
            return
        if name in self.config.bad_names:
            self.stats["badname_" + node_type] += 1
            self.add_message("blacklisted-name", node=node, args=name)
            return
        regexp = self._name_regexps[node_type]
        match = regexp.match(name)

        if _is_multi_naming_match(match, node_type, confidence):
            # Defer the warning: leave_module decides which of the competing
            # styles within the name group actually gets reported.
            name_group = self._find_name_group(node_type)
            bad_name_group = self._bad_names.setdefault(name_group, {})
            warnings = bad_name_group.setdefault(match.lastgroup, [])
            warnings.append((node, node_type, name, confidence))

        if match is None and not _should_exempt_from_invalid_name(node):
            self._raise_name_warning(node, node_type, name, confidence)

    def _check_assign_to_new_keyword_violation(self, name, node):
        """Emit assign-to-new-keyword when *name* becomes a keyword in a
        Python version newer than the running interpreter."""
        keyword_first_version = self._name_became_keyword_in_version(
            name, self.KEYWORD_ONSET
        )
        if keyword_first_version is not None:
            self.add_message(
                "assign-to-new-keyword",
                node=node,
                args=(name, keyword_first_version),
                confidence=interfaces.HIGH,
            )

    @staticmethod
    def _name_became_keyword_in_version(name, rules):
        """Return the dotted version string in which *name* becomes a keyword,
        or None when it does not (or the interpreter already has it)."""
        for version, keywords in rules.items():
            if name in keywords and sys.version_info < version:
                return ".".join(map(str, version))
        return None
+
+
class DocStringChecker(_BasicChecker):
    """Checker for missing or empty docstrings on modules, classes,
    functions and methods."""

    msgs = {
        "C0112": (
            "Empty %s docstring",
            "empty-docstring",
            "Used when a module, function, class or method has an empty "
            "docstring (it would be too easy ;).",
            {"old_names": [("W0132", "old-empty-docstring")]},
        ),
        "C0114": (
            "Missing module docstring",
            "missing-module-docstring",
            "Used when a module has no docstring."
            "Empty modules do not require a docstring.",
            {"old_names": [("C0111", "missing-docstring")]},
        ),
        "C0115": (
            "Missing class docstring",
            "missing-class-docstring",
            "Used when a class has no docstring."
            "Even an empty class must have a docstring.",
            {"old_names": [("C0111", "missing-docstring")]},
        ),
        "C0116": (
            "Missing function or method docstring",
            "missing-function-docstring",
            "Used when a function or method has no docstring."
            "Some special methods like __init__ do not require a "
            "docstring.",
            {"old_names": [("C0111", "missing-docstring")]},
        ),
    }
    options = (
        (
            "no-docstring-rgx",
            {
                "default": NO_REQUIRED_DOC_RGX,
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "Regular expression which should only match "
                "function or class names that do not require a "
                "docstring.",
            },
        ),
        (
            "docstring-min-length",
            {
                "default": -1,
                "type": "int",
                "metavar": "<int>",
                "help": (
                    "Minimum line length for functions/classes that"
                    " require docstrings, shorter ones are exempt."
                ),
            },
        ),
    )

    def open(self):
        """Reset the undocumented-node counters at the start of a run."""
        self.stats = self.linter.add_stats(
            undocumented_module=0,
            undocumented_function=0,
            undocumented_method=0,
            undocumented_class=0,
        )

    @utils.check_messages("missing-docstring", "empty-docstring")
    def visit_module(self, node):
        """Check the module docstring."""
        self._check_docstring("module", node)

    @utils.check_messages("missing-docstring", "empty-docstring")
    def visit_classdef(self, node):
        """Check the class docstring, unless the name is exempted by
        no-docstring-rgx."""
        if self.config.no_docstring_rgx.match(node.name) is None:
            self._check_docstring("class", node)

    @utils.check_messages("missing-docstring", "empty-docstring")
    def visit_functiondef(self, node):
        """Check function/method docstrings.

        Exempted names, property setters/deleters, and methods overriding an
        ancestor's method are not required to have their own docstring.
        """
        if self.config.no_docstring_rgx.match(node.name) is None:
            ftype = "method" if node.is_method() else "function"
            if is_property_setter_or_deleter(node):
                return

            if isinstance(node.parent.frame(), astroid.ClassDef):
                overridden = False
                # Confidence is lowered when the class has unknown bases,
                # since an override could then go undetected.
                confidence = (
                    interfaces.INFERENCE
                    if utils.has_known_bases(node.parent.frame())
                    else interfaces.INFERENCE_FAILURE
                )
                # check if node is from a method overridden by its ancestor
                for ancestor in node.parent.frame().ancestors():
                    if node.name in ancestor and isinstance(
                        ancestor[node.name], astroid.FunctionDef
                    ):
                        overridden = True
                        break
                self._check_docstring(
                    ftype, node, report_missing=not overridden, confidence=confidence
                )
            elif isinstance(node.parent.frame(), astroid.Module):
                self._check_docstring(ftype, node)
            else:
                # Neither a method nor a module-level function (e.g. a
                # nested function): not checked.
                return

    visit_asyncfunctiondef = visit_functiondef

    def _check_docstring(
        self, node_type, node, report_missing=True, confidence=interfaces.HIGH
    ):
        """Check that the node has a non-empty docstring; emit the matching
        missing-*-docstring or empty-docstring message otherwise."""
        docstring = node.doc
        if docstring is None:
            if not report_missing:
                return
            lines = utils.get_node_last_lineno(node) - node.lineno

            if node_type == "module" and not lines:
                # If the module has no body, there's no reason
                # to require a docstring.
                return
            max_lines = self.config.docstring_min_length

            if node_type != "module" and max_lines > -1 and lines < max_lines:
                # Shorter than docstring-min-length: exempt.
                return
            # NOTE(review): the counter is incremented before the
            # string-format exemption below, so exempted nodes are still
            # counted as undocumented in the stats — confirm intended.
            self.stats["undocumented_" + node_type] += 1
            if (
                node.body
                and isinstance(node.body[0], astroid.Expr)
                and isinstance(node.body[0].value, astroid.Call)
            ):
                # Most likely a string with a format call. Let's see.
                func = utils.safe_infer(node.body[0].value.func)
                if isinstance(func, astroid.BoundMethod) and isinstance(
                    func.bound, astroid.Instance
                ):
                    # A method call bound to a string-like instance (e.g.
                    # "...".format(...)) is accepted as a docstring.
                    # (The previous separate `== "str"` check was redundant:
                    # it was fully subsumed by this membership test.)
                    if func.bound.name in ("str", "unicode", "bytes"):
                        return
            if node_type == "module":
                message = "missing-module-docstring"
            elif node_type == "class":
                message = "missing-class-docstring"
            else:
                message = "missing-function-docstring"
            self.add_message(message, node=node, confidence=confidence)
        elif not docstring.strip():
            self.stats["undocumented_" + node_type] += 1
            self.add_message(
                "empty-docstring", node=node, args=(node_type,), confidence=confidence
            )
+
+
class PassChecker(_BasicChecker):
    """check if the pass statement is really necessary"""

    msgs = {
        "W0107": (
            "Unnecessary pass statement",
            "unnecessary-pass",
            'Used when a "pass" statement that can be avoided is encountered.',
        )
    }

    @utils.check_messages("unnecessary-pass")
    def visit_pass(self, node):
        """Flag ``pass`` when siblings exist or a docstring already fills
        the enclosing class/function body."""
        parent = node.parent
        has_siblings = len(parent.child_sequence(node)) > 1
        docstring_fills_body = (
            isinstance(parent, (astroid.ClassDef, astroid.FunctionDef))
            and parent.doc is not None
        )
        if has_siblings or docstring_fills_body:
            self.add_message("unnecessary-pass", node=node)
+
+
def _is_one_arg_pos_call(call):
    """Return True when *call* is a call with exactly one argument,
    and that argument is positional."""
    if not isinstance(call, astroid.Call):
        return False
    if call.keywords:
        return False
    return len(call.args) == 1
+
+
class ComparisonChecker(_BasicChecker):
    """Checks for comparisons

    - singleton comparison: 'expr == True', 'expr == False' and 'expr == None'
    - yoda condition: 'const "comp" right' where comp can be '==', '!=', '<',
      '<=', '>' or '>=', and right can be a variable, an attribute, a method or
      a function
    """

    msgs = {
        "C0121": (
            "Comparison to %s should be %s",
            "singleton-comparison",
            "Used when an expression is compared to singleton "
            "values like True, False or None.",
        ),
        "C0122": (
            "Comparison should be %s",
            "misplaced-comparison-constant",
            "Used when the constant is placed on the left side "
            "of a comparison. It is usually clearer in intent to "
            "place it in the right hand side of the comparison.",
        ),
        "C0123": (
            "Using type() instead of isinstance() for a typecheck.",
            "unidiomatic-typecheck",
            "The idiomatic way to perform an explicit typecheck in "
            "Python is to use isinstance(x, Y) rather than "
            "type(x) == Y, type(x) is Y. Though there are unusual "
            "situations where these give different results.",
            {"old_names": [("W0154", "old-unidiomatic-typecheck")]},
        ),
        "R0123": (
            "Comparison to literal",
            "literal-comparison",
            "Used when comparing an object to a literal, which is usually "
            "what you do not want to do, since you can compare to a different "
            "literal than what was expected altogether.",
        ),
        "R0124": (
            "Redundant comparison - %s",
            "comparison-with-itself",
            "Used when something is compared against itself.",
        ),
        "W0143": (
            "Comparing against a callable, did you omit the parenthesis?",
            "comparison-with-callable",
            "This message is emitted when pylint detects that a comparison with a "
            "callable was made, which might suggest that some parenthesis were omitted, "
            "resulting in potential unwanted behaviour.",
        ),
    }

    def _check_singleton_comparison(self, singleton, root_node, negative_check=False):
        """Emit singleton-comparison with a suggested replacement for
        comparisons to True/False/None.

        *negative_check* is True when the comparison operator was '!='.
        """
        if singleton.value is True:
            if not negative_check:
                suggestion = "just 'expr'"
            else:
                suggestion = "just 'not expr'"
            self.add_message(
                "singleton-comparison", node=root_node, args=(True, suggestion)
            )
        elif singleton.value is False:
            if not negative_check:
                suggestion = "'not expr'"
            else:
                suggestion = "'expr'"
            self.add_message(
                "singleton-comparison", node=root_node, args=(False, suggestion)
            )
        elif singleton.value is None:
            if not negative_check:
                suggestion = "'expr is None'"
            else:
                suggestion = "'expr is not None'"
            self.add_message(
                "singleton-comparison", node=root_node, args=(None, suggestion)
            )

    def _check_literal_comparison(self, literal, node):
        """Check if we compare to a literal, which is usually what we do not want to do."""
        nodes = (astroid.List, astroid.Tuple, astroid.Dict, astroid.Set)
        is_other_literal = isinstance(literal, nodes)
        is_const = False
        if isinstance(literal, astroid.Const):
            if isinstance(literal.value, bool) or literal.value is None:
                # Not interested in these values.
                return
            is_const = isinstance(literal.value, (bytes, str, int, float))

        if is_const or is_other_literal:
            self.add_message("literal-comparison", node=node)

    def _check_misplaced_constant(self, node, left, right, operator):
        """Suggest flipping a comparison that has its constant on the left
        (a "yoda condition"); both sides constant is left alone."""
        if isinstance(right, astroid.Const):
            return
        operator = REVERSED_COMPS.get(operator, operator)
        suggestion = "%s %s %r" % (right.as_string(), operator, left.value)
        self.add_message("misplaced-comparison-constant", node=node, args=(suggestion,))

    def _check_logical_tautology(self, node):
        """Check if identifier is compared against itself.
        :param node: Compare node
        :type node: astroid.node_classes.Compare
        :Example:
        val = 786
        if val == val:  # [comparison-with-itself]
            pass
        """
        left_operand = node.left
        right_operand = node.ops[0][1]
        operator = node.ops[0][0]
        if isinstance(left_operand, astroid.Const) and isinstance(
            right_operand, astroid.Const
        ):
            left_operand = left_operand.value
            right_operand = right_operand.value
        elif isinstance(left_operand, astroid.Name) and isinstance(
            right_operand, astroid.Name
        ):
            left_operand = left_operand.name
            right_operand = right_operand.name

        if left_operand == right_operand:
            suggestion = "%s %s %s" % (left_operand, operator, right_operand)
            self.add_message("comparison-with-itself", node=node, args=(suggestion,))

    def _check_callable_comparison(self, node):
        """Emit comparison-with-callable when exactly one operand infers to a
        bare callable (function or bound method)."""
        operator = node.ops[0][0]
        if operator not in COMPARISON_OPERATORS:
            return

        bare_callables = (astroid.FunctionDef, astroid.BoundMethod)
        left_operand, right_operand = node.left, node.ops[0][1]
        # this message should be emitted only when there is comparison of bare callable
        # with non bare callable.
        if (
            sum(
                1
                for operand in (left_operand, right_operand)
                if isinstance(utils.safe_infer(operand), bare_callables)
            )
            == 1
        ):
            self.add_message("comparison-with-callable", node=node)

    @utils.check_messages(
        "singleton-comparison",
        "misplaced-comparison-constant",
        "unidiomatic-typecheck",
        "literal-comparison",
        "comparison-with-itself",
        "comparison-with-callable",
    )
    def visit_compare(self, node):
        """Run all comparison checks on a Compare node."""
        self._check_callable_comparison(node)
        self._check_logical_tautology(node)
        self._check_unidiomatic_typecheck(node)
        # NOTE: this checker only works with binary comparisons like 'x == 42'
        # but not 'x == y == 42'
        if len(node.ops) != 1:
            return

        left = node.left
        operator, right = node.ops[0]
        if operator in COMPARISON_OPERATORS and isinstance(left, astroid.Const):
            self._check_misplaced_constant(node, left, right, operator)

        if operator == "==":
            if isinstance(left, astroid.Const):
                self._check_singleton_comparison(left, node)
            elif isinstance(right, astroid.Const):
                self._check_singleton_comparison(right, node)
        if operator == "!=":
            # NOTE(review): unlike "==", only the right operand is checked
            # here, so "True != x" is not flagged — confirm intended.
            if isinstance(right, astroid.Const):
                self._check_singleton_comparison(right, node, negative_check=True)
        if operator in ("is", "is not"):
            self._check_literal_comparison(right, node)

    def _check_unidiomatic_typecheck(self, node):
        """Detect 'type(x) <op> Y' style typechecks on the left operand."""
        operator, right = node.ops[0]
        if operator in TYPECHECK_COMPARISON_OPERATORS:
            left = node.left
            if _is_one_arg_pos_call(left):
                self._check_type_x_is_y(node, left, operator, right)

    def _check_type_x_is_y(self, node, left, operator, right):
        """Check for expressions like type(x) == Y."""
        left_func = utils.safe_infer(left.func)
        if not (
            isinstance(left_func, astroid.ClassDef) and left_func.qname() == TYPE_QNAME
        ):
            return

        if operator in ("is", "is not") and _is_one_arg_pos_call(right):
            right_func = utils.safe_infer(right.func)
            if (
                isinstance(right_func, astroid.ClassDef)
                and right_func.qname() == TYPE_QNAME
            ):
                # type(x) == type(a)
                right_arg = utils.safe_infer(right.args[0])
                if not isinstance(right_arg, LITERAL_NODE_TYPES):
                    # not e.g. type(x) == type([])
                    return
        self.add_message("unidiomatic-typecheck", node=node)
+
+
def register(linter):
    """required method to auto register this checker"""
    # Registration order is preserved from the historical explicit calls.
    checker_classes = (
        BasicErrorChecker,
        BasicChecker,
        NameChecker,
        DocStringChecker,
        PassChecker,
        ComparisonChecker,
    )
    for checker_class in checker_classes:
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/base_checker.py b/venv/Lib/site-packages/pylint/checkers/base_checker.py
new file mode 100644
index 0000000..f2ae4e5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/base_checker.py
@@ -0,0 +1,187 @@
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
+# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+from inspect import cleandoc
+from typing import Any
+
+from pylint.config import OptionsProviderMixIn
+from pylint.constants import _MSG_ORDER, WarningScope
+from pylint.exceptions import InvalidMessageError
+from pylint.interfaces import UNDEFINED, IRawChecker, ITokenChecker, implements
+from pylint.message.message_definition import MessageDefinition
+from pylint.utils import get_rst_section, get_rst_title
+
+
class BaseChecker(OptionsProviderMixIn):
    """Base class every pylint checker derives from.

    Subclasses declare their ``name``, ``msgs``, ``options`` and ``reports``
    as class attributes and receive the linter instance at construction.
    """

    # checker name (you may reuse an existing one)
    name = None  # type: str
    # options level (0 will be displaying in --help, 1 in --long-help)
    level = 1
    # ordered list of options to control the checker behaviour
    options = ()  # type: Any
    # messages issued by this checker
    msgs = {}  # type: Any
    # reports issued by this checker
    reports = ()  # type: Any
    # mark this checker as enabled or not.
    enabled = True

    def __init__(self, linter=None):
        """checker instances should have the linter as argument

        :param ILinter linter: is an object implementing ILinter."""
        if self.name is not None:
            self.name = self.name.lower()
        OptionsProviderMixIn.__init__(self)
        self.linter = linter

    def __gt__(self, other):
        """Permit to sort a list of Checker by name."""
        return "{}{}".format(self.name, self.msgs).__gt__(
            "{}{}".format(other.name, other.msgs)
        )

    def __repr__(self):
        status = "Checker" if self.enabled else "Disabled checker"
        return "{} '{}' (responsible for '{}')".format(
            status, self.name, "', '".join(self.msgs.keys())
        )

    def __str__(self):
        """This might be incomplete because multiple class inheriting BaseChecker
        can have the same name. Cf MessageHandlerMixIn.get_full_documentation()"""
        return self.get_full_documentation(
            msgs=self.msgs, options=self.options_and_values(), reports=self.reports
        )

    def get_full_documentation(self, msgs, options, reports, doc=None, module=None):
        """Render the checker's reStructuredText documentation: title,
        optional module anchor, options, messages and reports sections."""
        result = ""
        checker_title = "%s checker" % (self.name.replace("_", " ").title())
        if module:
            # Provide anchor to link against
            result += ".. _%s:\n\n" % module
        result += "%s\n" % get_rst_title(checker_title, "~")
        if module:
            result += "This checker is provided by ``%s``.\n" % module
        result += "Verbatim name of the checker is ``%s``.\n\n" % self.name
        if doc:
            # Provide anchor to link against
            result += get_rst_title("{} Documentation".format(checker_title), "^")
            result += "%s\n\n" % cleandoc(doc)
        # options might be an empty generator and not be False when cast to a boolean
        options = list(options)
        if options:
            result += get_rst_title("{} Options".format(checker_title), "^")
            result += "%s\n" % get_rst_section(None, options)
        if msgs:
            result += get_rst_title("{} Messages".format(checker_title), "^")
            # Sort by message category order first, then by content.
            for msgid, msg in sorted(
                msgs.items(), key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])
            ):
                msg = self.create_message_definition_from_tuple(msgid, msg)
                result += "%s\n" % msg.format_help(checkerref=False)
            result += "\n"
        if reports:
            result += get_rst_title("{} Reports".format(checker_title), "^")
            for report in reports:
                result += ":%s: %s\n" % report[:2]
            result += "\n"
        result += "\n"
        return result

    def add_message(
        self, msgid, line=None, node=None, args=None, confidence=None, col_offset=None
    ):
        """Forward a message to the linter, defaulting confidence to UNDEFINED."""
        if not confidence:
            confidence = UNDEFINED
        self.linter.add_message(msgid, line, node, args, confidence, col_offset)

    def check_consistency(self):
        """Check the consistency of msgid.

        msg ids for a checker should be a string of len 4, where the two first
        characters are the checker id and the two last the msg id in this
        checker.

        :raises InvalidMessageError: If the checker id in the messages are not
        always the same. """
        checker_id = None
        existing_ids = []
        for message in self.messages:
            if checker_id is not None and checker_id != message.msgid[1:3]:
                error_msg = "Inconsistent checker part in message id "
                error_msg += "'{}' (expected 'x{checker_id}xx' ".format(
                    message.msgid, checker_id=checker_id
                )
                error_msg += "because we already had {existing_ids}).".format(
                    existing_ids=existing_ids
                )
                raise InvalidMessageError(error_msg)
            checker_id = message.msgid[1:3]
            existing_ids.append(message.msgid)

    def create_message_definition_from_tuple(self, msgid, msg_tuple):
        """Build a MessageDefinition from a (msg, symbol, descr[, options])
        tuple declared in ``msgs``.

        :raises InvalidMessageError: if the tuple has fewer than 3 elements."""
        if implements(self, (IRawChecker, ITokenChecker)):
            default_scope = WarningScope.LINE
        else:
            default_scope = WarningScope.NODE
        options = {}
        if len(msg_tuple) > 3:
            (msg, symbol, descr, options) = msg_tuple
        elif len(msg_tuple) > 2:
            (msg, symbol, descr) = msg_tuple
        else:
            error_msg = """Messages should have a msgid and a symbol. Something like this :

"W1234": (
    "message",
    "message-symbol",
    "Message description with detail.",
    ...
),
"""
            raise InvalidMessageError(error_msg)
        options.setdefault("scope", default_scope)
        return MessageDefinition(self, msgid, msg, descr, symbol, **options)

    @property
    def messages(self) -> list:
        """All MessageDefinitions declared by this checker, sorted by msgid."""
        return [
            self.create_message_definition_from_tuple(msgid, msg_tuple)
            for msgid, msg_tuple in sorted(self.msgs.items())
        ]

    def get_message_definition(self, msgid):
        """Return the MessageDefinition matching *msgid*.

        :raises InvalidMessageError: if this checker declares no such id."""
        for message_definition in self.messages:
            if message_definition.msgid == msgid:
                return message_definition
        # Fixed grammar in the error message ("does not exists" -> "does not exist").
        error_msg = "MessageDefinition for '{}' does not exist. ".format(msgid)
        error_msg += "Choose from {}.".format([m.msgid for m in self.messages])
        raise InvalidMessageError(error_msg)

    # dummy methods implementing the IChecker interface

    def open(self):
        """called before visiting project (i.e set of modules)"""

    def close(self):
        """called after visiting project (i.e set of modules)"""
+
+
class BaseTokenChecker(BaseChecker):
    """Base class for checkers that want to have access to the token stream."""

    def process_tokens(self, tokens):
        """Should be overridden by subclasses.

        :param tokens: the token stream of the module being checked.
        """
        raise NotImplementedError()
diff --git a/venv/Lib/site-packages/pylint/checkers/classes.py b/venv/Lib/site-packages/pylint/checkers/classes.py
new file mode 100644
index 0000000..9f5d099
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/classes.py
@@ -0,0 +1,1844 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2016 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2010 Maarten ter Huurne <maarten@treewalker.org>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2014 David Pursehouse <david.pursehouse@gmail.com>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2016 Anthony Foglia <afoglia@users.noreply.github.com>
+# Copyright (c) 2016 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Ben Green <benhgreen@icloud.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""classes checker for Python code
+"""
+import collections
+from itertools import chain, zip_longest
+
+import astroid
+from astroid import decorators, objects
+from astroid.bases import BUILTINS, Generator
+from astroid.exceptions import DuplicateBasesError, InconsistentMroError
+from astroid.scoped_nodes import function_to_method
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import (
+ PYMETHODS,
+ SPECIAL_METHODS_PARAMS,
+ check_messages,
+ class_is_abstract,
+ decorated_with,
+ decorated_with_property,
+ has_known_bases,
+ is_attr_private,
+ is_attr_protected,
+ is_builtin_object,
+ is_comprehension,
+ is_iterable,
+ is_property_setter,
+ is_property_setter_or_deleter,
+ is_protocol_class,
+ node_frame_class,
+ overrides_a_method,
+ safe_infer,
+ unimplemented_abstract_methods,
+)
+from pylint.interfaces import IAstroidChecker
+from pylint.utils import get_global_option
+
# Name of the Python 3 iterator-protocol method.
NEXT_METHOD = "__next__"
# Builtin names rejected as base classes (see _is_invalid_base_class).
INVALID_BASE_CLASSES = {"bool", "range", "slice", "memoryview"}
BUILTIN_DECORATORS = {"builtins.property", "builtins.classmethod"}

# Dealing with useless override detection, with regard
# to parameters vs arguments

# Name-only snapshot of the arguments at a call site.
_CallSignature = collections.namedtuple(
    "_CallSignature", "args kws starred_args starred_kws"
)
# Name-only snapshot of a function definition's parameter list.
_ParameterSignature = collections.namedtuple(
    "_ParameterSignature", "args kwonlyargs varargs kwargs"
)
+
+
def _signature_from_call(call):
    """Extract a _CallSignature (plain names only) from an astroid Call node.

    Arguments that are not simple names are recorded as None, because some
    transformation might occur for the parameter and we could not compare it
    reliably.
    """
    keyword_names = {}
    positional_names = []
    star_kw_names = []
    star_arg_names = []

    for keyword in call.keywords or []:
        name, value = keyword.arg, keyword.value
        value_is_name = isinstance(value, astroid.Name)
        if name is None and value_is_name:
            # **kwargs forwarding; only bare names are of interest.
            star_kw_names.append(value.name)
        else:
            keyword_names[name] = value.name if value_is_name else None

    for arg in call.args:
        if isinstance(arg, astroid.Starred) and isinstance(arg.value, astroid.Name):
            # *args forwarding of a bare name.
            star_arg_names.append(arg.value.name)
        else:
            positional_names.append(arg.name if isinstance(arg, astroid.Name) else None)

    return _CallSignature(positional_names, keyword_names, star_arg_names, star_kw_names)
+
+
def _signature_from_arguments(arguments):
    """Extract a _ParameterSignature (names only) from an Arguments node,
    dropping the implicit ``self`` parameter."""
    positional = [arg.name for arg in arguments.args if arg.name != "self"]
    keyword_only = [arg.name for arg in arguments.kwonlyargs]
    return _ParameterSignature(
        positional, keyword_only, arguments.vararg, arguments.kwarg
    )
+
+
def _definition_equivalent_to_call(definition, call):
    """Return True when *call* forwards exactly the parameters of *definition*.

    *definition* is a _ParameterSignature and *call* a _CallSignature; both
    contain plain names (or None placeholders).
    """
    # **kwargs must be forwarded if and only if the definition declares one.
    same_kw_variadics = (
        definition.kwargs in call.starred_kws
        if definition.kwargs
        else not call.starred_kws
    )
    # Likewise for *args.
    same_args_variadics = (
        definition.varargs in call.starred_args
        if definition.varargs
        else not call.starred_args
    )
    same_kwonlyargs = all(kw in call.kws for kw in definition.kwonlyargs)
    same_args = definition.args == call.args

    # A keyword that is neither a positional name nor a keyword-only
    # parameter would go into **kwargs or be extraneous; either way the
    # signature differs from the call site.
    no_additional_kwarg_arguments = all(
        keyword in call.args or keyword in definition.kwonlyargs
        for keyword in call.kws or ()
    )

    return (
        same_args
        and same_kwonlyargs
        and same_args_variadics
        and same_kw_variadics
        and no_additional_kwarg_arguments
    )
+
+
+# Deal with parameters overridding in two methods.
+
+
def _positional_parameters(method):
    """Return the positional parameters of *method*, dropping the implicit
    receiver (self/cls) for instance methods and classmethods."""
    parameters = method.args.args
    if method.type in ("classmethod", "method"):
        return parameters[1:]
    return parameters
+
+
def _get_node_type(node, potential_types):
    """
    Return the type of the node if it exists in potential_types.

    Args:
        node (astroid.node): node to get the type of.
        potential_types (tuple): potential types of the node.

    Returns:
        type: type of the node or None.
    """
    return next(
        (candidate for candidate in potential_types if isinstance(node, candidate)),
        None,
    )
+
+
def _check_arg_equality(node_a, node_b, attr_name):
    """
    Check equality of nodes based on the comparison of their attributes named attr_name.

    Args:
        node_a (astroid.node): first node to compare.
        node_b (astroid.node): second node to compare.
        attr_name (str): name of the nodes attribute to use for comparison.

    Returns:
        bool: True if node_a.attr_name == node_b.attr_name, False otherwise.
    """
    attribute_a = getattr(node_a, attr_name)
    attribute_b = getattr(node_b, attr_name)
    return attribute_a == attribute_b
+
+
def _has_different_parameters_default_value(original, overridden):
    """
    Check if original and overridden methods arguments have different default values

    Return True if one of the overridden arguments has a default
    value different from the default value of the original argument
    If one of the method doesn't have argument (.args is None)
    return False
    """
    if original.args is None or overridden.args is None:
        return False

    # Map each handled astroid node type to the attribute used to compare two
    # default values of that type.  Built once, outside the loop, instead of
    # being recreated for every parameter (it is loop-invariant).
    astroid_type_compared_attr = {
        astroid.Const: "value",
        astroid.ClassDef: "name",
        astroid.Tuple: "elts",
        astroid.List: "elts",
    }
    handled_types = tuple(astroid_type_compared_attr)
    # Sentinel marking "this parameter has no default value".
    default_missing = object()

    all_args = chain(original.args, original.kwonlyargs)
    for param_name in (param.name for param in all_args):
        try:
            original_default = original.default_value(param_name)
        except astroid.exceptions.NoDefault:
            original_default = default_missing
        try:
            overridden_default = overridden.default_value(param_name)
        except astroid.exceptions.NoDefault:
            overridden_default = default_missing

        default_list = [
            arg == default_missing for arg in (original_default, overridden_default)
        ]
        if any(default_list) and not all(default_list):
            # Only one arg has no default value
            return True

        original_type = _get_node_type(original_default, handled_types)
        if original_type:
            # We handle only astroid types that are inside the dict astroid_type_compared_attr
            if not isinstance(overridden_default, original_type):
                # Two args with same name but different types
                return True
            if not _check_arg_equality(
                original_default,
                overridden_default,
                astroid_type_compared_attr[original_type],
            ):
                # Two args with same type but different values
                return True
    return False
+
+
def _has_different_parameters(original, overridden, dummy_parameter_regex):
    """Return True when the two parameter lists differ in length or in names.

    Parameters whose name matches *dummy_parameter_regex* are allowed to
    differ and are skipped.
    """
    for original_param, overridden_param in zip_longest(original, overridden):
        if not (original_param and overridden_param):
            # One list is longer than the other.
            return True
        names = (original_param.name, overridden_param.name)
        if any(dummy_parameter_regex.match(name) for name in names):
            continue
        if names[0] != names[1]:
            return True
    return False
+
+
def _different_parameters(original, overridden, dummy_parameter_regex):
    """Determine if the two methods have different parameters

    They are considered to have different parameters if:

    * they have different positional parameters, including different names

    * one of the methods is having variadics, while the other is not

    * they have different keyword only parameters.

    """
    different_positional = _has_different_parameters(
        _positional_parameters(original),
        _positional_parameters(overridden),
        dummy_parameter_regex,
    )
    different_kwonly = _has_different_parameters(
        original.args.kwonlyargs, overridden.args.kwonlyargs, dummy_parameter_regex
    )
    if original.name in PYMETHODS:
        # Ignore the difference for special methods. If the parameter
        # numbers are different, then that is going to be caught by
        # unexpected-special-method-signature.
        # If the names are different, it doesn't matter, since they can't
        # be used as keyword arguments anyway.
        different_positional = different_kwonly = False

    # Both or none should have extra variadics, otherwise the method
    # loses or gains capabilities that are not reflected into the parent method,
    # leading to potential inconsistencies in the code.
    different_kwarg = bool(original.args.kwarg) != bool(overridden.args.kwarg)
    different_vararg = bool(original.args.vararg) != bool(overridden.args.vararg)

    return (
        different_positional
        or different_kwarg
        or different_vararg
        or different_kwonly
    )
+
+
def _is_invalid_base_class(cls):
    """True for builtin classes that are rejected as base classes."""
    return is_builtin_object(cls) and cls.name in INVALID_BASE_CLASSES
+
+
def _has_data_descriptor(cls, attr):
    """Check whether *attr* on *cls* resolves to a data descriptor, i.e. an
    instance exposing both __get__ and __set__."""
    for attribute in cls.getattr(attr):
        try:
            for inferred in attribute.infer():
                if not isinstance(inferred, astroid.Instance):
                    continue
                try:
                    inferred.getattr("__get__")
                    inferred.getattr("__set__")
                except astroid.NotFoundError:
                    continue
                return True
        except astroid.InferenceError:
            # Can't infer, avoid emitting a false positive in this case.
            return True
    return False
+
+
def _called_in_methods(func, klass, methods):
    """Check whether *func* is called from any of the named *methods* of
    *klass*.  Returns True if so, False otherwise."""
    if not isinstance(func, astroid.FunctionDef):
        return False
    for method_name in methods:
        try:
            candidates = klass.getattr(method_name)
        except astroid.NotFoundError:
            continue
        for candidate in candidates:
            for call in candidate.nodes_of_class(astroid.Call):
                try:
                    bound = next(call.func.infer())
                except (astroid.InferenceError, StopIteration):
                    continue
                if not isinstance(bound, astroid.BoundMethod):
                    continue
                # Unwrap to the underlying function to compare names.
                func_obj = bound._proxied
                if isinstance(func_obj, astroid.UnboundMethod):
                    func_obj = func_obj._proxied
                if func_obj.name == func.name:
                    return True
    return False
+
+
def _is_attribute_property(name, klass):
    """Check whether the attribute *name* of *klass* is a property.

    Looks for ``property`` values, or functions decorated with ``property``
    (or a subclass of it).  Returns True if the name is a property in the
    given klass, False otherwise.
    """
    try:
        attributes = klass.getattr(name)
    except astroid.NotFoundError:
        return False
    property_name = "{}.property".format(BUILTINS)
    for attr in attributes:
        if attr is astroid.Uninferable:
            continue
        try:
            inferred = next(attr.infer())
        except astroid.InferenceError:
            continue
        is_property_function = isinstance(
            inferred, astroid.FunctionDef
        ) and decorated_with_property(inferred)
        if is_property_function or inferred.pytype() == property_name:
            return True
    return False
+
+
def _has_bare_super_call(fundef_node):
    """True if the function body contains a zero-argument ``super()`` call."""
    return any(
        isinstance(call.func, astroid.Name)
        and call.func.name == "super"
        and not call.args
        for call in fundef_node.nodes_of_class(astroid.Call)
    )
+
+
def _safe_infer_call_result(node, caller, context=None):
    """
    Safely infer the return value of a function.

    Returns None if inference failed or if there is some ambiguity (more than
    one node has been inferred). Otherwise returns inferred value.
    """
    try:
        inference = node.infer_call_result(caller, context=context)
        value = next(inference)
    except (astroid.InferenceError, StopIteration):
        # Inference failed, or no value was inferred at all.
        return None
    try:
        next(inference)
    except StopIteration:
        # Exactly one value was inferred: unambiguous result.
        return value
    except astroid.InferenceError:
        # There is some kind of ambiguity.
        return None
    # More than one node was inferred: ambiguous result.
    return None
+
+
def _has_same_layout_slots(slots, assigned_value):
    """True when *assigned_value* infers to a class whose __slots__ entries
    match *slots* pairwise (same values, same length)."""
    inferred = next(assigned_value.infer())
    if not isinstance(inferred, astroid.ClassDef):
        return False
    other_slots = inferred.slots()
    return all(
        first_slot and second_slot and first_slot.value == second_slot.value
        for (first_slot, second_slot) in zip_longest(slots, other_slots)
    )
+
+
# msgid -> (message format string, symbol, description); consumed through
# ClassChecker.msgs by the BaseChecker message machinery.
MSGS = {
    "F0202": (
        "Unable to check methods signature (%s / %s)",
        "method-check-failed",
        "Used when Pylint has been unable to check methods signature "
        "compatibility for an unexpected reason. Please report this kind "
        "if you don't make sense of it.",
    ),
    "E0202": (
        "An attribute defined in %s line %s hides this method",
        "method-hidden",
        "Used when a class defines a method which is hidden by an "
        "instance attribute from an ancestor class or set by some "
        "client code.",
    ),
    "E0203": (
        "Access to member %r before its definition line %s",
        "access-member-before-definition",
        "Used when an instance member is accessed before it's actually assigned.",
    ),
    "W0201": (
        "Attribute %r defined outside __init__",
        "attribute-defined-outside-init",
        "Used when an instance attribute is defined outside the __init__ method.",
    ),
    "W0212": (
        "Access to a protected member %s of a client class",  # E0214
        "protected-access",
        "Used when a protected member (i.e. class member with a name "
        "beginning with an underscore) is access outside the class or a "
        "descendant of the class where it's defined.",
    ),
    "E0211": (
        "Method has no argument",
        "no-method-argument",
        "Used when a method which should have the bound instance as "
        "first argument has no argument defined.",
    ),
    "E0213": (
        'Method should have "self" as first argument',
        "no-self-argument",
        'Used when a method has an attribute different the "self" as '
        "first argument. This is considered as an error since this is "
        "a so common convention that you shouldn't break it!",
    ),
    "C0202": (
        "Class method %s should have %s as first argument",
        "bad-classmethod-argument",
        "Used when a class method has a first argument named differently "
        "than the value specified in valid-classmethod-first-arg option "
        '(default to "cls"), recommended to easily differentiate them '
        "from regular instance methods.",
    ),
    "C0203": (
        "Metaclass method %s should have %s as first argument",
        "bad-mcs-method-argument",
        "Used when a metaclass method has a first argument named "
        "differently than the value specified in valid-classmethod-first"
        '-arg option (default to "cls"), recommended to easily '
        "differentiate them from regular instance methods.",
    ),
    "C0204": (
        "Metaclass class method %s should have %s as first argument",
        "bad-mcs-classmethod-argument",
        "Used when a metaclass class method has a first argument named "
        "differently than the value specified in valid-metaclass-"
        'classmethod-first-arg option (default to "mcs"), recommended to '
        "easily differentiate them from regular instance methods.",
    ),
    "W0211": (
        "Static method with %r as first argument",
        "bad-staticmethod-argument",
        'Used when a static method has "self" or a value specified in '
        "valid-classmethod-first-arg option or "
        "valid-metaclass-classmethod-first-arg option as first argument.",
    ),
    "R0201": (
        "Method could be a function",
        "no-self-use",
        "Used when a method doesn't use its bound instance, and so could "
        "be written as a function.",
    ),
    "W0221": (
        "Parameters differ from %s %r method",
        "arguments-differ",
        "Used when a method has a different number of arguments than in "
        "the implemented interface or in an overridden method.",
    ),
    "W0222": (
        "Signature differs from %s %r method",
        "signature-differs",
        "Used when a method signature is different than in the "
        "implemented interface or in an overridden method.",
    ),
    "W0223": (
        "Method %r is abstract in class %r but is not overridden",
        "abstract-method",
        "Used when an abstract method (i.e. raise NotImplementedError) is "
        "not overridden in concrete class.",
    ),
    "W0231": (
        "__init__ method from base class %r is not called",
        "super-init-not-called",
        "Used when an ancestor class method has an __init__ method "
        "which is not called by a derived class.",
    ),
    "W0232": (
        "Class has no __init__ method",
        "no-init",
        "Used when a class has no __init__ method, neither its parent classes.",
    ),
    "W0233": (
        "__init__ method from a non direct base class %r is called",
        "non-parent-init-called",
        "Used when an __init__ method is called on a class which is not "
        "in the direct ancestors for the analysed class.",
    ),
    "W0235": (
        "Useless super delegation in method %r",
        "useless-super-delegation",
        "Used whenever we can detect that an overridden method is useless, "
        "relying on super() delegation to do the same thing as another method "
        "from the MRO.",
    ),
    "W0236": (
        "Method %r was expected to be %r, found it instead as %r",
        "invalid-overridden-method",
        "Used when we detect that a method was overridden as a property "
        "or the other way around, which could result in potential bugs at "
        "runtime.",
    ),
    "E0236": (
        "Invalid object %r in __slots__, must contain only non empty strings",
        "invalid-slots-object",
        "Used when an invalid (non-string) object occurs in __slots__.",
    ),
    "E0237": (
        "Assigning to attribute %r not defined in class slots",
        "assigning-non-slot",
        "Used when assigning to an attribute not defined in the class slots.",
    ),
    "E0238": (
        "Invalid __slots__ object",
        "invalid-slots",
        "Used when an invalid __slots__ is found in class. "
        "Only a string, an iterable or a sequence is permitted.",
    ),
    "E0239": (
        "Inheriting %r, which is not a class.",
        "inherit-non-class",
        "Used when a class inherits from something which is not a class.",
    ),
    "E0240": (
        "Inconsistent method resolution order for class %r",
        "inconsistent-mro",
        "Used when a class has an inconsistent method resolution order.",
    ),
    "E0241": (
        "Duplicate bases for class %r",
        "duplicate-bases",
        "Used when a class has duplicate bases.",
    ),
    "E0242": (
        "Value %r in slots conflicts with class variable",
        "class-variable-slots-conflict",
        "Used when a value in __slots__ conflicts with a class variable, property or method.",
    ),
    "R0202": (
        "Consider using a decorator instead of calling classmethod",
        "no-classmethod-decorator",
        "Used when a class method is defined without using the decorator syntax.",
    ),
    "R0203": (
        "Consider using a decorator instead of calling staticmethod",
        "no-staticmethod-decorator",
        "Used when a static method is defined without using the decorator syntax.",
    ),
    "C0205": (
        "Class __slots__ should be a non-string iterable",
        "single-string-used-for-slots",
        "Used when a class __slots__ is a simple string, rather than an iterable.",
    ),
    "R0205": (
        "Class %r inherits from object, can be safely removed from bases in python3",
        "useless-object-inheritance",
        "Used when a class inherit from object, which under python3 is implicit, "
        "hence can be safely removed from bases.",
    ),
    "R0206": (
        "Cannot have defined parameters for properties",
        "property-with-parameters",
        "Used when we detect that a property also has parameters, which are useless, "
        "given that properties cannot be called with additional arguments.",
    ),
}
+
+
class ScopeAccessMap:
    """Record, per class scope, which attributes were accessed and where."""

    def __init__(self):
        # class frame -> attribute name -> list of access nodes
        self._scopes = collections.defaultdict(lambda: collections.defaultdict(list))

    def set_accessed(self, node):
        """Register *node* as an attribute access in its enclosing class."""
        frame = node_frame_class(node)
        if frame is None:
            # The node does not live in a class; nothing to record.
            return
        self._scopes[frame][node.attrname].append(node)

    def accessed(self, scope):
        """Return the mapping of accessed attribute names for *scope*."""
        return self._scopes.get(scope, {})
+
+
+class ClassChecker(BaseChecker):
+ """checks for :
+ * methods without self as first argument
+ * overridden methods signature
+ * access only to existent members via self
+ * attributes not defined in the __init__ method
+ * unreachable code
+ """
+
+ __implements__ = (IAstroidChecker,)
+
+ # configuration section name
+ name = "classes"
+ # messages
+ msgs = MSGS
+ priority = -2
+ # configuration options
+ options = (
+ (
+ "defining-attr-methods",
+ {
+ "default": ("__init__", "__new__", "setUp", "__post_init__"),
+ "type": "csv",
+ "metavar": "<method names>",
+ "help": "List of method names used to declare (i.e. assign) \
+instance attributes.",
+ },
+ ),
+ (
+ "valid-classmethod-first-arg",
+ {
+ "default": ("cls",),
+ "type": "csv",
+ "metavar": "<argument names>",
+ "help": "List of valid names for the first argument in \
+a class method.",
+ },
+ ),
+ (
+ "valid-metaclass-classmethod-first-arg",
+ {
+ "default": ("cls",),
+ "type": "csv",
+ "metavar": "<argument names>",
+ "help": "List of valid names for the first argument in \
+a metaclass class method.",
+ },
+ ),
+ (
+ "exclude-protected",
+ {
+ "default": (
+ # namedtuple public API.
+ "_asdict",
+ "_fields",
+ "_replace",
+ "_source",
+ "_make",
+ ),
+ "type": "csv",
+ "metavar": "<protected access exclusions>",
+ "help": (
+ "List of member names, which should be excluded "
+ "from the protected access warning."
+ ),
+ },
+ ),
+ )
+
+ def __init__(self, linter=None):
+ BaseChecker.__init__(self, linter)
+ self._accessed = ScopeAccessMap()
+ self._first_attrs = []
+ self._meth_could_be_func = None
+
    @decorators.cachedproperty
    def _dummy_rgx(self):
        # Global "dummy-variables-rgx" option: parameter names matching it are
        # treated as dummies when comparing overridden signatures.
        return get_global_option(self, "dummy-variables-rgx", default=None)
+
    @decorators.cachedproperty
    def _ignore_mixin(self):
        # Global "ignore-mixin-members" option: when true, classes whose name
        # ends in "mixin" are skipped in leave_classdef.
        return get_global_option(self, "ignore-mixin-members", default=True)
+
    @check_messages(
        "abstract-method",
        "no-init",
        "invalid-slots",
        "single-string-used-for-slots",
        "invalid-slots-object",
        "class-variable-slots-conflict",
        "inherit-non-class",
        "useless-object-inheritance",
        "inconsistent-mro",
        "duplicate-bases",
    )
    def visit_classdef(self, node):
        """Run the class-level checks on a class definition.

        Emits no-init for plain classes (with known bases) lacking a local
        __init__, then delegates the slots, bases and MRO checks.
        """
        self._check_bases_classes(node)
        # if not an exception or a metaclass
        if node.type == "class" and has_known_bases(node):
            try:
                node.local_attr("__init__")
            except astroid.NotFoundError:
                self.add_message("no-init", args=node, node=node)
        self._check_slots(node)
        self._check_proper_bases(node)
        self._check_consistent_mro(node)
+
+ def _check_consistent_mro(self, node):
+ """Detect that a class has a consistent mro or duplicate bases."""
+ try:
+ node.mro()
+ except InconsistentMroError:
+ self.add_message("inconsistent-mro", args=node.name, node=node)
+ except DuplicateBasesError:
+ self.add_message("duplicate-bases", args=node.name, node=node)
+ except NotImplementedError:
+ # Old style class, there's no mro so don't do anything.
+ pass
+
+ def _check_proper_bases(self, node):
+ """
+ Detect that a class inherits something which is not
+ a class or a type.
+ """
+ for base in node.bases:
+ ancestor = safe_infer(base)
+ if ancestor in (astroid.Uninferable, None):
+ continue
+ if isinstance(ancestor, astroid.Instance) and ancestor.is_subtype_of(
+ "%s.type" % (BUILTINS,)
+ ):
+ continue
+
+ if not isinstance(ancestor, astroid.ClassDef) or _is_invalid_base_class(
+ ancestor
+ ):
+ self.add_message("inherit-non-class", args=base.as_string(), node=node)
+
+ if ancestor.name == object.__name__:
+ self.add_message(
+ "useless-object-inheritance", args=node.name, node=node
+ )
+
    def leave_classdef(self, cnode):
        """Close a class node: check that instance attributes are defined in
        __init__ (attribute-defined-outside-init) and check access to existent
        members.
        """
        # check access to existent members on non metaclass classes
        if self._ignore_mixin and cnode.name[-5:].lower() == "mixin":
            # We are in a mixin class. No need to try to figure out if
            # something is missing, since it is most likely that it will
            # miss.
            return

        accessed = self._accessed.accessed(cnode)
        if cnode.type != "metaclass":
            self._check_accessed_members(cnode, accessed)
        # checks attributes are defined in an allowed method such as __init__
        if not self.linter.is_message_enabled("attribute-defined-outside-init"):
            return
        defining_methods = self.config.defining_attr_methods
        current_module = cnode.root()
        for attr, nodes in cnode.instance_attrs.items():
            # Exclude `__dict__` as it is already defined.
            if attr == "__dict__":
                continue

            # Skip nodes which are not in the current module and it may screw up
            # the output, while it's not worth it
            nodes = [
                n
                for n in nodes
                if not isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
                and n.root() is current_module
            ]
            if not nodes:
                continue  # error detected by typechecking

            # Check if any method attr is defined in is a defining method
            # or if we have the attribute defined in a setter.
            frames = (node.frame() for node in nodes)
            if any(
                frame.name in defining_methods or is_property_setter(frame)
                for frame in frames
            ):
                continue

            # check attribute is defined in a parent's __init__
            for parent in cnode.instance_attr_ancestors(attr):
                attr_defined = False
                # check if any parent method attr is defined in is a defining method
                for node in parent.instance_attrs[attr]:
                    if node.frame().name in defining_methods:
                        attr_defined = True
                if attr_defined:
                    # we're done :)
                    break
            else:
                # check attribute is defined as a class attribute
                try:
                    cnode.local_attr(attr)
                except astroid.NotFoundError:
                    for node in nodes:
                        if node.frame().name not in defining_methods:
                            # If the attribute was set by a call in any
                            # of the defining methods, then don't emit
                            # the warning.
                            if _called_in_methods(
                                node.frame(), cnode, defining_methods
                            ):
                                continue
                            self.add_message(
                                "attribute-defined-outside-init", args=attr, node=node
                            )
+
    def visit_functiondef(self, node):
        """check method arguments, overriding"""
        # ignore actual functions
        if not node.is_method():
            return

        self._check_useless_super_delegation(node)
        self._check_property_with_parameters(node)

        klass = node.parent.frame()
        self._meth_could_be_func = True
        # check first argument is self if this is actually a method
        self._check_first_arg_for_type(node, klass.type == "metaclass")
        if node.name == "__init__":
            self._check_init(node)
            return
        # check signature if the method overloads inherited method
        for overridden in klass.local_attr_ancestors(node.name):
            # get astroid for the searched method
            try:
                parent_function = overridden[node.name]
            except KeyError:
                # we have found the method but it's not in the local
                # dictionary.
                # This may happen with astroid build from living objects
                continue
            if not isinstance(parent_function, astroid.FunctionDef):
                continue
            self._check_signature(node, parent_function, "overridden", klass)
            self._check_invalid_overridden_method(node, parent_function)
            break

        if node.decorators:
            for decorator in node.decorators.nodes:
                if isinstance(decorator, astroid.Attribute) and decorator.attrname in (
                    "getter",
                    "setter",
                    "deleter",
                ):
                    # attribute affectation will call this method, not hiding it
                    return
                if isinstance(decorator, astroid.Name):
                    if decorator.name == "property":
                        # attribute affectation will either call a setter or raise
                        # an attribute error, anyway not hiding the function
                        return

                # Infer the decorator and see if it returns something useful
                inferred = safe_infer(decorator)
                if not inferred:
                    return
                if isinstance(inferred, astroid.FunctionDef):
                    # Okay, it's a decorator, let's see what it can infer.
                    try:
                        inferred = next(inferred.infer_call_result(inferred))
                    except astroid.InferenceError:
                        return
                try:
                    if (
                        isinstance(inferred, (astroid.Instance, astroid.ClassDef))
                        and inferred.getattr("__get__")
                        and inferred.getattr("__set__")
                    ):
                        # The decorator produced a descriptor: not hidden.
                        return
                except astroid.AttributeInferenceError:
                    pass

        # check if the method is hidden by an attribute
        try:
            overridden = klass.instance_attr(node.name)[0]
            overridden_frame = overridden.frame()
            if (
                isinstance(overridden_frame, astroid.FunctionDef)
                and overridden_frame.type == "method"
            ):
                overridden_frame = overridden_frame.parent.frame()
            if isinstance(overridden_frame, astroid.ClassDef) and klass.is_subtype_of(
                overridden_frame.qname()
            ):
                args = (overridden.root().name, overridden.fromlineno)
                self.add_message("method-hidden", args=args, node=node)
        except astroid.NotFoundError:
            pass

    # Async methods get the same checks as plain methods.
    visit_asyncfunctiondef = visit_functiondef
+
    def _check_useless_super_delegation(self, function):
        """Check if the given function node is an useless method override

        We consider it *useless* if it uses the super() builtin, but having
        nothing additional whatsoever than not implementing the method at all.
        If the method uses super() to delegate an operation to the rest of the MRO,
        and if the method called is the same as the current one, the arguments
        passed to super() are the same as the parameters that were passed to
        this method, then the method could be removed altogether, by letting
        other implementation to take precedence.
        """

        if (
            not function.is_method()
            # With decorators is a change of use
            or function.decorators
        ):
            return

        body = function.body
        if len(body) != 1:
            # Multiple statements, which means this overridden method
            # could do multiple things we are not aware of.
            return

        statement = body[0]
        if not isinstance(statement, (astroid.Expr, astroid.Return)):
            # Doing something else than what we are interested into.
            return

        call = statement.value
        if (
            not isinstance(call, astroid.Call)
            # Not a super() attribute access.
            or not isinstance(call.func, astroid.Attribute)
        ):
            return

        # Should be a super call.
        try:
            super_call = next(call.func.expr.infer())
        except astroid.InferenceError:
            return
        else:
            if not isinstance(super_call, objects.Super):
                return

        # The name should be the same.
        if call.func.attrname != function.name:
            return

        # Should be a super call with the MRO pointer being the
        # current class and the type being the current instance.
        current_scope = function.parent.scope()
        if (
            super_call.mro_pointer != current_scope
            or not isinstance(super_call.type, astroid.Instance)
            or super_call.type.name != current_scope.name
        ):
            return

        # Check values of default args
        klass = function.parent.frame()
        meth_node = None
        for overridden in klass.local_attr_ancestors(function.name):
            # get astroid for the searched method
            try:
                meth_node = overridden[function.name]
            except KeyError:
                # we have found the method but it's not in the local
                # dictionary.
                # This may happen with astroid build from living objects
                continue
            if (
                not isinstance(meth_node, astroid.FunctionDef)
                # If the method have an ancestor which is not a
                # function then it is legitimate to redefine it
                or _has_different_parameters_default_value(
                    meth_node.args, function.args
                )
            ):
                return
            break

        # Detect if the parameters are the same as the call's arguments.
        params = _signature_from_arguments(function.args)
        args = _signature_from_call(call)

        if meth_node is not None:

            def form_annotations(annotations):
                # Compare annotations by their source text.
                return [
                    annotation.as_string() for annotation in filter(None, annotations)
                ]

            called_annotations = form_annotations(function.args.annotations)
            overridden_annotations = form_annotations(meth_node.args.annotations)
            if called_annotations and overridden_annotations:
                if called_annotations != overridden_annotations:
                    # Different annotations make the override meaningful.
                    return

        if _definition_equivalent_to_call(params, args):
            self.add_message(
                "useless-super-delegation", node=function, args=(function.name,)
            )
+
+ def _check_property_with_parameters(self, node):
+ if node.args.args and len(node.args.args) > 1 and decorated_with_property(node):
+ self.add_message("property-with-parameters", node=node)
+
+ def _check_invalid_overridden_method(self, function_node, parent_function_node):
+ parent_is_property = decorated_with_property(
+ parent_function_node
+ ) or is_property_setter_or_deleter(parent_function_node)
+ current_is_property = decorated_with_property(
+ function_node
+ ) or is_property_setter_or_deleter(function_node)
+ if parent_is_property and not current_is_property:
+ self.add_message(
+ "invalid-overridden-method",
+ args=(function_node.name, "property", function_node.type),
+ node=function_node,
+ )
+ elif not parent_is_property and current_is_property:
+ self.add_message(
+ "invalid-overridden-method",
+ args=(function_node.name, "method", "property"),
+ node=function_node,
+ )
+
    def _check_slots(self, node):
        """Validate the ``__slots__`` declaration of the class *node*, if any.

        Emits invalid-slots when __slots__ is neither an iterable nor a
        comprehension, single-string-used-for-slots when it is a lone string
        (which would slot each *character*), and delegates per-entry
        validation to _check_slots_elt.
        """
        if "__slots__" not in node.locals:
            return
        for slots in node.igetattr("__slots__"):
            # check if __slots__ is a valid type
            if slots is astroid.Uninferable:
                continue
            if not is_iterable(slots) and not is_comprehension(slots):
                self.add_message("invalid-slots", node=node)
                continue

            if isinstance(slots, astroid.Const):
                # a string, ignore the following checks
                self.add_message("single-string-used-for-slots", node=node)
                continue
            if not hasattr(slots, "itered"):
                # we can't obtain the values, maybe a .deque?
                continue

            if isinstance(slots, astroid.Dict):
                # For a dict, the slot names are the keys.
                values = [item[0] for item in slots.items]
            else:
                values = slots.itered()
            if values is astroid.Uninferable:
                return
            for elt in values:
                try:
                    self._check_slots_elt(elt, node)
                except astroid.InferenceError:
                    # Could not infer this entry; nothing to report for it.
                    continue
+
    def _check_slots_elt(self, elt, node):
        """Validate a single ``__slots__`` entry *elt* of class *node*.

        Every inferred value must be a non-empty string and must not clash
        with a class variable defined in the class body.
        """
        for inferred in elt.infer():
            if inferred is astroid.Uninferable:
                continue
            if not isinstance(inferred, astroid.Const) or not isinstance(
                inferred.value, str
            ):
                # Slot entries must be strings.
                self.add_message(
                    "invalid-slots-object", args=inferred.as_string(), node=elt
                )
                continue
            if not inferred.value:
                # An empty string is not a valid attribute name.
                self.add_message(
                    "invalid-slots-object", args=inferred.as_string(), node=elt
                )

            # Check if we have a conflict with a class variable.
            class_variable = node.locals.get(inferred.value)
            if class_variable:
                # Skip annotated assignments which don't conflict at all with slots.
                if len(class_variable) == 1:
                    parent = class_variable[0].parent
                    if isinstance(parent, astroid.AnnAssign) and parent.value is None:
                        return
                self.add_message(
                    "class-variable-slots-conflict", args=(inferred.value,), node=elt
                )
+
    def leave_functiondef(self, node):
        """on method node, check if this method couldn't be a function

        ignore class, static and abstract methods, initializer,
        methods overridden from a parent class.
        """
        if node.is_method():
            if node.args.args is not None:
                # Pop the first-argument name pushed by _check_first_arg_for_type.
                self._first_attrs.pop()
            if not self.linter.is_message_enabled("no-self-use"):
                return
            class_node = node.parent.frame()
            # _meth_could_be_func is cleared by visit_name whenever the
            # mandatory first parameter is actually referenced in the body.
            if (
                self._meth_could_be_func
                and node.type == "method"
                and node.name not in PYMETHODS
                and not (
                    node.is_abstract()
                    or overrides_a_method(class_node, node.name)
                    or decorated_with_property(node)
                    or _has_bare_super_call(node)
                    or is_protocol_class(class_node)
                )
            ):
                self.add_message("no-self-use", node=node)
+
+ def visit_attribute(self, node):
+ """check if the getattr is an access to a class member
+ if so, register it. Also check for access to protected
+ class member from outside its class (but ignore __special__
+ methods)
+ """
+ # Check self
+ if self._uses_mandatory_method_param(node):
+ self._accessed.set_accessed(node)
+ return
+ if not self.linter.is_message_enabled("protected-access"):
+ return
+
+ self._check_protected_attribute_access(node)
+
+ def visit_assignattr(self, node):
+ if isinstance(
+ node.assign_type(), astroid.AugAssign
+ ) and self._uses_mandatory_method_param(node):
+ self._accessed.set_accessed(node)
+ self._check_in_slots(node)
+
    def _check_in_slots(self, node):
        """ Check that the given AssignAttr node
        is defined in the class slots.
        """
        inferred = safe_infer(node.expr)
        if not isinstance(inferred, astroid.Instance):
            return

        klass = inferred._proxied
        if not has_known_bases(klass):
            # An unknown base could provide a __dict__, so stay silent.
            return
        if "__slots__" not in klass.locals or not klass.newstyle:
            return

        slots = klass.slots()
        if slots is None:
            return
        # If any ancestor doesn't use slots, the slots
        # defined for this class are superfluous.
        if any(
            "__slots__" not in ancestor.locals and ancestor.name != "object"
            for ancestor in klass.ancestors()
        ):
            return

        if not any(slot.value == node.attrname for slot in slots):
            # If we have a '__dict__' in slots, then
            # assigning any name is valid.
            if not any(slot.value == "__dict__" for slot in slots):
                if _is_attribute_property(node.attrname, klass):
                    # Properties circumvent the slots mechanism,
                    # so we should not emit a warning for them.
                    return
                if node.attrname in klass.locals and _has_data_descriptor(
                    klass, node.attrname
                ):
                    # Descriptors circumvent the slots mechanism as well.
                    return
                if node.attrname == "__class__" and _has_same_layout_slots(
                    slots, node.parent.value
                ):
                    # Swapping __class__ is fine when both classes share
                    # the same slots layout.
                    return
                self.add_message("assigning-non-slot", args=(node.attrname,), node=node)
+
+ @check_messages(
+ "protected-access", "no-classmethod-decorator", "no-staticmethod-decorator"
+ )
+ def visit_assign(self, assign_node):
+ self._check_classmethod_declaration(assign_node)
+ node = assign_node.targets[0]
+ if not isinstance(node, astroid.AssignAttr):
+ return
+
+ if self._uses_mandatory_method_param(node):
+ return
+ self._check_protected_attribute_access(node)
+
+ def _check_classmethod_declaration(self, node):
+ """Checks for uses of classmethod() or staticmethod()
+
+ When a @classmethod or @staticmethod decorator should be used instead.
+ A message will be emitted only if the assignment is at a class scope
+ and only if the classmethod's argument belongs to the class where it
+ is defined.
+ `node` is an assign node.
+ """
+ if not isinstance(node.value, astroid.Call):
+ return
+
+ # check the function called is "classmethod" or "staticmethod"
+ func = node.value.func
+ if not isinstance(func, astroid.Name) or func.name not in (
+ "classmethod",
+ "staticmethod",
+ ):
+ return
+
+ msg = (
+ "no-classmethod-decorator"
+ if func.name == "classmethod"
+ else "no-staticmethod-decorator"
+ )
+ # assignment must be at a class scope
+ parent_class = node.scope()
+ if not isinstance(parent_class, astroid.ClassDef):
+ return
+
+ # Check if the arg passed to classmethod is a class member
+ classmeth_arg = node.value.args[0]
+ if not isinstance(classmeth_arg, astroid.Name):
+ return
+
+ method_name = classmeth_arg.name
+ if any(method_name == member.name for member in parent_class.mymethods()):
+ self.add_message(msg, node=node.targets[0])
+
    def _check_protected_attribute_access(self, node):
        """Given an attribute access node (set or get), check if attribute
        access is legitimate. Call _check_first_attr with node before calling
        this method. Valid cases are:
        * self._attr in a method or cls._attr in a classmethod. Checked by
        _check_first_attr.
        * Klass._attr inside "Klass" class.
        * Klass2._attr inside "Klass" class when Klass2 is a base class of
        Klass.
        """
        attrname = node.attrname

        if (
            is_attr_protected(attrname)
            and attrname not in self.config.exclude_protected
        ):

            klass = node_frame_class(node)

            # In classes, check we are not getting a parent method
            # through the class object or through super
            callee = node.expr.as_string()

            # We are not in a class, no remaining valid case
            if klass is None:
                self.add_message("protected-access", node=node, args=attrname)
                return

            # If the expression begins with a call to super, that's ok.
            if (
                isinstance(node.expr, astroid.Call)
                and isinstance(node.expr.func, astroid.Name)
                and node.expr.func.name == "super"
            ):
                return

            # If the expression begins with a call to type(self), that's ok.
            if self._is_type_self_call(node.expr):
                return

            # We are in a class, one remaining valid cases, Klass._attr inside
            # Klass
            if not (callee == klass.name or callee in klass.basenames):
                # Detect property assignments in the body of the class.
                # This is acceptable:
                #
                # class A:
                #     b = property(lambda: self._b)

                stmt = node.parent.statement()
                if (
                    isinstance(stmt, astroid.Assign)
                    and len(stmt.targets) == 1
                    and isinstance(stmt.targets[0], astroid.AssignName)
                ):
                    name = stmt.targets[0].name
                    if _is_attribute_property(name, klass):
                        return

                # A licit use of protected member is inside a special method
                # (but not for name-mangled __attributes).
                if not attrname.startswith(
                    "__"
                ) and self._is_called_inside_special_method(node):
                    return

                self.add_message("protected-access", node=node, args=attrname)
+
+ @staticmethod
+ def _is_called_inside_special_method(node: astroid.node_classes.NodeNG) -> bool:
+ """
+ Returns true if the node is located inside a special (aka dunder) method
+ """
+ try:
+ frame_name = node.frame().name
+ except AttributeError:
+ return False
+ return frame_name and frame_name in PYMETHODS
+
+ def _is_type_self_call(self, expr):
+ return (
+ isinstance(expr, astroid.Call)
+ and isinstance(expr.func, astroid.Name)
+ and expr.func.name == "type"
+ and len(expr.args) == 1
+ and self._is_mandatory_method_param(expr.args[0])
+ )
+
+ def visit_name(self, node):
+ """check if the name handle an access to a class member
+ if so, register it
+ """
+ if self._first_attrs and (
+ node.name == self._first_attrs[-1] or not self._first_attrs[-1]
+ ):
+ self._meth_could_be_func = False
+
    def _check_accessed_members(self, node, accessed):
        """check that accessed members are defined"""
        # Exception names under which an early access may deliberately be
        # guarded (see the are_exclusive call below).
        excs = ("AttributeError", "Exception", "BaseException")
        for attr, nodes in accessed.items():
            try:
                # is it a class attribute ?
                node.local_attr(attr)
                # yes, stop here
                continue
            except astroid.NotFoundError:
                pass
            # is it an instance attribute of a parent class ?
            try:
                next(node.instance_attr_ancestors(attr))
                # yes, stop here
                continue
            except StopIteration:
                pass
            # is it an instance attribute ?
            try:
                defstmts = node.instance_attr(attr)
            except astroid.NotFoundError:
                pass
            else:
                # filter out augment assignment nodes
                defstmts = [stmt for stmt in defstmts if stmt not in nodes]
                if not defstmts:
                    # only augment assignment for this node, no-member should be
                    # triggered by the typecheck checker
                    continue
                # filter defstmts to only pick the first one when there are
                # several assignments in the same scope
                scope = defstmts[0].scope()
                defstmts = [
                    stmt
                    for i, stmt in enumerate(defstmts)
                    if i == 0 or stmt.scope() is not scope
                ]
                # if there are still more than one, don't attempt to be smarter
                # than we can be
                if len(defstmts) == 1:
                    defstmt = defstmts[0]
                    # check that if the node is accessed in the same method as
                    # it's defined, it's accessed after the initial assignment
                    frame = defstmt.frame()
                    lno = defstmt.fromlineno
                    for _node in nodes:
                        if (
                            _node.frame() is frame
                            and _node.fromlineno < lno
                            and not astroid.are_exclusive(
                                _node.statement(), defstmt, excs
                            )
                        ):
                            self.add_message(
                                "access-member-before-definition",
                                node=_node,
                                args=(attr, lno),
                            )
+
    def _check_first_arg_for_type(self, node, metaclass=0):
        """check the name of first argument, expect:

        * 'self' for a regular method
        * 'cls' for a class method or a metaclass regular method (actually
          valid-classmethod-first-arg value)
        * 'mcs' for a metaclass class method (actually
          valid-metaclass-classmethod-first-arg)
        * not one of the above for a static method
        """
        # don't care about functions with unknown argument (builtins)
        if node.args.args is None:
            return
        if node.args.args:
            first_arg = node.argnames()[0]
        elif node.args.posonlyargs:
            first_arg = node.args.posonlyargs[0].name
        else:
            first_arg = None
        # Pushed here; popped again in leave_functiondef.
        self._first_attrs.append(first_arg)
        first = self._first_attrs[-1]
        # static method
        if node.type == "staticmethod":
            if (
                first_arg == "self"
                or first_arg in self.config.valid_classmethod_first_arg
                or first_arg in self.config.valid_metaclass_classmethod_first_arg
            ):
                self.add_message("bad-staticmethod-argument", args=first, node=node)
                return
            # A static method has no mandatory first parameter to track.
            self._first_attrs[-1] = None
        # class / regular method with no args
        elif not node.args.args and not node.args.posonlyargs:
            self.add_message("no-method-argument", node=node)
        # metaclass
        elif metaclass:
            # metaclass __new__ or classmethod
            if node.type == "classmethod":
                self._check_first_arg_config(
                    first,
                    self.config.valid_metaclass_classmethod_first_arg,
                    node,
                    "bad-mcs-classmethod-argument",
                    node.name,
                )
            # metaclass regular method
            else:
                self._check_first_arg_config(
                    first,
                    self.config.valid_classmethod_first_arg,
                    node,
                    "bad-mcs-method-argument",
                    node.name,
                )
        # regular class
        else:
            # class method
            if node.type == "classmethod" or node.name == "__class_getitem__":
                self._check_first_arg_config(
                    first,
                    self.config.valid_classmethod_first_arg,
                    node,
                    "bad-classmethod-argument",
                    node.name,
                )
            # regular method without self as argument
            elif first != "self":
                self.add_message("no-self-argument", node=node)
+
+ def _check_first_arg_config(self, first, config, node, message, method_name):
+ if first not in config:
+ if len(config) == 1:
+ valid = repr(config[0])
+ else:
+ valid = ", ".join(repr(v) for v in config[:-1])
+ valid = "%s or %r" % (valid, config[-1])
+ self.add_message(message, args=(method_name, valid), node=node)
+
+ def _check_bases_classes(self, node):
+ """check that the given class node implements abstract methods from
+ base classes
+ """
+
+ def is_abstract(method):
+ return method.is_abstract(pass_is_abstract=False)
+
+ # check if this class abstract
+ if class_is_abstract(node):
+ return
+
+ methods = sorted(
+ unimplemented_abstract_methods(node, is_abstract).items(),
+ key=lambda item: item[0],
+ )
+ for name, method in methods:
+ owner = method.parent.frame()
+ if owner is node:
+ continue
+ # owner is not this class, it must be a parent class
+ # check that the ancestor's method is not abstract
+ if name in node.locals:
+ # it is redefined as an attribute or with a descriptor
+ continue
+ self.add_message("abstract-method", node=node, args=(name, owner.name))
+
    def _check_init(self, node):
        """check that the __init__ method call super or ancestors'__init__
        method (unless it is used for type hinting with `typing.overload`)
        """
        if not self.linter.is_message_enabled(
            "super-init-not-called"
        ) and not self.linter.is_message_enabled("non-parent-init-called"):
            return
        klass_node = node.parent.frame()
        # Direct bases that define __init__; entries are removed as their
        # __init__ calls are found.
        to_call = _ancestors_to_call(klass_node)
        not_called_yet = dict(to_call)
        for stmt in node.nodes_of_class(astroid.Call):
            expr = stmt.func
            if not isinstance(expr, astroid.Attribute) or expr.attrname != "__init__":
                continue
            # skip the test if using super
            if (
                isinstance(expr.expr, astroid.Call)
                and isinstance(expr.expr.func, astroid.Name)
                and expr.expr.func.name == "super"
            ):
                return
            try:
                for klass in expr.expr.infer():
                    if klass is astroid.Uninferable:
                        continue
                    # The inferred klass can be super(), which was
                    # assigned to a variable and the `__init__`
                    # was called later.
                    #
                    # base = super()
                    # base.__init__(...)

                    if (
                        isinstance(klass, astroid.Instance)
                        and isinstance(klass._proxied, astroid.ClassDef)
                        and is_builtin_object(klass._proxied)
                        and klass._proxied.name == "super"
                    ):
                        return
                    if isinstance(klass, objects.Super):
                        return
                    try:
                        del not_called_yet[klass]
                    except KeyError:
                        if klass not in to_call:
                            self.add_message(
                                "non-parent-init-called", node=expr, args=klass.name
                            )
            except astroid.InferenceError:
                continue
        for klass, method in not_called_yet.items():
            # typing.overload stubs are not expected to call super().__init__.
            if decorated_with(node, ["typing.overload"]):
                continue
            cls = node_frame_class(method)
            if klass.name == "object" or (cls and cls.name == "object"):
                continue
            self.add_message("super-init-not-called", args=klass.name, node=node)
+
    def _check_signature(self, method1, refmethod, class_type, cls):
        """check that the signature of the two given methods match

        *method1* is the overriding method, *refmethod* the overridden one;
        emits arguments-differ or signature-differs accordingly.
        """
        if not (
            isinstance(method1, astroid.FunctionDef)
            and isinstance(refmethod, astroid.FunctionDef)
        ):
            self.add_message(
                "method-check-failed", args=(method1, refmethod), node=method1
            )
            return

        # Bind both functions to an instance of *cls* so implicit first
        # parameters are compared consistently.
        instance = cls.instantiate_class()
        method1 = function_to_method(method1, instance)
        refmethod = function_to_method(refmethod, instance)

        # Don't care about functions with unknown argument (builtins).
        if method1.args.args is None or refmethod.args.args is None:
            return

        # Ignore private to class methods.
        if is_attr_private(method1.name):
            return
        # Ignore setters, they have an implicit extra argument,
        # which shouldn't be taken in consideration.
        if is_property_setter(method1):
            return

        if _different_parameters(
            refmethod, method1, dummy_parameter_regex=self._dummy_rgx
        ):
            self.add_message(
                "arguments-differ", args=(class_type, method1.name), node=method1
            )
        elif len(method1.args.defaults) < len(refmethod.args.defaults):
            self.add_message(
                "signature-differs", args=(class_type, method1.name), node=method1
            )
+
+ def _uses_mandatory_method_param(self, node):
+ """Check that attribute lookup name use first attribute variable name
+
+ Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
+ """
+ return self._is_mandatory_method_param(node.expr)
+
+ def _is_mandatory_method_param(self, node):
+ """Check if astroid.Name corresponds to first attribute variable name
+
+ Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
+ """
+ return (
+ self._first_attrs
+ and isinstance(node, astroid.Name)
+ and node.name == self._first_attrs[-1]
+ )
+
+
class SpecialMethodsChecker(BaseChecker):
    """Checker which verifies that special methods
    are implemented correctly.
    """

    __implements__ = (IAstroidChecker,)
    name = "classes"
    msgs = {
        "E0301": (
            "__iter__ returns non-iterator",
            "non-iterator-returned",
            "Used when an __iter__ method returns something which is not an "
            "iterable (i.e. has no `%s` method)" % NEXT_METHOD,
            {
                "old_names": [
                    ("W0234", "old-non-iterator-returned-1"),
                    ("E0234", "old-non-iterator-returned-2"),
                ]
            },
        ),
        "E0302": (
            "The special method %r expects %s param(s), %d %s given",
            "unexpected-special-method-signature",
            "Emitted when a special method was defined with an "
            "invalid number of parameters. If it has too few or "
            "too many, it might not work at all.",
            {"old_names": [("E0235", "bad-context-manager")]},
        ),
        "E0303": (
            "__len__ does not return non-negative integer",
            "invalid-length-returned",
            "Used when a __len__ method returns something which is not a "
            "non-negative integer",
            {},
        ),
    }
    priority = -2

    @check_messages(
        "unexpected-special-method-signature",
        "non-iterator-returned",
        "invalid-length-returned",
    )
    def visit_functiondef(self, node):
        """Dispatch each special method to its dedicated check."""
        if not node.is_method():
            return
        if node.name == "__iter__":
            self._check_iter(node)
        if node.name == "__len__":
            self._check_len(node)
        if node.name in PYMETHODS:
            self._check_unexpected_method_signature(node)

    # Async special methods get the very same checks.
    visit_asyncfunctiondef = visit_functiondef

    def _check_unexpected_method_signature(self, node):
        """Verify *node* takes the parameter count its special-method name
        requires (self excluded, unless decorated with @staticmethod)."""
        expected_params = SPECIAL_METHODS_PARAMS[node.name]

        if expected_params is None:
            # This can support a variable number of parameters.
            return
        if not node.args.args and not node.args.vararg:
            # Method has no parameter, will be caught
            # by no-method-argument.
            return

        if decorated_with(node, [BUILTINS + ".staticmethod"]):
            # We expect to not take in consideration self.
            all_args = node.args.args
        else:
            all_args = node.args.args[1:]
        mandatory = len(all_args) - len(node.args.defaults)
        optional = len(node.args.defaults)
        current_params = mandatory + optional

        if isinstance(expected_params, tuple):
            # The expected number of parameters can be any value from this
            # tuple, although the user should implement the method
            # to take all of them in consideration.
            emit = mandatory not in expected_params
            expected_params = "between %d or %d" % expected_params
        else:
            # If the number of mandatory parameters doesn't
            # suffice, the expected parameters for this
            # function will be deduced from the optional
            # parameters.
            rest = expected_params - mandatory
            if rest == 0:
                emit = False
            elif rest < 0:
                emit = True
            elif rest > 0:
                emit = not ((optional - rest) >= 0 or node.args.vararg)

        if emit:
            verb = "was" if current_params <= 1 else "were"
            self.add_message(
                "unexpected-special-method-signature",
                args=(node.name, expected_params, current_params, verb),
                node=node,
            )

    @staticmethod
    def _is_iterator(node):
        """Return True when *node* can plausibly act as an iterator."""
        if node is astroid.Uninferable:
            # Just ignore Uninferable objects.
            return True
        if isinstance(node, Generator):
            # Generators can be itered.
            return True

        if isinstance(node, astroid.Instance):
            try:
                node.local_attr(NEXT_METHOD)
                return True
            except astroid.NotFoundError:
                pass
        elif isinstance(node, astroid.ClassDef):
            # A class is an iterator if its metaclass provides __next__.
            metaclass = node.metaclass()
            if metaclass and isinstance(metaclass, astroid.ClassDef):
                try:
                    metaclass.local_attr(NEXT_METHOD)
                    return True
                except astroid.NotFoundError:
                    pass
        return False

    def _check_iter(self, node):
        # __iter__ must return something usable as an iterator.
        inferred = _safe_infer_call_result(node, node)
        if inferred is not None:
            if not self._is_iterator(inferred):
                self.add_message("non-iterator-returned", node=node)

    def _check_len(self, node):
        # __len__ must return a non-negative integer.
        inferred = _safe_infer_call_result(node, node)
        if not inferred or inferred is astroid.Uninferable:
            return

        if (
            isinstance(inferred, astroid.Instance)
            and inferred.name == "int"
            and not isinstance(inferred, astroid.Const)
        ):
            # Assume it's good enough, since the int() call might wrap
            # something that's uninferable for us
            return

        if not isinstance(inferred, astroid.Const):
            self.add_message("invalid-length-returned", node=node)
            return

        value = inferred.value
        if not isinstance(value, int) or value < 0:
            self.add_message("invalid-length-returned", node=node)
+
+
+def _ancestors_to_call(klass_node, method="__init__"):
+ """return a dictionary where keys are the list of base classes providing
+ the queried method, and so that should/may be called from the method node
+ """
+ to_call = {}
+ for base_node in klass_node.ancestors(recurs=False):
+ try:
+ to_call[base_node] = next(base_node.igetattr(method))
+ except astroid.InferenceError:
+ continue
+ return to_call
+
+
def register(linter):
    """Auto-registration entry point required by pylint plugins."""
    for checker_class in (ClassChecker, SpecialMethodsChecker):
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/design_analysis.py b/venv/Lib/site-packages/pylint/checkers/design_analysis.py
new file mode 100644
index 0000000..50d8eaa
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/design_analysis.py
@@ -0,0 +1,496 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006, 2009-2010, 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012, 2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Mark Miller <725mrm@gmail.com>
+# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Jakub Wilk <jwilk@jwilk.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""check for signs of poor design"""
+
+import re
+from collections import defaultdict
+
+import astroid
+from astroid import BoolOp, If, decorators
+
+from pylint import utils
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker
+
# Message definitions emitted by MisdesignChecker below.
MSGS = {
    "R0901": (
        "Too many ancestors (%s/%s)",
        "too-many-ancestors",
        "Used when class has too many parent classes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0902": (
        "Too many instance attributes (%s/%s)",
        "too-many-instance-attributes",
        "Used when class has too many instance attributes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0903": (
        "Too few public methods (%s/%s)",
        "too-few-public-methods",
        "Used when class has too few public methods, so be sure it's "
        "really worth it.",
    ),
    "R0904": (
        "Too many public methods (%s/%s)",
        "too-many-public-methods",
        "Used when class has too many public methods, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0911": (
        "Too many return statements (%s/%s)",
        "too-many-return-statements",
        "Used when a function or method has too many return statement, "
        "making it hard to follow.",
    ),
    "R0912": (
        "Too many branches (%s/%s)",
        "too-many-branches",
        "Used when a function or method has too many branches, "
        "making it hard to follow.",
    ),
    "R0913": (
        "Too many arguments (%s/%s)",
        "too-many-arguments",
        "Used when a function or method takes too many arguments.",
    ),
    "R0914": (
        "Too many local variables (%s/%s)",
        "too-many-locals",
        "Used when a function or method has too many local variables.",
    ),
    "R0915": (
        "Too many statements (%s/%s)",
        "too-many-statements",
        "Used when a function or method has too many statements. You "
        "should then split it in smaller functions / methods.",
    ),
    "R0916": (
        "Too many boolean expressions in if statement (%s/%s)",
        "too-many-boolean-expressions",
        "Used when an if statement contains too many boolean expressions.",
    ),
}
# Matches dunder names such as __init__ or __iter__.
SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
# Decorator names that mark a class as a plain data container.
DATACLASSES_DECORATORS = frozenset({"dataclass", "attrs"})
DATACLASS_IMPORT = "dataclasses"
# Qualified name used to recognize typing.NamedTuple subclasses.
TYPING_NAMEDTUPLE = "typing.NamedTuple"
+
+
def _is_exempt_from_public_methods(node: astroid.ClassDef) -> bool:
    """Check if a class is exempt from too-few-public-methods.

    Enum subclasses, typing.NamedTuple subclasses and dataclass/attrs
    decorated classes legitimately expose few or no public methods.
    """
    # Enum / typing.NamedTuple ancestry.
    for ancestor in node.ancestors():
        if ancestor.name == "Enum" and ancestor.root().name == "enum":
            return True
        if ancestor.qname() == TYPING_NAMEDTUPLE:
            return True

    # Otherwise only decorated classes can be exempt.
    if not node.decorators:
        return False

    # The dataclass/attrs machinery must actually be imported in the module
    # for a matching decorator name to count.
    root_locals = set(node.root().locals)
    has_dataclass_machinery = bool(
        root_locals.intersection(DATACLASSES_DECORATORS)
        or DATACLASS_IMPORT in root_locals
    )
    for decorator in node.decorators.nodes:
        if isinstance(decorator, astroid.Call):
            decorator = decorator.func
        if isinstance(decorator, astroid.Name):
            name = decorator.name
        elif isinstance(decorator, astroid.Attribute):
            name = decorator.attrname
        else:
            continue
        if name in DATACLASSES_DECORATORS and has_dataclass_machinery:
            return True
    return False
+
+
+def _count_boolean_expressions(bool_op):
+ """Counts the number of boolean expressions in BoolOp `bool_op` (recursive)
+
+ example: a and (b or c or (d and e)) ==> 5 boolean expressions
+ """
+ nb_bool_expr = 0
+ for bool_expr in bool_op.get_children():
+ if isinstance(bool_expr, BoolOp):
+ nb_bool_expr += _count_boolean_expressions(bool_expr)
+ else:
+ nb_bool_expr += 1
+ return nb_bool_expr
+
+
+def _count_methods_in_class(node):
+ all_methods = sum(1 for method in node.methods() if not method.name.startswith("_"))
+ # Special methods count towards the number of public methods,
+ # but don't count towards there being too many methods.
+ for method in node.mymethods():
+ if SPECIAL_OBJ.search(method.name) and method.name != "__init__":
+ all_methods += 1
+ return all_methods
+
+
class MisdesignChecker(BaseChecker):
    """checks for sign of poor/misdesign:
    * number of methods, attributes, local variables...
    * size, complexity of functions, methods
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "design"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options (thresholds backing the R09xx messages above)
    options = (
        (
            "max-args",
            {
                "default": 5,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of arguments for function / method.",
            },
        ),
        (
            "max-locals",
            {
                "default": 15,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of locals for function / method body.",
            },
        ),
        (
            "max-returns",
            {
                "default": 6,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of return / yield for function / "
                "method body.",
            },
        ),
        (
            "max-branches",
            {
                "default": 12,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of branch for function / method body.",
            },
        ),
        (
            "max-statements",
            {
                "default": 50,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of statements in function / method " "body.",
            },
        ),
        (
            "max-parents",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of parents for a class (see R0901).",
            },
        ),
        (
            "max-attributes",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of attributes for a class \
(see R0902).",
            },
        ),
        (
            "min-public-methods",
            {
                "default": 2,
                "type": "int",
                "metavar": "<num>",
                "help": "Minimum number of public methods for a class \
(see R0903).",
            },
        ),
        (
            "max-public-methods",
            {
                "default": 20,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of public methods for a class \
(see R0904).",
            },
        ),
        (
            "max-bool-expr",
            {
                "default": 5,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of boolean expressions in an if "
                "statement (see R0916).",
            },
        ),
    )
+
    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Linter statistics dict; populated in open().
        self.stats = None
        # Counters reset in open(): per-function return counts,
        # per-function branch counts, and pending statement counts
        # (see _inc_all_stmts).
        self._returns = None
        self._branches = None
        self._stmts = None
+
    def open(self):
        """initialize visit variables"""
        self.stats = self.linter.add_stats()
        # Stack of return-statement counts for nested functions.
        self._returns = []
        # Branch counts, keyed per node (defaults to 0).
        self._branches = defaultdict(int)
        # Stack of statement counts; every entry is bumped by _inc_all_stmts.
        self._stmts = []
+
+ def _inc_all_stmts(self, amount):
+ for i in range(len(self._stmts)):
+ self._stmts[i] += amount
+
    @decorators.cachedproperty
    def _ignored_argument_names(self):
        # Pattern of argument names to ignore, shared with other checkers
        # through the global "ignored-argument-names" option (None if unset).
        return utils.get_global_option(self, "ignored-argument-names", default=None)
+
+ @check_messages(
+ "too-many-ancestors",
+ "too-many-instance-attributes",
+ "too-few-public-methods",
+ "too-many-public-methods",
+ )
+ def visit_classdef(self, node):
+ """check size of inheritance hierarchy and number of instance attributes
+ """
+ nb_parents = len(list(node.ancestors()))
+ if nb_parents > self.config.max_parents:
+ self.add_message(
+ "too-many-ancestors",
+ node=node,
+ args=(nb_parents, self.config.max_parents),
+ )
+
+ if len(node.instance_attrs) > self.config.max_attributes:
+ self.add_message(
+ "too-many-instance-attributes",
+ node=node,
+ args=(len(node.instance_attrs), self.config.max_attributes),
+ )
+
    @check_messages("too-few-public-methods", "too-many-public-methods")
    def leave_classdef(self, node):
        """check number of public methods"""
        my_methods = sum(
            1 for method in node.mymethods() if not method.name.startswith("_")
        )

        # Does the class contain more than n public methods ?
        # This checks only the methods defined in the current class,
        # since the user might not have control over the classes
        # from the ancestors. It avoids some false positives
        # for classes such as unittest.TestCase, which provides
        # a lot of assert methods. It doesn't make sense to warn
        # when the user subclasses TestCase to add his own tests.
        if my_methods > self.config.max_public_methods:
            self.add_message(
                "too-many-public-methods",
                node=node,
                args=(my_methods, self.config.max_public_methods),
            )

        # Stop here for exception, metaclass, interface classes and other
        # classes for which we don't need to count the methods.
        if node.type != "class" or _is_exempt_from_public_methods(node):
            return

        # Does the class contain less than n public methods ?
        # This checks all the methods defined by ancestors and
        # by the current class.
        all_methods = _count_methods_in_class(node)
        if all_methods < self.config.min_public_methods:
            self.add_message(
                "too-few-public-methods",
                node=node,
                args=(all_methods, self.config.min_public_methods),
            )
+
    @check_messages(
        "too-many-return-statements",
        "too-many-branches",
        "too-many-arguments",
        "too-many-locals",
        "too-many-statements",
        "keyword-arg-before-vararg",
    )
    def visit_functiondef(self, node):
        """check function name, docstring, arguments, redefinition,
        variable names, max locals
        """
        # init branch and returns counters
        self._returns.append(0)
        # check number of arguments
        args = node.args.args
        ignored_argument_names = self._ignored_argument_names
        if args is not None:
            ignored_args_num = 0
            if ignored_argument_names:
                # Arguments matching the ignored-argument-names regex do not
                # count against the max-args / max-locals limits.
                ignored_args_num = sum(
                    1 for arg in args if ignored_argument_names.match(arg.name)
                )

            argnum = len(args) - ignored_args_num
            if argnum > self.config.max_args:
                # NOTE(review): the threshold uses `argnum` (ignored args
                # excluded) but the message reports `len(args)`; the two can
                # disagree when ignored-argument-names matches — confirm intended.
                self.add_message(
                    "too-many-arguments",
                    node=node,
                    args=(len(args), self.config.max_args),
                )
        else:
            ignored_args_num = 0
        # check number of local variables
        locnum = len(node.locals) - ignored_args_num
        if locnum > self.config.max_locals:
            self.add_message(
                "too-many-locals", node=node, args=(locnum, self.config.max_locals)
            )
        # init new statements counter
        self._stmts.append(1)

    visit_asyncfunctiondef = visit_functiondef
+
+ @check_messages(
+ "too-many-return-statements",
+ "too-many-branches",
+ "too-many-arguments",
+ "too-many-locals",
+ "too-many-statements",
+ )
+ def leave_functiondef(self, node):
+ """most of the work is done here on close:
+ checks for max returns, branch, return in __init__
+ """
+ returns = self._returns.pop()
+ if returns > self.config.max_returns:
+ self.add_message(
+ "too-many-return-statements",
+ node=node,
+ args=(returns, self.config.max_returns),
+ )
+ branches = self._branches[node]
+ if branches > self.config.max_branches:
+ self.add_message(
+ "too-many-branches",
+ node=node,
+ args=(branches, self.config.max_branches),
+ )
+ # check number of statements
+ stmts = self._stmts.pop()
+ if stmts > self.config.max_statements:
+ self.add_message(
+ "too-many-statements",
+ node=node,
+ args=(stmts, self.config.max_statements),
+ )
+
+ leave_asyncfunctiondef = leave_functiondef
+
+ def visit_return(self, _):
+ """count number of returns"""
+ if not self._returns:
+ return # return outside function, reported by the base checker
+ self._returns[-1] += 1
+
    def visit_default(self, node):
        """default visit method -> increments the statements counter if
        necessary
        """
        # Every statement node counts against all enclosing functions.
        if node.is_statement:
            self._inc_all_stmts(1)
+
    def visit_tryexcept(self, node):
        """increments the branches counter"""
        # One branch per except handler, plus one for an optional else clause.
        branches = len(node.handlers)
        if node.orelse:
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)
+
    def visit_tryfinally(self, node):
        """increments the branches counter"""
        # A try/finally counts as two branches: the body and the finalizer.
        self._inc_branch(node, 2)
        self._inc_all_stmts(2)
+
    @check_messages("too-many-boolean-expressions")
    def visit_if(self, node):
        """increments the branches counter and checks boolean expressions"""
        self._check_boolean_expressions(node)
        branches = 1
        # don't double count If nodes coming from some 'elif'
        # (an elif shows up as an orelse containing a single If node)
        if node.orelse and (len(node.orelse) > 1 or not isinstance(node.orelse[0], If)):
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)
+
    def _check_boolean_expressions(self, node):
        """Go through "if" node `node` and counts its boolean expressions

        if the "if" node test is a BoolOp node

        Emits too-many-boolean-expressions when the count exceeds the
        configured max-bool-expr.
        """
        condition = node.test
        if not isinstance(condition, BoolOp):
            return
        nb_bool_expr = _count_boolean_expressions(condition)
        if nb_bool_expr > self.config.max_bool_expr:
            self.add_message(
                "too-many-boolean-expressions",
                node=condition,
                args=(nb_bool_expr, self.config.max_bool_expr),
            )
+
+ def visit_while(self, node):
+ """increments the branches counter"""
+ branches = 1
+ if node.orelse:
+ branches += 1
+ self._inc_branch(node, branches)
+
+ visit_for = visit_while
+
    def _inc_branch(self, node, branchesnum=1):
        """increments the branches counter"""
        # Branches are attributed to the function (scope) containing the node.
        self._branches[node.scope()] += branchesnum
+
+
def register(linter):
    """Required entry point so pylint auto-registers this checker."""
    linter.register_checker(MisdesignChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/exceptions.py b/venv/Lib/site-packages/pylint/checkers/exceptions.py
new file mode 100644
index 0000000..360e1d1
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/exceptions.py
@@ -0,0 +1,546 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2011-2014 Google, Inc.
+# Copyright (c) 2012 Tim Hatch <tim@timhatch.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Martin von Gagern <gagern@google.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checks for various exception related errors."""
+import builtins
+import inspect
+import typing
+
+import astroid
+from astroid.node_classes import NodeNG
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+
+
def _builtin_exceptions():
    """Return the ``__name__`` of every exception class in ``builtins``.

    Note that aliases (e.g. EnvironmentError) collapse to the canonical
    class name (OSError) because the set holds ``__name__`` values.
    """
    return {
        obj.__name__
        for obj in vars(builtins).values()
        if isinstance(obj, type) and issubclass(obj, BaseException)
    }
+
+
def _annotated_unpack_infer(stmt, context=None):
    """
    Recursively generate nodes inferred by the given statement.
    If the inferred value is a list or a tuple, recurse on the elements.
    Returns an iterator which yields tuples in the format
    ('original node', 'inferred node').

    NOTE(review): despite the wording above, container elements are only
    inferred one level deep — there is no actual recursion here.
    """
    if isinstance(stmt, (astroid.List, astroid.Tuple)):
        # For literal containers, infer each element separately and keep
        # the element node (not the container) as the "original node".
        for elt in stmt.elts:
            inferred = utils.safe_infer(elt)
            if inferred and inferred is not astroid.Uninferable:
                yield elt, inferred
        return
    for inferred in stmt.infer(context):
        if inferred is astroid.Uninferable:
            continue
        yield stmt, inferred
+
+
def _is_raising(body: typing.List) -> bool:
    """Return True if any statement in *body* is a ``raise``."""
    return any(isinstance(child, astroid.Raise) for child in body)
+
+
# Handlers catching these names are flagged as overly broad (broad-except),
# unless overridden via the overgeneral-exceptions option.
OVERGENERAL_EXCEPTIONS = ("BaseException", "Exception")
# Canonical name of the builtins module ("builtins" on Python 3).
BUILTINS_NAME = builtins.__name__
+
+MSGS = {
+ "E0701": (
+ "Bad except clauses order (%s)",
+ "bad-except-order",
+ "Used when except clauses are not in the correct order (from the "
+ "more specific to the more generic). If you don't fix the order, "
+ "some exceptions may not be caught by the most specific handler.",
+ ),
+ "E0702": (
+ "Raising %s while only classes or instances are allowed",
+ "raising-bad-type",
+ "Used when something which is neither a class, an instance or a "
+ "string is raised (i.e. a `TypeError` will be raised).",
+ ),
+ "E0703": (
+ "Exception context set to something which is not an exception, nor None",
+ "bad-exception-context",
+ 'Used when using the syntax "raise ... from ...", '
+ "where the exception context is not an exception, "
+ "nor None.",
+ ),
+ "E0704": (
+ "The raise statement is not inside an except clause",
+ "misplaced-bare-raise",
+ "Used when a bare raise is not used inside an except clause. "
+ "This generates an error, since there are no active exceptions "
+ "to be reraised. An exception to this rule is represented by "
+ "a bare raise inside a finally clause, which might work, as long "
+ "as an exception is raised inside the try block, but it is "
+ "nevertheless a code smell that must not be relied upon.",
+ ),
+ "E0710": (
+ "Raising a new style class which doesn't inherit from BaseException",
+ "raising-non-exception",
+ "Used when a new style class which doesn't inherit from "
+ "BaseException is raised.",
+ ),
+ "E0711": (
+ "NotImplemented raised - should raise NotImplementedError",
+ "notimplemented-raised",
+ "Used when NotImplemented is raised instead of NotImplementedError",
+ ),
+ "E0712": (
+ "Catching an exception which doesn't inherit from Exception: %s",
+ "catching-non-exception",
+ "Used when a class which doesn't inherit from "
+ "Exception is used as an exception in an except clause.",
+ ),
+ "W0702": (
+ "No exception type(s) specified",
+ "bare-except",
+ "Used when an except clause doesn't specify exceptions type to catch.",
+ ),
+ "W0703": (
+ "Catching too general exception %s",
+ "broad-except",
+ "Used when an except catches a too general exception, "
+ "possibly burying unrelated errors.",
+ ),
+ "W0705": (
+ "Catching previously caught exception type %s",
+ "duplicate-except",
+ "Used when an except catches a type that was already caught by "
+ "a previous handler.",
+ ),
+ "W0706": (
+ "The except handler raises immediately",
+ "try-except-raise",
+ "Used when an except handler uses raise as its first or only "
+ "operator. This is useless because it raises back the exception "
+ "immediately. Remove the raise operator or the entire "
+ "try-except-raise block!",
+ ),
+ "W0711": (
+ 'Exception to catch is the result of a binary "%s" operation',
+ "binary-op-exception",
+ "Used when the exception to catch is of the form "
+ '"except A or B:". If intending to catch multiple, '
+ 'rewrite as "except (A, B):"',
+ ),
+ "W0715": (
+ "Exception arguments suggest string formatting might be intended",
+ "raising-format-tuple",
+ "Used when passing multiple arguments to an exception "
+ "constructor, the first of them a string literal containing what "
+ "appears to be placeholders intended for formatting",
+ ),
+ "W0716": (
+ "Invalid exception operation. %s",
+ "wrong-exception-operation",
+ "Used when an operation is done against an exception, but the operation "
+ "is not valid for the exception in question. Usually emitted when having "
+ "binary operations between exceptions in except handlers.",
+ ),
+}
+
+
class BaseVisitor:
    """Base class for visitors defined in this module."""

    def __init__(self, checker, node):
        self._checker = checker
        self._node = node

    def visit(self, node):
        """Dispatch to ``visit_<classname>``, falling back to visit_default."""
        handler_name = "visit_" + type(node).__name__.lower()
        handler = getattr(self, handler_name, self.visit_default)
        handler(node)

    def visit_default(self, node):  # pylint: disable=unused-argument
        """Default implementation for all the nodes."""
+
+
class ExceptionRaiseRefVisitor(BaseVisitor):
    """Visit references (anything that is not an AST leaf)."""

    def visit_name(self, name):
        # Raising the NotImplemented singleton instead of NotImplementedError.
        if name.name == "NotImplemented":
            self._checker.add_message("notimplemented-raised", node=self._node)

    def visit_call(self, call):
        if isinstance(call.func, astroid.Name):
            self.visit_name(call.func)
        # Detect raise Exc("%s ...", arg) / raise Exc("{} ...", arg) — the
        # author probably meant to format the message, not pass extra args.
        if len(call.args) <= 1:
            return
        first_arg = call.args[0]
        if not isinstance(first_arg, astroid.Const):
            return
        if not isinstance(first_arg.value, str):
            return
        message = first_arg.value
        if "%" in message or ("{" in message and "}" in message):
            self._checker.add_message("raising-format-tuple", node=self._node)
+
+
class ExceptionRaiseLeafVisitor(BaseVisitor):
    """Visitor for handling leaf kinds of a raise value."""

    def visit_const(self, const):
        # Raising a non-string constant (int, float, ...) is a TypeError.
        if not isinstance(const.value, str):
            # raising-string will be emitted from python3 porting checker.
            self._checker.add_message(
                "raising-bad-type", node=self._node, args=const.value.__class__.__name__
            )

    def visit_instance(self, instance):
        # pylint: disable=protected-access
        # Validate the instance's class rather than the instance itself.
        cls = instance._proxied
        self.visit_classdef(cls)

    # Exception instances have a particular class type
    visit_exceptioninstance = visit_instance

    def visit_classdef(self, cls):
        # Only flag classes whose ancestry is fully known; classes with
        # unknown bases get the benefit of the doubt.
        if not utils.inherit_from_std_ex(cls) and utils.has_known_bases(cls):
            if cls.newstyle:
                self._checker.add_message("raising-non-exception", node=self._node)

    def visit_tuple(self, _):
        self._checker.add_message("raising-bad-type", node=self._node, args="tuple")

    def visit_default(self, node):
        # Anything else (module, function, ...) cannot be raised.
        name = getattr(node, "name", node.__class__.__name__)
        self._checker.add_message("raising-bad-type", node=self._node, args=name)
+
+
class ExceptionsChecker(checkers.BaseChecker):
    """Exception related checks."""

    __implements__ = interfaces.IAstroidChecker

    name = "exceptions"
    msgs = MSGS
    priority = -4
    options = (
        (
            "overgeneral-exceptions",
            {
                "default": OVERGENERAL_EXCEPTIONS,
                "type": "csv",
                "metavar": "<comma-separated class names>",
                "help": "Exceptions that will emit a warning "
                'when being caught. Defaults to "%s".'
                % (", ".join(OVERGENERAL_EXCEPTIONS),),
            },
        ),
    )

    def open(self):
        """Cache the names of the builtin exception classes once per run."""
        self._builtin_exceptions = _builtin_exceptions()
        super(ExceptionsChecker, self).open()

    @utils.check_messages(
        "misplaced-bare-raise",
        "raising-bad-type",
        "raising-non-exception",
        "notimplemented-raised",
        "bad-exception-context",
        "raising-format-tuple",
    )
    def visit_raise(self, node):
        """Check a raise statement: bare-raise placement, the raised value
        and the ``from`` context."""
        if node.exc is None:
            self._check_misplaced_bare_raise(node)
            return

        if node.cause:
            self._check_bad_exception_context(node)

        expr = node.exc
        # Syntactic checks on the raised expression itself.
        ExceptionRaiseRefVisitor(self, node).visit(expr)

        try:
            inferred_value = expr.inferred()[-1]
        except astroid.InferenceError:
            pass
        else:
            if inferred_value:
                # Checks on what the expression evaluates to.
                ExceptionRaiseLeafVisitor(self, node).visit(inferred_value)

    def _check_misplaced_bare_raise(self, node):
        """Warn about a bare ``raise`` that is not inside an except clause."""
        # Filter out if it's present in __exit__.
        scope = node.scope()
        if (
            isinstance(scope, astroid.FunctionDef)
            and scope.is_method()
            and scope.name == "__exit__"
        ):
            return

        current = node
        # Stop when a new scope is generated or when the raise
        # statement is found inside a TryFinally.
        ignores = (astroid.ExceptHandler, astroid.FunctionDef)
        while current and not isinstance(current.parent, ignores):
            current = current.parent

        expected = (astroid.ExceptHandler,)
        if not current or not isinstance(current.parent, expected):
            self.add_message("misplaced-bare-raise", node=node)

    def _check_bad_exception_context(self, node):
        """Verify that the exception context is properly set.

        An exception context can be only `None` or an exception.
        """
        cause = utils.safe_infer(node.cause)
        if cause in (astroid.Uninferable, None):
            return

        if isinstance(cause, astroid.Const):
            # `raise ... from None` is the only allowed constant context.
            if cause.value is not None:
                self.add_message("bad-exception-context", node=node)
        elif not isinstance(cause, astroid.ClassDef) and not utils.inherit_from_std_ex(
            cause
        ):
            self.add_message("bad-exception-context", node=node)

    def _check_catching_non_exception(self, handler, exc, part):
        """Emit catching-non-exception when the caught object (*exc*, the
        inference of *part*) cannot actually be caught."""
        if isinstance(exc, astroid.Tuple):
            # Check if it is a tuple of exceptions.
            inferred = [utils.safe_infer(elt) for elt in exc.elts]
            if any(node is astroid.Uninferable for node in inferred):
                # Don't emit if we don't know every component.
                return
            if all(
                node
                and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
                for node in inferred
            ):
                return

        if not isinstance(exc, astroid.ClassDef):
            # Don't emit the warning if the inferred stmt
            # is None, but the exception handler is something else,
            # maybe it was redefined.
            if isinstance(exc, astroid.Const) and exc.value is None:
                if (
                    isinstance(handler.type, astroid.Const)
                    and handler.type.value is None
                ) or handler.type.parent_of(exc):
                    # If the exception handler catches None or
                    # the exception component, which is None, is
                    # defined by the entire exception handler, then
                    # emit a warning.
                    self.add_message(
                        "catching-non-exception",
                        node=handler.type,
                        args=(part.as_string(),),
                    )
            else:
                self.add_message(
                    "catching-non-exception",
                    node=handler.type,
                    args=(part.as_string(),),
                )
            return

        if (
            not utils.inherit_from_std_ex(exc)
            and exc.name not in self._builtin_exceptions
        ):
            if utils.has_known_bases(exc):
                self.add_message(
                    "catching-non-exception", node=handler.type, args=(exc.name,)
                )

    def _check_try_except_raise(self, node):
        """Emit try-except-raise for handlers that immediately re-raise,
        unless a later handler would catch a subclass of the same type."""

        def gather_exceptions_from_handler(
            handler
        ) -> typing.Optional[typing.List[NodeNG]]:
            # Returns the exception nodes a handler catches, or None when
            # inference fails (bare except handlers included).
            exceptions = []  # type: typing.List[NodeNG]
            if handler.type:
                exceptions_in_handler = utils.safe_infer(handler.type)
                if isinstance(exceptions_in_handler, astroid.Tuple):
                    exceptions = list(
                        {
                            exception
                            for exception in exceptions_in_handler.elts
                            if isinstance(exception, astroid.Name)
                        }
                    )
                elif exceptions_in_handler:
                    exceptions = [exceptions_in_handler]
                else:
                    # Break when we cannot infer anything reliably.
                    return None
            return exceptions

        bare_raise = False
        handler_having_bare_raise = None
        excs_in_bare_handler = []
        for handler in node.handlers:
            if bare_raise:
                # check that subsequent handler is not parent of handler which had bare raise.
                # since utils.safe_infer can fail for bare except, check it before.
                # also break early if bare except is followed by bare except.

                excs_in_current_handler = gather_exceptions_from_handler(handler)

                if not excs_in_current_handler:
                    bare_raise = False
                    break
                if excs_in_bare_handler is None:
                    # It can be `None` when the inference failed
                    break

                for exc_in_current_handler in excs_in_current_handler:
                    inferred_current = utils.safe_infer(exc_in_current_handler)
                    if any(
                        utils.is_subclass_of(
                            utils.safe_infer(exc_in_bare_handler), inferred_current
                        )
                        for exc_in_bare_handler in excs_in_bare_handler
                    ):
                        bare_raise = False
                        break

            # `raise` as the first operator inside the except handler
            if _is_raising([handler.body[0]]):
                # flags when there is a bare raise
                if handler.body[0].exc is None:
                    bare_raise = True
                    handler_having_bare_raise = handler
                    excs_in_bare_handler = gather_exceptions_from_handler(handler)
        else:
            if bare_raise:
                self.add_message("try-except-raise", node=handler_having_bare_raise)

    @utils.check_messages("wrong-exception-operation")
    def visit_binop(self, node):
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V | A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                node.right.as_string(),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages("wrong-exception-operation")
    def visit_compare(self, node):
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V < A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                ", ".join(operand.as_string() for _, operand in node.ops),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages(
        "bare-except",
        "broad-except",
        "try-except-raise",
        "binary-op-exception",
        "bad-except-order",
        "catching-non-exception",
        "duplicate-except",
    )
    def visit_tryexcept(self, node):
        """check for empty except"""
        self._check_try_except_raise(node)
        exceptions_classes = []
        nb_handlers = len(node.handlers)
        for index, handler in enumerate(node.handlers):
            if handler.type is None:
                # Bare except: fine only if the handler re-raises.
                if not _is_raising(handler.body):
                    self.add_message("bare-except", node=handler)

                # check if an "except:" is followed by some other
                # except
                if index < (nb_handlers - 1):
                    msg = "empty except clause should always appear last"
                    self.add_message("bad-except-order", node=node, args=msg)

            elif isinstance(handler.type, astroid.BoolOp):
                # except A or B: — almost certainly meant except (A, B):
                self.add_message(
                    "binary-op-exception", node=handler, args=handler.type.op
                )
            else:
                try:
                    excs = list(_annotated_unpack_infer(handler.type))
                except astroid.InferenceError:
                    continue

                for part, exc in excs:
                    if exc is astroid.Uninferable:
                        continue
                    if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(
                        exc
                    ):
                        # pylint: disable=protected-access
                        exc = exc._proxied

                    self._check_catching_non_exception(handler, exc, part)

                    if not isinstance(exc, astroid.ClassDef):
                        continue

                    exc_ancestors = [
                        anc
                        for anc in exc.ancestors()
                        if isinstance(anc, astroid.ClassDef)
                    ]

                    # A handler for a base class placed before a handler for
                    # one of its subclasses shadows the latter.
                    for previous_exc in exceptions_classes:
                        if previous_exc in exc_ancestors:
                            msg = "%s is an ancestor class of %s" % (
                                previous_exc.name,
                                exc.name,
                            )
                            self.add_message(
                                "bad-except-order", node=handler.type, args=msg
                            )
                    if (
                        exc.name in self.config.overgeneral_exceptions
                        and exc.root().name == utils.EXCEPTIONS_MODULE
                        and not _is_raising(handler.body)
                    ):
                        self.add_message(
                            "broad-except", args=exc.name, node=handler.type
                        )

                    if exc in exceptions_classes:
                        self.add_message(
                            "duplicate-except", args=exc.name, node=handler.type
                        )

                exceptions_classes += [exc for _, exc in excs]
+
+
def register(linter):
    """Required entry point so pylint auto-registers this checker."""
    linter.register_checker(ExceptionsChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/format.py b/venv/Lib/site-packages/pylint/checkers/format.py
new file mode 100644
index 0000000..c4cad31
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/format.py
@@ -0,0 +1,1332 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2015 Google, Inc.
+# Copyright (c) 2013 moxian <aleftmail@inbox.ru>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 frost-nzcr4 <frost.nzcr4@jagmort.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
+# Copyright (c) 2015 Fabio Natali <me@fabionatali.com>
+# Copyright (c) 2015 Harut <yes@harutune.name>
+# Copyright (c) 2015 Mihai Balint <balint.mihai@gmail.com>
+# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Petr Pulc <petrpulc@gmail.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Krzysztof Czapla <k.czapla68@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 James M. Allen <james.m.allen@gmail.com>
+# Copyright (c) 2017 vinnyrose <vinnyrose@users.noreply.github.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.guinta@protonmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Fureigh <rhys.fureigh@gsa.gov>
+# Copyright (c) 2018 Pierre Sassoulas <pierre.sassoulas@wisebim.fr>
+# Copyright (c) 2018 Andreas Freimuth <andreas.freimuth@united-bits.de>
+# Copyright (c) 2018 Jakub Wilk <jwilk@jwilk.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Python code format's checker.
+
+By default try to follow Guido's style guide :
+
+https://www.python.org/doc/essays/styleguide/
+
+Some parts of the process_token method is based from The Tab Nanny std module.
+"""
+
+import keyword
+import tokenize
+from functools import reduce # pylint: disable=redefined-builtin
+
+from astroid import nodes
+
+from pylint.checkers import BaseTokenChecker
+from pylint.checkers.utils import check_messages
+from pylint.constants import OPTION_RGX, WarningScope
+from pylint.interfaces import IAstroidChecker, IRawChecker, ITokenChecker
+
+_ASYNC_TOKEN = "async"
+_CONTINUATION_BLOCK_OPENERS = [
+ "elif",
+ "except",
+ "for",
+ "if",
+ "while",
+ "def",
+ "class",
+ "with",
+]
+_KEYWORD_TOKENS = [
+ "assert",
+ "del",
+ "elif",
+ "except",
+ "for",
+ "if",
+ "in",
+ "not",
+ "raise",
+ "return",
+ "while",
+ "yield",
+ "with",
+]
+
+_SPACED_OPERATORS = [
+ "==",
+ "<",
+ ">",
+ "!=",
+ "<>",
+ "<=",
+ ">=",
+ "+=",
+ "-=",
+ "*=",
+ "**=",
+ "/=",
+ "//=",
+ "&=",
+ "|=",
+ "^=",
+ "%=",
+ ">>=",
+ "<<=",
+]
+_OPENING_BRACKETS = ["(", "[", "{"]
+_CLOSING_BRACKETS = [")", "]", "}"]
+_TAB_LENGTH = 8
+
+_EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT])
+_JUNK_TOKENS = (tokenize.COMMENT, tokenize.NL)
+
+# Whitespace checking policy constants
+_MUST = 0
+_MUST_NOT = 1
+_IGNORE = 2
+
+# Whitespace checking config constants
+_DICT_SEPARATOR = "dict-separator"
+_TRAILING_COMMA = "trailing-comma"
+_EMPTY_LINE = "empty-line"
+_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR, _EMPTY_LINE]
+_DEFAULT_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR]
+
+MSGS = {
+ "C0301": (
+ "Line too long (%s/%s)",
+ "line-too-long",
+ "Used when a line is longer than a given number of characters.",
+ ),
+ "C0302": (
+ "Too many lines in module (%s/%s)", # was W0302
+ "too-many-lines",
+ "Used when a module has too many lines, reducing its readability.",
+ ),
+ "C0303": (
+ "Trailing whitespace",
+ "trailing-whitespace",
+ "Used when there is whitespace between the end of a line and the newline.",
+ ),
+ "C0304": (
+ "Final newline missing",
+ "missing-final-newline",
+ "Used when the last line in a file is missing a newline.",
+ ),
+ "C0305": (
+ "Trailing newlines",
+ "trailing-newlines",
+ "Used when there are trailing blank lines in a file.",
+ ),
+ "W0311": (
+ "Bad indentation. Found %s %s, expected %s",
+ "bad-indentation",
+ "Used when an unexpected number of indentation's tabulations or "
+ "spaces has been found.",
+ ),
+ "C0330": ("Wrong %s indentation%s%s.\n%s%s", "bad-continuation", "TODO"),
+ "W0312": (
+ "Found indentation with %ss instead of %ss",
+ "mixed-indentation",
+ "Used when there are some mixed tabs and spaces in a module.",
+ ),
+ "W0301": (
+ "Unnecessary semicolon", # was W0106
+ "unnecessary-semicolon",
+ 'Used when a statement is ended by a semi-colon (";"), which '
+ "isn't necessary (that's python, not C ;).",
+ ),
+ "C0321": (
+ "More than one statement on a single line",
+ "multiple-statements",
+ "Used when more than on statement are found on the same line.",
+ {"scope": WarningScope.NODE},
+ ),
+ "C0325": (
+ "Unnecessary parens after %r keyword",
+ "superfluous-parens",
+ "Used when a single item in parentheses follows an if, for, or "
+ "other keyword.",
+ ),
+ "C0326": (
+ "%s space %s %s %s\n%s",
+ "bad-whitespace",
+ (
+ "Used when a wrong number of spaces is used around an operator, "
+ "bracket or block opener."
+ ),
+ {
+ "old_names": [
+ ("C0323", "no-space-after-operator"),
+ ("C0324", "no-space-after-comma"),
+ ("C0322", "no-space-before-operator"),
+ ]
+ },
+ ),
+ "C0327": (
+ "Mixed line endings LF and CRLF",
+ "mixed-line-endings",
+ "Used when there are mixed (LF and CRLF) newline signs in a file.",
+ ),
+ "C0328": (
+ "Unexpected line ending format. There is '%s' while it should be '%s'.",
+ "unexpected-line-ending-format",
+ "Used when there is different newline than expected.",
+ ),
+}
+
+
def _underline_token(token):
    """Return the token's source line with ``^`` markers underneath it.

    *token* is a tokenize 5-tuple: indices 2/3 are the (row, col) start/end
    positions and index 4 is the physical source line.
    """
    start_col = token[2][1]
    end_col = token[3][1]
    referenced_line = token[4]
    # If the referenced line does not end with a newline char, fix it
    if referenced_line[-1] != "\n":
        referenced_line += "\n"
    underline = " " * start_col + "^" * (end_col - start_col)
    return referenced_line + underline
+
+
def _column_distance(token1, token2):
    """Number of columns between two tokens on the same physical line.

    Returns 0 for identical tokens and None when the first token does not
    end on the line where the second one starts.
    """
    if token1 == token2:
        return 0
    # Order the pair by end position so "first" precedes "second".
    first, second = sorted((token1, token2), key=lambda tok: tok[3])
    if first[3][0] != second[2][0]:
        return None
    return second[2][1] - first[3][1]
+
+
def _last_token_on_line_is(tokens, line_end, token):
    """Return True if *token* is the last meaningful token before *line_end*.

    Matches when the token sits directly before the line end, or one slot
    earlier with only a comment in between.  Note that ``and`` binds
    tighter than ``or`` in the expression below.
    """
    return (
        line_end > 0
        and tokens.token(line_end - 1) == token
        or line_end > 1
        and tokens.token(line_end - 2) == token
        and tokens.type(line_end - 1) == tokenize.COMMENT
    )
+
+
def _token_followed_by_eol(tokens, position):
    """Return True if the token at *position* ends its physical line.

    True when the next token is a NL, or a comment followed by a NL.
    Note that ``and`` binds tighter than ``or`` in the expression below.
    """
    return (
        tokens.type(position + 1) == tokenize.NL
        or tokens.type(position + 1) == tokenize.COMMENT
        and tokens.type(position + 2) == tokenize.NL
    )
+
+
def _get_indent_string(line):
    """Return the indention string of the given line."""
    # The indent is whatever lstrip(" \t") removes from the front.
    stripped = line.lstrip(" \t")
    return line[: len(line) - len(stripped)]
+
+
def _get_indent_length(line):
    """Return the length of the indentation on the given token's line.

    Spaces count as one column; tabs count as _TAB_LENGTH columns.
    """
    length = 0
    for char in line:
        if char not in " \t":
            break
        length += 1 if char == " " else _TAB_LENGTH
    return length
+
+
def _get_indent_hint_line(bar_positions, bad_position):
    """Return a line with |s for each of the positions in the given lists.

    Returns a (marker_line, delta_message) pair; delta_message is non-empty
    only when there is a single expected position to point at.
    """
    if not bar_positions:
        return "", ""

    # Convert indent strings to display columns (tabs expand to 8 columns).
    bar_positions = [_get_indent_length(indent) for indent in bar_positions]
    bad_position = _get_indent_length(bad_position)
    delta_message = ""
    markers = [(pos, "|") for pos in bar_positions]
    if len(markers) == 1:
        # if we have only one marker we'll provide an extra hint on how to fix
        expected_position = markers[0][0]
        delta = abs(expected_position - bad_position)
        direction = "add" if expected_position > bad_position else "remove"
        delta_message = _CONTINUATION_HINT_MESSAGE % (
            direction,
            delta,
            "s" if delta > 1 else "",
        )
    markers.append((bad_position, "^"))
    markers.sort()
    # Render every marker on one line; "^" marks the offending column.
    line = [" "] * (markers[-1][0] + 1)
    for position, marker in markers:
        line[position] = marker
    return "".join(line), delta_message
+
+
class _ContinuedIndent:
    """Bookkeeping record for one open continuation (bracket/block) context."""

    __slots__ = (
        "valid_outdent_strings",
        "valid_continuation_strings",
        "context_type",
        "token",
        "position",
    )

    def __init__(
        self,
        context_type,
        token,
        position,
        valid_outdent_strings,
        valid_continuation_strings,
    ):
        self.context_type = context_type
        self.token = token
        self.position = position
        self.valid_outdent_strings = valid_outdent_strings
        self.valid_continuation_strings = valid_continuation_strings
+
+
# The contexts for hanging indents.
# A hanging indented dictionary value after :
HANGING_DICT_VALUE = "dict-value"
# Hanging indentation in an expression.
HANGING = "hanging"
# Hanging indentation in a block header.
HANGING_BLOCK = "hanging-block"
# Continued indentation inside an expression.
CONTINUED = "continued"
# Continued indentation in a block header.
CONTINUED_BLOCK = "continued-block"

# Block-body markers used by _BeforeBlockIndentations: whether a block
# statement keeps its body on the same physical line or on following lines.
SINGLE_LINE = "single"
WITH_BODY = "multi"

# Maps each continuation context to the (indent kind, location) message
# fragments used when reporting a bad continuation.
_CONTINUATION_MSG_PARTS = {
    HANGING_DICT_VALUE: ("hanging", " in dict value"),
    HANGING: ("hanging", ""),
    HANGING_BLOCK: ("hanging", " before block"),
    CONTINUED: ("continued", ""),
    CONTINUED_BLOCK: ("continued", " before block"),
}

_CONTINUATION_HINT_MESSAGE = " (%s %d space%s)" # Ex: (remove 2 spaces)
+
+
def _Indentations(*args):
    """Valid indentation strings for a continued line (each mapped to None)."""
    return dict.fromkeys(args)
+
+
def _BeforeBlockIndentations(single, with_body):
    """Valid alternative indentation strings for continued lines before blocks.

    :param str single: Valid indentation string for statements on a single logical line.
    :param str with_body: Valid indentation string for statements on several lines.

    :returns: A dictionary mapping indent strings to a constant representing
        whether the indent is for a single-line or multi-line block body.
    :rtype: dict
    """
    return {single: SINGLE_LINE, with_body: WITH_BODY}
+
+
class TokenWrapper:
    """A wrapper for readable access to token information."""

    def __init__(self, tokens):
        self._tokens = tokens

    def _tok(self, idx):
        # Raw tokenize 5-tuple (type, string, start, end, line) at *idx*.
        return self._tokens[idx]

    def token(self, idx):
        """Return the token string at *idx*."""
        return self._tok(idx)[1]

    def type(self, idx):
        """Return the tokenize token type at *idx*."""
        return self._tok(idx)[0]

    def start_line(self, idx):
        """Return the (1-based) starting row of the token at *idx*."""
        return self._tok(idx)[2][0]

    def start_col(self, idx):
        """Return the (0-based) starting column of the token at *idx*."""
        return self._tok(idx)[2][1]

    def line(self, idx):
        """Return the physical source line the token at *idx* appears on."""
        return self._tok(idx)[4]

    def line_indent(self, idx):
        """Get the string of TABs and Spaces used for indentation of the line of this token"""
        return _get_indent_string(self.line(idx))

    def token_indent(self, idx):
        """Get an indentation string for hanging indentation, consisting of the line-indent plus
        a number of spaces to fill up to the column of this token.

        e.g. the token indent for foo
        in "<TAB><TAB>print(foo)"
        is "<TAB><TAB>      "
        """
        prefix = self.line_indent(idx)
        padding = " " * (self.start_col(idx) - len(prefix))
        return prefix + padding
+
+
class ContinuedLineState:
    """Tracker for continued indentation inside a logical line."""

    def __init__(self, tokens, config):
        # Index of the first non-junk token of the current physical line
        # (-1 until one has been seen).
        self._line_start = -1
        # Stack of _ContinuedIndent records, one per currently-open
        # bracket / lambda / dict-value colon.
        self._cont_stack = []
        # Whether the current logical line starts with a block-opener keyword.
        self._is_block_opener = False
        # Warnings deferred until we know whether the block body shares the
        # opener's line; see add_block_warning().
        self.retained_warnings = []
        self._config = config
        self._tokens = TokenWrapper(tokens)

    @property
    def has_content(self):
        # True while at least one continuation context is open.
        return bool(self._cont_stack)

    @property
    def _block_indent_string(self):
        # Configured indent unit with the escaped "\t" sequence expanded to a
        # real TAB (config files do not interpret backslash escapes).
        return self._config.indent_string.replace("\\t", "\t")

    @property
    def _continuation_string(self):
        # Continuation indent: indent-after-paren repetitions of the indent
        # unit's first character (a space or a TAB).
        return self._block_indent_string[0] * self._config.indent_after_paren

    @property
    def _continuation_size(self):
        return self._config.indent_after_paren

    def handle_line_start(self, pos):
        """Record the first non-junk token at the start of a line."""
        if self._line_start > -1:
            return

        check_token_position = pos
        # Look past a leading "async" so "async def"/"async for" still count
        # as block openers.
        if self._tokens.token(pos) == _ASYNC_TOKEN:
            check_token_position += 1
        self._is_block_opener = (
            self._tokens.token(check_token_position) in _CONTINUATION_BLOCK_OPENERS
        )
        self._line_start = pos

    def next_physical_line(self):
        """Prepares the tracker for a new physical line (NL)."""
        self._line_start = -1
        self._is_block_opener = False

    def next_logical_line(self):
        """Prepares the tracker for a new logical line (NEWLINE).

        A new logical line only starts with block indentation.
        """
        self.next_physical_line()
        self.retained_warnings = []
        self._cont_stack = []

    def add_block_warning(self, token_position, state, valid_indentations):
        # Defer the warning; _process_retained_warnings decides later whether
        # it actually applies.
        self.retained_warnings.append((token_position, state, valid_indentations))

    def get_valid_indentations(self, idx):
        """Returns the valid offsets for the token at the given position."""
        # The closing brace on a dict or the 'for' in a dict comprehension may
        # reset two indent levels because the dict value is ended implicitly
        stack_top = -1
        if (
            self._tokens.token(idx) in ("}", "for")
            and self._cont_stack[-1].token == ":"
        ):
            stack_top = -2
        indent = self._cont_stack[stack_top]
        if self._tokens.token(idx) in _CLOSING_BRACKETS:
            valid_indentations = indent.valid_outdent_strings
        else:
            valid_indentations = indent.valid_continuation_strings
        # Return a copy so callers may add extra valid indents without
        # mutating the stacked record.
        return indent, valid_indentations.copy()

    def _hanging_indent_after_bracket(self, bracket, position):
        """Extracts indentation information for a hanging indent

        Case of hanging indent after a bracket (including parenthesis)

        :param str bracket: bracket in question
        :param int position: Position of bracket in self._tokens

        :returns: the state and valid positions for hanging indentation
        :rtype: _ContinuedIndent
        """
        indentation = self._tokens.line_indent(position)
        if (
            self._is_block_opener
            and self._continuation_string == self._block_indent_string
        ):
            return _ContinuedIndent(
                HANGING_BLOCK,
                bracket,
                position,
                _Indentations(indentation + self._continuation_string, indentation),
                _BeforeBlockIndentations(
                    indentation + self._continuation_string,
                    indentation + self._continuation_string * 2,
                ),
            )
        if bracket == ":":
            # If the dict key was on the same line as the open brace, the new
            # correct indent should be relative to the key instead of the
            # current indent level
            paren_align = self._cont_stack[-1].valid_outdent_strings
            next_align = self._cont_stack[-1].valid_continuation_strings.copy()
            next_align_keys = list(next_align.keys())
            next_align[next_align_keys[0] + self._continuation_string] = True
            # Note that the continuation of
            # d = {
            #     'a': 'b'
            #          'c'
            # }
            # is handled by the special-casing for hanging continued string indents.
            return _ContinuedIndent(
                HANGING_DICT_VALUE, bracket, position, paren_align, next_align
            )
        return _ContinuedIndent(
            HANGING,
            bracket,
            position,
            _Indentations(indentation, indentation + self._continuation_string),
            _Indentations(indentation + self._continuation_string),
        )

    def _continuation_inside_bracket(self, bracket, position):
        """Extracts indentation information for a continued indent."""
        indentation = self._tokens.line_indent(position)
        token_indent = self._tokens.token_indent(position)
        next_token_indent = self._tokens.token_indent(position + 1)
        if (
            self._is_block_opener
            and next_token_indent == indentation + self._block_indent_string
        ):
            return _ContinuedIndent(
                CONTINUED_BLOCK,
                bracket,
                position,
                _Indentations(token_indent),
                _BeforeBlockIndentations(
                    next_token_indent, next_token_indent + self._continuation_string
                ),
            )
        return _ContinuedIndent(
            CONTINUED,
            bracket,
            position,
            _Indentations(token_indent, next_token_indent),
            _Indentations(next_token_indent),
        )

    def pop_token(self):
        # Close the innermost continuation context.
        self._cont_stack.pop()

    def push_token(self, token, position):
        """Pushes a new token for continued indentation on the stack.

        Tokens that can modify continued indentation offsets are:
          * opening brackets
          * 'lambda'
          * : inside dictionaries

        push_token relies on the caller to filter out those
        interesting tokens.

        :param int token: The concrete token
        :param int position: The position of the token in the stream.
        """
        # A token that ends its line opens a hanging indent; otherwise the
        # continuation aligns with the token(s) on the same line.
        if _token_followed_by_eol(self._tokens, position):
            self._cont_stack.append(self._hanging_indent_after_bracket(token, position))
        else:
            self._cont_stack.append(self._continuation_inside_bracket(token, position))
+
+
class FormatChecker(BaseTokenChecker):
    """checks for :
    * unauthorized constructions
    * strict indentation
    * line length
    """

    __implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)

    # configuration section name
    name = "format"
    # messages
    msgs = MSGS
    # configuration options
    # for available dict keys/values see the optik parser 'add_option' method
    options = (
        (
            "max-line-length",
            {
                "default": 100,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of characters on a single line.",
            },
        ),
        (
            "ignore-long-lines",
            {
                "type": "regexp",
                "metavar": "<regexp>",
                "default": r"^\s*(# )?<?https?://\S+>?$",
                "help": (
                    "Regexp for a line that is allowed to be longer than " "the limit."
                ),
            },
        ),
        (
            "single-line-if-stmt",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": (
                    "Allow the body of an if to be on the same "
                    "line as the test if there is no else."
                ),
            },
        ),
        (
            "single-line-class-stmt",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": (
                    "Allow the body of a class to be on the same "
                    "line as the declaration if body contains "
                    "single statement."
                ),
            },
        ),
        (
            "no-space-check",
            {
                "default": ",".join(_DEFAULT_NO_SPACE_CHECK_CHOICES),
                "metavar": ",".join(_NO_SPACE_CHECK_CHOICES),
                "type": "multiple_choice",
                "choices": _NO_SPACE_CHECK_CHOICES,
                "help": (
                    "List of optional constructs for which whitespace "
                    "checking is disabled. "
                    "`" + _DICT_SEPARATOR + "` is used to allow tabulation "
                    "in dicts, etc.: {1 : 1,\\n222: 2}. "
                    "`" + _TRAILING_COMMA + "` allows a space between comma "
                    "and closing bracket: (a, ). "
                    "`" + _EMPTY_LINE + "` allows space-only lines."
                ),
            },
        ),
        (
            "max-module-lines",
            {
                "default": 1000,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of lines in a module.",
            },
        ),
        (
            "indent-string",
            {
                "default": " ",
                "type": "non_empty_string",
                "metavar": "<string>",
                "help": "String used as indentation unit. This is usually "
                '" " (4 spaces) or "\\t" (1 tab).',
            },
        ),
        (
            "indent-after-paren",
            {
                "type": "int",
                "metavar": "<int>",
                "default": 4,
                "help": "Number of spaces of indent required inside a hanging "
                "or continued line.",
            },
        ),
        (
            "expected-line-ending-format",
            {
                "type": "choice",
                "metavar": "<empty or LF or CRLF>",
                "default": "",
                "choices": ["", "LF", "CRLF"],
                "help": (
                    "Expected format of line ending, "
                    "e.g. empty (any line ending), LF or CRLF."
                ),
            },
        ),
    )

    def __init__(self, linter=None):
        BaseTokenChecker.__init__(self, linter)
        self._lines = None  # {line number: line text} for the current module
        self._visited_lines = None  # {line number: 1 checked / 2 already warned}
        # Innermost-first stack of open brackets; seeded with None so the top
        # is always comparable in _inside_brackets().
        self._bracket_stack = [None]

    def _pop_token(self):
        """Close the innermost bracket context on both tracking stacks."""
        self._bracket_stack.pop()
        self._current_line.pop_token()

    def _push_token(self, token, idx):
        """Open a new bracket context on both tracking stacks."""
        self._bracket_stack.append(token)
        self._current_line.push_token(token, idx)

    def new_line(self, tokens, line_end, line_start):
        """a new line has been encountered, process it if necessary"""
        if _last_token_on_line_is(tokens, line_end, ";"):
            self.add_message("unnecessary-semicolon", line=tokens.start_line(line_end))

        line_num = tokens.start_line(line_start)
        line = tokens.line(line_start)
        # Junk tokens (e.g. comments/NL) do not represent a code line worth
        # recording or length-checking.
        if tokens.type(line_start) not in _JUNK_TOKENS:
            self._lines[line_num] = line.split("\n")[0]
        self.check_lines(line, line_num)

    def process_module(self, _module):
        # Reset per-module state; the set is consulted (not filled) by
        # _check_keyword_parentheses.
        self._keywords_with_parens = set()

    def _check_keyword_parentheses(self, tokens, start):
        """Check that there are not unnecessary parens after a keyword.

        Parens are unnecessary if there is exactly one balanced outer pair on a
        line, and it is followed by a colon, and contains no commas (i.e. is not a
        tuple).

        Args:
            tokens: list of Tokens; the entire list of Tokens.
            start: int; the position of the keyword in the token list.
        """
        # If the next token is not a paren, we're fine.
        if self._inside_brackets(":") and tokens[start][1] == "for":
            self._pop_token()
        if tokens[start + 1][1] != "(":
            return

        found_and_or = False
        depth = 0
        keyword_token = str(tokens[start][1])
        line_num = tokens[start][2][0]

        for i in range(start, len(tokens) - 1):
            token = tokens[i]

            # If we hit a newline, then assume any parens were for continuation.
            if token[0] == tokenize.NL:
                return

            if token[1] == "(":
                depth += 1
            elif token[1] == ")":
                depth -= 1
                if depth:
                    continue
                # ')' can't happen after if (foo), since it would be a syntax error.
                if tokens[i + 1][1] in (":", ")", "]", "}", "in") or tokens[i + 1][
                    0
                ] in (tokenize.NEWLINE, tokenize.ENDMARKER, tokenize.COMMENT):
                    # The empty tuple () is always accepted.
                    if i == start + 2:
                        return
                    if keyword_token == "not":
                        if not found_and_or:
                            self.add_message(
                                "superfluous-parens", line=line_num, args=keyword_token
                            )
                    elif keyword_token in ("return", "yield"):
                        self.add_message(
                            "superfluous-parens", line=line_num, args=keyword_token
                        )
                    elif keyword_token not in self._keywords_with_parens:
                        if not found_and_or:
                            self.add_message(
                                "superfluous-parens", line=line_num, args=keyword_token
                            )
                return
            elif depth == 1:
                # This is a tuple, which is always acceptable.
                if token[1] == ",":
                    return
                # 'and' and 'or' are the only boolean operators with lower precedence
                # than 'not', so parens are only required when they are found.
                if token[1] in ("and", "or"):
                    found_and_or = True
                # A yield inside an expression must always be in parentheses,
                # quit early without error.
                elif token[1] == "yield":
                    return
                # A generator expression always has a 'for' token in it, and
                # the 'for' token is only legal inside parens when it is in a
                # generator expression.  The parens are necessary here, so bail
                # without an error.
                elif token[1] == "for":
                    return

    def _opening_bracket(self, tokens, i):
        """Handle an opening bracket: track it and check the space before it."""
        self._push_token(tokens[i][1], i)
        # Special case: ignore slices
        if tokens[i][1] == "[" and tokens[i + 1][1] == ":":
            return

        # No space allowed before a call/subscript bracket (preceded by a
        # non-keyword name or a closing bracket); otherwise only the inside
        # of the bracket is constrained.
        if i > 0 and (
            tokens[i - 1][0] == tokenize.NAME
            and not (keyword.iskeyword(tokens[i - 1][1]))
            or tokens[i - 1][1] in _CLOSING_BRACKETS
        ):
            self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
        else:
            self._check_space(tokens, i, (_IGNORE, _MUST_NOT))

    def _closing_bracket(self, tokens, i):
        """Handle a closing bracket: unwind the stack and check spacing."""
        # An implicit dict-value context (":") closes together with its brace.
        if self._inside_brackets(":"):
            self._pop_token()
        self._pop_token()
        # Special case: ignore slices
        if tokens[i - 1][1] == ":" and tokens[i][1] == "]":
            return
        policy_before = _MUST_NOT
        if tokens[i][1] in _CLOSING_BRACKETS and tokens[i - 1][1] == ",":
            if _TRAILING_COMMA in self.config.no_space_check:
                policy_before = _IGNORE

        self._check_space(tokens, i, (policy_before, _IGNORE))

    def _has_valid_type_annotation(self, tokens, i):
        """Extended check of PEP-484 type hint presence"""
        if not self._inside_brackets("("):
            return False
        # token_info
        # type string start end line
        #  0      1     2    3    4
        bracket_level = 0
        for token in tokens[i - 1 :: -1]:
            if token[1] == ":":
                return True
            if token[1] == "(":
                return False
            if token[1] == "]":
                bracket_level += 1
            elif token[1] == "[":
                bracket_level -= 1
            elif token[1] == ",":
                if not bracket_level:
                    return False
            elif token[1] in (".", "..."):
                continue
            elif token[0] not in (tokenize.NAME, tokenize.STRING, tokenize.NL):
                return False
        return False

    def _check_equals_spacing(self, tokens, i):
        """Check the spacing of a single equals sign."""
        if self._has_valid_type_annotation(tokens, i):
            self._check_space(tokens, i, (_MUST, _MUST))
        elif self._inside_brackets("(") or self._inside_brackets("lambda"):
            self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
        else:
            self._check_space(tokens, i, (_MUST, _MUST))

    def _open_lambda(self, tokens, i):  # pylint:disable=unused-argument
        # "lambda" opens a context where "=" is a default-value separator.
        self._push_token("lambda", i)

    def _handle_colon(self, tokens, i):
        """Check spacing around ':' and track lambda/dict-value contexts."""
        # Special case: ignore slices
        if self._inside_brackets("["):
            return
        if self._inside_brackets("{") and _DICT_SEPARATOR in self.config.no_space_check:
            policy = (_IGNORE, _IGNORE)
        else:
            policy = (_MUST_NOT, _MUST)
        self._check_space(tokens, i, policy)

        if self._inside_brackets("lambda"):
            self._pop_token()
        elif self._inside_brackets("{"):
            self._push_token(":", i)

    def _handle_comma(self, tokens, i):
        """Check spacing around ',' and close an open dict-value context."""
        # Only require a following whitespace if this is
        # not a hanging comma before a closing bracket.
        if tokens[i + 1][1] in _CLOSING_BRACKETS:
            self._check_space(tokens, i, (_MUST_NOT, _IGNORE))
        else:
            self._check_space(tokens, i, (_MUST_NOT, _MUST))
        if self._inside_brackets(":"):
            self._pop_token()

    def _check_surrounded_by_space(self, tokens, i):
        """Check that a binary operator is surrounded by exactly one space."""
        self._check_space(tokens, i, (_MUST, _MUST))

    def _check_space(self, tokens, i, policies):
        """Emit bad-whitespace when the space before/after token *i* violates
        the given (before, after) policies (_MUST / _MUST_NOT / _IGNORE)."""

        def _policy_string(policy):
            if policy == _MUST:
                return "Exactly one", "required"
            return "No", "allowed"

        def _name_construct(token):
            # Human-readable name of the construct for the message text.
            if token[1] == ",":
                return "comma"
            if token[1] == ":":
                return ":"
            if token[1] in "()[]{}":
                return "bracket"
            if token[1] in ("<", ">", "<=", ">=", "!=", "=="):
                return "comparison"
            if self._inside_brackets("("):
                return "keyword argument assignment"
            return "assignment"

        good_space = [True, True]
        token = tokens[i]
        pairs = [(tokens[i - 1], token), (token, tokens[i + 1])]

        for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
            if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
                continue

            distance = _column_distance(*token_pair)
            if distance is None:
                continue
            good_space[other_idx] = (policy == _MUST and distance == 1) or (
                policy == _MUST_NOT and distance == 0
            )

        warnings = []
        if not any(good_space) and policies[0] == policies[1]:
            # Both sides are wrong with the same policy: one "around" message.
            warnings.append((policies[0], "around"))
        else:
            for ok, policy, position in zip(good_space, policies, ("before", "after")):
                if not ok:
                    warnings.append((policy, position))
        for policy, position in warnings:
            construct = _name_construct(token)
            count, state = _policy_string(policy)
            self.add_message(
                "bad-whitespace",
                line=token[2][0],
                args=(count, state, position, construct, _underline_token(token)),
                col_offset=token[2][1],
            )

    def _inside_brackets(self, left):
        """True if the innermost open bracket context is *left*."""
        return self._bracket_stack[-1] == left

    def _prepare_token_dispatcher(self):
        """Build the {token string: handler} dispatch table."""
        raw = [
            (_KEYWORD_TOKENS, self._check_keyword_parentheses),
            (_OPENING_BRACKETS, self._opening_bracket),
            (_CLOSING_BRACKETS, self._closing_bracket),
            (["="], self._check_equals_spacing),
            (_SPACED_OPERATORS, self._check_surrounded_by_space),
            ([","], self._handle_comma),
            ([":"], self._handle_colon),
            (["lambda"], self._open_lambda),
        ]

        dispatch = {}
        for tokens, handler in raw:
            for token in tokens:
                dispatch[token] = handler
        return dispatch

    def process_tokens(self, tokens):
        """process tokens and search for :

         _ non strict indentation (i.e. not always using the <indent> parameter as
           indent unit)
         _ too long lines (i.e. longer than <max_chars>)
         _ optionally bad construct (if given, bad_construct must be a compiled
           regular expression).
        """
        self._bracket_stack = [None]
        indents = [0]
        check_equal = False
        line_num = 0
        self._lines = {}
        self._visited_lines = {}
        token_handlers = self._prepare_token_dispatcher()
        self._last_line_ending = None
        last_blank_line_num = 0

        self._current_line = ContinuedLineState(tokens, self.config)
        for idx, (tok_type, token, start, _, line) in enumerate(tokens):
            if start[0] != line_num:
                line_num = start[0]
                # A tokenizer oddity: if an indented line contains a multi-line
                # docstring, the line member of the INDENT token does not contain
                # the full line; therefore we check the next token on the line.
                if tok_type == tokenize.INDENT:
                    self.new_line(TokenWrapper(tokens), idx - 1, idx + 1)
                else:
                    self.new_line(TokenWrapper(tokens), idx - 1, idx)

            if tok_type == tokenize.NEWLINE:
                # a program statement, or ENDMARKER, will eventually follow,
                # after some (possibly empty) run of tokens of the form
                #     (NL | COMMENT)* (INDENT | DEDENT+)?
                # If an INDENT appears, setting check_equal is wrong, and will
                # be undone when we see the INDENT.
                check_equal = True
                self._process_retained_warnings(TokenWrapper(tokens), idx)
                self._current_line.next_logical_line()
                self._check_line_ending(token, line_num)
            elif tok_type == tokenize.INDENT:
                check_equal = False
                self.check_indent_level(token, indents[-1] + 1, line_num)
                indents.append(indents[-1] + 1)
            elif tok_type == tokenize.DEDENT:
                # there's nothing we need to check here!  what's important is
                # that when the run of DEDENTs ends, the indentation of the
                # program statement (or ENDMARKER) that triggered the run is
                # equal to what's left at the top of the indents stack
                check_equal = True
                if len(indents) > 1:
                    del indents[-1]
            elif tok_type == tokenize.NL:
                if not line.strip("\r\n"):
                    last_blank_line_num = line_num
                self._check_continued_indentation(TokenWrapper(tokens), idx + 1)
                self._current_line.next_physical_line()
            elif tok_type not in (tokenize.COMMENT, tokenize.ENCODING):
                self._current_line.handle_line_start(idx)
                # This is the first concrete token following a NEWLINE, so it
                # must be the first token of the next program statement, or an
                # ENDMARKER; the "line" argument exposes the leading whitespace
                # for this statement; in the case of ENDMARKER, line is an empty
                # string, so will properly match the empty string with which the
                # "indents" stack was seeded
                if check_equal:
                    check_equal = False
                    self.check_indent_level(line, indents[-1], line_num)

            if tok_type == tokenize.NUMBER and token.endswith("l"):
                self.add_message("lowercase-l-suffix", line=line_num)

            try:
                handler = token_handlers[token]
            except KeyError:
                pass
            else:
                handler(tokens, idx)

        line_num -= 1  # to be ok with "wc -l"
        if line_num > self.config.max_module_lines:
            # Get the line where the too-many-lines (or its message id)
            # was disabled or default to 1.
            message_definition = self.linter.msgs_store.get_message_definitions(
                "too-many-lines"
            )[0]
            names = (message_definition.msgid, "too-many-lines")
            line = next(filter(None, map(self.linter._pragma_lineno.get, names)), 1)
            self.add_message(
                "too-many-lines",
                args=(line_num, self.config.max_module_lines),
                line=line,
            )

        # See if there are any trailing lines.  Do not complain about empty
        # files like __init__.py markers.
        if line_num == last_blank_line_num and line_num > 0:
            self.add_message("trailing-newlines", line=line_num)

    def _check_line_ending(self, line_ending, line_num):
        """Check a physical line's ending for mixed / unexpected formats."""
        # check if line endings are mixed
        if self._last_line_ending is not None:
            # line_ending == "" indicates a synthetic newline added at
            # the end of a file that does not, in fact, end with a
            # newline.
            if line_ending and line_ending != self._last_line_ending:
                self.add_message("mixed-line-endings", line=line_num)

        self._last_line_ending = line_ending

        # check if line ending is as expected
        expected = self.config.expected_line_ending_format
        if expected:
            # reduce multiple \n\n\n\n to one \n
            line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "")
            line_ending = "LF" if line_ending == "\n" else "CRLF"
            if line_ending != expected:
                self.add_message(
                    "unexpected-line-ending-format",
                    args=(line_ending, expected),
                    line=line_num,
                )

    def _process_retained_warnings(self, tokens, current_pos):
        """Emit the deferred before-block continuation warnings, now that we
        know whether the block body shares the opener's line."""
        single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ":")

        for indent_pos, state, indentations in self._current_line.retained_warnings:
            block_type = indentations[tokens.token_indent(indent_pos)]
            hints = {k: v for k, v in indentations.items() if v != block_type}
            if single_line_block_stmt and block_type == WITH_BODY:
                self._add_continuation_message(state, hints, tokens, indent_pos)
            elif not single_line_block_stmt and block_type == SINGLE_LINE:
                self._add_continuation_message(state, hints, tokens, indent_pos)

    def _check_continued_indentation(self, tokens, next_idx):
        """Validate the indentation of the continuation line starting at
        *next_idx* against the current continuation contexts."""

        def same_token_around_nl(token_type):
            return (
                tokens.type(next_idx) == token_type
                and tokens.type(next_idx - 2) == token_type
            )

        # Do not issue any warnings if the next line is empty.
        if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL:
            return

        state, valid_indentations = self._current_line.get_valid_indentations(next_idx)
        # Special handling for hanging comments and strings.  If the last line ended
        # with a comment (string) and the new line contains only a comment, the line
        # may also be indented to the start of the previous token.
        if same_token_around_nl(tokenize.COMMENT) or same_token_around_nl(
            tokenize.STRING
        ):
            valid_indentations[tokens.token_indent(next_idx - 2)] = True

        # We can only decide if the indentation of a continued line before opening
        # a new block is valid once we know of the body of the block is on the
        # same line as the block opener.  Since the token processing is single-pass,
        # emitting those warnings is delayed until the block opener is processed.
        if (
            state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK)
            and tokens.token_indent(next_idx) in valid_indentations
        ):
            self._current_line.add_block_warning(next_idx, state, valid_indentations)
        elif tokens.token_indent(next_idx) not in valid_indentations:
            length_indentation = len(tokens.token_indent(next_idx))
            # NOTE(review): an indent exactly double a valid one is tolerated
            # here — presumably to accept TAB-vs-space width ambiguity; confirm.
            if not any(
                length_indentation == 2 * len(indentation)
                for indentation in valid_indentations
            ):
                self._add_continuation_message(
                    state, valid_indentations, tokens, next_idx
                )

    def _add_continuation_message(self, state, indentations, tokens, position):
        """Emit a bad-continuation message with a visual indent-hint line."""
        readable_type, readable_position = _CONTINUATION_MSG_PARTS[state.context_type]
        hint_line, delta_message = _get_indent_hint_line(
            indentations, tokens.token_indent(position)
        )
        self.add_message(
            "bad-continuation",
            line=tokens.start_line(position),
            args=(
                readable_type,
                readable_position,
                delta_message,
                tokens.line(position),
                hint_line,
            ),
        )

    @check_messages("multiple-statements")
    def visit_default(self, node):
        """check the node line number and check it if not yet done"""
        if not node.is_statement:
            return
        if not node.root().pure_python:
            return
        prev_sibl = node.previous_sibling()
        if prev_sibl is not None:
            prev_line = prev_sibl.fromlineno
        else:
            # The line on which a finally: occurs in a try/finally
            # is not directly represented in the AST. We infer it
            # by taking the last line of the body and adding 1, which
            # should be the line of finally:
            if (
                isinstance(node.parent, nodes.TryFinally)
                and node in node.parent.finalbody
            ):
                prev_line = node.parent.body[0].tolineno + 1
            else:
                prev_line = node.parent.statement().fromlineno
        line = node.fromlineno
        assert line, node
        if prev_line == line and self._visited_lines.get(line) != 2:
            self._check_multi_statement_line(node, line)
            return
        if line in self._visited_lines:
            return
        try:
            tolineno = node.blockstart_tolineno
        except AttributeError:
            tolineno = node.tolineno
        assert tolineno, node
        lines = []
        # NOTE(review): `lines` is collected but never used below — looks like
        # leftover from an older check; confirm before removing.
        for line in range(line, tolineno + 1):
            self._visited_lines[line] = 1
            try:
                lines.append(self._lines[line].rstrip())
            except KeyError:
                lines.append("")

    def _check_multi_statement_line(self, node, line):
        """Check for lines containing multiple statements."""
        # Do not warn about multiple nested context managers
        # in with statements.
        if isinstance(node, nodes.With):
            return
        # For try... except... finally..., the two nodes
        # appear to be on the same line due to how the AST is built.
        if isinstance(node, nodes.TryExcept) and isinstance(
            node.parent, nodes.TryFinally
        ):
            return
        if (
            isinstance(node.parent, nodes.If)
            and not node.parent.orelse
            and self.config.single_line_if_stmt
        ):
            return
        if (
            isinstance(node.parent, nodes.ClassDef)
            and len(node.parent.body) == 1
            and self.config.single_line_class_stmt
        ):
            return
        self.add_message("multiple-statements", node=node)
        self._visited_lines[line] = 2

    def check_lines(self, lines, i):
        """check lines have less than a maximum number of characters

        *lines* is the raw text of one or more physical lines starting at
        line number *i*; also flags missing final newlines and trailing
        whitespace.
        """
        max_chars = self.config.max_line_length
        ignore_long_line = self.config.ignore_long_lines

        def check_line(line, i):
            # Returns the next line number, or None when a pragma on this line
            # disables line-too-long (stop checking the remaining lines).
            if not line.endswith("\n"):
                self.add_message("missing-final-newline", line=i)
            else:
                # exclude \f (formfeed) from the rstrip
                stripped_line = line.rstrip("\t\n\r\v ")
                if not stripped_line and _EMPTY_LINE in self.config.no_space_check:
                    # allow empty lines
                    pass
                elif line[len(stripped_line) :] not in ("\n", "\r\n"):
                    self.add_message(
                        "trailing-whitespace", line=i, col_offset=len(stripped_line)
                    )
                # Don't count excess whitespace in the line length.
                line = stripped_line
            mobj = OPTION_RGX.search(line)
            if mobj and "=" in line:
                front_of_equal, _, back_of_equal = mobj.group(1).partition("=")
                if front_of_equal.strip() == "disable":
                    if "line-too-long" in {
                        _msg_id.strip() for _msg_id in back_of_equal.split(",")
                    }:
                        return None
                line = line.rsplit("#", 1)[0].rstrip()

            if len(line) > max_chars and not ignore_long_line.search(line):
                self.add_message("line-too-long", line=i, args=(len(line), max_chars))
            return i + 1

        # Characters that str.splitlines() treats as line boundaries but which
        # should not advance the line counter; pieces ending in one of them are
        # glued back together so numbering matches the tokenizer's.
        unsplit_ends = {
            "\v",
            "\x0b",
            "\f",
            "\x0c",
            "\x1c",
            "\x1d",
            "\x1e",
            "\x85",
            "\u2028",
            "\u2029",
        }
        unsplit = []
        for line in lines.splitlines(True):
            if line[-1] in unsplit_ends:
                unsplit.append(line)
                continue

            if unsplit:
                unsplit.append(line)
                line = "".join(unsplit)
                unsplit = []

            i = check_line(line, i)
            if i is None:
                break

        if unsplit:
            check_line("".join(unsplit), i)

    def check_indent_level(self, string, expected, line_num):
        """return the indent level of the string

        Compares the leading whitespace of *string* against *expected* units
        of the configured indent and emits bad-indentation / mixed-indentation
        as appropriate.
        """
        indent = self.config.indent_string
        if indent == "\\t":  # \t is not interpreted in the configuration file
            indent = "\t"
        level = 0
        unit_size = len(indent)
        while string[:unit_size] == indent:
            string = string[unit_size:]
            level += 1
        suppl = ""
        # Any leftover whitespace: either mixed tabs/spaces (warn and stop) or
        # a partial indent unit (counted into the bad-indentation message).
        while string and string[0] in " \t":
            if string[0] != indent[0]:
                if string[0] == "\t":
                    args = ("tab", "space")
                else:
                    args = ("space", "tab")
                self.add_message("mixed-indentation", args=args, line=line_num)
                return level
            suppl += string[0]
            string = string[1:]
        if level != expected or suppl:
            i_type = "spaces"
            if indent[0] == "\t":
                i_type = "tabs"
            self.add_message(
                "bad-indentation",
                line=line_num,
                args=(level * unit_size + len(suppl), i_type, expected * unit_size),
            )
        return None
+
+
def register(linter):
    """Required method to auto-register this checker with the linter."""
    linter.register_checker(FormatChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/imports.py b/venv/Lib/site-packages/pylint/checkers/imports.py
new file mode 100644
index 0000000..42d4362
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/imports.py
@@ -0,0 +1,981 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Noam Yorav-Raphael <noamraph@gmail.com>
+# Copyright (c) 2015 James Morgensen <james.morgensen@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
+# Copyright (c) 2016 Maik Röder <maikroeder@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Michka Popoff <michkapopoff@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Erik Wright <erik.wright@shopify.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Marianna Polatoglou <mpolatoglou@bloomberg.net>
+# Copyright (c) 2019 Paul Renvoise <renvoisepaul@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""imports checkers for Python code"""
+
+import collections
+import copy
+import os
+import sys
+from distutils import sysconfig
+
+import astroid
+import isort
+from astroid import modutils
+from astroid.decorators import cached
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import (
+ check_messages,
+ is_from_fallback_block,
+ node_ignores_exception,
+)
+from pylint.exceptions import EmptyReportError
+from pylint.graph import DotBackend, get_cycles
+from pylint.interfaces import IAstroidChecker
+from pylint.reporters.ureports.nodes import Paragraph, VerbatimText
+from pylint.utils import get_global_option
+
+
+def _qualified_names(modname):
+ """Split the names of the given module into subparts
+
+ For example,
+ _qualified_names('pylint.checkers.ImportsChecker')
+ returns
+ ['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
+ """
+ names = modname.split(".")
+ return [".".join(names[0 : i + 1]) for i in range(len(names))]
+
+
def _get_import_name(importnode, modname):
    """Return a display-ready module name for *importnode*.

    Relative imports are resolved to their absolute qualified name (useful
    for error messages); any other import name is returned unchanged.
    """
    if not isinstance(importnode, astroid.ImportFrom) or not importnode.level:
        return modname
    root = importnode.root()
    if isinstance(root, astroid.Module):
        # resolve "from ..pkg import x" relative to the module's own package
        modname = root.relative_to_absolute_name(modname, level=importnode.level)
    return modname
+
+
def _get_first_import(node, context, name, base, level, alias):
    """return the node where [base.]<name> is imported or None if not found
    """
    # Full dotted path we are searching for, e.g. "os.path" when
    # base="os" and name="path".
    fullname = "%s.%s" % (base, name) if base else name

    first = None
    found = False
    for first in context.body:
        if first is node:
            continue
        # Only statements that appear before `node` in the same scope count
        # as an earlier import.
        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
            continue
        if isinstance(first, astroid.Import):
            if any(fullname == iname[0] for iname in first.names):
                found = True
                break
        elif isinstance(first, astroid.ImportFrom):
            # Relative imports only clash with imports at the same level.
            if level == first.level:
                for imported_name, imported_alias in first.names:
                    # exact match of the full dotted path ...
                    if fullname == "%s.%s" % (first.modname, imported_name):
                        found = True
                        break
                    # ... or the same bare name, unless either side is
                    # aliased (then the two imports bind different names).
                    if (
                        name != "*"
                        and name == imported_name
                        and not (alias or imported_alias)
                    ):
                        found = True
                        break
                if found:
                    break
    # Imports in mutually exclusive branches (e.g. try/except ImportError)
    # are not duplicates.
    if found and not astroid.are_exclusive(first, node):
        return first
    return None
+
+
def _ignore_import_failure(node, modname, ignored_modules):
    """Return True when a failed import of *modname* should not be reported.

    Either the module (or any of its dotted prefixes) is configured as
    ignored, or the import is guarded by a handler for ImportError.
    """
    if any(prefix in ignored_modules for prefix in _qualified_names(modname)):
        return True
    return node_ignores_exception(node, ImportError)
+
+
+# utilities to represents import dependencies as tree and dot graph ###########
+
+
+def _make_tree_defs(mod_files_list):
+ """get a list of 2-uple (module, list_of_files_which_import_this_module),
+ it will return a dictionary to represent this as a tree
+ """
+ tree_defs = {}
+ for mod, files in mod_files_list:
+ node = (tree_defs, ())
+ for prefix in mod.split("."):
+ node = node[0].setdefault(prefix, [{}, []])
+ node[1] += files
+ return tree_defs
+
+
+def _repr_tree_defs(data, indent_str=None):
+ """return a string which represents imports as a tree"""
+ lines = []
+ nodes = data.items()
+ for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
+ if not files:
+ files = ""
+ else:
+ files = "(%s)" % ",".join(sorted(files))
+ if indent_str is None:
+ lines.append("%s %s" % (mod, files))
+ sub_indent_str = " "
+ else:
+ lines.append(r"%s\-%s %s" % (indent_str, mod, files))
+ if i == len(nodes) - 1:
+ sub_indent_str = "%s " % indent_str
+ else:
+ sub_indent_str = "%s| " % indent_str
+ if sub:
+ lines.append(_repr_tree_defs(sub, sub_indent_str))
+ return "\n".join(lines)
+
+
def _dependencies_graph(filename, dep_info):
    """write dependencies as a dot (graphviz) file
    """
    emitted = {}
    printer = DotBackend(filename[:-4], rankdir="LR")
    printer.emit('URL="." node[shape="box"]')
    # first pass: declare every node exactly once
    for modname, dependencies in sorted(dep_info.items()):
        emitted[modname] = 1
        printer.emit_node(modname)
        for depmodname in dependencies:
            if depmodname not in emitted:
                emitted[depmodname] = 1
                printer.emit_node(depmodname)
    # second pass: draw importer -> importee edges
    for depmodname, dependencies in sorted(dep_info.items()):
        for modname in dependencies:
            printer.emit_edge(modname, depmodname)
    printer.generate(filename)
+
+
def _make_graph(filename, dep_info, sect, gtype):
    """Write a dependency graph to *filename* and note it in report *sect*."""
    _dependencies_graph(filename, dep_info)
    message = "%simports graph has been written to %s" % (gtype, filename)
    sect.append(Paragraph(message))
+
+
+# the import checker itself ###################################################
+
# Messages emitted by ImportsChecker, keyed by pylint message id.
# Each entry is (template, symbolic-name, description[, extra options]).
MSGS = {
    "E0401": (
        "Unable to import %s",
        "import-error",
        "Used when pylint has been unable to import a module.",
        {"old_names": [("F0401", "old-import-error")]},
    ),
    "E0402": (
        "Attempted relative import beyond top-level package",
        "relative-beyond-top-level",
        "Used when a relative import tries to access too many levels "
        "in the current package.",
    ),
    "R0401": (
        "Cyclic import (%s)",
        "cyclic-import",
        "Used when a cyclic import between two or more modules is detected.",
    ),
    "W0401": (
        "Wildcard import %s",
        "wildcard-import",
        "Used when `from module import *` is detected.",
    ),
    "W0402": (
        "Uses of a deprecated module %r",
        "deprecated-module",
        "Used a module marked as deprecated is imported.",
    ),
    "W0404": (
        "Reimport %r (imported line %s)",
        "reimported",
        "Used when a module is reimported multiple times.",
    ),
    "W0406": (
        "Module import itself",
        "import-self",
        "Used when a module is importing itself.",
    ),
    "W0407": (
        "Prefer importing %r instead of %r",
        "preferred-module",
        "Used when a module imported has a preferred replacement module.",
    ),
    "W0410": (
        "__future__ import is not the first non docstring statement",
        "misplaced-future",
        "Python 2.5 and greater require __future__ import to be the "
        "first non docstring statement in the module.",
    ),
    "C0410": (
        "Multiple imports on one line (%s)",
        "multiple-imports",
        "Used when import statement importing multiple modules is detected.",
    ),
    "C0411": (
        "%s should be placed before %s",
        "wrong-import-order",
        "Used when PEP8 import order is not respected (standard imports "
        "first, then third-party libraries, then local imports)",
    ),
    "C0412": (
        "Imports from package %s are not grouped",
        "ungrouped-imports",
        "Used when imports are not grouped by packages",
    ),
    "C0413": (
        'Import "%s" should be placed at the top of the module',
        "wrong-import-position",
        "Used when code and imports are mixed",
    ),
    "C0414": (
        "Import alias does not rename original package",
        "useless-import-alias",
        "Used when an import alias is same as original package."
        "e.g using import numpy as numpy instead of import numpy as np",
    ),
    "C0415": (
        "Import outside toplevel (%s)",
        "import-outside-toplevel",
        "Used when an import statement is used anywhere other than the module "
        "toplevel. Move this import to the top of the file.",
    ),
}


# Default values for the corresponding ImportsChecker options below.
DEFAULT_STANDARD_LIBRARY = ()
DEFAULT_KNOWN_THIRD_PARTY = ("enchant",)
DEFAULT_PREFERRED_MODULES = ()
+
+
class ImportsChecker(BaseChecker):
    """checks for
    * external modules dependencies
    * relative / wildcard imports
    * cyclic imports
    * uses of deprecated modules
    * uses of modules instead of preferred modules
    """

    __implements__ = IAstroidChecker

    # checker identity used by the pylint plugin machinery
    name = "imports"
    msgs = MSGS
    priority = -2
    # default value for the --deprecated-modules option below
    deprecated_modules = ("optparse", "tkinter.tix")

    # command-line / rcfile options exposed by this checker
    options = (
        (
            "deprecated-modules",
            {
                "default": deprecated_modules,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Deprecated modules which should not be used,"
                " separated by a comma.",
            },
        ),
        (
            "preferred-modules",
            {
                "default": DEFAULT_PREFERRED_MODULES,
                "type": "csv",
                "metavar": "<module:preferred-module>",
                "help": "Couples of modules and preferred modules,"
                " separated by a comma.",
            },
        ),
        (
            "import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of every (i.e. internal and"
                " external) dependencies in the given file"
                " (report RP0402 must not be disabled).",
            },
        ),
        (
            "ext-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of external dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "int-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of internal dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "known-standard-library",
            {
                "default": DEFAULT_STANDARD_LIBRARY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "the standard compatibility libraries.",
            },
        ),
        (
            "known-third-party",
            {
                "default": DEFAULT_KNOWN_THIRD_PARTY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "a third party library.",
            },
        ),
        (
            "allow-any-import-level",
            {
                "default": (),
                "type": "csv",
                "metavar": "<modules>",
                "help": (
                    "List of modules that can be imported at any level, not just "
                    "the top level one."
                ),
            },
        ),
        (
            "analyse-fallback-blocks",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Analyse import fallback blocks. This can be used to "
                "support both Python 2 and 3 compatible code, which "
                "means that the block might have code that exists "
                "only in one or another interpreter, leading to false "
                "positives when analysed.",
            },
        ),
        (
            "allow-wildcard-with-all",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Allow wildcard imports from modules that define __all__.",
            },
        ),
    )
+
    def __init__(self, linter=None):
        """Initialize per-run state and register the dependency reports."""
        BaseChecker.__init__(self, linter)
        self.stats = None  # shared linter stats dict, bound in open()
        self.import_graph = None  # module -> set of imported modules
        self._imports_stack = []  # (node, modname) pairs for the current module
        self._first_non_import_node = None
        self._module_pkg = {}  # mapping of modules to the pkg they belong in
        self._allow_any_import_level = set()
        self.reports = (
            ("RP0401", "External dependencies", self._report_external_dependencies),
            ("RP0402", "Modules dependencies graph", self._report_dependencies_graph),
        )

        self._site_packages = self._compute_site_packages()
+
+ @staticmethod
+ def _compute_site_packages():
+ def _normalized_path(path):
+ return os.path.normcase(os.path.abspath(path))
+
+ paths = set()
+ real_prefix = getattr(sys, "real_prefix", None)
+ for prefix in filter(None, (real_prefix, sys.prefix)):
+ path = sysconfig.get_python_lib(prefix=prefix)
+ path = _normalized_path(path)
+ paths.add(path)
+
+ # Handle Debian's derivatives /usr/local.
+ if os.path.isfile("/etc/debian_version"):
+ for prefix in filter(None, (real_prefix, sys.prefix)):
+ libpython = os.path.join(
+ prefix,
+ "local",
+ "lib",
+ "python" + sysconfig.get_python_version(),
+ "dist-packages",
+ )
+ paths.add(libpython)
+ return paths
+
    def open(self):
        """called before visiting project (i.e set of modules)"""
        self.linter.add_stats(dependencies={})
        self.linter.add_stats(cycles=[])
        self.stats = self.linter.stats
        self.import_graph = collections.defaultdict(set)
        self._module_pkg = {}  # mapping of modules to the pkg they belong in
        # edges suppressed by per-line "cyclic-import" disables
        self._excluded_edges = collections.defaultdict(set)
        self._ignored_modules = get_global_option(self, "ignored-modules", default=[])
        # Build a mapping {'module': 'preferred-module'}
        self.preferred_modules = dict(
            module.split(":")
            for module in self.config.preferred_modules
            if ":" in module
        )
        self._allow_any_import_level = set(self.config.allow_any_import_level)
+
+ def _import_graph_without_ignored_edges(self):
+ filtered_graph = copy.deepcopy(self.import_graph)
+ for node in filtered_graph:
+ filtered_graph[node].difference_update(self._excluded_edges[node])
+ return filtered_graph
+
+ def close(self):
+ """called before visiting project (i.e set of modules)"""
+ if self.linter.is_message_enabled("cyclic-import"):
+ graph = self._import_graph_without_ignored_edges()
+ vertices = list(graph)
+ for cycle in get_cycles(graph, vertices=vertices):
+ self.add_message("cyclic-import", args=" -> ".join(cycle))
+
    @check_messages(*MSGS)
    def visit_import(self, node):
        """triggered when an import statement is seen"""
        self._check_reimport(node)
        self._check_import_as_rename(node)
        self._check_toplevel(node)

        names = [name for name, _ in node.names]
        if len(names) >= 2:
            self.add_message("multiple-imports", args=", ".join(names), node=node)

        for name in names:
            self._check_deprecated_module(node, name)
            self._check_preferred_module(node, name)
            imported_module = self._get_imported_module(node, name)
            if isinstance(node.parent, astroid.Module):
                # Allow imports nested
                self._check_position(node)
            if isinstance(node.scope(), astroid.Module):
                self._record_import(node, imported_module)

            if imported_module is None:
                # import failed; _get_imported_module already reported it
                continue

            self._add_imported_module(node, imported_module.name)
+
    @check_messages(*MSGS)
    def visit_importfrom(self, node):
        """triggered when a from statement is seen"""
        basename = node.modname
        imported_module = self._get_imported_module(node, basename)

        self._check_import_as_rename(node)
        self._check_misplaced_future(node)
        self._check_deprecated_module(node, basename)
        self._check_preferred_module(node, basename)
        self._check_wildcard_imports(node, imported_module)
        self._check_same_line_imports(node)
        self._check_reimport(node, basename=basename, level=node.level)
        self._check_toplevel(node)

        if isinstance(node.parent, astroid.Module):
            # Allow imports nested
            self._check_position(node)
        if isinstance(node.scope(), astroid.Module):
            self._record_import(node, imported_module)
        if imported_module is None:
            # import failed; _get_imported_module already reported it
            return
        for name, _ in node.names:
            if name != "*":
                self._add_imported_module(node, "%s.%s" % (imported_module.name, name))
            else:
                self._add_imported_module(node, imported_module.name)
+
    @check_messages(*MSGS)
    def leave_module(self, node):
        """Check import ordering/grouping once the whole module was visited."""
        # Check imports are grouped by category (standard, 3rd party, local)
        std_imports, ext_imports, loc_imports = self._check_imports_order(node)

        # Check that imports are grouped by package within a given category
        met_import = set()  # set for 'import x' style
        met_from = set()  # set for 'from x import y' style
        current_package = None
        for import_node, import_name in std_imports + ext_imports + loc_imports:
            if not self.linter.is_message_enabled(
                "ungrouped-imports", import_node.fromlineno
            ):
                continue
            # 'import x' and 'from x import y' are tracked separately
            if isinstance(import_node, astroid.node_classes.ImportFrom):
                met = met_from
            else:
                met = met_import
            package, _, _ = import_name.partition(".")
            # seeing a package again after another package means ungrouped
            if current_package and current_package != package and package in met:
                self.add_message("ungrouped-imports", node=import_node, args=package)
            current_package = package
            met.add(package)

        # reset per-module state for the next module
        self._imports_stack = []
        self._first_non_import_node = None
+
    def compute_first_non_import_node(self, node):
        """Remember the first statement of the module that is not an import."""
        if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
            return
        # if the node does not contain an import instruction, and if it is the
        # first node of the module, keep a track of it (all the import positions
        # of the module will be compared to the position of this first
        # instruction)
        if self._first_non_import_node:
            return
        if not isinstance(node.parent, astroid.Module):
            return
        # try/except and try/finally blocks that contain imports are treated
        # as import statements themselves (conditional import idiom)
        nested_allowed = [astroid.TryExcept, astroid.TryFinally]
        is_nested_allowed = [
            allowed for allowed in nested_allowed if isinstance(node, allowed)
        ]
        if is_nested_allowed and any(
            node.nodes_of_class((astroid.Import, astroid.ImportFrom))
        ):
            return
        if isinstance(node, astroid.Assign):
            # Add compatibility for module level dunder names
            # https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
            valid_targets = [
                isinstance(target, astroid.AssignName)
                and target.name.startswith("__")
                and target.name.endswith("__")
                for target in node.targets
            ]
            if all(valid_targets):
                return
        self._first_non_import_node = node

    # every statement kind below may be the first non-import statement
    visit_tryfinally = (
        visit_tryexcept
    ) = (
        visit_assignattr
    ) = (
        visit_assign
    ) = (
        visit_ifexp
    ) = visit_comprehension = visit_expr = visit_if = compute_first_non_import_node
+
    def visit_functiondef(self, node):
        """Record a top-level definition as the first non-import statement."""
        if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
            return
        # If it is the first non import instruction of the module, record it.
        if self._first_non_import_node:
            return

        # Check if the node belongs to an `If` or a `Try` block. If they
        # contain imports, skip recording this node.
        if not isinstance(node.parent.scope(), astroid.Module):
            return

        root = node
        while not isinstance(root.parent, astroid.Module):
            root = root.parent

        if isinstance(root, (astroid.If, astroid.TryFinally, astroid.TryExcept)):
            if any(root.nodes_of_class((astroid.Import, astroid.ImportFrom))):
                return

        self._first_non_import_node = node

    # class/for/while definitions are handled the same way
    visit_classdef = visit_for = visit_while = visit_functiondef
+
+ def _check_misplaced_future(self, node):
+ basename = node.modname
+ if basename == "__future__":
+ # check if this is the first non-docstring statement in the module
+ prev = node.previous_sibling()
+ if prev:
+ # consecutive future statements are possible
+ if not (
+ isinstance(prev, astroid.ImportFrom)
+ and prev.modname == "__future__"
+ ):
+ self.add_message("misplaced-future", node=node)
+ return
+
+ def _check_same_line_imports(self, node):
+ # Detect duplicate imports on the same line.
+ names = (name for name, _ in node.names)
+ counter = collections.Counter(names)
+ for name, count in counter.items():
+ if count > 1:
+ self.add_message("reimported", node=node, args=(name, node.fromlineno))
+
    def _check_position(self, node):
        """Check `node` import or importfrom node position is correct

        Send a message if `node` comes before another instruction
        """
        # if a first non-import instruction has already been encountered,
        # it means the import comes after it and therefore is not well placed
        if self._first_non_import_node:
            self.add_message("wrong-import-position", node=node, args=node.as_string())
+
    def _record_import(self, node, importedmodnode):
        """Record the package `node` imports from"""
        if isinstance(node, astroid.ImportFrom):
            importedname = node.modname
        else:
            # prefer the resolved module name; fall back to the first
            # dotted component of the written import
            importedname = importedmodnode.name if importedmodnode else None
        if not importedname:
            importedname = node.names[0][0].split(".")[0]

        if isinstance(node, astroid.ImportFrom) and (node.level or 0) >= 1:
            # We need the importedname with first point to detect local package
            # Example of node:
            # 'from .my_package1 import MyClass1'
            # the output should be '.my_package1' instead of 'my_package1'
            # Example of node:
            # 'from . import my_package2'
            # the output should be '.my_package2' instead of '{pyfile}'
            importedname = "." + importedname

        self._imports_stack.append((node, importedname))
+
+ @staticmethod
+ def _is_fallback_import(node, imports):
+ imports = [import_node for (import_node, _) in imports]
+ return any(astroid.are_exclusive(import_node, node) for import_node in imports)
+
    def _check_imports_order(self, _module_node):
        """Checks imports of module `node` are grouped by category

        Imports must follow this order: standard, 3rd party, local
        """
        std_imports = []
        third_party_imports = []
        first_party_imports = []
        # need of a list that holds third or first party ordered import
        external_imports = []
        local_imports = []
        # *_not_ignored lists only hold imports for which wrong-import-order
        # is enabled at that line; they drive the message emission below
        third_party_not_ignored = []
        first_party_not_ignored = []
        local_not_ignored = []
        # isort classifies each package into FUTURE/STDLIB/THIRDPARTY/...
        isort_obj = isort.SortImports(
            file_contents="",
            known_third_party=self.config.known_third_party,
            known_standard_library=self.config.known_standard_library,
        )
        for node, modname in self._imports_stack:
            if modname.startswith("."):
                # relative import: keep the leading dot plus first component
                package = "." + modname.split(".")[1]
            else:
                package = modname.split(".")[0]
            nested = not isinstance(node.parent, astroid.Module)
            ignore_for_import_order = not self.linter.is_message_enabled(
                "wrong-import-order", node.fromlineno
            )
            import_category = isort_obj.place_module(package)
            node_and_package_import = (node, package)
            if import_category in ("FUTURE", "STDLIB"):
                std_imports.append(node_and_package_import)
                # any later-category import already seen means this stdlib
                # import is out of order
                wrong_import = (
                    third_party_not_ignored
                    or first_party_not_ignored
                    or local_not_ignored
                )
                if self._is_fallback_import(node, wrong_import):
                    continue
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'standard import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "THIRDPARTY":
                third_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested and not ignore_for_import_order:
                    third_party_not_ignored.append(node_and_package_import)
                wrong_import = first_party_not_ignored or local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'third party import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "FIRSTPARTY":
                first_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested and not ignore_for_import_order:
                    first_party_not_ignored.append(node_and_package_import)
                wrong_import = local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'first party import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "LOCALFOLDER":
                local_imports.append((node, package))
                if not nested and not ignore_for_import_order:
                    local_not_ignored.append((node, package))
        return std_imports, external_imports, local_imports
+
    def _get_imported_module(self, importnode, modname):
        """Return the astroid Module imported by *importnode*, or None.

        All failure branches emit a message (unless suppressed) and fall
        through to an implicit None return.
        """
        try:
            return importnode.do_import_module(modname)
        except astroid.TooManyLevelsError:
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None

            self.add_message("relative-beyond-top-level", node=importnode)
        except astroid.AstroidSyntaxError as exc:
            message = "Cannot import {!r} due to syntax error {!r}".format(
                modname, str(exc.error)  # pylint: disable=no-member; false positive
            )
            self.add_message("syntax-error", line=importnode.lineno, args=message)

        except astroid.AstroidBuildingException:
            if not self.linter.is_message_enabled("import-error"):
                return None
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None
            if not self.config.analyse_fallback_blocks and is_from_fallback_block(
                importnode
            ):
                return None

            dotted_modname = _get_import_name(importnode, modname)
            self.add_message("import-error", args=repr(dotted_modname), node=importnode)
+
    def _add_imported_module(self, node, importedmodname):
        """notify an imported module, used to analyze dependencies"""
        module_file = node.root().file
        context_name = node.root().name
        base = os.path.splitext(os.path.basename(module_file))[0]

        try:
            # normalize to the actual module part (drops attribute accesses)
            importedmodname = modutils.get_module_part(importedmodname, module_file)
        except ImportError:
            # best effort: keep the name as written if resolution fails
            pass

        if context_name == importedmodname:
            self.add_message("import-self", node=node)

        elif not modutils.is_standard_module(importedmodname):
            # if this is not a package __init__ module
            if base != "__init__" and context_name not in self._module_pkg:
                # record the module's parent, or the module itself if this is
                # a top level module, as the package it belongs to
                self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]

            # handle dependencies
            importedmodnames = self.stats["dependencies"].setdefault(
                importedmodname, set()
            )
            if context_name not in importedmodnames:
                importedmodnames.add(context_name)

            # update import graph
            self.import_graph[context_name].add(importedmodname)
            if not self.linter.is_message_enabled("cyclic-import", line=node.lineno):
                self._excluded_edges[context_name].add(importedmodname)
+
+ def _check_deprecated_module(self, node, mod_path):
+ """check if the module is deprecated"""
+ for mod_name in self.config.deprecated_modules:
+ if mod_path == mod_name or mod_path.startswith(mod_name + "."):
+ self.add_message("deprecated-module", node=node, args=mod_path)
+
+ def _check_preferred_module(self, node, mod_path):
+ """check if the module has a preferred replacement"""
+ if mod_path in self.preferred_modules:
+ self.add_message(
+ "preferred-module",
+ node=node,
+ args=(self.preferred_modules[mod_path], mod_path),
+ )
+
+ def _check_import_as_rename(self, node):
+ names = node.names
+ for name in names:
+ if not all(name):
+ return
+
+ real_name = name[0]
+ splitted_packages = real_name.rsplit(".")
+ real_name = splitted_packages[-1]
+ imported_name = name[1]
+ # consider only following cases
+ # import x as x
+ # and ignore following
+ # import x.y.z as z
+ if real_name == imported_name and len(splitted_packages) == 1:
+ self.add_message("useless-import-alias", node=node)
+
+ def _check_reimport(self, node, basename=None, level=None):
+ """check if the import is necessary (i.e. not already done)"""
+ if not self.linter.is_message_enabled("reimported"):
+ return
+
+ frame = node.frame()
+ root = node.root()
+ contexts = [(frame, level)]
+ if root is not frame:
+ contexts.append((root, None))
+
+ for known_context, known_level in contexts:
+ for name, alias in node.names:
+ first = _get_first_import(
+ node, known_context, name, basename, known_level, alias
+ )
+ if first is not None:
+ self.add_message(
+ "reimported", node=node, args=(name, first.fromlineno)
+ )
+
+ def _report_external_dependencies(self, sect, _, _dummy):
+ """return a verbatim layout for displaying dependencies"""
+ dep_info = _make_tree_defs(self._external_dependencies_info().items())
+ if not dep_info:
+ raise EmptyReportError()
+ tree_str = _repr_tree_defs(dep_info)
+ sect.append(VerbatimText(tree_str))
+
+ def _report_dependencies_graph(self, sect, _, _dummy):
+ """write dependencies as a dot (graphviz) file"""
+ dep_info = self.stats["dependencies"]
+ if not dep_info or not (
+ self.config.import_graph
+ or self.config.ext_import_graph
+ or self.config.int_import_graph
+ ):
+ raise EmptyReportError()
+ filename = self.config.import_graph
+ if filename:
+ _make_graph(filename, dep_info, sect, "")
+ filename = self.config.ext_import_graph
+ if filename:
+ _make_graph(filename, self._external_dependencies_info(), sect, "external ")
+ filename = self.config.int_import_graph
+ if filename:
+ _make_graph(filename, self._internal_dependencies_info(), sect, "internal ")
+
+ def _filter_dependencies_graph(self, internal):
+ """build the internal or the external dependency graph"""
+ graph = collections.defaultdict(set)
+ for importee, importers in self.stats["dependencies"].items():
+ for importer in importers:
+ package = self._module_pkg.get(importer, importer)
+ is_inside = importee.startswith(package)
+ if is_inside and internal or not is_inside and not internal:
+ graph[importee].add(importer)
+ return graph
+
+ @cached
+ def _external_dependencies_info(self):
+ """return cached external dependencies information or build and
+ cache them
+ """
+ return self._filter_dependencies_graph(internal=False)
+
+ @cached
+ def _internal_dependencies_info(self):
+ """return cached internal dependencies information or build and
+ cache them
+ """
+ return self._filter_dependencies_graph(internal=True)
+
+ def _check_wildcard_imports(self, node, imported_module):
+ if node.root().package:
+ # Skip the check if in __init__.py issue #2026
+ return
+
+ wildcard_import_is_allowed = self._wildcard_import_is_allowed(imported_module)
+ for name, _ in node.names:
+ if name == "*" and not wildcard_import_is_allowed:
+ self.add_message("wildcard-import", args=node.modname, node=node)
+
+ def _wildcard_import_is_allowed(self, imported_module):
+ return (
+ self.config.allow_wildcard_with_all
+ and imported_module is not None
+ and "__all__" in imported_module.locals
+ )
+
+ def _check_toplevel(self, node):
+ """Check whether the import is made outside the module toplevel.
+ """
+ # If the scope of the import is a module, then obviously it is
+ # not outside the module toplevel.
+ if isinstance(node.scope(), astroid.Module):
+ return
+
+ if isinstance(node, astroid.ImportFrom):
+ module_names = [node.modname]
+ else:
+ module_names = [name[0] for name in node.names]
+
+ # Get the full names of all the imports that are not whitelisted.
+ scoped_imports = [
+ name for name in module_names if name not in self._allow_any_import_level
+ ]
+
+ if scoped_imports:
+ self.add_message(
+ "import-outside-toplevel", args=", ".join(scoped_imports), node=node
+ )
+
+
def register(linter):
    """Required plugin hook: register the ImportsChecker with *linter*."""
    checker = ImportsChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/logging.py b/venv/Lib/site-packages/pylint/checkers/logging.py
new file mode 100644
index 0000000..5ad0e76
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/logging.py
@@ -0,0 +1,384 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009, 2012, 2014 Google, Inc.
+# Copyright (c) 2012 Mike Bryant <leachim@leachim.info>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Chris Murray <chris@chrismurray.scot>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2017 guillaume2 <guillaume.peillex@gmail.col>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Mariatta Wijaya <mariatta@python.org>
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""checker for use of Python logging
+"""
+import string
+
+import astroid
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+from pylint.checkers.utils import check_messages
+
+MSGS = {
+ "W1201": (
+ "Specify string format arguments as logging function parameters",
+ "logging-not-lazy",
+ "Used when a logging statement has a call form of "
+ '"logging.<logging method>(format_string % (format_args...))". '
+ "Such calls should leave string interpolation to the logging "
+ "method itself and be written "
+ '"logging.<logging method>(format_string, format_args...)" '
+ "so that the program may avoid incurring the cost of the "
+ "interpolation in those cases in which no message will be "
+ "logged. For more, see "
+ "http://www.python.org/dev/peps/pep-0282/.",
+ ),
+ "W1202": (
+ "Use %s formatting in logging functions%s",
+ "logging-format-interpolation",
+ "Used when a logging statement has a call form of "
+ '"logging.<logging method>(<string formatting>)".'
+ " with invalid string formatting. "
+ "Use another way for format the string instead.",
+ ),
+ "E1200": (
+ "Unsupported logging format character %r (%#02x) at index %d",
+ "logging-unsupported-format",
+ "Used when an unsupported format character is used in a logging "
+ "statement format string.",
+ ),
+ "E1201": (
+ "Logging format string ends in middle of conversion specifier",
+ "logging-format-truncated",
+ "Used when a logging statement format string terminates before "
+ "the end of a conversion specifier.",
+ ),
+ "E1205": (
+ "Too many arguments for logging format string",
+ "logging-too-many-args",
+ "Used when a logging format string is given too many arguments.",
+ ),
+ "E1206": (
+ "Not enough arguments for logging format string",
+ "logging-too-few-args",
+ "Used when a logging format string is given too few arguments.",
+ ),
+}
+
+
+CHECKED_CONVENIENCE_FUNCTIONS = {
+ "critical",
+ "debug",
+ "error",
+ "exception",
+ "fatal",
+ "info",
+ "warn",
+ "warning",
+}
+
+
def is_method_call(func, types=(), methods=()):
    """Determine whether *func* is a bound-method call node.

    Args:
        func (astroid.BoundMethod): The BoundMethod AST node to check.
        types (Optional[String]): Optional sequence of caller type names
            to restrict the check to.
        methods (Optional[String]): Optional sequence of method names to
            restrict the check to.

    Returns:
        bool: True if the node represents a method call for the given
        type and method names, False otherwise.
    """
    if not isinstance(func, astroid.BoundMethod):
        return False
    if not isinstance(func.bound, astroid.Instance):
        return False
    # Empty filters mean "accept anything" for that dimension.
    if types and func.bound.name not in types:
        return False
    if methods and func.name not in methods:
        return False
    return True
+
+
+class LoggingChecker(checkers.BaseChecker):
+ """Checks use of the logging module."""
+
+ __implements__ = interfaces.IAstroidChecker
+ name = "logging"
+ msgs = MSGS
+
+ options = (
+ (
+ "logging-modules",
+ {
+ "default": ("logging",),
+ "type": "csv",
+ "metavar": "<comma separated list>",
+ "help": "Logging modules to check that the string format "
+ "arguments are in logging function parameter format.",
+ },
+ ),
+ (
+ "logging-format-style",
+ {
+ "default": "old",
+ "type": "choice",
+ "metavar": "<old (%) or new ({) or fstr (f'')>",
+ "choices": ["old", "new", "fstr"],
+ "help": "Format style used to check logging format string. "
+ "`old` means using % formatting, `new` is for `{}` formatting,"
+ "and `fstr` is for f-strings.",
+ },
+ ),
+ )
+
def visit_module(self, node):  # pylint: disable=unused-argument
    """Reset per-module state before checking a new module.

    ``import logging as foo`` is legal, so we must track every alias
    the configured logging modules may be bound to.
    """
    self._logging_names = set()
    logging_mods = self.config.logging_modules

    self._format_style = self.config.logging_format_style
    style_tokens = {"old": "%", "new": "{", "fstr": "f-string"}
    # Only the %-style message suggests moving the parameters out of
    # the format expression.
    extra_help = (
        " and pass the % parameters as arguments"
        if self._format_style == "old"
        else ""
    )
    self._format_style_args = (style_tokens[self._format_style], extra_help)

    self._logging_modules = set(logging_mods)
    # Map "pkg" -> "mod" for every dotted entry "pkg.mod", so that
    # ``from pkg import mod`` can be recognised later.
    self._from_imports = {}
    for module_name in logging_mods:
        parts = module_name.rsplit(".", 1)
        if len(parts) == 2:
            self._from_imports[parts[0]] = parts[1]
+
def visit_importfrom(self, node):
    """Record aliases created by ``from <pkg> import <logging-mod>``."""
    logging_name = self._from_imports.get(node.modname)
    if logging_name is None:
        # Not one of the configured logging packages.
        return
    for imported, alias in node.names:
        if imported == logging_name:
            self._logging_names.add(alias or imported)
+
def visit_import(self, node):
    """Record aliases created by plain ``import logging [as name]``."""
    tracked_modules = self._logging_modules
    for imported, alias in node.names:
        if imported in tracked_modules:
            self._logging_names.add(alias or imported)
+
@check_messages(*MSGS)
def visit_call(self, node):
    """Checks calls to logging methods."""

    def called_through_logging_name():
        # e.g. ``logging.warning(...)`` where ``logging`` (or one of its
        # aliases) was recorded by visit_import/visit_importfrom.
        return (
            isinstance(node.func, astroid.Attribute)
            and isinstance(node.func.expr, astroid.Name)
            and node.func.expr.name in self._logging_names
        )

    def resolve_logger_method():
        # Infer the callee; accept bound methods of logging.Logger or of
        # any of its subclasses.
        try:
            for inferred in node.func.infer():
                if not isinstance(inferred, astroid.BoundMethod):
                    continue
                parent = inferred._proxied.parent
                if not isinstance(parent, astroid.ClassDef):
                    continue
                is_logger = parent.qname() == "logging.Logger" or any(
                    ancestor.qname() == "logging.Logger"
                    for ancestor in parent.ancestors()
                )
                if is_logger:
                    return True, inferred._proxied.name
        except astroid.exceptions.InferenceError:
            pass
        return False, None

    if called_through_logging_name():
        name = node.func.attrname
    else:
        matched, name = resolve_logger_method()
        if not matched:
            return
    self._check_log_method(node, name)
+
def _check_log_method(self, node, name):
    """Checks calls to logging.log(level, format, *format_args)."""
    if name == "log":
        # logging.log(level, msg, ...): format string is the second arg.
        if node.starargs or node.kwargs or len(node.args) < 2:
            # Either a malformed call, star args, or double-star args.
            # Beyond the scope of this checker.
            return
        format_pos = 1
    elif name in CHECKED_CONVENIENCE_FUNCTIONS:
        if node.starargs or node.kwargs or not node.args:
            # Either no args, star args, or double-star args. Beyond the
            # scope of this checker.
            return
        format_pos = 0
    else:
        return

    format_arg = node.args[format_pos]
    if isinstance(format_arg, astroid.BinOp):
        if format_arg.op == "%":
            # "fmt" % args inside the call defeats lazy interpolation.
            self.add_message("logging-not-lazy", node=node)
        elif format_arg.op == "+":
            # Concatenation is only suspicious if at least one operand
            # is a literal string.
            literal_operands = sum(
                1
                for operand in (format_arg.left, format_arg.right)
                if self._is_operand_literal_str(utils.safe_infer(operand))
            )
            if literal_operands > 0:
                self.add_message("logging-not-lazy", node=node)
    elif isinstance(format_arg, astroid.Call):
        self._check_call_func(format_arg)
    elif isinstance(format_arg, astroid.Const):
        self._check_format_string(node, format_pos)
    elif isinstance(format_arg, (astroid.FormattedValue, astroid.JoinedStr)):
        if self._format_style != "fstr":
            self.add_message(
                "logging-format-interpolation",
                node=node,
                args=self._format_style_args,
            )
+
@staticmethod
def _is_operand_literal_str(operand):
    """Tell whether *operand* is an inferred literal string constant."""
    if not isinstance(operand, astroid.Const):
        return False
    return operand.name == "str"
+
def _check_call_func(self, node):
    """Checks that function call is not format_string.format().

    Args:
        node (astroid.node_classes.Call):
            Call AST node to be checked.
    """
    inferred_func = utils.safe_infer(node.func)
    looks_like_str_format = is_method_call(
        inferred_func, types=("str", "unicode"), methods=("format",)
    )
    # Complex format specs cannot be expressed with lazy %-style
    # parameters, so those calls are left alone.
    if looks_like_str_format and not is_complex_format_str(inferred_func.bound):
        self.add_message(
            "logging-format-interpolation",
            node=node,
            args=self._format_style_args,
        )
+
def _check_format_string(self, node, format_arg):
    """Checks that format string tokens match the supplied arguments.

    Args:
        node (astroid.node_classes.NodeNG): AST node to be checked.
        format_arg (int): Index of the format string in the node arguments.
    """
    num_args = _count_supplied_tokens(node.args[format_arg + 1 :])
    if not num_args:
        # If no args were supplied the string is not interpolated and can
        # contain formatting characters - it's used verbatim. Don't check
        # any further.
        return

    format_string = node.args[format_arg].value
    required_num_args = 0
    if isinstance(format_string, bytes):
        format_string = format_string.decode()
    if isinstance(format_string, str):
        try:
            if self._format_style == "old":
                keyword_args, required_num_args, _, _ = utils.parse_format_string(
                    format_string
                )
                if keyword_args:
                    # Keyword checking on logging strings is complicated by
                    # special keywords - out of scope.
                    return
            elif self._format_style == "new":
                (
                    keyword_arguments,
                    implicit_pos_args,
                    explicit_pos_args,
                ) = utils.parse_format_method_string(format_string)

                keyword_args_cnt = len(
                    {k for k, _ in keyword_arguments if not isinstance(k, int)}
                )
                required_num_args = (
                    keyword_args_cnt + implicit_pos_args + explicit_pos_args
                )
            else:
                # "fstr" style: a plain literal format string with extra
                # args cannot be an f-string, so flag the interpolation.
                self.add_message(
                    "logging-format-interpolation",
                    node=node,
                    args=self._format_style_args,
                )
        except utils.UnsupportedFormatCharacter as ex:
            char = format_string[ex.index]
            self.add_message(
                "logging-unsupported-format",
                node=node,
                args=(char, ord(char), ex.index),
            )
            return
        except utils.IncompleteFormatString:
            self.add_message("logging-format-truncated", node=node)
            return
    if num_args > required_num_args:
        self.add_message("logging-too-many-args", node=node)
    elif num_args < required_num_args:
        self.add_message("logging-too-few-args", node=node)
+
+
def is_complex_format_str(node):
    """Checks if node represents a string with complex formatting specs.

    Args:
        node (astroid.node_classes.NodeNG): AST node to check
    Returns:
        bool: True if inferred string uses complex formatting, False otherwise
    """
    inferred = utils.safe_infer(node)
    is_str_const = isinstance(inferred, astroid.Const) and isinstance(
        inferred.value, str
    )
    if not is_str_const:
        # Can't prove it is a simple string literal, so conservatively
        # treat it as complex (i.e. do not warn about it).
        return True
    try:
        # Any non-empty format spec (":>8", ":.2f", ...) makes the
        # string "complex".
        return any(
            format_spec
            for _, _, format_spec, _ in string.Formatter().parse(inferred.value)
        )
    except ValueError:
        # This format string is invalid
        return False
+
+
def _count_supplied_tokens(args):
    """Counts the number of tokens in an args list.

    The Python log functions allow for special keyword arguments: func,
    exc_info and extra. To handle these cases correctly, we only count
    arguments that aren't keywords.

    Args:
        args (list): AST nodes that are arguments for a log format string.

    Returns:
        int: Number of AST nodes that aren't keywords.
    """
    # bools sum as 0/1, so this counts the non-keyword arguments.
    return sum(not isinstance(arg, astroid.Keyword) for arg in args)
+
+
def register(linter):
    """Auto-registration hook: attach the logging checker to *linter*."""
    checker = LoggingChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/misc.py b/venv/Lib/site-packages/pylint/checkers/misc.py
new file mode 100644
index 0000000..dcf7a3e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/misc.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006, 2009-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Alexandru Coman <fcoman@bitdefender.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 glegoux <gilles.legoux@gmail.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+
+"""Check source code is ascii only or has an encoding declaration (PEP 263)"""
+
+import re
+import tokenize
+
+from pylint.checkers import BaseChecker
+from pylint.constants import OPTION_RGX
+from pylint.interfaces import IRawChecker, ITokenChecker
+from pylint.message import MessagesHandlerMixIn
+
+
class ByIdManagedMessagesChecker(BaseChecker):

    """checks for messages that are enabled or disabled by id instead of symbol."""

    __implements__ = IRawChecker

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "I0023": (
            "%s",
            "use-symbolic-message-instead",
            "Used when a message is enabled or disabled by id.",
        )
    }

    options = ()

    def process_module(self, module):
        """inspect the source file to find messages activated or deactivated by id."""
        for mod_name, msg_id, msg_symbol, lineno, is_disabled in (
            MessagesHandlerMixIn.get_by_id_managed_msgs()
        ):
            if mod_name != module.name:
                continue
            # Same message text as before, with only the verb varying.
            action = "disable" if is_disabled else "enable"
            txt = "Id '{ident}' is used to {action} '{symbol}' message emission".format(
                ident=msg_id, action=action, symbol=msg_symbol
            )
            self.add_message("use-symbolic-message-instead", line=lineno, args=txt)
        # The shared registry must be reset so the next module starts clean.
        MessagesHandlerMixIn.clear_by_id_managed_msgs()
+
+
+class EncodingChecker(BaseChecker):
+
+ """checks for:
+ * warning notes in the code like FIXME, XXX
+ * encoding issues.
+ """
+
+ __implements__ = (IRawChecker, ITokenChecker)
+
+ # configuration section name
+ name = "miscellaneous"
+ msgs = {
+ "W0511": (
+ "%s",
+ "fixme",
+ "Used when a warning note as FIXME or XXX is detected.",
+ )
+ }
+
+ options = (
+ (
+ "notes",
+ {
+ "type": "csv",
+ "metavar": "<comma separated values>",
+ "default": ("FIXME", "XXX", "TODO"),
+ "help": (
+ "List of note tags to take in consideration, "
+ "separated by a comma."
+ ),
+ },
+ ),
+ )
+
def open(self):
    super().open()
    # Build one case-insensitive regex matching any configured note tag
    # ("FIXME", "XXX", ...) appearing after a '#'.
    alternatives = "|".join(map(re.escape, self.config.notes))
    self._fixme_pattern = re.compile(r"#\s*(%s)\b" % alternatives, re.I)
+
def _check_encoding(self, lineno, line, file_encoding):
    """Return *line* decoded with *file_encoding*, reporting failures.

    Args:
        lineno (int): 1-based line number, used when reporting.
        line (bytes): raw line read from the module's byte stream.
        file_encoding (str): encoding declared by (or assumed for) the module.

    Returns:
        str or None: the decoded line, or None when it cannot be decoded.
    """
    try:
        return line.decode(file_encoding)
    except UnicodeDecodeError:
        pass
    except LookupError:
        # BUG FIX: *line* is bytes here (it comes from module.stream()),
        # so the original str comparisons ('line.startswith("#")',
        # '"coding" in line') raised TypeError instead of reporting the
        # unknown encoding. Compare bytes with bytes instead.
        if (
            line.startswith(b"#")
            and b"coding" in line
            and file_encoding.encode("utf-8", "replace") in line
        ):
            self.add_message(
                "syntax-error",
                line=lineno,
                args='Cannot decode using encoding "{}",'
                " bad encoding".format(file_encoding),
            )
    return None
+
def process_module(self, module):
    """inspect the source file to find encoding problem"""
    # Fall back to ASCII when the module declares no encoding.
    encoding = module.file_encoding or "ascii"

    with module.stream() as stream:
        for lineno, raw_line in enumerate(stream, start=1):
            self._check_encoding(lineno, raw_line, encoding)
+
def process_tokens(self, tokens):
    """inspect the source to find fixme problems"""
    if not self.config.notes:
        return
    for token_info in tokens:
        if token_info.type != tokenize.COMMENT:
            continue
        comment_text = token_info.string[1:].lstrip()  # trim '#' and whitespaces

        # handle pylint disable clauses: a pseudo-option that names one
        # of the note tags means "don't warn about this tag here".
        option_match = OPTION_RGX.search(comment_text)
        if option_match:
            try:
                _, value = option_match.group(1).split("=", 1)
                values = [part.strip().upper() for part in value.split(",")]
                if set(values) & set(self.config.notes):
                    continue
            except ValueError:
                self.add_message(
                    "bad-inline-option",
                    args=option_match.group(1).strip(),
                    line=token_info.start[0],
                )
                continue

        # emit warnings if necessary
        match = self._fixme_pattern.search("#" + comment_text.lower())
        if match:
            note = match.group(1)
            self.add_message(
                "fixme",
                col_offset=token_info.string.lower().index(note.lower()),
                args=comment_text,
                line=token_info.start[0],
            )
+
+
def register(linter):
    """required method to auto register this checker"""
    # Registration order matches the original module.
    for checker_class in (EncodingChecker, ByIdManagedMessagesChecker):
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/newstyle.py b/venv/Lib/site-packages/pylint/checkers/newstyle.py
new file mode 100644
index 0000000..46f4e4e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/newstyle.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2006, 2008-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""check for new / old style related problems
+"""
+import astroid
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages, has_known_bases, node_frame_class
+from pylint.interfaces import IAstroidChecker
+
+MSGS = {
+ "E1003": (
+ "Bad first argument %r given to super()",
+ "bad-super-call",
+ "Used when another argument than the current class is given as "
+ "first argument of the super builtin.",
+ )
+}
+
+
class NewStyleConflictChecker(BaseChecker):
    """checks for usage of new style capabilities on old style classes and
    other new/old styles conflicts problems
    * use of property, __slots__, super
    * "super" usage
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "newstyle"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = ()

    @check_messages("bad-super-call")
    def visit_functiondef(self, node):
        """check use of super"""
        # Only methods can meaningfully call super().
        if not node.is_method():
            return
        enclosing_class = node.parent.frame()
        for call_stmt in node.nodes_of_class(astroid.Call):
            # Don't look down in other scopes.
            if node_frame_class(call_stmt) != node_frame_class(node):
                continue

            callee = call_stmt.func
            if not isinstance(callee, astroid.Attribute):
                continue

            receiver = callee.expr
            # Only ``super(...).something(...)`` patterns are of interest.
            is_super_call = (
                isinstance(receiver, astroid.Call)
                and isinstance(receiver.func, astroid.Name)
                and receiver.func.name == "super"
            )
            if not is_super_call:
                continue

            # super() cannot be validated on old-style classes (or when the
            # bases cannot be inferred); skip those.
            if not (
                enclosing_class.newstyle or not has_known_bases(enclosing_class)
            ):
                continue

            # Zero-argument super() is always fine.
            if not receiver.args:
                continue

            first_arg = receiver.args[0]
            # super(type(self), self) recurses forever in derived classes.
            if (
                isinstance(first_arg, astroid.Call)
                and isinstance(first_arg.func, astroid.Name)
                and first_arg.func.name == "type"
            ):
                self.add_message("bad-super-call", node=receiver, args=("type",))
                continue

            # super(self.__class__, self) has the same recursion problem.
            if (
                len(receiver.args) >= 2
                and isinstance(receiver.args[1], astroid.Name)
                and receiver.args[1].name == "self"
                and isinstance(first_arg, astroid.Attribute)
                and first_arg.attrname == "__class__"
            ):
                self.add_message(
                    "bad-super-call", node=receiver, args=("self.__class__",)
                )
                continue

            try:
                super_class = receiver.args and next(
                    receiver.args[0].infer(), None
                )
            except astroid.InferenceError:
                continue

            if enclosing_class is not super_class:
                reported_name = None
                # Prefer the inferred class' name; otherwise fall back to
                # the syntactic name of the first argument.
                if super_class:
                    reported_name = super_class.name
                elif receiver.args and hasattr(receiver.args[0], "name"):
                    reported_name = receiver.args[0].name
                if reported_name:
                    self.add_message(
                        "bad-super-call", node=receiver, args=(reported_name,)
                    )

    # Async methods get the exact same treatment.
    visit_asyncfunctiondef = visit_functiondef
+
+
def register(linter):
    """required method to auto register this checker """
    checker = NewStyleConflictChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/python3.py b/venv/Lib/site-packages/pylint/checkers/python3.py
new file mode 100644
index 0000000..583b1c2
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/python3.py
@@ -0,0 +1,1398 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014-2015 Brett Cannon <brett@python.org>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2015 Cosmin Poieana <cmin@ropython.org>
+# Copyright (c) 2015 Viorel Stirbu <viorels@gmail.com>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016-2017 Roy Williams <roy.williams.iii@gmail.com>
+# Copyright (c) 2016 Roy Williams <rwilliams@lyft.com>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017 Daniel Miller <millerdev@gmail.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 gaurikholkar <f2013002@goa.bits-pilani.ac.in>
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Check Python 2 code for Python 2/3 source-compatible issues."""
+import re
+import tokenize
+from collections import namedtuple
+
+import astroid
+from astroid import bases
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+from pylint.checkers.utils import find_try_except_wrapper_node, node_ignores_exception
+from pylint.constants import WarningScope
+from pylint.interfaces import INFERENCE, INFERENCE_FAILURE
+
# A literal made only of zeros is not an octal literal.
_ZERO = re.compile("^0+$")


def _is_old_octal(literal):
    """Classify *literal* as a Python 2 style octal number.

    Returns True for valid old-style octals such as ``0755``, False for
    all-zero or invalid ones, and None when the literal does not look
    octal at all (callers only rely on truthiness).
    """
    if _ZERO.match(literal):
        return False
    if not re.match(r"0\d+", literal):
        return None
    try:
        int(literal, 8)
    except ValueError:
        return False
    return True
+
+
def _inferred_value_is_dict(value):
    """True when *value* is a dict literal or an instance deriving from dict."""
    return isinstance(value, astroid.Dict) or (
        isinstance(value, astroid.Instance) and "dict" in value.basenames
    )
+
+
def _is_builtin(node):
    """True when *node* names the builtins module (py2 or py3 spelling)."""
    module_name = getattr(node, "name", None)
    return module_name in ("__builtin__", "builtins")
+
+
+_ACCEPTS_ITERATOR = {
+ "iter",
+ "list",
+ "tuple",
+ "sorted",
+ "set",
+ "sum",
+ "any",
+ "all",
+ "enumerate",
+ "dict",
+ "filter",
+ "reversed",
+ "max",
+ "min",
+ "frozenset",
+ "OrderedDict",
+}
+ATTRIBUTES_ACCEPTS_ITERATOR = {"join", "from_iterable"}
+_BUILTIN_METHOD_ACCEPTS_ITERATOR = {
+ "builtins.list.extend",
+ "builtins.dict.update",
+ "builtins.set.update",
+}
+DICT_METHODS = {"items", "keys", "values"}
+
+
def _in_iterating_context(node):
    """Check if the node is being used as an iterator.

    Definition is taken from lib2to3.fixer_util.in_special_context().
    """
    parent = node.parent
    # A call can't be the loop variable, so a 'for' parent means the node
    # is the iterable of the loop.
    if isinstance(parent, astroid.For):
        return True
    # Only the iterator part of a comprehension counts.
    if isinstance(parent, astroid.Comprehension):
        if parent.iter == node:
            return True
    elif isinstance(parent, astroid.Call):
        # Various built-ins can take in an iterable or list and lead to
        # the same value.
        func = parent.func
        if isinstance(func, astroid.Name):
            if func.name in _ACCEPTS_ITERATOR:
                return True
        elif isinstance(func, astroid.Attribute):
            if func.attrname in ATTRIBUTES_ACCEPTS_ITERATOR:
                return True

        inferred = utils.safe_infer(func)
        if inferred:
            if inferred.qname() in _BUILTIN_METHOD_ACCEPTS_ITERATOR:
                return True
            root = inferred.root()
            if root and root.name == "itertools":
                return True
    elif isinstance(parent, astroid.Assign) and isinstance(
        parent.targets[0], (astroid.List, astroid.Tuple)
    ):
        # Unpacking assignment consumes the iterable, so no warning needed.
        if len(parent.targets[0].elts) > 1:
            return True
    elif (
        isinstance(parent, astroid.Compare)
        and len(parent.ops) == 1
        and parent.ops[0][0] == "in"
    ):
        # A containment check iterates the right-hand side.
        return True
    elif isinstance(parent, astroid.YieldFrom):
        # ``yield from`` is iteration by definition.
        return True
    if isinstance(parent, astroid.Starred):
        return True
    return False
+
+
def _is_conditional_import(node):
    """Checks if an import node is in the context of a conditional."""
    conditional_parents = (
        astroid.TryExcept,
        astroid.ExceptHandler,
        astroid.If,
        astroid.IfExp,
    )
    return isinstance(node.parent, conditional_parents)
+
+
+Branch = namedtuple("Branch", ["node", "is_py2_only"])
+
+
+class Python3Checker(checkers.BaseChecker):
+
+ __implements__ = interfaces.IAstroidChecker
+ enabled = False
+ name = "python3"
+
+ msgs = {
+ # Errors for what will syntactically break in Python 3, warnings for
+ # everything else.
+ "E1601": (
+ "print statement used",
+ "print-statement",
+ "Used when a print statement is used "
+ "(`print` is a function in Python 3)",
+ ),
+ "E1602": (
+ "Parameter unpacking specified",
+ "parameter-unpacking",
+ "Used when parameter unpacking is specified for a function"
+ "(Python 3 doesn't allow it)",
+ ),
+ "E1603": (
+ "Implicit unpacking of exceptions is not supported in Python 3",
+ "unpacking-in-except",
+ "Python3 will not allow implicit unpacking of "
+ "exceptions in except clauses. "
+ "See http://www.python.org/dev/peps/pep-3110/",
+ {"old_names": [("W0712", "old-unpacking-in-except")]},
+ ),
+ "E1604": (
+ "Use raise ErrorClass(args) instead of raise ErrorClass, args.",
+ "old-raise-syntax",
+ "Used when the alternate raise syntax "
+ "'raise foo, bar' is used "
+ "instead of 'raise foo(bar)'.",
+ {"old_names": [("W0121", "old-old-raise-syntax")]},
+ ),
+ "E1605": (
+ "Use of the `` operator",
+ "backtick",
+ 'Used when the deprecated "``" (backtick) operator is used '
+ "instead of the str() function.",
+ {"scope": WarningScope.NODE, "old_names": [("W0333", "old-backtick")]},
+ ),
+ "E1609": (
+ "Import * only allowed at module level",
+ "import-star-module-level",
+ "Used when the import star syntax is used somewhere "
+ "else than the module level.",
+ {"maxversion": (3, 0)},
+ ),
+ "W1601": (
+ "apply built-in referenced",
+ "apply-builtin",
+ "Used when the apply built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1602": (
+ "basestring built-in referenced",
+ "basestring-builtin",
+ "Used when the basestring built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1603": (
+ "buffer built-in referenced",
+ "buffer-builtin",
+ "Used when the buffer built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1604": (
+ "cmp built-in referenced",
+ "cmp-builtin",
+ "Used when the cmp built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1605": (
+ "coerce built-in referenced",
+ "coerce-builtin",
+ "Used when the coerce built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1606": (
+ "execfile built-in referenced",
+ "execfile-builtin",
+ "Used when the execfile built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1607": (
+ "file built-in referenced",
+ "file-builtin",
+ "Used when the file built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1608": (
+ "long built-in referenced",
+ "long-builtin",
+ "Used when the long built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1609": (
+ "raw_input built-in referenced",
+ "raw_input-builtin",
+ "Used when the raw_input built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1610": (
+ "reduce built-in referenced",
+ "reduce-builtin",
+ "Used when the reduce built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1611": (
+ "StandardError built-in referenced",
+ "standarderror-builtin",
+ "Used when the StandardError built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1612": (
+ "unicode built-in referenced",
+ "unicode-builtin",
+ "Used when the unicode built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1613": (
+ "xrange built-in referenced",
+ "xrange-builtin",
+ "Used when the xrange built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1614": (
+ "__coerce__ method defined",
+ "coerce-method",
+ "Used when a __coerce__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1615": (
+ "__delslice__ method defined",
+ "delslice-method",
+ "Used when a __delslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1616": (
+ "__getslice__ method defined",
+ "getslice-method",
+ "Used when a __getslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1617": (
+ "__setslice__ method defined",
+ "setslice-method",
+ "Used when a __setslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1618": (
+ "import missing `from __future__ import absolute_import`",
+ "no-absolute-import",
+ "Used when an import is not accompanied by "
+ "``from __future__ import absolute_import`` "
+ "(default behaviour in Python 3)",
+ ),
+ "W1619": (
+ "division w/o __future__ statement",
+ "old-division",
+ "Used for non-floor division w/o a float literal or "
+ "``from __future__ import division`` "
+ "(Python 3 returns a float for int division unconditionally)",
+ ),
+ "W1620": (
+ "Calling a dict.iter*() method",
+ "dict-iter-method",
+ "Used for calls to dict.iterkeys(), itervalues() or iteritems() "
+ "(Python 3 lacks these methods)",
+ ),
+ "W1621": (
+ "Calling a dict.view*() method",
+ "dict-view-method",
+ "Used for calls to dict.viewkeys(), viewvalues() or viewitems() "
+ "(Python 3 lacks these methods)",
+ ),
+ "W1622": (
+ "Called a next() method on an object",
+ "next-method-called",
+ "Used when an object's next() method is called "
+ "(Python 3 uses the next() built-in function)",
+ ),
+ "W1623": (
+ "Assigning to a class's __metaclass__ attribute",
+ "metaclass-assignment",
+ "Used when a metaclass is specified by assigning to __metaclass__ "
+ "(Python 3 specifies the metaclass as a class statement argument)",
+ ),
+ "W1624": (
+ "Indexing exceptions will not work on Python 3",
+ "indexing-exception",
+ "Indexing exceptions will not work on Python 3. Use "
+ "`exception.args[index]` instead.",
+ {"old_names": [("W0713", "old-indexing-exception")]},
+ ),
+ "W1625": (
+ "Raising a string exception",
+ "raising-string",
+ "Used when a string exception is raised. This will not "
+ "work on Python 3.",
+ {"old_names": [("W0701", "old-raising-string")]},
+ ),
+ "W1626": (
+ "reload built-in referenced",
+ "reload-builtin",
+ "Used when the reload built-in function is referenced "
+ "(missing from Python 3). You can use instead imp.reload "
+ "or importlib.reload.",
+ ),
+ "W1627": (
+ "__oct__ method defined",
+ "oct-method",
+ "Used when an __oct__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1628": (
+ "__hex__ method defined",
+ "hex-method",
+ "Used when a __hex__ method is defined (method is not used by Python 3)",
+ ),
+ "W1629": (
+ "__nonzero__ method defined",
+ "nonzero-method",
+ "Used when a __nonzero__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1630": (
+ "__cmp__ method defined",
+ "cmp-method",
+ "Used when a __cmp__ method is defined (method is not used by Python 3)",
+ ),
+ # 'W1631': replaced by W1636
+ "W1632": (
+ "input built-in referenced",
+ "input-builtin",
+ "Used when the input built-in is referenced "
+ "(backwards-incompatible semantics in Python 3)",
+ ),
+ "W1633": (
+ "round built-in referenced",
+ "round-builtin",
+ "Used when the round built-in is referenced "
+ "(backwards-incompatible semantics in Python 3)",
+ ),
+ "W1634": (
+ "intern built-in referenced",
+ "intern-builtin",
+ "Used when the intern built-in is referenced "
+ "(Moved to sys.intern in Python 3)",
+ ),
+ "W1635": (
+ "unichr built-in referenced",
+ "unichr-builtin",
+ "Used when the unichr built-in is referenced (Use chr in Python 3)",
+ ),
+ "W1636": (
+ "map built-in referenced when not iterating",
+ "map-builtin-not-iterating",
+ "Used when the map built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ {"old_names": [("W1631", "implicit-map-evaluation")]},
+ ),
+ "W1637": (
+ "zip built-in referenced when not iterating",
+ "zip-builtin-not-iterating",
+ "Used when the zip built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1638": (
+ "range built-in referenced when not iterating",
+ "range-builtin-not-iterating",
+ "Used when the range built-in is referenced in a non-iterating "
+ "context (returns a range in Python 3)",
+ ),
+ "W1639": (
+ "filter built-in referenced when not iterating",
+ "filter-builtin-not-iterating",
+ "Used when the filter built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1640": (
+ "Using the cmp argument for list.sort / sorted",
+ "using-cmp-argument",
+ "Using the cmp argument for list.sort or the sorted "
+ "builtin should be avoided, since it was removed in "
+ "Python 3. Using either `key` or `functools.cmp_to_key` "
+ "should be preferred.",
+ ),
+ "W1641": (
+ "Implementing __eq__ without also implementing __hash__",
+ "eq-without-hash",
+ "Used when a class implements __eq__ but not __hash__. In Python 2, objects "
+ "get object.__hash__ as the default implementation, in Python 3 objects get "
+ "None as their default __hash__ implementation if they also implement __eq__.",
+ ),
+ "W1642": (
+ "__div__ method defined",
+ "div-method",
+ "Used when a __div__ method is defined. Using `__truediv__` and setting"
+ "__div__ = __truediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1643": (
+ "__idiv__ method defined",
+ "idiv-method",
+ "Used when an __idiv__ method is defined. Using `__itruediv__` and setting"
+ "__idiv__ = __itruediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1644": (
+ "__rdiv__ method defined",
+ "rdiv-method",
+ "Used when a __rdiv__ method is defined. Using `__rtruediv__` and setting"
+ "__rdiv__ = __rtruediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1645": (
+ "Exception.message removed in Python 3",
+ "exception-message-attribute",
+ "Used when the message attribute is accessed on an Exception. Use "
+ "str(exception) instead.",
+ ),
+ "W1646": (
+ "non-text encoding used in str.decode",
+ "invalid-str-codec",
+ "Used when using str.encode or str.decode with a non-text encoding. Use "
+ "codecs module to handle arbitrary codecs.",
+ ),
+ "W1647": (
+ "sys.maxint removed in Python 3",
+ "sys-max-int",
+ "Used when accessing sys.maxint. Use sys.maxsize instead.",
+ ),
+ "W1648": (
+ "Module moved in Python 3",
+ "bad-python3-import",
+ "Used when importing a module that no longer exists in Python 3.",
+ ),
+ "W1649": (
+ "Accessing a deprecated function on the string module",
+ "deprecated-string-function",
+ "Used when accessing a string function that has been deprecated in Python 3.",
+ ),
+ "W1650": (
+ "Using str.translate with deprecated deletechars parameters",
+ "deprecated-str-translate-call",
+ "Used when using the deprecated deletechars parameters from str.translate. Use "
+ "re.sub to remove the desired characters ",
+ ),
+ "W1651": (
+ "Accessing a deprecated function on the itertools module",
+ "deprecated-itertools-function",
+ "Used when accessing a function on itertools that has been removed in Python 3.",
+ ),
+ "W1652": (
+ "Accessing a deprecated fields on the types module",
+ "deprecated-types-field",
+ "Used when accessing a field on types that has been removed in Python 3.",
+ ),
+ "W1653": (
+ "next method defined",
+ "next-method-defined",
+ "Used when a next method is defined that would be an iterator in Python 2 but "
+ "is treated as a normal function in Python 3.",
+ ),
+ "W1654": (
+ "dict.items referenced when not iterating",
+ "dict-items-not-iterating",
+ "Used when dict.items is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1655": (
+ "dict.keys referenced when not iterating",
+ "dict-keys-not-iterating",
+ "Used when dict.keys is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1656": (
+ "dict.values referenced when not iterating",
+ "dict-values-not-iterating",
+ "Used when dict.values is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1657": (
+ "Accessing a removed attribute on the operator module",
+ "deprecated-operator-function",
+ "Used when accessing a field on operator module that has been "
+ "removed in Python 3.",
+ ),
+ "W1658": (
+ "Accessing a removed attribute on the urllib module",
+ "deprecated-urllib-function",
+ "Used when accessing a field on urllib module that has been "
+ "removed or moved in Python 3.",
+ ),
+ "W1659": (
+ "Accessing a removed xreadlines attribute",
+ "xreadlines-attribute",
+ "Used when accessing the xreadlines() function on a file stream, "
+ "removed in Python 3.",
+ ),
+ "W1660": (
+ "Accessing a removed attribute on the sys module",
+ "deprecated-sys-function",
+ "Used when accessing a field on sys module that has been "
+ "removed in Python 3.",
+ ),
+ "W1661": (
+ "Using an exception object that was bound by an except handler",
+ "exception-escape",
+ "Emitted when using an exception, that was bound in an except "
+ "handler, outside of the except handler. On Python 3 these "
+ "exceptions will be deleted once they get out "
+ "of the except handler.",
+ ),
+ "W1662": (
+ "Using a variable that was bound inside a comprehension",
+ "comprehension-escape",
+ "Emitted when using a variable, that was bound in a comprehension "
+ "handler, outside of the comprehension itself. On Python 3 these "
+ "variables will be deleted outside of the "
+ "comprehension.",
+ ),
+ }
+
+ _bad_builtins = frozenset(
+ [
+ "apply",
+ "basestring",
+ "buffer",
+ "cmp",
+ "coerce",
+ "execfile",
+ "file",
+ "input", # Not missing, but incompatible semantics
+ "intern",
+ "long",
+ "raw_input",
+ "reduce",
+ "round", # Not missing, but incompatible semantics
+ "StandardError",
+ "unichr",
+ "unicode",
+ "xrange",
+ "reload",
+ ]
+ )
+
+ _unused_magic_methods = frozenset(
+ [
+ "__coerce__",
+ "__delslice__",
+ "__getslice__",
+ "__setslice__",
+ "__oct__",
+ "__hex__",
+ "__nonzero__",
+ "__cmp__",
+ "__div__",
+ "__idiv__",
+ "__rdiv__",
+ ]
+ )
+
+ _invalid_encodings = frozenset(
+ [
+ "base64_codec",
+ "base64",
+ "base_64",
+ "bz2_codec",
+ "bz2",
+ "hex_codec",
+ "hex",
+ "quopri_codec",
+ "quopri",
+ "quotedprintable",
+ "quoted_printable",
+ "uu_codec",
+ "uu",
+ "zlib_codec",
+ "zlib",
+ "zip",
+ "rot13",
+ "rot_13",
+ ]
+ )
+
+ _bad_python3_module_map = {
+ "sys-max-int": {"sys": frozenset(["maxint"])},
+ "deprecated-itertools-function": {
+ "itertools": frozenset(
+ ["izip", "ifilter", "imap", "izip_longest", "ifilterfalse"]
+ )
+ },
+ "deprecated-types-field": {
+ "types": frozenset(
+ [
+ "EllipsisType",
+ "XRangeType",
+ "ComplexType",
+ "StringType",
+ "TypeType",
+ "LongType",
+ "UnicodeType",
+ "ClassType",
+ "BufferType",
+ "StringTypes",
+ "NotImplementedType",
+ "NoneType",
+ "InstanceType",
+ "FloatType",
+ "SliceType",
+ "UnboundMethodType",
+ "ObjectType",
+ "IntType",
+ "TupleType",
+ "ListType",
+ "DictType",
+ "FileType",
+ "DictionaryType",
+ "BooleanType",
+ "DictProxyType",
+ ]
+ )
+ },
+ "bad-python3-import": frozenset(
+ [
+ "anydbm",
+ "BaseHTTPServer",
+ "__builtin__",
+ "CGIHTTPServer",
+ "ConfigParser",
+ "copy_reg",
+ "cPickle",
+ "cStringIO",
+ "Cookie",
+ "cookielib",
+ "dbhash",
+ "dumbdbm",
+ "dumbdb",
+ "Dialog",
+ "DocXMLRPCServer",
+ "FileDialog",
+ "FixTk",
+ "gdbm",
+ "htmlentitydefs",
+ "HTMLParser",
+ "httplib",
+ "markupbase",
+ "Queue",
+ "repr",
+ "robotparser",
+ "ScrolledText",
+ "SimpleDialog",
+ "SimpleHTTPServer",
+ "SimpleXMLRPCServer",
+ "StringIO",
+ "dummy_thread",
+ "SocketServer",
+ "test.test_support",
+ "Tkinter",
+ "Tix",
+ "Tkconstants",
+ "tkColorChooser",
+ "tkCommonDialog",
+ "Tkdnd",
+ "tkFileDialog",
+ "tkFont",
+ "tkMessageBox",
+ "tkSimpleDialog",
+ "UserList",
+ "UserString",
+ "whichdb",
+ "_winreg",
+ "xmlrpclib",
+ "audiodev",
+ "Bastion",
+ "bsddb185",
+ "bsddb3",
+ "Canvas",
+ "cfmfile",
+ "cl",
+ "commands",
+ "compiler",
+ "dircache",
+ "dl",
+ "exception",
+ "fpformat",
+ "htmllib",
+ "ihooks",
+ "imageop",
+ "imputil",
+ "linuxaudiodev",
+ "md5",
+ "mhlib",
+ "mimetools",
+ "MimeWriter",
+ "mimify",
+ "multifile",
+ "mutex",
+ "new",
+ "popen2",
+ "posixfile",
+ "pure",
+ "rexec",
+ "rfc822",
+ "sets",
+ "sha",
+ "sgmllib",
+ "sre",
+ "stringold",
+ "sunaudio",
+ "sv",
+ "test.testall",
+ "thread",
+ "timing",
+ "toaiff",
+ "user",
+ "urllib2",
+ "urlparse",
+ ]
+ ),
+ "deprecated-string-function": {
+ "string": frozenset(
+ [
+ "maketrans",
+ "atof",
+ "atoi",
+ "atol",
+ "capitalize",
+ "expandtabs",
+ "find",
+ "rfind",
+ "index",
+ "rindex",
+ "count",
+ "lower",
+ "letters",
+ "split",
+ "rsplit",
+ "splitfields",
+ "join",
+ "joinfields",
+ "lstrip",
+ "rstrip",
+ "strip",
+ "swapcase",
+ "translate",
+ "upper",
+ "ljust",
+ "rjust",
+ "center",
+ "zfill",
+ "replace",
+ "lowercase",
+ "letters",
+ "uppercase",
+ "atol_error",
+ "atof_error",
+ "atoi_error",
+ "index_error",
+ ]
+ )
+ },
+ "deprecated-operator-function": {"operator": frozenset({"div"})},
+ "deprecated-urllib-function": {
+ "urllib": frozenset(
+ {
+ "addbase",
+ "addclosehook",
+ "addinfo",
+ "addinfourl",
+ "always_safe",
+ "basejoin",
+ "ftpcache",
+ "ftperrors",
+ "ftpwrapper",
+ "getproxies",
+ "getproxies_environment",
+ "getproxies_macosx_sysconf",
+ "main",
+ "noheaders",
+ "pathname2url",
+ "proxy_bypass",
+ "proxy_bypass_environment",
+ "proxy_bypass_macosx_sysconf",
+ "quote",
+ "quote_plus",
+ "reporthook",
+ "splitattr",
+ "splithost",
+ "splitnport",
+ "splitpasswd",
+ "splitport",
+ "splitquery",
+ "splittag",
+ "splittype",
+ "splituser",
+ "splitvalue",
+ "unquote",
+ "unquote_plus",
+ "unwrap",
+ "url2pathname",
+ "urlcleanup",
+ "urlencode",
+ "urlopen",
+ "urlretrieve",
+ }
+ )
+ },
+ "deprecated-sys-function": {"sys": frozenset({"exc_clear"})},
+ }
+
+ _python_2_tests = frozenset(
+ [
+ astroid.extract_node(x).repr_tree()
+ for x in [
+ "sys.version_info[0] == 2",
+ "sys.version_info[0] < 3",
+ "sys.version_info == (2, 7)",
+ "sys.version_info <= (2, 7)",
+ "sys.version_info < (3, 0)",
+ ]
+ ]
+ )
+
    def __init__(self, *args, **kwargs):
        """Initialize per-module state and the branch stack used to suppress
        warnings inside Python-2-only branches."""
        # Set when `from __future__ import division` is seen in the module.
        self._future_division = False
        # Set when `from __future__ import absolute_import` is seen (or the
        # message was already emitted once for this module).
        self._future_absolute_import = False
        # Modules already reported by _warn_if_deprecated (avoid duplicates).
        self._modules_warned_about = set()
        # Stack of Branch(node, is_py2_only) for enclosing if/ifexp nodes.
        self._branch_stack = []
        super(Python3Checker, self).__init__(*args, **kwargs)
+
    # pylint: disable=keyword-arg-before-vararg, arguments-differ
    def add_message(self, msg_id, always_warn=False, *args, **kwargs):
        """Emit `msg_id` unless we are inside a branch guarded by a
        Python-2-only test (e.g. ``if six.PY2:``).

        ``always_warn=True`` bypasses that suppression (used for constructs
        that are syntax errors on py3 regardless, such as print statements).
        """
        if always_warn or not (
            self._branch_stack and self._branch_stack[-1].is_py2_only
        ):
            super(Python3Checker, self).add_message(msg_id, *args, **kwargs)
+
+ def _is_py2_test(self, node):
+ if isinstance(node.test, astroid.Attribute) and isinstance(
+ node.test.expr, astroid.Name
+ ):
+ if node.test.expr.name == "six" and node.test.attrname == "PY2":
+ return True
+ elif (
+ isinstance(node.test, astroid.Compare)
+ and node.test.repr_tree() in self._python_2_tests
+ ):
+ return True
+ return False
+
    def visit_if(self, node):
        # Record whether this `if` is guarded by a Python-2-only test so
        # add_message() can suppress warnings emitted inside it.
        self._branch_stack.append(Branch(node, self._is_py2_test(node)))

    def leave_if(self, node):
        # Invariant: branches nest strictly, so the top of the stack must be
        # the node we are leaving.
        assert self._branch_stack.pop().node == node

    def visit_ifexp(self, node):
        # Conditional expressions get the same py2-only tracking as `if`.
        self._branch_stack.append(Branch(node, self._is_py2_test(node)))

    def leave_ifexp(self, node):
        assert self._branch_stack.pop().node == node
+
    def visit_module(self, node):  # pylint: disable=unused-argument
        """Clear checker state after previous module."""
        # __future__ imports are per-module, so the flags reset here.
        self._future_division = False
        self._future_absolute_import = False
+
+ def visit_functiondef(self, node):
+ if node.is_method():
+ if node.name in self._unused_magic_methods:
+ method_name = node.name
+ if node.name.startswith("__"):
+ method_name = node.name[2:-2]
+ self.add_message(method_name + "-method", node=node)
+ elif node.name == "next":
+ # If there is a method named `next` declared, if it is invokable
+ # with zero arguments then it implements the Iterator protocol.
+ # This means if the method is an instance method or a
+ # classmethod 1 argument should cause a failure, if it is a
+ # staticmethod 0 arguments should cause a failure.
+ failing_arg_count = 1
+ if utils.decorated_with(node, [bases.BUILTINS + ".staticmethod"]):
+ failing_arg_count = 0
+ if len(node.args.args) == failing_arg_count:
+ self.add_message("next-method-defined", node=node)
+
+ @utils.check_messages("parameter-unpacking")
+ def visit_arguments(self, node):
+ for arg in node.args:
+ if isinstance(arg, astroid.Tuple):
+ self.add_message("parameter-unpacking", node=arg)
+
    @utils.check_messages("comprehension-escape")
    def visit_listcomp(self, node):
        """Flag names bound by a list comprehension that are read after it.

        On Python 2 a list comprehension's loop variable leaks into the
        enclosing scope; on Python 3 it does not, so such reads break.
        """
        # Loop-variable names bound by this comprehension.
        names = {
            generator.target.name
            for generator in node.generators
            if isinstance(generator.target, astroid.AssignName)
        }
        scope = node.parent.scope()
        scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef)
        # If the same name is assigned again after the comprehension, later
        # reads refer to that assignment and are fine on Python 3.
        has_redefined_assign_name = any(
            assign_name
            for assign_name in scope.nodes_of_class(
                astroid.AssignName, skip_klass=astroid.FunctionDef
            )
            if assign_name.name in names and assign_name.lineno > node.lineno
        )
        if has_redefined_assign_name:
            return

        # Report each escaping name at most once.
        emitted_for_names = set()
        scope_names = list(scope_names)
        for scope_name in scope_names:
            if (
                scope_name.name not in names
                or scope_name.lineno <= node.lineno
                or scope_name.name in emitted_for_names
                or scope_name.scope() == node  # use inside the comprehension is fine
            ):
                continue

            emitted_for_names.add(scope_name.name)
            self.add_message("comprehension-escape", node=scope_name)
+
    def visit_name(self, node):
        """Detect when a "bad" built-in is referenced."""
        found_node, _ = node.lookup(node.name)
        # Only warn when the name actually resolves to the builtins scope,
        # i.e. it is not shadowed by a local or module-level definition.
        if not _is_builtin(found_node):
            return
        if node.name not in self._bad_builtins:
            return
        # Skip references guarded by try/except — a common py2/py3
        # compatibility idiom (e.g. ``except NameError``).
        if node_ignores_exception(node) or isinstance(
            find_try_except_wrapper_node(node), astroid.ExceptHandler
        ):
            return

        message = node.name.lower() + "-builtin"
        self.add_message(message, node=node)
+
    @utils.check_messages("print-statement")
    def visit_print(self, node):
        # A py2 `print` statement is never valid py3 syntax, so warn even
        # inside a Python-2-only branch (always_warn bypasses suppression).
        self.add_message("print-statement", node=node, always_warn=True)
+
    def _warn_if_deprecated(self, node, module, attributes, report_on_modules=True):
        """Emit the matching deprecation message for `module`/`attributes`.

        Values in `_bad_python3_module_map` are either a frozenset of module
        names (the whole module is gone) or a dict mapping a module name to
        its removed attributes.
        """
        for message, module_map in self._bad_python3_module_map.items():
            if module in module_map and module not in self._modules_warned_about:
                if isinstance(module_map, frozenset):
                    # Whole module removed; warn at most once per module.
                    if report_on_modules:
                        self._modules_warned_about.add(module)
                        self.add_message(message, node=node)
                elif attributes and module_map[module].intersection(attributes):
                    # Only specific attributes of the module are deprecated.
                    self.add_message(message, node=node)
+
    def visit_importfrom(self, node):
        """Check a ``from X import Y`` statement.

        Tracks __future__ flags, emits no-absolute-import, warns on
        removed/moved modules, and flags non-module-level star imports.
        """
        if node.modname == "__future__":
            for name, _ in node.names:
                if name == "division":
                    self._future_division = True
                elif name == "absolute_import":
                    self._future_absolute_import = True
        else:
            if not self._future_absolute_import:
                if self.linter.is_message_enabled("no-absolute-import"):
                    self.add_message("no-absolute-import", node=node)
                    # Report only once per module.
                    self._future_absolute_import = True
            # Relative imports (node.level) and conditional imports are
            # assumed to be deliberate compatibility code.
            if not _is_conditional_import(node) and not node.level:
                self._warn_if_deprecated(node, node.modname, {x[0] for x in node.names})

        if node.names[0][0] == "*":
            if self.linter.is_message_enabled("import-star-module-level"):
                if not isinstance(node.scope(), astroid.Module):
                    self.add_message("import-star-module-level", node=node)
+
+ def visit_import(self, node):
+ if not self._future_absolute_import:
+ if self.linter.is_message_enabled("no-absolute-import"):
+ self.add_message("no-absolute-import", node=node)
+ self._future_absolute_import = True
+ if not _is_conditional_import(node):
+ for name, _ in node.names:
+ self._warn_if_deprecated(node, name, None)
+
+ @utils.check_messages("metaclass-assignment")
+ def visit_classdef(self, node):
+ if "__metaclass__" in node.locals:
+ self.add_message("metaclass-assignment", node=node)
+ locals_and_methods = set(node.locals).union(x.name for x in node.mymethods())
+ if "__eq__" in locals_and_methods and "__hash__" not in locals_and_methods:
+ self.add_message("eq-without-hash", node=node)
+
    @utils.check_messages("old-division")
    def visit_binop(self, node):
        """Flag ``/`` between (inferred) ints without ``from __future__
        import division``: py2 floors, py3 returns a float."""
        if not self._future_division and node.op == "/":
            for arg in (node.left, node.right):
                inferred = utils.safe_infer(arg)
                # If we can infer the object and that object is not an int, bail out.
                if inferred and not (
                    (
                        isinstance(inferred, astroid.Const)
                        and isinstance(inferred.value, int)
                    )
                    or (
                        isinstance(inferred, astroid.Instance)
                        and inferred.name == "int"
                    )
                ):
                    break
            else:
                # for/else: reached only when no operand was proven non-int.
                self.add_message("old-division", node=node)
+
    def _check_cmp_argument(self, node):
        """Flag a ``cmp=`` keyword passed to list.sort() or sorted() —
        removed in Python 3 (use ``key`` or ``functools.cmp_to_key``)."""
        kwargs = []
        if isinstance(node.func, astroid.Attribute) and node.func.attrname == "sort":
            inferred = utils.safe_infer(node.func.expr)
            if not inferred:
                return

            # Only builtin lists — other objects may define their own sort().
            builtins_list = "{}.list".format(bases.BUILTINS)
            if isinstance(inferred, astroid.List) or inferred.qname() == builtins_list:
                kwargs = node.keywords

        elif isinstance(node.func, astroid.Name) and node.func.name == "sorted":
            inferred = utils.safe_infer(node.func)
            if not inferred:
                return

            # Only the builtin sorted(), not a shadowing definition.
            builtins_sorted = "{}.sorted".format(bases.BUILTINS)
            if inferred.qname() == builtins_sorted:
                kwargs = node.keywords

        for kwarg in kwargs or []:
            if kwarg.arg == "cmp":
                self.add_message("using-cmp-argument", node=node)
                return
+
+ @staticmethod
+ def _is_constant_string_or_name(node):
+ if isinstance(node, astroid.Const):
+ return isinstance(node.value, str)
+ return isinstance(node, astroid.Name)
+
+ @staticmethod
+ def _is_none(node):
+ return isinstance(node, astroid.Const) and node.value is None
+
+ @staticmethod
+ def _has_only_n_positional_args(node, number_of_args):
+ return len(node.args) == number_of_args and all(node.args) and not node.keywords
+
    @staticmethod
    def _could_be_string(inferred_types):
        """Return a confidence level if every inferred type may be a str.

        Returns INFERENCE when all inferred types are string constants,
        INFERENCE_FAILURE when inference was empty or hit Uninferable, and
        None as soon as a definitely-non-string type is seen (caller should
        then not warn).
        """
        confidence = INFERENCE if inferred_types else INFERENCE_FAILURE
        for inferred_type in inferred_types:
            if inferred_type is astroid.Uninferable:
                confidence = INFERENCE_FAILURE
            elif not (
                isinstance(inferred_type, astroid.Const)
                and isinstance(inferred_type.value, str)
            ):
                # Definitely not a string.
                return None
        return confidence
+
    def visit_call(self, node):
        """Dispatch the per-call py3 checks: ``cmp=`` argument, deprecated
        module attributes, dict iter/view methods, str codec and translate
        misuse, ``.next()`` calls, and non-iterating filter/map/range/zip."""
        self._check_cmp_argument(node)

        if isinstance(node.func, astroid.Attribute):
            # Method-style call: infer the receiver to decide which checks apply.
            inferred_types = set()
            try:
                for inferred_receiver in node.func.expr.infer():
                    if inferred_receiver is astroid.Uninferable:
                        continue
                    inferred_types.add(inferred_receiver)
                    if isinstance(inferred_receiver, astroid.Module):
                        self._warn_if_deprecated(
                            node,
                            inferred_receiver.name,
                            {node.func.attrname},
                            report_on_modules=False,
                        )
                    if (
                        _inferred_value_is_dict(inferred_receiver)
                        and node.func.attrname in DICT_METHODS
                    ):
                        # dict.keys()/values()/items() return views on py3.
                        if not _in_iterating_context(node):
                            checker = "dict-{}-not-iterating".format(node.func.attrname)
                            self.add_message(checker, node=node)
            except astroid.InferenceError:
                pass
            if node.args:
                is_str_confidence = self._could_be_string(inferred_types)
                if is_str_confidence:
                    if (
                        node.func.attrname in ("encode", "decode")
                        and len(node.args) >= 1
                        and node.args[0]
                    ):
                        first_arg = node.args[0]
                        self._validate_encoding(first_arg, node)
                    if (
                        node.func.attrname == "translate"
                        and self._has_only_n_positional_args(node, 2)
                        and self._is_none(node.args[0])
                        and self._is_constant_string_or_name(node.args[1])
                    ):
                        # The above statement looking for calls of the form:
                        #
                        # foo.translate(None, 'abc123')
                        #
                        # or
                        #
                        # foo.translate(None, some_variable)
                        #
                        # This check is somewhat broad and _may_ have some false positives, but
                        # after checking several large codebases it did not have any false
                        # positives while finding several real issues. This call pattern seems
                        # rare enough that the trade off is worth it.
                        self.add_message(
                            "deprecated-str-translate-call",
                            node=node,
                            confidence=is_str_confidence,
                        )
                return
            if node.keywords:
                return
            # Zero-argument method calls only, from here on.
            if node.func.attrname == "next":
                # py3 spells this next(obj).
                self.add_message("next-method-called", node=node)
            else:
                if node.func.attrname in ("iterkeys", "itervalues", "iteritems"):
                    self.add_message("dict-iter-method", node=node)
                elif node.func.attrname in ("viewkeys", "viewvalues", "viewitems"):
                    self.add_message("dict-view-method", node=node)
        elif isinstance(node.func, astroid.Name):
            found_node = node.func.lookup(node.func.name)[0]
            # Only the real builtins, not shadowing definitions.
            if _is_builtin(found_node):
                if node.func.name in ("filter", "map", "range", "zip"):
                    if not _in_iterating_context(node):
                        checker = "{}-builtin-not-iterating".format(node.func.name)
                        self.add_message(checker, node=node)
                if node.func.name == "open" and node.keywords:
                    kwargs = node.keywords
                    for kwarg in kwargs or []:
                        if kwarg.arg == "encoding":
                            self._validate_encoding(kwarg.value, node)
                            break
+
+ def _validate_encoding(self, encoding, node):
+ if isinstance(encoding, astroid.Const):
+ value = encoding.value
+ if value in self._invalid_encodings:
+ self.add_message("invalid-str-codec", node=node)
+
    @utils.check_messages("indexing-exception")
    def visit_subscript(self, node):
        """ Look for indexing exceptions. """
        try:
            for inferred in node.value.infer():
                if not isinstance(inferred, astroid.Instance):
                    continue
                # Only standard exception instances: py2 allowed exc[0],
                # py3 requires exc.args[0].
                if utils.inherit_from_std_ex(inferred):
                    self.add_message("indexing-exception", node=node)
        except astroid.InferenceError:
            return
+
    def visit_assignattr(self, node):
        # `x.attr += ...` reads the attribute first, so run the attribute
        # checks on augmented assignments too.
        if isinstance(node.assign_type(), astroid.AugAssign):
            self.visit_attribute(node)

    def visit_delattr(self, node):
        # `del x.attr` also names the attribute; run the same checks.
        self.visit_attribute(node)
+
    @utils.check_messages("exception-message-attribute", "xreadlines-attribute")
    def visit_attribute(self, node):
        """Look for removed attributes"""
        if node.attrname == "xreadlines":
            self.add_message("xreadlines-attribute", node=node)
            return

        exception_message = "message"
        try:
            for inferred in node.expr.infer():
                if isinstance(inferred, astroid.Instance) and utils.inherit_from_std_ex(
                    inferred
                ):
                    if node.attrname == exception_message:

                        # Exceptions with .message clearly defined are an exception
                        if exception_message in inferred.instance_attrs:
                            continue
                        self.add_message("exception-message-attribute", node=node)
                if isinstance(inferred, astroid.Module):
                    # Attribute access on a module object: check for removed
                    # module members (e.g. sys.maxint).
                    self._warn_if_deprecated(
                        node, inferred.name, {node.attrname}, report_on_modules=False
                    )
        except astroid.InferenceError:
            return
+
    @utils.check_messages("unpacking-in-except", "comprehension-escape")
    def visit_excepthandler(self, node):
        """Visit an except handler block and check for exception unpacking.

        Also flags reads of the bound exception name after the handler: on
        Python 3 the name is deleted when the handler exits.
        """

        def _is_used_in_except_block(node):
            # True when `node` sits inside some except handler other than
            # the one whose bound name we are tracking.
            scope = node.scope()
            current = node
            while (
                current
                and current != scope
                and not isinstance(current, astroid.ExceptHandler)
            ):
                current = current.parent
            return isinstance(current, astroid.ExceptHandler) and current.type != node

        # `except E, (a, b):` unpacking was removed in Python 3.
        if isinstance(node.name, (astroid.Tuple, astroid.List)):
            self.add_message("unpacking-in-except", node=node)
            return

        if not node.name:
            return

        # Find any names
        scope = node.parent.scope()
        scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef)
        scope_names = list(scope_names)
        # Reads of the bound name after the handler, outside except blocks.
        potential_leaked_names = [
            scope_name
            for scope_name in scope_names
            if scope_name.name == node.name.name
            and scope_name.lineno > node.lineno
            and not _is_used_in_except_block(scope_name)
        ]
        reassignments_for_same_name = {
            assign_name.lineno
            for assign_name in scope.nodes_of_class(
                astroid.AssignName, skip_klass=astroid.FunctionDef
            )
            if assign_name.name == node.name.name
        }
        for leaked_name in potential_leaked_names:
            # A rebinding between the handler and the read makes the later
            # read safe on Python 3.
            if any(
                node.lineno < elem < leaked_name.lineno
                for elem in reassignments_for_same_name
            ):
                continue
            self.add_message("exception-escape", node=leaked_name)
+
    @utils.check_messages("backtick")
    def visit_repr(self, node):
        # Backtick repr (`x`) was removed in Python 3; use repr(x).
        self.add_message("backtick", node=node)
+
    @utils.check_messages("raising-string", "old-raise-syntax")
    def visit_raise(self, node):
        """Visit a raise statement and check for raising
        strings or old-raise-syntax.
        """

        # Ignore empty raise.
        if node.exc is None:
            return
        expr = node.exc
        # Check the literal expression first; infer only if inconclusive.
        if self._check_raise_value(node, expr):
            return
        try:
            value = next(astroid.unpack_infer(expr))
        except astroid.InferenceError:
            return
        self._check_raise_value(node, value)
+
+ def _check_raise_value(self, node, expr):
+ if isinstance(expr, astroid.Const):
+ value = expr.value
+ if isinstance(value, str):
+ self.add_message("raising-string", node=node)
+ return True
+ return None
+
+
class Python3TokenChecker(checkers.BaseTokenChecker):
    """Token-stream checks for literal/operator syntax removed in Python 3."""

    __implements__ = interfaces.ITokenChecker
    name = "python3"
    enabled = False

    msgs = {
        "E1606": (
            "Use of long suffix",
            "long-suffix",
            'Used when "l" or "L" is used to mark a long integer. '
            "This will not work in Python 3, since `int` and `long` "
            "types have merged.",
            {"maxversion": (3, 0)},
        ),
        "E1607": (
            "Use of the <> operator",
            "old-ne-operator",
            'Used when the deprecated "<>" operator is used instead '
            'of "!=". This is removed in Python 3.',
            {"maxversion": (3, 0), "old_names": [("W0331", "old-old-ne-operator")]},
        ),
        "E1608": (
            "Use of old octal literal",
            "old-octal-literal",
            "Used when encountering the old octal syntax, "
            "removed in Python 3. To use the new syntax, "
            "prepend 0o on the number.",
            {"maxversion": (3, 0)},
        ),
        "E1610": (
            "Non-ascii bytes literals not supported in 3.x",
            "non-ascii-bytes-literal",
            "Used when non-ascii bytes literals are found in a program. "
            "They are no longer supported in Python 3.",
            {"maxversion": (3, 0)},
        ),
    }

    def process_tokens(self, tokens):
        """Scan the raw token stream for py2-only literals and operators."""
        for tok_type, token, start, _, _ in tokens:
            if tok_type == tokenize.NUMBER:
                if token.lower().endswith("l"):
                    # This has a different semantic than lowercase-l-suffix.
                    self.add_message("long-suffix", line=start[0])
                elif _is_old_octal(token):
                    self.add_message("old-octal-literal", line=start[0])
            if token == "<>":
                self.add_message("old-ne-operator", line=start[0])
            if tok_type == tokenize.STRING and token.startswith("b"):
                if any(ord(char) > 127 for char in token):
                    self.add_message("non-ascii-bytes-literal", line=start[0])
+
+
def register(linter):
    """Required entry point: auto-register both python3 checkers."""
    linter.register_checker(Python3Checker(linter))
    linter.register_checker(Python3TokenChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/raw_metrics.py b/venv/Lib/site-packages/pylint/checkers/raw_metrics.py
new file mode 100644
index 0000000..0564398
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/raw_metrics.py
@@ -0,0 +1,119 @@
+# Copyright (c) 2007, 2010, 2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013 Google, Inc.
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+Raw metrics checker
+"""
+
+import tokenize
+from typing import Any
+
+from pylint.checkers import BaseTokenChecker
+from pylint.exceptions import EmptyReportError
+from pylint.interfaces import ITokenChecker
+from pylint.reporters.ureports.nodes import Table
+
+
def report_raw_stats(sect, stats, _):
    """Build the raw-metrics report table: the share of code, docstring,
    comment and empty lines out of all analyzed lines.

    Raises EmptyReportError when no lines were analyzed.
    """
    total_lines = stats["total_lines"]
    if not total_lines:
        raise EmptyReportError()
    sect.description = "%s lines have been analyzed" % total_lines
    # Flat tuple of cells; Table lays it out as rows of `cols` columns.
    cells = ("type", "number", "%", "previous", "difference")
    for node_type in ("code", "docstring", "comment", "empty"):
        line_count = stats[node_type + "_lines"]
        share = float(line_count * 100) / total_lines
        cells += (node_type, str(line_count), "%.2f" % share, "NC", "NC")
    sect.append(Table(children=cells, cols=5, rheaders=1))
+
+
class RawMetricsChecker(BaseTokenChecker):
    """does not check anything but gives some raw metrics :
    * total number of lines
    * total number of code lines
    * total number of docstring lines
    * total number of comments lines
    * total number of empty lines
    """

    __implements__ = (ITokenChecker,)

    # configuration section name
    name = "metrics"
    # configuration options
    options = ()
    # messages (none: this checker only collects statistics)
    msgs = {}  # type: Any
    # reports
    reports = (("RP0701", "Raw metrics", report_raw_stats),)

    def __init__(self, linter):
        BaseTokenChecker.__init__(self, linter)
        # Shared stats dict; populated in open() via the linter.
        self.stats = None

    def open(self):
        """init statistics"""
        self.stats = self.linter.add_stats(
            total_lines=0,
            code_lines=0,
            empty_lines=0,
            docstring_lines=0,
            comment_lines=0,
        )

    def process_tokens(self, tokens):
        """update stats"""
        # Walk the token stream line by line: get_type() returns the index
        # of the next line's first token, the number of physical lines
        # consumed, and the stats key to credit them to.
        i = 0
        tokens = list(tokens)
        while i < len(tokens):
            i, lines_number, line_type = get_type(tokens, i)
            self.stats["total_lines"] += lines_number
            self.stats[line_type] += lines_number
+
+
# Token types that carry no content of their own and never decide a line's
# classification.
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)


def get_type(tokens, start_index):
    """return the line type : docstring, comment, code, empty"""
    index = start_index
    start_row = tokens[index][2][0]
    end_pos = tokens[index][2]
    line_type = None
    # Consume every token that begins on the same physical line; the first
    # non-JUNK token decides the line's classification.
    while index < len(tokens) and tokens[index][2][0] == start_row:
        end_pos = tokens[index][3]
        if line_type is None:
            kind = tokens[index][0]
            if kind == tokenize.STRING:
                line_type = "docstring_lines"
            elif kind == tokenize.COMMENT:
                line_type = "comment_lines"
            elif kind not in JUNK:
                line_type = "code_lines"
        index += 1
    if line_type is None:
        line_type = "empty_lines"
    elif index < len(tokens) and tokens[index][0] == tokenize.NEWLINE:
        # Swallow the trailing logical NEWLINE so it is not double-counted.
        index += 1
    # A multi-line token (e.g. docstring) spans end_pos[0] - start_row + 1
    # physical lines.
    return index, end_pos[0] - start_row + 1, line_type
+
+
def register(linter):
    """ required method to auto register this checker """
    # Entry point called by pylint's plugin/checker loader.
    linter.register_checker(RawMetricsChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/refactoring.py b/venv/Lib/site-packages/pylint/checkers/refactoring.py
new file mode 100644
index 0000000..2831343
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/refactoring.py
@@ -0,0 +1,1510 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
+# Copyright (c) 2017 Łukasz Sznuk <ls@rdprojekt.pl>
+# Copyright (c) 2017 Alex Hearn <alex.d.hearn@gmail.com>
+# Copyright (c) 2017 Antonio Ossa <aaossa@uc.cl>
+# Copyright (c) 2018 Konstantin Manna <Konstantin@Manna.uno>
+# Copyright (c) 2018 Konstantin <Github@pheanex.de>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Matej Marušák <marusak.matej@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Mr. Senko <atodorov@mrsenko.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Looks for code which can be refactored."""
+import builtins
+import collections
+import itertools
+import tokenize
+from functools import reduce
+
+import astroid
+from astroid import decorators
+
+from pylint import checkers, interfaces
+from pylint import utils as lint_utils
+from pylint.checkers import utils
+
+KNOWN_INFINITE_ITERATORS = {"itertools.count"}
+BUILTIN_EXIT_FUNCS = frozenset(("quit", "exit"))
+
+
+def _if_statement_is_always_returning(if_node, returning_node_class):
+ for node in if_node.body:
+ if isinstance(node, returning_node_class):
+ return True
+ return False
+
+
def _is_len_call(node):
    """Checks if node is len(SOMETHING)."""
    if not isinstance(node, astroid.Call):
        return False
    func = node.func
    return isinstance(func, astroid.Name) and func.name == "len"
+
+
def _is_constant_zero(node):
    """Return True when *node* is the constant literal ``0``."""
    if not isinstance(node, astroid.Const):
        return False
    return node.value == 0
+
+
def _node_is_test_condition(node):
    """ Checks if node is an if, while, assert or if expression statement."""
    condition_node_classes = (astroid.If, astroid.While, astroid.Assert, astroid.IfExp)
    return isinstance(node, condition_node_classes)
+
+
+def _is_trailing_comma(tokens, index):
+ """Check if the given token is a trailing comma
+
+ :param tokens: Sequence of modules tokens
+ :type tokens: list[tokenize.TokenInfo]
+ :param int index: Index of token under check in tokens
+ :returns: True if the token is a comma which trails an expression
+ :rtype: bool
+ """
+ token = tokens[index]
+ if token.exact_type != tokenize.COMMA:
+ return False
+ # Must have remaining tokens on the same line such as NEWLINE
+ left_tokens = itertools.islice(tokens, index + 1, None)
+ same_line_remaining_tokens = list(
+ itertools.takewhile(
+ lambda other_token, _token=token: other_token.start[0] == _token.start[0],
+ left_tokens,
+ )
+ )
+ # Note: If the newline is tokenize.NEWLINE and not tokenize.NL
+ # then the newline denotes the end of expression
+ is_last_element = all(
+ other_token.type in (tokenize.NEWLINE, tokenize.COMMENT)
+ for other_token in same_line_remaining_tokens
+ )
+ if not same_line_remaining_tokens or not is_last_element:
+ return False
+
+ def get_curline_index_start():
+ """Get the index denoting the start of the current line"""
+ for subindex, token in enumerate(reversed(tokens[:index])):
+ # See Lib/tokenize.py and Lib/token.py in cpython for more info
+ if token.type in (tokenize.NEWLINE, tokenize.NL):
+ return index - subindex
+ return 0
+
+ curline_start = get_curline_index_start()
+ expected_tokens = {"return", "yield"}
+ for prevtoken in tokens[curline_start:index]:
+ if "=" in prevtoken.string or prevtoken.string in expected_tokens:
+ return True
+ return False
+
+
+class RefactoringChecker(checkers.BaseTokenChecker):
+ """Looks for code which can be refactored
+
+ This checker also mixes the astroid and the token approaches
+ in order to create knowledge about whether an "else if" node
+ is a true "else if" node, or an "elif" node.
+ """
+
+ __implements__ = (interfaces.ITokenChecker, interfaces.IAstroidChecker)
+
+ name = "refactoring"
+
+ msgs = {
+ "R1701": (
+ "Consider merging these isinstance calls to isinstance(%s, (%s))",
+ "consider-merging-isinstance",
+ "Used when multiple consecutive isinstance calls can be merged into one.",
+ ),
+ "R1706": (
+ "Consider using ternary (%s)",
+ "consider-using-ternary",
+ "Used when one of known pre-python 2.5 ternary syntax is used.",
+ ),
+ "R1709": (
+ "Boolean expression may be simplified to %s",
+ "simplify-boolean-expression",
+ "Emitted when redundant pre-python 2.5 ternary syntax is used.",
+ ),
+ "R1702": (
+ "Too many nested blocks (%s/%s)",
+ "too-many-nested-blocks",
+ "Used when a function or a method has too many nested "
+ "blocks. This makes the code less understandable and "
+ "maintainable.",
+ {"old_names": [("R0101", "old-too-many-nested-blocks")]},
+ ),
+ "R1703": (
+ "The if statement can be replaced with %s",
+ "simplifiable-if-statement",
+ "Used when an if statement can be replaced with 'bool(test)'. ",
+ {"old_names": [("R0102", "old-simplifiable-if-statement")]},
+ ),
+ "R1704": (
+ "Redefining argument with the local name %r",
+ "redefined-argument-from-local",
+ "Used when a local name is redefining an argument, which might "
+ "suggest a potential error. This is taken in account only for "
+ "a handful of name binding operations, such as for iteration, "
+ "with statement assignment and exception handler assignment.",
+ ),
+ "R1705": (
+ 'Unnecessary "%s" after "return"',
+ "no-else-return",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a return statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "return statement.",
+ ),
+ "R1707": (
+ "Disallow trailing comma tuple",
+ "trailing-comma-tuple",
+ "In Python, a tuple is actually created by the comma symbol, "
+ "not by the parentheses. Unfortunately, one can actually create a "
+ "tuple by misplacing a trailing comma, which can lead to potential "
+ "weird bugs in your code. You should always use parentheses "
+ "explicitly for creating a tuple.",
+ ),
+ "R1708": (
+ "Do not raise StopIteration in generator, use return statement instead",
+ "stop-iteration-return",
+ "According to PEP479, the raise of StopIteration to end the loop of "
+ "a generator may lead to hard to find bugs. This PEP specify that "
+ "raise StopIteration has to be replaced by a simple return statement",
+ ),
+ "R1710": (
+ "Either all return statements in a function should return an expression, "
+ "or none of them should.",
+ "inconsistent-return-statements",
+ "According to PEP8, if any return statement returns an expression, "
+ "any return statements where no value is returned should explicitly "
+ "state this as return None, and an explicit return statement "
+ "should be present at the end of the function (if reachable)",
+ ),
+ "R1711": (
+ "Useless return at end of function or method",
+ "useless-return",
+ 'Emitted when a single "return" or "return None" statement is found '
+ "at the end of function or method definition. This statement can safely be "
+ "removed because Python will implicitly return None",
+ ),
+ "R1712": (
+ "Consider using tuple unpacking for swapping variables",
+ "consider-swap-variables",
+ "You do not have to use a temporary variable in order to "
+ 'swap variables. Using "tuple unpacking" to directly swap '
+ "variables makes the intention more clear.",
+ ),
+ "R1713": (
+ "Consider using str.join(sequence) for concatenating "
+ "strings from an iterable",
+ "consider-using-join",
+ "Using str.join(sequence) is faster, uses less memory "
+ "and increases readability compared to for-loop iteration.",
+ ),
+ "R1714": (
+ 'Consider merging these comparisons with "in" to %r',
+ "consider-using-in",
+ "To check if a variable is equal to one of many values,"
+ 'combine the values into a tuple and check if the variable is contained "in" it '
+ "instead of checking for equality against each of the values."
+ "This is faster and less verbose.",
+ ),
+ "R1715": (
+ "Consider using dict.get for getting values from a dict "
+ "if a key is present or a default if not",
+ "consider-using-get",
+ "Using the builtin dict.get for getting a value from a dictionary "
+ "if a key is present or a default if not, is simpler and considered "
+ "more idiomatic, although sometimes a bit slower",
+ ),
+ "R1716": (
+ "Simplify chained comparison between the operands",
+ "chained-comparison",
+ "This message is emitted when pylint encounters boolean operation like"
+ '"a < b and b < c", suggesting instead to refactor it to "a < b < c"',
+ ),
+ "R1717": (
+ "Consider using a dictionary comprehension",
+ "consider-using-dict-comprehension",
+ "Emitted when we detect the creation of a dictionary "
+ "using the dict() callable and a transient list. "
+ "Although there is nothing syntactically wrong with this code, "
+ "it is hard to read and can be simplified to a dict comprehension."
+ "Also it is faster since you don't need to create another "
+ "transient list",
+ ),
+ "R1718": (
+ "Consider using a set comprehension",
+ "consider-using-set-comprehension",
+ "Although there is nothing syntactically wrong with this code, "
+ "it is hard to read and can be simplified to a set comprehension."
+ "Also it is faster since you don't need to create another "
+ "transient list",
+ ),
+ "R1719": (
+ "The if expression can be replaced with %s",
+ "simplifiable-if-expression",
+ "Used when an if expression can be replaced with 'bool(test)'. ",
+ ),
+ "R1720": (
+ 'Unnecessary "%s" after "raise"',
+ "no-else-raise",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a raise statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "raise statement.",
+ ),
+ "R1721": (
+ "Unnecessary use of a comprehension",
+ "unnecessary-comprehension",
+ "Instead of using an identitiy comprehension, "
+ "consider using the list, dict or set constructor. "
+ "It is faster and simpler.",
+ ),
+ "R1722": (
+ "Consider using sys.exit()",
+ "consider-using-sys-exit",
+ "Instead of using exit() or quit(), consider using the sys.exit().",
+ ),
+ "R1723": (
+ 'Unnecessary "%s" after "break"',
+ "no-else-break",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a break statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "break statement.",
+ ),
+ "R1724": (
+ 'Unnecessary "%s" after "continue"',
+ "no-else-continue",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a continue statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "continue statement.",
+ ),
+ }
+ options = (
+ (
+ "max-nested-blocks",
+ {
+ "default": 5,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of nested blocks for function / method body",
+ },
+ ),
+ (
+ "never-returning-functions",
+ {
+ "default": ("sys.exit",),
+ "type": "csv",
+ "help": "Complete name of functions that never returns. When checking "
+ "for inconsistent-return-statements if a never returning function is "
+ "called then it will be considered as an explicit return statement "
+ "and no message will be printed.",
+ },
+ ),
+ )
+
+ priority = 0
+
    def __init__(self, linter=None):
        """Create the checker and initialize per-module bookkeeping."""
        checkers.BaseTokenChecker.__init__(self, linter)
        # Return nodes collected per function name (read by the
        # consistent-return checks).
        self._return_nodes = {}
        self._init()
        # Resolved from config in open(), once options are fully loaded.
        self._never_returning_functions = None
+
    def _init(self):
        """Reset the per-module state."""
        # Stack of currently-open nested blocks (too-many-nested-blocks).
        self._nested_blocks = []
        # Positions of tokens belonging to real `elif` keywords.
        self._elifs = []
        self._nested_blocks_msg = None
        # Assign nodes already reported by consider-swap-variables.
        self._reported_swap_nodes = set()
+
    def open(self):
        """Cache the configured never-returning function names as a set."""
        # do this in open since config not fully initialized in __init__
        self._never_returning_functions = set(self.config.never_returning_functions)
+
    @decorators.cachedproperty
    def _dummy_rgx(self):
        # Regex matching names considered "dummy" (dummy-variables-rgx
        # option); computed once and cached.
        return lint_utils.get_global_option(self, "dummy-variables-rgx", default=None)
+
+ @staticmethod
+ def _is_bool_const(node):
+ return isinstance(node.value, astroid.Const) and isinstance(
+ node.value.value, bool
+ )
+
+ def _is_actual_elif(self, node):
+ """Check if the given node is an actual elif
+
+ This is a problem we're having with the builtin ast module,
+ which splits `elif` branches into a separate if statement.
+ Unfortunately we need to know the exact type in certain
+ cases.
+ """
+ if isinstance(node.parent, astroid.If):
+ orelse = node.parent.orelse
+ # current if node must directly follow an "else"
+ if orelse and orelse == [node]:
+ if (node.lineno, node.col_offset) in self._elifs:
+ return True
+ return False
+
    def _check_simplifiable_if(self, node):
        """Check if the given if node can be simplified.

        The if statement can be reduced to a boolean expression
        in some cases. For instance, if there are two branches
        and both of them return a boolean value that depends on
        the result of the statement's test, then this can be reduced
        to `bool(test)` without losing any functionality.
        """

        if self._is_actual_elif(node):
            # Not interested in if statements with multiple branches.
            return
        if len(node.orelse) != 1 or len(node.body) != 1:
            return

        # Check if both branches can be reduced.
        first_branch = node.body[0]
        else_branch = node.orelse[0]
        if isinstance(first_branch, astroid.Return):
            if not isinstance(else_branch, astroid.Return):
                return
            first_branch_is_bool = self._is_bool_const(first_branch)
            else_branch_is_bool = self._is_bool_const(else_branch)
            reduced_to = "'return bool(test)'"
        elif isinstance(first_branch, astroid.Assign):
            if not isinstance(else_branch, astroid.Assign):
                return

            # Check if we assign to the same value
            first_branch_targets = [
                target.name
                for target in first_branch.targets
                if isinstance(target, astroid.AssignName)
            ]
            else_branch_targets = [
                target.name
                for target in else_branch.targets
                if isinstance(target, astroid.AssignName)
            ]
            if not first_branch_targets or not else_branch_targets:
                return
            if sorted(first_branch_targets) != sorted(else_branch_targets):
                return

            first_branch_is_bool = self._is_bool_const(first_branch)
            else_branch_is_bool = self._is_bool_const(else_branch)
            reduced_to = "'var = bool(test)'"
        else:
            return

        # Both branches must be constant True/False for the rewrite to apply.
        if not first_branch_is_bool or not else_branch_is_bool:
            return
        if not first_branch.value.value:
            # This is a case that can't be easily simplified and
            # if it can be simplified, it will usually result in a
            # code that's harder to understand and comprehend.
            # Let's take for instance `arg and arg <= 3`. This could theoretically be
            # reduced to `not arg or arg > 3`, but the net result is that now the
            # condition is harder to understand, because it requires understanding of
            # an extra clause:
            #   * first, there is the negation of truthness with `not arg`
            #   * the second clause is `arg > 3`, which occurs when arg has a
            #     a truth value, but it implies that `arg > 3` is equivalent
            #     with `arg and arg > 3`, which means that the user must
            #     think about this assumption when evaluating `arg > 3`.
            #     The original form is easier to grasp.
            return

        self.add_message("simplifiable-if-statement", node=node, args=(reduced_to,))
+
+ def process_tokens(self, tokens):
+ # Process tokens and look for 'if' or 'elif'
+ for index, token in enumerate(tokens):
+ token_string = token[1]
+ if token_string == "elif":
+ # AST exists by the time process_tokens is called, so
+ # it's safe to assume tokens[index+1]
+ # exists. tokens[index+1][2] is the elif's position as
+ # reported by CPython and PyPy,
+ # tokens[index][2] is the actual position and also is
+ # reported by IronPython.
+ self._elifs.extend([tokens[index][2], tokens[index + 1][2]])
+ elif _is_trailing_comma(tokens, index):
+ if self.linter.is_message_enabled("trailing-comma-tuple"):
+ self.add_message("trailing-comma-tuple", line=token.start[0])
+
    def leave_module(self, _):
        # Reset per-module state so the next module starts clean.
        self._init()
+
    @utils.check_messages("too-many-nested-blocks")
    def visit_tryexcept(self, node):
        """Track try/except blocks for the nesting-depth check."""
        self._check_nested_blocks(node)

    # try/finally and while blocks count toward nesting depth the same way.
    visit_tryfinally = visit_tryexcept
    visit_while = visit_tryexcept
+
+ def _check_redefined_argument_from_local(self, name_node):
+ if self._dummy_rgx and self._dummy_rgx.match(name_node.name):
+ return
+ if not name_node.lineno:
+ # Unknown position, maybe it is a manually built AST?
+ return
+
+ scope = name_node.scope()
+ if not isinstance(scope, astroid.FunctionDef):
+ return
+
+ for defined_argument in scope.args.nodes_of_class(
+ astroid.AssignName, skip_klass=(astroid.Lambda,)
+ ):
+ if defined_argument.name == name_node.name:
+ self.add_message(
+ "redefined-argument-from-local",
+ node=name_node,
+ args=(name_node.name,),
+ )
+
    @utils.check_messages("redefined-argument-from-local", "too-many-nested-blocks")
    def visit_for(self, node):
        """Check loop nesting depth and loop targets shadowing arguments."""
        self._check_nested_blocks(node)

        for name in node.target.nodes_of_class(astroid.AssignName):
            self._check_redefined_argument_from_local(name)
+
+ @utils.check_messages("redefined-argument-from-local")
+ def visit_excepthandler(self, node):
+ if node.name and isinstance(node.name, astroid.AssignName):
+ self._check_redefined_argument_from_local(node.name)
+
+ @utils.check_messages("redefined-argument-from-local")
+ def visit_with(self, node):
+ for _, names in node.items:
+ if not names:
+ continue
+ for name in names.nodes_of_class(astroid.AssignName):
+ self._check_redefined_argument_from_local(name)
+
+ def _check_superfluous_else(self, node, msg_id, returning_node_class):
+ if not node.orelse:
+ # Not interested in if statements without else.
+ return
+
+ if self._is_actual_elif(node):
+ # Not interested in elif nodes; only if
+ return
+
+ if _if_statement_is_always_returning(node, returning_node_class):
+ orelse = node.orelse[0]
+ followed_by_elif = (orelse.lineno, orelse.col_offset) in self._elifs
+ self.add_message(
+ msg_id, node=node, args="elif" if followed_by_elif else "else"
+ )
+
    def _check_superfluous_else_return(self, node):
        """Emit no-else-return via the generic superfluous-else check."""
        return self._check_superfluous_else(
            node, msg_id="no-else-return", returning_node_class=astroid.Return
        )
+
    def _check_superfluous_else_raise(self, node):
        """Emit no-else-raise via the generic superfluous-else check."""
        return self._check_superfluous_else(
            node, msg_id="no-else-raise", returning_node_class=astroid.Raise
        )
+
    def _check_superfluous_else_break(self, node):
        """Emit no-else-break via the generic superfluous-else check."""
        return self._check_superfluous_else(
            node, msg_id="no-else-break", returning_node_class=astroid.Break
        )
+
    def _check_superfluous_else_continue(self, node):
        """Emit no-else-continue via the generic superfluous-else check."""
        return self._check_superfluous_else(
            node, msg_id="no-else-continue", returning_node_class=astroid.Continue
        )
+
    def _check_consider_get(self, node):
        """Emit consider-using-get for `if key in d: var = d[key]` patterns."""

        def type_and_name_are_equal(node_a, node_b):
            # Two nodes refer to the same thing when they are the same kind
            # of name with equal names, or constants with equal values.
            for _type in [astroid.Name, astroid.AssignName]:
                if all(isinstance(_node, _type) for _node in [node_a, node_b]):
                    return node_a.name == node_b.name
            if all(isinstance(_node, astroid.Const) for _node in [node_a, node_b]):
                return node_a.value == node_b.value
            return False

        # The body must be exactly `var = d[key]` where `d` is the right-hand
        # operand of the comparison and `key` its left-hand operand, and `d`
        # must infer to a dict.
        # NOTE(review): the comparison operator itself (`in` vs `not in`) is
        # not checked here -- confirm this is intended.
        if_block_ok = (
            isinstance(node.test, astroid.Compare)
            and len(node.body) == 1
            and isinstance(node.body[0], astroid.Assign)
            and isinstance(node.body[0].value, astroid.Subscript)
            and type_and_name_are_equal(node.body[0].value.value, node.test.ops[0][1])
            and isinstance(node.body[0].value.slice, astroid.Index)
            and type_and_name_are_equal(node.body[0].value.slice.value, node.test.left)
            and len(node.body[0].targets) == 1
            and isinstance(node.body[0].targets[0], astroid.AssignName)
            and isinstance(utils.safe_infer(node.test.ops[0][1]), astroid.Dict)
        )

        if if_block_ok and not node.orelse:
            self.add_message("consider-using-get", node=node)
        elif (
            if_block_ok
            and len(node.orelse) == 1
            and isinstance(node.orelse[0], astroid.Assign)
            and type_and_name_are_equal(
                node.orelse[0].targets[0], node.body[0].targets[0]
            )
            and len(node.orelse[0].targets) == 1
        ):
            # The else branch assigns a default to the same variable.
            self.add_message("consider-using-get", node=node)
+
    @utils.check_messages(
        "too-many-nested-blocks",
        "simplifiable-if-statement",
        "no-else-return",
        "no-else-raise",
        "no-else-break",
        "no-else-continue",
        "consider-using-get",
    )
    def visit_if(self, node):
        """Run every if-statement check against *node*."""
        self._check_simplifiable_if(node)
        self._check_nested_blocks(node)
        self._check_superfluous_else_return(node)
        self._check_superfluous_else_raise(node)
        self._check_superfluous_else_break(node)
        self._check_superfluous_else_continue(node)
        self._check_consider_get(node)
+
    @utils.check_messages("simplifiable-if-expression")
    def visit_ifexp(self, node):
        """Check ternary expressions for simplifiable bool constants."""
        self._check_simplifiable_ifexp(node)
+
+ def _check_simplifiable_ifexp(self, node):
+ if not isinstance(node.body, astroid.Const) or not isinstance(
+ node.orelse, astroid.Const
+ ):
+ return
+
+ if not isinstance(node.body.value, bool) or not isinstance(
+ node.orelse.value, bool
+ ):
+ return
+
+ if isinstance(node.test, astroid.Compare):
+ test_reduced_to = "test"
+ else:
+ test_reduced_to = "bool(test)"
+
+ if (node.body.value, node.orelse.value) == (True, False):
+ reduced_to = "'{}'".format(test_reduced_to)
+ elif (node.body.value, node.orelse.value) == (False, True):
+ reduced_to = "'not test'"
+ else:
+ return
+
+ self.add_message("simplifiable-if-expression", node=node, args=(reduced_to,))
+
    @utils.check_messages(
        "too-many-nested-blocks", "inconsistent-return-statements", "useless-return"
    )
    def leave_functiondef(self, node):
        """Finalize per-function checks when leaving a function definition."""
        # check left-over nested blocks stack
        self._emit_nested_blocks_message_if_needed(self._nested_blocks)
        # new scope = reinitialize the stack of nested blocks
        self._nested_blocks = []
        # check consistent return statements
        self._check_consistent_returns(node)
        # check for single return or return None at the end
        self._check_return_at_the_end(node)
        # reset the collected return nodes for this function name
        self._return_nodes[node.name] = []
+
    @utils.check_messages("stop-iteration-return")
    def visit_raise(self, node):
        """Flag `raise StopIteration` inside generators (PEP 479)."""
        self._check_stop_iteration_inside_generator(node)
+
+ def _check_stop_iteration_inside_generator(self, node):
+ """Check if an exception of type StopIteration is raised inside a generator"""
+ frame = node.frame()
+ if not isinstance(frame, astroid.FunctionDef) or not frame.is_generator():
+ return
+ if utils.node_ignores_exception(node, StopIteration):
+ return
+ if not node.exc:
+ return
+ exc = utils.safe_infer(node.exc)
+ if exc is None or exc is astroid.Uninferable:
+ return
+ if self._check_exception_inherit_from_stopiteration(exc):
+ self.add_message("stop-iteration-return", node=node)
+
+ @staticmethod
+ def _check_exception_inherit_from_stopiteration(exc):
+ """Return True if the exception node in argument inherit from StopIteration"""
+ stopiteration_qname = "{}.StopIteration".format(utils.EXCEPTIONS_MODULE)
+ return any(_class.qname() == stopiteration_qname for _class in exc.mro())
+
+ def _check_consider_using_comprehension_constructor(self, node):
+ if (
+ isinstance(node.func, astroid.Name)
+ and node.args
+ and isinstance(node.args[0], astroid.ListComp)
+ ):
+ if node.func.name == "dict" and not isinstance(
+ node.args[0].elt, astroid.Call
+ ):
+ message_name = "consider-using-dict-comprehension"
+ self.add_message(message_name, node=node)
+ elif node.func.name == "set":
+ message_name = "consider-using-set-comprehension"
+ self.add_message(message_name, node=node)
+
    @utils.check_messages(
        "stop-iteration-return",
        "consider-using-dict-comprehension",
        "consider-using-set-comprehension",
        "consider-using-sys-exit",
    )
    def visit_call(self, node):
        """Run the call-site checks: next(), dict()/set(), exit()/quit()."""
        self._check_raising_stopiteration_in_generator_next_call(node)
        self._check_consider_using_comprehension_constructor(node)
        self._check_quit_exit_call(node)
+
+ @staticmethod
+ def _has_exit_in_scope(scope):
+ exit_func = scope.locals.get("exit")
+ return bool(
+ exit_func and isinstance(exit_func[0], (astroid.ImportFrom, astroid.Import))
+ )
+
+ def _check_quit_exit_call(self, node):
+
+ if isinstance(node.func, astroid.Name) and node.func.name in BUILTIN_EXIT_FUNCS:
+ # If we have `exit` imported from `sys` in the current or global scope, exempt this instance.
+ local_scope = node.scope()
+ if self._has_exit_in_scope(local_scope) or self._has_exit_in_scope(
+ node.root()
+ ):
+ return
+ self.add_message("consider-using-sys-exit", node=node)
+
    def _check_raising_stopiteration_in_generator_next_call(self, node):
        """Check if a StopIteration exception is raised by the call to next function

        If the next value has a default value, then do not add message.

        :param node: Check to see if this Call node is a next function
        :type node: :class:`astroid.node_classes.Call`
        """

        def _looks_like_infinite_iterator(param):
            # next() on a known-infinite iterator can never raise StopIteration.
            inferred = utils.safe_infer(param)
            if inferred:
                return inferred.qname() in KNOWN_INFINITE_ITERATORS
            return False

        if isinstance(node.func, astroid.Attribute):
            # A next() method, which is not what we want.
            return

        inferred = utils.safe_infer(node.func)
        if getattr(inferred, "name", "") == "next":
            frame = node.frame()
            # The next builtin can only have up to two
            # positional arguments and no keyword arguments
            has_sentinel_value = len(node.args) > 1
            if (
                isinstance(frame, astroid.FunctionDef)
                and frame.is_generator()
                and not has_sentinel_value
                and not utils.node_ignores_exception(node, StopIteration)
                and not _looks_like_infinite_iterator(node.args[0])
            ):
                self.add_message("stop-iteration-return", node=node)
+
    def _check_nested_blocks(self, node):
        """Update and check the number of nested blocks."""
        # only check block levels inside functions or methods
        if not isinstance(node.scope(), astroid.FunctionDef):
            return
        # messages are triggered on leaving the nested block. Here we save the
        # stack in case the current node isn't nested in the previous one
        nested_blocks = self._nested_blocks[:]
        if node.parent == node.scope():
            # Top-level statement of the function: restart the stack.
            self._nested_blocks = [node]
        else:
            # go through ancestors from the most nested to the less
            for ancestor_node in reversed(self._nested_blocks):
                if ancestor_node == node.parent:
                    break
                self._nested_blocks.pop()
            # if the node is an elif, this should not be another nesting level
            if isinstance(node, astroid.If) and self._is_actual_elif(node):
                if self._nested_blocks:
                    self._nested_blocks.pop()
            self._nested_blocks.append(node)

        # send message only once per group of nested blocks
        if len(nested_blocks) > len(self._nested_blocks):
            self._emit_nested_blocks_message_if_needed(nested_blocks)
+
+ def _emit_nested_blocks_message_if_needed(self, nested_blocks):
+ if len(nested_blocks) > self.config.max_nested_blocks:
+ self.add_message(
+ "too-many-nested-blocks",
+ node=nested_blocks[0],
+ args=(len(nested_blocks), self.config.max_nested_blocks),
+ )
+
+ @staticmethod
+ def _duplicated_isinstance_types(node):
+ """Get the duplicated types from the underlying isinstance calls.
+
+ :param astroid.BoolOp node: Node which should contain a bunch of isinstance calls.
+ :returns: Dictionary of the comparison objects from the isinstance calls,
+ to duplicate values from consecutive calls.
+ :rtype: dict
+ """
+ duplicated_objects = set()
+ all_types = collections.defaultdict(set)
+
+ for call in node.values:
+ if not isinstance(call, astroid.Call) or len(call.args) != 2:
+ continue
+
+ inferred = utils.safe_infer(call.func)
+ if not inferred or not utils.is_builtin_object(inferred):
+ continue
+
+ if inferred.name != "isinstance":
+ continue
+
+ isinstance_object = call.args[0].as_string()
+ isinstance_types = call.args[1]
+
+ if isinstance_object in all_types:
+ duplicated_objects.add(isinstance_object)
+
+ if isinstance(isinstance_types, astroid.Tuple):
+ elems = [
+ class_type.as_string() for class_type in isinstance_types.itered()
+ ]
+ else:
+ elems = [isinstance_types.as_string()]
+ all_types[isinstance_object].update(elems)
+
+ # Remove all keys which not duplicated
+ return {
+ key: value for key, value in all_types.items() if key in duplicated_objects
+ }
+
+ def _check_consider_merging_isinstance(self, node):
+ """Check isinstance calls which can be merged together."""
+ if node.op != "or":
+ return
+
+ first_args = self._duplicated_isinstance_types(node)
+ for duplicated_name, class_names in first_args.items():
+ names = sorted(name for name in class_names)
+ self.add_message(
+ "consider-merging-isinstance",
+ node=node,
+ args=(duplicated_name, ", ".join(names)),
+ )
+
    def _check_consider_using_in(self, node):
        """Suggest `x in (a, b)` over `x == a or x == b` chains."""
        allowed_ops = {"or": "==", "and": "!="}

        if node.op not in allowed_ops or len(node.values) < 2:
            return

        for value in node.values:
            if (
                not isinstance(value, astroid.Compare)
                or len(value.ops) != 1
                or value.ops[0][0] not in allowed_ops[node.op]
            ):
                return
            for comparable in value.left, value.ops[0][1]:
                if isinstance(comparable, astroid.Call):
                    # Calls may have side effects; rewriting could change
                    # behavior, so bail out.
                    return

        # Gather variables and values from comparisons
        variables, values = [], []
        for value in node.values:
            variable_set = set()
            for comparable in value.left, value.ops[0][1]:
                if isinstance(comparable, astroid.Name):
                    variable_set.add(comparable.as_string())
                values.append(comparable.as_string())
            variables.append(variable_set)

        # Look for (common-)variables that occur in all comparisons
        common_variables = reduce(lambda a, b: a.intersection(b), variables)

        if not common_variables:
            return

        # Gather information for the suggestion
        common_variable = sorted(list(common_variables))[0]
        comprehension = "in" if node.op == "or" else "not in"
        values = list(collections.OrderedDict.fromkeys(values))
        values.remove(common_variable)
        # A single value needs a trailing comma so the suggestion stays a tuple.
        values_string = ", ".join(values) if len(values) != 1 else values[0] + ","
        suggestion = "%s %s (%s)" % (common_variable, comprehension, values_string)

        self.add_message("consider-using-in", node=node, args=(suggestion,))
+
    def _check_chained_comparison(self, node):
        """Check if there is any chained comparison in the expression.

        Add a refactoring message if a boolOp contains comparison like a < b and b < c,
        which can be chained as a < b < c.

        Care is taken to avoid simplifying a < b < c and b < d.
        """
        if node.op != "and" or len(node.values) < 2:
            return

        def _find_lower_upper_bounds(comparison_node, uses):
            # Record, for every name/constant operand, which comparison nodes
            # use it as a lower bound and which as an upper bound.
            left_operand = comparison_node.left
            for operator, right_operand in comparison_node.ops:
                for operand in (left_operand, right_operand):
                    value = None
                    if isinstance(operand, astroid.Name):
                        value = operand.name
                    elif isinstance(operand, astroid.Const):
                        value = operand.value

                    if value is None:
                        continue

                    if operator in ("<", "<="):
                        if operand is left_operand:
                            uses[value]["lower_bound"].add(comparison_node)
                        elif operand is right_operand:
                            uses[value]["upper_bound"].add(comparison_node)
                    elif operator in (">", ">="):
                        if operand is left_operand:
                            uses[value]["upper_bound"].add(comparison_node)
                        elif operand is right_operand:
                            uses[value]["lower_bound"].add(comparison_node)
                left_operand = right_operand

        uses = collections.defaultdict(
            lambda: {"lower_bound": set(), "upper_bound": set()}
        )
        for comparison_node in node.values:
            if isinstance(comparison_node, astroid.Compare):
                _find_lower_upper_bounds(comparison_node, uses)

        for _, bounds in uses.items():
            num_shared = len(bounds["lower_bound"].intersection(bounds["upper_bound"]))
            num_lower_bounds = len(bounds["lower_bound"])
            num_upper_bounds = len(bounds["upper_bound"])
            # A value bounded on both sides by *different* comparisons means
            # those comparisons can be chained into one.
            if num_shared < num_lower_bounds and num_shared < num_upper_bounds:
                self.add_message("chained-comparison", node=node)
                break
+
    @utils.check_messages(
        "consider-merging-isinstance", "consider-using-in", "chained-comparison"
    )
    def visit_boolop(self, node):
        """Run the boolean-operation checks against *node*."""
        self._check_consider_merging_isinstance(node)
        self._check_consider_using_in(node)
        self._check_chained_comparison(node)
+
+ @staticmethod
+ def _is_simple_assignment(node):
+ return (
+ isinstance(node, astroid.Assign)
+ and len(node.targets) == 1
+ and isinstance(node.targets[0], astroid.node_classes.AssignName)
+ and isinstance(node.value, astroid.node_classes.Name)
+ )
+
+ def _check_swap_variables(self, node):
+ if not node.next_sibling() or not node.next_sibling().next_sibling():
+ return
+ assignments = [node, node.next_sibling(), node.next_sibling().next_sibling()]
+ if not all(self._is_simple_assignment(node) for node in assignments):
+ return
+ if any(node in self._reported_swap_nodes for node in assignments):
+ return
+ left = [node.targets[0].name for node in assignments]
+ right = [node.value.name for node in assignments]
+ if left[0] == right[-1] and left[1:] == right[:-1]:
+ self._reported_swap_nodes.update(assignments)
+ message = "consider-swap-variables"
+ self.add_message(message, node=node)
+
+ @utils.check_messages(
+ "simplify-boolean-expression",
+ "consider-using-ternary",
+ "consider-swap-variables",
+ )
+ def visit_assign(self, node):
+ self._check_swap_variables(node)
+ if self._is_and_or_ternary(node.value):
+ cond, truth_value, false_value = self._and_or_ternary_arguments(node.value)
+ else:
+ return
+
+ if all(
+ isinstance(value, astroid.Compare) for value in (truth_value, false_value)
+ ):
+ return
+
+ inferred_truth_value = utils.safe_infer(truth_value)
+ if inferred_truth_value in (None, astroid.Uninferable):
+ truth_boolean_value = True
+ else:
+ truth_boolean_value = truth_value.bool_value()
+
+ if truth_boolean_value is False:
+ message = "simplify-boolean-expression"
+ suggestion = false_value.as_string()
+ else:
+ message = "consider-using-ternary"
+ suggestion = "{truth} if {cond} else {false}".format(
+ truth=truth_value.as_string(),
+ cond=cond.as_string(),
+ false=false_value.as_string(),
+ )
+ self.add_message(message, node=node, args=(suggestion,))
+
+ visit_return = visit_assign
+
+ def _check_consider_using_join(self, aug_assign):
+ """
+ We start with the augmented assignment and work our way upwards.
+ Names of variables for nodes if match successful:
+ result = '' # assign
+ for number in ['1', '2', '3'] # for_loop
+ result += number # aug_assign
+ """
+ for_loop = aug_assign.parent
+ if not isinstance(for_loop, astroid.For) or len(for_loop.body) > 1:
+ return
+ assign = for_loop.previous_sibling()
+ if not isinstance(assign, astroid.Assign):
+ return
+ result_assign_names = {
+ target.name
+ for target in assign.targets
+ if isinstance(target, astroid.AssignName)
+ }
+
+ is_concat_loop = (
+ aug_assign.op == "+="
+ and isinstance(aug_assign.target, astroid.AssignName)
+ and len(for_loop.body) == 1
+ and aug_assign.target.name in result_assign_names
+ and isinstance(assign.value, astroid.Const)
+ and isinstance(assign.value.value, str)
+ and isinstance(aug_assign.value, astroid.Name)
+ and aug_assign.value.name == for_loop.target.name
+ )
+ if is_concat_loop:
+ self.add_message("consider-using-join", node=aug_assign)
+
+ @utils.check_messages("consider-using-join")
+ def visit_augassign(self, node):
+ self._check_consider_using_join(node)
+
+ @utils.check_messages("unnecessary-comprehension")
+ def visit_comprehension(self, node):
+ self._check_unnecessary_comprehension(node)
+
    def _check_unnecessary_comprehension(self, node):
        """Emit unnecessary-comprehension when a comprehension only copies its
        iterable, e.g. `[x for x in seq]` or `{k: v for k, v in pairs}`.
        """
        # Only plain, single-generator, filter-free, synchronous
        # comprehensions are candidates.
        if (
            isinstance(node.parent, astroid.GeneratorExp)
            or len(node.ifs) != 0
            or len(node.parent.generators) != 1
            or node.is_async
        ):
            return

        if (
            isinstance(node.parent, astroid.DictComp)
            and isinstance(node.parent.key, astroid.Name)
            and isinstance(node.parent.value, astroid.Name)
            and isinstance(node.target, astroid.Tuple)
            and all(isinstance(elt, astroid.AssignName) for elt in node.target.elts)
        ):
            # Dict comprehension: compare the (key, value) expression names
            # against the names bound by the target tuple.
            expr_list = [node.parent.key.name, node.parent.value.name]
            target_list = [elt.name for elt in node.target.elts]

        elif isinstance(node.parent, (astroid.ListComp, astroid.SetComp)):
            expr = node.parent.elt
            if isinstance(expr, astroid.Name):
                # Single-name element: compared as a bare string below.
                expr_list = expr.name
            elif isinstance(expr, astroid.Tuple):
                if any(not isinstance(elt, astroid.Name) for elt in expr.elts):
                    return
                expr_list = [elt.name for elt in expr.elts]
            else:
                expr_list = []
            target = node.parent.generators[0].target
            # Mirror the element's shape: bare string for a single name,
            # list of names for a tuple target, [] otherwise.
            target_list = (
                target.name
                if isinstance(target, astroid.AssignName)
                else (
                    [
                        elt.name
                        for elt in target.elts
                        if isinstance(elt, astroid.AssignName)
                    ]
                    if isinstance(target, astroid.Tuple)
                    else []
                )
            )
        else:
            return
        # Equal, non-empty shapes mean the comprehension adds nothing over
        # iterating the source directly.
        if expr_list == target_list != []:
            self.add_message("unnecessary-comprehension", node=node)
+
+ @staticmethod
+ def _is_and_or_ternary(node):
+ """
+ Returns true if node is 'condition and true_value or false_value' form.
+
+ All of: condition, true_value and false_value should not be a complex boolean expression
+ """
+ return (
+ isinstance(node, astroid.BoolOp)
+ and node.op == "or"
+ and len(node.values) == 2
+ and isinstance(node.values[0], astroid.BoolOp)
+ and not isinstance(node.values[1], astroid.BoolOp)
+ and node.values[0].op == "and"
+ and not isinstance(node.values[0].values[1], astroid.BoolOp)
+ and len(node.values[0].values) == 2
+ )
+
+ @staticmethod
+ def _and_or_ternary_arguments(node):
+ false_value = node.values[1]
+ condition, true_value = node.values[0].values
+ return condition, true_value, false_value
+
    def visit_functiondef(self, node):
        """Collect the function's return statements for the later return checks.

        Nested function definitions are excluded via skip_klass so inner
        returns are not attributed to the enclosing function.

        NOTE(review): entries are keyed by the bare function name, so two
        same-named functions in one module (e.g. methods of different
        classes) share a slot — confirm this is acceptable before relying
        on per-function accuracy.
        """
        self._return_nodes[node.name] = list(
            node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef)
        )
+
+ def _check_consistent_returns(self, node):
+ """Check that all return statements inside a function are consistent.
+
+ Return statements are consistent if:
+ - all returns are explicit and if there is no implicit return;
+ - all returns are empty and if there is, possibly, an implicit return.
+
+ Args:
+ node (astroid.FunctionDef): the function holding the return statements.
+
+ """
+ # explicit return statements are those with a not None value
+ explicit_returns = [
+ _node for _node in self._return_nodes[node.name] if _node.value is not None
+ ]
+ if not explicit_returns:
+ return
+ if len(explicit_returns) == len(
+ self._return_nodes[node.name]
+ ) and self._is_node_return_ended(node):
+ return
+ self.add_message("inconsistent-return-statements", node=node)
+
    def _is_node_return_ended(self, node):
        """Check if the node ends with an explicit return statement.

        Args:
            node (astroid.NodeNG): node to be checked.

        Returns:
            bool: True if the node ends with an explicit statement, False otherwise.

        """
        # Recursion base case
        if isinstance(node, astroid.Return):
            return True
        if isinstance(node, astroid.Call):
            try:
                funcdef_node = node.func.inferred()[0]
                if self._is_function_def_never_returning(funcdef_node):
                    # A call that never returns (e.g. sys.exit) terminates the
                    # flow just like an explicit return would.
                    return True
            except astroid.InferenceError:
                pass
        # Avoid the check inside while loop as we don't know
        # if they will be completed
        if isinstance(node, astroid.While):
            return True
        if isinstance(node, astroid.Raise):
            # a Raise statement doesn't need to end with a return statement
            # but if the exception raised is handled, then the handler has to
            # ends with a return statement
            if not node.exc:
                # Ignore bare raises
                return True
            if not utils.is_node_inside_try_except(node):
                # If the raise statement is not inside a try/except statement
                # then the exception is raised and cannot be caught. No need
                # to infer it.
                return True
            exc = utils.safe_infer(node.exc)
            if exc is None or exc is astroid.Uninferable:
                # Uninferable exception: be conservative, treat as not ended.
                return False
            exc_name = exc.pytype().split(".")[-1]
            handlers = utils.get_exception_handlers(node, exc_name)
            handlers = list(handlers) if handlers is not None else []
            if handlers:
                # among all the handlers handling the exception at least one
                # must end with a return statement
                return any(
                    self._is_node_return_ended(_handler) for _handler in handlers
                )
            # if no handlers handle the exception then it's ok
            return True
        if isinstance(node, astroid.If):
            # if statement is returning if there are exactly two return statements in its
            # children : one for the body part, the other for the orelse part
            # Do not check if inner function definition are return ended.
            is_orelse_returning = any(
                self._is_node_return_ended(_ore)
                for _ore in node.orelse
                if not isinstance(_ore, astroid.FunctionDef)
            )
            is_if_returning = any(
                self._is_node_return_ended(_ifn)
                for _ifn in node.body
                if not isinstance(_ifn, astroid.FunctionDef)
            )
            return is_if_returning and is_orelse_returning
        # recurses on the children of the node except for those which are except handler
        # because one cannot be sure that the handler will really be used
        return any(
            self._is_node_return_ended(_child)
            for _child in node.get_children()
            if not isinstance(_child, astroid.ExceptHandler)
        )
+
+ def _is_function_def_never_returning(self, node):
+ """Return True if the function never returns. False otherwise.
+
+ Args:
+ node (astroid.FunctionDef): function definition node to be analyzed.
+
+ Returns:
+ bool: True if the function never returns, False otherwise.
+ """
+ try:
+ return node.qname() in self._never_returning_functions
+ except TypeError:
+ return False
+
+ def _check_return_at_the_end(self, node):
+ """Check for presence of a *single* return statement at the end of a
+ function. "return" or "return None" are useless because None is the
+ default return type if they are missing.
+
+ NOTE: produces a message only if there is a single return statement
+ in the function body. Otherwise _check_consistent_returns() is called!
+ Per its implementation and PEP8 we can have a "return None" at the end
+ of the function body if there are other return statements before that!
+ """
+ if len(self._return_nodes[node.name]) > 1:
+ return
+ if len(node.body) <= 1:
+ return
+
+ last = node.body[-1]
+ if isinstance(last, astroid.Return):
+ # e.g. "return"
+ if last.value is None:
+ self.add_message("useless-return", node=node)
+ # return None"
+ elif isinstance(last.value, astroid.Const) and (last.value.value is None):
+ self.add_message("useless-return", node=node)
+
+
class RecommandationChecker(checkers.BaseChecker):
    """Suggests more idiomatic constructs (enumerate, direct dict iteration).

    NOTE(review): the historically misspelled class name is kept because it
    is referenced by register() below and possibly elsewhere.
    """

    __implements__ = (interfaces.IAstroidChecker,)
    name = "refactoring"
    msgs = {
        "C0200": (
            "Consider using enumerate instead of iterating with range and len",
            "consider-using-enumerate",
            "Emitted when code that iterates with range and len is "
            "encountered. Such code can be simplified by using the "
            "enumerate builtin.",
        ),
        "C0201": (
            "Consider iterating the dictionary directly instead of calling .keys()",
            "consider-iterating-dictionary",
            "Emitted when the keys of a dictionary are iterated through the .keys() "
            "method. It is enough to just iterate through the dictionary itself, as "
            'in "for key in dictionary".',
        ),
    }

    @staticmethod
    def _is_builtin(node, function):
        """Return True if *node* infers to the builtin named *function*."""
        inferred = utils.safe_infer(node)
        if not inferred:
            return False
        return utils.is_builtin_object(inferred) and inferred.name == function

    @utils.check_messages("consider-iterating-dictionary")
    def visit_call(self, node):
        """Emit consider-iterating-dictionary for `for x in d.keys()` patterns."""
        if not isinstance(node.func, astroid.Attribute):
            return
        if node.func.attrname != "keys":
            return
        if not isinstance(node.parent, (astroid.For, astroid.Comprehension)):
            return

        inferred = utils.safe_infer(node.func)
        if not isinstance(inferred, astroid.BoundMethod) or not isinstance(
            inferred.bound, astroid.Dict
        ):
            return

        # The parent was already verified to be a For/Comprehension above, so
        # the message can be emitted unconditionally (the original repeated
        # that isinstance check here, which was dead code).
        self.add_message("consider-iterating-dictionary", node=node)

    @utils.check_messages("consider-using-enumerate")
    def visit_for(self, node):
        """Emit a convention whenever range and len are used for indexing."""
        # Verify that we have a `range([start], len(...), [stop])` call and
        # that the object which is iterated is used as a subscript in the
        # body of the for.

        # Is it a proper range call?
        if not isinstance(node.iter, astroid.Call):
            return
        if not self._is_builtin(node.iter.func, "range"):
            return
        if not node.iter.args:
            # `range()` is invalid at runtime, but must not crash the checker
            # (the args[-1] access below would raise IndexError).
            return
        if len(node.iter.args) == 2 and not _is_constant_zero(node.iter.args[0]):
            return
        if len(node.iter.args) > 2:
            return

        # Is it a proper len call?
        if not isinstance(node.iter.args[-1], astroid.Call):
            return
        second_func = node.iter.args[-1].func
        if not self._is_builtin(second_func, "len"):
            return
        len_args = node.iter.args[-1].args
        if not len_args or len(len_args) != 1:
            return
        iterating_object = len_args[0]
        if not isinstance(iterating_object, astroid.Name):
            return
        # If we're defining __iter__ on self, enumerate won't work
        scope = node.scope()
        if iterating_object.name == "self" and scope.name == "__iter__":
            return

        # Verify that the body of the for loop uses a subscript
        # with the object that was iterated. This uses some heuristics
        # in order to make sure that the same object is used in the
        # for body.
        for child in node.body:
            for subscript in child.nodes_of_class(astroid.Subscript):
                if not isinstance(subscript.value, astroid.Name):
                    continue
                if not isinstance(subscript.slice, astroid.Index):
                    continue
                if not isinstance(subscript.slice.value, astroid.Name):
                    continue
                if subscript.slice.value.name != node.target.name:
                    continue
                if iterating_object.name != subscript.value.name:
                    continue
                if subscript.value.scope() != node.scope():
                    # Ignore this subscript if it's not in the same
                    # scope. This means that in the body of the for
                    # loop, another scope was created, where the same
                    # name for the iterating object was used.
                    continue
                self.add_message("consider-using-enumerate", node=node)
                return
+
+
class NotChecker(checkers.BaseChecker):
    """checks for too many not in comparison expressions

    - "not not" should trigger a warning
    - "not" followed by a comparison should trigger a warning
    """

    __implements__ = (interfaces.IAstroidChecker,)
    msgs = {
        "C0113": (
            'Consider changing "%s" to "%s"',
            "unneeded-not",
            "Used when a boolean expression contains an unneeded negation.",
        )
    }
    name = "refactoring"
    # Maps each comparison operator to its negation; operators absent from
    # this table (e.g. "not in", "is not") are never simplified.
    reverse_op = {
        "<": ">=",
        "<=": ">",
        ">": "<=",
        ">=": "<",
        "==": "!=",
        "!=": "==",
        "in": "not in",
        "is": "is not",
    }
    # sets are not ordered, so for example "not set(LEFT_VALS) <= set(RIGHT_VALS)" is
    # not equivalent to "set(LEFT_VALS) > set(RIGHT_VALS)"
    skipped_nodes = (astroid.Set,)
    # 'builtins' py3, '__builtin__' py2
    skipped_classnames = [
        "%s.%s" % (builtins.__name__, qname) for qname in ("set", "frozenset")
    ]

    @utils.check_messages("unneeded-not")
    def visit_unaryop(self, node):
        """Emit unneeded-not for double negations and negated comparisons."""
        if node.op != "not":
            return
        operand = node.operand

        if isinstance(operand, astroid.UnaryOp) and operand.op == "not":
            # `not not X` -> suggest plain `X`
            self.add_message(
                "unneeded-not",
                node=node,
                args=(node.as_string(), operand.operand.as_string()),
            )
        elif isinstance(operand, astroid.Compare):
            left = operand.left
            # ignore multiple comparisons
            if len(operand.ops) > 1:
                return
            operator, right = operand.ops[0]
            if operator not in self.reverse_op:
                return
            # Ignore __ne__ as function of __eq__
            frame = node.frame()
            if frame.name == "__ne__" and operator == "==":
                return
            # Skip set-like operands: their comparison operators are partial
            # orders, so negating them is not a simple operator flip.
            for _type in (utils.node_type(left), utils.node_type(right)):
                if not _type:
                    return
                if isinstance(_type, self.skipped_nodes):
                    return
                if (
                    isinstance(_type, astroid.Instance)
                    and _type.qname() in self.skipped_classnames
                ):
                    return
            suggestion = "%s %s %s" % (
                left.as_string(),
                self.reverse_op[operator],
                right.as_string(),
            )
            self.add_message(
                "unneeded-not", node=node, args=(node.as_string(), suggestion)
            )
+
+
class LenChecker(checkers.BaseChecker):
    """Checks for incorrect usage of len() inside conditions.
    Pep8 states:
    For sequences, (strings, lists, tuples), use the fact that empty sequences are false.

        Yes: if not seq:
             if seq:

        No: if len(seq):
            if not len(seq):

    Problems detected:
    * if len(sequence):
    * if not len(sequence):
    * elif len(sequence):
    * elif not len(sequence):
    * while len(sequence):
    * while not len(sequence):
    * assert len(sequence):
    * assert not len(sequence):
    """

    __implements__ = (interfaces.IAstroidChecker,)

    # configuration section name
    name = "refactoring"
    msgs = {
        "C1801": (
            "Do not use `len(SEQUENCE)` without comparison to determine if a sequence is empty",
            "len-as-condition",
            "Used when Pylint detects that len(sequence) is being used "
            "without explicit comparison inside a condition to determine if a sequence is empty. "
            "Instead of coercing the length to a boolean, either "
            "rely on the fact that empty sequences are false or "
            "compare the length against a scalar.",
        )
    }

    priority = -2
    options = ()

    @utils.check_messages("len-as-condition")
    def visit_call(self, node):
        # a len(S) call is used inside a test condition
        # could be if, while, assert or if expression statement
        # e.g. `if len(S):`
        if _is_len_call(node):
            # the len() call could also be nested together with other
            # boolean operations, e.g. `if z or len(x):`
            parent = node.parent
            while isinstance(parent, astroid.BoolOp):
                parent = parent.parent

            # we're finally out of any nested boolean operations so check if
            # this len() call is part of a test condition
            if not _node_is_test_condition(parent):
                return
            if not (node is parent.test or parent.test.parent_of(node)):
                return
            self.add_message("len-as-condition", node=node)

    @utils.check_messages("len-as-condition")
    def visit_unaryop(self, node):
        """`not len(S)` must become `not S` regardless if the parent block
        is a test condition or something else (boolean expression)
        e.g. `if not len(S):`"""
        # visit_unaryop is only dispatched for UnaryOp nodes, so the
        # original `isinstance(node, astroid.UnaryOp)` re-check was
        # redundant and has been dropped.
        if node.op == "not" and _is_len_call(node.operand):
            self.add_message("len-as-condition", node=node)
+
+
def register(linter):
    """Required method to auto register this checker."""
    for checker_class in (
        RefactoringChecker,
        NotChecker,
        RecommandationChecker,
        LenChecker,
    ):
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/similar.py b/venv/Lib/site-packages/pylint/checkers/similar.py
new file mode 100644
index 0000000..019b55f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/similar.py
@@ -0,0 +1,452 @@
+# Copyright (c) 2006, 2008-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012 Ry4an Brase <ry4an-hg@ry4an.org>
+# Copyright (c) 2012 Google, Inc.
+# Copyright (c) 2012 Anthony VEREZ <anthony.verez.external@cassidian.com>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2017 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+# pylint: disable=redefined-builtin
+"""a similarities / code duplication command line tool and pylint checker
+"""
+
+import sys
+from collections import defaultdict
+from getopt import getopt
+from itertools import groupby
+
+import astroid
+
+from pylint.checkers import BaseChecker, table_lines_from_stats
+from pylint.interfaces import IRawChecker
+from pylint.reporters.ureports.nodes import Table
+from pylint.utils import decoding_stream
+
+
class Similar:
    """finds copy-pasted lines of code in a project"""

    def __init__(
        self,
        min_lines=4,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        # Minimum number of successive similar non-blank lines to report.
        self.min_lines = min_lines
        self.ignore_comments = ignore_comments
        self.ignore_docstrings = ignore_docstrings
        self.ignore_imports = ignore_imports
        self.linesets = []

    def append_stream(self, streamid, stream, encoding=None):
        """append a file to search for similarities"""
        if encoding is None:
            readlines = stream.readlines
        else:
            readlines = decoding_stream(stream, encoding).readlines
        try:
            self.linesets.append(
                LineSet(
                    streamid,
                    readlines(),
                    self.ignore_comments,
                    self.ignore_docstrings,
                    self.ignore_imports,
                )
            )
        except UnicodeDecodeError:
            # Undecodable streams are skipped silently.
            pass

    def run(self):
        """start looking for similarities and display results on stdout"""
        self._display_sims(self._compute_sims())

    def _compute_sims(self):
        """compute similarities in appended files

        Returns:
            list of (line_count, {(lineset, start_index), ...}) couples,
            sorted by decreasing line count.
        """
        no_duplicates = defaultdict(list)
        for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
            duplicate = no_duplicates[num]
            # Merge overlapping occurrences into a single group.
            for couples in duplicate:
                if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                    couples.add((lineset1, idx1))
                    couples.add((lineset2, idx2))
                    break
            else:
                duplicate.append({(lineset1, idx1), (lineset2, idx2)})
        sims = []
        for num, ensembles in no_duplicates.items():
            for couples in ensembles:
                sims.append((num, couples))
        sims.sort()
        sims.reverse()
        return sims

    def _display_sims(self, sims):
        """display computed similarities on stdout"""
        nb_lignes_dupliquees = 0
        for num, couples in sims:
            print()
            print(num, "similar lines in", len(couples), "files")
            couples = sorted(couples)
            lineset = idx = None
            for lineset, idx in couples:
                print("==%s:%s" % (lineset.name, idx))
            if lineset:
                for line in lineset._real_lines[idx : idx + num]:
                    print(" ", line.rstrip())
            nb_lignes_dupliquees += num * (len(couples) - 1)
        # Generator (not a throwaway list) and a zero guard: the original
        # raised ZeroDivisionError when no lines had been appended, unlike
        # SimilarChecker.close() which guards with `total and ...`.
        nb_total_lignes = sum(len(lineset) for lineset in self.linesets)
        percent = (
            nb_lignes_dupliquees * 100.0 / nb_total_lignes if nb_total_lignes else 0.0
        )
        print(
            "TOTAL lines=%s duplicates=%s percent=%.2f"
            % (nb_total_lignes, nb_lignes_dupliquees, percent)
        )

    def _find_common(self, lineset1, lineset2):
        """find similarities in the two given linesets"""
        lines1 = lineset1.enumerate_stripped
        lines2 = lineset2.enumerate_stripped
        find = lineset2.find
        index1 = 0
        min_lines = self.min_lines
        while index1 < len(lineset1):
            skip = 1
            num = 0
            # For every position in lineset2 where the current line occurs,
            # extend the match forward until the first mismatch.
            for index2 in find(lineset1[index1]):
                non_blank = 0
                for num, ((_, line1), (_, line2)) in enumerate(
                    zip(lines1(index1), lines2(index2))
                ):
                    if line1 != line2:
                        if non_blank > min_lines:
                            yield num, lineset1, index1, lineset2, index2
                        skip = max(skip, num)
                        break
                    if line1:
                        non_blank += 1
                else:
                    # we may have reach the end
                    num += 1
                    if non_blank > min_lines:
                        yield num, lineset1, index1, lineset2, index2
                    skip = max(skip, num)
            # Jump past the longest match found to avoid re-reporting its
            # suffixes as separate similarities.
            index1 += skip

    def _iter_sims(self):
        """iterate on similarities among all files, by making a cartesian
        product
        """
        for idx, lineset in enumerate(self.linesets[:-1]):
            for lineset2 in self.linesets[idx + 1 :]:
                for sim in self._find_common(lineset, lineset2):
                    yield sim
+
+
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
    """return lines with leading/trailing whitespace and any ignored code
    features removed

    Args:
        lines: iterable of source lines (newline-terminated or not).
        ignore_comments: blank out trailing `#` comments.
        ignore_docstrings: blank out triple-quoted docstrings.
        ignore_imports: blank out import lines (requires parseable source).
    """
    if ignore_imports:
        tree = astroid.parse("".join(lines))
        node_is_import_by_lineno = (
            (node.lineno, isinstance(node, (astroid.Import, astroid.ImportFrom)))
            for node in tree.body
        )
        line_begins_import = {
            lineno: all(is_import for _, is_import in node_is_import_group)
            for lineno, node_is_import_group in groupby(
                node_is_import_by_lineno, key=lambda x: x[0]
            )
        }
        current_line_is_import = False

    strippedlines = []
    docstring = None
    for lineno, line in enumerate(lines, start=1):
        line = line.strip()
        if ignore_docstrings:
            if not docstring:
                if line.startswith('"""') or line.startswith("'''"):
                    docstring = line[:3]
                    line = line[3:]
                elif line.startswith('r"""') or line.startswith("r'''"):
                    # Raw docstrings terminate with the plain quote triple;
                    # the previous code stored `line[:3]` (e.g. `r""`) as the
                    # terminator, which never matched, so everything after an
                    # r-prefixed docstring was blanked out.
                    docstring = line[1:4]
                    line = line[4:]
            if docstring:
                if line.endswith(docstring):
                    docstring = None
                line = ""
        if ignore_imports:
            current_line_is_import = line_begins_import.get(
                lineno, current_line_is_import
            )
            if current_line_is_import:
                line = ""
        if ignore_comments:
            line = line.split("#", 1)[0].strip()
        strippedlines.append(line)
    return strippedlines
+
+
class LineSet:
    """Holds and indexes all the lines of a single source file."""

    def __init__(
        self,
        name,
        lines,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        self.name = name
        self._real_lines = lines
        # Normalized lines used for matching; the originals are kept for display.
        self._stripped_lines = stripped_lines(
            lines, ignore_comments, ignore_docstrings, ignore_imports
        )
        self._index = self._mk_index()

    def __str__(self):
        return "<Lineset for %s>" % self.name

    def __len__(self):
        return len(self._real_lines)

    def __getitem__(self, index):
        return self._stripped_lines[index]

    def __lt__(self, other):
        return self.name < other.name

    def __hash__(self):
        return id(self)

    def enumerate_stripped(self, start_at=0):
        """Yield (index, stripped_line) pairs beginning at *start_at*."""
        if start_at:
            tail = self._stripped_lines[start_at:]
        else:
            tail = self._stripped_lines
        yield from enumerate(tail, start=start_at)

    def find(self, stripped_line):
        """Return the positions of *stripped_line* in this set (empty tuple if absent)."""
        return self._index.get(stripped_line, ())

    def _mk_index(self):
        """Map each non-blank stripped line to the list of indices where it occurs."""
        index = defaultdict(list)
        for line_no, line in enumerate(self._stripped_lines):
            if line:
                index[line].append(line_no)
        return index
+
+
# Message definitions for SimilarChecker (R0801 / duplicate-code).
MSGS = {
    "R0801": (
        "Similar lines in %s files\n%s",
        "duplicate-code",
        "Indicates that a set of similar lines has been detected "
        "among multiple file. This usually means that the code should "
        "be refactored to avoid this duplication.",
    )
}
+
+
def report_similarities(sect, stats, old_stats):
    """make a layout with some stats about duplication"""
    layout_lines = ["", "now", "previous", "difference"]
    layout_lines.extend(
        table_lines_from_stats(
            stats, old_stats, ("nb_duplicated_lines", "percent_duplicated_lines")
        )
    )
    sect.append(Table(children=layout_lines, cols=4, rheaders=1, cheaders=1))
+
+
+# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
    """checks for similarities and duplicated code. This computation may be
    memory / CPU intensive, so you should disable it if you experiment some
    problems.
    """

    __implements__ = (IRawChecker,)
    # configuration section name
    name = "similarities"
    # messages
    msgs = MSGS
    # configuration options
    # for available dict keys/values see the optik parser 'add_option' method
    options = (
        (
            "min-similarity-lines",  # type: ignore
            {
                "default": 4,
                "type": "int",
                "metavar": "<int>",
                "help": "Minimum lines number of a similarity.",
            },
        ),
        (
            "ignore-comments",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore comments when computing similarities.",
            },
        ),
        (
            "ignore-docstrings",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore docstrings when computing similarities.",
            },
        ),
        (
            "ignore-imports",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Ignore imports when computing similarities.",
            },
        ),
    )
    # reports
    reports = (("RP0801", "Duplication", report_similarities),)  # type: ignore

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Values here mirror the option defaults above; set_option() below
        # synchronizes them once the configuration is actually parsed.
        Similar.__init__(
            self, min_lines=4, ignore_comments=True, ignore_docstrings=True
        )
        self.stats = None

    def set_option(self, optname, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)

        overridden to report options setting to Similar
        """
        BaseChecker.set_option(self, optname, value, action, optdict)
        if optname == "min-similarity-lines":
            self.min_lines = self.config.min_similarity_lines
        elif optname == "ignore-comments":
            self.ignore_comments = self.config.ignore_comments
        elif optname == "ignore-docstrings":
            self.ignore_docstrings = self.config.ignore_docstrings
        elif optname == "ignore-imports":
            self.ignore_imports = self.config.ignore_imports

    def open(self):
        """init the checkers: reset linesets and statistics information"""
        self.linesets = []
        self.stats = self.linter.add_stats(
            nb_duplicated_lines=0, percent_duplicated_lines=0
        )

    def process_module(self, node):
        """process a module

        the module's content is accessible via the stream object

        stream must implement the readlines method
        """
        with node.stream() as stream:
            self.append_stream(self.linter.current_name, stream, node.file_encoding)

    def close(self):
        """compute and display similarities on closing (i.e. end of parsing)"""
        total = sum(len(lineset) for lineset in self.linesets)
        duplicated = 0
        stats = self.stats
        for num, couples in self._compute_sims():
            msg = []
            lineset = idx = None
            for lineset, idx in couples:
                msg.append("==%s:%s" % (lineset.name, idx))
            msg.sort()

            if lineset:
                for line in lineset._real_lines[idx : idx + num]:
                    msg.append(line.rstrip())

            self.add_message("R0801", args=(len(couples), "\n".join(msg)))
            duplicated += num * (len(couples) - 1)
        stats["nb_duplicated_lines"] = duplicated
        # `total and ...` avoids ZeroDivisionError when no lines were seen
        # (the percentage is then reported as 0).
        stats["percent_duplicated_lines"] = total and duplicated * 100.0 / total
+
+
def register(linter):
    """Required method to auto register this checker."""
    checker = SimilarChecker(linter)
    linter.register_checker(checker)
+
+
def usage(status=0):
    """Print command line usage information, then exit with *status*."""
    help_lines = (
        "finds copy pasted blocks in a set of files",
        "",
        "Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1...",
    )
    for help_line in help_lines:
        print(help_line)
    sys.exit(status)
+
+
def Run(argv=None):
    """standalone command line access point

    Args:
        argv: command line arguments; defaults to sys.argv[1:].
    """
    if argv is None:
        argv = sys.argv[1:]

    # "-d" takes the minimum duplicated-lines count as its value, hence the
    # ":" in the short-options spec. The previous spec "hdi" declared -d
    # as a flag, so getopt returned an empty value and `int(val)` below
    # raised ValueError for "-d 4" (while "--duplicates=4" worked).
    s_opts = "hd:i"
    l_opts = (
        "help",
        "duplicates=",
        "ignore-comments",
        "ignore-imports",
        "ignore-docstrings",
    )
    min_lines = 4
    ignore_comments = False
    ignore_docstrings = False
    ignore_imports = False
    opts, args = getopt(argv, s_opts, l_opts)
    for opt, val in opts:
        if opt in ("-d", "--duplicates"):
            min_lines = int(val)
        elif opt in ("-h", "--help"):
            usage()
        elif opt in ("-i", "--ignore-comments"):
            ignore_comments = True
        elif opt in ("--ignore-docstrings",):
            ignore_docstrings = True
        elif opt in ("--ignore-imports",):
            ignore_imports = True
    if not args:
        usage(1)
    sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
    for filename in args:
        with open(filename) as stream:
            sim.append_stream(filename, stream)
    sim.run()
    sys.exit(0)
+
+
+if __name__ == "__main__":
+ Run()
diff --git a/venv/Lib/site-packages/pylint/checkers/spelling.py b/venv/Lib/site-packages/pylint/checkers/spelling.py
new file mode 100644
index 0000000..b1a5334
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/spelling.py
@@ -0,0 +1,411 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016-2017 Pedro Algarvio <pedro@algarvio.me>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for spelling errors in comments and docstrings.
+"""
+
+import os
+import re
+import tokenize
+
+from pylint.checkers import BaseTokenChecker
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker, ITokenChecker
+
# enchant is an optional dependency: when it is not installed the
# spelling checker stays disabled, and the minimal Filter/Chunker
# stand-ins below keep the subclass definitions in this module
# importable.
try:
    import enchant
    from enchant.tokenize import ( # type: ignore
        get_tokenizer,
        Chunker,
        Filter,
        EmailFilter,
        URLFilter,
        WikiWordFilter,
    )
except ImportError:
    enchant = None
    # pylint: disable=no-init
    class Filter: # type: ignore
        # Placeholder matching enchant's Filter interface; never used at
        # runtime because the checker bails out when enchant is None.
        def _skip(self, word):
            raise NotImplementedError

    class Chunker: # type: ignore
        pass
+
+
# Build the option metadata shown for --spelling-dict: the list of
# installed enchant dictionaries (or "none"), the valid --spelling-dict
# choices, and an extra help sentence when enchant is unavailable.
if enchant is not None:
    br = enchant.Broker()
    dicts = br.list_dicts()
    # Empty string is always a valid choice: it disables the checker.
    dict_choices = [""] + [d[0] for d in dicts]
    dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
    dicts = ", ".join(dicts)
    instr = ""
else:
    dicts = "none"
    dict_choices = [""]
    instr = " To make it work, install the python-enchant package."
+
+
class WordsWithDigigtsFilter(Filter):
    """Skip any word containing at least one digit (e.g. "word2vec")."""

    def _skip(self, word):
        # One digit anywhere in the word is enough to skip it.
        return any(char.isdigit() for char in word)
+
+
class WordsWithUnderscores(Filter):
    """Skip words containing an underscore.

    Such words are most likely identifiers (e.g. function parameter
    names) rather than prose to be spell-checked.
    """

    def _skip(self, word):
        return word.count("_") > 0
+
+
class CamelCasedWord(Filter):
    r"""Skip camelCasedWords (identifiers, not prose).

    A word is skipped when it matches::

        ^([a-z]+([\d]|[A-Z])(?:\w+)?)

    i.e. it starts lowercase and then contains a digit or an uppercase
    letter, like ``myVariable`` or ``word2``.
    """
    _pattern = re.compile(r"^([a-z]+([\d]|[A-Z])(?:\w+)?)")

    def _skip(self, word):
        return self._pattern.match(word) is not None
+
+
class SphinxDirectives(Filter):
    r"""Skip Sphinx cross-reference directives such as ``:class:`BaseQuery```.

    A word is skipped when it matches::

        ^:([a-z]+):`([^`]+)(`)?
    """
    # The final ` in the pattern is optional because enchant strips it out
    _pattern = re.compile(r"^:([a-z]+):`([^`]+)(`)?")

    def _skip(self, word):
        return self._pattern.match(word) is not None
+
+
class ForwardSlashChunkder(Chunker):
    """
    This chunker allows splitting words like 'before/after' into 'before' and 'after'
    """

    def next(self):
        # Implements enchant's Chunker iterator protocol: ``self._text``
        # holds the remaining text, each call returns one (text, offset)
        # chunk, and StopIteration signals exhaustion.
        while True:
            if not self._text:
                raise StopIteration()
            if "/" not in self._text:
                # No slash left: emit the remainder as a single chunk.
                text = self._text
                self._offset = 0
                self._text = ""
                return (text, 0)
            pre_text, post_text = self._text.split("/", 1)
            self._text = post_text
            self._offset = 0
            if (
                not pre_text
                or not post_text
                or not pre_text[-1].isalpha()
                or not post_text[0].isalpha()
            ):
                # Not a word/word pair (e.g. "a//b", "1/2", trailing "/"):
                # re-join and emit unsplit, consuming the rest of the text.
                self._text = ""
                self._offset = 0
                return (pre_text + "/" + post_text, 0)
            # Word/word pair: emit the left word now; the right side stays
            # in self._text for the following call.
            return (pre_text, 0)

    def _next(self):
        # NOTE(review): appears to be dead code -- nothing in this module
        # calls _next(); the enchant protocol uses next() above. Confirm
        # before relying on or removing it.
        while True:
            if "/" not in self._text:
                return (self._text, 0)
            pre_text, post_text = self._text.split("/", 1)
            if not pre_text or not post_text:
                break
            if not pre_text[-1].isalpha() or not post_text[0].isalpha():
                raise StopIteration()
            self._text = pre_text + " " + post_text
        raise StopIteration()
+
+
class SpellingChecker(BaseTokenChecker):
    """Check spelling in comments and docstrings"""

    __implements__ = (ITokenChecker, IAstroidChecker)
    name = "spelling"
    msgs = {
        "C0401": (
            "Wrong spelling of a word '%s' in a comment:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-comment",
            "Used when a word in comment is not spelled correctly.",
        ),
        "C0402": (
            "Wrong spelling of a word '%s' in a docstring:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-docstring",
            "Used when a word in docstring is not spelled correctly.",
        ),
        "C0403": (
            "Invalid characters %r in a docstring",
            "invalid-characters-in-docstring",
            "Used when a word in docstring cannot be checked by enchant.",
        ),
    }
    options = (
        (
            "spelling-dict",
            {
                "default": "",
                "type": "choice",
                "metavar": "<dict name>",
                "choices": dict_choices,
                "help": "Spelling dictionary name. "
                "Available dictionaries: %s.%s" % (dicts, instr),
            },
        ),
        (
            "spelling-ignore-words",
            {
                "default": "",
                "type": "string",
                "metavar": "<comma separated words>",
                "help": "List of comma separated words that " "should not be checked.",
            },
        ),
        (
            "spelling-private-dict-file",
            {
                "default": "",
                "type": "string",
                "metavar": "<path to file>",
                "help": "A path to a file that contains the private "
                "dictionary; one word per line.",
            },
        ),
        (
            "spelling-store-unknown-words",
            {
                "default": "n",
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether to store unknown words to the "
                "private dictionary (see the "
                "--spelling-private-dict-file option) instead of "
                "raising a message.",
            },
        ),
        (
            "max-spelling-suggestions",
            {
                "default": 4,
                "type": "int",
                "metavar": "N",
                "help": "Limits count of emitted suggestions for " "spelling mistakes.",
            },
        ),
    )

    def open(self):
        """Set up the enchant dictionary and tokenizer.

        Leaves ``self.initialized`` False (checker disabled) when enchant
        is missing or no --spelling-dict was configured.
        """
        self.initialized = False
        self.private_dict_file = None

        if enchant is None:
            return
        dict_name = self.config.spelling_dict
        if not dict_name:
            return

        self.ignore_list = [
            w.strip() for w in self.config.spelling_ignore_words.split(",")
        ]
        # "param" appears in docstring in param description and
        # "pylint" appears in comments in pylint pragmas.
        self.ignore_list.extend(["param", "pylint"])

        # Expand tilde to allow e.g. spelling-private-dict-file = ~/.pylintdict
        if self.config.spelling_private_dict_file:
            self.config.spelling_private_dict_file = os.path.expanduser(
                self.config.spelling_private_dict_file
            )

        if self.config.spelling_private_dict_file:
            # Personal word list augments the base dictionary; the file is
            # kept open for appending (see spelling-store-unknown-words).
            self.spelling_dict = enchant.DictWithPWL(
                dict_name, self.config.spelling_private_dict_file
            )
            self.private_dict_file = open(self.config.spelling_private_dict_file, "a")
        else:
            self.spelling_dict = enchant.Dict(dict_name)

        if self.config.spelling_store_unknown_words:
            self.unknown_words = set()

        self.tokenizer = get_tokenizer(
            dict_name,
            chunkers=[ForwardSlashChunkder],
            filters=[
                EmailFilter,
                URLFilter,
                WikiWordFilter,
                WordsWithDigigtsFilter,
                WordsWithUnderscores,
                CamelCasedWord,
                SphinxDirectives,
            ],
        )
        self.initialized = True

    def close(self):
        """Close the private dictionary file, if one was opened."""
        if self.private_dict_file:
            self.private_dict_file.close()

    def _check_spelling(self, msgid, line, line_num):
        """Spell-check one line of text, emitting *msgid* per misspelling."""
        original_line = line
        try:
            # Width of the line's leading whitespace, used to map token
            # offsets back to columns in the original line. The pattern
            # needs at least one leading whitespace char ([^\S]); when
            # there is none, AttributeError (no match) falls back to 0.
            initial_space = re.search(r"^[^\S]\s*", line).regs[0][1]
        except (IndexError, AttributeError):
            initial_space = 0
        if line.strip().startswith("#"):
            # Strip the leading "#" of a comment before tokenizing.
            line = line.strip()[1:]
            starts_with_comment = True
        else:
            starts_with_comment = False
        for word, word_start_at in self.tokenizer(line.strip()):
            word_start_at += initial_space
            lower_cased_word = word.casefold()

            # Skip words from ignore list.
            if word in self.ignore_list or lower_cased_word in self.ignore_list:
                continue

            # Strip starting u' from unicode literals and r' from raw strings.
            if word.startswith(("u'", 'u"', "r'", 'r"')) and len(word) > 2:
                word = word[2:]
                lower_cased_word = lower_cased_word[2:]

            # If it is a known word, then continue.
            try:
                if self.spelling_dict.check(lower_cased_word):
                    # The lower cased version of word passed spell checking
                    continue

                # If we reached this far, it means there was a spelling mistake.
                # Let's retry with the original work because 'unicode' is a
                # spelling mistake but 'Unicode' is not
                if self.spelling_dict.check(word):
                    continue
            except enchant.errors.Error:
                # enchant could not process the word at all (e.g. invalid
                # characters for the dictionary's encoding).
                self.add_message(
                    "invalid-characters-in-docstring", line=line_num, args=(word,)
                )
                continue

            # Store word to private dict or raise a message.
            if self.config.spelling_store_unknown_words:
                if lower_cased_word not in self.unknown_words:
                    self.private_dict_file.write("%s\n" % lower_cased_word)
                    self.unknown_words.add(lower_cased_word)
            else:
                # Present up to N suggestions.
                suggestions = self.spelling_dict.suggest(word)
                del suggestions[self.config.max_spelling_suggestions :]

                # Locate the word in the line to build the "^^^" indicator;
                # the regex anchors on non-word boundaries to avoid matching
                # inside a longer word.
                line_segment = line[word_start_at:]
                match = re.search(r"(\W|^)(%s)(\W|$)" % word, line_segment)
                if match:
                    # Start position of second group in regex.
                    col = match.regs[2][0]
                else:
                    col = line_segment.index(word)

                col += word_start_at

                if starts_with_comment:
                    # Account for the "#" stripped off above.
                    col += 1
                indicator = (" " * col) + ("^" * len(word))

                self.add_message(
                    msgid,
                    line=line_num,
                    args=(
                        word,
                        original_line,
                        indicator,
                        "'{}'".format("' or '".join(suggestions)),
                    ),
                )

    def process_tokens(self, tokens):
        """Spell-check comment tokens of the module being linted."""
        if not self.initialized:
            return

        # Process tokens and look for comments.
        for (tok_type, token, (start_row, _), _, _) in tokens:
            if tok_type == tokenize.COMMENT:
                if start_row == 1 and token.startswith("#!/"):
                    # Skip shebang lines
                    continue
                if token.startswith("# pylint:"):
                    # Skip pylint enable/disable comments
                    continue
                self._check_spelling("wrong-spelling-in-comment", token, start_row)

    @check_messages("wrong-spelling-in-docstring")
    def visit_module(self, node):
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_classdef(self, node):
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_functiondef(self, node):
        if not self.initialized:
            return
        self._check_docstring(node)

    visit_asyncfunctiondef = visit_functiondef

    def _check_docstring(self, node):
        """check the node has any spelling errors"""
        docstring = node.doc
        if not docstring:
            return

        # NOTE(review): assumes the docstring opens on the line directly
        # after the def/class line -- confirm for multi-line signatures.
        start_line = node.lineno + 1

        # Go through lines of docstring
        for idx, line in enumerate(docstring.splitlines()):
            self._check_spelling("wrong-spelling-in-docstring", line, start_line + idx)
+
+
def register(linter):
    """Auto-registration hook required by pylint's plugin loader.

    Instantiates the spelling checker and attaches it to *linter*.
    """
    checker = SpellingChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/stdlib.py b/venv/Lib/site-packages/pylint/checkers/stdlib.py
new file mode 100644
index 0000000..a945107
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/stdlib.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Cosmin Poieana <cmin@ropython.org>
+# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
+# Copyright (c) 2015 Chris Rebert <code@rebertia.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
+# Copyright (c) 2017 Renat Galimov <renat2017@gmail.com>
+# Copyright (c) 2017 Martin <MartinBasti@users.noreply.github.com>
+# Copyright (c) 2017 Christopher Zurcher <zurcher@users.noreply.github.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 Banjamin Freeman <befreeman@users.noreply.github.com>
+# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checkers for various standard library functions."""
+
+import sys
+
+import astroid
+from astroid.bases import Instance
+from astroid.node_classes import Const
+
+from pylint.checkers import BaseChecker, utils
+from pylint.interfaces import IAstroidChecker
+
# Names / qualified names of standard-library callables that
# StdlibChecker.visit_call special-cases below.
OPEN_FILES = {"open", "file"}  # builtins that open a file
UNITTEST_CASE = "unittest.case"  # module owning the TestCase assert helpers
THREADING_THREAD = "threading.Thread"
COPY_COPY = "copy.copy"
OS_ENVIRON = "os._Environ"  # type of the os.environ proxy object
ENV_GETTERS = {"os.getenv"}  # env readers whose argument types are checked
SUBPROCESS_POPEN = "subprocess.Popen"
SUBPROCESS_RUN = "subprocess.run"
OPEN_MODULE = "_io"  # module that actually implements builtin open()
+
+
+def _check_mode_str(mode):
+ # check type
+ if not isinstance(mode, str):
+ return False
+ # check syntax
+ modes = set(mode)
+ _mode = "rwatb+Ux"
+ creating = "x" in modes
+ if modes - set(_mode) or len(mode) > len(modes):
+ return False
+ # check logic
+ reading = "r" in modes
+ writing = "w" in modes
+ appending = "a" in modes
+ text = "t" in modes
+ binary = "b" in modes
+ if "U" in modes:
+ if writing or appending or creating:
+ return False
+ reading = True
+ if text and binary:
+ return False
+ total = reading + writing + appending + creating
+ if total > 1:
+ return False
+ if not (reading or writing or appending or creating):
+ return False
+ return True
+
+
class StdlibChecker(BaseChecker):
    """Checker for dubious usage of various standard-library functions.

    Flags invalid open() modes, datetime.time used in a boolean context,
    redundant unittest asserts, calls to deprecated stdlib methods,
    threading.Thread created without a target, copy.copy(os.environ),
    non-string arguments or defaults passed to os.getenv, and risky
    subprocess.Popen / subprocess.run usage.
    """

    __implements__ = (IAstroidChecker,)
    name = "stdlib"

    msgs = {
        "W1501": (
            '"%s" is not a valid mode for open.',
            "bad-open-mode",
            "Python supports: r, w, a[, x] modes with b, +, "
            "and U (only with r) options. "
            "See http://docs.python.org/2/library/functions.html#open",
        ),
        "W1502": (
            "Using datetime.time in a boolean context.",
            "boolean-datetime",
            "Using datetime.time in a boolean context can hide "
            "subtle bugs when the time they represent matches "
            "midnight UTC. This behaviour was fixed in Python 3.5. "
            "See http://bugs.python.org/issue13936 for reference.",
            {"maxversion": (3, 5)},
        ),
        "W1503": (
            "Redundant use of %s with constant value %r",
            "redundant-unittest-assert",
            "The first argument of assertTrue and assertFalse is "
            "a condition. If a constant is passed as parameter, that "
            "condition will be always true. In this case a warning "
            "should be emitted.",
        ),
        "W1505": (
            "Using deprecated method %s()",
            "deprecated-method",
            "The method is marked as deprecated and will be removed in "
            "a future version of Python. Consider looking for an "
            "alternative in the documentation.",
        ),
        "W1506": (
            "threading.Thread needs the target function",
            "bad-thread-instantiation",
            "The warning is emitted when a threading.Thread class "
            "is instantiated without the target function being passed. "
            "By default, the first parameter is the group param, not the target param. ",
        ),
        "W1507": (
            "Using copy.copy(os.environ). Use os.environ.copy() instead. ",
            "shallow-copy-environ",
            "os.environ is not a dict object but proxy object, so "
            "shallow copy has still effects on original object. "
            "See https://bugs.python.org/issue15373 for reference. ",
        ),
        "E1507": (
            "%s does not support %s type argument",
            "invalid-envvar-value",
            "Env manipulation functions support only string type arguments. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1508": (
            "%s default type is %s. Expected str or None.",
            "invalid-envvar-default",
            "Env manipulation functions return None or str values. "
            "Supplying anything different as a default may cause bugs. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1509": (
            "Using preexec_fn keyword which may be unsafe in the presence "
            "of threads",
            "subprocess-popen-preexec-fn",
            "The preexec_fn parameter is not safe to use in the presence "
            "of threads in your application. The child process could "
            "deadlock before exec is called. If you must use it, keep it "
            "trivial! Minimize the number of libraries you call into."
            "https://docs.python.org/3/library/subprocess.html#popen-constructor",
        ),
        "W1510": (
            "Using subprocess.run without explicitly set `check` is not recommended.",
            "subprocess-run-check",
            "The check parameter should always be used with explicitly set "
            "`check` keyword to make clear what the error-handling behavior is."
            # BUG FIX: the anchor previously read "#subprocess.runs", which
            # does not exist in the Python documentation.
            "https://docs.python.org/3/library/subprocess.html#subprocess.run",
        ),
    }

    # Deprecated stdlib callables. Key 0 applies to every Python version;
    # keys 2 and 3 map the (major, minor, micro) release that deprecated
    # each set of qualified (or bare method) names.
    deprecated = {
        0: {
            "cgi.parse_qs",
            "cgi.parse_qsl",
            "ctypes.c_buffer",
            "distutils.command.register.register.check_metadata",
            "distutils.command.sdist.sdist.check_metadata",
            "tkinter.Misc.tk_menuBar",
            "tkinter.Menu.tk_bindForTraversal",
        },
        2: {
            (2, 6, 0): {
                "commands.getstatus",
                "os.popen2",
                "os.popen3",
                "os.popen4",
                "macostools.touched",
            },
            (2, 7, 0): {
                "unittest.case.TestCase.assertEquals",
                "unittest.case.TestCase.assertNotEquals",
                "unittest.case.TestCase.assertAlmostEquals",
                "unittest.case.TestCase.assertNotAlmostEquals",
                "unittest.case.TestCase.assert_",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
        },
        3: {
            (3, 0, 0): {
                "inspect.getargspec",
                "failUnlessEqual",
                "assertEquals",
                "failIfEqual",
                "assertNotEquals",
                "failUnlessAlmostEqual",
                "assertAlmostEquals",
                "failIfAlmostEqual",
                "assertNotAlmostEquals",
                "failUnless",
                "assert_",
                "failUnlessRaises",
                "failIf",
                "assertRaisesRegexp",
                "assertRegexpMatches",
                "assertNotRegexpMatches",
            },
            (3, 1, 0): {
                "base64.encodestring",
                "base64.decodestring",
                "ntpath.splitunc",
            },
            (3, 2, 0): {
                "cgi.escape",
                "configparser.RawConfigParser.readfp",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
            (3, 3, 0): {
                "inspect.getmoduleinfo",
                "logging.warn",
                "logging.Logger.warn",
                "logging.LoggerAdapter.warn",
                "nntplib._NNTPBase.xpath",
                "platform.popen",
            },
            (3, 4, 0): {
                "importlib.find_loader",
                "plistlib.readPlist",
                "plistlib.writePlist",
                "plistlib.readPlistFromBytes",
                "plistlib.writePlistToBytes",
            },
            (3, 4, 4): {"asyncio.tasks.async"},
            (3, 5, 0): {
                "fractions.gcd",
                "inspect.getargvalues",
                "inspect.formatargspec",
                "inspect.formatargvalues",
                "inspect.getcallargs",
                "platform.linux_distribution",
                "platform.dist",
            },
            (3, 6, 0): {"importlib._bootstrap_external.FileLoader.load_module"},
        },
    }

    def _check_bad_thread_instantiation(self, node):
        """Emit bad-thread-instantiation when no ``target`` can be present.

        threading.Thread's first positional parameter is ``group``, so a
        call with at most one positional argument and no keyword
        arguments cannot have supplied a target function.
        """
        if not node.kwargs and not node.keywords and len(node.args) <= 1:
            self.add_message("bad-thread-instantiation", node=node)

    def _check_for_preexec_fn_in_popen(self, node):
        """Emit subprocess-popen-preexec-fn when ``preexec_fn`` is passed."""
        if node.keywords:
            for keyword in node.keywords:
                if keyword.arg == "preexec_fn":
                    self.add_message("subprocess-popen-preexec-fn", node=node)

    def _check_for_check_kw_in_run(self, node):
        """Emit subprocess-run-check when ``check`` is not set explicitly."""
        kwargs = {keyword.arg for keyword in (node.keywords or ())}
        if "check" not in kwargs:
            self.add_message("subprocess-run-check", node=node)

    def _check_shallow_copy_environ(self, node):
        """Emit shallow-copy-environ for ``copy.copy(os.environ)`` calls."""
        arg = utils.get_argument_from_call(node, position=0)
        for inferred in arg.inferred():
            if inferred.qname() == OS_ENVIRON:
                self.add_message("shallow-copy-environ", node=node)
                break

    @utils.check_messages(
        "bad-open-mode",
        "redundant-unittest-assert",
        "deprecated-method",
        "bad-thread-instantiation",
        "shallow-copy-environ",
        "invalid-envvar-value",
        "invalid-envvar-default",
        "subprocess-popen-preexec-fn",
        "subprocess-run-check",
    )
    def visit_call(self, node):
        """Visit a Call node and dispatch to the relevant sub-checks."""
        try:
            for inferred in node.func.infer():
                if inferred is astroid.Uninferable:
                    continue
                if inferred.root().name == OPEN_MODULE:
                    if getattr(node.func, "name", None) in OPEN_FILES:
                        self._check_open_mode(node)
                elif inferred.root().name == UNITTEST_CASE:
                    self._check_redundant_assert(node, inferred)
                elif isinstance(inferred, astroid.ClassDef):
                    if inferred.qname() == THREADING_THREAD:
                        self._check_bad_thread_instantiation(node)
                    elif inferred.qname() == SUBPROCESS_POPEN:
                        self._check_for_preexec_fn_in_popen(node)
                elif isinstance(inferred, astroid.FunctionDef):
                    name = inferred.qname()
                    if name == COPY_COPY:
                        self._check_shallow_copy_environ(node)
                    elif name in ENV_GETTERS:
                        self._check_env_function(node, inferred)
                    elif name == SUBPROCESS_RUN:
                        self._check_for_check_kw_in_run(node)
                self._check_deprecated_method(node, inferred)
        except astroid.InferenceError:
            # Callee could not be inferred; nothing to check.
            return

    @utils.check_messages("boolean-datetime")
    def visit_unaryop(self, node):
        if node.op == "not":
            self._check_datetime(node.operand)

    @utils.check_messages("boolean-datetime")
    def visit_if(self, node):
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_ifexp(self, node):
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_boolop(self, node):
        for value in node.values:
            self._check_datetime(value)

    def _check_deprecated_method(self, node, inferred):
        """Emit deprecated-method when the called method is deprecated for
        the running interpreter's version (see the ``deprecated`` table).
        """
        py_vers = sys.version_info[0]

        if isinstance(node.func, astroid.Attribute):
            func_name = node.func.attrname
        elif isinstance(node.func, astroid.Name):
            func_name = node.func.name
        else:
            # Not interested in other nodes.
            return

        # Reject nodes which aren't of interest to us.
        acceptable_nodes = (
            astroid.BoundMethod,
            astroid.UnboundMethod,
            astroid.FunctionDef,
        )
        if not isinstance(inferred, acceptable_nodes):
            return

        qname = inferred.qname()
        if any(name in self.deprecated[0] for name in (qname, func_name)):
            self.add_message("deprecated-method", node=node, args=(func_name,))
        else:
            for since_vers, func_list in self.deprecated[py_vers].items():
                if since_vers <= sys.version_info and any(
                    name in func_list for name in (qname, func_name)
                ):
                    self.add_message("deprecated-method", node=node, args=(func_name,))
                    break

    def _check_redundant_assert(self, node, infer):
        """Emit redundant-unittest-assert for assertTrue/False on a constant."""
        if (
            isinstance(infer, astroid.BoundMethod)
            and node.args
            and isinstance(node.args[0], astroid.Const)
            and infer.name in ["assertTrue", "assertFalse"]
        ):
            self.add_message(
                "redundant-unittest-assert",
                args=(infer.name, node.args[0].value),
                node=node,
            )

    def _check_datetime(self, node):
        """ Check that a datetime was inferred.
        If so, emit boolean-datetime warning.
        """
        try:
            inferred = next(node.infer())
        except astroid.InferenceError:
            return
        if isinstance(inferred, Instance) and inferred.qname() == "datetime.time":
            self.add_message("boolean-datetime", node=node)

    def _check_open_mode(self, node):
        """Check that the mode argument of an open or file call is valid."""
        try:
            mode_arg = utils.get_argument_from_call(node, position=1, keyword="mode")
        except utils.NoSuchArgumentError:
            return
        if mode_arg:
            mode_arg = utils.safe_infer(mode_arg)
            if isinstance(mode_arg, astroid.Const) and not _check_mode_str(
                mode_arg.value
            ):
                self.add_message("bad-open-mode", node=node, args=mode_arg.value)

    def _check_env_function(self, node, infer):
        """Validate the name and default arguments of an os.getenv call."""
        env_name_kwarg = "key"
        env_value_kwarg = "default"
        if node.keywords:
            kwargs = {keyword.arg: keyword.value for keyword in node.keywords}
        else:
            kwargs = None
        if node.args:
            env_name_arg = node.args[0]
        elif kwargs and env_name_kwarg in kwargs:
            env_name_arg = kwargs[env_name_kwarg]
        else:
            env_name_arg = None

        if env_name_arg:
            # The variable name must be a string; None is not acceptable.
            self._check_invalid_envvar_value(
                node=node,
                message="invalid-envvar-value",
                call_arg=utils.safe_infer(env_name_arg),
                infer=infer,
                allow_none=False,
            )

        if len(node.args) == 2:
            env_value_arg = node.args[1]
        elif kwargs and env_value_kwarg in kwargs:
            env_value_arg = kwargs[env_value_kwarg]
        else:
            env_value_arg = None

        if env_value_arg:
            # The default may be a string or None.
            self._check_invalid_envvar_value(
                node=node,
                infer=infer,
                message="invalid-envvar-default",
                call_arg=utils.safe_infer(env_value_arg),
                allow_none=True,
            )

    def _check_invalid_envvar_value(self, node, infer, message, call_arg, allow_none):
        """Emit *message* when *call_arg* is not a str (or allowed None)."""
        if call_arg in (astroid.Uninferable, None):
            return

        name = infer.qname()
        if isinstance(call_arg, Const):
            emit = False
            if call_arg.value is None:
                emit = not allow_none
            elif not isinstance(call_arg.value, str):
                emit = True
            if emit:
                self.add_message(message, node=node, args=(name, call_arg.pytype()))
        else:
            # Non-constant (e.g. a list or dict literal) is never a str.
            self.add_message(message, node=node, args=(name, call_arg.pytype()))
+
+
def register(linter):
    """Auto-registration hook required by pylint's plugin loader.

    Instantiates the stdlib checker and attaches it to *linter*.
    """
    checker = StdlibChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/strings.py b/venv/Lib/site-packages/pylint/checkers/strings.py
new file mode 100644
index 0000000..9470f46
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/strings.py
@@ -0,0 +1,755 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009 Charles Hebert <charles.hebert@logilab.fr>
+# Copyright (c) 2010-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016 Peter Dawyndt <Peter.Dawyndt@UGent.be>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for string formatting operations.
+"""
+
+import builtins
+import numbers
+import tokenize
+from collections import Counter
+
+import astroid
+from astroid.arguments import CallSite
+from astroid.node_classes import Const
+
+from pylint.checkers import BaseChecker, BaseTokenChecker, utils
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker, IRawChecker, ITokenChecker
+
+_AST_NODE_STR_TYPES = ("__builtin__.unicode", "__builtin__.str", "builtins.str")
+
+MSGS = {
+ "E1300": (
+ "Unsupported format character %r (%#02x) at index %d",
+ "bad-format-character",
+ "Used when an unsupported format character is used in a format string.",
+ ),
+ "E1301": (
+ "Format string ends in middle of conversion specifier",
+ "truncated-format-string",
+ "Used when a format string terminates before the end of a "
+ "conversion specifier.",
+ ),
+ "E1302": (
+ "Mixing named and unnamed conversion specifiers in format string",
+ "mixed-format-string",
+ "Used when a format string contains both named (e.g. '%(foo)d') "
+ "and unnamed (e.g. '%d') conversion specifiers. This is also "
+ "used when a named conversion specifier contains * for the "
+ "minimum field width and/or precision.",
+ ),
+ "E1303": (
+ "Expected mapping for format string, not %s",
+ "format-needs-mapping",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with an argument that is not a mapping.",
+ ),
+ "W1300": (
+ "Format string dictionary key should be a string, not %s",
+ "bad-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary whose keys are not all strings.",
+ ),
+ "W1301": (
+ "Unused key %r in format string dictionary",
+ "unused-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary that contains keys not required by the "
+ "format string.",
+ ),
+ "E1304": (
+ "Missing key %r in format string dictionary",
+ "missing-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary that doesn't contain all the keys "
+ "required by the format string.",
+ ),
+ "E1305": (
+ "Too many arguments for format string",
+ "too-many-format-args",
+ "Used when a format string that uses unnamed conversion "
+ "specifiers is given too many arguments.",
+ ),
+ "E1306": (
+ "Not enough arguments for format string",
+ "too-few-format-args",
+ "Used when a format string that uses unnamed conversion "
+ "specifiers is given too few arguments",
+ ),
+ "E1307": (
+ "Argument %r does not match format type %r",
+ "bad-string-format-type",
+ "Used when a type required by format string "
+ "is not suitable for actual argument type",
+ ),
+ "E1310": (
+ "Suspicious argument in %s.%s call",
+ "bad-str-strip-call",
+ "The argument to a str.{l,r,}strip call contains a duplicate character, ",
+ ),
+ "W1302": (
+ "Invalid format string",
+ "bad-format-string",
+ "Used when a PEP 3101 format string is invalid.",
+ ),
+ "W1303": (
+ "Missing keyword argument %r for format string",
+ "missing-format-argument-key",
+ "Used when a PEP 3101 format string that uses named fields "
+ "doesn't receive one or more required keywords.",
+ ),
+ "W1304": (
+ "Unused format argument %r",
+ "unused-format-string-argument",
+ "Used when a PEP 3101 format string that uses named "
+ "fields is used with an argument that "
+ "is not required by the format string.",
+ ),
+ "W1305": (
+ "Format string contains both automatic field numbering "
+ "and manual field specification",
+ "format-combined-specification",
+ "Used when a PEP 3101 format string contains both automatic "
+ "field numbering (e.g. '{}') and manual field "
+ "specification (e.g. '{0}').",
+ ),
+ "W1306": (
+ "Missing format attribute %r in format specifier %r",
+ "missing-format-attribute",
+ "Used when a PEP 3101 format string uses an "
+ "attribute specifier ({0.length}), but the argument "
+ "passed for formatting doesn't have that attribute.",
+ ),
+ "W1307": (
+ "Using invalid lookup key %r in format specifier %r",
+ "invalid-format-index",
+ "Used when a PEP 3101 format string uses a lookup specifier "
+ "({a[1]}), but the argument passed for formatting "
+ "doesn't contain or doesn't have that key as an attribute.",
+ ),
+ "W1308": (
+ "Duplicate string formatting argument %r, consider passing as named argument",
+ "duplicate-string-formatting-argument",
+ "Used when we detect that a string formatting is "
+ "repeating an argument instead of using named string arguments",
+ ),
+}
+
+OTHER_NODES = (
+ astroid.Const,
+ astroid.List,
+ astroid.Lambda,
+ astroid.FunctionDef,
+ astroid.ListComp,
+ astroid.SetComp,
+ astroid.GeneratorExp,
+)
+
+BUILTINS_STR = builtins.__name__ + ".str"
+BUILTINS_FLOAT = builtins.__name__ + ".float"
+BUILTINS_INT = builtins.__name__ + ".int"
+
+
+def get_access_path(key, parts):
+ """ Given a list of format specifiers, returns
+ the final access path (e.g. a.b.c[0][1]).
+ """
+ path = []
+ for is_attribute, specifier in parts:
+ if is_attribute:
+ path.append(".{}".format(specifier))
+ else:
+ path.append("[{!r}]".format(specifier))
+ return str(key) + "".join(path)
+
+
+def arg_matches_format_type(arg_type, format_type):
+ if format_type in "sr":
+ # All types can be printed with %s and %r
+ return True
+ if isinstance(arg_type, astroid.Instance):
+ arg_type = arg_type.pytype()
+ if arg_type == BUILTINS_STR:
+ return format_type == "c"
+ if arg_type == BUILTINS_FLOAT:
+ return format_type in "deEfFgGn%"
+ if arg_type == BUILTINS_INT:
+ # Integers allow all types
+ return True
+ return False
+ return True
+
+
+class StringFormatChecker(BaseChecker):
+ """Checks string formatting operations to ensure that the format string
+ is valid and the arguments match the format string.
+ """
+
+ __implements__ = (IAstroidChecker,)
+ name = "string"
+ msgs = MSGS
+
+ # pylint: disable=too-many-branches
+ @check_messages(*MSGS)
+ def visit_binop(self, node):
+ if node.op != "%":
+ return
+ left = node.left
+ args = node.right
+
+ if not (isinstance(left, astroid.Const) and isinstance(left.value, str)):
+ return
+ format_string = left.value
+ try:
+ required_keys, required_num_args, required_key_types, required_arg_types = utils.parse_format_string(
+ format_string
+ )
+ except utils.UnsupportedFormatCharacter as exc:
+ formatted = format_string[exc.index]
+ self.add_message(
+ "bad-format-character",
+ node=node,
+ args=(formatted, ord(formatted), exc.index),
+ )
+ return
+ except utils.IncompleteFormatString:
+ self.add_message("truncated-format-string", node=node)
+ return
+ if required_keys and required_num_args:
+ # The format string uses both named and unnamed format
+ # specifiers.
+ self.add_message("mixed-format-string", node=node)
+ elif required_keys:
+ # The format string uses only named format specifiers.
+ # Check that the RHS of the % operator is a mapping object
+ # that contains precisely the set of keys required by the
+ # format string.
+ if isinstance(args, astroid.Dict):
+ keys = set()
+ unknown_keys = False
+ for k, _ in args.items:
+ if isinstance(k, astroid.Const):
+ key = k.value
+ if isinstance(key, str):
+ keys.add(key)
+ else:
+ self.add_message(
+ "bad-format-string-key", node=node, args=key
+ )
+ else:
+ # One of the keys was something other than a
+ # constant. Since we can't tell what it is,
+ # suppress checks for missing keys in the
+ # dictionary.
+ unknown_keys = True
+ if not unknown_keys:
+ for key in required_keys:
+ if key not in keys:
+ self.add_message(
+ "missing-format-string-key", node=node, args=key
+ )
+ for key in keys:
+ if key not in required_keys:
+ self.add_message(
+ "unused-format-string-key", node=node, args=key
+ )
+ for key, arg in args.items:
+ if not isinstance(key, astroid.Const):
+ continue
+ format_type = required_key_types.get(key.value, None)
+ arg_type = utils.safe_infer(arg)
+ if (
+ format_type is not None
+ and arg_type not in (None, astroid.Uninferable)
+ and not arg_matches_format_type(arg_type, format_type)
+ ):
+ self.add_message(
+ "bad-string-format-type",
+ node=node,
+ args=(arg_type.pytype(), format_type),
+ )
+ elif isinstance(args, (OTHER_NODES, astroid.Tuple)):
+ type_name = type(args).__name__
+ self.add_message("format-needs-mapping", node=node, args=type_name)
+ # else:
+ # The RHS of the format specifier is a name or
+ # expression. It may be a mapping object, so
+ # there's nothing we can check.
+ else:
+ # The format string uses only unnamed format specifiers.
+ # Check that the number of arguments passed to the RHS of
+ # the % operator matches the number required by the format
+ # string.
+ args_elts = ()
+ if isinstance(args, astroid.Tuple):
+ rhs_tuple = utils.safe_infer(args)
+ num_args = None
+ if hasattr(rhs_tuple, "elts"):
+ args_elts = rhs_tuple.elts
+ num_args = len(args_elts)
+ elif isinstance(args, (OTHER_NODES, (astroid.Dict, astroid.DictComp))):
+ args_elts = [args]
+ num_args = 1
+ else:
+ # The RHS of the format specifier is a name or
+ # expression. It could be a tuple of unknown size, so
+ # there's nothing we can check.
+ num_args = None
+ if num_args is not None:
+ if num_args > required_num_args:
+ self.add_message("too-many-format-args", node=node)
+ elif num_args < required_num_args:
+ self.add_message("too-few-format-args", node=node)
+ for arg, format_type in zip(args_elts, required_arg_types):
+ if not arg:
+ continue
+ arg_type = utils.safe_infer(arg)
+ if arg_type not in (
+ None,
+ astroid.Uninferable,
+ ) and not arg_matches_format_type(arg_type, format_type):
+ self.add_message(
+ "bad-string-format-type",
+ node=node,
+ args=(arg_type.pytype(), format_type),
+ )
+
+ @check_messages(*MSGS)
+ def visit_call(self, node):
+ func = utils.safe_infer(node.func)
+ if (
+ isinstance(func, astroid.BoundMethod)
+ and isinstance(func.bound, astroid.Instance)
+ and func.bound.name in ("str", "unicode", "bytes")
+ ):
+ if func.name in ("strip", "lstrip", "rstrip") and node.args:
+ arg = utils.safe_infer(node.args[0])
+ if not isinstance(arg, astroid.Const) or not isinstance(arg.value, str):
+ return
+ if len(arg.value) != len(set(arg.value)):
+ self.add_message(
+ "bad-str-strip-call",
+ node=node,
+ args=(func.bound.name, func.name),
+ )
+ elif func.name == "format":
+ self._check_new_format(node, func)
+
+ def _detect_vacuous_formatting(self, node, positional_arguments):
+ counter = Counter(
+ arg.name for arg in positional_arguments if isinstance(arg, astroid.Name)
+ )
+ for name, count in counter.items():
+ if count == 1:
+ continue
+ self.add_message(
+ "duplicate-string-formatting-argument", node=node, args=(name,)
+ )
+
+ def _check_new_format(self, node, func):
+ """Check the new string formatting. """
+ # Skip ormat nodes which don't have an explicit string on the
+ # left side of the format operation.
+ # We do this because our inference engine can't properly handle
+ # redefinitions of the original string.
+ # Note that there may not be any left side at all, if the format method
+ # has been assigned to another variable. See issue 351. For example:
+ #
+ # fmt = 'some string {}'.format
+ # fmt('arg')
+ if isinstance(node.func, astroid.Attribute) and not isinstance(
+ node.func.expr, astroid.Const
+ ):
+ return
+ if node.starargs or node.kwargs:
+ return
+ try:
+ strnode = next(func.bound.infer())
+ except astroid.InferenceError:
+ return
+ if not (isinstance(strnode, astroid.Const) and isinstance(strnode.value, str)):
+ return
+ try:
+ call_site = CallSite.from_call(node)
+ except astroid.InferenceError:
+ return
+
+ try:
+ fields, num_args, manual_pos = utils.parse_format_method_string(
+ strnode.value
+ )
+ except utils.IncompleteFormatString:
+ self.add_message("bad-format-string", node=node)
+ return
+
+ positional_arguments = call_site.positional_arguments
+ named_arguments = call_site.keyword_arguments
+ named_fields = {field[0] for field in fields if isinstance(field[0], str)}
+ if num_args and manual_pos:
+ self.add_message("format-combined-specification", node=node)
+ return
+
+ check_args = False
+ # Consider "{[0]} {[1]}" as num_args.
+ num_args += sum(1 for field in named_fields if field == "")
+ if named_fields:
+ for field in named_fields:
+ if field and field not in named_arguments:
+ self.add_message(
+ "missing-format-argument-key", node=node, args=(field,)
+ )
+ for field in named_arguments:
+ if field not in named_fields:
+ self.add_message(
+ "unused-format-string-argument", node=node, args=(field,)
+ )
+ # num_args can be 0 if manual_pos is not.
+ num_args = num_args or manual_pos
+ if positional_arguments or num_args:
+ empty = any(True for field in named_fields if field == "")
+ if named_arguments or empty:
+ # Verify the required number of positional arguments
+ # only if the .format got at least one keyword argument.
+ # This means that the format strings accepts both
+ # positional and named fields and we should warn
+ # when one of the them is missing or is extra.
+ check_args = True
+ else:
+ check_args = True
+ if check_args:
+ # num_args can be 0 if manual_pos is not.
+ num_args = num_args or manual_pos
+ if len(positional_arguments) > num_args:
+ self.add_message("too-many-format-args", node=node)
+ elif len(positional_arguments) < num_args:
+ self.add_message("too-few-format-args", node=node)
+
+ self._detect_vacuous_formatting(node, positional_arguments)
+ self._check_new_format_specifiers(node, fields, named_arguments)
+
+ def _check_new_format_specifiers(self, node, fields, named):
+ """
+ Check attribute and index access in the format
+ string ("{0.a}" and "{0[a]}").
+ """
+ for key, specifiers in fields:
+ # Obtain the argument. If it can't be obtained
+ # or inferred, skip this check.
+ if key == "":
+ # {[0]} will have an unnamed argument, defaulting
+ # to 0. It will not be present in `named`, so use the value
+ # 0 for it.
+ key = 0
+ if isinstance(key, numbers.Number):
+ try:
+ argname = utils.get_argument_from_call(node, key)
+ except utils.NoSuchArgumentError:
+ continue
+ else:
+ if key not in named:
+ continue
+ argname = named[key]
+ if argname in (astroid.Uninferable, None):
+ continue
+ try:
+ argument = utils.safe_infer(argname)
+ except astroid.InferenceError:
+ continue
+ if not specifiers or not argument:
+ # No need to check this key if it doesn't
+ # use attribute / item access
+ continue
+ if argument.parent and isinstance(argument.parent, astroid.Arguments):
+ # Ignore any object coming from an argument,
+ # because we can't infer its value properly.
+ continue
+ previous = argument
+ parsed = []
+ for is_attribute, specifier in specifiers:
+ if previous is astroid.Uninferable:
+ break
+ parsed.append((is_attribute, specifier))
+ if is_attribute:
+ try:
+ previous = previous.getattr(specifier)[0]
+ except astroid.NotFoundError:
+ if (
+ hasattr(previous, "has_dynamic_getattr")
+ and previous.has_dynamic_getattr()
+ ):
+ # Don't warn if the object has a custom __getattr__
+ break
+ path = get_access_path(key, parsed)
+ self.add_message(
+ "missing-format-attribute",
+ args=(specifier, path),
+ node=node,
+ )
+ break
+ else:
+ warn_error = False
+ if hasattr(previous, "getitem"):
+ try:
+ previous = previous.getitem(astroid.Const(specifier))
+ except (
+ astroid.AstroidIndexError,
+ astroid.AstroidTypeError,
+ astroid.AttributeInferenceError,
+ ):
+ warn_error = True
+ except astroid.InferenceError:
+ break
+ if previous is astroid.Uninferable:
+ break
+ else:
+ try:
+ # Lookup __getitem__ in the current node,
+ # but skip further checks, because we can't
+ # retrieve the looked object
+ previous.getattr("__getitem__")
+ break
+ except astroid.NotFoundError:
+ warn_error = True
+ if warn_error:
+ path = get_access_path(key, parsed)
+ self.add_message(
+ "invalid-format-index", args=(specifier, path), node=node
+ )
+ break
+
+ try:
+ previous = next(previous.infer())
+ except astroid.InferenceError:
+ # can't check further if we can't infer it
+ break
+
+
+class StringConstantChecker(BaseTokenChecker):
+ """Check string literals"""
+
+ __implements__ = (IAstroidChecker, ITokenChecker, IRawChecker)
+ name = "string"
+ msgs = {
+ "W1401": (
+ "Anomalous backslash in string: '%s'. "
+ "String constant might be missing an r prefix.",
+ "anomalous-backslash-in-string",
+ "Used when a backslash is in a literal string but not as an escape.",
+ ),
+ "W1402": (
+ "Anomalous Unicode escape in byte string: '%s'. "
+ "String constant might be missing an r or u prefix.",
+ "anomalous-unicode-escape-in-string",
+ "Used when an escape like \\u is encountered in a byte "
+ "string where it has no effect.",
+ ),
+ "W1403": (
+ "Implicit string concatenation found in %s",
+ "implicit-str-concat-in-sequence",
+ "String literals are implicitly concatenated in a "
+ "literal iterable definition : "
+ "maybe a comma is missing ?",
+ ),
+ }
+ options = (
+ (
+ "check-str-concat-over-line-jumps",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "This flag controls whether the "
+ "implicit-str-concat-in-sequence should generate a warning "
+ "on implicit string concatenation in sequences defined over "
+ "several lines.",
+ },
+ ),
+ )
+
+ # Characters that have a special meaning after a backslash in either
+ # Unicode or byte strings.
+ ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567"
+
+ # Characters that have a special meaning after a backslash but only in
+ # Unicode strings.
+ UNICODE_ESCAPE_CHARACTERS = "uUN"
+
+ def __init__(self, *args, **kwargs):
+ super(StringConstantChecker, self).__init__(*args, **kwargs)
+ self.string_tokens = {} # token position -> (token value, next token)
+
+ def process_module(self, module):
+ self._unicode_literals = "unicode_literals" in module.future_imports
+
+ def process_tokens(self, tokens):
+ encoding = "ascii"
+ for i, (tok_type, token, start, _, line) in enumerate(tokens):
+ if tok_type == tokenize.ENCODING:
+ # this is always the first token processed
+ encoding = token
+ elif tok_type == tokenize.STRING:
+ # 'token' is the whole un-parsed token; we can look at the start
+ # of it to see whether it's a raw or unicode string etc.
+ self.process_string_token(token, start[0])
+ # We figure the next token, ignoring comments & newlines:
+ j = i + 1
+ while j < len(tokens) and tokens[j].type in (
+ tokenize.NEWLINE,
+ tokenize.NL,
+ tokenize.COMMENT,
+ ):
+ j += 1
+ next_token = tokens[j] if j < len(tokens) else None
+ if encoding != "ascii":
+ # We convert `tokenize` character count into a byte count,
+ # to match with astroid `.col_offset`
+ start = (start[0], len(line[: start[1]].encode(encoding)))
+ self.string_tokens[start] = (str_eval(token), next_token)
+
+ @check_messages(*(msgs.keys()))
+ def visit_list(self, node):
+ self.check_for_concatenated_strings(node, "list")
+
+ @check_messages(*(msgs.keys()))
+ def visit_set(self, node):
+ self.check_for_concatenated_strings(node, "set")
+
+ @check_messages(*(msgs.keys()))
+ def visit_tuple(self, node):
+ self.check_for_concatenated_strings(node, "tuple")
+
+ def check_for_concatenated_strings(self, iterable_node, iterable_type):
+ for elt in iterable_node.elts:
+ if isinstance(elt, Const) and elt.pytype() in _AST_NODE_STR_TYPES:
+ if elt.col_offset < 0:
+ # This can happen in case of escaped newlines
+ continue
+ if (elt.lineno, elt.col_offset) not in self.string_tokens:
+ # This may happen with Latin1 encoding
+ # cf. https://github.com/PyCQA/pylint/issues/2610
+ continue
+ matching_token, next_token = self.string_tokens[
+ (elt.lineno, elt.col_offset)
+ ]
+ # We detect string concatenation: the AST Const is the
+ # combination of 2 string tokens
+ if matching_token != elt.value and next_token is not None:
+ if next_token.type == tokenize.STRING and (
+ next_token.start[0] == elt.lineno
+ or self.config.check_str_concat_over_line_jumps
+ ):
+ self.add_message(
+ "implicit-str-concat-in-sequence",
+ line=elt.lineno,
+ args=(iterable_type,),
+ )
+
+ def process_string_token(self, token, start_row):
+ quote_char = None
+ index = None
+ for index, char in enumerate(token):
+ if char in "'\"":
+ quote_char = char
+ break
+ if quote_char is None:
+ return
+
+ prefix = token[:index].lower() # markers like u, b, r.
+ after_prefix = token[index:]
+ if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
+ string_body = after_prefix[3:-3]
+ else:
+ string_body = after_prefix[1:-1] # Chop off quotes
+ # No special checks on raw strings at the moment.
+ if "r" not in prefix:
+ self.process_non_raw_string_token(prefix, string_body, start_row)
+
+ def process_non_raw_string_token(self, prefix, string_body, start_row):
+ """check for bad escapes in a non-raw string.
+
+ prefix: lowercase string of eg 'ur' string prefix markers.
+ string_body: the un-parsed body of the string, not including the quote
+ marks.
+ start_row: integer line number in the source.
+ """
+ # Walk through the string; if we see a backslash then escape the next
+ # character, and skip over it. If we see a non-escaped character,
+ # alert, and continue.
+ #
+ # Accept a backslash when it escapes a backslash, or a quote, or
+ # end-of-line, or one of the letters that introduce a special escape
+ # sequence <http://docs.python.org/reference/lexical_analysis.html>
+ #
+ index = 0
+ while True:
+ index = string_body.find("\\", index)
+ if index == -1:
+ break
+ # There must be a next character; having a backslash at the end
+ # of the string would be a SyntaxError.
+ next_char = string_body[index + 1]
+ match = string_body[index : index + 2]
+ if next_char in self.UNICODE_ESCAPE_CHARACTERS:
+ if "u" in prefix:
+ pass
+ elif "b" not in prefix:
+ pass # unicode by default
+ else:
+ self.add_message(
+ "anomalous-unicode-escape-in-string",
+ line=start_row,
+ args=(match,),
+ col_offset=index,
+ )
+ elif next_char not in self.ESCAPE_CHARACTERS:
+ self.add_message(
+ "anomalous-backslash-in-string",
+ line=start_row,
+ args=(match,),
+ col_offset=index,
+ )
+ # Whether it was a valid escape or not, backslash followed by
+ # another character can always be consumed whole: the second
+ # character can never be the start of a new backslash escape.
+ index += 2
+
+
+def register(linter):
+ """required method to auto register this checker """
+ linter.register_checker(StringFormatChecker(linter))
+ linter.register_checker(StringConstantChecker(linter))
+
+
+def str_eval(token):
+ """
+ Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit.
+ This supports f-strings, contrary to `ast.literal_eval`.
+ We have to support all string literal notations:
+ https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
+ """
+ if token[0:2].lower() in ("fr", "rf"):
+ token = token[2:]
+ elif token[0].lower() in ("r", "u", "f"):
+ token = token[1:]
+ if token[0:3] in ('"""', "'''"):
+ return token[3:-3]
+ return token[1:-1]
diff --git a/venv/Lib/site-packages/pylint/checkers/typecheck.py b/venv/Lib/site-packages/pylint/checkers/typecheck.py
new file mode 100644
index 0000000..a288f49
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/typecheck.py
@@ -0,0 +1,1770 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 James Lingard <jchl@aristanetworks.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 David Shea <dshea@redhat.com>
+# Copyright (c) 2014 Steven Myint <hg@stevenmyint.com>
+# Copyright (c) 2014 Holger Peters <email@holger-peters.de>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Anentropic <ego@anentropic.com>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016 Jürgen Hermann <jh@web.de>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016 Filipe Brandenburger <filbranden@google.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Ben Green <benhgreen@icloud.com>
+# Copyright (c) 2018 Konstantin <Github@pheanex.de>
+# Copyright (c) 2018 Justin Li <justinnhli@users.noreply.github.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""try to find more bugs in the code using astroid inference capabilities
+"""
+
+import builtins
+import fnmatch
+import heapq
+import itertools
+import operator
+import re
+import shlex
+import sys
+import types
+from collections import deque
+from collections.abc import Sequence
+from functools import singledispatch
+
+import astroid
+import astroid.arguments
+import astroid.context
+import astroid.nodes
+from astroid import bases, decorators, exceptions, modutils, objects
+from astroid.interpreter import dunder_lookup
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import (
+ check_messages,
+ decorated_with,
+ decorated_with_property,
+ has_known_bases,
+ is_builtin_object,
+ is_comprehension,
+ is_inside_abstract_class,
+ is_iterable,
+ is_mapping,
+ is_overload_stub,
+ is_super,
+ node_ignores_exception,
+ safe_infer,
+ supports_delitem,
+ supports_getitem,
+ supports_membership_test,
+ supports_setitem,
+)
+from pylint.interfaces import INFERENCE, IAstroidChecker
+from pylint.utils import get_global_option
+
+BUILTINS = builtins.__name__
+STR_FORMAT = {"%s.str.format" % BUILTINS}
+ASYNCIO_COROUTINE = "asyncio.coroutines.coroutine"
+
+
+def _unflatten(iterable):
+ for index, elem in enumerate(iterable):
+ if isinstance(elem, Sequence) and not isinstance(elem, str):
+ for single_elem in _unflatten(elem):
+ yield single_elem
+ elif elem and not index:
+ # We're interested only in the first element.
+ yield elem
+
+
+def _flatten_container(iterable):
+ # Flatten nested containers into a single iterable
+ for item in iterable:
+ if isinstance(item, (list, tuple, types.GeneratorType)):
+ yield from _flatten_container(item)
+ else:
+ yield item
+
+
+def _is_owner_ignored(owner, attrname, ignored_classes, ignored_modules):
+ """Check if the given owner should be ignored
+
+ This will verify if the owner's module is in *ignored_modules*
+ or the owner's module fully qualified name is in *ignored_modules*
+ or if the *ignored_modules* contains a pattern which catches
+ the fully qualified name of the module.
+
+ Also, similar checks are done for the owner itself, if its name
+ matches any name from the *ignored_classes* or if its qualified
+ name can be found in *ignored_classes*.
+ """
+ ignored_modules = set(ignored_modules)
+ module_name = owner.root().name
+ module_qname = owner.root().qname()
+
+ for ignore in ignored_modules:
+ # Try to match the module name / fully qualified name directly
+ if module_qname in ignored_modules or module_name in ignored_modules:
+ return True
+
+ # Try to see if the ignores pattern match against the module name.
+ if fnmatch.fnmatch(module_qname, ignore):
+ return True
+
+ # Otherwise we might have a root module name being ignored,
+ # and the qualified owner has more levels of depth.
+ parts = deque(module_name.split("."))
+ current_module = ""
+
+ while parts:
+ part = parts.popleft()
+ if not current_module:
+ current_module = part
+ else:
+ current_module += ".{}".format(part)
+ if current_module in ignored_modules:
+ return True
+
+ # Match against ignored classes.
+ ignored_classes = set(ignored_classes)
+ if hasattr(owner, "qname"):
+ qname = owner.qname()
+ else:
+ qname = ""
+ return any(ignore in (attrname, qname) for ignore in ignored_classes)
+
+
+@singledispatch
+def _node_names(node):
+ if not hasattr(node, "locals"):
+ return []
+ return node.locals.keys()
+
+
+@_node_names.register(astroid.ClassDef)
+@_node_names.register(astroid.Instance)
+def _(node):
+ values = itertools.chain(node.instance_attrs.keys(), node.locals.keys())
+
+ try:
+ mro = node.mro()[1:]
+ except (NotImplementedError, TypeError):
+ mro = node.ancestors()
+
+ other_values = [value for cls in mro for value in _node_names(cls)]
+ return itertools.chain(values, other_values)
+
+
+def _string_distance(seq1, seq2):
+ seq2_length = len(seq2)
+
+ row = list(range(1, seq2_length + 1)) + [0]
+ for seq1_index, seq1_char in enumerate(seq1):
+ last_row = row
+ row = [0] * seq2_length + [seq1_index + 1]
+
+ for seq2_index, seq2_char in enumerate(seq2):
+ row[seq2_index] = min(
+ last_row[seq2_index] + 1,
+ row[seq2_index - 1] + 1,
+ last_row[seq2_index - 1] + (seq1_char != seq2_char),
+ )
+
+ return row[seq2_length - 1]
+
+
+def _similar_names(owner, attrname, distance_threshold, max_choices):
+ """Given an owner and a name, try to find similar names
+
+ The similar names are searched given a distance metric and only
+ a given number of choices will be returned.
+ """
+ possible_names = []
+ names = _node_names(owner)
+
+ for name in names:
+ if name == attrname:
+ continue
+
+ distance = _string_distance(attrname, name)
+ if distance <= distance_threshold:
+ possible_names.append((name, distance))
+
+ # Now get back the values with a minimum, up to the given
+ # limit or choices.
+ picked = [
+ name
+ for (name, _) in heapq.nsmallest(
+ max_choices, possible_names, key=operator.itemgetter(1)
+ )
+ ]
+ return sorted(picked)
+
+
+def _missing_member_hint(owner, attrname, distance_threshold, max_choices):
+ names = _similar_names(owner, attrname, distance_threshold, max_choices)
+ if not names:
+ # No similar name.
+ return ""
+
+ names = list(map(repr, names))
+ if len(names) == 1:
+ names = ", ".join(names)
+ else:
+ names = "one of {} or {}".format(", ".join(names[:-1]), names[-1])
+
+ return "; maybe {}?".format(names)
+
+
# Messages emitted by this checker, keyed by pylint message id.
# Each entry is (format string, symbolic name, description[, extra options
# such as "old_names" for renamed message ids]).
MSGS = {
    "E1101": (
        "%s %r has no %r member%s",
        "no-member",
        "Used when a variable is accessed for an unexistent member.",
        {"old_names": [("E1103", "maybe-no-member")]},
    ),
    "I1101": (
        "%s %r has no %r member%s, but source is unavailable. Consider "
        "adding this module to extension-pkg-whitelist if you want "
        "to perform analysis based on run-time introspection of living objects.",
        "c-extension-no-member",
        "Used when a variable is accessed for non-existent member of C "
        "extension. Due to unavailability of source static analysis is impossible, "
        "but it may be performed by introspecting living objects in run-time.",
    ),
    "E1102": (
        "%s is not callable",
        "not-callable",
        "Used when an object being called has been inferred to a non "
        "callable object.",
    ),
    "E1111": (
        "Assigning result of a function call, where the function has no return",
        "assignment-from-no-return",
        "Used when an assignment is done on a function call but the "
        "inferred function doesn't return anything.",
    ),
    "E1120": (
        "No value for argument %s in %s call",
        "no-value-for-parameter",
        "Used when a function call passes too few arguments.",
    ),
    "E1121": (
        "Too many positional arguments for %s call",
        "too-many-function-args",
        "Used when a function call passes too many positional arguments.",
    ),
    "E1123": (
        "Unexpected keyword argument %r in %s call",
        "unexpected-keyword-arg",
        "Used when a function call passes a keyword argument that "
        "doesn't correspond to one of the function's parameter names.",
    ),
    "E1124": (
        "Argument %r passed by position and keyword in %s call",
        "redundant-keyword-arg",
        "Used when a function call would result in assigning multiple "
        "values to a function parameter, one value from a positional "
        "argument and one from a keyword argument.",
    ),
    "E1125": (
        "Missing mandatory keyword argument %r in %s call",
        "missing-kwoa",
        (
            "Used when a function call does not pass a mandatory"
            " keyword-only argument."
        ),
    ),
    "E1126": (
        "Sequence index is not an int, slice, or instance with __index__",
        "invalid-sequence-index",
        "Used when a sequence type is indexed with an invalid type. "
        "Valid types are ints, slices, and objects with an __index__ "
        "method.",
    ),
    "E1127": (
        "Slice index is not an int, None, or instance with __index__",
        "invalid-slice-index",
        "Used when a slice index is not an integer, None, or an object "
        "with an __index__ method.",
    ),
    "E1128": (
        "Assigning result of a function call, where the function returns None",
        "assignment-from-none",
        "Used when an assignment is done on a function call but the "
        "inferred function returns nothing but None.",
        {"old_names": [("W1111", "old-assignment-from-none")]},
    ),
    "E1129": (
        "Context manager '%s' doesn't implement __enter__ and __exit__.",
        "not-context-manager",
        "Used when an instance in a with statement doesn't implement "
        "the context manager protocol(__enter__/__exit__).",
    ),
    "E1130": (
        "%s",
        "invalid-unary-operand-type",
        "Emitted when a unary operand is used on an object which does not "
        "support this type of operation.",
    ),
    "E1131": (
        "%s",
        "unsupported-binary-operation",
        "Emitted when a binary arithmetic operation between two "
        "operands is not supported.",
    ),
    "E1132": (
        "Got multiple values for keyword argument %r in function call",
        "repeated-keyword",
        "Emitted when a function call got multiple values for a keyword.",
    ),
    "E1135": (
        "Value '%s' doesn't support membership test",
        "unsupported-membership-test",
        "Emitted when an instance in membership test expression doesn't "
        "implement membership protocol (__contains__/__iter__/__getitem__).",
    ),
    "E1136": (
        "Value '%s' is unsubscriptable",
        "unsubscriptable-object",
        "Emitted when a subscripted value doesn't support subscription "
        "(i.e. doesn't define __getitem__ method or __class_getitem__ for a class).",
    ),
    "E1137": (
        "%r does not support item assignment",
        "unsupported-assignment-operation",
        "Emitted when an object does not support item assignment "
        "(i.e. doesn't define __setitem__ method).",
    ),
    "E1138": (
        "%r does not support item deletion",
        "unsupported-delete-operation",
        "Emitted when an object does not support item deletion "
        "(i.e. doesn't define __delitem__ method).",
    ),
    "E1139": (
        "Invalid metaclass %r used",
        "invalid-metaclass",
        "Emitted whenever we can detect that a class is using, "
        "as a metaclass, something which might be invalid for using as "
        "a metaclass.",
    ),
    "E1140": (
        "Dict key is unhashable",
        "unhashable-dict-key",
        "Emitted when a dict key is not hashable "
        "(i.e. doesn't define __hash__ method).",
    ),
    "E1141": (
        "Unpacking a dictionary in iteration without calling .items()",
        "dict-iter-missing-items",
        "Emitted when trying to iterate through a dict without calling .items()",
    ),
    "W1113": (
        "Keyword argument before variable positional arguments list "
        "in the definition of %s function",
        "keyword-arg-before-vararg",
        "When defining a keyword argument before variable positional arguments, one can "
        "end up in having multiple values passed for the aforementioned parameter in "
        "case the method is called with keyword arguments.",
    ),
    "W1114": (
        "Positional arguments appear to be out of order",
        "arguments-out-of-order",
        "Emitted when the caller's argument names fully match the parameter "
        "names in the function signature but do not have the same order.",
    ),
}

# builtin sequence types in Python 2 and 3.
SEQUENCE_TYPES = {
    "str",
    "unicode",
    "list",
    "tuple",
    "bytearray",
    "xrange",
    "range",
    "bytes",
    "memoryview",
}
+
+
def _emit_no_member(node, owner, owner_name, ignored_mixins=True, ignored_none=True):
    """Try to see if no-member should be emitted for the given owner.

    The following cases are ignored:

    * the owner is a function and it has decorators.
    * the owner is an instance and it has __getattr__, __getattribute__ implemented
    * the module is explicitly ignored from no-member checks
    * the owner is a class and the name can be found in its metaclass.
    * The access node is protected by an except handler, which handles
      AttributeError, Exception or bare except.

    Returns True when the no-member message should be emitted.
    """
    # pylint: disable=too-many-return-statements
    if node_ignores_exception(node, AttributeError):
        # The access is guarded by a handler catching AttributeError (or
        # something broader), so the author already expects failure.
        return False
    if ignored_none and isinstance(owner, astroid.Const) and owner.value is None:
        return False
    if is_super(owner) or getattr(owner, "type", None) == "metaclass":
        return False
    if owner_name and ignored_mixins and owner_name[-5:].lower() == "mixin":
        # Mixins frequently rely on attributes provided by their host class.
        return False
    if isinstance(owner, astroid.FunctionDef) and owner.decorators:
        # A decorator may replace the function with something else entirely.
        return False
    if isinstance(owner, (astroid.Instance, astroid.ClassDef)):
        if owner.has_dynamic_getattr():
            # Issue #2565: Don't ignore enums, as they have a `__getattr__` but it's not
            # invoked at this point.
            try:
                metaclass = owner.metaclass()
            except exceptions.MroError:
                return False
            if metaclass:
                return metaclass.qname() == "enum.EnumMeta"
            return False
        if not has_known_bases(owner):
            # Unknown ancestry: cannot prove the member is missing.
            return False

        # Exclude typed annotations, since these might actually exist
        # at some point during the runtime of the program.
        attribute = owner.locals.get(node.attrname, [None])[0]
        if (
            attribute
            and isinstance(attribute, astroid.AssignName)
            and isinstance(attribute.parent, astroid.AnnAssign)
        ):
            return False
    if isinstance(owner, objects.Super):
        # Verify if we are dealing with an invalid Super object.
        # If it is invalid, then there's no point in checking that
        # it has the required attribute. Also, don't fail if the
        # MRO is invalid.
        try:
            owner.super_mro()
        except (exceptions.MroError, exceptions.SuperError):
            return False
        if not all(map(has_known_bases, owner.type.mro())):
            return False
    if isinstance(owner, astroid.Module):
        try:
            # A module-level __getattr__ (PEP 562 style) can resolve anything.
            owner.getattr("__getattr__")
            return False
        except astroid.NotFoundError:
            pass
    if owner_name and node.attrname.startswith("_" + owner_name):
        # Test if an attribute has been mangled ('private' attribute)
        unmangled_name = node.attrname.split("_" + owner_name)[-1]
        try:
            if owner.getattr(unmangled_name, context=None) is not None:
                return False
        except astroid.NotFoundError:
            return True
    return True
+
+
def _determine_callable(callable_obj):
    """Classify *callable_obj* for call-argument checking.

    Returns a (function node, implicit parameter count, description) triple.
    Raises ValueError when the object cannot be analyzed as a callable.
    """
    # Ordering is important, since BoundMethod is a subclass of UnboundMethod,
    # and Function inherits Lambda.
    parameters = 0
    if hasattr(callable_obj, "implicit_parameters"):
        parameters = callable_obj.implicit_parameters()
    if isinstance(callable_obj, astroid.BoundMethod):
        # Bound methods have an extra implicit 'self' argument.
        return callable_obj, parameters, callable_obj.type
    if isinstance(callable_obj, astroid.UnboundMethod):
        return callable_obj, parameters, "unbound method"
    if isinstance(callable_obj, astroid.FunctionDef):
        return callable_obj, parameters, callable_obj.type
    if isinstance(callable_obj, astroid.Lambda):
        return callable_obj, parameters, "lambda"
    if isinstance(callable_obj, astroid.ClassDef):
        # Class instantiation, lookup __new__ instead.
        # If we only find object.__new__, we can safely check __init__
        # instead. If __new__ belongs to builtins, then we look
        # again for __init__ in the locals, since we won't have
        # argument information for the builtin __new__ function.
        try:
            # Use the last definition of __new__.
            new = callable_obj.local_attr("__new__")[-1]
        except exceptions.NotFoundError:
            new = None

        from_object = new and new.parent.scope().name == "object"
        from_builtins = new and new.root().name in sys.builtin_module_names

        if not new or from_object or from_builtins:
            try:
                # Use the last definition of __init__.
                callable_obj = callable_obj.local_attr("__init__")[-1]
            except exceptions.NotFoundError:
                # do nothing, covered by no-init.
                raise ValueError
        else:
            callable_obj = new

        if not isinstance(callable_obj, astroid.FunctionDef):
            raise ValueError
        # both have an extra implicit 'cls'/'self' argument.
        return callable_obj, parameters, "constructor"

    raise ValueError
+
+
+def _has_parent_of_type(node, node_type, statement):
+ """Check if the given node has a parent of the given type."""
+ parent = node.parent
+ while not isinstance(parent, node_type) and statement.parent_of(parent):
+ parent = parent.parent
+ return isinstance(parent, node_type)
+
+
def _no_context_variadic_keywords(node, scope):
    """Detect a **kwargs variadic forwarded without a real call context."""
    statement = node.statement()
    variadics = ()

    is_plain_lambda = isinstance(scope, astroid.Lambda) and not isinstance(
        scope, astroid.FunctionDef
    )
    if is_plain_lambda:
        variadics = list(node.keywords or []) + node.kwargs
    elif isinstance(statement, (astroid.Return, astroid.Expr)) and isinstance(
        statement.value, astroid.Call
    ):
        inner_call = statement.value
        variadics = list(inner_call.keywords or []) + inner_call.kwargs

    return _no_context_variadic(node, scope.args.kwarg, astroid.Keyword, variadics)
+
+
def _no_context_variadic_positional(node, scope):
    """Detect an *args variadic forwarded without a real call context."""
    variadics = ()
    is_plain_lambda = isinstance(scope, astroid.Lambda) and not isinstance(
        scope, astroid.FunctionDef
    )
    if is_plain_lambda:
        variadics = node.starargs + node.kwargs
    else:
        statement = node.statement()
        if isinstance(statement, (astroid.Expr, astroid.Return)) and isinstance(
            statement.value, astroid.Call
        ):
            inner_call = statement.value
            variadics = inner_call.starargs + inner_call.kwargs

    return _no_context_variadic(node, scope.args.vararg, astroid.Starred, variadics)
+
+
def _no_context_variadic(node, variadic_name, variadic_type, variadics):
    """Verify if the given call node has variadic nodes without context

    This is a workaround for handling cases of nested call functions
    which don't have the specific call context at hand.
    Variadic arguments (variable positional arguments and variable
    keyword arguments) are inferred, inherently wrong, by astroid
    as a Tuple, respectively a Dict with empty elements.
    This can lead pylint to believe that a function call receives
    too few arguments.
    """
    scope = node.scope()
    # A lambda that is not a FunctionDef (i.e. a real `lambda` expression).
    is_in_lambda_scope = not isinstance(scope, astroid.FunctionDef) and isinstance(
        scope, astroid.Lambda
    )
    statement = node.statement()
    for name in statement.nodes_of_class(astroid.Name):
        if name.name != variadic_name:
            continue

        inferred = safe_infer(name)
        if isinstance(inferred, (astroid.List, astroid.Tuple)):
            length = len(inferred.elts)
        elif isinstance(inferred, astroid.Dict):
            length = len(inferred.items)
        else:
            continue

        if is_in_lambda_scope and isinstance(inferred.parent, astroid.Arguments):
            # The statement of the variadic will be the assignment itself,
            # so we need to go the lambda instead
            inferred_statement = inferred.parent.parent
        else:
            inferred_statement = inferred.statement()

        # An empty inferred container coming from a function parameter is
        # exactly the astroid artifact we want to ignore.
        if not length and isinstance(inferred_statement, astroid.Lambda):
            is_in_starred_context = _has_parent_of_type(node, variadic_type, statement)
            used_as_starred_argument = any(
                variadic.value == name or variadic.value.parent_of(name)
                for variadic in variadics
            )
            if is_in_starred_context or used_as_starred_argument:
                return True
    return False
+
+
+def _is_invalid_metaclass(metaclass):
+ try:
+ mro = metaclass.mro()
+ except NotImplementedError:
+ # Cannot have a metaclass which is not a newstyle class.
+ return True
+ else:
+ if not any(is_builtin_object(cls) and cls.name == "type" for cls in mro):
+ return True
+ return False
+
+
def _infer_from_metaclass_constructor(cls, func):
    """Try to infer what the given *func* constructor is building

    :param astroid.FunctionDef func:
        A metaclass constructor. Metaclass definitions can be
        functions, which should accept three arguments, the name of
        the class, the bases of the class and the attributes.
        The function could return anything, but usually it should
        be a proper metaclass.
    :param astroid.ClassDef cls:
        The class for which the *func* parameter should generate
        a metaclass.
    :returns:
        The class generated by the function or None,
        if we couldn't infer it.
    :rtype: astroid.ClassDef
    """
    context = astroid.context.InferenceContext()

    # Build the three positional arguments a metaclass factory receives:
    # the class name, its bases, and its attribute mapping.
    bases_node = astroid.List()
    bases_node.postinit(elts=cls.bases)

    attributes = astroid.Dict()
    attributes.postinit([(name, values[-1]) for name, values in cls.locals.items()])

    call_args = astroid.Tuple()
    call_args.postinit([cls.name, bases_node, attributes])

    context.callcontext = astroid.context.CallContext(call_args)
    try:
        result = next(func.infer_call_result(func, context), None)
    except astroid.InferenceError:
        return None
    return result or None
+
+
def _is_c_extension(module_node):
    """A non-stdlib module whose source is unavailable is a C extension."""
    if modutils.is_standard_module(module_node.name):
        return False
    return not module_node.fully_defined()
+
+
class TypeChecker(BaseChecker):
    """Try to find bugs in the code using type inference."""

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "typecheck"
    # messages
    msgs = MSGS
    priority = -1
    # configuration options
    options = (
        (
            "ignore-on-opaque-inference",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "This flag controls whether pylint should warn about "
                "no-member and similar checks whenever an opaque object "
                "is returned when inferring. The inference can return "
                "multiple potential results while evaluating a Python object, "
                "but some branches might not be evaluated, which results in "
                "partial inference. In that case, it might be useful to still emit "
                "no-member and other checks for the rest of the inferred objects.",
            },
        ),
        (
            "ignore-mixin-members",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": 'Tells whether missing members accessed in mixin \
class should be ignored. A mixin class is detected if its name ends with \
"mixin" (case insensitive).',
            },
        ),
        (
            "ignore-none",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether to warn about missing members when the owner "
                "of the attribute is inferred to be None.",
            },
        ),
        (
            "ignored-modules",
            {
                "default": (),
                "type": "csv",
                "metavar": "<module names>",
                "help": "List of module names for which member attributes "
                "should not be checked (useful for modules/projects "
                "where namespaces are manipulated during runtime and "
                "thus existing member attributes cannot be "
                "deduced by static analysis). It supports qualified "
                "module names, as well as Unix pattern matching.",
            },
        ),
        # the defaults here are *stdlib* names that (almost) always
        # lead to false positives, since their idiomatic use is
        # 'too dynamic' for pylint to grok.
        (
            "ignored-classes",
            {
                "default": ("optparse.Values", "thread._local", "_thread._local"),
                "type": "csv",
                "metavar": "<members names>",
                "help": "List of class names for which member attributes "
                "should not be checked (useful for classes with "
                "dynamically set attributes). This supports "
                "the use of qualified names.",
            },
        ),
        (
            "generated-members",
            {
                "default": (),
                "type": "string",
                "metavar": "<members names>",
                "help": "List of members which are set dynamically and \
missed by pylint inference system, and so shouldn't trigger E1101 when \
accessed. Python regular expressions are accepted.",
            },
        ),
        (
            "contextmanager-decorators",
            {
                "default": ["contextlib.contextmanager"],
                "type": "csv",
                "metavar": "<decorator names>",
                "help": "List of decorators that produce context managers, "
                "such as contextlib.contextmanager. Add to this list "
                "to register other decorators that produce valid "
                "context managers.",
            },
        ),
        (
            "missing-member-hint-distance",
            {
                "default": 1,
                "type": "int",
                "metavar": "<member hint edit distance>",
                "help": "The minimum edit distance a name should have in order "
                "to be considered a similar match for a missing member name.",
            },
        ),
        (
            "missing-member-max-choices",
            {
                "default": 1,
                "type": "int",
                "metavar": "<member hint max choices>",
                "help": "The total number of similar names that should be taken in "
                "consideration when showing a hint for a missing member.",
            },
        ),
        (
            "missing-member-hint",
            {
                "default": True,
                "type": "yn",
                "metavar": "<missing member hint>",
                "help": "Show a hint with possible names when a member name was not "
                "found. The aspect of finding the hint is based on edit distance.",
            },
        ),
        (
            "signature-mutators",
            {
                "default": [],
                "type": "csv",
                "metavar": "<decorator names>",
                "help": "List of decorators that change the signature of "
                "a decorated function.",
            },
        ),
    )
+
    @decorators.cachedproperty
    def _suggestion_mode(self):
        # Cached: the suggestion-mode option cannot change after config load.
        return get_global_option(self, "suggestion-mode", default=True)
+
+ def open(self):
+ # do this in open since config not fully initialized in __init__
+ # generated_members may contain regular expressions
+ # (surrounded by quote `"` and followed by a comma `,`)
+ # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' =>
+ # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')
+ if isinstance(self.config.generated_members, str):
+ gen = shlex.shlex(self.config.generated_members)
+ gen.whitespace += ","
+ gen.wordchars += r"[]-+\.*?()|"
+ self.config.generated_members = tuple(tok.strip('"') for tok in gen)
+
+ @check_messages("keyword-arg-before-vararg")
+ def visit_functiondef(self, node):
+ # check for keyword arg before varargs
+ if node.args.vararg and node.args.defaults:
+ self.add_message("keyword-arg-before-vararg", node=node, args=(node.name))
+
+ visit_asyncfunctiondef = visit_functiondef
+
+ @check_messages("invalid-metaclass")
+ def visit_classdef(self, node):
+ def _metaclass_name(metaclass):
+ if isinstance(metaclass, (astroid.ClassDef, astroid.FunctionDef)):
+ return metaclass.name
+ return metaclass.as_string()
+
+ metaclass = node.declared_metaclass()
+ if not metaclass:
+ return
+
+ if isinstance(metaclass, astroid.FunctionDef):
+ # Try to infer the result.
+ metaclass = _infer_from_metaclass_constructor(node, metaclass)
+ if not metaclass:
+ # Don't do anything if we cannot infer the result.
+ return
+
+ if isinstance(metaclass, astroid.ClassDef):
+ if _is_invalid_metaclass(metaclass):
+ self.add_message(
+ "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+ )
+ else:
+ self.add_message(
+ "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+ )
+
    def visit_assignattr(self, node):
        # Augmented assignment (`x.a += 1`) reads the attribute before
        # writing it, so it deserves the same member check as plain access.
        if isinstance(node.assign_type(), astroid.AugAssign):
            self.visit_attribute(node)
+
    def visit_delattr(self, node):
        # `del x.a` must also verify that the attribute actually exists.
        self.visit_attribute(node)
+
    @check_messages("no-member", "c-extension-no-member")
    def visit_attribute(self, node):
        """check that the accessed attribute exists

        to avoid too much false positives for now, we'll consider the code as
        correct if a single of the inferred nodes has the accessed attribute.

        function/method, super call and metaclasses are ignored
        """
        for pattern in self.config.generated_members:
            # attribute is marked as generated, stop here
            if re.match(pattern, node.attrname):
                return
            if re.match(pattern, node.as_string()):
                return

        try:
            inferred = list(node.expr.infer())
        except exceptions.InferenceError:
            return

        # list of (node, nodename) which are missing the attribute
        missingattr = set()

        non_opaque_inference_results = [
            owner
            for owner in inferred
            if owner is not astroid.Uninferable
            and not isinstance(owner, astroid.nodes.Unknown)
        ]
        if (
            len(non_opaque_inference_results) != len(inferred)
            and self.config.ignore_on_opaque_inference
        ):
            # There is an ambiguity in the inference. Since we can't
            # make sure that we won't emit a false positive, we just stop
            # whenever the inference returns an opaque inference object.
            return
        # A single inferred owner possessing the attribute is enough to
        # consider the access valid (see the docstring above).
        for owner in non_opaque_inference_results:
            name = getattr(owner, "name", None)
            if _is_owner_ignored(
                owner, name, self.config.ignored_classes, self.config.ignored_modules
            ):
                continue

            try:
                if not [
                    n
                    for n in owner.getattr(node.attrname)
                    if not isinstance(n.statement(), astroid.AugAssign)
                ]:
                    missingattr.add((owner, name))
                    continue
            except AttributeError:
                continue
            except exceptions.NotFoundError:
                # This can't be moved before the actual .getattr call,
                # because there can be more values inferred and we are
                # stopping after the first one which has the attribute in question.
                # The problem is that if the first one has the attribute,
                # but we continue to the next values which doesn't have the
                # attribute, then we'll have a false positive.
                # So call this only after the call has been made.
                if not _emit_no_member(
                    node,
                    owner,
                    name,
                    ignored_mixins=self.config.ignore_mixin_members,
                    ignored_none=self.config.ignore_none,
                ):
                    continue
                missingattr.add((owner, name))
                continue
            # stop on the first found
            break
        else:
            # we have not found any node with the attributes, display the
            # message for inferred nodes
            done = set()
            for owner, name in missingattr:
                # Deduplicate by the underlying class so one message is
                # emitted per distinct owner type.
                if isinstance(owner, astroid.Instance):
                    actual = owner._proxied
                else:
                    actual = owner
                if actual in done:
                    continue
                done.add(actual)

                msg, hint = self._get_nomember_msgid_hint(node, owner)
                self.add_message(
                    msg,
                    node=node,
                    args=(owner.display_type(), name, node.attrname, hint),
                    confidence=INFERENCE,
                )
+
+ def _get_nomember_msgid_hint(self, node, owner):
+ suggestions_are_possible = self._suggestion_mode and isinstance(
+ owner, astroid.Module
+ )
+ if suggestions_are_possible and _is_c_extension(owner):
+ msg = "c-extension-no-member"
+ hint = ""
+ else:
+ msg = "no-member"
+ if self.config.missing_member_hint:
+ hint = _missing_member_hint(
+ owner,
+ node.attrname,
+ self.config.missing_member_hint_distance,
+ self.config.missing_member_max_choices,
+ )
+ else:
+ hint = ""
+ return msg, hint
+
    @check_messages("assignment-from-no-return", "assignment-from-none")
    def visit_assign(self, node):
        """check that if assigning to a function call, the function is
        possibly returning something valuable
        """
        if not isinstance(node.value, astroid.Call):
            return

        function_node = safe_infer(node.value.func)
        funcs = (astroid.FunctionDef, astroid.UnboundMethod, astroid.BoundMethod)
        if not isinstance(function_node, funcs):
            return

        # Unwrap to get the actual function object
        if isinstance(function_node, astroid.BoundMethod) and isinstance(
            function_node._proxied, astroid.UnboundMethod
        ):
            function_node = function_node._proxied._proxied

        # Make sure that it's a valid function that we can analyze.
        # Ordered from less expensive to more expensive checks.
        # pylint: disable=too-many-boolean-expressions
        if (
            not function_node.is_function
            or isinstance(function_node, astroid.AsyncFunctionDef)
            or function_node.decorators
            or function_node.is_generator()
            or function_node.is_abstract(pass_is_abstract=False)
            or not function_node.root().fully_defined()
        ):
            return

        returns = list(
            function_node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef)
        )
        if not returns:
            self.add_message("assignment-from-no-return", node=node)
        else:
            for rnode in returns:
                if not (
                    isinstance(rnode.value, astroid.Const)
                    and rnode.value.value is None
                    or rnode.value is None
                ):
                    break
            else:
                # Every return statement is bare or returns the constant None.
                self.add_message("assignment-from-none", node=node)
+
    def _check_uninferable_call(self, node):
        """
        Check that the given uninferable Call node does not
        call an actual function.
        """
        if not isinstance(node.func, astroid.Attribute):
            return

        # Look for properties. First, obtain
        # the lhs of the Attribute node and search the attribute
        # there. If that attribute is a property or a subclass of properties,
        # then most likely it's not callable.

        expr = node.func.expr
        klass = safe_infer(expr)
        if (
            klass is None
            or klass is astroid.Uninferable
            or not isinstance(klass, astroid.Instance)
        ):
            return

        try:
            attrs = klass._proxied.getattr(node.func.attrname)
        except exceptions.NotFoundError:
            return

        for attr in attrs:
            if attr is astroid.Uninferable:
                continue
            if not isinstance(attr, astroid.FunctionDef):
                continue

            # Decorated, see if it is decorated with a property.
            # Also, check the returns and see if they are callable.
            if decorated_with_property(attr):

                try:
                    all_returns_are_callable = all(
                        return_node.callable() or return_node is astroid.Uninferable
                        for return_node in attr.infer_call_result(node)
                    )
                except astroid.InferenceError:
                    continue

                if not all_returns_are_callable:
                    # The property's value is not callable, so calling the
                    # attribute expression is a bug in the user's code.
                    self.add_message(
                        "not-callable", node=node, args=node.func.as_string()
                    )
                    break
+
+ def _check_argument_order(self, node, call_site, called, called_param_names):
+ """Match the supplied argument names against the function parameters.
+ Warn if some argument names are not in the same order as they are in
+ the function signature.
+ """
+ # Check for called function being an object instance function
+ # If so, ignore the initial 'self' argument in the signature
+ try:
+ is_classdef = isinstance(called.parent, astroid.scoped_nodes.ClassDef)
+ if is_classdef and called_param_names[0] == "self":
+ called_param_names = called_param_names[1:]
+ except IndexError:
+ return
+
+ try:
+ # extract argument names, if they have names
+ calling_parg_names = [p.name for p in call_site.positional_arguments]
+
+ # Additionally get names of keyword arguments to use in a full match
+ # against parameters
+ calling_kwarg_names = [
+ arg.name for arg in call_site.keyword_arguments.values()
+ ]
+ except AttributeError:
+ # the type of arg does not provide a `.name`. In this case we
+ # stop checking for out-of-order arguments because it is only relevant
+ # for named variables.
+ return
+
+ # Don't check for ordering if there is an unmatched arg or param
+ arg_set = set(calling_parg_names) | set(calling_kwarg_names)
+ param_set = set(called_param_names)
+ if arg_set != param_set:
+ return
+
+ # Warn based on the equality of argument ordering
+ if calling_parg_names != called_param_names[: len(calling_parg_names)]:
+ self.add_message("arguments-out-of-order", node=node, args=())
+
+ # pylint: disable=too-many-branches,too-many-locals
+ @check_messages(*(list(MSGS.keys())))
+ def visit_call(self, node):
+ """check that called functions/methods are inferred to callable objects,
+ and that the arguments passed to the function match the parameters in
+ the inferred function's definition
+ """
+ called = safe_infer(node.func)
+ # only function, generator and object defining __call__ are allowed
+ # Ignore instances of descriptors since astroid cannot properly handle them
+ # yet
+ if called and not called.callable():
+ if isinstance(called, astroid.Instance) and (
+ not has_known_bases(called)
+ or (
+ called.parent is not None
+ and isinstance(called.scope(), astroid.ClassDef)
+ and "__get__" in called.locals
+ )
+ ):
+ # Don't emit if we can't make sure this object is callable.
+ pass
+ else:
+ self.add_message("not-callable", node=node, args=node.func.as_string())
+
+ self._check_uninferable_call(node)
+ try:
+ called, implicit_args, callable_name = _determine_callable(called)
+ except ValueError:
+ # Any error occurred during determining the function type, most of
+ # those errors are handled by different warnings.
+ return
+
+ if called.args.args is None:
+ # Built-in functions have no argument information.
+ return
+
+ if len(called.argnames()) != len(set(called.argnames())):
+ # Duplicate parameter name (see duplicate-argument). We can't really
+ # make sense of the function call in this case, so just return.
+ return
+
+ # Build the set of keyword arguments, checking for duplicate keywords,
+ # and count the positional arguments.
+ call_site = astroid.arguments.CallSite.from_call(node)
+
+ # Warn about duplicated keyword arguments, such as `f=24, **{'f': 24}`
+ for keyword in call_site.duplicated_keywords:
+ self.add_message("repeated-keyword", node=node, args=(keyword,))
+
+ if call_site.has_invalid_arguments() or call_site.has_invalid_keywords():
+ # Can't make sense of this.
+ return
+
+ # Has the function signature changed in ways we cannot reliably detect?
+ if hasattr(called, "decorators") and decorated_with(
+ called, self.config.signature_mutators
+ ):
+ return
+
+ num_positional_args = len(call_site.positional_arguments)
+ keyword_args = list(call_site.keyword_arguments.keys())
+ overload_function = is_overload_stub(called)
+
+ # Determine if we don't have a context for our call and we use variadics.
+ node_scope = node.scope()
+ if isinstance(node_scope, (astroid.Lambda, astroid.FunctionDef)):
+ has_no_context_positional_variadic = _no_context_variadic_positional(
+ node, node_scope
+ )
+ has_no_context_keywords_variadic = _no_context_variadic_keywords(
+ node, node_scope
+ )
+ else:
+ has_no_context_positional_variadic = (
+ has_no_context_keywords_variadic
+ ) = False
+
+ # These are coming from the functools.partial implementation in astroid
+ already_filled_positionals = getattr(called, "filled_positionals", 0)
+ already_filled_keywords = getattr(called, "filled_keywords", {})
+
+ keyword_args += list(already_filled_keywords)
+ num_positional_args += implicit_args + already_filled_positionals
+
+ # Analyze the list of formal parameters.
+ args = list(itertools.chain(called.args.posonlyargs or (), called.args.args))
+ num_mandatory_parameters = len(args) - len(called.args.defaults)
+ parameters = []
+ parameter_name_to_index = {}
+ for i, arg in enumerate(args):
+ if isinstance(arg, astroid.Tuple):
+ name = None
+ # Don't store any parameter names within the tuple, since those
+ # are not assignable from keyword arguments.
+ else:
+ assert isinstance(arg, astroid.AssignName)
+ # This occurs with:
+ # def f( (a), (b) ): pass
+ name = arg.name
+ parameter_name_to_index[name] = i
+ if i >= num_mandatory_parameters:
+ defval = called.args.defaults[i - num_mandatory_parameters]
+ else:
+ defval = None
+ parameters.append([(name, defval), False])
+
+ kwparams = {}
+ for i, arg in enumerate(called.args.kwonlyargs):
+ if isinstance(arg, astroid.Keyword):
+ name = arg.arg
+ else:
+ assert isinstance(arg, astroid.AssignName)
+ name = arg.name
+ kwparams[name] = [called.args.kw_defaults[i], False]
+
+ self._check_argument_order(
+ node, call_site, called, [p[0][0] for p in parameters]
+ )
+
+ # 1. Match the positional arguments.
+ for i in range(num_positional_args):
+ if i < len(parameters):
+ parameters[i][1] = True
+ elif called.args.vararg is not None:
+ # The remaining positional arguments get assigned to the *args
+ # parameter.
+ break
+ else:
+ if not overload_function:
+ # Too many positional arguments.
+ self.add_message(
+ "too-many-function-args", node=node, args=(callable_name,)
+ )
+ break
+
+ # 2. Match the keyword arguments.
+ for keyword in keyword_args:
+ if keyword in parameter_name_to_index:
+ i = parameter_name_to_index[keyword]
+ if parameters[i][1]:
+ # Duplicate definition of function parameter.
+
+ # Might be too hardcoded, but this can actually
+ # happen when using str.format and `self` is passed
+ # by keyword argument, as in `.format(self=self)`.
+ # It's perfectly valid to so, so we're just skipping
+ # it if that's the case.
+ if not (keyword == "self" and called.qname() in STR_FORMAT):
+ self.add_message(
+ "redundant-keyword-arg",
+ node=node,
+ args=(keyword, callable_name),
+ )
+ else:
+ parameters[i][1] = True
+ elif keyword in kwparams:
+ if kwparams[keyword][1]:
+ # Duplicate definition of function parameter.
+ self.add_message(
+ "redundant-keyword-arg",
+ node=node,
+ args=(keyword, callable_name),
+ )
+ else:
+ kwparams[keyword][1] = True
+ elif called.args.kwarg is not None:
+ # The keyword argument gets assigned to the **kwargs parameter.
+ pass
+ elif not overload_function:
+ # Unexpected keyword argument.
+ self.add_message(
+ "unexpected-keyword-arg", node=node, args=(keyword, callable_name)
+ )
+
+ # 3. Match the **kwargs, if any.
+ if node.kwargs:
+ for i, [(name, defval), assigned] in enumerate(parameters):
+ # Assume that *kwargs provides values for all remaining
+ # unassigned named parameters.
+ if name is not None:
+ parameters[i][1] = True
+ else:
+ # **kwargs can't assign to tuples.
+ pass
+
+ # Check that any parameters without a default have been assigned
+ # values.
+ for [(name, defval), assigned] in parameters:
+ if (defval is None) and not assigned:
+ if name is None:
+ display_name = "<tuple>"
+ else:
+ display_name = repr(name)
+ if not has_no_context_positional_variadic and not overload_function:
+ self.add_message(
+ "no-value-for-parameter",
+ node=node,
+ args=(display_name, callable_name),
+ )
+
+ for name in kwparams:
+ defval, assigned = kwparams[name]
+ if defval is None and not assigned and not has_no_context_keywords_variadic:
+ self.add_message("missing-kwoa", node=node, args=(name, callable_name))
+
    @check_messages("invalid-sequence-index")
    def visit_extslice(self, node):
        """Check an extended slice (e.g. ``obj[1:2, 3]``) used as an index.

        Delegates to visit_index, which emits invalid-sequence-index when
        the sliced object is a builtin sequence type.
        """
        # Check extended slice objects as if they were used as a sequence
        # index to check if the object being sliced can support them
        return self.visit_index(node)
+
    @check_messages("invalid-sequence-index")
    def visit_index(self, node):
        """Check that an index used on a builtin sequence type is valid.

        Emits invalid-sequence-index when a builtin sequence (or a subclass
        that does not override the relevant item method) is indexed by
        something that is not an int, a slice, or an object with __index__.
        """
        if not node.parent or not hasattr(node.parent, "value"):
            return None
        # Look for index operations where the parent is a sequence type.
        # If the types can be determined, only allow indices to be int,
        # slice or instances with __index__.
        parent_type = safe_infer(node.parent.value)
        if not isinstance(
            parent_type, (astroid.ClassDef, astroid.Instance)
        ) or not has_known_bases(parent_type):
            return None

        # Determine what method on the parent this index will use
        # The parent of this node will be a Subscript, and the parent of that
        # node determines if the Subscript is a get, set, or delete operation.
        if node.parent.ctx is astroid.Store:
            methodname = "__setitem__"
        elif node.parent.ctx is astroid.Del:
            methodname = "__delitem__"
        else:
            methodname = "__getitem__"

        # Check if this instance's __getitem__, __setitem__, or __delitem__, as
        # appropriate to the statement, is implemented in a builtin sequence
        # type. This way we catch subclasses of sequence types but skip classes
        # that override __getitem__ and which may allow non-integer indices.
        try:
            methods = dunder_lookup.lookup(parent_type, methodname)
            if methods is astroid.Uninferable:
                return None
            itemmethod = methods[0]
        except (
            exceptions.NotFoundError,
            exceptions.AttributeInferenceError,
            IndexError,
        ):
            return None

        if (
            not isinstance(itemmethod, astroid.FunctionDef)
            or itemmethod.root().name != BUILTINS
            or not itemmethod.parent
            or itemmethod.parent.name not in SEQUENCE_TYPES
        ):
            # The item method is user-defined or not on a builtin sequence,
            # so arbitrary index types may be acceptable.
            return None

        # For ExtSlice objects coming from visit_extslice, no further
        # inference is necessary, since if we got this far the ExtSlice
        # is an error.
        if isinstance(node, astroid.ExtSlice):
            index_type = node
        else:
            index_type = safe_infer(node)
        if index_type is None or index_type is astroid.Uninferable:
            return None
        # Constants must be of type int
        if isinstance(index_type, astroid.Const):
            if isinstance(index_type.value, int):
                return None
        # Instance values must be int, slice, or have an __index__ method
        elif isinstance(index_type, astroid.Instance):
            if index_type.pytype() in (BUILTINS + ".int", BUILTINS + ".slice"):
                return None
            try:
                index_type.getattr("__index__")
                return None
            except exceptions.NotFoundError:
                pass
        elif isinstance(index_type, astroid.Slice):
            # Delegate to visit_slice. A slice can be present
            # here after inferring the index node, which could
            # be a `slice(...)` call for instance.
            return self.visit_slice(index_type)

        # Anything else is an error
        self.add_message("invalid-sequence-index", node=node)
        return None
+
    @check_messages("invalid-slice-index")
    def visit_slice(self, node):
        """Check that the lower/upper/step parts of a slice are valid indices.

        Each part must be an int, None, or an object providing __index__;
        otherwise invalid-slice-index is emitted once per invalid part,
        unless the sliced object is a custom type that may accept other
        slice values.
        """
        # Check the type of each part of the slice
        invalid_slices = 0
        for index in (node.lower, node.upper, node.step):
            if index is None:
                continue

            index_type = safe_infer(index)
            if index_type is None or index_type is astroid.Uninferable:
                continue

            # Constants must be of type int or None
            if isinstance(index_type, astroid.Const):
                if isinstance(index_type.value, (int, type(None))):
                    continue
            # Instance values must be of type int, None or an object
            # with __index__
            elif isinstance(index_type, astroid.Instance):
                if index_type.pytype() in (BUILTINS + ".int", BUILTINS + ".NoneType"):
                    continue

                try:
                    index_type.getattr("__index__")
                    return
                except exceptions.NotFoundError:
                    pass
            invalid_slices += 1

        if not invalid_slices:
            return

        # Anything else is an error, unless the object that is indexed
        # is a custom object, which knows how to handle this kind of slices
        parent = node.parent
        if isinstance(parent, astroid.ExtSlice):
            parent = parent.parent
        if isinstance(parent, astroid.Subscript):
            inferred = safe_infer(parent.value)
            if inferred is None or inferred is astroid.Uninferable:
                # Don't know what this is
                return
            known_objects = (
                astroid.List,
                astroid.Dict,
                astroid.Tuple,
                astroid.objects.FrozenSet,
                astroid.Set,
            )
            if not isinstance(inferred, known_objects):
                # Might be an instance that knows how to handle this slice object
                return
        for _ in range(invalid_slices):
            self.add_message("invalid-slice-index", node=node)
+
    @check_messages("not-context-manager")
    def visit_with(self, node):
        """Check every context manager expression of a with-statement.

        Emits not-context-manager for expressions that neither implement
        __enter__/__exit__ nor come from a generator function decorated
        with one of the configured contextmanager decorators.
        """
        for ctx_mgr, _ in node.items:
            context = astroid.context.InferenceContext()
            inferred = safe_infer(ctx_mgr, context=context)
            if inferred is None or inferred is astroid.Uninferable:
                continue

            if isinstance(inferred, bases.Generator):
                # Check if we are dealing with a function decorated
                # with contextlib.contextmanager.
                if decorated_with(
                    inferred.parent, self.config.contextmanager_decorators
                ):
                    continue
                # If the parent of the generator is not the context manager itself,
                # that means that it could have been returned from another
                # function which was the real context manager.
                # The following approach is more of a hack rather than a real
                # solution: walk all the inferred statements for the
                # given *ctx_mgr* and if you find one function scope
                # which is decorated, consider it to be the real
                # manager and give up, otherwise emit not-context-manager.
                # See the test file for not_context_manager for a couple
                # of self explaining tests.

                # Retrieve node from all previously visited nodes in the inference history
                context_path_names = filter(None, _unflatten(context.path))
                inferred_paths = _flatten_container(
                    safe_infer(path) for path in context_path_names
                )
                for inferred_path in inferred_paths:
                    if not inferred_path:
                        continue
                    scope = inferred_path.scope()
                    if not isinstance(scope, astroid.FunctionDef):
                        continue
                    if decorated_with(scope, self.config.contextmanager_decorators):
                        break
                else:
                    self.add_message(
                        "not-context-manager", node=node, args=(inferred.name,)
                    )
            else:
                try:
                    inferred.getattr("__enter__")
                    inferred.getattr("__exit__")
                except exceptions.NotFoundError:
                    if isinstance(inferred, astroid.Instance):
                        # If we do not know the bases of this class,
                        # just skip it.
                        if not has_known_bases(inferred):
                            continue
                        # Just ignore mixin classes.
                        if self.config.ignore_mixin_members:
                            if inferred.name[-5:].lower() == "mixin":
                                continue

                    self.add_message(
                        "not-context-manager", node=node, args=(inferred.name,)
                    )
+
+ @check_messages("invalid-unary-operand-type")
+ def visit_unaryop(self, node):
+ """Detect TypeErrors for unary operands."""
+
+ for error in node.type_errors():
+ # Let the error customize its output.
+ self.add_message("invalid-unary-operand-type", args=str(error), node=node)
+
+ @check_messages("unsupported-binary-operation")
+ def _visit_binop(self, node):
+ """Detect TypeErrors for binary arithmetic operands."""
+ self._check_binop_errors(node)
+
+ @check_messages("unsupported-binary-operation")
+ def _visit_augassign(self, node):
+ """Detect TypeErrors for augmented binary arithmetic operands."""
+ self._check_binop_errors(node)
+
+ def _check_binop_errors(self, node):
+ for error in node.type_errors():
+ # Let the error customize its output.
+ if any(
+ isinstance(obj, astroid.ClassDef) and not has_known_bases(obj)
+ for obj in (error.left_type, error.right_type)
+ ):
+ continue
+ self.add_message("unsupported-binary-operation", args=str(error), node=node)
+
+ def _check_membership_test(self, node):
+ if is_inside_abstract_class(node):
+ return
+ if is_comprehension(node):
+ return
+ inferred = safe_infer(node)
+ if inferred is None or inferred is astroid.Uninferable:
+ return
+ if not supports_membership_test(inferred):
+ self.add_message(
+ "unsupported-membership-test", args=node.as_string(), node=node
+ )
+
+ @check_messages("unsupported-membership-test")
+ def visit_compare(self, node):
+ if len(node.ops) != 1:
+ return
+
+ op, right = node.ops[0]
+ if op in ["in", "not in"]:
+ self._check_membership_test(right)
+
    @check_messages(
        "unsubscriptable-object",
        "unsupported-assignment-operation",
        "unsupported-delete-operation",
        "unhashable-dict-key",
    )
    def visit_subscript(self, node):
        """Check subscript operations (``obj[key]`` get/set/delete).

        Verifies that the subscripted value supports the protocol matching
        the access context (__getitem__/__setitem__/__delitem__), and that
        keys of dict literals are hashable.
        """
        supported_protocol = None
        if isinstance(node.value, (astroid.ListComp, astroid.DictComp)):
            return

        if isinstance(node.value, astroid.Dict):
            # Assert dict key is hashable
            # NOTE(review): assumes node.slice is an Index wrapper exposing
            # .value -- confirm this holds for the astroid version in use.
            inferred = safe_infer(node.slice.value)
            if inferred not in (None, astroid.Uninferable):
                try:
                    hash_fn = next(inferred.igetattr("__hash__"))
                except astroid.InferenceError:
                    pass
                else:
                    # __hash__ set to None marks the type as unhashable.
                    if getattr(hash_fn, "value", True) is None:
                        self.add_message("unhashable-dict-key", node=node.value)

        if node.ctx == astroid.Load:
            supported_protocol = supports_getitem
            msg = "unsubscriptable-object"
        elif node.ctx == astroid.Store:
            supported_protocol = supports_setitem
            msg = "unsupported-assignment-operation"
        elif node.ctx == astroid.Del:
            supported_protocol = supports_delitem
            msg = "unsupported-delete-operation"

        if isinstance(node.value, astroid.SetComp):
            self.add_message(msg, args=node.value.as_string(), node=node.value)
            return

        if is_inside_abstract_class(node):
            return

        inferred = safe_infer(node.value)
        if inferred is None or inferred is astroid.Uninferable:
            return

        if not supported_protocol(inferred):
            self.add_message(msg, args=node.value.as_string(), node=node.value)
+
+ @check_messages("dict-items-missing-iter")
+ def visit_for(self, node):
+ if not isinstance(node.target, astroid.node_classes.Tuple):
+ # target is not a tuple
+ return
+ if not len(node.target.elts) == 2:
+ # target is not a tuple of two elements
+ return
+
+ iterable = node.iter
+ if not isinstance(iterable, astroid.node_classes.Name):
+ # it's not a bare variable
+ return
+
+ inferred = safe_infer(iterable)
+ if not inferred:
+ return
+ if not isinstance(inferred, astroid.node_classes.Dict):
+ # the iterable is not a dict
+ return
+
+ self.add_message("dict-iter-missing-items", node=node)
+
+
class IterableChecker(BaseChecker):
    """
    Checks for non-iterables used in an iterable context.
    Contexts include:
    - for-statement
    - starargs in function call
    - `yield from`-statement
    - list, dict and set comprehensions
    - generator expressions
    Also checks for non-mappings in function call kwargs.
    """

    __implements__ = (IAstroidChecker,)
    name = "typecheck"

    msgs = {
        "E1133": (
            "Non-iterable value %s is used in an iterating context",
            "not-an-iterable",
            "Used when a non-iterable value is used in place where "
            "iterable is expected",
        ),
        "E1134": (
            "Non-mapping value %s is used in a mapping context",
            "not-a-mapping",
            "Used when a non-mapping value is used in place where "
            "mapping is expected",
        ),
    }

    @staticmethod
    def _is_asyncio_coroutine(node):
        """Return True if *node* is a call to a function whose decorators
        include the asyncio coroutine decorator."""
        if not isinstance(node, astroid.Call):
            return False

        inferred_func = safe_infer(node.func)
        if not isinstance(inferred_func, astroid.FunctionDef):
            return False
        if not inferred_func.decorators:
            return False
        for decorator in inferred_func.decorators.nodes:
            inferred_decorator = safe_infer(decorator)
            if not isinstance(inferred_decorator, astroid.FunctionDef):
                continue
            if inferred_decorator.qname() != ASYNCIO_COROUTINE:
                continue
            return True
        return False

    def _check_iterable(self, node, check_async=False):
        """Emit not-an-iterable if *node* cannot be iterated.

        When check_async is True, async iterables are also accepted.
        """
        if is_inside_abstract_class(node) or is_comprehension(node):
            return
        inferred = safe_infer(node)
        if not inferred:
            return
        if not is_iterable(inferred, check_async=check_async):
            self.add_message("not-an-iterable", args=node.as_string(), node=node)

    def _check_mapping(self, node):
        """Emit not-a-mapping if *node* cannot be used as a ``**kwargs`` mapping."""
        if is_inside_abstract_class(node):
            return
        if isinstance(node, astroid.DictComp):
            return
        inferred = safe_infer(node)
        if inferred is None or inferred is astroid.Uninferable:
            return
        if not is_mapping(inferred):
            self.add_message("not-a-mapping", args=node.as_string(), node=node)

    @check_messages("not-an-iterable")
    def visit_for(self, node):
        """Check the iterable of a for-loop."""
        self._check_iterable(node.iter)

    @check_messages("not-an-iterable")
    def visit_asyncfor(self, node):
        """Check the iterable of an async-for loop (async iterables allowed)."""
        self._check_iterable(node.iter, check_async=True)

    @check_messages("not-an-iterable")
    def visit_yieldfrom(self, node):
        """Check the operand of a ``yield from`` expression."""
        if self._is_asyncio_coroutine(node.value):
            return
        self._check_iterable(node.value)

    @check_messages("not-an-iterable", "not-a-mapping")
    def visit_call(self, node):
        """Check ``*args`` iterables and ``**kwargs`` mappings at call sites."""
        for stararg in node.starargs:
            self._check_iterable(stararg.value)
        for kwarg in node.kwargs:
            self._check_mapping(kwarg.value)

    @check_messages("not-an-iterable")
    def visit_listcomp(self, node):
        """Check every generator clause of a list comprehension."""
        for gen in node.generators:
            self._check_iterable(gen.iter, check_async=gen.is_async)

    @check_messages("not-an-iterable")
    def visit_dictcomp(self, node):
        """Check every generator clause of a dict comprehension."""
        for gen in node.generators:
            self._check_iterable(gen.iter, check_async=gen.is_async)

    @check_messages("not-an-iterable")
    def visit_setcomp(self, node):
        """Check every generator clause of a set comprehension."""
        for gen in node.generators:
            self._check_iterable(gen.iter, check_async=gen.is_async)

    @check_messages("not-an-iterable")
    def visit_generatorexp(self, node):
        """Check every generator clause of a generator expression."""
        for gen in node.generators:
            self._check_iterable(gen.iter, check_async=gen.is_async)
+
+
def register(linter):
    """Required entry point to auto-register this module's checkers."""
    for checker_class in (TypeChecker, IterableChecker):
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/utils.py b/venv/Lib/site-packages/pylint/checkers/utils.py
new file mode 100644
index 0000000..2a6820a
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/utils.py
@@ -0,0 +1,1253 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2007, 2009-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016-2017 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Brian C. Lane <bcl@redhat.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 ttenhoeve-aa <ttenhoeve@appannie.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.guinta@protonmail.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Brian Shaginaw <brian.shaginaw@warbyparker.com>
+# Copyright (c) 2018 Caio Carrara <ccarrara@redhat.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""some functions that may be useful for various checkers
+"""
+import builtins
+import itertools
+import numbers
+import re
+import string
+from functools import lru_cache, partial
+from typing import Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
+
+import astroid
+from astroid import bases as _bases
+from astroid import helpers, scoped_nodes
+from astroid.exceptions import _NonDeducibleTypeHierarchy
+
+import _string # pylint: disable=wrong-import-position, wrong-import-order
+
# Name of the builtins module ("builtins" on Python 3).
BUILTINS_NAME = builtins.__name__
# All comprehension node types.
COMP_NODE_TYPES = (
    astroid.ListComp,
    astroid.SetComp,
    astroid.DictComp,
    astroid.GeneratorExp,
)
# Module that holds the builtin exceptions.
EXCEPTIONS_MODULE = "builtins"
# Qualified names of the abc decorators that mark a method abstract.
ABC_METHODS = {
    "abc.abstractproperty",
    "abc.abstractmethod",
    "abc.abstractclassmethod",
    "abc.abstractstaticmethod",
}
# Qualified names of the Protocol base classes (typing and its backport).
TYPING_PROTOCOLS = frozenset({"typing.Protocol", "typing_extensions.Protocol"})
# Special method / attribute names used by the checks in this module.
ITER_METHOD = "__iter__"
AITER_METHOD = "__aiter__"
NEXT_METHOD = "__next__"
GETITEM_METHOD = "__getitem__"
CLASS_GETITEM_METHOD = "__class_getitem__"
SETITEM_METHOD = "__setitem__"
DELITEM_METHOD = "__delitem__"
CONTAINS_METHOD = "__contains__"
KEYS_METHOD = "keys"
+
+# Dictionary which maps the number of expected parameters a
+# special method can have to a set of special methods.
+# The following keys are used to denote the parameters restrictions:
+#
+# * None: variable number of parameters
+# * number: exactly that number of parameters
+# * tuple: this are the odd ones. Basically it means that the function
+# can work with any number of arguments from that tuple,
+# although it's best to implement it in order to accept
+# all of them.
+_SPECIAL_METHODS_PARAMS = {
+ None: ("__new__", "__init__", "__call__"),
+ 0: (
+ "__del__",
+ "__repr__",
+ "__str__",
+ "__bytes__",
+ "__hash__",
+ "__bool__",
+ "__dir__",
+ "__len__",
+ "__length_hint__",
+ "__iter__",
+ "__reversed__",
+ "__neg__",
+ "__pos__",
+ "__abs__",
+ "__invert__",
+ "__complex__",
+ "__int__",
+ "__float__",
+ "__neg__",
+ "__pos__",
+ "__abs__",
+ "__complex__",
+ "__int__",
+ "__float__",
+ "__index__",
+ "__enter__",
+ "__aenter__",
+ "__getnewargs_ex__",
+ "__getnewargs__",
+ "__getstate__",
+ "__reduce__",
+ "__copy__",
+ "__unicode__",
+ "__nonzero__",
+ "__await__",
+ "__aiter__",
+ "__anext__",
+ "__fspath__",
+ ),
+ 1: (
+ "__format__",
+ "__lt__",
+ "__le__",
+ "__eq__",
+ "__ne__",
+ "__gt__",
+ "__ge__",
+ "__getattr__",
+ "__getattribute__",
+ "__delattr__",
+ "__delete__",
+ "__instancecheck__",
+ "__subclasscheck__",
+ "__getitem__",
+ "__missing__",
+ "__delitem__",
+ "__contains__",
+ "__add__",
+ "__sub__",
+ "__mul__",
+ "__truediv__",
+ "__floordiv__",
+ "__rfloordiv__",
+ "__mod__",
+ "__divmod__",
+ "__lshift__",
+ "__rshift__",
+ "__and__",
+ "__xor__",
+ "__or__",
+ "__radd__",
+ "__rsub__",
+ "__rmul__",
+ "__rtruediv__",
+ "__rmod__",
+ "__rdivmod__",
+ "__rpow__",
+ "__rlshift__",
+ "__rrshift__",
+ "__rand__",
+ "__rxor__",
+ "__ror__",
+ "__iadd__",
+ "__isub__",
+ "__imul__",
+ "__itruediv__",
+ "__ifloordiv__",
+ "__imod__",
+ "__ilshift__",
+ "__irshift__",
+ "__iand__",
+ "__ixor__",
+ "__ior__",
+ "__ipow__",
+ "__setstate__",
+ "__reduce_ex__",
+ "__deepcopy__",
+ "__cmp__",
+ "__matmul__",
+ "__rmatmul__",
+ "__div__",
+ ),
+ 2: ("__setattr__", "__get__", "__set__", "__setitem__", "__set_name__"),
+ 3: ("__exit__", "__aexit__"),
+ (0, 1): ("__round__",),
+}
+
+SPECIAL_METHODS_PARAMS = {
+ name: params
+ for params, methods in _SPECIAL_METHODS_PARAMS.items()
+ for name in methods # type: ignore
+}
+PYMETHODS = set(SPECIAL_METHODS_PARAMS)
+
+
class NoSuchArgumentError(Exception):
    """Signals that a requested argument could not be found."""
+
+
def is_inside_except(node):
    """Return a truthy value if *node* is (part of) the bound name of an
    except handler.

    Walks up the parent chain until the direct parent is an ExceptHandler,
    then checks whether the node we stopped at is that handler's ``name``.
    """
    ancestor = node
    while ancestor and not isinstance(ancestor.parent, astroid.ExceptHandler):
        ancestor = ancestor.parent
    return ancestor and ancestor is ancestor.parent.name
+
+
def is_inside_lambda(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* appears anywhere inside a lambda expression."""
    ancestor = node.parent
    while ancestor is not None:
        if isinstance(ancestor, astroid.Lambda):
            return True
        ancestor = ancestor.parent
    return False
+
+
def get_all_elements(
    node: astroid.node_classes.NodeNG
) -> Iterable[astroid.node_classes.NodeNG]:
    """Recursively yield every atom found in nested lists and tuples."""
    if not isinstance(node, (astroid.Tuple, astroid.List)):
        # Leaf node: yield it directly.
        yield node
        return
    for element in node.elts:
        yield from get_all_elements(element)
+
+
def clobber_in_except(
    node: astroid.node_classes.NodeNG
) -> Tuple[bool, Optional[Tuple[str, str]]]:
    """Check whether an assignment inside an except handler shadows something.

    Returns ``(True, message_args)`` when the assignment clobbers an
    attribute, a builtin, or a name from an outer scope, where
    ``message_args`` are the arguments for the W0623 warning;
    returns ``(False, None)`` otherwise.
    """
    if isinstance(node, astroid.AssignAttr):
        return True, (node.attrname, "object %r" % (node.expr.as_string(),))
    if not isinstance(node, astroid.AssignName):
        return False, None

    name = node.name
    if is_builtin(name):
        return True, (name, "builtins")

    previous_stmts = node.lookup(name)[1]
    non_clobbering_types = (astroid.Assign, astroid.AugAssign, astroid.ExceptHandler)
    if previous_stmts and not isinstance(
        previous_stmts[0].assign_type(), non_clobbering_types
    ):
        return True, (name, "outer scope (line %s)" % previous_stmts[0].fromlineno)
    return False, None
+
+
def is_super(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* references the ``super`` builtin function."""
    is_named_super = getattr(node, "name", None) == "super"
    return is_named_super and node.root().name == BUILTINS_NAME
+
+
def is_error(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if the function raises an exception but never returns."""
    interesting_nodes = list(node.nodes_of_class((astroid.Raise, astroid.Return)))
    has_raise = any(
        isinstance(child, astroid.Raise) for child in interesting_nodes
    )
    has_return = any(
        isinstance(child, astroid.Return) for child in interesting_nodes
    )
    return has_raise and not has_return
+
+
# Rebind the module-level name to a plain dict copy of the builtin namespace;
# from here on ``builtins`` is a dict, not the imported module.
# NOTE(review): this deliberate shadowing is relied on by is_builtin below,
# but it is easy to misread -- confirm before refactoring.
builtins = builtins.__dict__.copy()  # type: ignore
SPECIAL_BUILTINS = ("__builtins__",)  # '__path__', '__file__')
+
+
def is_builtin_object(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* is an object from the builtins module.

    The previous implementation returned ``node`` itself (i.e. ``None``)
    when the node was falsy, contradicting the declared ``bool`` return
    type; this version always returns a real bool with the same
    truthiness.
    """
    return node is not None and node.root().name == BUILTINS_NAME
+
+
def is_builtin(name: str) -> bool:
    """Return True if *name* could be considered a builtin defined by Python."""
    if name in SPECIAL_BUILTINS:  # type: ignore
        return True
    # ``builtins`` is the module-level dict copy of the builtin namespace.
    return name in builtins
+
+
def is_defined_in_scope(
    var_node: astroid.node_classes.NodeNG,
    varname: str,
    scope: astroid.node_classes.NodeNG,
) -> bool:
    """Return True if *varname* is defined by the single scope-like node *scope*.

    Handles If bodies, comprehensions and for-loops, with-statements,
    lambdas/functions (their arguments and their own name) and except
    handlers.
    """
    if isinstance(scope, astroid.If):
        for node in scope.body:
            if (
                isinstance(node, astroid.Assign)
                and any(
                    isinstance(target, astroid.AssignName) and target.name == varname
                    for target in node.targets
                )
            ) or (isinstance(node, astroid.Nonlocal) and varname in node.names):
                return True
    elif isinstance(scope, (COMP_NODE_TYPES, astroid.For)):
        for ass_node in scope.nodes_of_class(astroid.AssignName):
            if ass_node.name == varname:
                return True
    elif isinstance(scope, astroid.With):
        for expr, ids in scope.items:
            if expr.parent_of(var_node):
                break
            if ids and isinstance(ids, astroid.AssignName) and ids.name == varname:
                return True
    elif isinstance(scope, (astroid.Lambda, astroid.FunctionDef)):
        if scope.args.is_argument(varname):
            # If the name is found inside a default value
            # of a function, then let the search continue
            # in the parent's tree.
            if scope.args.parent_of(var_node):
                try:
                    scope.args.default_value(varname)
                    scope = scope.parent
                    # NOTE(review): the result of this recursive call is
                    # discarded, so the call has no effect -- True is
                    # returned below either way.  Looks unintentional;
                    # confirm before changing.
                    is_defined_in_scope(var_node, varname, scope)
                except astroid.NoDefault:
                    pass
            return True
        if getattr(scope, "name", None) == varname:
            return True
    elif isinstance(scope, astroid.ExceptHandler):
        if isinstance(scope.name, astroid.AssignName):
            ass_node = scope.name
            if ass_node.name == varname:
                return True
    return False
+
+
def is_defined_before(var_node: astroid.node_classes.NodeNG) -> bool:
    """return True if the variable node is defined by a parent node (list,
    set, dict, or generator comprehension, lambda) or in a previous sibling
    node on the same line (statement_defining ; statement_using)
    """
    varname = var_node.name
    # First, walk the chain of enclosing scope-like nodes.
    _node = var_node.parent
    while _node:
        if is_defined_in_scope(var_node, varname, _node):
            return True
        _node = _node.parent
    # possibly multiple statements on the same line using semi colon separator
    stmt = var_node.statement()
    _node = stmt.previous_sibling()
    lineno = stmt.fromlineno
    while _node and _node.fromlineno == lineno:
        # A previous same-line statement may define the name by assignment...
        for assign_node in _node.nodes_of_class(astroid.AssignName):
            if assign_node.name == varname:
                return True
        # ...or by an import (the alias, when present, takes precedence).
        for imp_node in _node.nodes_of_class((astroid.ImportFrom, astroid.Import)):
            if varname in [name[1] or name[0] for name in imp_node.names]:
                return True
        _node = _node.previous_sibling()
    return False
+
+
def is_default_argument(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if the given Name node occurs inside the default value
    of a function or lambda argument."""
    enclosing_scope = node.scope()
    if not isinstance(enclosing_scope, (astroid.FunctionDef, astroid.Lambda)):
        return False
    return any(
        name_node is node
        for default in enclosing_scope.args.defaults
        for name_node in default.nodes_of_class(astroid.Name)
    )
+
+
def is_func_decorator(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* is used inside a function decorator expression."""
    boundary_types = (
        astroid.Lambda,
        scoped_nodes.ComprehensionScope,
        scoped_nodes.ListComp,
    )
    ancestor = node.parent
    while ancestor is not None:
        if isinstance(ancestor, astroid.Decorators):
            return True
        # Reaching a statement or a new expression scope means we have
        # left any possible decorator context.
        if ancestor.is_statement or isinstance(ancestor, boundary_types):
            return False
        ancestor = ancestor.parent
    return False
+
+
def is_ancestor_name(
    frame: astroid.node_classes.NodeNG, node: astroid.node_classes.NodeNG
) -> bool:
    """Return True if *frame* is a class node whose bases subtree
    contains *node*."""
    base_nodes = getattr(frame, "bases", None)
    if base_nodes is None:
        # Not a class-like node: it has no bases attribute.
        return False
    return any(node in base.nodes_of_class(astroid.Name) for base in base_nodes)
+
+
def assign_parent(node: astroid.node_classes.NodeNG) -> astroid.node_classes.NodeNG:
    """Return the closest ancestor that is not an AssignName, Tuple or List.

    Skips over (possibly nested) unpacking targets to reach the node that
    performs the actual assignment.
    """
    skipped_types = (astroid.AssignName, astroid.Tuple, astroid.List)
    current = node
    while current and isinstance(current, skipped_types):
        current = current.parent
    return current
+
+
def overrides_a_method(class_node: astroid.node_classes.NodeNG, name: str) -> bool:
    """Return True if *name* is a method overridden from an ancestor class."""
    return any(
        name in ancestor and isinstance(ancestor[name], astroid.FunctionDef)
        for ancestor in class_node.ancestors()
    )
+
+
def check_messages(*messages: str) -> Callable:
    """Decorator recording which message ids a checker method handles.

    The ids are stored on the decorated function as ``checks_msgs``; the
    function itself is returned unchanged.
    """

    def _annotate(func):
        func.checks_msgs = messages
        return func

    return _annotate
+
+
class IncompleteFormatString(Exception):
    """Raised when a format string ends in the middle of a format specifier."""
+
+
class UnsupportedFormatCharacter(Exception):
    """Raised for a conversion character that %-formatting does not support.

    The position of the offending character is available as ``index``.
    """

    def __init__(self, index):
        super().__init__(index)
        self.index = index
+
+
def parse_format_string(
    format_string: str
) -> Tuple[Set[str], int, Dict[str, str], List[str]]:
    """Parse a %-style format string.

    Returns a tuple ``(keys, num_args, key_types, pos_types)`` where
    ``keys`` is the set of mapping keys in the format string, ``num_args``
    is the number of positional arguments it requires, ``key_types`` maps
    each mapping key to its conversion type character, and ``pos_types``
    lists the conversion type characters of the positional specifiers.
    Raises IncompleteFormatString or UnsupportedFormatCharacter if a
    parse error occurs.
    """
    keys = set()
    key_types = dict()
    pos_types = []
    num_args = 0

    def next_char(i):
        # Advance one character; a specifier may not end at end-of-string.
        i += 1
        if i == len(format_string):
            raise IncompleteFormatString
        return (i, format_string[i])

    i = 0
    while i < len(format_string):
        char = format_string[i]
        if char == "%":
            i, char = next_char(i)
            # Parse the mapping key (optional).
            key = None
            if char == "(":
                # Keys may contain balanced parentheses, hence the depth count.
                depth = 1
                i, char = next_char(i)
                key_start = i
                while depth != 0:
                    if char == "(":
                        depth += 1
                    elif char == ")":
                        depth -= 1
                    i, char = next_char(i)
                key_end = i - 1
                key = format_string[key_start:key_end]

            # Parse the conversion flags (optional).
            while char in "#0- +":
                i, char = next_char(i)
            # Parse the minimum field width (optional).
            if char == "*":
                # "*" consumes one extra positional argument for the width.
                num_args += 1
                i, char = next_char(i)
            else:
                while char in string.digits:
                    i, char = next_char(i)
            # Parse the precision (optional).
            if char == ".":
                i, char = next_char(i)
                if char == "*":
                    # "*" consumes one extra positional argument here too.
                    num_args += 1
                    i, char = next_char(i)
                else:
                    while char in string.digits:
                        i, char = next_char(i)
            # Parse the length modifier (optional).
            if char in "hlL":
                i, char = next_char(i)
            # Parse the conversion type (mandatory).
            flags = "diouxXeEfFgGcrs%a"
            if char not in flags:
                raise UnsupportedFormatCharacter(i)
            if key:
                keys.add(key)
                key_types[key] = char
            elif char != "%":
                # "%%" is an escaped percent sign, not an argument.
                num_args += 1
                pos_types.append(char)
        i += 1
    return keys, num_args, key_types, pos_types
+
+
def split_format_field_names(format_string) -> Tuple[str, Iterable[Tuple[bool, str]]]:
    """Split a PEP 3101 field name into its first component and an iterator
    of the remaining ``(is_attribute, name)`` accessors.

    Raises IncompleteFormatString when the field name cannot be parsed.
    """
    try:
        return _string.formatter_field_name_split(format_string)
    except ValueError:
        raise IncompleteFormatString()
+
+
def collect_string_fields(format_string) -> Iterable[Optional[str]]:
    """Yield every valid replacement-field name found in *format_string*.

    Nested fields (inside format specs) are yielded as well; automatic
    positional fields are yielded as the empty string.
    """
    formatter = string.Formatter()
    try:
        for _literal, field_name, format_spec, conversion in formatter.parse(
            format_string
        ):
            if field_name is None and format_spec is None and conversion is None:
                # A literal-only chunk, not a replacement field.
                continue
            yield field_name
            if format_spec:
                yield from collect_string_fields(format_spec)
    except ValueError as exc:
        # Probably the format string is invalid.
        if exc.args[0].startswith("cannot switch from manual"):
            # On Jython, parsing a string with both manual
            # and automatic positions will fail with a ValueError,
            # while on CPython it will simply return the fields,
            # the validation being done in the interpreter (?).
            # We're just returning two mixed fields in order
            # to trigger the format-combined-specification check.
            yield ""
            yield "1"
            return
        raise IncompleteFormatString(format_string)
+
+
def parse_format_method_string(
    format_string: str
) -> Tuple[List[Tuple[str, List[Tuple[bool, str]]]], int, int]:
    """
    Parses a PEP 3101 format string, returning a tuple of
    (keyword_arguments, implicit_pos_args_cnt, explicit_pos_args),
    where keyword_arguments is the set of mapping keys in the format string,
    implicit_pos_args_cnt is the number of arguments required by the format
    string and explicit_pos_args is the number of arguments passed with the
    position.
    """
    keyword_arguments = []
    implicit_pos_args_cnt = 0
    explicit_pos_args = set()
    for name in collect_string_fields(format_string):
        if name and str(name).isdigit():
            # Explicitly numbered positional field, e.g. "{0}".
            explicit_pos_args.add(str(name))
        elif name:
            # Keyword field, possibly with attribute/index accessors
            # such as "{key.attr[0]}".
            keyname, fielditerator = split_format_field_names(name)
            if isinstance(keyname, numbers.Number):
                # In Python 2 it will return long which will lead
                # to different output between 2 and 3
                explicit_pos_args.add(str(keyname))
                keyname = int(keyname)
            try:
                keyword_arguments.append((keyname, list(fielditerator)))
            except ValueError:
                raise IncompleteFormatString()
        else:
            # Auto-numbered field "{}".
            implicit_pos_args_cnt += 1
    return keyword_arguments, implicit_pos_args_cnt, len(explicit_pos_args)
+
+
def is_attr_protected(attrname: str) -> bool:
    """Return True if the attribute name is protected, False otherwise.

    Protected means a single-underscore prefix, excluding the lone ``_``
    name and dunder names such as ``__init__``.
    """
    if attrname == "_" or attrname[0] != "_":
        return False
    # Dunder names are special methods/attributes, not protected ones.
    is_dunder = attrname.startswith("__") and attrname.endswith("__")
    return not is_dunder
+
+
def node_frame_class(node: astroid.node_classes.NodeNG) -> Optional[astroid.ClassDef]:
    """Return the class that is wrapping the given node

    The function returns a class for a method node (or a staticmethod or a
    classmethod), otherwise it returns `None`.
    """
    klass = node.frame()

    # Walk outwards through enclosing frames until a ClassDef is found
    # or the module root (whose parent is None) is reached.
    while klass is not None and not isinstance(klass, astroid.ClassDef):
        if klass.parent is None:
            klass = None
        else:
            klass = klass.parent.frame()

    return klass
+
+
def is_attr_private(attrname: str) -> Optional[Match[str]]:
    """Check that attribute name is private (at least two leading underscores,
    at most one trailing underscore).

    Returns the regex match object (truthy) or None, as callers use it
    in boolean context.
    """
    # re.match keeps compiled patterns in the module-level cache, so the
    # pattern is not recompiled on every call (the original compiled it
    # each time).
    return re.match(r"^_{2,}.*[^_]+_?$", attrname)
+
+
def get_argument_from_call(
    call_node: astroid.Call, position: int = None, keyword: str = None
) -> astroid.Name:
    """Returns the specified argument from a function call.

    :param astroid.Call call_node: Node representing a function call to check.
    :param int position: position of the argument.
    :param str keyword: the keyword of the argument.

    :returns: The node representing the argument, None if the argument is not found.
    :rtype: astroid.Name
    :raises ValueError: if both position and keyword are None.
    :raises NoSuchArgumentError: if no argument at the provided position or with
        the provided keyword.
    """
    if position is None and keyword is None:
        raise ValueError("Must specify at least one of: position or keyword.")
    if position is not None:
        try:
            return call_node.args[position]
        except IndexError:
            # Not enough positional arguments; fall through to the
            # keyword lookup below.
            pass
    if keyword and call_node.keywords:
        for arg in call_node.keywords:
            if arg.arg == keyword:
                return arg.value

    raise NoSuchArgumentError
+
+
def inherit_from_std_ex(node: astroid.node_classes.NodeNG) -> bool:
    """
    Return true if the given class node is subclass of
    exceptions.Exception.
    """
    # ``ancestors`` may be missing on non-class nodes; treat those as
    # having no ancestry beyond themselves.
    ancestors = node.ancestors() if hasattr(node, "ancestors") else []
    for ancestor in itertools.chain([node], ancestors):
        if (
            ancestor.name in ("Exception", "BaseException")
            and ancestor.root().name == EXCEPTIONS_MODULE
        ):
            return True
    return False
+
+
def error_of_type(handler: astroid.ExceptHandler, error_type) -> bool:
    """
    Check if the given exception handler catches
    the given error_type.

    The *handler* parameter is a node, representing an ExceptHandler node.
    The *error_type* can be an exception, such as AttributeError,
    the name of an exception, or it can be a tuple of errors.
    The function will return True if the handler catches any of the
    given errors.
    """

    def stringify_error(error):
        # Accept either exception classes or their string names.
        if not isinstance(error, str):
            return error.__name__
        return error

    if not isinstance(error_type, tuple):
        error_type = (error_type,)  # type: ignore
    expected_errors = {stringify_error(error) for error in error_type}  # type: ignore
    if not handler.type:
        # A bare ``except:`` catches everything.
        return True
    return handler.catch(expected_errors)
+
+
def decorated_with_property(node: astroid.FunctionDef) -> bool:
    """Detect if the given function node is decorated with a property."""
    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        try:
            if _is_property_decorator(decorator):
                return True
        except astroid.InferenceError:
            # Could not infer this decorator; keep checking the others.
            pass
    return False
+
+
def _is_property_kind(node, *kinds):
    """Return True if *node* is decorated with ``<name>.<kind>`` for any of
    the given *kinds* (e.g. ``@x.setter`` / ``@x.deleter``)."""
    if not isinstance(node, (astroid.UnboundMethod, astroid.FunctionDef)):
        return False
    if node.decorators:
        for decorator in node.decorators.nodes:
            # Only attribute-style decorators qualify; a bare name cannot
            # be a property setter/deleter.
            if isinstance(decorator, astroid.Attribute) and decorator.attrname in kinds:
                return True
    return False
+
+
def is_property_setter(node: astroid.FunctionDef) -> bool:
    """Check if the given node is a property setter"""
    return _is_property_kind(node, "setter")
+
+
def is_property_setter_or_deleter(node: astroid.FunctionDef) -> bool:
    """Check if the given node is either a property setter or a deleter"""
    return _is_property_kind(node, "setter", "deleter")
+
+
def _is_property_decorator(decorator: astroid.Name) -> bool:
    """Return True if *decorator* infers to the builtin ``property`` class
    or to a subclass of it."""
    for inferred in decorator.infer():
        if isinstance(inferred, astroid.ClassDef):
            if inferred.root().name == BUILTINS_NAME and inferred.name == "property":
                return True
            # Also accept subclasses of the builtin property.
            for ancestor in inferred.ancestors():
                if (
                    ancestor.name == "property"
                    and ancestor.root().name == BUILTINS_NAME
                ):
                    return True
    return False
+
+
def decorated_with(
    func: Union[astroid.FunctionDef, astroid.BoundMethod, astroid.UnboundMethod],
    qnames: Iterable[str],
) -> bool:
    """Determine if the `func` node has a decorator with the qualified name `qname`.

    Either the decorator's fully qualified name or its short name may match
    an entry in *qnames*.
    """
    decorators = func.decorators.nodes if func.decorators else []
    for decorator_node in decorators:
        if isinstance(decorator_node, astroid.Call):
            # We only want to infer the function name
            decorator_node = decorator_node.func
        try:
            # Note: ``and`` binds tighter than ``or`` here, so a bare name
            # match (``i.name in qnames``) also counts even when i is None-safe
            # only on the left operand.
            if any(
                i is not None and i.qname() in qnames or i.name in qnames
                for i in decorator_node.infer()
            ):
                return True
        except astroid.InferenceError:
            continue
    return False
+
+
@lru_cache(maxsize=1024)
def unimplemented_abstract_methods(
    node: astroid.node_classes.NodeNG, is_abstract_cb: astroid.FunctionDef = None
) -> Dict[str, astroid.node_classes.NodeNG]:
    """
    Get the unimplemented abstract methods for the given *node*.

    A method can be considered abstract if the callback *is_abstract_cb*
    returns a ``True`` value. The check defaults to verifying that
    a method is decorated with abstract methods.
    The function will work only for new-style classes. For old-style
    classes, it will simply return an empty dictionary.
    For the rest of them, it will return a dictionary of abstract method
    names and their inferred objects.

    NOTE(review): ``lru_cache`` keys on the node objects themselves, so
    cached entries keep AST nodes alive for the cache's lifetime.
    """
    if is_abstract_cb is None:
        is_abstract_cb = partial(decorated_with, qnames=ABC_METHODS)
    visited = {}  # type: Dict[str, astroid.node_classes.NodeNG]
    try:
        mro = reversed(node.mro())
    except NotImplementedError:
        # Old style class, it will not have a mro.
        return {}
    except astroid.ResolveError:
        # Probably inconsistent hierarchy, don't try
        # to figure this out here.
        return {}
    for ancestor in mro:
        for obj in ancestor.values():
            inferred = obj
            if isinstance(obj, astroid.AssignName):
                inferred = safe_infer(obj)
                if not inferred:
                    # Might be an abstract function,
                    # but since we don't have enough information
                    # in order to take this decision, we're taking
                    # the *safe* decision instead.
                    if obj.name in visited:
                        del visited[obj.name]
                    continue
                if not isinstance(inferred, astroid.FunctionDef):
                    if obj.name in visited:
                        del visited[obj.name]
            if isinstance(inferred, astroid.FunctionDef):
                # It's critical to use the original name,
                # since after inferring, an object can be something
                # else than expected, as in the case of the
                # following assignment.
                #
                # class A:
                #     def keys(self): pass
                #     __iter__ = keys
                abstract = is_abstract_cb(inferred)
                if abstract:
                    visited[obj.name] = inferred
                elif not abstract and obj.name in visited:
                    # A concrete override removes the abstract entry
                    # recorded from an earlier (more base) class.
                    del visited[obj.name]
    return visited
+
+
def find_try_except_wrapper_node(
    node: astroid.node_classes.NodeNG
) -> Union[astroid.ExceptHandler, astroid.TryExcept]:
    """Return the ExceptHandler or the TryExcept node in which the node is.

    Returns None when the node is not wrapped in either.
    """
    current = node
    ignores = (astroid.ExceptHandler, astroid.TryExcept)
    # Climb towards the root until the direct parent is a try/except
    # construct (or until the root, whose parent is None, is passed).
    while current and not isinstance(current.parent, ignores):
        current = current.parent

    if current and isinstance(current.parent, ignores):
        return current.parent
    return None
+
+
def is_from_fallback_block(node: astroid.node_classes.NodeNG) -> bool:
    """Check if the given node is from a fallback import block.

    A fallback block is a try/except where either side performs imports and
    the handler ignores ImportError, e.g. the common
    ``try: import x / except ImportError: import y`` pattern.
    """
    context = find_try_except_wrapper_node(node)
    if not context:
        return False

    if isinstance(context, astroid.ExceptHandler):
        # The node lives in a handler: the "other" side is the try body.
        other_body = context.parent.body
        handlers = context.parent.handlers
    else:
        # The node lives in the try body: the "other" side is every handler.
        other_body = itertools.chain.from_iterable(
            handler.body for handler in context.handlers
        )
        handlers = context.handlers

    has_fallback_imports = any(
        isinstance(import_node, (astroid.ImportFrom, astroid.Import))
        for import_node in other_body
    )
    ignores_import_error = _except_handlers_ignores_exception(handlers, ImportError)
    return ignores_import_error or has_fallback_imports
+
+
def _except_handlers_ignores_exception(
    handlers: astroid.ExceptHandler, exception
) -> bool:
    """Return True if any handler in *handlers* catches *exception*."""
    func = partial(error_of_type, error_type=(exception,))
    return any(map(func, handlers))
+
+
def get_exception_handlers(
    node: astroid.node_classes.NodeNG, exception=Exception
) -> Optional[List[astroid.ExceptHandler]]:
    """Return the collections of handlers handling the exception in arguments.

    Args:
        node (astroid.NodeNG): A node that is potentially wrapped in a try except.
        exception (builtin.Exception or str): exception or name of the exception.

    Returns:
        list: the handlers that are handling the exception; an empty list
        when the node is not wrapped in a TryExcept.

    """
    context = find_try_except_wrapper_node(node)
    if isinstance(context, astroid.TryExcept):
        return [
            handler for handler in context.handlers if error_of_type(handler, exception)
        ]
    return []
+
+
def is_node_inside_try_except(node: astroid.Raise) -> bool:
    """Check if the node is directly under a Try/Except statement.
    (but not under an ExceptHandler!)

    Args:
        node (astroid.Raise): the node raising the exception.

    Returns:
        bool: True if the node is inside a try/except statement, False otherwise.
    """
    context = find_try_except_wrapper_node(node)
    return isinstance(context, astroid.TryExcept)
+
+
def node_ignores_exception(
    node: astroid.node_classes.NodeNG, exception=Exception
) -> bool:
    """Check if the node is in a TryExcept which handles the given exception.

    If the exception is not given, the function is going to look for bare
    excepts.
    """
    # get_exception_handlers returns the (possibly empty) list of handlers
    # that catch *exception*; the original's follow-up ``any(...)`` on a
    # non-empty list of AST nodes was always True, so a bool() of the list
    # is equivalent and simpler.
    return bool(get_exception_handlers(node, exception))
+
+
def class_is_abstract(node: astroid.ClassDef) -> bool:
    """return true if the given class node should be considered as an abstract
    class
    """
    for method in node.methods():
        # Only methods defined on this class itself count, not inherited ones.
        if method.parent.frame() is node:
            if method.is_abstract(pass_is_abstract=False):
                return True
    return False
+
+
def _supports_protocol_method(value: astroid.node_classes.NodeNG, attr: str) -> bool:
    """Return True if *value* has a usable *attr* attribute.

    An attribute assigned a plain constant (e.g. ``__iter__ = None``) does
    not count as implementing the protocol.
    """
    try:
        attributes = value.getattr(attr)
    except astroid.NotFoundError:
        return False

    first = attributes[0]
    if isinstance(first, astroid.AssignName):
        # assumes the AssignName's parent is an Assign with a ``value``
        # attribute -- TODO confirm for annotated/augmented assignments
        if isinstance(first.parent.value, astroid.Const):
            return False
    return True
+
+
def is_comprehension(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* is any kind of comprehension or a generator
    expression."""
    comprehensions = (
        astroid.ListComp,
        astroid.SetComp,
        astroid.DictComp,
        astroid.GeneratorExp,
    )
    return isinstance(node, comprehensions)
+
+
def _supports_mapping_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """A mapping needs both ``__getitem__`` and ``keys``."""
    return _supports_protocol_method(
        value, GETITEM_METHOD
    ) and _supports_protocol_method(value, KEYS_METHOD)
+
+
def _supports_membership_test_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Membership tests (``in``) rely on ``__contains__``."""
    return _supports_protocol_method(value, CONTAINS_METHOD)
+
+
def _supports_iteration_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Iteration works via ``__iter__`` or the legacy ``__getitem__`` protocol."""
    return _supports_protocol_method(value, ITER_METHOD) or _supports_protocol_method(
        value, GETITEM_METHOD
    )
+
+
def _supports_async_iteration_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Async iteration requires ``__aiter__``."""
    return _supports_protocol_method(value, AITER_METHOD)
+
+
def _supports_getitem_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Subscript reads require ``__getitem__``."""
    return _supports_protocol_method(value, GETITEM_METHOD)
+
+
def _supports_setitem_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Subscript writes require ``__setitem__``."""
    return _supports_protocol_method(value, SETITEM_METHOD)
+
+
def _supports_delitem_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Subscript deletion requires ``__delitem__``."""
    return _supports_protocol_method(value, DELITEM_METHOD)
+
+
+def _is_abstract_class_name(name: str) -> bool:
+ lname = name.lower()
+ is_mixin = lname.endswith("mixin")
+ is_abstract = lname.startswith("abstract")
+ is_base = lname.startswith("base") or lname.endswith("base")
+ return is_mixin or is_abstract or is_base
+
+
def is_inside_abstract_class(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* is (transitively) inside a class that either is
    abstract or merely *looks* abstract by its name."""
    while node is not None:
        if isinstance(node, astroid.ClassDef):
            if class_is_abstract(node):
                return True
            # Fall back to the naming heuristic (Base*, Abstract*, *Mixin...).
            name = getattr(node, "name", None)
            if name is not None and _is_abstract_class_name(name):
                return True
        node = node.parent
    return False
+
+
def _supports_protocol(
    value: astroid.node_classes.NodeNG, protocol_callback: astroid.FunctionDef
) -> bool:
    """Return True if *value* satisfies the protocol checked by
    *protocol_callback*, being permissive when the type cannot be fully
    resolved (unknown bases, dynamic __getattr__)."""
    if isinstance(value, astroid.ClassDef):
        if not has_known_bases(value):
            # Unresolvable hierarchy: assume support rather than raise
            # a false positive.
            return True
        # classobj can only be iterable if it has an iterable metaclass
        meta = value.metaclass()
        if meta is not None:
            if protocol_callback(meta):
                return True
    if isinstance(value, astroid.BaseInstance):
        if not has_known_bases(value):
            return True
        if value.has_dynamic_getattr():
            # __getattr__/__getattribute__ may provide anything.
            return True
        if protocol_callback(value):
            return True

    if (
        isinstance(value, _bases.Proxy)
        and isinstance(value._proxied, astroid.BaseInstance)
        and has_known_bases(value._proxied)
    ):
        # Unwrap proxies (e.g. bound methods / generators) and check the
        # underlying instance.
        value = value._proxied
        return protocol_callback(value)

    return False
+
+
def is_iterable(value: astroid.node_classes.NodeNG, check_async: bool = False) -> bool:
    """Return True when *value* supports (async, if requested) iteration."""
    protocol_check = (
        _supports_async_iteration_protocol
        if check_async
        else _supports_iteration_protocol
    )
    return _supports_protocol(value, protocol_check)
+
+
def is_mapping(value: astroid.node_classes.NodeNG) -> bool:
    """Return True when *value* supports the mapping protocol."""
    return _supports_protocol(value, _supports_mapping_protocol)
+
+
def supports_membership_test(value: astroid.node_classes.NodeNG) -> bool:
    """Return True when ``x in value`` is valid (``__contains__`` or plain
    iterability)."""
    supported = _supports_protocol(value, _supports_membership_test_protocol)
    return supported or is_iterable(value)
+
+
def supports_getitem(value: astroid.node_classes.NodeNG) -> bool:
    """Return True when ``value[...]`` is valid, including class-level
    subscripting via ``__class_getitem__``."""
    if isinstance(value, astroid.ClassDef):
        if _supports_protocol_method(value, CLASS_GETITEM_METHOD):
            return True
    return _supports_protocol(value, _supports_getitem_protocol)
+
+
def supports_setitem(value: astroid.node_classes.NodeNG) -> bool:
    """Return True when ``value[...] = x`` is valid."""
    return _supports_protocol(value, _supports_setitem_protocol)
+
+
def supports_delitem(value: astroid.node_classes.NodeNG) -> bool:
    """Return True when ``del value[...]`` is valid."""
    return _supports_protocol(value, _supports_delitem_protocol)
+
+
@lru_cache(maxsize=1024)
def safe_infer(
    node: astroid.node_classes.NodeNG, context=None
) -> Optional[astroid.node_classes.NodeNG]:
    """Return the inferred value for the given node.

    Return None if inference failed or if there is some ambiguity (more than
    one node has been inferred).
    """
    try:
        inferit = node.infer(context=context)
        value = next(inferit)
    except astroid.InferenceError:
        return None
    # Pull a second result: only a single, unambiguous inference is trusted.
    try:
        next(inferit)
        return None  # None if there is ambiguity on the inferred node
    except astroid.InferenceError:
        return None  # there is some kind of ambiguity
    except StopIteration:
        return value
+
+
def has_known_bases(klass: astroid.ClassDef, context=None) -> bool:
    """Return true if all base classes of a class could be inferred."""
    try:
        # Memoized on the class node itself from a previous call.
        return klass._all_bases_known
    except AttributeError:
        pass
    for base in klass.bases:
        result = safe_infer(base, context=context)
        if (
            not isinstance(result, astroid.ClassDef)
            or result is klass
            or not has_known_bases(result, context=context)
        ):
            klass._all_bases_known = False
            return False
    klass._all_bases_known = True
    return True
+
+
def is_none(node: astroid.node_classes.NodeNG) -> bool:
    """Return True when *node* is None itself, the constant ``None`` or the
    name ``None``."""
    return (
        node is None
        or (isinstance(node, astroid.Const) and node.value is None)
        or (isinstance(node, astroid.Name) and node.name == "None")
    )
+
+
def node_type(node: astroid.node_classes.NodeNG) -> Optional[type]:
    """Return the inferred type for `node`

    If there is more than one possible type, or if inferred type is Uninferable or None,
    return None
    """
    # check there is only one possible type for the assign node. Else we
    # don't handle it for now
    types = set()
    try:
        for var_type in node.infer():
            if var_type == astroid.Uninferable or is_none(var_type):
                continue
            types.add(var_type)
            if len(types) > 1:
                return None
    except astroid.InferenceError:
        return None
    return types.pop() if types else None
+
+
def is_registered_in_singledispatch_function(node: astroid.FunctionDef) -> bool:
    """Check if the given function node is registered on a singledispatch
    function via ``@<dispatcher>.register(...)``."""

    singledispatch_qnames = (
        "functools.singledispatch",
        "singledispatch.singledispatch",
    )

    if not isinstance(node, astroid.FunctionDef):
        return False

    decorators = node.decorators.nodes if node.decorators else []
    for decorator in decorators:
        # func.register are function calls
        if not isinstance(decorator, astroid.Call):
            continue

        func = decorator.func
        if not isinstance(func, astroid.Attribute) or func.attrname != "register":
            continue

        try:
            func_def = next(func.expr.infer())
        except astroid.InferenceError:
            continue

        if isinstance(func_def, astroid.FunctionDef):
            # pylint: disable=redundant-keyword-arg; some flow inference goes wrong here
            return decorated_with(func_def, singledispatch_qnames)

    return False
+
+
def get_node_last_lineno(node: astroid.node_classes.NodeNG) -> int:
    """
    Get the last lineno of the given node. For a simple statement this will just be node.lineno,
    but for a node that has child statements (e.g. a method) this will be the lineno of the last
    child statement recursively.
    """
    # 'finalbody' is always the last clause in a try statement, if present
    if getattr(node, "finalbody", False):
        return get_node_last_lineno(node.finalbody[-1])
    # For if, while, and for statements 'orelse' is always the last clause.
    # For try statements 'orelse' is the last in the absence of a 'finalbody'
    if getattr(node, "orelse", False):
        return get_node_last_lineno(node.orelse[-1])
    # try statements have the 'handlers' last if there is no 'orelse' or 'finalbody'
    if getattr(node, "handlers", False):
        return get_node_last_lineno(node.handlers[-1])
    # All compound statements have a 'body'
    if getattr(node, "body", False):
        return get_node_last_lineno(node.body[-1])
    # Not a compound statement
    return node.lineno
+
+
def is_postponed_evaluation_enabled(node: astroid.node_classes.NodeNG) -> bool:
    """Check if the postponed evaluation of annotations is enabled.

    That is, whether the module containing *node* has a
    ``from __future__ import annotations`` statement.
    """
    name = "annotations"
    module = node.root()
    stmt = module.locals.get(name)
    # Wrap in bool(): the original could leak None/[] when the name is not
    # bound, despite the ``-> bool`` annotation.
    return bool(
        stmt
        and isinstance(stmt[0], astroid.ImportFrom)
        and stmt[0].modname == "__future__"
    )
+
+
def is_subclass_of(child: astroid.ClassDef, parent: astroid.ClassDef) -> bool:
    """
    Check if first node is a subclass of second node.
    :param child: Node to check for subclass.
    :param parent: Node to check for superclass.
    :returns: True if child is derived from parent. False otherwise.
    """
    if not all(isinstance(node, astroid.ClassDef) for node in (child, parent)):
        return False

    for ancestor in child.ancestors():
        try:
            if helpers.is_subtype(ancestor, parent):
                return True
        except _NonDeducibleTypeHierarchy:
            # The relationship cannot be determined; try other ancestors.
            continue
    return False
+
+
@lru_cache(maxsize=1024)
def is_overload_stub(node: astroid.node_classes.NodeNG) -> bool:
    """Check if a node is a function stub decorated with typing.overload.

    :param node: Node to check.
    :returns: True if node is an overload function stub. False otherwise.
    """
    decorators = getattr(node, "decorators", None)
    return bool(decorators and decorated_with(node, ["typing.overload", "overload"]))
+
+
def is_protocol_class(cls: astroid.node_classes.NodeNG) -> bool:
    """Check if the given node represents a protocol class

    :param cls: The node to check
    :returns: True if the node is a typing protocol class, false otherwise.
    """
    if not isinstance(cls, astroid.ClassDef):
        return False

    # Use .ancestors() since not all protocol classes can have
    # their mro deduced.
    return any(parent.qname() in TYPING_PROTOCOLS for parent in cls.ancestors())
diff --git a/venv/Lib/site-packages/pylint/checkers/variables.py b/venv/Lib/site-packages/pylint/checkers/variables.py
new file mode 100644
index 0000000..e13f9b5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/variables.py
@@ -0,0 +1,1987 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2011-2014, 2017 Google, Inc.
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Grant Welch <gwelch925+github@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Dan Garrette <dhgarrette@gmail.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.guinta@protonmail.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Marianna Polatoglou <mpolatoglou@bloomberg.net>
+# Copyright (c) 2018 mar-chi-pan <mar.polatoglou@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""variables checkers for Python code
+"""
+import collections
+import copy
+import itertools
+import os
+import re
+from functools import lru_cache
+
+import astroid
+from astroid import decorators, modutils, objects
+from astroid.context import InferenceContext
+
+from pylint.checkers import BaseChecker, utils
+from pylint.checkers.utils import is_postponed_evaluation_enabled
+from pylint.interfaces import HIGH, INFERENCE, INFERENCE_FAILURE, IAstroidChecker
+from pylint.utils import get_global_option
+
+SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
+FUTURE = "__future__"
+# regexp for ignored argument name
+IGNORED_ARGUMENT_NAMES = re.compile("_.*|^ignored_|^unused_")
+# In Python 3.7 abc has a Python implementation which is preferred
+# by astroid. Unfortunately this also messes up our explicit checks
+# for `abc`
+METACLASS_NAME_TRANSFORMS = {"_py_abc": "abc"}
+TYPING_TYPE_CHECKS_GUARDS = frozenset({"typing.TYPE_CHECKING", "TYPE_CHECKING"})
+BUILTIN_RANGE = "builtins.range"
+TYPING_MODULE = "typing"
+TYPING_NAMES = frozenset(
+ {
+ "Any",
+ "Callable",
+ "ClassVar",
+ "Generic",
+ "Optional",
+ "Tuple",
+ "Type",
+ "TypeVar",
+ "Union",
+ "AbstractSet",
+ "ByteString",
+ "Container",
+ "ContextManager",
+ "Hashable",
+ "ItemsView",
+ "Iterable",
+ "Iterator",
+ "KeysView",
+ "Mapping",
+ "MappingView",
+ "MutableMapping",
+ "MutableSequence",
+ "MutableSet",
+ "Sequence",
+ "Sized",
+ "ValuesView",
+ "Awaitable",
+ "AsyncIterator",
+ "AsyncIterable",
+ "Coroutine",
+ "Collection",
+ "AsyncGenerator",
+ "AsyncContextManager",
+ "Reversible",
+ "SupportsAbs",
+ "SupportsBytes",
+ "SupportsComplex",
+ "SupportsFloat",
+ "SupportsInt",
+ "SupportsRound",
+ "Counter",
+ "Deque",
+ "Dict",
+ "DefaultDict",
+ "List",
+ "Set",
+ "FrozenSet",
+ "NamedTuple",
+ "Generator",
+ "AnyStr",
+ "Text",
+ "Pattern",
+ }
+)
+
+
def _is_from_future_import(stmt, name):
    """Check if the name is a future import from another module.

    Returns True when it is, None when it is not or when the module
    cannot be built.
    """
    try:
        module = stmt.do_import_module(stmt.modname)
    except astroid.AstroidBuildingException:
        return None

    for local_node in module.locals.get(name, []):
        if isinstance(local_node, astroid.ImportFrom) and local_node.modname == FUTURE:
            return True
    return None
+
+
def in_for_else_branch(parent, stmt):
    """Returns True if stmt is inside the else branch for a parent For stmt."""
    return isinstance(parent, astroid.For) and any(
        else_stmt.parent_of(stmt) or else_stmt == stmt for else_stmt in parent.orelse
    )
+
+
@lru_cache(maxsize=1000)
def overridden_method(klass, name):
    """get overridden method if any"""
    try:
        # First ancestor that defines *name* locally.
        parent = next(klass.local_attr_ancestors(name))
    except (StopIteration, KeyError):
        return None
    try:
        meth_node = parent[name]
    except KeyError:
        # We have found an ancestor defining <name> but it's not in the local
        # dictionary. This may happen with astroid built from living objects.
        return None
    if isinstance(meth_node, astroid.FunctionDef):
        return meth_node
    return None
+
+
+def _get_unpacking_extra_info(node, inferred):
+ """return extra information to add to the message for unpacking-non-sequence
+ and unbalanced-tuple-unpacking errors
+ """
+ more = ""
+ inferred_module = inferred.root().name
+ if node.root().name == inferred_module:
+ if node.lineno == inferred.lineno:
+ more = " %s" % inferred.as_string()
+ elif inferred.lineno:
+ more = " defined at line %s" % inferred.lineno
+ elif inferred.lineno:
+ more = " defined at line %s of %s" % (inferred.lineno, inferred_module)
+ return more
+
+
def _detect_global_scope(node, frame, defframe):
    """Detect that the given frames share a global
    scope.

    Two frames share a global scope when neither
    of them are hidden under a function scope, as well
    as any parent scope of them, until the root scope.
    In this case, depending on something defined later on
    will not work, because it is still undefined.

    Example:
        class A:
            # B has the same global scope as `C`, leading to a NameError.
            class B(C): ...
        class C: ...

    """
    def_scope = scope = None
    if frame and frame.parent:
        scope = frame.parent.scope()
    if defframe and defframe.parent:
        def_scope = defframe.parent.scope()
    if isinstance(frame, astroid.FunctionDef):
        # If the parent of the current node is a
        # function, then it can be under its scope
        # (defined in, which doesn't concern us) or
        # the `->` part of annotations. The same goes
        # for annotations of function arguments, they'll have
        # their parent the Arguments node.
        if not isinstance(node.parent, (astroid.FunctionDef, astroid.Arguments)):
            return False
    elif any(
        not isinstance(f, (astroid.ClassDef, astroid.Module)) for f in (frame, defframe)
    ):
        # Not interested in other frames, since they are already
        # not in a global scope.
        return False

    break_scopes = []
    for current_scope in (scope, def_scope):
        # Look for parent scopes. If there is anything different
        # than a module or a class scope, then the frames don't
        # share a global scope.
        parent_scope = current_scope
        while parent_scope:
            if not isinstance(parent_scope, (astroid.ClassDef, astroid.Module)):
                break_scopes.append(parent_scope)
                break
            if parent_scope.parent:
                parent_scope = parent_scope.parent.scope()
            else:
                break
    if break_scopes and len(set(break_scopes)) != 1:
        # Store different scopes than expected.
        # If the stored scopes are, in fact, the very same, then it means
        # that the two frames (frame and defframe) share the same scope,
        # and we could apply our lineno analysis over them.
        # For instance, this works when they are inside a function, the node
        # that uses a definition and the definition itself.
        return False
    # At this point, we are certain that frame and defframe share a scope
    # and the definition of the first depends on the second.
    return frame.lineno < defframe.lineno
+
+
def _infer_name_module(node, name):
    """Infer *node* (an import) restricted to the single imported *name*."""
    context = InferenceContext()
    context.lookupname = name
    return node.infer(context, asname=False)
+
+
def _fix_dot_imports(not_consumed):
    """Try to fix imports with multiple dots, by returning a dictionary
    with the import names expanded. The function unflattens root imports,
    like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree'
    and 'xml.sax' respectively.
    """
    names = {}
    for name, stmts in not_consumed.items():
        if any(
            isinstance(stmt, astroid.AssignName)
            and isinstance(stmt.assign_type(), astroid.AugAssign)
            for stmt in stmts
        ):
            # The name was rebound through augmented assignment; it is no
            # longer a plain import binding.
            continue
        for stmt in stmts:
            if not isinstance(stmt, (astroid.ImportFrom, astroid.Import)):
                continue
            for imports in stmt.names:
                second_name = None
                import_module_name = imports[0]
                if import_module_name == "*":
                    # In case of wildcard imports,
                    # pick the name from inside the imported module.
                    second_name = name
                else:
                    name_matches_dotted_import = False
                    if (
                        import_module_name.startswith(name)
                        and import_module_name.find(".") > -1
                    ):
                        name_matches_dotted_import = True

                    if name_matches_dotted_import or name in imports:
                        # Most likely something like 'xml.etree',
                        # which will appear in the .locals as 'xml'.
                        # Only pick the name if it wasn't consumed.
                        second_name = import_module_name
                if second_name and second_name not in names:
                    names[second_name] = stmt
    return sorted(names.items(), key=lambda a: a[1].fromlineno)
+
+
def _find_frame_imports(name, frame):
    """
    Detect imports in the frame, with the required
    *name*. Such imports can be considered assignments.
    Returns True if an import for the given name was found;
    None (falsy) otherwise.
    """
    imports = frame.nodes_of_class((astroid.Import, astroid.ImportFrom))
    for import_node in imports:
        for import_name, import_alias in import_node.names:
            # If the import uses an alias, check only that.
            # Otherwise, check only the import name.
            if import_alias:
                if import_alias == name:
                    return True
            elif import_name and import_name == name:
                return True
    return None
+
+
+def _import_name_is_global(stmt, global_names):
+ for import_name, import_alias in stmt.names:
+ # If the import uses an alias, check only that.
+ # Otherwise, check only the import name.
+ if import_alias:
+ if import_alias in global_names:
+ return True
+ elif import_name in global_names:
+ return True
+ return False
+
+
+def _flattened_scope_names(iterator):
+ values = (set(stmt.names) for stmt in iterator)
+ return set(itertools.chain.from_iterable(values))
+
+
def _assigned_locally(name_node):
    """Return True when the scope of *name_node* contains an assignment
    binding the same name.
    """
    scope = name_node.scope()
    for assignment in scope.nodes_of_class(astroid.AssignName):
        if assignment.name == name_node.name:
            return True
    return False
+
+
def _is_type_checking_import(node):
    """Return True when *node* sits directly inside a typing guard such as
    ``if TYPE_CHECKING:`` (see TYPING_TYPE_CHECKS_GUARDS)."""
    enclosing = node.parent
    return (
        isinstance(enclosing, astroid.If)
        and enclosing.test.as_string() in TYPING_TYPE_CHECKS_GUARDS
    )
+
+
def _has_locals_call_after_node(stmt, scope):
    """Return True if the builtin ``locals()`` is called in *scope* on a
    line after *stmt*.

    Calls nested inside functions, classes or import statements are not
    considered.
    """
    ignored_scopes = (
        astroid.FunctionDef,
        astroid.ClassDef,
        astroid.Import,
        astroid.ImportFrom,
    )
    for call in scope.nodes_of_class(astroid.Call, skip_klass=ignored_scopes):
        func = utils.safe_infer(call.func)
        is_locals_builtin = (
            utils.is_builtin_object(func) and getattr(func, "name", None) == "locals"
        )
        if is_locals_builtin and stmt.lineno < call.lineno:
            return True
    return False
+
+
# Message catalog for the variables checker: maps a message id to a tuple of
# (format template, symbolic name, help text[, extra options]).
MSGS = {
    "E0601": (
        "Using variable %r before assignment",
        "used-before-assignment",
        "Used when a local variable is accessed before its assignment.",
    ),
    "E0602": (
        "Undefined variable %r",
        "undefined-variable",
        "Used when an undefined variable is accessed.",
    ),
    "E0603": (
        "Undefined variable name %r in __all__",
        "undefined-all-variable",
        "Used when an undefined variable name is referenced in __all__.",
    ),
    "E0604": (
        "Invalid object %r in __all__, must contain only strings",
        "invalid-all-object",
        "Used when an invalid (non-string) object occurs in __all__.",
    ),
    "E0611": (
        "No name %r in module %r",
        "no-name-in-module",
        "Used when a name cannot be found in a module.",
    ),
    "W0601": (
        "Global variable %r undefined at the module level",
        "global-variable-undefined",
        'Used when a variable is defined through the "global" statement '
        "but the variable is not defined in the module scope.",
    ),
    "W0602": (
        "Using global for %r but no assignment is done",
        "global-variable-not-assigned",
        'Used when a variable is defined through the "global" statement '
        "but no assignment to this variable is done.",
    ),
    "W0603": (
        "Using the global statement",  # W0121
        "global-statement",
        'Used when you use the "global" statement to update a global '
        "variable. Pylint just try to discourage this "
        "usage. That doesn't mean you cannot use it !",
    ),
    "W0604": (
        "Using the global statement at the module level",  # W0103
        "global-at-module-level",
        'Used when you use the "global" statement at the module level '
        "since it has no effect",
    ),
    "W0611": (
        "Unused %s",
        "unused-import",
        "Used when an imported module or variable is not used.",
    ),
    "W0612": (
        "Unused variable %r",
        "unused-variable",
        "Used when a variable is defined but not used.",
    ),
    "W0613": (
        "Unused argument %r",
        "unused-argument",
        "Used when a function or method argument is not used.",
    ),
    "W0614": (
        "Unused import %s from wildcard import",
        "unused-wildcard-import",
        "Used when an imported module or variable is not used from a "
        "`'from X import *'` style import.",
    ),
    "W0621": (
        "Redefining name %r from outer scope (line %s)",
        "redefined-outer-name",
        "Used when a variable's name hides a name defined in the outer scope.",
    ),
    "W0622": (
        "Redefining built-in %r",
        "redefined-builtin",
        "Used when a variable or function override a built-in.",
    ),
    "W0623": (
        "Redefining name %r from %s in exception handler",
        "redefine-in-handler",
        "Used when an exception handler assigns the exception to an existing name",
    ),
    "W0631": (
        "Using possibly undefined loop variable %r",
        "undefined-loop-variable",
        "Used when a loop variable (i.e. defined by a for loop or "
        "a list comprehension or a generator expression) is used outside "
        "the loop.",
    ),
    "W0632": (
        "Possible unbalanced tuple unpacking with "
        "sequence%s: "
        "left side has %d label(s), right side has %d value(s)",
        "unbalanced-tuple-unpacking",
        "Used when there is an unbalanced tuple unpacking in assignment",
        # Renamed from E0632; old name kept for compatibility.
        {"old_names": [("E0632", "old-unbalanced-tuple-unpacking")]},
    ),
    "E0633": (
        "Attempting to unpack a non-sequence%s",
        "unpacking-non-sequence",
        "Used when something which is not "
        "a sequence is used in an unpack assignment",
        # Renamed from W0633; old name kept for compatibility.
        {"old_names": [("W0633", "old-unpacking-non-sequence")]},
    ),
    "W0640": (
        "Cell variable %s defined in loop",
        "cell-var-from-loop",
        "A variable used in a closure is defined in a loop. "
        "This will result in all closures using the same value for "
        "the closed-over variable.",
    ),
    "W0641": (
        "Possibly unused variable %r",
        "possibly-unused-variable",
        "Used when a variable is defined but might not be used. "
        "The possibility comes from the fact that locals() might be used, "
        "which could consume or not the said variable",
    ),
    "W0642": (
        "Invalid assignment to %s in method",
        "self-cls-assignment",
        "Invalid assignment to self or cls in instance or class method "
        "respectively.",
    ),
}
+
+
# Immutable record describing one scope's consumption state: names still
# waiting to be used, names already used, and the kind of scope.
ScopeConsumer = collections.namedtuple(
    "ScopeConsumer", ["to_consume", "consumed", "scope_type"]
)
+
+
class NamesConsumer:
    """Book-keeping for one scope's locals: names still waiting to be used
    (``to_consume``), names already used (``consumed``), and the scope type.
    """

    def __init__(self, node, scope_type):
        self._atomic = ScopeConsumer(copy.copy(node.locals), {}, scope_type)

    def __repr__(self):
        pending = ", ".join(
            "{}->{}".format(key, val)
            for key, val in self._atomic.to_consume.items()
        )
        done = ", ".join(
            "{}->{}".format(key, val)
            for key, val in self._atomic.consumed.items()
        )
        return (
            "\nto_consume : {:s}\n".format(pending)
            + "consumed : {:s}\n".format(done)
            + "scope_type : {:s}\n".format(self._atomic.scope_type)
        )

    def __iter__(self):
        return iter(self._atomic)

    @property
    def to_consume(self):
        """Mapping of names not yet used in this scope to their definitions."""
        return self._atomic.to_consume

    @property
    def consumed(self):
        """Mapping of names already used in this scope to their definitions."""
        return self._atomic.consumed

    @property
    def scope_type(self):
        """The kind of scope, e.g. "module", "function", "class"."""
        return self._atomic.scope_type

    def mark_as_consumed(self, name, new_node):
        """Move *name* from the to_consume mapping into the consumed one."""
        self.consumed[name] = new_node
        del self.to_consume[name]

    def get_next_to_consume(self, node):
        """Return the pending definitions for *node*'s name, or None when
        the name is defined by the very assignment that uses it."""
        name = node.name
        found_node = self.to_consume.get(name)
        if (
            found_node
            and isinstance(node.parent, astroid.Assign)
            and node.parent == found_node[0].parent
        ):
            lhs = found_node[0].parent.targets[0]
            if lhs.name == name:  # this name is defined in this very statement
                found_node = None
        return found_node
+
+
# pylint: disable=too-many-public-methods
class VariablesChecker(BaseChecker):
    """checks for
    * unused variables / imports
    * undefined variables
    * redefinition of variable from builtins or from an outer scope
    * use of variable before assignment
    * __all__ consistency
    * self/cls assignment
    """

    __implements__ = IAstroidChecker

    # Checker registration data.
    name = "variables"
    msgs = MSGS
    priority = -1
    # User-configurable options for this checker.
    options = (
        (
            "init-import",
            {
                "default": 0,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether we should check for unused import in "
                "__init__ files.",
            },
        ),
        (
            # Names matching this pattern are treated as intentionally unused.
            "dummy-variables-rgx",
            {
                "default": "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_",
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "A regular expression matching the name of dummy "
                "variables (i.e. expected to not be used).",
            },
        ),
        (
            "additional-builtins",
            {
                "default": (),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "List of additional names supposed to be defined in "
                "builtins. Remember that you should avoid defining new builtins "
                "when possible.",
            },
        ),
        (
            "callbacks",
            {
                "default": ("cb_", "_cb"),
                "type": "csv",
                "metavar": "<callbacks>",
                "help": "List of strings which can identify a callback "
                "function by name. A callback name must start or "
                "end with one of those strings.",
            },
        ),
        (
            "redefining-builtins-modules",
            {
                "default": (
                    "six.moves",
                    "past.builtins",
                    "future.builtins",
                    "builtins",
                    "io",
                ),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "List of qualified module names which can have objects "
                "that can redefine builtins.",
            },
        ),
        (
            "ignored-argument-names",
            {
                "default": IGNORED_ARGUMENT_NAMES,
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "Argument names that match this expression will be "
                "ignored. Default to name with leading underscore.",
            },
        ),
        (
            "allow-global-unused-variables",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether unused global variables should be treated as a violation.",
            },
        ),
    )
+
+ def __init__(self, linter=None):
+ BaseChecker.__init__(self, linter)
+ self._to_consume = (
+ None
+ ) # list of tuples: (to_consume:dict, consumed:dict, scope_type:str)
+ self._checking_mod_attr = None
+ self._loop_variables = []
+ self._type_annotation_names = []
+ self._postponed_evaluation_enabled = False
+
+ @utils.check_messages("redefined-outer-name")
+ def visit_for(self, node):
+ assigned_to = [
+ var.name for var in node.target.nodes_of_class(astroid.AssignName)
+ ]
+
+ # Only check variables that are used
+ dummy_rgx = self.config.dummy_variables_rgx
+ assigned_to = [var for var in assigned_to if not dummy_rgx.match(var)]
+
+ for variable in assigned_to:
+ for outer_for, outer_variables in self._loop_variables:
+ if variable in outer_variables and not in_for_else_branch(
+ outer_for, node
+ ):
+ self.add_message(
+ "redefined-outer-name",
+ args=(variable, outer_for.fromlineno),
+ node=node,
+ )
+ break
+
+ self._loop_variables.append((node, assigned_to))
+
+ @utils.check_messages("redefined-outer-name")
+ def leave_for(self, node):
+ self._loop_variables.pop()
+ self._store_type_annotation_names(node)
+
+ def visit_module(self, node):
+ """visit module : update consumption analysis variable
+ checks globals doesn't overrides builtins
+ """
+ self._to_consume = [NamesConsumer(node, "module")]
+ self._postponed_evaluation_enabled = is_postponed_evaluation_enabled(node)
+
+ for name, stmts in node.locals.items():
+ if utils.is_builtin(name) and not utils.is_inside_except(stmts[0]):
+ if self._should_ignore_redefined_builtin(stmts[0]) or name == "__doc__":
+ continue
+ self.add_message("redefined-builtin", args=name, node=stmts[0])
+
+ @utils.check_messages(
+ "unused-import",
+ "unused-wildcard-import",
+ "redefined-builtin",
+ "undefined-all-variable",
+ "invalid-all-object",
+ "unused-variable",
+ )
+ def leave_module(self, node):
+ """leave module: check globals
+ """
+ assert len(self._to_consume) == 1
+
+ self._check_metaclasses(node)
+ not_consumed = self._to_consume.pop().to_consume
+ # attempt to check for __all__ if defined
+ if "__all__" in node.locals:
+ self._check_all(node, not_consumed)
+
+ # check for unused globals
+ self._check_globals(not_consumed)
+
+ # don't check unused imports in __init__ files
+ if not self.config.init_import and node.package:
+ return
+
+ self._check_imports(not_consumed)
+
+ def visit_classdef(self, node):
+ """visit class: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "class"))
+
+ def leave_classdef(self, _):
+ """leave class: update consumption analysis variable
+ """
+ # do not check for not used locals here (no sense)
+ self._to_consume.pop()
+
+ def visit_lambda(self, node):
+ """visit lambda: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "lambda"))
+
+ def leave_lambda(self, _):
+ """leave lambda: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_generatorexp(self, node):
+ """visit genexpr: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_generatorexp(self, _):
+ """leave genexpr: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_dictcomp(self, node):
+ """visit dictcomp: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_dictcomp(self, _):
+ """leave dictcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_setcomp(self, node):
+ """visit setcomp: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_setcomp(self, _):
+ """leave setcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_functiondef(self, node):
+ """visit function: update consumption analysis variable and check locals
+ """
+ self._to_consume.append(NamesConsumer(node, "function"))
+ if not (
+ self.linter.is_message_enabled("redefined-outer-name")
+ or self.linter.is_message_enabled("redefined-builtin")
+ ):
+ return
+ globs = node.root().globals
+ for name, stmt in node.items():
+ if utils.is_inside_except(stmt):
+ continue
+ if name in globs and not isinstance(stmt, astroid.Global):
+ definition = globs[name][0]
+ if (
+ isinstance(definition, astroid.ImportFrom)
+ and definition.modname == FUTURE
+ ):
+ # It is a __future__ directive, not a symbol.
+ continue
+
+ # Do not take in account redefined names for the purpose
+ # of type checking.:
+ if any(
+ isinstance(definition.parent, astroid.If)
+ and definition.parent.test.as_string() in TYPING_TYPE_CHECKS_GUARDS
+ for definition in globs[name]
+ ):
+ continue
+
+ line = definition.fromlineno
+ if not self._is_name_ignored(stmt, name):
+ self.add_message(
+ "redefined-outer-name", args=(name, line), node=stmt
+ )
+
+ elif utils.is_builtin(name) and not self._should_ignore_redefined_builtin(
+ stmt
+ ):
+ # do not print Redefining builtin for additional builtins
+ self.add_message("redefined-builtin", args=name, node=stmt)
+
+ def leave_functiondef(self, node):
+ """leave function: check function's locals are consumed"""
+ self._check_metaclasses(node)
+
+ if node.type_comment_returns:
+ self._store_type_annotation_node(node.type_comment_returns)
+ if node.type_comment_args:
+ for argument_annotation in node.type_comment_args:
+ self._store_type_annotation_node(argument_annotation)
+
+ not_consumed = self._to_consume.pop().to_consume
+ if not (
+ self.linter.is_message_enabled("unused-variable")
+ or self.linter.is_message_enabled("possibly-unused-variable")
+ or self.linter.is_message_enabled("unused-argument")
+ ):
+ return
+
+ # Don't check arguments of function which are only raising an exception.
+ if utils.is_error(node):
+ return
+
+ # Don't check arguments of abstract methods or within an interface.
+ is_method = node.is_method()
+ if is_method and node.is_abstract():
+ return
+
+ global_names = _flattened_scope_names(node.nodes_of_class(astroid.Global))
+ nonlocal_names = _flattened_scope_names(node.nodes_of_class(astroid.Nonlocal))
+ for name, stmts in not_consumed.items():
+ self._check_is_unused(name, node, stmts[0], global_names, nonlocal_names)
+
    # Async functions are handled exactly like synchronous ones.
    visit_asyncfunctiondef = visit_functiondef
    leave_asyncfunctiondef = leave_functiondef
+
    @utils.check_messages(
        "global-variable-undefined",
        "global-variable-not-assigned",
        "global-statement",
        "global-at-module-level",
        "redefined-builtin",
    )
    def visit_global(self, node):
        """Check the names declared by a ``global`` statement.

        Emits global-at-module-level when the statement is useless (module
        scope), global-variable-not-assigned / global-variable-undefined for
        names with no suitable module-level binding, and the generic
        global-statement warning otherwise.
        """
        frame = node.frame()
        if isinstance(frame, astroid.Module):
            # ``global`` at module level has no effect.
            self.add_message("global-at-module-level", node=node)
            return

        module = frame.root()
        default_message = True
        locals_ = node.scope().locals
        for name in node.names:
            try:
                assign_nodes = module.getattr(name)
            except astroid.NotFoundError:
                # unassigned global, skip
                assign_nodes = []

            # An import of the name inside this scope counts as a definition.
            not_defined_locally_by_import = not any(
                isinstance(local, astroid.node_classes.Import)
                for local in locals_.get(name, ())
            )
            if not assign_nodes and not_defined_locally_by_import:
                self.add_message("global-variable-not-assigned", args=name, node=node)
                default_message = False
                continue

            for anode in assign_nodes:
                if (
                    isinstance(anode, astroid.AssignName)
                    and anode.name in module.special_attributes
                ):
                    self.add_message("redefined-builtin", args=name, node=node)
                    break
                if anode.frame() is module:
                    # module level assignment
                    break
            else:
                # No module-level assignment was found in the loop above.
                if not_defined_locally_by_import:
                    # global undefined at the module scope
                    self.add_message("global-variable-undefined", args=name, node=node)
                    default_message = False

        if default_message:
            self.add_message("global-statement", node=node)
+
+ def visit_assignname(self, node):
+ if isinstance(node.assign_type(), astroid.AugAssign):
+ self.visit_name(node)
+
+ def visit_delname(self, node):
+ self.visit_name(node)
+
    @utils.check_messages(*MSGS)
    def visit_name(self, node):
        """Check that a name is defined in the current scope, is not used
        before assignment, and doesn't redefine a built-in.

        Walks the stack of open scopes (self._to_consume) from innermost to
        outermost looking for a definition of the name, emitting
        undefined-variable / used-before-assignment as appropriate, and
        marks the matched definition as consumed.
        """
        stmt = node.statement()
        if stmt.fromlineno is None:
            # name node from an astroid built from live code, skip
            assert not stmt.root().file.endswith(".py")
            return

        name = node.name
        frame = stmt.scope()
        # if the name node is used as a function default argument's value or as
        # a decorator, then start from the parent frame of the function instead
        # of the function frame - and thus open an inner class scope
        if (
            utils.is_default_argument(node)
            or utils.is_func_decorator(node)
            or utils.is_ancestor_name(frame, node)
        ):
            start_index = len(self._to_consume) - 2
        else:
            start_index = len(self._to_consume) - 1
        # iterates through parent scopes, from the inner to the outer
        base_scope_type = self._to_consume[start_index].scope_type
        # pylint: disable=too-many-nested-blocks; refactoring this block is a pain.
        for i in range(start_index, -1, -1):
            current_consumer = self._to_consume[i]
            # if the current scope is a class scope but it's not the inner
            # scope, ignore it. This prevents to access this scope instead of
            # the globals one in function members when there are some common
            # names. The only exception is when the starting scope is a
            # comprehension and its direct outer scope is a class
            if (
                current_consumer.scope_type == "class"
                and i != start_index
                and not (base_scope_type == "comprehension" and i == start_index - 1)
            ):
                if self._ignore_class_scope(node):
                    continue

            # the name has already been consumed, only check it's not a loop
            # variable used outside the loop
            # avoid the case where there are homonyms inside function scope and
            #  comprehension current scope (avoid bug #1731)
            if name in current_consumer.consumed and not (
                current_consumer.scope_type == "comprehension"
                and self._has_homonym_in_upper_function_scope(node, i)
            ):
                defnode = utils.assign_parent(current_consumer.consumed[name][0])
                self._check_late_binding_closure(node, defnode)
                self._loopvar_name(node, name)
                break

            found_node = current_consumer.get_next_to_consume(node)
            if found_node is None:
                continue

            # checks for use before assignment
            defnode = utils.assign_parent(current_consumer.to_consume[name][0])

            if defnode is not None:
                self._check_late_binding_closure(node, defnode)
                defstmt = defnode.statement()
                defframe = defstmt.frame()
                # The class reuses itself in the class scope.
                recursive_klass = (
                    frame is defframe
                    and defframe.parent_of(node)
                    and isinstance(defframe, astroid.ClassDef)
                    and node.name == defframe.name
                )

                if (
                    recursive_klass
                    and utils.is_inside_lambda(node)
                    and (
                        not utils.is_default_argument(node)
                        or node.scope().parent.scope() is not defframe
                    )
                ):
                    # Self-referential class references are fine in lambda's --
                    # As long as they are not part of the default argument directly
                    # under the scope of the parent self-referring class.
                    # Example of valid default argument:
                    # class MyName3:
                    #     myattr = 1
                    #     mylambda3 = lambda: lambda a=MyName3: a
                    # Example of invalid default argument:
                    # class MyName4:
                    #     myattr = 1
                    #     mylambda4 = lambda a=MyName4: lambda: a

                    # If the above conditional is True,
                    # there is no possibility of undefined-variable
                    # Also do not consume class name
                    # (since consuming blocks subsequent checks)
                    # -- quit
                    break

                maybee0601, annotation_return, use_outer_definition = self._is_variable_violation(
                    node,
                    name,
                    defnode,
                    stmt,
                    defstmt,
                    frame,
                    defframe,
                    base_scope_type,
                    recursive_klass,
                )

                if use_outer_definition:
                    continue

                if (
                    maybee0601
                    and not utils.is_defined_before(node)
                    and not astroid.are_exclusive(stmt, defstmt, ("NameError",))
                ):

                    # Used and defined in the same place, e.g `x += 1` and `del x`
                    defined_by_stmt = defstmt is stmt and isinstance(
                        node, (astroid.DelName, astroid.AssignName)
                    )
                    if (
                        recursive_klass
                        or defined_by_stmt
                        or annotation_return
                        or isinstance(defstmt, astroid.Delete)
                    ):
                        if not utils.node_ignores_exception(node, NameError):

                            # Handle postponed evaluation of annotations
                            if not (
                                self._postponed_evaluation_enabled
                                and isinstance(
                                    stmt,
                                    (
                                        astroid.AnnAssign,
                                        astroid.FunctionDef,
                                        astroid.Arguments,
                                    ),
                                )
                                and name in node.root().locals
                            ):
                                self.add_message(
                                    "undefined-variable", args=name, node=node
                                )
                    elif base_scope_type != "lambda":
                        # E0601 may *not* occurs in lambda scope.

                        # Handle postponed evaluation of annotations
                        if not (
                            self._postponed_evaluation_enabled
                            and isinstance(
                                stmt, (astroid.AnnAssign, astroid.FunctionDef)
                            )
                        ):
                            self.add_message(
                                "used-before-assignment", args=name, node=node
                            )
                    elif base_scope_type == "lambda":
                        # E0601 can occur in class-level scope in lambdas, as in
                        # the following example:
                        #   class A:
                        #      x = lambda attr: f + attr
                        #      f = 42
                        if isinstance(frame, astroid.ClassDef) and name in frame.locals:
                            if isinstance(node.parent, astroid.Arguments):
                                if stmt.fromlineno <= defstmt.fromlineno:
                                    # Doing the following is fine:
                                    #   class A:
                                    #      x = 42
                                    #      y = lambda attr=x: attr
                                    self.add_message(
                                        "used-before-assignment", args=name, node=node
                                    )
                            else:
                                self.add_message(
                                    "undefined-variable", args=name, node=node
                                )
                        elif current_consumer.scope_type == "lambda":
                            self.add_message("undefined-variable", node=node, args=name)

            current_consumer.mark_as_consumed(name, found_node)
            # check it's not a loop variable used outside the loop
            self._loopvar_name(node, name)
            break
        else:
            # we have not found the name, if it isn't a builtin, that's an
            # undefined name !
            if not (
                name in astroid.Module.scope_attrs
                or utils.is_builtin(name)
                or name in self.config.additional_builtins
            ):
                if not utils.node_ignores_exception(node, NameError):
                    self.add_message("undefined-variable", args=name, node=node)
+
+ @utils.check_messages("no-name-in-module")
+ def visit_import(self, node):
+ """check modules attribute accesses"""
+ if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
+ # No need to verify this, since ImportError is already
+ # handled by the client code.
+ return
+
+ for name, _ in node.names:
+ parts = name.split(".")
+ try:
+ module = next(_infer_name_module(node, parts[0]))
+ except astroid.ResolveError:
+ continue
+ self._check_module_attrs(node, module, parts[1:])
+
+ @utils.check_messages("no-name-in-module")
+ def visit_importfrom(self, node):
+ """check modules attribute accesses"""
+ if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
+ # No need to verify this, since ImportError is already
+ # handled by the client code.
+ return
+
+ name_parts = node.modname.split(".")
+ try:
+ module = node.do_import_module(name_parts[0])
+ except astroid.AstroidBuildingException:
+ return
+ module = self._check_module_attrs(node, module, name_parts[1:])
+ if not module:
+ return
+ for name, _ in node.names:
+ if name == "*":
+ continue
+ self._check_module_attrs(node, module, name.split("."))
+
+ @utils.check_messages(
+ "unbalanced-tuple-unpacking", "unpacking-non-sequence", "self-cls-assignment"
+ )
+ def visit_assign(self, node):
+ """Check unbalanced tuple unpacking for assignments
+ and unpacking non-sequences as well as in case self/cls
+ get assigned.
+ """
+ self._check_self_cls_assign(node)
+ if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)):
+ return
+
+ targets = node.targets[0].itered()
+ try:
+ inferred = utils.safe_infer(node.value)
+ if inferred is not None:
+ self._check_unpacking(inferred, node, targets)
+ except astroid.InferenceError:
+ return
+
+ # listcomp have now also their scope
+ def visit_listcomp(self, node):
+ """visit dictcomp: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_listcomp(self, _):
+ """leave dictcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def leave_assign(self, node):
+ self._store_type_annotation_names(node)
+
+ def leave_with(self, node):
+ self._store_type_annotation_names(node)
+
+ def visit_arguments(self, node):
+ for annotation in node.type_comment_args:
+ self._store_type_annotation_node(annotation)
+
    # Relying on other checker's options, which might not have been initialized yet.
    @decorators.cachedproperty
    def _analyse_fallback_blocks(self):
        # Shared option owned by another checker; resolved lazily on first access.
        return get_global_option(self, "analyse-fallback-blocks", default=False)
+
    @decorators.cachedproperty
    def _ignored_modules(self):
        # Shared "ignored-modules" option; resolved lazily on first access.
        return get_global_option(self, "ignored-modules", default=[])
+
    @decorators.cachedproperty
    def _allow_global_unused_variables(self):
        # This checker's own option, read lazily for consistency with the above.
        return get_global_option(self, "allow-global-unused-variables", default=True)
+
+ @staticmethod
+ def _defined_in_function_definition(node, frame):
+ in_annotation_or_default = False
+ if isinstance(frame, astroid.FunctionDef) and node.statement() is frame:
+ in_annotation_or_default = (
+ node in frame.args.annotations
+ or node in frame.args.kwonlyargs_annotations
+ or node is frame.args.varargannotation
+ or node is frame.args.kwargannotation
+ ) or frame.args.parent_of(node)
+ return in_annotation_or_default
+
    @staticmethod
    def _is_variable_violation(
        node,
        name,
        defnode,
        stmt,
        defstmt,
        frame,
        defframe,
        base_scope_type,
        recursive_klass,
    ):
        # pylint: disable=too-many-nested-blocks
        """Decide whether using *name* at *node* may be a violation.

        :param node: the Name node being checked
        :param name: the name being checked
        :param defnode: the assignment-like node defining the name
        :param stmt: statement of *node*
        :param defstmt: statement of *defnode*
        :param frame: scope of *stmt* (``stmt.scope()``)
        :param defframe: frame of *defstmt*
        :param base_scope_type: scope type the lookup started from
        :param recursive_klass: True when a class references its own name
        :returns: (maybee0601, annotation_return, use_outer_definition) —
            whether an E0601-style message may apply, whether the use is a
            return annotation naming the enclosing class, and whether the
            caller should keep looking in an outer scope.
        """
        maybee0601 = True
        annotation_return = False
        use_outer_definition = False
        if frame is not defframe:
            maybee0601 = _detect_global_scope(node, frame, defframe)
        elif defframe.parent is None:
            # we are at the module level, check the name is not
            # defined in builtins
            if name in defframe.scope_attrs or astroid.builtin_lookup(name)[1]:
                maybee0601 = False
        else:
            # we are in a local scope, check the name is not
            # defined in global or builtin scope
            # skip this lookup if name is assigned later in function scope/lambda
            # Note: the node.frame() is not the same as the `frame` argument which is
            # equivalent to frame.statement().scope()
            forbid_lookup = (
                isinstance(frame, astroid.FunctionDef)
                or isinstance(node.frame(), astroid.Lambda)
            ) and _assigned_locally(node)
            if not forbid_lookup and defframe.root().lookup(name)[1]:
                maybee0601 = False
                use_outer_definition = stmt == defstmt and not isinstance(
                    defnode, astroid.node_classes.Comprehension
                )
            else:
                # check if we have a nonlocal
                if name in defframe.locals:
                    maybee0601 = not any(
                        isinstance(child, astroid.Nonlocal) and name in child.names
                        for child in defframe.get_children()
                    )

            if (
                base_scope_type == "lambda"
                and isinstance(frame, astroid.ClassDef)
                and name in frame.locals
            ):

                # This rule verifies that if the definition node of the
                # checked name is an Arguments node and if the name
                # is used a default value in the arguments defaults
                # and the actual definition of the variable label
                # is happening before the Arguments definition.
                #
                # bar = None
                # foo = lambda bar=bar: bar
                #
                # In this case, maybee0601 should be False, otherwise
                # it should be True.
                maybee0601 = not (
                    isinstance(defnode, astroid.Arguments)
                    and node in defnode.defaults
                    and frame.locals[name][0].fromlineno < defstmt.fromlineno
                )
            elif isinstance(defframe, astroid.ClassDef) and isinstance(
                frame, astroid.FunctionDef
            ):
                # Special rule for function return annotations,
                # which uses the same name as the class where
                # the function lives.
                if node is frame.returns and defframe.parent_of(frame.returns):
                    maybee0601 = annotation_return = True

                if (
                    maybee0601
                    and defframe.name in defframe.locals
                    and defframe.locals[name][0].lineno < frame.lineno
                ):
                    # Detect class assignments with the same
                    # name as the class. In this case, no warning
                    # should be raised.
                    maybee0601 = False
                if isinstance(node.parent, astroid.Arguments):
                    maybee0601 = stmt.fromlineno <= defstmt.fromlineno
            elif recursive_klass:
                maybee0601 = True
            else:
                maybee0601 = maybee0601 and stmt.fromlineno <= defstmt.fromlineno
                if maybee0601 and stmt.fromlineno == defstmt.fromlineno:
                    if (
                        isinstance(defframe, astroid.FunctionDef)
                        and frame is defframe
                        and defframe.parent_of(node)
                        and stmt is not defstmt
                    ):
                        # Single statement function, with the statement on the
                        # same line as the function definition
                        maybee0601 = False

        # Look for type checking definitions inside a type checking guard.
        if isinstance(defstmt, (astroid.Import, astroid.ImportFrom)):
            defstmt_parent = defstmt.parent

            if (
                isinstance(defstmt_parent, astroid.If)
                and defstmt_parent.test.as_string() in TYPING_TYPE_CHECKS_GUARDS
            ):
                # Exempt those definitions that are used inside the type checking
                # guard or that are defined in both type checking guard branches.
                used_in_branch = defstmt_parent.parent_of(node)
                defined_in_or_else = False

                for definition in defstmt_parent.orelse:
                    if isinstance(definition, astroid.Assign):
                        defined_in_or_else = any(
                            target.name == name for target in definition.targets
                        )
                        if defined_in_or_else:
                            break

                if not used_in_branch and not defined_in_or_else:
                    maybee0601 = True

        return maybee0601, annotation_return, use_outer_definition
+
+ def _ignore_class_scope(self, node):
+ """
+ Return True if the node is in a local class scope, as an assignment.
+
+ :param node: Node considered
+ :type node: astroid.Node
+ :return: True if the node is in a local class scope, as an assignment. False otherwise.
+ :rtype: bool
+ """
+ # Detect if we are in a local class scope, as an assignment.
+ # For example, the following is fair game.
+ #
+ # class A:
+ # b = 1
+ # c = lambda b=b: b * b
+ #
+ # class B:
+ # tp = 1
+ # def func(self, arg: tp):
+ # ...
+ # class C:
+ # tp = 2
+ # def func(self, arg=tp):
+ # ...
+
+ name = node.name
+ frame = node.statement().scope()
+ in_annotation_or_default = self._defined_in_function_definition(node, frame)
+ if in_annotation_or_default:
+ frame_locals = frame.parent.scope().locals
+ else:
+ frame_locals = frame.locals
+ return not (
+ (isinstance(frame, astroid.ClassDef) or in_annotation_or_default)
+ and name in frame_locals
+ )
+
def _loopvar_name(self, node, name):
    """Emit undefined-loop-variable when *name* is used outside the loop
    that assigns it and that loop's body may never run.

    :param node: the Name node where the variable is used
    :param name: the variable's name
    """
    # filter variables according to node's scope
    if not self.linter.is_message_enabled("undefined-loop-variable"):
        return
    # All statements assigning this name that are visible from the usage.
    astmts = [stmt for stmt in node.lookup(name)[1] if hasattr(stmt, "assign_type")]
    # If this variable usage exists inside a function definition
    # that exists in the same loop,
    # the usage is safe because the function will not be defined either if
    # the variable is not defined.
    scope = node.scope()
    if isinstance(scope, astroid.FunctionDef) and any(
        asmt.statement().parent_of(scope) for asmt in astmts
    ):
        return

    # filter variables according their respective scope test is_statement
    # and parent to avoid #74747. This is not a total fix, which would
    # introduce a mechanism similar to special attribute lookup in
    # modules. Also, in order to get correct inference in this case, the
    # scope lookup rules would need to be changed to return the initial
    # assignment (which does not exist in code per se) as well as any later
    # modifications.
    if (
        not astmts
        or (astmts[0].is_statement or astmts[0].parent)
        and astmts[0].statement().parent_of(node)
    ):
        _astmts = []
    else:
        _astmts = astmts[:1]
    # Drop assignments shadowed by an enclosing one, unless the usage sits
    # in the for-loop's `else:` branch, where the loop variable may never
    # have been bound.
    for i, stmt in enumerate(astmts[1:]):
        if astmts[i].statement().parent_of(stmt) and not in_for_else_branch(
            astmts[i].statement(), stmt
        ):
            continue
        _astmts.append(stmt)
    astmts = _astmts
    # Only a single surviving assignment can be analysed reliably.
    if len(astmts) != 1:
        return

    assign = astmts[0].assign_type()
    # Only loop-style assignments used outside their own statement matter.
    if not (
        isinstance(
            assign, (astroid.For, astroid.Comprehension, astroid.GeneratorExp)
        )
        and assign.statement() is not node.statement()
    ):
        return

    # For functions we can do more by inferring the length of the itered object
    if not isinstance(assign, astroid.For):
        self.add_message("undefined-loop-variable", args=name, node=node)
        return

    try:
        inferred = next(assign.iter.infer())
    except astroid.InferenceError:
        # Cannot prove the iterable is non-empty: warn.
        self.add_message("undefined-loop-variable", args=name, node=node)
    else:
        if (
            isinstance(inferred, astroid.Instance)
            and inferred.qname() == BUILTIN_RANGE
        ):
            # Consider range() objects safe, even if they might not yield any results.
            return

        # Consider sequences.
        sequences = (
            astroid.List,
            astroid.Tuple,
            astroid.Dict,
            astroid.Set,
            objects.FrozenSet,
        )
        if not isinstance(inferred, sequences):
            # Not a literal sequence: length unknown, warn.
            self.add_message("undefined-loop-variable", args=name, node=node)
            return

        # Empty literal: the loop body never executes, so the name
        # is guaranteed to be undefined afterwards.
        elements = getattr(inferred, "elts", getattr(inferred, "items", []))
        if not elements:
            self.add_message("undefined-loop-variable", args=name, node=node)
+
def _check_is_unused(self, name, node, stmt, global_names, nonlocal_names):
    """Emit unused-variable / unused-import / possibly-unused-variable
    for *name*, unless one of the exemptions below applies.

    :param name: the name being checked
    :param node: the function scope owning the name
    :param stmt: the statement that binds the name
    :param global_names: names declared ``global`` in this scope
    :param nonlocal_names: names declared ``nonlocal`` in this scope
    """
    # pylint: disable=too-many-branches
    # Ignore some special names specified by user configuration.
    if self._is_name_ignored(stmt, name):
        return
    # Ignore names that were added dynamically to the Function scope
    if (
        isinstance(node, astroid.FunctionDef)
        and name == "__class__"
        and len(node.locals["__class__"]) == 1
        and isinstance(node.locals["__class__"][0], astroid.ClassDef)
    ):
        return

    # Ignore names imported by the global statement.
    if isinstance(stmt, (astroid.Global, astroid.Import, astroid.ImportFrom)):
        # Detect imports, assigned to global statements.
        if global_names and _import_name_is_global(stmt, global_names):
            return

    # Positional plus keyword-only parameter names of the function.
    argnames = list(
        itertools.chain(node.argnames(), [arg.name for arg in node.args.kwonlyargs])
    )
    # Care about functions with unknown argument (builtins)
    if name in argnames:
        self._check_unused_arguments(name, node, stmt, argnames)
    else:
        if stmt.parent and isinstance(
            stmt.parent, (astroid.Assign, astroid.AnnAssign)
        ):
            # nonlocal assignments write to an outer scope; not unused here.
            if name in nonlocal_names:
                return

        qname = asname = None
        if isinstance(stmt, (astroid.Import, astroid.ImportFrom)):
            # Need the complete name, which we don't have in .locals.
            if len(stmt.names) > 1:
                import_names = next(
                    (names for names in stmt.names if name in names), None
                )
            else:
                import_names = stmt.names[0]
            if import_names:
                qname, asname = import_names
                # Report under the locally bound name.
                name = asname or qname

        if _has_locals_call_after_node(stmt, node.scope()):
            # locals() may pick the name up dynamically, so only a
            # weaker "possibly unused" warning is justified.
            message_name = "possibly-unused-variable"
        else:
            if isinstance(stmt, astroid.Import):
                if asname is not None:
                    msg = "%s imported as %s" % (qname, asname)
                else:
                    msg = "import %s" % name
                self.add_message("unused-import", args=msg, node=stmt)
                return
            if isinstance(stmt, astroid.ImportFrom):
                if asname is not None:
                    msg = "%s imported from %s as %s" % (
                        qname,
                        stmt.modname,
                        asname,
                    )
                else:
                    msg = "%s imported from %s" % (name, stmt.modname)
                self.add_message("unused-import", args=msg, node=stmt)
                return
            message_name = "unused-variable"

        # Don't check function stubs created only for type information
        if utils.is_overload_stub(node):
            return

        self.add_message(message_name, args=name, node=stmt)
+
def _is_name_ignored(self, stmt, name):
    """Return whether *name* matches the user-configured "ignore" regex.

    Function arguments are matched against ``ignored-argument-names``;
    every other binding is matched against ``dummy-variables-rgx``.
    """
    is_function_argument = isinstance(stmt, astroid.Arguments) or (
        isinstance(stmt, astroid.AssignName)
        and isinstance(stmt.parent, astroid.Arguments)
    )
    if is_function_argument:
        pattern = self.config.ignored_argument_names
    else:
        pattern = self.config.dummy_variables_rgx
    return pattern and pattern.match(name)
+
def _check_unused_arguments(self, name, node, stmt, argnames):
    """Emit unused-argument for *name* in function *node* unless one of
    the exemptions below applies.
    """
    is_method = node.is_method()
    klass = node.parent.frame()

    # Confidence is lower for methods of classes with unknown ancestry,
    # since the argument could be required by an unseen base class.
    if is_method and isinstance(klass, astroid.ClassDef):
        known_bases = utils.has_known_bases(klass)
        confidence = INFERENCE if known_bases else INFERENCE_FAILURE
    else:
        confidence = HIGH

    if is_method:
        # Don't warn for the first argument of a (non static) method
        # (self / cls).
        if name == argnames[0] and node.type != "staticmethod":
            return
        # Don't warn for argument of an overridden method: the
        # signature is imposed by the ancestor.
        overridden = overridden_method(klass, node.name)
        if overridden is not None and name in overridden.argnames():
            return
        # Special methods have fixed signatures, except __init__/__new__.
        if node.name in utils.PYMETHODS and node.name not in ("__init__", "__new__"):
            return

    # Don't check callback arguments: the caller dictates them.
    is_callback = any(
        node.name.startswith(cb) or node.name.endswith(cb)
        for cb in self.config.callbacks
    )
    if is_callback:
        return
    # Don't check arguments of singledispatch.register function.
    if utils.is_registered_in_singledispatch_function(node):
        return
    # Don't check function stubs created only for type information
    if utils.is_overload_stub(node):
        return
    # Don't check protocol classes
    if utils.is_protocol_class(klass):
        return

    self.add_message("unused-argument", args=name, node=stmt, confidence=confidence)
+
def _check_late_binding_closure(self, node, assignment_node):
    """Emit cell-var-from-loop when *node* is a closure reference to a
    loop variable: the closure sees the variable's *final* value, not
    the value at definition time (the classic late-binding pitfall).

    :param node: the Name node used inside the closure
    :param assignment_node: the node that assigns the name
    """
    def _is_direct_lambda_call():
        # A lambda invoked immediately ((lambda: x)()) evaluates while
        # the loop variable still holds its current value — safe.
        return (
            isinstance(node_scope.parent, astroid.Call)
            and node_scope.parent.func is node_scope
        )

    node_scope = node.scope()
    # Only functions/lambdas capture variables by closure.
    if not isinstance(node_scope, (astroid.Lambda, astroid.FunctionDef)):
        return
    # Binding as a default value (lambda i=i: ...) is the standard fix.
    if isinstance(node.parent, astroid.Arguments):
        return

    if isinstance(assignment_node, astroid.Comprehension):
        if assignment_node.parent.parent_of(node.scope()):
            self.add_message("cell-var-from-loop", node=node, args=node.name)
    else:
        assign_scope = assignment_node.scope()
        # Walk up from the assignment looking for an enclosing For loop,
        # stopping at the assignment's own scope.
        maybe_for = assignment_node
        while not isinstance(maybe_for, astroid.For):
            if maybe_for is assign_scope:
                break
            maybe_for = maybe_for.parent
        else:
            # Reached only when a For node was found (no `break`).
            if (
                maybe_for.parent_of(node_scope)
                and not _is_direct_lambda_call()
                and not isinstance(node_scope.statement(), astroid.Return)
            ):
                self.add_message("cell-var-from-loop", node=node, args=node.name)
+
def _should_ignore_redefined_builtin(self, stmt):
    """Return True when the builtin redefinition comes from a module the
    user explicitly allowed via redefining-builtins-modules.
    """
    return (
        isinstance(stmt, astroid.ImportFrom)
        and stmt.modname in self.config.redefining_builtins_modules
    )
+
def _has_homonym_in_upper_function_scope(self, node, index):
    """
    Return True if there is a node with the same name in the to_consume dict of an upper scope
    and if that scope is a function

    :param node: node to check for
    :type node: astroid.Node
    :param index: index of the current consumer inside self._to_consume
    :type index: int
    :return: True if there is a node with the same name in the to_consume dict of an upper scope
    and if that scope is a function
    :rtype: bool
    """
    # Walk the consumer stack outward from the scope just above ours.
    enclosing_consumers = self._to_consume[index - 1 :: -1]
    return any(
        consumer.scope_type == "function" and node.name in consumer.to_consume
        for consumer in enclosing_consumers
    )
+
def _store_type_annotation_node(self, type_annotation):
    """Given a type annotation, store all the name nodes it refers to."""
    if isinstance(type_annotation, astroid.Name):
        # Plain name: record it directly.
        self._type_annotation_names.append(type_annotation.name)
    elif isinstance(type_annotation, astroid.Subscript):
        value = type_annotation.value
        typing_attribute = (
            isinstance(value, astroid.Attribute)
            and isinstance(value.expr, astroid.Name)
            and value.expr.name == TYPING_MODULE
        )
        if typing_attribute:
            # e.g. typing.List[...] — only the module name is consumed.
            self._type_annotation_names.append(TYPING_MODULE)
        else:
            # Generic subscript: record every name appearing anywhere in it.
            self._type_annotation_names.extend(
                name_node.name
                for name_node in type_annotation.nodes_of_class(astroid.Name)
            )
+
def _store_type_annotation_names(self, node):
    """Store the names referenced by *node*'s type annotation, if any.

    :param node: a node carrying an optional ``type_annotation`` attribute
        (e.g. an Assign or For node annotated via a type comment)
    """
    type_annotation = node.type_annotation
    if not type_annotation:
        return
    # Use the local already bound above instead of re-reading the
    # attribute (the original re-read node.type_annotation here,
    # leaving the local dead).
    self._store_type_annotation_node(type_annotation)
+
def _check_self_cls_assign(self, node):
    """Check that self/cls don't get assigned.

    :param node: an Assign node appearing inside a method body
    """
    assign_names = {
        target.name
        for target in node.targets
        if isinstance(target, astroid.AssignName)
    }
    scope = node.scope()
    # If the assigned name is declared nonlocal, the binding actually
    # targets the enclosing function's scope — inspect that one instead.
    nonlocals_with_same_name = any(
        child
        for child in scope.body
        if isinstance(child, astroid.Nonlocal) and assign_names & set(child.names)
    )
    if nonlocals_with_same_name:
        scope = node.scope().parent.scope()

    # Only bound methods have a self/cls first argument to protect.
    if not (
        isinstance(scope, astroid.scoped_nodes.FunctionDef)
        and scope.is_method()
        and "builtins.staticmethod" not in scope.decoratornames()
    ):
        return
    argument_names = scope.argnames()
    if not argument_names:
        return
    self_cls_name = argument_names[0]
    target_assign_names = (
        target.name
        for target in node.targets
        if isinstance(target, astroid.node_classes.AssignName)
    )
    if self_cls_name in target_assign_names:
        # BUGFIX: was args=(self_cls_name) — parenthesized string, not a
        # tuple; now a real 1-tuple, consistent with other add_message
        # calls ("%s" formatting is identical for both).
        self.add_message("self-cls-assignment", node=node, args=(self_cls_name,))
+
def _check_unpacking(self, inferred, node, targets):
    """Check for unbalanced tuple unpacking
    and unpacking non sequences.
    """
    if utils.is_inside_abstract_class(node):
        return
    if utils.is_comprehension(node):
        return
    if inferred is astroid.Uninferable:
        return

    unpacking_vararg = (
        isinstance(inferred.parent, astroid.Arguments)
        and isinstance(node.value, astroid.Name)
        and node.value.name == inferred.parent.vararg
    )
    if unpacking_vararg:
        # Variable-length argument, we can't determine the length.
        return

    if isinstance(inferred, (astroid.Tuple, astroid.List)):
        # Attempt to check that the unpacking is properly balanced.
        values = inferred.itered()
        if len(values) == len(targets):
            return
        # A starred target absorbs any surplus or deficit.
        if any(isinstance(target, astroid.Starred) for target in targets):
            return
        self.add_message(
            "unbalanced-tuple-unpacking",
            node=node,
            args=(
                _get_unpacking_extra_info(node, inferred),
                len(targets),
                len(values),
            ),
        )
    elif not utils.is_iterable(inferred):
        # The RHS cannot be unpacked at all.
        self.add_message(
            "unpacking-non-sequence",
            node=node,
            args=(_get_unpacking_extra_info(node, inferred),),
        )
+
def _check_module_attrs(self, node, module, module_names):
    """check that module_names (list of string) are accessible through the
    given module
    if the latest access name corresponds to a module, return it

    Emits no-name-in-module for the first component that cannot be
    resolved. NOTE: this consumes *module_names* (pops from the front).
    """
    assert isinstance(module, astroid.Module), module
    while module_names:
        name = module_names.pop(0)
        if name == "__dict__":
            # __dict__ is dynamic; stop resolving here.
            module = None
            break
        try:
            # Follow the attribute one step deeper.
            module = next(module.getattr(name)[0].infer())
            if module is astroid.Uninferable:
                return None
        except astroid.NotFoundError:
            if module.name in self._ignored_modules:
                return None
            self.add_message(
                "no-name-in-module", args=(name, module.name), node=node
            )
            return None
        except astroid.InferenceError:
            return None
    if module_names:
        # Leftover components after hitting __dict__: cannot be verified.
        modname = module.name if module else "__dict__"
        self.add_message(
            "no-name-in-module", node=node, args=(".".join(module_names), modname)
        )
        return None
    if isinstance(module, astroid.Module):
        return module
    return None
+
def _check_all(self, node, not_consumed):
    """Validate the module's __all__: every element must be a string
    naming something defined in (or importable from) the module.

    Names listed in __all__ are removed from *not_consumed* so they are
    not also reported as unused.
    """
    assigned = next(node.igetattr("__all__"))
    if assigned is astroid.Uninferable:
        return

    for elt in getattr(assigned, "elts", ()):
        try:
            elt_name = next(elt.infer())
        except astroid.InferenceError:
            continue
        if elt_name is astroid.Uninferable:
            continue
        if not elt_name.parent:
            continue

        # __all__ entries must be string constants.
        if not isinstance(elt_name, astroid.Const) or not isinstance(
            elt_name.value, str
        ):
            self.add_message("invalid-all-object", args=elt.as_string(), node=elt)
            continue

        # From here on, elt_name is the *string* value, not the node.
        elt_name = elt_name.value
        # If elt is in not_consumed, remove it from not_consumed
        if elt_name in not_consumed:
            del not_consumed[elt_name]
            continue

        if elt_name not in node.locals:
            if not node.package:
                self.add_message(
                    "undefined-all-variable", args=(elt_name,), node=elt
                )
            else:
                # For a package __init__, the name may refer to a submodule.
                basename = os.path.splitext(node.file)[0]
                if os.path.basename(basename) == "__init__":
                    name = node.name + "." + elt_name
                    try:
                        modutils.file_from_modpath(name.split("."))
                    except ImportError:
                        self.add_message(
                            "undefined-all-variable", args=(elt_name,), node=elt
                        )
                    except SyntaxError:
                        # don't yield a syntax-error warning,
                        # because it will be later yielded
                        # when the file will be checked
                        pass
+
def _check_globals(self, not_consumed):
    """Emit unused-variable for every module-level name never consumed,
    unless the user opted out via allow-global-unused-variables.
    """
    if self._allow_global_unused_variables:
        return
    unused_pairs = (
        (name, offender)
        for name, offenders in not_consumed.items()
        for offender in offenders
    )
    for name, offender in unused_pairs:
        self.add_message("unused-variable", args=(name,), node=offender)
+
def _check_imports(self, not_consumed):
    """Emit unused-import / unused-wildcard-import for every imported
    name left in *not_consumed* at module exit, with exemptions for
    typing-only, __future__, special-object and TYPE_CHECKING imports.

    Also tears down self._to_consume, since this runs at leave_module.
    """
    local_names = _fix_dot_imports(not_consumed)
    checked = set()
    for name, stmt in local_names:
        for imports in stmt.names:
            real_name = imported_name = imports[0]
            if imported_name == "*":
                # Wildcards are reported under the local name they bound.
                real_name = name
            as_name = imports[1]
            if real_name in checked:
                continue
            if name not in (real_name, as_name):
                continue
            checked.add(real_name)

            # Case 1: `import x` or a relative `from . import x`.
            if isinstance(stmt, astroid.Import) or (
                isinstance(stmt, astroid.ImportFrom) and not stmt.modname
            ):
                if isinstance(stmt, astroid.ImportFrom) and SPECIAL_OBJ.search(
                    imported_name
                ):
                    # Filter special objects (__doc__, __all__) etc.,
                    # because they can be imported for exporting.
                    continue

                if imported_name in self._type_annotation_names:
                    # Most likely a typing import if it wasn't used so far.
                    continue

                # `import x as _` is the conventional "intentionally unused".
                if as_name == "_":
                    continue
                if as_name is None:
                    msg = "import %s" % imported_name
                else:
                    msg = "%s imported as %s" % (imported_name, as_name)
                if not _is_type_checking_import(stmt):
                    self.add_message("unused-import", args=msg, node=stmt)
            # Case 2: `from mod import x` (except __future__ imports).
            elif isinstance(stmt, astroid.ImportFrom) and stmt.modname != FUTURE:
                if SPECIAL_OBJ.search(imported_name):
                    # Filter special objects (__doc__, __all__) etc.,
                    # because they can be imported for exporting.
                    continue

                if _is_from_future_import(stmt, name):
                    # Check if the name is in fact loaded from a
                    # __future__ import in another module.
                    continue

                if imported_name in self._type_annotation_names:
                    # Most likely a typing import if it wasn't used so far.
                    continue

                if imported_name == "*":
                    self.add_message("unused-wildcard-import", args=name, node=stmt)
                else:
                    if as_name is None:
                        msg = "%s imported from %s" % (imported_name, stmt.modname)
                    else:
                        fields = (imported_name, stmt.modname, as_name)
                        msg = "%s imported from %s as %s" % fields
                    if not _is_type_checking_import(stmt):
                        self.add_message("unused-import", args=msg, node=stmt)
    # Consumption analysis is over for this module.
    del self._to_consume
+
def _check_metaclasses(self, node):
    """ Update consumption analysis for metaclasses. """
    consumed = []  # [(scope_locals, consumed_key)]

    for child in node.get_children():
        if isinstance(child, astroid.ClassDef):
            consumed += self._check_classdef_metaclasses(child, node)

    # Pop the consumed items, in order to avoid having
    # unused-import and unused-variable false positives
    for scope_locals, consumed_key in consumed:
        scope_locals.pop(consumed_key, None)
+
def _check_classdef_metaclasses(self, klass, parent_node):
    """Mark the metaclass name used by *klass* as consumed, and emit
    undefined-variable if the metaclass name cannot be resolved at all.

    :return: [(scope_locals, consumed_key)] pairs for the caller to pop.
    """
    if not klass._metaclass:
        # Skip if this class doesn't use explicitly a metaclass, but inherits it from ancestors
        return []

    consumed = []  # [(scope_locals, consumed_key)]
    metaclass = klass.metaclass()

    # Resolve the name under which the metaclass is referenced.
    name = None
    if isinstance(klass._metaclass, astroid.Name):
        name = klass._metaclass.name
    elif metaclass:
        name = metaclass.root().name

    found = None
    name = METACLASS_NAME_TRANSFORMS.get(name, name)
    if name:
        # check enclosing scopes starting from most local
        for scope_locals, _, _ in self._to_consume[::-1]:
            found = scope_locals.get(name)
            if found:
                consumed.append((scope_locals, name))
                break

    # Name not found in any consumer scope and inference failed too:
    # the metaclass reference may be genuinely undefined.
    if found is None and not metaclass:
        name = None
        if isinstance(klass._metaclass, astroid.Name):
            name = klass._metaclass.name
        elif isinstance(klass._metaclass, astroid.Attribute):
            name = klass._metaclass.as_string()

        if name is not None:
            # Builtins, module-scope attributes, user-declared builtins
            # and names in the parent scope are all legitimate.
            if not (
                name in astroid.Module.scope_attrs
                or utils.is_builtin(name)
                or name in self.config.additional_builtins
                or name in parent_node.locals
            ):
                self.add_message("undefined-variable", node=klass, args=(name,))

    return consumed
+
+
def register(linter):
    """required method to auto register this checker"""
    checker = VariablesChecker(linter)
    linter.register_checker(checker)