summaryrefslogtreecommitdiff
path: root/venv/Lib/site-packages/pylint
diff options
context:
space:
mode:
Diffstat (limited to 'venv/Lib/site-packages/pylint')
-rw-r--r--venv/Lib/site-packages/pylint/__init__.py43
-rw-r--r--venv/Lib/site-packages/pylint/__main__.py7
-rw-r--r--venv/Lib/site-packages/pylint/__pkginfo__.py85
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/__init__.cpython-37.pycbin0 -> 1084 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/__main__.cpython-37.pycbin0 -> 210 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/__pkginfo__.cpython-37.pycbin0 -> 2632 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/config.cpython-37.pycbin0 -> 25815 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/constants.cpython-37.pycbin0 -> 1018 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/epylint.cpython-37.pycbin0 -> 4958 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/exceptions.cpython-37.pycbin0 -> 1361 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/graph.cpython-37.pycbin0 -> 5211 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/interfaces.cpython-37.pycbin0 -> 3665 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/lint.cpython-37.pycbin0 -> 45362 bytes
-rw-r--r--venv/Lib/site-packages/pylint/__pycache__/testutils.cpython-37.pycbin0 -> 9521 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__init__.py64
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pycbin0 -> 1580 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pycbin0 -> 2722 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pycbin0 -> 61785 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pycbin0 -> 6481 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pycbin0 -> 44537 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pycbin0 -> 11667 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pycbin0 -> 15668 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pycbin0 -> 31580 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pycbin0 -> 25427 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pycbin0 -> 10919 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pycbin0 -> 4597 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pycbin0 -> 2422 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pycbin0 -> 34941 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pycbin0 -> 3254 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pycbin0 -> 45321 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pycbin0 -> 12304 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pycbin0 -> 9755 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pycbin0 -> 12738 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pycbin0 -> 17427 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pycbin0 -> 40274 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pycbin0 -> 31460 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pycbin0 -> 44587 bytes
-rw-r--r--venv/Lib/site-packages/pylint/checkers/async.py89
-rw-r--r--venv/Lib/site-packages/pylint/checkers/base.py2333
-rw-r--r--venv/Lib/site-packages/pylint/checkers/base_checker.py187
-rw-r--r--venv/Lib/site-packages/pylint/checkers/classes.py1844
-rw-r--r--venv/Lib/site-packages/pylint/checkers/design_analysis.py496
-rw-r--r--venv/Lib/site-packages/pylint/checkers/exceptions.py546
-rw-r--r--venv/Lib/site-packages/pylint/checkers/format.py1332
-rw-r--r--venv/Lib/site-packages/pylint/checkers/imports.py981
-rw-r--r--venv/Lib/site-packages/pylint/checkers/logging.py384
-rw-r--r--venv/Lib/site-packages/pylint/checkers/misc.py171
-rw-r--r--venv/Lib/site-packages/pylint/checkers/newstyle.py127
-rw-r--r--venv/Lib/site-packages/pylint/checkers/python3.py1398
-rw-r--r--venv/Lib/site-packages/pylint/checkers/raw_metrics.py119
-rw-r--r--venv/Lib/site-packages/pylint/checkers/refactoring.py1510
-rw-r--r--venv/Lib/site-packages/pylint/checkers/similar.py452
-rw-r--r--venv/Lib/site-packages/pylint/checkers/spelling.py411
-rw-r--r--venv/Lib/site-packages/pylint/checkers/stdlib.py452
-rw-r--r--venv/Lib/site-packages/pylint/checkers/strings.py755
-rw-r--r--venv/Lib/site-packages/pylint/checkers/typecheck.py1770
-rw-r--r--venv/Lib/site-packages/pylint/checkers/utils.py1253
-rw-r--r--venv/Lib/site-packages/pylint/checkers/variables.py1987
-rw-r--r--venv/Lib/site-packages/pylint/config.py913
-rw-r--r--venv/Lib/site-packages/pylint/constants.py43
-rw-r--r--venv/Lib/site-packages/pylint/epylint.py197
-rw-r--r--venv/Lib/site-packages/pylint/exceptions.py29
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__init__.py0
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/__init__.cpython-37.pycbin0 -> 181 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/_check_docs_utils.cpython-37.pycbin0 -> 18819 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/bad_builtin.cpython-37.pycbin0 -> 1967 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/broad_try_clause.cpython-37.pycbin0 -> 1702 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/check_docs.cpython-37.pycbin0 -> 685 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/check_elif.cpython-37.pycbin0 -> 2647 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/comparetozero.cpython-37.pycbin0 -> 1959 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/docparams.cpython-37.pycbin0 -> 14746 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/docstyle.cpython-37.pycbin0 -> 2503 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/emptystring.cpython-37.pycbin0 -> 2035 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/mccabe.cpython-37.pycbin0 -> 5579 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/overlapping_exceptions.cpython-37.pycbin0 -> 2604 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/__pycache__/redefined_variable_type.cpython-37.pycbin0 -> 3248 bytes
-rw-r--r--venv/Lib/site-packages/pylint/extensions/_check_docs_utils.py792
-rw-r--r--venv/Lib/site-packages/pylint/extensions/bad_builtin.py69
-rw-r--r--venv/Lib/site-packages/pylint/extensions/broad_try_clause.py59
-rw-r--r--venv/Lib/site-packages/pylint/extensions/check_docs.py23
-rw-r--r--venv/Lib/site-packages/pylint/extensions/check_elif.py77
-rw-r--r--venv/Lib/site-packages/pylint/extensions/comparetozero.py74
-rw-r--r--venv/Lib/site-packages/pylint/extensions/docparams.py536
-rw-r--r--venv/Lib/site-packages/pylint/extensions/docstyle.py89
-rw-r--r--venv/Lib/site-packages/pylint/extensions/emptystring.py74
-rw-r--r--venv/Lib/site-packages/pylint/extensions/mccabe.py196
-rw-r--r--venv/Lib/site-packages/pylint/extensions/overlapping_exceptions.py88
-rw-r--r--venv/Lib/site-packages/pylint/extensions/redefined_variable_type.py116
-rw-r--r--venv/Lib/site-packages/pylint/graph.py197
-rw-r--r--venv/Lib/site-packages/pylint/interfaces.py102
-rw-r--r--venv/Lib/site-packages/pylint/lint.py1817
-rw-r--r--venv/Lib/site-packages/pylint/message/__init__.py54
-rw-r--r--venv/Lib/site-packages/pylint/message/__pycache__/__init__.cpython-37.pycbin0 -> 664 bytes
-rw-r--r--venv/Lib/site-packages/pylint/message/__pycache__/message.cpython-37.pycbin0 -> 1225 bytes
-rw-r--r--venv/Lib/site-packages/pylint/message/__pycache__/message_definition.cpython-37.pycbin0 -> 2982 bytes
-rw-r--r--venv/Lib/site-packages/pylint/message/__pycache__/message_definition_store.cpython-37.pycbin0 -> 4075 bytes
-rw-r--r--venv/Lib/site-packages/pylint/message/__pycache__/message_handler_mix_in.cpython-37.pycbin0 -> 11049 bytes
-rw-r--r--venv/Lib/site-packages/pylint/message/__pycache__/message_id_store.cpython-37.pycbin0 -> 4925 bytes
-rw-r--r--venv/Lib/site-packages/pylint/message/message.py53
-rw-r--r--venv/Lib/site-packages/pylint/message/message_definition.py84
-rw-r--r--venv/Lib/site-packages/pylint/message/message_definition_store.py90
-rw-r--r--venv/Lib/site-packages/pylint/message/message_handler_mix_in.py393
-rw-r--r--venv/Lib/site-packages/pylint/message/message_id_store.py128
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__init__.py8
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/__init__.cpython-37.pycbin0 -> 241 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/diadefslib.cpython-37.pycbin0 -> 7621 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/diagrams.cpython-37.pycbin0 -> 8716 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/inspector.cpython-37.pycbin0 -> 10187 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/main.cpython-37.pycbin0 -> 4522 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/utils.cpython-37.pycbin0 -> 5787 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/vcgutils.cpython-37.pycbin0 -> 4697 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/__pycache__/writer.cpython-37.pycbin0 -> 7286 bytes
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/diadefslib.py238
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/diagrams.py268
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/inspector.py357
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/main.py214
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/utils.py220
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/vcgutils.py229
-rw-r--r--venv/Lib/site-packages/pylint/pyreverse/writer.py213
-rw-r--r--venv/Lib/site-packages/pylint/reporters/__init__.py34
-rw-r--r--venv/Lib/site-packages/pylint/reporters/__pycache__/__init__.cpython-37.pycbin0 -> 823 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/__pycache__/base_reporter.cpython-37.pycbin0 -> 2767 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/__pycache__/collecting_reporter.cpython-37.pycbin0 -> 817 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/__pycache__/json_reporter.cpython-37.pycbin0 -> 2003 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/__pycache__/reports_handler_mix_in.cpython-37.pycbin0 -> 3028 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/__pycache__/text.cpython-37.pycbin0 -> 7263 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/base_reporter.py66
-rw-r--r--venv/Lib/site-packages/pylint/reporters/collecting_reporter.py21
-rw-r--r--venv/Lib/site-packages/pylint/reporters/json_reporter.py58
-rw-r--r--venv/Lib/site-packages/pylint/reporters/reports_handler_mix_in.py79
-rw-r--r--venv/Lib/site-packages/pylint/reporters/text.py247
-rw-r--r--venv/Lib/site-packages/pylint/reporters/ureports/__init__.py96
-rw-r--r--venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/__init__.cpython-37.pycbin0 -> 3065 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/nodes.cpython-37.pycbin0 -> 6062 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/text_writer.cpython-37.pycbin0 -> 3673 bytes
-rw-r--r--venv/Lib/site-packages/pylint/reporters/ureports/nodes.py188
-rw-r--r--venv/Lib/site-packages/pylint/reporters/ureports/text_writer.py94
-rw-r--r--venv/Lib/site-packages/pylint/testutils.py298
-rw-r--r--venv/Lib/site-packages/pylint/utils/__init__.py64
-rw-r--r--venv/Lib/site-packages/pylint/utils/__pycache__/__init__.cpython-37.pycbin0 -> 869 bytes
-rw-r--r--venv/Lib/site-packages/pylint/utils/__pycache__/ast_walker.cpython-37.pycbin0 -> 2078 bytes
-rw-r--r--venv/Lib/site-packages/pylint/utils/__pycache__/file_state.cpython-37.pycbin0 -> 3852 bytes
-rw-r--r--venv/Lib/site-packages/pylint/utils/__pycache__/utils.cpython-37.pycbin0 -> 10339 bytes
-rw-r--r--venv/Lib/site-packages/pylint/utils/ast_walker.py79
-rw-r--r--venv/Lib/site-packages/pylint/utils/file_state.py138
-rw-r--r--venv/Lib/site-packages/pylint/utils/utils.py371
146 files changed, 28669 insertions, 0 deletions
diff --git a/venv/Lib/site-packages/pylint/__init__.py b/venv/Lib/site-packages/pylint/__init__.py
new file mode 100644
index 0000000..8980938
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__init__.py
@@ -0,0 +1,43 @@
+# Copyright (c) 2008, 2012 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2014, 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import sys
+
+from pylint.__pkginfo__ import version as __version__
+from pylint.checkers.similar import Run as SimilarRun
+from pylint.epylint import Run as EpylintRun
+from pylint.lint import Run as PylintRun
+from pylint.pyreverse.main import Run as PyreverseRun
+
+
+def run_pylint():
+ """run pylint"""
+
+ try:
+ PylintRun(sys.argv[1:])
+ except KeyboardInterrupt:
+ sys.exit(1)
+
+
+def run_epylint():
+ """run pylint"""
+
+ EpylintRun()
+
+
+def run_pyreverse():
+ """run pyreverse"""
+
+ PyreverseRun(sys.argv[1:])
+
+
+def run_symilar():
+ """run symilar"""
+
+ SimilarRun(sys.argv[1:])
diff --git a/venv/Lib/site-packages/pylint/__main__.py b/venv/Lib/site-packages/pylint/__main__.py
new file mode 100644
index 0000000..e12309b
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__main__.py
@@ -0,0 +1,7 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+#!/usr/bin/env python
+import pylint
+
+pylint.run_pylint()
diff --git a/venv/Lib/site-packages/pylint/__pkginfo__.py b/venv/Lib/site-packages/pylint/__pkginfo__.py
new file mode 100644
index 0000000..68702f4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pkginfo__.py
@@ -0,0 +1,85 @@
+# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2010 Julien Jehannet <julien.jehannet@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Florian Bruhin <git@the-compiler.org>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2017-2018 Hugo <hugovk@users.noreply.github.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+# pylint: disable=redefined-builtin,invalid-name
+"""pylint packaging information"""
+
+from os.path import join
+
+# For an official release, use dev_version = None
+numversion = (2, 4, 4)
+dev_version = None
+
+version = ".".join(str(num) for num in numversion)
+if dev_version is not None:
+ version += "-dev" + str(dev_version)
+
+install_requires = ["astroid>=2.3.0,<2.4", "isort>=4.2.5,<5", "mccabe>=0.6,<0.7"]
+
+dependency_links = [] # type: ignore
+
+extras_require = {}
+extras_require[':sys_platform=="win32"'] = ["colorama"]
+
+license = "GPL"
+description = "python code static checker"
+web = "https://github.com/PyCQA/pylint"
+mailinglist = "mailto:code-quality@python.org"
+author = "Python Code Quality Authority"
+author_email = "code-quality@python.org"
+
+classifiers = [
+ "Development Status :: 6 - Mature",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: GNU General Public License (GPL)",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+ "Topic :: Software Development :: Debuggers",
+ "Topic :: Software Development :: Quality Assurance",
+ "Topic :: Software Development :: Testing",
+]
+
+
+long_desc = """\
+ Pylint is a Python source code analyzer which looks for programming
+ errors, helps enforcing a coding standard and sniffs for some code
+ smells (as defined in Martin Fowler's Refactoring book)
+ .
+ Pylint can be seen as another PyChecker since nearly all tests you
+ can do with PyChecker can also be done with Pylint. However, Pylint
+ offers some more features, like checking length of lines of code,
+ checking if variable names are well-formed according to your coding
+ standard, or checking if declared interfaces are truly implemented,
+ and much more.
+ .
+ Additionally, it is possible to write plugins to add your own checks.
+ .
+ Pylint is shipped with "pyreverse" (UML diagram generator)
+ and "symilar" (an independent similarities checker)."""
+
+scripts = [
+ join("bin", filename) for filename in ("pylint", "symilar", "epylint", "pyreverse")
+]
diff --git a/venv/Lib/site-packages/pylint/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..4a5176d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/__main__.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/__main__.cpython-37.pyc
new file mode 100644
index 0000000..06de374
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/__main__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/__pkginfo__.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/__pkginfo__.cpython-37.pyc
new file mode 100644
index 0000000..41da823
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/__pkginfo__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/config.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/config.cpython-37.pyc
new file mode 100644
index 0000000..0d3fde9
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/config.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/constants.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/constants.cpython-37.pyc
new file mode 100644
index 0000000..1d96028
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/constants.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/epylint.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/epylint.cpython-37.pyc
new file mode 100644
index 0000000..1b8630e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/epylint.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/exceptions.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 0000000..9766c27
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/graph.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/graph.cpython-37.pyc
new file mode 100644
index 0000000..3a0dd39
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/graph.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/interfaces.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/interfaces.cpython-37.pyc
new file mode 100644
index 0000000..53b4224
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/interfaces.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/lint.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/lint.cpython-37.pyc
new file mode 100644
index 0000000..ed84248
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/lint.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/__pycache__/testutils.cpython-37.pyc b/venv/Lib/site-packages/pylint/__pycache__/testutils.cpython-37.pyc
new file mode 100644
index 0000000..8db991c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/__pycache__/testutils.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__init__.py b/venv/Lib/site-packages/pylint/checkers/__init__.py
new file mode 100644
index 0000000..9c6306f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__init__.py
@@ -0,0 +1,64 @@
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
+# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""utilities methods and classes for checkers
+
+Base id of standard checkers (used in msg and report ids):
+01: base
+02: classes
+03: format
+04: import
+05: misc
+06: variables
+07: exceptions
+08: similar
+09: design_analysis
+10: newstyle
+11: typecheck
+12: logging
+13: string_format
+14: string_constant
+15: stdlib
+16: python3
+17: refactoring
+18-50: not yet used: reserved for future internal checkers.
+51-99: perhaps used: reserved for external checkers
+
+The raw_metrics checker has no number associated since it doesn't emit any
+messages nor reports. XXX not true, emit a 07 report !
+
+"""
+
+from pylint.checkers.base_checker import BaseChecker, BaseTokenChecker
+from pylint.utils import register_plugins
+
+
+def table_lines_from_stats(stats, _, columns):
+ """get values listed in <columns> from <stats> and <old_stats>,
+ and return a formated list of values, designed to be given to a
+ ureport.Table object
+ """
+ lines = []
+ for m_type in columns:
+ new = stats[m_type]
+ new = "%.3f" % new if isinstance(new, float) else str(new)
+ lines += (m_type.replace("_", " "), new, "NC", "NC")
+ return lines
+
+
+def initialize(linter):
+ """initialize linter with checkers in this package """
+ register_plugins(linter, __path__[0])
+
+
+__all__ = ("BaseChecker", "BaseTokenChecker", "initialize")
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..3782086
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pyc
new file mode 100644
index 0000000..ea14658
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pyc
new file mode 100644
index 0000000..aaa3e51
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pyc
new file mode 100644
index 0000000..e4f8221
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pyc
new file mode 100644
index 0000000..d0f58b4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pyc
new file mode 100644
index 0000000..647b5aa
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 0000000..5371c29
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pyc
new file mode 100644
index 0000000..8a6a0c0
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pyc
new file mode 100644
index 0000000..f8b924d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pyc
new file mode 100644
index 0000000..90cc06e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pyc
new file mode 100644
index 0000000..9f449d4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pyc
new file mode 100644
index 0000000..e409591
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pyc
new file mode 100644
index 0000000..b405dd3
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pyc
new file mode 100644
index 0000000..fdf16f6
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pyc
new file mode 100644
index 0000000..f65c6b5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pyc
new file mode 100644
index 0000000..09b77e5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pyc
new file mode 100644
index 0000000..dbf748c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pyc
new file mode 100644
index 0000000..97576df
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pyc
new file mode 100644
index 0000000..0aab77c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pyc
new file mode 100644
index 0000000..cc0c9b4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 0000000..90e8ff1
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pyc b/venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pyc
new file mode 100644
index 0000000..943ffbd
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/checkers/async.py b/venv/Lib/site-packages/pylint/checkers/async.py
new file mode 100644
index 0000000..c33071e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/async.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for anything related to the async protocol (PEP 492)."""
+
+import sys
+
+import astroid
+from astroid import bases, exceptions
+
+from pylint import checkers, interfaces, utils
+from pylint.checkers import utils as checker_utils
+from pylint.checkers.utils import decorated_with
+
+
+class AsyncChecker(checkers.BaseChecker):
+    """Checker for the async protocol (PEP 492): flags `yield` inside async
+    functions (E1700) and `async with` on objects that do not implement
+    `__aenter__`/`__aexit__` (E1701)."""
+
+    __implements__ = interfaces.IAstroidChecker
+    name = "async"
+    msgs = {
+        "E1700": (
+            "Yield inside async function",
+            "yield-inside-async-function",
+            "Used when an `yield` or `yield from` statement is "
+            "found inside an async function.",
+            {"minversion": (3, 5)},
+        ),
+        "E1701": (
+            "Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
+            "not-async-context-manager",
+            "Used when an async context manager is used with an object "
+            "that does not implement the async context management protocol.",
+            {"minversion": (3, 5)},
+        ),
+    }
+
+    def open(self):
+        """Cache per-run options before any module is visited."""
+        # Honour the global ignore-mixin-members option when deciding whether
+        # to report not-async-context-manager on *Mixin classes.
+        self._ignore_mixin_members = utils.get_global_option(
+            self, "ignore-mixin-members"
+        )
+        self._async_generators = ["contextlib.asynccontextmanager"]
+
+    @checker_utils.check_messages("yield-inside-async-function")
+    def visit_asyncfunctiondef(self, node):
+        # On 3.5 any yield in an async def is an error; on later versions
+        # only `yield from` is (plain yields define async generators).
+        for child in node.nodes_of_class(astroid.Yield):
+            if child.scope() is node and (
+                sys.version_info[:2] == (3, 5) or isinstance(child, astroid.YieldFrom)
+            ):
+                self.add_message("yield-inside-async-function", node=child)
+
+    @checker_utils.check_messages("not-async-context-manager")
+    def visit_asyncwith(self, node):
+        for ctx_mgr, _ in node.items:
+            inferred = checker_utils.safe_infer(ctx_mgr)
+            if inferred is None or inferred is astroid.Uninferable:
+                continue
+
+            if isinstance(inferred, bases.AsyncGenerator):
+                # Check if we are dealing with a function decorated
+                # with contextlib.asynccontextmanager.
+                if decorated_with(inferred.parent, self._async_generators):
+                    continue
+            else:
+                try:
+                    inferred.getattr("__aenter__")
+                    inferred.getattr("__aexit__")
+                except exceptions.NotFoundError:
+                    if isinstance(inferred, astroid.Instance):
+                        # If we do not know the bases of this class,
+                        # just skip it.
+                        if not checker_utils.has_known_bases(inferred):
+                            continue
+                        # Just ignore mixin classes.
+                        if self._ignore_mixin_members:
+                            if inferred.name[-5:].lower() == "mixin":
+                                continue
+                else:
+                    continue
+
+            self.add_message(
+                "not-async-context-manager", node=node, args=(inferred.name,)
+            )
+
+
+def register(linter):
+    """Required method to auto-register this checker with *linter*.
+
+    Called by pylint's plugin loader when the module is loaded.
+    """
+    linter.register_checker(AsyncChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/base.py b/venv/Lib/site-packages/pylint/checkers/base.py
new file mode 100644
index 0000000..c94676e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/base.py
@@ -0,0 +1,2333 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2016 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Nick Bastin <nick.bastin@gmail.com>
+# Copyright (c) 2015 Michael Kefeder <oss@multiwave.ch>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Stephane Wirtel <stephane@wirtel.be>
+# Copyright (c) 2015 Cosmin Poieana <cmin@ropython.org>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Elias Dorneles <eliasdorneles@gmail.com>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016 Yannack <yannack@users.noreply.github.com>
+# Copyright (c) 2016 Alex Jurkiewicz <alex@jurkiewi.cz>
+# Copyright (c) 2017 Jacques Kvam <jwkvam@gmail.com>
+# Copyright (c) 2017 ttenhoeve-aa <ttenhoeve@appannie.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+# Copyright (c) 2018 Steven M. Vascellaro <svascellaro@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Chris Lamb <chris@chris-lamb.co.uk>
+# Copyright (c) 2018 glmdgrielson <32415403+glmdgrielson@users.noreply.github.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""basic checker for Python code"""
+
+import builtins
+import collections
+import itertools
+import re
+import sys
+from typing import Pattern
+
+import astroid
+import astroid.bases
+import astroid.scoped_nodes
+from astroid.arguments import CallSite
+
+import pylint.utils as lint_utils
+from pylint import checkers, exceptions, interfaces
+from pylint.checkers import utils
+from pylint.checkers.utils import is_property_setter_or_deleter
+from pylint.reporters.ureports import nodes as reporter_nodes
+
+
+class NamingStyle:
+    """Base class mapping a name category (module, const, class, ...) to the
+    regular expression a conforming name must match.
+
+    Subclasses fill in the class-level ``*_RGX`` patterns for one naming
+    convention (snake_case, camelCase, ...).
+    """
+
+    # It may seem counterintuitive that single naming style
+    # has multiple "accepted" forms of regular expressions,
+    # but we need to special-case stuff like dunder names
+    # in method names.
+    CLASS_NAME_RGX = None  # type: Pattern[str]
+    MOD_NAME_RGX = None  # type: Pattern[str]
+    CONST_NAME_RGX = None  # type: Pattern[str]
+    COMP_VAR_RGX = None  # type: Pattern[str]
+    DEFAULT_NAME_RGX = None  # type: Pattern[str]
+    CLASS_ATTRIBUTE_RGX = None  # type: Pattern[str]
+
+    @classmethod
+    def get_regex(cls, name_type):
+        # Raises KeyError for an unknown name category.
+        return {
+            "module": cls.MOD_NAME_RGX,
+            "const": cls.CONST_NAME_RGX,
+            "class": cls.CLASS_NAME_RGX,
+            "function": cls.DEFAULT_NAME_RGX,
+            "method": cls.DEFAULT_NAME_RGX,
+            "attr": cls.DEFAULT_NAME_RGX,
+            "argument": cls.DEFAULT_NAME_RGX,
+            "variable": cls.DEFAULT_NAME_RGX,
+            "class_attribute": cls.CLASS_ATTRIBUTE_RGX,
+            "inlinevar": cls.COMP_VAR_RGX,
+        }[name_type]
+
+
+class SnakeCaseStyle(NamingStyle):
+    """Regex rules for snake_case naming style."""
+
+    CLASS_NAME_RGX = re.compile("[a-z_][a-z0-9_]+$")
+    MOD_NAME_RGX = re.compile("([a-z_][a-z0-9_]*)$")
+    # Constants also allow dunder names (__version__, __all__, ...).
+    CONST_NAME_RGX = re.compile("(([a-z_][a-z0-9_]*)|(__.*__))$")
+    COMP_VAR_RGX = re.compile("[a-z_][a-z0-9_]*$")
+    # Default names: >=3 chars, or underscore-prefixed, or dunder methods.
+    DEFAULT_NAME_RGX = re.compile(
+        "(([a-z_][a-z0-9_]{2,})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$"
+    )
+    CLASS_ATTRIBUTE_RGX = re.compile(r"(([a-z_][a-z0-9_]{2,}|(__.*__)))$")
+
+
+class CamelCaseStyle(NamingStyle):
+    """Regex rules for camelCase naming style."""
+
+    CLASS_NAME_RGX = re.compile("[a-z_][a-zA-Z0-9]+$")
+    MOD_NAME_RGX = re.compile("([a-z_][a-zA-Z0-9]*)$")
+    # Constants also allow dunder names.
+    CONST_NAME_RGX = re.compile("(([a-z_][A-Za-z0-9]*)|(__.*__))$")
+    COMP_VAR_RGX = re.compile("[a-z_][A-Za-z0-9]*$")
+    DEFAULT_NAME_RGX = re.compile("(([a-z_][a-zA-Z0-9]{2,})|(__[a-z][a-zA-Z0-9_]+__))$")
+    CLASS_ATTRIBUTE_RGX = re.compile(r"([a-z_][A-Za-z0-9]{2,}|(__.*__))$")
+
+
+class PascalCaseStyle(NamingStyle):
+    """Regex rules for PascalCase naming style."""
+
+    CLASS_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
+    MOD_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
+    # Constants also allow dunder names.
+    CONST_NAME_RGX = re.compile("(([A-Z_][A-Za-z0-9]*)|(__.*__))$")
+    COMP_VAR_RGX = re.compile("[A-Z_][a-zA-Z0-9]+$")
+    # Default names: >=3 chars, or dunder methods.
+    DEFAULT_NAME_RGX = re.compile("[A-Z_][a-zA-Z0-9]{2,}$|(__[a-z][a-zA-Z0-9_]+__)$")
+    CLASS_ATTRIBUTE_RGX = re.compile("[A-Z_][a-zA-Z0-9]{2,}$")
+
+
+class UpperCaseStyle(NamingStyle):
+    """Regex rules for UPPER_CASE naming style."""
+
+    CLASS_NAME_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
+    MOD_NAME_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
+    # Constants also allow dunder names.
+    CONST_NAME_RGX = re.compile("(([A-Z_][A-Z0-9_]*)|(__.*__))$")
+    COMP_VAR_RGX = re.compile("[A-Z_][A-Z0-9_]+$")
+    # Default names: >=3 chars, or dunder methods (which stay lowercase).
+    DEFAULT_NAME_RGX = re.compile("([A-Z_][A-Z0-9_]{2,})|(__[a-z][a-zA-Z0-9_]+__)$")
+    CLASS_ATTRIBUTE_RGX = re.compile("[A-Z_][A-Z0-9_]{2,}$")
+
+
+class AnyStyle(NamingStyle):
+    """Naming style that accepts every name: every category matches ``.*``."""
+
+    @classmethod
+    def get_regex(cls, name_type):
+        return re.compile(".*")
+
+
+# Registry mapping the option value (e.g. "snake_case") to its style class.
+NAMING_STYLES = {
+    "snake_case": SnakeCaseStyle,
+    "camelCase": CamelCaseStyle,
+    "PascalCase": PascalCaseStyle,
+    "UPPER_CASE": UpperCaseStyle,
+    "any": AnyStyle,
+}
+
+# do not require a doc string on private/system methods
+NO_REQUIRED_DOC_RGX = re.compile("^_")
+REVERSED_PROTOCOL_METHOD = "__reversed__"
+SEQUENCE_PROTOCOL_METHODS = ("__getitem__", "__len__")
+# Either method group makes an object acceptable to reversed().
+REVERSED_METHODS = (SEQUENCE_PROTOCOL_METHODS, (REVERSED_PROTOCOL_METHOD,))
+TYPECHECK_COMPARISON_OPERATORS = frozenset(("is", "is not", "==", "!=", "in", "not in"))
+LITERAL_NODE_TYPES = (astroid.Const, astroid.Dict, astroid.List, astroid.Set)
+UNITTEST_CASE = "unittest.case"
+BUILTINS = builtins.__name__
+TYPE_QNAME = "%s.type" % BUILTINS
+# "_py_abc.ABCMeta" is the pure-Python ABCMeta used from Python 3.7 on.
+ABC_METACLASSES = {"_py_abc.ABCMeta", "abc.ABCMeta"}
+
+# Name categories that are always consistent with all naming conventions.
+EXEMPT_NAME_CATEGORIES = {"exempt", "ignore"}
+
+# A mapping from builtin-qname -> symbol, to be used when generating messages
+# about dangerous default values as arguments
+DEFAULT_ARGUMENT_SYMBOLS = dict(
+    zip(
+        [".".join([BUILTINS, x]) for x in ("set", "dict", "list")],
+        ["set()", "{}", "[]"],
+    )
+)
+# Mirror image of each ordering operator (operands swapped).
+REVERSED_COMPS = {"<": ">", "<=": ">=", ">": "<", ">=": "<="}
+COMPARISON_OPERATORS = frozenset(("==", "!=", "<", ">", "<=", ">="))
+# List of methods which can be redefined
+REDEFINABLE_METHODS = frozenset(("__module__",))
+TYPING_FORWARD_REF_QNAME = "typing.ForwardRef"
+
+
+def _redefines_import(node):
+    """ Detect that the given node (AssignName) is inside an
+    exception handler and redefines an import from the tryexcept body.
+    Returns True if the node redefines an import, False otherwise.
+    """
+    current = node
+    # Climb to the statement directly under an ExceptHandler, if any.
+    while current and not isinstance(current.parent, astroid.ExceptHandler):
+        current = current.parent
+    # Only handlers that catch ImportError qualify.
+    if not current or not utils.error_of_type(current.parent, ImportError):
+        return False
+    try_block = current.parent.parent
+    for import_node in try_block.nodes_of_class((astroid.ImportFrom, astroid.Import)):
+        for name, alias in import_node.names:
+            if alias:
+                # `import x as y` binds y, not x.
+                if alias == node.name:
+                    return True
+            elif name == node.name:
+                return True
+    return False
+
+
+def in_loop(node):
+ """return True if the node is inside a kind of for loop"""
+ parent = node.parent
+ while parent is not None:
+ if isinstance(
+ parent,
+ (
+ astroid.For,
+ astroid.ListComp,
+ astroid.SetComp,
+ astroid.DictComp,
+ astroid.GeneratorExp,
+ ),
+ ):
+ return True
+ parent = parent.parent
+ return False
+
+
+def in_nested_list(nested_list, obj):
+ """return true if the object is an element of <nested_list> or of a nested
+ list
+ """
+ for elmt in nested_list:
+ if isinstance(elmt, (list, tuple)):
+ if in_nested_list(elmt, obj):
+ return True
+ elif elmt == obj:
+ return True
+ return False
+
+
+def _get_break_loop_node(break_node):
+    """
+    Returns the loop node that holds the break node in arguments.
+
+    Args:
+        break_node (astroid.Break): the break node of interest.
+
+    Returns:
+        astroid.For or astroid.While: the loop node holding the break node.
+    """
+    loop_nodes = (astroid.For, astroid.While)
+    parent = break_node.parent
+    # Climb until we hit a loop whose *body* (not its else clause) contains
+    # the break; a break inside `orelse` belongs to an outer loop.
+    while not isinstance(parent, loop_nodes) or break_node in getattr(
+        parent, "orelse", []
+    ):
+        break_node = parent
+        parent = parent.parent
+        if parent is None:
+            # No enclosing loop; returns None in that case.
+            break
+    return parent
+
+
+def _loop_exits_early(loop):
+    """
+    Return True if the loop may end via a break statement.
+
+    Args:
+        loop (astroid.For, astroid.While): the loop node inspected.
+
+    Returns:
+        bool: True if the loop may end via a break statement, False otherwise.
+    """
+    loop_nodes = (astroid.For, astroid.While)
+    definition_nodes = (astroid.FunctionDef, astroid.ClassDef)
+    # Loops nested inside this one (nested functions/classes are skipped).
+    inner_loop_nodes = [
+        _node
+        for _node in loop.nodes_of_class(loop_nodes, skip_klass=definition_nodes)
+        if _node != loop
+    ]
+    # Only breaks that target *this* loop count, not those of inner loops.
+    return any(
+        _node
+        for _node in loop.nodes_of_class(astroid.Break, skip_klass=definition_nodes)
+        if _get_break_loop_node(_node) not in inner_loop_nodes
+    )
+
+
+def _is_multi_naming_match(match, node_type, confidence):
+ return (
+ match is not None
+ and match.lastgroup is not None
+ and match.lastgroup not in EXEMPT_NAME_CATEGORIES
+ and (node_type != "method" or confidence != interfaces.INFERENCE_FAILURE)
+ )
+
+
+# Qualified name of the builtin `property` class.
+BUILTIN_PROPERTY = "builtins.property"
+
+
+def _get_properties(config):
+ """Returns a tuple of property classes and names.
+
+ Property classes are fully qualified, such as 'abc.abstractproperty' and
+ property names are the actual names, such as 'abstract_property'.
+ """
+ property_classes = {BUILTIN_PROPERTY}
+ property_names = set() # Not returning 'property', it has its own check.
+ if config is not None:
+ property_classes.update(config.property_classes)
+ property_names.update(
+ (prop.rsplit(".", 1)[-1] for prop in config.property_classes)
+ )
+ return property_classes, property_names
+
+
+def _determine_function_name_type(node, config=None):
+    """Determine the name type whose regex the a function's name should match.
+
+    :param node: A function node.
+    :type node: astroid.node_classes.NodeNG
+    :param config: Configuration from which to pull additional property classes.
+    :type config: :class:`optparse.Values`
+
+    :returns: One of ('function', 'method', 'attr')
+    :rtype: str
+    """
+    property_classes, property_names = _get_properties(config)
+    if not node.is_method():
+        return "function"
+
+    if is_property_setter_or_deleter(node):
+        # If the function is decorated using the prop_method.{setter,getter}
+        # form, treat it like an attribute as well.
+        return "attr"
+
+    if node.decorators:
+        decorators = node.decorators.nodes
+    else:
+        decorators = []
+    for decorator in decorators:
+        # If the function is a property (decorated with @property
+        # or @abc.abstractproperty), the name type is 'attr'.
+        if isinstance(decorator, astroid.Name) or (
+            isinstance(decorator, astroid.Attribute)
+            and decorator.attrname in property_names
+        ):
+            # Inference confirms whether the decorator really is a property.
+            inferred = utils.safe_infer(decorator)
+            if inferred and inferred.qname() in property_classes:
+                return "attr"
+    return "method"
+
+
+def _has_abstract_methods(node):
+ """
+ Determine if the given `node` has abstract methods.
+
+ The methods should be made abstract by decorating them
+ with `abc` decorators.
+ """
+ return len(utils.unimplemented_abstract_methods(node)) > 0
+
+
+def report_by_type_stats(sect, stats, _):
+    """make a report of
+
+    * percentage of different types documented
+    * percentage of different types with a bad name
+
+    Appends a 6-column table to *sect*; raises EmptyReportError when the
+    expected counters are missing from *stats*.
+    """
+    # percentage of different types documented and/or with a bad name
+    nice_stats = {}
+    for node_type in ("module", "class", "method", "function"):
+        try:
+            total = stats[node_type]
+        except KeyError:
+            raise exceptions.EmptyReportError()
+        nice_stats[node_type] = {}
+        if total != 0:
+            # "NC" (not computed) is used whenever a counter is absent.
+            try:
+                documented = total - stats["undocumented_" + node_type]
+                percent = (documented * 100.0) / total
+                nice_stats[node_type]["percent_documented"] = "%.2f" % percent
+            except KeyError:
+                nice_stats[node_type]["percent_documented"] = "NC"
+            try:
+                percent = (stats["badname_" + node_type] * 100.0) / total
+                nice_stats[node_type]["percent_badname"] = "%.2f" % percent
+            except KeyError:
+                nice_stats[node_type]["percent_badname"] = "NC"
+    # Flat tuple of cells: header row first, then one row per node type.
+    lines = ("type", "number", "old number", "difference", "%documented", "%badname")
+    for node_type in ("module", "class", "method", "function"):
+        new = stats[node_type]
+        lines += (
+            node_type,
+            str(new),
+            "NC",
+            "NC",
+            nice_stats[node_type].get("percent_documented", "0"),
+            nice_stats[node_type].get("percent_badname", "0"),
+        )
+    sect.append(reporter_nodes.Table(children=lines, cols=6, rheaders=1))
+
+
+def redefined_by_decorator(node):
+ """return True if the object is a method redefined via decorator.
+
+ For example:
+ @property
+ def x(self): return self._x
+ @x.setter
+ def x(self, value): self._x = value
+ """
+ if node.decorators:
+ for decorator in node.decorators.nodes:
+ if (
+ isinstance(decorator, astroid.Attribute)
+ and getattr(decorator.expr, "name", None) == node.name
+ ):
+ return True
+ return False
+
+
+class _BasicChecker(checkers.BaseChecker):
+    """Shared base for the "basic" checker family: fixes the checker name
+    and the astroid interface so subclasses only declare messages/visitors."""
+
+    __implements__ = interfaces.IAstroidChecker
+    name = "basic"
+
+
+class BasicErrorChecker(_BasicChecker):
+    """Checker for outright errors (E01xx) and the useless-else-on-loop
+    warning: redefined functions/classes, misplaced return/yield/break/
+    continue, bad starred assignments, nonlocal/global misuse and
+    instantiation of abstract classes."""
+
+    msgs = {
+        "E0100": (
+            "__init__ method is a generator",
+            "init-is-generator",
+            "Used when the special class method __init__ is turned into a "
+            "generator by a yield in its body.",
+        ),
+        "E0101": (
+            "Explicit return in __init__",
+            "return-in-init",
+            "Used when the special class method __init__ has an explicit "
+            "return value.",
+        ),
+        "E0102": (
+            "%s already defined line %s",
+            "function-redefined",
+            "Used when a function / class / method is redefined.",
+        ),
+        "E0103": (
+            "%r not properly in loop",
+            "not-in-loop",
+            "Used when break or continue keywords are used outside a loop.",
+        ),
+        "E0104": (
+            "Return outside function",
+            "return-outside-function",
+            'Used when a "return" statement is found outside a function or method.',
+        ),
+        "E0105": (
+            "Yield outside function",
+            "yield-outside-function",
+            'Used when a "yield" statement is found outside a function or method.',
+        ),
+        "E0106": (
+            "Return with argument inside generator",
+            "return-arg-in-generator",
+            'Used when a "return" statement with an argument is found '
+            "outside in a generator function or method (e.g. with some "
+            '"yield" statements).',
+            {"maxversion": (3, 3)},
+        ),
+        "E0107": (
+            "Use of the non-existent %s operator",
+            "nonexistent-operator",
+            "Used when you attempt to use the C-style pre-increment or "
+            "pre-decrement operator -- and ++, which doesn't exist in Python.",
+        ),
+        "E0108": (
+            "Duplicate argument name %s in function definition",
+            "duplicate-argument-name",
+            "Duplicate argument names in function definitions are syntax errors.",
+        ),
+        "E0110": (
+            "Abstract class %r with abstract methods instantiated",
+            "abstract-class-instantiated",
+            "Used when an abstract class with `abc.ABCMeta` as metaclass "
+            "has abstract methods and is instantiated.",
+        ),
+        "W0120": (
+            "Else clause on loop without a break statement",
+            "useless-else-on-loop",
+            "Loops should only have an else clause if they can exit early "
+            "with a break statement, otherwise the statements under else "
+            "should be on the same scope as the loop itself.",
+        ),
+        "E0112": (
+            "More than one starred expression in assignment",
+            "too-many-star-expressions",
+            "Emitted when there are more than one starred "
+            "expressions (`*x`) in an assignment. This is a SyntaxError.",
+        ),
+        "E0113": (
+            "Starred assignment target must be in a list or tuple",
+            "invalid-star-assignment-target",
+            "Emitted when a star expression is used as a starred assignment target.",
+        ),
+        "E0114": (
+            "Can use starred expression only in assignment target",
+            "star-needs-assignment-target",
+            "Emitted when a star expression is not used in an assignment target.",
+        ),
+        "E0115": (
+            "Name %r is nonlocal and global",
+            "nonlocal-and-global",
+            "Emitted when a name is both nonlocal and global.",
+        ),
+        "E0116": (
+            "'continue' not supported inside 'finally' clause",
+            "continue-in-finally",
+            "Emitted when the `continue` keyword is found "
+            "inside a finally clause, which is a SyntaxError.",
+        ),
+        "E0117": (
+            "nonlocal name %s found without binding",
+            "nonlocal-without-binding",
+            "Emitted when a nonlocal variable does not have an attached "
+            "name somewhere in the parent scopes",
+        ),
+        "E0118": (
+            "Name %r is used prior to global declaration",
+            "used-prior-global-declaration",
+            "Emitted when a name is used prior a global declaration, "
+            "which results in an error since Python 3.6.",
+            {"minversion": (3, 6)},
+        ),
+    }
+
+    @utils.check_messages("function-redefined")
+    def visit_classdef(self, node):
+        self._check_redefinition("class", node)
+
+    def _too_many_starred_for_tuple(self, assign_tuple):
+        """Return True if the assignment tuple holds more than one starred
+        element.  Note: recursion descends into (and returns for) the first
+        nested tuple encountered."""
+        starred_count = 0
+        for elem in assign_tuple.itered():
+            if isinstance(elem, astroid.Tuple):
+                return self._too_many_starred_for_tuple(elem)
+            if isinstance(elem, astroid.Starred):
+                starred_count += 1
+        return starred_count > 1
+
+    @utils.check_messages("too-many-star-expressions", "invalid-star-assignment-target")
+    def visit_assign(self, node):
+        # Check *a, *b = ...
+        assign_target = node.targets[0]
+        # Check *a = b
+        if isinstance(node.targets[0], astroid.Starred):
+            self.add_message("invalid-star-assignment-target", node=node)
+
+        if not isinstance(assign_target, astroid.Tuple):
+            return
+        if self._too_many_starred_for_tuple(assign_target):
+            self.add_message("too-many-star-expressions", node=node)
+
+    @utils.check_messages("star-needs-assignment-target")
+    def visit_starred(self, node):
+        """Check that a Starred expression is used in an assignment target."""
+        if isinstance(node.parent, astroid.Call):
+            # f(*args) is converted to Call(args=[Starred]), so ignore
+            # them for this check.
+            return
+        if isinstance(
+            node.parent, (astroid.List, astroid.Tuple, astroid.Set, astroid.Dict)
+        ):
+            # PEP 448 unpacking.
+            return
+
+        stmt = node.statement()
+        if not isinstance(stmt, astroid.Assign):
+            return
+
+        # Starred on the right-hand side of an assignment is the error.
+        if stmt.value is node or stmt.value.parent_of(node):
+            self.add_message("star-needs-assignment-target", node=node)
+
+    @utils.check_messages(
+        "init-is-generator",
+        "return-in-init",
+        "function-redefined",
+        "return-arg-in-generator",
+        "duplicate-argument-name",
+        "nonlocal-and-global",
+        "used-prior-global-declaration",
+    )
+    def visit_functiondef(self, node):
+        self._check_nonlocal_and_global(node)
+        self._check_name_used_prior_global(node)
+        # Decorator-based (@x.setter) and singledispatch redefinitions are
+        # legitimate, so they are excluded from the redefinition check.
+        if not redefined_by_decorator(
+            node
+        ) and not utils.is_registered_in_singledispatch_function(node):
+            self._check_redefinition(node.is_method() and "method" or "function", node)
+        # checks for max returns, branch, return in __init__
+        returns = node.nodes_of_class(
+            astroid.Return, skip_klass=(astroid.FunctionDef, astroid.ClassDef)
+        )
+        if node.is_method() and node.name == "__init__":
+            if node.is_generator():
+                self.add_message("init-is-generator", node=node)
+            else:
+                values = [r.value for r in returns]
+                # Are we returning anything but None from constructors
+                if any(v for v in values if not utils.is_none(v)):
+                    self.add_message("return-in-init", node=node)
+        # Check for duplicate names by clustering args with same name for detailed report
+        arg_clusters = collections.defaultdict(list)
+        arguments = filter(None, [node.args.args, node.args.kwonlyargs])
+
+        for arg in itertools.chain.from_iterable(arguments):
+            arg_clusters[arg.name].append(arg)
+
+        # provide detailed report about each repeated argument
+        for argument_duplicates in arg_clusters.values():
+            if len(argument_duplicates) != 1:
+                for argument in argument_duplicates:
+                    self.add_message(
+                        "duplicate-argument-name",
+                        line=argument.lineno,
+                        node=argument,
+                        args=(argument.name,),
+                    )
+
+    # Async functions get the very same checks.
+    visit_asyncfunctiondef = visit_functiondef
+
+    def _check_name_used_prior_global(self, node):
+        """Flag names referenced before their `global` declaration in *node*."""
+
+        scope_globals = {
+            name: child
+            for child in node.nodes_of_class(astroid.Global)
+            for name in child.names
+            if child.scope() is node
+        }
+
+        if not scope_globals:
+            return
+
+        for node_name in node.nodes_of_class(astroid.Name):
+            if node_name.scope() is not node:
+                continue
+
+            name = node_name.name
+            corresponding_global = scope_globals.get(name)
+            if not corresponding_global:
+                continue
+
+            global_lineno = corresponding_global.fromlineno
+            if global_lineno and global_lineno > node_name.fromlineno:
+                self.add_message(
+                    "used-prior-global-declaration", node=node_name, args=(name,)
+                )
+
+    def _check_nonlocal_and_global(self, node):
+        """Check for names declared both nonlocal and global in *node*."""
+
+        def same_scope(current):
+            return current.scope() is node
+
+        from_iter = itertools.chain.from_iterable
+        nonlocals = set(
+            from_iter(
+                child.names
+                for child in node.nodes_of_class(astroid.Nonlocal)
+                if same_scope(child)
+            )
+        )
+
+        # No nonlocals means no possible conflict; skip the global scan.
+        if not nonlocals:
+            return
+
+        global_vars = set(
+            from_iter(
+                child.names
+                for child in node.nodes_of_class(astroid.Global)
+                if same_scope(child)
+            )
+        )
+        for name in nonlocals.intersection(global_vars):
+            self.add_message("nonlocal-and-global", args=(name,), node=node)
+
+    @utils.check_messages("return-outside-function")
+    def visit_return(self, node):
+        if not isinstance(node.frame(), astroid.FunctionDef):
+            self.add_message("return-outside-function", node=node)
+
+    @utils.check_messages("yield-outside-function")
+    def visit_yield(self, node):
+        self._check_yield_outside_func(node)
+
+    @utils.check_messages("yield-outside-function")
+    def visit_yieldfrom(self, node):
+        self._check_yield_outside_func(node)
+
+    @utils.check_messages("not-in-loop", "continue-in-finally")
+    def visit_continue(self, node):
+        self._check_in_loop(node, "continue")
+
+    @utils.check_messages("not-in-loop")
+    def visit_break(self, node):
+        self._check_in_loop(node, "break")
+
+    @utils.check_messages("useless-else-on-loop")
+    def visit_for(self, node):
+        self._check_else_on_loop(node)
+
+    @utils.check_messages("useless-else-on-loop")
+    def visit_while(self, node):
+        self._check_else_on_loop(node)
+
+    @utils.check_messages("nonexistent-operator")
+    def visit_unaryop(self, node):
+        """check use of the non-existent ++ and -- operators"""
+        # ++x / --x parse as nested unary ops with the same operator.
+        if (
+            (node.op in "+-")
+            and isinstance(node.operand, astroid.UnaryOp)
+            and (node.operand.op == node.op)
+        ):
+            self.add_message("nonexistent-operator", node=node, args=node.op * 2)
+
+    def _check_nonlocal_without_binding(self, node, name):
+        """Walk enclosing scopes looking for a binding for the nonlocal *name*."""
+        current_scope = node.scope()
+        while True:
+            # Reached the module scope without finding a binding.
+            if current_scope.parent is None:
+                break
+
+            if not isinstance(current_scope, (astroid.ClassDef, astroid.FunctionDef)):
+                self.add_message("nonlocal-without-binding", args=(name,), node=node)
+                return
+
+            if name not in current_scope.locals:
+                current_scope = current_scope.parent.scope()
+                continue
+
+            # Okay, found it.
+            return
+
+        if not isinstance(current_scope, astroid.FunctionDef):
+            self.add_message("nonlocal-without-binding", args=(name,), node=node)
+
+    @utils.check_messages("nonlocal-without-binding")
+    def visit_nonlocal(self, node):
+        for name in node.names:
+            self._check_nonlocal_without_binding(node, name)
+
+    @utils.check_messages("abstract-class-instantiated")
+    def visit_call(self, node):
+        """ Check instantiating abstract class with
+        abc.ABCMeta as metaclass.
+        """
+        try:
+            for inferred in node.func.infer():
+                self._check_inferred_class_is_abstract(inferred, node)
+        except astroid.InferenceError:
+            return
+
+    def _check_inferred_class_is_abstract(self, inferred, node):
+        """Emit abstract-class-instantiated if *inferred* is an abstract class."""
+        if not isinstance(inferred, astroid.ClassDef):
+            return
+
+        klass = utils.node_frame_class(node)
+        if klass is inferred:
+            # Don't emit the warning if the class is instantiated
+            # in its own body or if the call is not an instance
+            # creation. If the class is instantiated into its own
+            # body, we're expecting that it knows what it is doing.
+            return
+
+        # __init__ was called
+        abstract_methods = _has_abstract_methods(inferred)
+
+        if not abstract_methods:
+            return
+
+        metaclass = inferred.metaclass()
+
+        if metaclass is None:
+            # Python 3.4 has `abc.ABC`, which won't be detected
+            # by ClassNode.metaclass()
+            for ancestor in inferred.ancestors():
+                if ancestor.qname() == "abc.ABC":
+                    self.add_message(
+                        "abstract-class-instantiated", args=(inferred.name,), node=node
+                    )
+                    break
+
+            return
+
+        if metaclass.qname() in ABC_METACLASSES:
+            self.add_message(
+                "abstract-class-instantiated", args=(inferred.name,), node=node
+            )
+
+    def _check_yield_outside_func(self, node):
+        # Lambdas may legally contain yield expressions.
+        if not isinstance(node.frame(), (astroid.FunctionDef, astroid.Lambda)):
+            self.add_message("yield-outside-function", node=node)
+
+    def _check_else_on_loop(self, node):
+        """Check that any loop with an else clause has a break statement."""
+        if node.orelse and not _loop_exits_early(node):
+            self.add_message(
+                "useless-else-on-loop",
+                node=node,
+                # This is not optimal, but the line previous
+                # to the first statement in the else clause
+                # will usually be the one that contains the else:.
+                line=node.orelse[0].lineno - 1,
+            )
+
+    def _check_in_loop(self, node, node_name):
+        """check that a node is inside a for or while loop"""
+        _node = node.parent
+        while _node:
+            if isinstance(_node, (astroid.For, astroid.While)):
+                # Inside the loop body (not its else clause): fine.
+                if node not in _node.orelse:
+                    return
+
+            if isinstance(_node, (astroid.ClassDef, astroid.FunctionDef)):
+                break
+            if (
+                isinstance(_node, astroid.TryFinally)
+                and node in _node.finalbody
+                and isinstance(node, astroid.Continue)
+            ):
+                self.add_message("continue-in-finally", node=node)
+
+            _node = _node.parent
+
+        self.add_message("not-in-loop", node=node, args=node_name)
+
+    def _check_redefinition(self, redeftype, node):
+        """check for redefinition of a function / method / class name"""
+        parent_frame = node.parent.frame()
+
+        # Ignore function stubs created for type information
+        redefinitions = parent_frame.locals[node.name]
+        defined_self = next(
+            (local for local in redefinitions if not utils.is_overload_stub(local)),
+            node,
+        )
+        if defined_self is not node and not astroid.are_exclusive(node, defined_self):
+
+            # Additional checks for methods which are not considered
+            # redefined, since they are already part of the base API.
+            if (
+                isinstance(parent_frame, astroid.ClassDef)
+                and node.name in REDEFINABLE_METHODS
+            ):
+                return
+
+            if utils.is_overload_stub(node):
+                return
+
+            # Check if we have forward references for this node.
+            try:
+                redefinition_index = redefinitions.index(node)
+            except ValueError:
+                pass
+            else:
+                for redefinition in redefinitions[:redefinition_index]:
+                    inferred = utils.safe_infer(redefinition)
+                    if (
+                        inferred
+                        and isinstance(inferred, astroid.Instance)
+                        and inferred.qname() == TYPING_FORWARD_REF_QNAME
+                    ):
+                        return
+
+            # Names matching dummy-variables-rgx (e.g. `_`) may be rebound.
+            dummy_variables_rgx = lint_utils.get_global_option(
+                self, "dummy-variables-rgx", default=None
+            )
+            if dummy_variables_rgx and dummy_variables_rgx.match(node.name):
+                return
+            self.add_message(
+                "function-redefined",
+                node=node,
+                args=(redeftype, defined_self.fromlineno),
+            )
+
+
class BasicChecker(_BasicChecker):
    """checks for :
    * doc strings
    * number of arguments, local variables, branches, returns and statements in
    functions, methods
    * required module attributes
    * dangerous default values as arguments
    * redefinition of function / method / class
    * uses of the global statement
    """

    __implements__ = interfaces.IAstroidChecker

    name = "basic"
    # Message table: pylint id -> (display template, symbol, description).
    msgs = {
        "W0101": (
            "Unreachable code",
            "unreachable",
            'Used when there is some code behind a "return" or "raise" '
            "statement, which will never be accessed.",
        ),
        "W0102": (
            "Dangerous default value %s as argument",
            "dangerous-default-value",
            "Used when a mutable value as list or dictionary is detected in "
            "a default value for an argument.",
        ),
        "W0104": (
            "Statement seems to have no effect",
            "pointless-statement",
            "Used when a statement doesn't have (or at least seems to) any effect.",
        ),
        "W0105": (
            "String statement has no effect",
            "pointless-string-statement",
            "Used when a string is used as a statement (which of course "
            "has no effect). This is a particular case of W0104 with its "
            "own message so you can easily disable it if you're using "
            "those strings as documentation, instead of comments.",
        ),
        "W0106": (
            'Expression "%s" is assigned to nothing',
            "expression-not-assigned",
            "Used when an expression that is not a function call is assigned "
            "to nothing. Probably something else was intended.",
        ),
        "W0108": (
            "Lambda may not be necessary",
            "unnecessary-lambda",
            "Used when the body of a lambda expression is a function call "
            "on the same argument list as the lambda itself; such lambda "
            "expressions are in all but a few cases replaceable with the "
            "function being called in the body of the lambda.",
        ),
        "W0109": (
            "Duplicate key %r in dictionary",
            "duplicate-key",
            "Used when a dictionary expression binds the same key multiple times.",
        ),
        "W0122": (
            "Use of exec",
            "exec-used",
            'Used when you use the "exec" statement (function for Python '
            "3), to discourage its usage. That doesn't "
            "mean you cannot use it !",
        ),
        "W0123": (
            "Use of eval",
            "eval-used",
            'Used when you use the "eval" function, to discourage its '
            "usage. Consider using `ast.literal_eval` for safely evaluating "
            "strings containing Python expressions "
            "from untrusted sources. ",
        ),
        "W0150": (
            "%s statement in finally block may swallow exception",
            "lost-exception",
            "Used when a break or a return statement is found inside the "
            "finally clause of a try...finally block: the exceptions raised "
            "in the try clause will be silently swallowed instead of being "
            "re-raised.",
        ),
        "W0199": (
            "Assert called on a 2-item-tuple. Did you mean 'assert x,y'?",
            "assert-on-tuple",
            "A call of assert on a tuple will always evaluate to true if "
            "the tuple is not empty, and will always evaluate to false if "
            "it is.",
        ),
        "W0124": (
            'Following "as" with another context manager looks like a tuple.',
            "confusing-with-statement",
            "Emitted when a `with` statement component returns multiple values "
            "and uses name binding with `as` only for a part of those values, "
            "as in with ctx() as a, b. This can be misleading, since it's not "
            "clear if the context manager returns a tuple or if the node without "
            "a name binding is another context manager.",
        ),
        "W0125": (
            "Using a conditional statement with a constant value",
            "using-constant-test",
            "Emitted when a conditional statement (If or ternary if) "
            "uses a constant value for its test. This might not be what "
            "the user intended to do.",
        ),
        "W0126": (
            "Using a conditional statement with potentially wrong function or method call due to missing parentheses",
            "missing-parentheses-for-call-in-test",
            "Emitted when a conditional statement (If or ternary if) "
            "seems to wrongly call a function due to missing parentheses",
        ),
        "W0127": (
            "Assigning the same variable %r to itself",
            "self-assigning-variable",
            "Emitted when we detect that a variable is assigned to itself",
        ),
        "W0128": (
            "Redeclared variable %r in assignment",
            "redeclared-assigned-name",
            "Emitted when we detect that a variable was redeclared in the same assignment.",
        ),
        "E0111": (
            "The first reversed() argument is not a sequence",
            "bad-reversed-sequence",
            "Used when the first argument to reversed() builtin "
            "isn't a sequence (does not implement __reversed__, "
            "nor __getitem__ and __len__",
        ),
        "E0119": (
            "format function is not called on str",
            "misplaced-format-function",
            "Emitted when format function is not called on str object. "
            'e.g doing print("value: {}").format(123) instead of '
            'print("value: {}".format(123)). This might not be what the user '
            "intended to do.",
        ),
    }

    # Reports are (id, title, callback) triples rendered after a run.
    reports = (("RP0101", "Statistics by type", report_by_type_stats),)
+
+ def __init__(self, linter):
+ _BasicChecker.__init__(self, linter)
+ self.stats = None
+ self._tryfinallys = None
+
+ def open(self):
+ """initialize visit variables and statistics
+ """
+ self._tryfinallys = []
+ self.stats = self.linter.add_stats(module=0, function=0, method=0, class_=0)
+
    @utils.check_messages("using-constant-test", "missing-parentheses-for-call-in-test")
    def visit_if(self, node):
        """Check whether an ``if`` statement tests a constant value."""
        self._check_using_constant_test(node, node.test)
+
    @utils.check_messages("using-constant-test", "missing-parentheses-for-call-in-test")
    def visit_ifexp(self, node):
        """Check whether a ternary expression tests a constant value."""
        self._check_using_constant_test(node, node.test)
+
+ @utils.check_messages("using-constant-test", "missing-parentheses-for-call-in-test")
+ def visit_comprehension(self, node):
+ if node.ifs:
+ for if_test in node.ifs:
+ self._check_using_constant_test(node, if_test)
+
+ def _check_using_constant_test(self, node, test):
+ const_nodes = (
+ astroid.Module,
+ astroid.scoped_nodes.GeneratorExp,
+ astroid.Lambda,
+ astroid.FunctionDef,
+ astroid.ClassDef,
+ astroid.bases.Generator,
+ astroid.UnboundMethod,
+ astroid.BoundMethod,
+ astroid.Module,
+ )
+ structs = (astroid.Dict, astroid.Tuple, astroid.Set)
+
+ # These nodes are excepted, since they are not constant
+ # values, requiring a computation to happen.
+ except_nodes = (
+ astroid.Call,
+ astroid.BinOp,
+ astroid.BoolOp,
+ astroid.UnaryOp,
+ astroid.Subscript,
+ )
+ inferred = None
+ emit = isinstance(test, (astroid.Const,) + structs + const_nodes)
+ if not isinstance(test, except_nodes):
+ inferred = utils.safe_infer(test)
+
+ if emit:
+ self.add_message("using-constant-test", node=node)
+ elif isinstance(inferred, const_nodes):
+ # If the constant node is a FunctionDef or Lambda then
+ #  it may be a illicit function call due to missing parentheses
+ call_inferred = None
+ if isinstance(inferred, astroid.FunctionDef):
+ call_inferred = inferred.infer_call_result()
+ elif isinstance(inferred, astroid.Lambda):
+ call_inferred = inferred.infer_call_result(node)
+ if call_inferred:
+ try:
+ for inf_call in call_inferred:
+ if inf_call != astroid.Uninferable:
+ self.add_message(
+ "missing-parentheses-for-call-in-test", node=node
+ )
+ break
+ except astroid.InferenceError:
+ pass
+ self.add_message("using-constant-test", node=node)
+
    def visit_module(self, _):
        """check module name, docstring and required arguments
        """
        # Only the per-run module counter is updated here.
        self.stats["module"] += 1
+
    def visit_classdef(self, node):  # pylint: disable=unused-argument
        """check module name, docstring and redefinition
        increment branch counter
        """
        # Only the per-run class counter is updated here.
        self.stats["class"] += 1
+
    @utils.check_messages(
        "pointless-statement", "pointless-string-statement", "expression-not-assigned"
    )
    def visit_expr(self, node):
        """Check for various kind of statements without effect"""
        expr = node.value
        if isinstance(expr, astroid.Const) and isinstance(expr.value, str):
            # treat string statement in a separated message
            # Handle PEP-257 attribute docstrings.
            # An attribute docstring is defined as being a string right after
            # an assignment at the module level, class level or __init__ level.
            scope = expr.scope()
            if isinstance(
                scope, (astroid.ClassDef, astroid.Module, astroid.FunctionDef)
            ):
                # Strings at function level are only docstring-like in __init__.
                if isinstance(scope, astroid.FunctionDef) and scope.name != "__init__":
                    pass
                else:
                    sibling = expr.previous_sibling()
                    if (
                        sibling is not None
                        and sibling.scope() is scope
                        and isinstance(sibling, (astroid.Assign, astroid.AnnAssign))
                    ):
                        # A string directly after an assignment is treated as
                        # an attribute docstring and not reported.
                        return
            self.add_message("pointless-string-statement", node=node)
            return

        # Ignore if this is :
        # * a direct function call
        # * the unique child of a try/except body
        # * a yield statement
        # * an ellipsis (which can be used on Python 3 instead of pass)
        # warn W0106 if we have any underlying function call (we can't predict
        # side effects), else pointless-statement
        if (
            isinstance(
                expr, (astroid.Yield, astroid.Await, astroid.Ellipsis, astroid.Call)
            )
            or (
                isinstance(node.parent, astroid.TryExcept)
                and node.parent.body == [node]
            )
            or (isinstance(expr, astroid.Const) and expr.value is Ellipsis)
        ):
            return
        if any(expr.nodes_of_class(astroid.Call)):
            self.add_message(
                "expression-not-assigned", node=node, args=expr.as_string()
            )
        else:
            self.add_message("pointless-statement", node=node)
+
+ @staticmethod
+ def _filter_vararg(node, call_args):
+ # Return the arguments for the given call which are
+ # not passed as vararg.
+ for arg in call_args:
+ if isinstance(arg, astroid.Starred):
+ if (
+ isinstance(arg.value, astroid.Name)
+ and arg.value.name != node.args.vararg
+ ):
+ yield arg
+ else:
+ yield arg
+
+ @staticmethod
+ def _has_variadic_argument(args, variadic_name):
+ if not args:
+ return True
+ for arg in args:
+ if isinstance(arg.value, astroid.Name):
+ if arg.value.name != variadic_name:
+ return True
+ else:
+ return True
+ return False
+
    @utils.check_messages("unnecessary-lambda")
    def visit_lambda(self, node):
        """check whether or not the lambda is suspicious
        """
        # if the body of the lambda is a call expression with the same
        # argument list as the lambda itself, then the lambda is
        # possibly unnecessary and at least suspicious.
        if node.args.defaults:
            # If the arguments of the lambda include defaults, then a
            # judgment cannot be made because there is no way to check
            # that the defaults defined by the lambda are the same as
            # the defaults defined by the function called in the body
            # of the lambda.
            return
        call = node.body
        if not isinstance(call, astroid.Call):
            # The body of the lambda must be a function call expression
            # for the lambda to be unnecessary.
            return
        if isinstance(node.body.func, astroid.Attribute) and isinstance(
            node.body.func.expr, astroid.Call
        ):
            # Chained call, the intermediate call might
            # return something else (but we don't check that, yet).
            return

        call_site = CallSite.from_call(call)
        ordinary_args = list(node.args.args)
        new_call_args = list(self._filter_vararg(node, call.args))
        if node.args.kwarg:
            if self._has_variadic_argument(call.kwargs, node.args.kwarg):
                return

        if node.args.vararg:
            if self._has_variadic_argument(call.starargs, node.args.vararg):
                return
        elif call.starargs:
            return

        if call.keywords:
            # Look for additional keyword arguments that are not part
            # of the lambda's signature
            # NOTE(review): ``node.args.defaults`` holds default-value
            # expressions, which do not generally have a ``.name`` attribute;
            # this path looks reachable only when defaults is non-empty, yet
            # the early return above already excludes that — confirm.
            lambda_kwargs = {keyword.name for keyword in node.args.defaults}
            if len(lambda_kwargs) != len(call_site.keyword_arguments):
                # Different lengths, so probably not identical
                return
            if set(call_site.keyword_arguments).difference(lambda_kwargs):
                return

        # The "ordinary" arguments must be in a correspondence such that:
        # ordinary_args[i].name == call.args[i].name.
        if len(ordinary_args) != len(new_call_args):
            return
        for arg, passed_arg in zip(ordinary_args, new_call_args):
            if not isinstance(passed_arg, astroid.Name):
                return
            if arg.name != passed_arg.name:
                return

        self.add_message("unnecessary-lambda", line=node.fromlineno, node=node)
+
    @utils.check_messages("dangerous-default-value")
    def visit_functiondef(self, node):
        """check function name, docstring, arguments, redefinition,
        variable names, max locals
        """
        # Count the definition, then look for mutable default arguments.
        self.stats["method" if node.is_method() else "function"] += 1
        self._check_dangerous_default(node)

    # Async functions get identical treatment.
    visit_asyncfunctiondef = visit_functiondef
+
+ def _check_dangerous_default(self, node):
+ # check for dangerous default values as arguments
+ is_iterable = lambda n: isinstance(n, (astroid.List, astroid.Set, astroid.Dict))
+ for default in node.args.defaults:
+ try:
+ value = next(default.infer())
+ except astroid.InferenceError:
+ continue
+
+ if (
+ isinstance(value, astroid.Instance)
+ and value.qname() in DEFAULT_ARGUMENT_SYMBOLS
+ ):
+
+ if value is default:
+ msg = DEFAULT_ARGUMENT_SYMBOLS[value.qname()]
+ elif isinstance(value, astroid.Instance) or is_iterable(value):
+ # We are here in the following situation(s):
+ # * a dict/set/list/tuple call which wasn't inferred
+ # to a syntax node ({}, () etc.). This can happen
+ # when the arguments are invalid or unknown to
+ # the inference.
+ # * a variable from somewhere else, which turns out to be a list
+ # or a dict.
+ if is_iterable(default):
+ msg = value.pytype()
+ elif isinstance(default, astroid.Call):
+ msg = "%s() (%s)" % (value.name, value.qname())
+ else:
+ msg = "%s (%s)" % (default.as_string(), value.qname())
+ else:
+ # this argument is a name
+ msg = "%s (%s)" % (
+ default.as_string(),
+ DEFAULT_ARGUMENT_SYMBOLS[value.qname()],
+ )
+ self.add_message("dangerous-default-value", node=node, args=(msg,))
+
+ @utils.check_messages("unreachable", "lost-exception")
+ def visit_return(self, node):
+ """1 - check is the node has a right sibling (if so, that's some
+ unreachable code)
+ 2 - check is the node is inside the finally clause of a try...finally
+ block
+ """
+ self._check_unreachable(node)
+ # Is it inside final body of a try...finally bloc ?
+ self._check_not_in_finally(node, "return", (astroid.FunctionDef,))
+
+ @utils.check_messages("unreachable")
+ def visit_continue(self, node):
+ """check is the node has a right sibling (if so, that's some unreachable
+ code)
+ """
+ self._check_unreachable(node)
+
+ @utils.check_messages("unreachable", "lost-exception")
+ def visit_break(self, node):
+ """1 - check is the node has a right sibling (if so, that's some
+ unreachable code)
+ 2 - check is the node is inside the finally clause of a try...finally
+ block
+ """
+ # 1 - Is it right sibling ?
+ self._check_unreachable(node)
+ # 2 - Is it inside final body of a try...finally bloc ?
+ self._check_not_in_finally(node, "break", (astroid.For, astroid.While))
+
+ @utils.check_messages("unreachable")
+ def visit_raise(self, node):
+ """check if the node has a right sibling (if so, that's some unreachable
+ code)
+ """
+ self._check_unreachable(node)
+
+ @utils.check_messages("exec-used")
+ def visit_exec(self, node):
+ """just print a warning on exec statements"""
+ self.add_message("exec-used", node=node)
+
+ def _check_misplaced_format_function(self, call_node):
+ if not isinstance(call_node.func, astroid.Attribute):
+ return
+ if call_node.func.attrname != "format":
+ return
+
+ expr = utils.safe_infer(call_node.func.expr)
+ if expr is astroid.Uninferable:
+ return
+ if not expr:
+ # we are doubtful on inferred type of node, so here just check if format
+ # was called on print()
+ call_expr = call_node.func.expr
+ if not isinstance(call_expr, astroid.Call):
+ return
+ if (
+ isinstance(call_expr.func, astroid.Name)
+ and call_expr.func.name == "print"
+ ):
+ self.add_message("misplaced-format-function", node=call_node)
+
+ @utils.check_messages(
+ "eval-used", "exec-used", "bad-reversed-sequence", "misplaced-format-function"
+ )
+ def visit_call(self, node):
+ """visit a Call node -> check if this is not a blacklisted builtin
+ call and check for * or ** use
+ """
+ self._check_misplaced_format_function(node)
+ if isinstance(node.func, astroid.Name):
+ name = node.func.name
+ # ignore the name if it's not a builtin (i.e. not defined in the
+ # locals nor globals scope)
+ if not (name in node.frame() or name in node.root()):
+ if name == "exec":
+ self.add_message("exec-used", node=node)
+ elif name == "reversed":
+ self._check_reversed(node)
+ elif name == "eval":
+ self.add_message("eval-used", node=node)
+
+ @utils.check_messages("assert-on-tuple")
+ def visit_assert(self, node):
+ """check the use of an assert statement on a tuple."""
+ if (
+ node.fail is None
+ and isinstance(node.test, astroid.Tuple)
+ and len(node.test.elts) == 2
+ ):
+ self.add_message("assert-on-tuple", node=node)
+
+ @utils.check_messages("duplicate-key")
+ def visit_dict(self, node):
+ """check duplicate key in dictionary"""
+ keys = set()
+ for k, _ in node.items:
+ if isinstance(k, astroid.Const):
+ key = k.value
+ if key in keys:
+ self.add_message("duplicate-key", node=node, args=key)
+ keys.add(key)
+
    def visit_tryfinally(self, node):
        """update try...finally flag"""
        # Push the enclosing try...finally; read by _check_not_in_finally.
        self._tryfinallys.append(node)

    def leave_tryfinally(self, node): # pylint: disable=unused-argument
        """update try...finally flag"""
        # Pop the block pushed by visit_tryfinally.
        self._tryfinallys.pop()
+
+ def _check_unreachable(self, node):
+ """check unreachable code"""
+ unreach_stmt = node.next_sibling()
+ if unreach_stmt is not None:
+ self.add_message("unreachable", node=unreach_stmt)
+
    def _check_not_in_finally(self, node, node_name, breaker_classes=()):
        """check that a node is not inside a finally clause of a
        try...finally statement.
        If we find, before reaching a try...finally block, a parent whose
        type is in breaker_classes, we skip the whole check."""
        # if self._tryfinallys is empty, we're not inside a try...finally block
        if not self._tryfinallys:
            return
        # the node could be a grand-grand...-child of the try...finally
        _parent = node.parent
        _node = node
        # Walk up the tree; a breaker class (e.g. a nested function for
        # "return", a loop for "break") makes the statement legitimate.
        while _parent and not isinstance(_parent, breaker_classes):
            if hasattr(_parent, "finalbody") and _node in _parent.finalbody:
                self.add_message("lost-exception", node=node, args=node_name)
                return
            _node = _parent
            _parent = _node.parent
+
    def _check_reversed(self, node):
        """ check that the argument to `reversed` is a sequence """
        try:
            argument = utils.safe_infer(utils.get_argument_from_call(node, position=0))
        except utils.NoSuchArgumentError:
            # reversed() called without arguments: nothing to check here.
            pass
        else:
            if argument is astroid.Uninferable:
                return
            if argument is None:
                # Nothing was inferred.
                # Try to see if we have iter().
                if isinstance(node.args[0], astroid.Call):
                    try:
                        func = next(node.args[0].func.infer())
                    except astroid.InferenceError:
                        return
                    # iter() returns an iterator, never a sequence.
                    if getattr(
                        func, "name", None
                    ) == "iter" and utils.is_builtin_object(func):
                        self.add_message("bad-reversed-sequence", node=node)
                return

            if isinstance(argument, (astroid.List, astroid.Tuple)):
                # Literal sequences are always fine.
                return

            if isinstance(argument, astroid.Instance):
                if argument._proxied.name == "dict" and utils.is_builtin_object(
                    argument._proxied
                ):
                    self.add_message("bad-reversed-sequence", node=node)
                    return
                if any(
                    ancestor.name == "dict" and utils.is_builtin_object(ancestor)
                    for ancestor in argument._proxied.ancestors()
                ):
                    # Mappings aren't accepted by reversed(), unless
                    # they provide explicitly a __reversed__ method.
                    try:
                        argument.locals[REVERSED_PROTOCOL_METHOD]
                    except KeyError:
                        self.add_message("bad-reversed-sequence", node=node)
                    return

                if hasattr(argument, "getattr"):
                    # everything else is not a proper sequence for reversed()
                    # REVERSED_METHODS is a sequence of method groups; each
                    # group must be fully present for the type to qualify.
                    for methods in REVERSED_METHODS:
                        for meth in methods:
                            try:
                                argument.getattr(meth)
                            except astroid.NotFoundError:
                                # Missing method: this group fails; try next.
                                break
                        else:
                            # Whole group found: the argument is acceptable.
                            break
                    else:
                        self.add_message("bad-reversed-sequence", node=node)
            else:
                self.add_message("bad-reversed-sequence", node=node)
+
    @utils.check_messages("confusing-with-statement")
    def visit_with(self, node):
        # a "with" statement with multiple managers corresponds
        # to one AST "With" node with multiple items
        pairs = node.items
        if pairs:
            # Compare each adjacent pair of (context-manager, binding).
            for prev_pair, pair in zip(pairs, pairs[1:]):
                if isinstance(prev_pair[1], astroid.AssignName) and (
                    pair[1] is None and not isinstance(pair[0], astroid.Call)
                ):
                    # Don't emit a message if the second is a function call
                    # there's no way that can be mistaken for a name assignment.
                    # If the line number doesn't match
                    # we assume it's a nested "with".
                    self.add_message("confusing-with-statement", node=node)
+
    def _check_self_assigning_variable(self, node):
        # Detect assigning to the same variable.

        scope = node.scope()
        scope_locals = scope.locals

        rhs_names = []
        targets = node.targets
        if isinstance(targets[0], astroid.Tuple):
            if len(targets) != 1:
                # A complex assignment, so bail out early.
                return
            # Unpack the tuple target into its individual elements.
            targets = targets[0].elts

        if isinstance(node.value, astroid.Name):
            if len(targets) != 1:
                return
            rhs_names = [node.value]
        elif isinstance(node.value, astroid.Tuple):
            rhs_count = len(node.value.elts)
            if len(targets) != rhs_count or rhs_count == 1:
                return
            rhs_names = node.value.elts

        # NOTE(review): ``lhs_name`` below actually iterates the right-hand
        # side names (rhs_names); the naming is misleading but the pairing
        # with ``target`` is correct.
        for target, lhs_name in zip(targets, rhs_names):
            if not isinstance(lhs_name, astroid.Name):
                continue
            if not isinstance(target, astroid.AssignName):
                continue
            if isinstance(scope, astroid.ClassDef) and target.name in scope_locals:
                # Check that the scope is different than a class level, which is usually
                # a pattern to expose module level attributes as class level ones.
                continue
            if target.name == lhs_name.name:
                self.add_message(
                    "self-assigning-variable", args=(target.name,), node=target
                )
+
+ def _check_redeclared_assign_name(self, targets):
+ for target in targets:
+ if not isinstance(target, astroid.Tuple):
+ continue
+
+ found_names = []
+ for element in target.elts:
+ if isinstance(element, astroid.Tuple):
+ self._check_redeclared_assign_name([element])
+ elif isinstance(element, astroid.AssignName) and element.name != "_":
+ found_names.append(element.name)
+
+ names = collections.Counter(found_names)
+ for name, count in names.most_common():
+ if count > 1:
+ self.add_message(
+ "redeclared-assigned-name", args=(name,), node=target
+ )
+
    @utils.check_messages("self-assigning-variable", "redeclared-assigned-name")
    def visit_assign(self, node):
        """Run the self-assignment and duplicate-target checks on an Assign."""
        self._check_self_assigning_variable(node)
        self._check_redeclared_assign_name(node.targets)
+
    @utils.check_messages("redeclared-assigned-name")
    def visit_for(self, node):
        """Check a ``for`` loop's target tuple for duplicate names."""
        self._check_redeclared_assign_name([node.target])
+
+
# Name categories recognised by the naming options generated below.
KNOWN_NAME_TYPES = {
    "module",
    "const",
    "class",
    "function",
    "method",
    "attr",
    "argument",
    "variable",
    "class_attribute",
    "inlinevar",
}


# Category -> label used in user-facing messages and option help text.
HUMAN_READABLE_TYPES = {
    "module": "module",
    "const": "constant",
    "class": "class",
    "function": "function",
    "method": "method",
    "attr": "attribute",
    "argument": "argument",
    "variable": "variable",
    "class_attribute": "class attribute",
    "inlinevar": "inline iteration",
}

# Category -> default naming style when no explicit option is given.
DEFAULT_NAMING_STYLES = {
    "module": "snake_case",
    "const": "UPPER_CASE",
    "class": "PascalCase",
    "function": "snake_case",
    "method": "snake_case",
    "attr": "snake_case",
    "argument": "snake_case",
    "variable": "snake_case",
    "class_attribute": "any",
    "inlinevar": "any",
}
+
+
def _create_naming_options():
    """Build the ``<type>-naming-style`` and ``<type>-rgx`` option tuples.

    One pair of options is generated, in sorted order, for every entry of
    ``KNOWN_NAME_TYPES``.
    """
    name_options = []
    for name_type in sorted(KNOWN_NAME_TYPES):
        human_readable_name = HUMAN_READABLE_TYPES[name_type]
        default_style = DEFAULT_NAMING_STYLES[name_type]
        # Option names use dashes where the internal category uses underscores.
        option_prefix = name_type.replace("_", "-")
        style_option = (
            "%s-naming-style" % (option_prefix,),
            {
                "default": default_style,
                "type": "choice",
                "choices": list(NAMING_STYLES.keys()),
                "metavar": "<style>",
                "help": "Naming style matching correct %s names."
                % (human_readable_name,),
            },
        )
        regex_option = (
            "%s-rgx" % (option_prefix,),
            {
                "default": None,
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "Regular expression matching correct %s names. Overrides %s-naming-style."
                % (human_readable_name, option_prefix),
            },
        )
        name_options.extend((style_option, regex_option))
    return tuple(name_options)
+
+
class NameChecker(_BasicChecker):
    """Checker for invalid, blacklisted and soon-to-be-keyword names."""

    msgs = {
        "C0102": (
            'Black listed name "%s"',
            "blacklisted-name",
            "Used when the name is listed in the black list (unauthorized names).",
        ),
        "C0103": (
            '%s name "%s" doesn\'t conform to %s',
            "invalid-name",
            "Used when the name doesn't conform to naming rules "
            "associated to its type (constant, variable, class...).",
        ),
        "W0111": (
            "Name %s will become a keyword in Python %s",
            "assign-to-new-keyword",
            "Used when assignment will become invalid in future "
            "Python release due to introducing new keyword.",
        ),
    }

    options = (
        (
            "good-names",
            {
                "default": ("i", "j", "k", "ex", "Run", "_"),
                "type": "csv",
                "metavar": "<names>",
                "help": "Good variable names which should always be accepted,"
                " separated by a comma.",
            },
        ),
        (
            "bad-names",
            {
                "default": ("foo", "bar", "baz", "toto", "tutu", "tata"),
                "type": "csv",
                "metavar": "<names>",
                "help": "Bad variable names which should always be refused, "
                "separated by a comma.",
            },
        ),
        (
            "name-group",
            {
                "default": (),
                "type": "csv",
                "metavar": "<name1:name2>",
                "help": (
                    "Colon-delimited sets of names that determine each"
                    " other's naming style when the name regexes"
                    " allow several styles."
                ),
            },
        ),
        (
            "include-naming-hint",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Include a hint for the correct naming format with invalid-name.",
            },
        ),
        (
            "property-classes",
            {
                "default": ("abc.abstractproperty",),
                "type": "csv",
                "metavar": "<decorator names>",
                "help": "List of decorators that produce properties, such as "
                "abc.abstractproperty. Add to this list to register "
                "other decorators that produce valid properties. "
                "These decorators are taken in consideration only for invalid-name.",
            },
        ),
    ) + _create_naming_options()

    # Python version -> identifiers that become keywords in that version.
    KEYWORD_ONSET = {(3, 7): {"async", "await"}}
+
    def __init__(self, linter):
        """Create the checker; naming rules are compiled in :meth:`open`."""
        _BasicChecker.__init__(self, linter)
        self._name_category = {}
        self._name_group = {}
        self._bad_names = {}
        self._name_regexps = {}
        self._name_hints = {}
+
    def open(self):
        """Reset statistics and compile the configured naming rules."""
        self.stats = self.linter.add_stats(
            badname_module=0,
            badname_class=0,
            badname_function=0,
            badname_method=0,
            badname_attr=0,
            badname_const=0,
            badname_variable=0,
            badname_inlinevar=0,
            badname_argument=0,
            badname_class_attribute=0,
        )
        # Every name type listed in a name-group entry shares one group key,
        # so multi-style matches are reconciled together in leave_module.
        for group in self.config.name_group:
            for name_type in group.split(":"):
                self._name_group[name_type] = "group_%s" % (group,)

        regexps, hints = self._create_naming_rules()
        self._name_regexps = regexps
        self._name_hints = hints
+
+ def _create_naming_rules(self):
+ regexps = {}
+ hints = {}
+
+ for name_type in KNOWN_NAME_TYPES:
+ naming_style_option_name = "%s_naming_style" % (name_type,)
+ naming_style_name = getattr(self.config, naming_style_option_name)
+
+ regexps[name_type] = NAMING_STYLES[naming_style_name].get_regex(name_type)
+
+ custom_regex_setting_name = "%s_rgx" % (name_type,)
+ custom_regex = getattr(self.config, custom_regex_setting_name, None)
+ if custom_regex is not None:
+ regexps[name_type] = custom_regex
+
+ if custom_regex is not None:
+ hints[name_type] = "%r pattern" % custom_regex.pattern
+ else:
+ hints[name_type] = "%s naming style" % naming_style_name
+
+ return regexps, hints
+
    @utils.check_messages("blacklisted-name", "invalid-name")
    def visit_module(self, node):
        """Check the module's own name and reset the multi-style buffers."""
        # Only the last dotted component is the module's own name.
        self._check_name("module", node.name.split(".")[-1], node)
        self._bad_names = {}
+
    def leave_module(self, node):  # pylint: disable=unused-argument
        """Flush deferred invalid-name warnings for multi-style name groups.

        Within each name group, the naming style matched by the most names
        is treated as the intended one and only the minority styles are
        reported.
        """
        for all_groups in self._bad_names.values():
            if len(all_groups) < 2:
                # Only one style was ever matched: nothing to reconcile.
                continue
            groups = collections.defaultdict(list)
            min_warnings = sys.maxsize
            # Bucket the per-style warning lists by their size and track the
            # smallest bucket (the minority styles).
            for group in all_groups.values():
                groups[len(group)].append(group)
                min_warnings = min(len(group), min_warnings)
            if len(groups[min_warnings]) > 1:
                # Several styles tie for minority; keep the first (by line
                # number) as the reference and warn about the rest.
                by_line = sorted(
                    groups[min_warnings],
                    key=lambda group: min(warning[0].lineno for warning in group),
                )
                warnings = itertools.chain(*by_line[1:])
            else:
                warnings = groups[min_warnings][0]
            for args in warnings:
                self._raise_name_warning(*args)
+
+ @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
+ def visit_classdef(self, node):
+ self._check_assign_to_new_keyword_violation(node.name, node)
+ self._check_name("class", node.name, node)
+ for attr, anodes in node.instance_attrs.items():
+ if not any(node.instance_attr_ancestors(attr)):
+ self._check_name("attr", attr, anodes[0])
+
    @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
    def visit_functiondef(self, node):
        # Do not emit any warnings if the method is just an implementation
        # of a base class method.
        self._check_assign_to_new_keyword_violation(node.name, node)
        confidence = interfaces.HIGH
        if node.is_method():
            if utils.overrides_a_method(node.parent.frame(), node.name):
                return
            # Lower the confidence when the class hierarchy is not fully
            # known, since the override check above may have missed a base.
            confidence = (
                interfaces.INFERENCE
                if utils.has_known_bases(node.parent.frame())
                else interfaces.INFERENCE_FAILURE
            )

        self._check_name(
            _determine_function_name_type(node, config=self.config),
            node.name,
            node,
            confidence,
        )
        # Check argument names
        args = node.args.args
        if args is not None:
            self._recursive_check_names(args, node)

    # Async functions get identical treatment.
    visit_asyncfunctiondef = visit_functiondef
+
+ @utils.check_messages("blacklisted-name", "invalid-name")
+ def visit_global(self, node):
+ for name in node.names:
+ self._check_name("const", name, node)
+
    @utils.check_messages("blacklisted-name", "invalid-name", "assign-to-new-keyword")
    def visit_assignname(self, node):
        """check module level assigned names"""
        self._check_assign_to_new_keyword_violation(node.name, node)
        frame = node.frame()
        assign_type = node.assign_type()
        if isinstance(assign_type, astroid.Comprehension):
            self._check_name("inlinevar", node.name, node)
        elif isinstance(frame, astroid.Module):
            if isinstance(assign_type, astroid.Assign) and not in_loop(assign_type):
                # A module-level alias to a class is treated as a class name,
                # not as a constant.
                if isinstance(utils.safe_infer(assign_type.value), astroid.ClassDef):
                    self._check_name("class", node.name, node)
                else:
                    if not _redefines_import(node):
                        # Don't emit if the name redefines an import
                        # in an ImportError except handler.
                        self._check_name("const", node.name, node)
            elif isinstance(assign_type, astroid.ExceptHandler):
                self._check_name("variable", node.name, node)
        elif isinstance(frame, astroid.FunctionDef):
            # global introduced variable aren't in the function locals
            if node.name in frame and node.name not in frame.argnames():
                if not _redefines_import(node):
                    self._check_name("variable", node.name, node)
        elif isinstance(frame, astroid.ClassDef):
            # Only attributes first introduced here count as class attributes.
            if not list(frame.local_attr_ancestors(node.name)):
                self._check_name("class_attribute", node.name, node)
+
+ def _recursive_check_names(self, args, node):
+ """check names in a possibly recursive list <arg>"""
+ for arg in args:
+ if isinstance(arg, astroid.AssignName):
+ self._check_name("argument", arg.name, node)
+ else:
+ self._recursive_check_names(arg.elts, node)
+
    def _find_name_group(self, node_type):
        """Return the configured name-group key for *node_type*, or itself."""
        return self._name_group.get(node_type, node_type)
+
+ def _raise_name_warning(self, node, node_type, name, confidence):
+ type_label = HUMAN_READABLE_TYPES[node_type]
+ hint = self._name_hints[node_type]
+ if self.config.include_naming_hint:
+ hint += " (%r pattern)" % self._name_regexps[node_type].pattern
+ args = (type_label.capitalize(), name, hint)
+
+ self.add_message("invalid-name", node=node, args=args, confidence=confidence)
+ self.stats["badname_" + node_type] += 1
+
    def _check_name(self, node_type, name, node, confidence=interfaces.HIGH):
        """check for a name using the type's regexp"""

        def _should_exempt_from_invalid_name(node):
            # A variable bound to a class is effectively a class alias and
            # should not be held to variable naming rules.
            if node_type == "variable":
                inferred = utils.safe_infer(node)
                if isinstance(inferred, astroid.ClassDef):
                    return True
            return False

        if utils.is_inside_except(node):
            clobbering, _ = utils.clobber_in_except(node)
            if clobbering:
                # The name clobbers an outer binding; another check handles it.
                return
        if name in self.config.good_names:
            return
        if name in self.config.bad_names:
            self.stats["badname_" + node_type] += 1
            self.add_message("blacklisted-name", node=node, args=name)
            return
        regexp = self._name_regexps[node_type]
        match = regexp.match(name)

        if _is_multi_naming_match(match, node_type, confidence):
            # The regex allows several styles; defer the warning so
            # leave_module can reconcile the whole group at once.
            name_group = self._find_name_group(node_type)
            bad_name_group = self._bad_names.setdefault(name_group, {})
            warnings = bad_name_group.setdefault(match.lastgroup, [])
            warnings.append((node, node_type, name, confidence))

        if match is None and not _should_exempt_from_invalid_name(node):
            self._raise_name_warning(node, node_type, name, confidence)
+
+ def _check_assign_to_new_keyword_violation(self, name, node):
+ keyword_first_version = self._name_became_keyword_in_version(
+ name, self.KEYWORD_ONSET
+ )
+ if keyword_first_version is not None:
+ self.add_message(
+ "assign-to-new-keyword",
+ node=node,
+ args=(name, keyword_first_version),
+ confidence=interfaces.HIGH,
+ )
+
+ @staticmethod
+ def _name_became_keyword_in_version(name, rules):
+ for version, keywords in rules.items():
+ if name in keywords and sys.version_info < version:
+ return ".".join(map(str, version))
+ return None
+
+
class DocStringChecker(_BasicChecker):
    """Checker for missing or empty module/class/function docstrings."""

    msgs = {
        "C0112": (
            "Empty %s docstring",
            "empty-docstring",
            "Used when a module, function, class or method has an empty "
            "docstring (it would be too easy ;).",
            {"old_names": [("W0132", "old-empty-docstring")]},
        ),
        "C0114": (
            "Missing module docstring",
            "missing-module-docstring",
            "Used when a module has no docstring."
            "Empty modules do not require a docstring.",
            {"old_names": [("C0111", "missing-docstring")]},
        ),
        "C0115": (
            "Missing class docstring",
            "missing-class-docstring",
            "Used when a class has no docstring."
            "Even an empty class must have a docstring.",
            {"old_names": [("C0111", "missing-docstring")]},
        ),
        "C0116": (
            "Missing function or method docstring",
            "missing-function-docstring",
            "Used when a function or method has no docstring."
            "Some special methods like __init__ do not require a "
            "docstring.",
            {"old_names": [("C0111", "missing-docstring")]},
        ),
    }
    options = (
        (
            "no-docstring-rgx",
            {
                "default": NO_REQUIRED_DOC_RGX,
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "Regular expression which should only match "
                "function or class names that do not require a "
                "docstring.",
            },
        ),
        (
            "docstring-min-length",
            {
                "default": -1,
                "type": "int",
                "metavar": "<int>",
                "help": (
                    "Minimum line length for functions/classes that"
                    " require docstrings, shorter ones are exempt."
                ),
            },
        ),
    )
+
+ def open(self):
+ self.stats = self.linter.add_stats(
+ undocumented_module=0,
+ undocumented_function=0,
+ undocumented_method=0,
+ undocumented_class=0,
+ )
+
+ @utils.check_messages("missing-docstring", "empty-docstring")
+ def visit_module(self, node):
+ self._check_docstring("module", node)
+
+ @utils.check_messages("missing-docstring", "empty-docstring")
+ def visit_classdef(self, node):
+ if self.config.no_docstring_rgx.match(node.name) is None:
+ self._check_docstring("class", node)
+
+ @utils.check_messages("missing-docstring", "empty-docstring")
+ def visit_functiondef(self, node):
+ if self.config.no_docstring_rgx.match(node.name) is None:
+ ftype = "method" if node.is_method() else "function"
+ if is_property_setter_or_deleter(node):
+ return
+
+ if isinstance(node.parent.frame(), astroid.ClassDef):
+ overridden = False
+ confidence = (
+ interfaces.INFERENCE
+ if utils.has_known_bases(node.parent.frame())
+ else interfaces.INFERENCE_FAILURE
+ )
+ # check if node is from a method overridden by its ancestor
+ for ancestor in node.parent.frame().ancestors():
+ if node.name in ancestor and isinstance(
+ ancestor[node.name], astroid.FunctionDef
+ ):
+ overridden = True
+ break
+ self._check_docstring(
+ ftype, node, report_missing=not overridden, confidence=confidence
+ )
+ elif isinstance(node.parent.frame(), astroid.Module):
+ self._check_docstring(ftype, node)
+ else:
+ return
+
+ visit_asyncfunctiondef = visit_functiondef
+
+ def _check_docstring(
+ self, node_type, node, report_missing=True, confidence=interfaces.HIGH
+ ):
+ """check the node has a non empty docstring"""
+ docstring = node.doc
+ if docstring is None:
+ if not report_missing:
+ return
+ lines = utils.get_node_last_lineno(node) - node.lineno
+
+ if node_type == "module" and not lines:
+ # If the module has no body, there's no reason
+ # to require a docstring.
+ return
+ max_lines = self.config.docstring_min_length
+
+ if node_type != "module" and max_lines > -1 and lines < max_lines:
+ return
+ self.stats["undocumented_" + node_type] += 1
+ if (
+ node.body
+ and isinstance(node.body[0], astroid.Expr)
+ and isinstance(node.body[0].value, astroid.Call)
+ ):
+ # Most likely a string with a format call. Let's see.
+ func = utils.safe_infer(node.body[0].value.func)
+ if isinstance(func, astroid.BoundMethod) and isinstance(
+ func.bound, astroid.Instance
+ ):
+ # Strings.
+ if func.bound.name == "str":
+ return
+ if func.bound.name in ("str", "unicode", "bytes"):
+ return
+ if node_type == "module":
+ message = "missing-module-docstring"
+ elif node_type == "class":
+ message = "missing-class-docstring"
+ else:
+ message = "missing-function-docstring"
+ self.add_message(message, node=node, confidence=confidence)
+ elif not docstring.strip():
+ self.stats["undocumented_" + node_type] += 1
+ self.add_message(
+ "empty-docstring", node=node, args=(node_type,), confidence=confidence
+ )
+
+
class PassChecker(_BasicChecker):
    """check if the pass statement is really necessary"""

    msgs = {
        "W0107": (
            "Unnecessary pass statement",
            "unnecessary-pass",
            'Used when a "pass" statement that can be avoided is encountered.',
        )
    }

    @utils.check_messages("unnecessary-pass")
    def visit_pass(self, node):
        """Flag a `pass` that has sibling statements or follows a docstring."""
        has_siblings = len(node.parent.child_sequence(node)) > 1
        parent = node.parent
        follows_docstring = (
            isinstance(parent, (astroid.ClassDef, astroid.FunctionDef))
            and parent.doc is not None
        )
        if has_siblings or follows_docstring:
            self.add_message("unnecessary-pass", node=node)
+
+
def _is_one_arg_pos_call(call):
    """Is this a call with exactly 1 argument,
    where that argument is positional?
    """
    if not isinstance(call, astroid.Call):
        return False
    return len(call.args) == 1 and not call.keywords
+
+
class ComparisonChecker(_BasicChecker):
    """Checks for comparisons

    - singleton comparison: 'expr == True', 'expr == False' and 'expr == None'
    - yoda condition: 'const "comp" right' where comp can be '==', '!=', '<',
      '<=', '>' or '>=', and right can be a variable, an attribute, a method or
      a function

    Fix vs. original: the '!=' branch of visit_compare only inspected the
    right operand for a singleton, so 'True != expr' went unreported; it now
    mirrors the '==' branch and checks the left operand first.
    """

    msgs = {
        "C0121": (
            "Comparison to %s should be %s",
            "singleton-comparison",
            "Used when an expression is compared to singleton "
            "values like True, False or None.",
        ),
        "C0122": (
            "Comparison should be %s",
            "misplaced-comparison-constant",
            "Used when the constant is placed on the left side "
            "of a comparison. It is usually clearer in intent to "
            "place it in the right hand side of the comparison.",
        ),
        "C0123": (
            "Using type() instead of isinstance() for a typecheck.",
            "unidiomatic-typecheck",
            "The idiomatic way to perform an explicit typecheck in "
            "Python is to use isinstance(x, Y) rather than "
            "type(x) == Y, type(x) is Y. Though there are unusual "
            "situations where these give different results.",
            {"old_names": [("W0154", "old-unidiomatic-typecheck")]},
        ),
        "R0123": (
            "Comparison to literal",
            "literal-comparison",
            "Used when comparing an object to a literal, which is usually "
            "what you do not want to do, since you can compare to a different "
            "literal than what was expected altogether.",
        ),
        "R0124": (
            "Redundant comparison - %s",
            "comparison-with-itself",
            "Used when something is compared against itself.",
        ),
        "W0143": (
            "Comparing against a callable, did you omit the parenthesis?",
            "comparison-with-callable",
            "This message is emitted when pylint detects that a comparison with a "
            "callable was made, which might suggest that some parenthesis were omitted, "
            "resulting in potential unwanted behaviour.",
        ),
    }

    def _check_singleton_comparison(self, singleton, root_node, negative_check=False):
        """Suggest the idiomatic replacement for a comparison to True/False/None.

        *negative_check* is True when the operator was '!=' (the suggestion
        is then inverted).
        """
        if singleton.value is True:
            if not negative_check:
                suggestion = "just 'expr'"
            else:
                suggestion = "just 'not expr'"
            self.add_message(
                "singleton-comparison", node=root_node, args=(True, suggestion)
            )
        elif singleton.value is False:
            if not negative_check:
                suggestion = "'not expr'"
            else:
                suggestion = "'expr'"
            self.add_message(
                "singleton-comparison", node=root_node, args=(False, suggestion)
            )
        elif singleton.value is None:
            if not negative_check:
                suggestion = "'expr is None'"
            else:
                suggestion = "'expr is not None'"
            self.add_message(
                "singleton-comparison", node=root_node, args=(None, suggestion)
            )

    def _check_literal_comparison(self, literal, node):
        """Check if we compare to a literal, which is usually what we do not want to do."""
        nodes = (astroid.List, astroid.Tuple, astroid.Dict, astroid.Set)
        is_other_literal = isinstance(literal, nodes)
        is_const = False
        if isinstance(literal, astroid.Const):
            if isinstance(literal.value, bool) or literal.value is None:
                # Not interested in these values (handled by
                # singleton-comparison instead).
                return
            is_const = isinstance(literal.value, (bytes, str, int, float))

        if is_const or is_other_literal:
            self.add_message("literal-comparison", node=node)

    def _check_misplaced_constant(self, node, left, right, operator):
        """Suggest flipping a yoda condition (constant on the left)."""
        if isinstance(right, astroid.Const):
            # Constant on both sides: flipping would not help.
            return
        operator = REVERSED_COMPS.get(operator, operator)
        suggestion = "%s %s %r" % (right.as_string(), operator, left.value)
        self.add_message("misplaced-comparison-constant", node=node, args=(suggestion,))

    def _check_logical_tautology(self, node):
        """Check if identifier is compared against itself.
        :param node: Compare node
        :type node: astroid.node_classes.Compare
        :Example:
        val = 786
        if val == val:  # [comparison-with-itself]
            pass
        """
        left_operand = node.left
        right_operand = node.ops[0][1]
        operator = node.ops[0][0]
        if isinstance(left_operand, astroid.Const) and isinstance(
            right_operand, astroid.Const
        ):
            left_operand = left_operand.value
            right_operand = right_operand.value
        elif isinstance(left_operand, astroid.Name) and isinstance(
            right_operand, astroid.Name
        ):
            left_operand = left_operand.name
            right_operand = right_operand.name

        if left_operand == right_operand:
            suggestion = "%s %s %s" % (left_operand, operator, right_operand)
            self.add_message("comparison-with-itself", node=node, args=(suggestion,))

    def _check_callable_comparison(self, node):
        """Warn when exactly one side of a comparison is a bare callable."""
        operator = node.ops[0][0]
        if operator not in COMPARISON_OPERATORS:
            return

        bare_callables = (astroid.FunctionDef, astroid.BoundMethod)
        left_operand, right_operand = node.left, node.ops[0][1]
        # this message should be emitted only when there is comparison of bare callable
        # with non bare callable.
        if (
            sum(
                1
                for operand in (left_operand, right_operand)
                if isinstance(utils.safe_infer(operand), bare_callables)
            )
            == 1
        ):
            self.add_message("comparison-with-callable", node=node)

    @utils.check_messages(
        "singleton-comparison",
        "misplaced-comparison-constant",
        "unidiomatic-typecheck",
        "literal-comparison",
        "comparison-with-itself",
        "comparison-with-callable",
    )
    def visit_compare(self, node):
        self._check_callable_comparison(node)
        self._check_logical_tautology(node)
        self._check_unidiomatic_typecheck(node)
        # NOTE: this checker only works with binary comparisons like 'x == 42'
        # but not 'x == y == 42'
        if len(node.ops) != 1:
            return

        left = node.left
        operator, right = node.ops[0]
        if operator in COMPARISON_OPERATORS and isinstance(left, astroid.Const):
            self._check_misplaced_constant(node, left, right, operator)

        if operator == "==":
            if isinstance(left, astroid.Const):
                self._check_singleton_comparison(left, node)
            elif isinstance(right, astroid.Const):
                self._check_singleton_comparison(right, node)
        if operator == "!=":
            # Mirror the "==" handling: a singleton on either side of "!="
            # gets the (inverted) suggestion, e.g. 'True != expr'.
            if isinstance(left, astroid.Const):
                self._check_singleton_comparison(left, node, negative_check=True)
            elif isinstance(right, astroid.Const):
                self._check_singleton_comparison(right, node, negative_check=True)
        if operator in ("is", "is not"):
            self._check_literal_comparison(right, node)

    def _check_unidiomatic_typecheck(self, node):
        """Dispatch to _check_type_x_is_y for comparisons of the form type(x) <op> ..."""
        operator, right = node.ops[0]
        if operator in TYPECHECK_COMPARISON_OPERATORS:
            left = node.left
            if _is_one_arg_pos_call(left):
                self._check_type_x_is_y(node, left, operator, right)

    def _check_type_x_is_y(self, node, left, operator, right):
        """Check for expressions like type(x) == Y."""
        left_func = utils.safe_infer(left.func)
        if not (
            isinstance(left_func, astroid.ClassDef) and left_func.qname() == TYPE_QNAME
        ):
            return

        if operator in ("is", "is not") and _is_one_arg_pos_call(right):
            right_func = utils.safe_infer(right.func)
            if (
                isinstance(right_func, astroid.ClassDef)
                and right_func.qname() == TYPE_QNAME
            ):
                # type(x) == type(a)
                right_arg = utils.safe_infer(right.args[0])
                if not isinstance(right_arg, LITERAL_NODE_TYPES):
                    # not e.g. type(x) == type([])
                    return
        self.add_message("unidiomatic-typecheck", node=node)
+
+
def register(linter):
    """required method to auto register this checker"""
    checker_classes = (
        BasicErrorChecker,
        BasicChecker,
        NameChecker,
        DocStringChecker,
        PassChecker,
        ComparisonChecker,
    )
    for checker_class in checker_classes:
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/base_checker.py b/venv/Lib/site-packages/pylint/checkers/base_checker.py
new file mode 100644
index 0000000..f2ae4e5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/base_checker.py
@@ -0,0 +1,187 @@
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
+# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+from inspect import cleandoc
+from typing import Any
+
+from pylint.config import OptionsProviderMixIn
+from pylint.constants import _MSG_ORDER, WarningScope
+from pylint.exceptions import InvalidMessageError
+from pylint.interfaces import UNDEFINED, IRawChecker, ITokenChecker, implements
+from pylint.message.message_definition import MessageDefinition
+from pylint.utils import get_rst_section, get_rst_title
+
+
class BaseChecker(OptionsProviderMixIn):
    """Common base for pylint checkers.

    Subclasses declare ``name``, ``msgs``, ``options`` and ``reports`` as
    class attributes; instances are registered on a linter and forward their
    diagnostics to it through :meth:`add_message`.
    """

    # checker name (you may reuse an existing one)
    name = None  # type: str
    # options level (0 will be displaying in --help, 1 in --long-help)
    level = 1
    # ordered list of options to control the checker behaviour
    options = ()  # type: Any
    # messages issued by this checker
    msgs = {}  # type: Any
    # reports issued by this checker
    reports = ()  # type: Any
    # mark this checker as enabled or not.
    enabled = True

    def __init__(self, linter=None):
        """checker instances should have the linter as argument

        :param ILinter linter: is an object implementing ILinter."""
        if self.name is not None:
            # Checker names are matched case-insensitively; normalize once.
            self.name = self.name.lower()
        OptionsProviderMixIn.__init__(self)
        self.linter = linter

    def __gt__(self, other):
        """Permit to sort a list of Checker by name."""
        # Ties on name are broken by the msgs dict's repr for a stable order.
        return "{}{}".format(self.name, self.msgs).__gt__(
            "{}{}".format(other.name, other.msgs)
        )

    def __repr__(self):
        """Short debug representation: status, name and owned message ids."""
        status = "Checker" if self.enabled else "Disabled checker"
        return "{} '{}' (responsible for '{}')".format(
            status, self.name, "', '".join(self.msgs.keys())
        )

    def __str__(self):
        """This might be incomplete because multiple class inheriting BaseChecker
        can have the same name. Cf MessageHandlerMixIn.get_full_documentation()"""
        return self.get_full_documentation(
            msgs=self.msgs, options=self.options_and_values(), reports=self.reports
        )

    def get_full_documentation(self, msgs, options, reports, doc=None, module=None):
        """Render the checker's documentation as a reStructuredText string."""
        result = ""
        checker_title = "%s checker" % (self.name.replace("_", " ").title())
        if module:
            # Provide anchor to link against
            result += ".. _%s:\n\n" % module
        result += "%s\n" % get_rst_title(checker_title, "~")
        if module:
            result += "This checker is provided by ``%s``.\n" % module
        result += "Verbatim name of the checker is ``%s``.\n\n" % self.name
        if doc:
            # Provide anchor to link against
            result += get_rst_title("{} Documentation".format(checker_title), "^")
            result += "%s\n\n" % cleandoc(doc)
        # options might be an empty generator and not be False when casted to boolean
        options = list(options)
        if options:
            result += get_rst_title("{} Options".format(checker_title), "^")
            result += "%s\n" % get_rst_section(None, options)
        if msgs:
            result += get_rst_title("{} Messages".format(checker_title), "^")
            # Sort by severity letter order (_MSG_ORDER), then by message body.
            for msgid, msg in sorted(
                msgs.items(), key=lambda kv: (_MSG_ORDER.index(kv[0][0]), kv[1])
            ):
                msg = self.create_message_definition_from_tuple(msgid, msg)
                result += "%s\n" % msg.format_help(checkerref=False)
            result += "\n"
        if reports:
            result += get_rst_title("{} Reports".format(checker_title), "^")
            for report in reports:
                result += ":%s: %s\n" % report[:2]
            result += "\n"
        result += "\n"
        return result

    def add_message(
        self, msgid, line=None, node=None, args=None, confidence=None, col_offset=None
    ):
        """Forward a diagnostic to the linter, defaulting confidence to UNDEFINED."""
        if not confidence:
            confidence = UNDEFINED
        self.linter.add_message(msgid, line, node, args, confidence, col_offset)

    def check_consistency(self):
        """Check the consistency of msgid.

        msg ids for a checker should be a string of len 4, where the two first
        characters are the checker id and the two last the msg id in this
        checker.

        :raises InvalidMessageError: If the checker id in the messages are not
        always the same. """
        checker_id = None
        existing_ids = []
        for message in self.messages:
            if checker_id is not None and checker_id != message.msgid[1:3]:
                error_msg = "Inconsistent checker part in message id "
                error_msg += "'{}' (expected 'x{checker_id}xx' ".format(
                    message.msgid, checker_id=checker_id
                )
                error_msg += "because we already had {existing_ids}).".format(
                    existing_ids=existing_ids
                )
                raise InvalidMessageError(error_msg)
            checker_id = message.msgid[1:3]
            existing_ids.append(message.msgid)

    def create_message_definition_from_tuple(self, msgid, msg_tuple):
        """Build a MessageDefinition from a (msg, symbol, descr[, options]) tuple."""
        # Raw/token checkers report per-line; AST checkers report per-node.
        if implements(self, (IRawChecker, ITokenChecker)):
            default_scope = WarningScope.LINE
        else:
            default_scope = WarningScope.NODE
        options = {}
        if len(msg_tuple) > 3:
            (msg, symbol, descr, options) = msg_tuple
        elif len(msg_tuple) > 2:
            (msg, symbol, descr) = msg_tuple
        else:
            error_msg = """Messages should have a msgid and a symbol. Something like this :

"W1234": (
    "message",
    "message-symbol",
    "Message description with detail.",
    ...
),
"""
            raise InvalidMessageError(error_msg)
        options.setdefault("scope", default_scope)
        return MessageDefinition(self, msgid, msg, descr, symbol, **options)

    @property
    def messages(self) -> list:
        """All MessageDefinitions of this checker, sorted by message id."""
        return [
            self.create_message_definition_from_tuple(msgid, msg_tuple)
            for msgid, msg_tuple in sorted(self.msgs.items())
        ]

    # dummy methods implementing the IChecker interface

    def get_message_definition(self, msgid):
        """Return the MessageDefinition for *msgid*.

        :raises InvalidMessageError: if this checker does not own *msgid*.
        """
        for message_definition in self.messages:
            if message_definition.msgid == msgid:
                return message_definition
        error_msg = "MessageDefinition for '{}' does not exists. ".format(msgid)
        error_msg += "Choose from {}.".format([m.msgid for m in self.messages])
        raise InvalidMessageError(error_msg)

    def open(self):
        """called before visiting project (i.e set of modules)"""

    def close(self):
        """called after visiting project (i.e set of modules)"""
+
+
class BaseTokenChecker(BaseChecker):
    """Base class for checkers that want to have access to the token stream."""

    def process_tokens(self, tokens):
        """Should be overridden by subclasses."""
        # Subclasses receive the tokenize stream of the module under analysis.
        raise NotImplementedError()
diff --git a/venv/Lib/site-packages/pylint/checkers/classes.py b/venv/Lib/site-packages/pylint/checkers/classes.py
new file mode 100644
index 0000000..9f5d099
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/classes.py
@@ -0,0 +1,1844 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2016 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2010 Maarten ter Huurne <maarten@treewalker.org>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2014 David Pursehouse <david.pursehouse@gmail.com>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2016 Anthony Foglia <afoglia@users.noreply.github.com>
+# Copyright (c) 2016 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Ben Green <benhgreen@icloud.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""classes checker for Python code
+"""
+import collections
+from itertools import chain, zip_longest
+
+import astroid
+from astroid import decorators, objects
+from astroid.bases import BUILTINS, Generator
+from astroid.exceptions import DuplicateBasesError, InconsistentMroError
+from astroid.scoped_nodes import function_to_method
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import (
+ PYMETHODS,
+ SPECIAL_METHODS_PARAMS,
+ check_messages,
+ class_is_abstract,
+ decorated_with,
+ decorated_with_property,
+ has_known_bases,
+ is_attr_private,
+ is_attr_protected,
+ is_builtin_object,
+ is_comprehension,
+ is_iterable,
+ is_property_setter,
+ is_property_setter_or_deleter,
+ is_protocol_class,
+ node_frame_class,
+ overrides_a_method,
+ safe_infer,
+ unimplemented_abstract_methods,
+)
+from pylint.interfaces import IAstroidChecker
+from pylint.utils import get_global_option
+
+NEXT_METHOD = "__next__"
+INVALID_BASE_CLASSES = {"bool", "range", "slice", "memoryview"}
+BUILTIN_DECORATORS = {"builtins.property", "builtins.classmethod"}
+
+# Dealing with useless override detection, with regard
+# to parameters vs arguments
+
+_CallSignature = collections.namedtuple(
+ "_CallSignature", "args kws starred_args starred_kws"
+)
+_ParameterSignature = collections.namedtuple(
+ "_ParameterSignature", "args kwonlyargs varargs kwargs"
+)
+
+
def _signature_from_call(call):
    """Extract a _CallSignature (args, kws, starred args/kws) from a Call node.

    Only plain names are recorded for values; anything else becomes None (or
    is ignored for starred forms), since a transformation might occur for the
    parameter otherwise.
    """
    positional = []
    keyword_map = {}
    starred_positional = []
    starred_keyword = []

    for keyword in call.keywords or []:
        name, value = keyword.arg, keyword.value
        value_is_name = isinstance(value, astroid.Name)
        if name is None and value_is_name:
            # **name forwarding.
            starred_keyword.append(value.name)
        else:
            keyword_map[name] = value.name if value_is_name else None

    for arg in call.args:
        if isinstance(arg, astroid.Starred) and isinstance(arg.value, astroid.Name):
            # *name forwarding.
            starred_positional.append(arg.value.name)
        else:
            positional.append(arg.name if isinstance(arg, astroid.Name) else None)

    return _CallSignature(positional, keyword_map, starred_positional, starred_keyword)
+
+
def _signature_from_arguments(arguments):
    """Extract a _ParameterSignature from an Arguments node, skipping 'self'."""
    positional = [param.name for param in arguments.args if param.name != "self"]
    keyword_only = [param.name for param in arguments.kwonlyargs]
    return _ParameterSignature(
        positional, keyword_only, arguments.vararg, arguments.kwarg
    )
+
+
+def _definition_equivalent_to_call(definition, call):
+ """Check if a definition signature is equivalent to a call."""
+ if definition.kwargs:
+ same_kw_variadics = definition.kwargs in call.starred_kws
+ else:
+ same_kw_variadics = not call.starred_kws
+ if definition.varargs:
+ same_args_variadics = definition.varargs in call.starred_args
+ else:
+ same_args_variadics = not call.starred_args
+ same_kwonlyargs = all(kw in call.kws for kw in definition.kwonlyargs)
+ same_args = definition.args == call.args
+
+ no_additional_kwarg_arguments = True
+ if call.kws:
+ for keyword in call.kws:
+ is_arg = keyword in call.args
+ is_kwonly = keyword in definition.kwonlyargs
+ if not is_arg and not is_kwonly:
+ # Maybe this argument goes into **kwargs,
+ # or it is an extraneous argument.
+ # In any case, the signature is different than
+ # the call site, which stops our search.
+ no_additional_kwarg_arguments = False
+ break
+
+ return all(
+ (
+ same_args,
+ same_kwonlyargs,
+ same_args_variadics,
+ same_kw_variadics,
+ no_additional_kwarg_arguments,
+ )
+ )
+
+
+# Deal with parameters overridding in two methods.
+
+
+def _positional_parameters(method):
+ positional = method.args.args
+ if method.type in ("classmethod", "method"):
+ positional = positional[1:]
+ return positional
+
+
+def _get_node_type(node, potential_types):
+ """
+ Return the type of the node if it exists in potential_types.
+
+ Args:
+ node (astroid.node): node to get the type of.
+ potential_types (tuple): potential types of the node.
+
+ Returns:
+ type: type of the node or None.
+ """
+ for potential_type in potential_types:
+ if isinstance(node, potential_type):
+ return potential_type
+ return None
+
+
+def _check_arg_equality(node_a, node_b, attr_name):
+ """
+ Check equality of nodes based on the comparison of their attributes named attr_name.
+
+ Args:
+ node_a (astroid.node): first node to compare.
+ node_b (astroid.node): second node to compare.
+ attr_name (str): name of the nodes attribute to use for comparison.
+
+ Returns:
+ bool: True if node_a.attr_name == node_b.attr_name, False otherwise.
+ """
+ return getattr(node_a, attr_name) == getattr(node_b, attr_name)
+
+
def _has_different_parameters_default_value(original, overridden):
    """
    Check if original and overridden methods arguments have different default values

    Return True if one of the overridden arguments has a default
    value different from the default value of the original argument
    If one of the method doesn't have argument (.args is None)
    return False

    Fix vs. original: the type->attribute map (and the tuple built from it)
    was recreated on every loop iteration although it is loop-invariant, and
    was built with a pointless identity generator; both are hoisted.
    """
    if original.args is None or overridden.args is None:
        return False

    all_args = chain(original.args, original.kwonlyargs)
    original_param_names = [param.name for param in all_args]
    default_missing = object()

    # Literal node types we know how to compare, mapped to the attribute
    # that carries their comparable payload.
    astroid_type_compared_attr = {
        astroid.Const: "value",
        astroid.ClassDef: "name",
        astroid.Tuple: "elts",
        astroid.List: "elts",
    }
    handled_types = tuple(astroid_type_compared_attr)

    for param_name in original_param_names:
        try:
            original_default = original.default_value(param_name)
        except astroid.exceptions.NoDefault:
            original_default = default_missing
        try:
            overridden_default = overridden.default_value(param_name)
        except astroid.exceptions.NoDefault:
            overridden_default = default_missing

        default_list = [
            arg == default_missing for arg in (original_default, overridden_default)
        ]
        if any(default_list) and not all(default_list):
            # Only one arg has no default value
            return True

        original_type = _get_node_type(original_default, handled_types)
        if original_type:
            # We handle only astroid types that are inside the dict astroid_type_compared_attr
            if not isinstance(overridden_default, original_type):
                # Two args with same name but different types
                return True
            if not _check_arg_equality(
                original_default,
                overridden_default,
                astroid_type_compared_attr[original_type],
            ):
                # Two args with same type but different values
                return True
    return False
+
+
+def _has_different_parameters(original, overridden, dummy_parameter_regex):
+ zipped = zip_longest(original, overridden)
+ for original_param, overridden_param in zipped:
+ params = (original_param, overridden_param)
+ if not all(params):
+ return True
+
+ names = [param.name for param in params]
+ if any(map(dummy_parameter_regex.match, names)):
+ continue
+ if original_param.name != overridden_param.name:
+ return True
+ return False
+
+
def _different_parameters(original, overridden, dummy_parameter_regex):
    """Determine if the two methods have different parameters

    They are considered to have different parameters if:

    * they have different positional parameters, including different names

    * one of the methods is having variadics, while the other is not

    * they have different keyword only parameters.

    """
    if original.name in PYMETHODS:
        # Ignore the difference for special methods. If the parameter
        # numbers are different, that is caught by
        # unexpected-special-method-signature, and since special methods
        # cannot be called with keyword arguments, name differences do
        # not matter.
        different_positional = different_kwonly = False
    else:
        different_positional = _has_different_parameters(
            _positional_parameters(original),
            _positional_parameters(overridden),
            dummy_parameter_regex,
        )
        different_kwonly = _has_different_parameters(
            original.args.kwonlyargs,
            overridden.args.kwonlyargs,
            dummy_parameter_regex,
        )

    # Both or none should have extra variadics, otherwise the method
    # loses or gains capabilities that are not reflected into the parent
    # method, leading to potential inconsistencies in the code.
    different_kwarg = (not original.args.kwarg) != (not overridden.args.kwarg)
    different_vararg = (not original.args.vararg) != (not overridden.args.vararg)

    return (
        different_positional
        or different_kwarg
        or different_vararg
        or different_kwonly
    )
+
+
def _is_invalid_base_class(cls):
    """True for builtin classes (bool, range, slice, memoryview) that cannot
    be safely subclassed."""
    return is_builtin_object(cls) and cls.name in INVALID_BASE_CLASSES
+
+
def _has_data_descriptor(cls, attr):
    """Return True when *attr* on *cls* may be a data descriptor (an instance
    providing both __get__ and __set__), or when inference fails."""
    for attribute in cls.getattr(attr):
        try:
            for inferred in attribute.infer():
                if not isinstance(inferred, astroid.Instance):
                    continue
                try:
                    inferred.getattr("__get__")
                    inferred.getattr("__set__")
                except astroid.NotFoundError:
                    continue
                return True
        except astroid.InferenceError:
            # Can't infer, avoid emitting a false positive in this case.
            return True
    return False
+
+
def _called_in_methods(func, klass, methods):
    """ Check if the func was called in any of the given methods,
    belonging to the *klass*. Returns True if so, False otherwise.
    """
    if not isinstance(func, astroid.FunctionDef):
        return False
    for method in methods:
        try:
            inferred = klass.getattr(method)
        except astroid.NotFoundError:
            # The class does not define this method at all: nothing to scan.
            continue
        for infer_method in inferred:
            for call in infer_method.nodes_of_class(astroid.Call):
                try:
                    bound = next(call.func.infer())
                except (astroid.InferenceError, StopIteration):
                    continue
                if not isinstance(bound, astroid.BoundMethod):
                    continue
                # NOTE(review): relies on astroid's private _proxied attribute
                # to reach the function behind the bound/unbound method.
                func_obj = bound._proxied
                if isinstance(func_obj, astroid.UnboundMethod):
                    func_obj = func_obj._proxied
                # Matching by name only; presumably sufficient within a
                # single class hierarchy -- TODO confirm.
                if func_obj.name == func.name:
                    return True
    return False
+
+
def _is_attribute_property(name, klass):
    """ Check if the given attribute *name* is a property
    in the given *klass*.

    It will look for `property` calls or for functions
    with the given name, decorated by `property` or `property`
    subclasses.
    Returns ``True`` if the name is a property in the given klass,
    ``False`` otherwise.
    """
    try:
        attributes = klass.getattr(name)
    except astroid.NotFoundError:
        return False
    property_name = "{}.property".format(BUILTINS)
    for attr in attributes:
        if attr is astroid.Uninferable:
            continue
        try:
            inferred = next(attr.infer())
        except astroid.InferenceError:
            continue
        is_decorated_function = isinstance(
            inferred, astroid.FunctionDef
        ) and decorated_with_property(inferred)
        if is_decorated_function or inferred.pytype() == property_name:
            return True
    return False
+
+
def _has_bare_super_call(fundef_node):
    """Return True if the function body contains a ``super()`` call with no
    arguments."""
    return any(
        isinstance(call.func, astroid.Name)
        and call.func.name == "super"
        and not call.args
        for call in fundef_node.nodes_of_class(astroid.Call)
    )
+
+
def _safe_infer_call_result(node, caller, context=None):
    """
    Safely infer the return value of a function.

    Returns None if inference failed or if there is some ambiguity (more than
    one node has been inferred). Otherwise returns inferred value.
    """
    try:
        inferit = node.infer_call_result(caller, context=context)
        value = next(inferit)
    except astroid.InferenceError:
        return None  # inference failed
    except StopIteration:
        return None  # no values inferred
    # Pull a second result: only a single, unambiguous inference is accepted.
    try:
        next(inferit)
        return None  # there is ambiguity on the inferred node
    except astroid.InferenceError:
        return None  # there is some kind of ambiguity
    except StopIteration:
        return value
+
+
def _has_same_layout_slots(slots, assigned_value):
    """Return True if *assigned_value* infers to a class whose ``__slots__``
    layout matches *slots* (same names, same order, same length).

    Used to allow ``instance.__class__`` reassignment between classes that
    share an identical slots layout.
    """
    try:
        inferred = next(assigned_value.infer())
    except (astroid.InferenceError, StopIteration):
        # The original let inference failures propagate and crash the
        # checker; an uninferable value cannot be proven layout-compatible.
        return False
    if isinstance(inferred, astroid.ClassDef):
        other_slots = inferred.slots()
        # slots() returns None when the class declares no __slots__ at all;
        # zip_longest over None would raise TypeError, and "no slots" cannot
        # match anyway.
        if other_slots is not None and all(
            first_slot and second_slot and first_slot.value == second_slot.value
            for (first_slot, second_slot) in zip_longest(slots, other_slots)
        ):
            return True
    return False
+
+
# ClassChecker message table: msgid -> (format string, symbolic name, help).
# The %-placeholders in the format string are filled from the ``args`` passed
# to ``add_message``.  Prefix letters: F fatal, E error, W warning,
# C convention, R refactoring.
MSGS = {
    "F0202": (
        "Unable to check methods signature (%s / %s)",
        "method-check-failed",
        "Used when Pylint has been unable to check methods signature "
        "compatibility for an unexpected reason. Please report this kind "
        "if you don't make sense of it.",
    ),
    "E0202": (
        "An attribute defined in %s line %s hides this method",
        "method-hidden",
        "Used when a class defines a method which is hidden by an "
        "instance attribute from an ancestor class or set by some "
        "client code.",
    ),
    "E0203": (
        "Access to member %r before its definition line %s",
        "access-member-before-definition",
        "Used when an instance member is accessed before it's actually assigned.",
    ),
    "W0201": (
        "Attribute %r defined outside __init__",
        "attribute-defined-outside-init",
        "Used when an instance attribute is defined outside the __init__ method.",
    ),
    "W0212": (
        "Access to a protected member %s of a client class",  # E0214
        "protected-access",
        "Used when a protected member (i.e. class member with a name "
        "beginning with an underscore) is access outside the class or a "
        "descendant of the class where it's defined.",
    ),
    "E0211": (
        "Method has no argument",
        "no-method-argument",
        "Used when a method which should have the bound instance as "
        "first argument has no argument defined.",
    ),
    "E0213": (
        'Method should have "self" as first argument',
        "no-self-argument",
        'Used when a method has an attribute different the "self" as '
        "first argument. This is considered as an error since this is "
        "a so common convention that you shouldn't break it!",
    ),
    "C0202": (
        "Class method %s should have %s as first argument",
        "bad-classmethod-argument",
        "Used when a class method has a first argument named differently "
        "than the value specified in valid-classmethod-first-arg option "
        '(default to "cls"), recommended to easily differentiate them '
        "from regular instance methods.",
    ),
    "C0203": (
        "Metaclass method %s should have %s as first argument",
        "bad-mcs-method-argument",
        "Used when a metaclass method has a first argument named "
        "differently than the value specified in valid-classmethod-first"
        '-arg option (default to "cls"), recommended to easily '
        "differentiate them from regular instance methods.",
    ),
    "C0204": (
        "Metaclass class method %s should have %s as first argument",
        "bad-mcs-classmethod-argument",
        "Used when a metaclass class method has a first argument named "
        "differently than the value specified in valid-metaclass-"
        'classmethod-first-arg option (default to "mcs"), recommended to '
        "easily differentiate them from regular instance methods.",
    ),
    "W0211": (
        "Static method with %r as first argument",
        "bad-staticmethod-argument",
        'Used when a static method has "self" or a value specified in '
        "valid-classmethod-first-arg option or "
        "valid-metaclass-classmethod-first-arg option as first argument.",
    ),
    "R0201": (
        "Method could be a function",
        "no-self-use",
        "Used when a method doesn't use its bound instance, and so could "
        "be written as a function.",
    ),
    "W0221": (
        "Parameters differ from %s %r method",
        "arguments-differ",
        "Used when a method has a different number of arguments than in "
        "the implemented interface or in an overridden method.",
    ),
    "W0222": (
        "Signature differs from %s %r method",
        "signature-differs",
        "Used when a method signature is different than in the "
        "implemented interface or in an overridden method.",
    ),
    "W0223": (
        "Method %r is abstract in class %r but is not overridden",
        "abstract-method",
        "Used when an abstract method (i.e. raise NotImplementedError) is "
        "not overridden in concrete class.",
    ),
    "W0231": (
        "__init__ method from base class %r is not called",
        "super-init-not-called",
        "Used when an ancestor class method has an __init__ method "
        "which is not called by a derived class.",
    ),
    "W0232": (
        "Class has no __init__ method",
        "no-init",
        "Used when a class has no __init__ method, neither its parent classes.",
    ),
    "W0233": (
        "__init__ method from a non direct base class %r is called",
        "non-parent-init-called",
        "Used when an __init__ method is called on a class which is not "
        "in the direct ancestors for the analysed class.",
    ),
    "W0235": (
        "Useless super delegation in method %r",
        "useless-super-delegation",
        "Used whenever we can detect that an overridden method is useless, "
        "relying on super() delegation to do the same thing as another method "
        "from the MRO.",
    ),
    "W0236": (
        "Method %r was expected to be %r, found it instead as %r",
        "invalid-overridden-method",
        "Used when we detect that a method was overridden as a property "
        "or the other way around, which could result in potential bugs at "
        "runtime.",
    ),
    "E0236": (
        "Invalid object %r in __slots__, must contain only non empty strings",
        "invalid-slots-object",
        "Used when an invalid (non-string) object occurs in __slots__.",
    ),
    "E0237": (
        "Assigning to attribute %r not defined in class slots",
        "assigning-non-slot",
        "Used when assigning to an attribute not defined in the class slots.",
    ),
    "E0238": (
        "Invalid __slots__ object",
        "invalid-slots",
        "Used when an invalid __slots__ is found in class. "
        "Only a string, an iterable or a sequence is permitted.",
    ),
    "E0239": (
        "Inheriting %r, which is not a class.",
        "inherit-non-class",
        "Used when a class inherits from something which is not a class.",
    ),
    "E0240": (
        "Inconsistent method resolution order for class %r",
        "inconsistent-mro",
        "Used when a class has an inconsistent method resolution order.",
    ),
    "E0241": (
        "Duplicate bases for class %r",
        "duplicate-bases",
        "Used when a class has duplicate bases.",
    ),
    "E0242": (
        "Value %r in slots conflicts with class variable",
        "class-variable-slots-conflict",
        "Used when a value in __slots__ conflicts with a class variable, property or method.",
    ),
    "R0202": (
        "Consider using a decorator instead of calling classmethod",
        "no-classmethod-decorator",
        "Used when a class method is defined without using the decorator syntax.",
    ),
    "R0203": (
        "Consider using a decorator instead of calling staticmethod",
        "no-staticmethod-decorator",
        "Used when a static method is defined without using the decorator syntax.",
    ),
    "C0205": (
        "Class __slots__ should be a non-string iterable",
        "single-string-used-for-slots",
        "Used when a class __slots__ is a simple string, rather than an iterable.",
    ),
    "R0205": (
        "Class %r inherits from object, can be safely removed from bases in python3",
        "useless-object-inheritance",
        "Used when a class inherit from object, which under python3 is implicit, "
        "hence can be safely removed from bases.",
    ),
    "R0206": (
        "Cannot have defined parameters for properties",
        "property-with-parameters",
        "Used when we detect that a property also has parameters, which are useless, "
        "given that properties cannot be called with additional arguments.",
    ),
}
+
+
class ScopeAccessMap:
    """Store the accessed variables per scope."""

    def __init__(self):
        # Maps scope (class node) -> attribute name -> list of access nodes.
        self._scopes = collections.defaultdict(lambda: collections.defaultdict(list))

    def set_accessed(self, node):
        """Record *node* as an attribute access within its class frame."""
        owner = node_frame_class(node)
        if owner is None:
            # The access does not happen inside any class: nothing to record.
            return
        self._scopes[owner][node.attrname].append(node)

    def accessed(self, scope):
        """Return the name -> access-nodes mapping recorded for *scope*."""
        return self._scopes.get(scope, {})
+
+
class ClassChecker(BaseChecker):
    """checks for :
    * methods without self as first argument
    * overridden methods signature
    * access only to existent members via self
    * attributes not defined in the __init__ method
    * unreachable code
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "classes"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = (
        (
            # Methods allowed to define instance attributes without
            # triggering attribute-defined-outside-init.
            "defining-attr-methods",
            {
                "default": ("__init__", "__new__", "setUp", "__post_init__"),
                "type": "csv",
                "metavar": "<method names>",
                "help": "List of method names used to declare (i.e. assign) \
instance attributes.",
            },
        ),
        (
            # Accepted first-argument names for classmethods (default "cls").
            "valid-classmethod-first-arg",
            {
                "default": ("cls",),
                "type": "csv",
                "metavar": "<argument names>",
                "help": "List of valid names for the first argument in \
a class method.",
            },
        ),
        (
            # Accepted first-argument names for classmethods of metaclasses.
            "valid-metaclass-classmethod-first-arg",
            {
                "default": ("cls",),
                "type": "csv",
                "metavar": "<argument names>",
                "help": "List of valid names for the first argument in \
a metaclass class method.",
            },
        ),
        (
            # Underscore-prefixed names exempt from protected-access.
            "exclude-protected",
            {
                "default": (
                    # namedtuple public API.
                    "_asdict",
                    "_fields",
                    "_replace",
                    "_source",
                    "_make",
                ),
                "type": "csv",
                "metavar": "<protected access exclusions>",
                "help": (
                    "List of member names, which should be excluded "
                    "from the protected access warning."
                ),
            },
        ),
    )
+
    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Attribute accesses recorded per class scope; filled by
        # visit_attribute/visit_assignattr, consumed by leave_classdef.
        self._accessed = ScopeAccessMap()
        # Stack of first-argument names ("self"/"cls"/None) for the methods
        # currently being visited; read by visit_name and leave_functiondef.
        self._first_attrs = []
        # Set to False as soon as the current method uses its first argument,
        # i.e. it cannot be turned into a plain function (no-self-use).
        self._meth_could_be_func = None
+
    @decorators.cachedproperty
    def _dummy_rgx(self):
        # Regex for deliberately-unused ("dummy") variable names, read once
        # from the global "dummy-variables-rgx" option.
        return get_global_option(self, "dummy-variables-rgx", default=None)
+
    @decorators.cachedproperty
    def _ignore_mixin(self):
        # Whether member checks are skipped for *Mixin classes, read once
        # from the global "ignore-mixin-members" option.
        return get_global_option(self, "ignore-mixin-members", default=True)
+
    @check_messages(
        "abstract-method",
        "no-init",
        "invalid-slots",
        "single-string-used-for-slots",
        "invalid-slots-object",
        "class-variable-slots-conflict",
        "inherit-non-class",
        "useless-object-inheritance",
        "inconsistent-mro",
        "duplicate-bases",
    )
    def visit_classdef(self, node):
        """init visit variable _accessed

        Entry point for class-level checks: non-overridden abstract methods,
        missing __init__, __slots__ validity, improper bases and MRO issues.
        """
        self._check_bases_classes(node)
        # if not an exception or a metaclass
        if node.type == "class" and has_known_bases(node):
            try:
                # local_attr raises NotFoundError when neither this class
                # nor its ancestors define __init__.
                node.local_attr("__init__")
            except astroid.NotFoundError:
                self.add_message("no-init", args=node, node=node)
            self._check_slots(node)
        self._check_proper_bases(node)
        self._check_consistent_mro(node)
+
+ def _check_consistent_mro(self, node):
+ """Detect that a class has a consistent mro or duplicate bases."""
+ try:
+ node.mro()
+ except InconsistentMroError:
+ self.add_message("inconsistent-mro", args=node.name, node=node)
+ except DuplicateBasesError:
+ self.add_message("duplicate-bases", args=node.name, node=node)
+ except NotImplementedError:
+ # Old style class, there's no mro so don't do anything.
+ pass
+
+ def _check_proper_bases(self, node):
+ """
+ Detect that a class inherits something which is not
+ a class or a type.
+ """
+ for base in node.bases:
+ ancestor = safe_infer(base)
+ if ancestor in (astroid.Uninferable, None):
+ continue
+ if isinstance(ancestor, astroid.Instance) and ancestor.is_subtype_of(
+ "%s.type" % (BUILTINS,)
+ ):
+ continue
+
+ if not isinstance(ancestor, astroid.ClassDef) or _is_invalid_base_class(
+ ancestor
+ ):
+ self.add_message("inherit-non-class", args=base.as_string(), node=node)
+
+ if ancestor.name == object.__name__:
+ self.add_message(
+ "useless-object-inheritance", args=node.name, node=node
+ )
+
    def leave_classdef(self, cnode):
        """close a class node:
        check that instance attributes are defined in __init__ and check
        access to existent members
        """
        # check access to existent members on non metaclass classes
        if self._ignore_mixin and cnode.name[-5:].lower() == "mixin":
            # We are in a mixin class. No need to try to figure out if
            # something is missing, since it is most likely that it will
            # miss.
            return

        accessed = self._accessed.accessed(cnode)
        if cnode.type != "metaclass":
            self._check_accessed_members(cnode, accessed)
        # checks attributes are defined in an allowed method such as __init__
        if not self.linter.is_message_enabled("attribute-defined-outside-init"):
            return
        defining_methods = self.config.defining_attr_methods
        current_module = cnode.root()
        for attr, nodes in cnode.instance_attrs.items():
            # Exclude `__dict__` as it is already defined.
            if attr == "__dict__":
                continue

            # Skip nodes which are not in the current module and it may screw up
            # the output, while it's not worth it
            nodes = [
                n
                for n in nodes
                if not isinstance(n.statement(), (astroid.Delete, astroid.AugAssign))
                and n.root() is current_module
            ]
            if not nodes:
                continue  # error detected by typechecking

            # Check if any method attr is defined in is a defining method
            # or if we have the attribute defined in a setter.
            frames = (node.frame() for node in nodes)
            if any(
                frame.name in defining_methods or is_property_setter(frame)
                for frame in frames
            ):
                continue

            # check attribute is defined in a parent's __init__
            for parent in cnode.instance_attr_ancestors(attr):
                attr_defined = False
                # check if any parent method attr is defined in is a defining method
                for node in parent.instance_attrs[attr]:
                    if node.frame().name in defining_methods:
                        attr_defined = True
                if attr_defined:
                    # we're done :)
                    break
            else:
                # for/else: no ancestor defines the attribute in an allowed
                # method, so fall back to checking this class itself.
                # check attribute is defined as a class attribute
                try:
                    cnode.local_attr(attr)
                except astroid.NotFoundError:
                    for node in nodes:
                        if node.frame().name not in defining_methods:
                            # If the attribute was set by a call in any
                            # of the defining methods, then don't emit
                            # the warning.
                            if _called_in_methods(
                                node.frame(), cnode, defining_methods
                            ):
                                continue
                            self.add_message(
                                "attribute-defined-outside-init", args=attr, node=node
                            )
+
    def visit_functiondef(self, node):
        """check method arguments, overriding

        Also checks useless super() delegation, properties with parameters,
        signature compatibility with overridden methods, and methods hidden
        by instance attributes.
        """
        # ignore actual functions
        if not node.is_method():
            return

        self._check_useless_super_delegation(node)
        self._check_property_with_parameters(node)

        klass = node.parent.frame()
        self._meth_could_be_func = True
        # check first argument is self if this is actually a method
        self._check_first_arg_for_type(node, klass.type == "metaclass")
        if node.name == "__init__":
            self._check_init(node)
            return
        # check signature if the method overloads inherited method
        for overridden in klass.local_attr_ancestors(node.name):
            # get astroid for the searched method
            try:
                parent_function = overridden[node.name]
            except KeyError:
                # we have found the method but it's not in the local
                # dictionary.
                # This may happen with astroid build from living objects
                continue
            if not isinstance(parent_function, astroid.FunctionDef):
                continue
            self._check_signature(node, parent_function, "overridden", klass)
            self._check_invalid_overridden_method(node, parent_function)
            # Only the closest ancestor definition matters.
            break

        if node.decorators:
            for decorator in node.decorators.nodes:
                if isinstance(decorator, astroid.Attribute) and decorator.attrname in (
                    "getter",
                    "setter",
                    "deleter",
                ):
                    # attribute affectation will call this method, not hiding it
                    return
                if isinstance(decorator, astroid.Name):
                    if decorator.name == "property":
                        # attribute affectation will either call a setter or raise
                        # an attribute error, anyway not hiding the function
                        return

                # Infer the decorator and see if it returns something useful
                inferred = safe_infer(decorator)
                if not inferred:
                    return
                if isinstance(inferred, astroid.FunctionDef):
                    # Okay, it's a decorator, let's see what it can infer.
                    try:
                        inferred = next(inferred.infer_call_result(inferred))
                    except astroid.InferenceError:
                        return
                try:
                    # Data descriptors (__get__ and __set__) wrap the method,
                    # so the attribute does not hide it.
                    if (
                        isinstance(inferred, (astroid.Instance, astroid.ClassDef))
                        and inferred.getattr("__get__")
                        and inferred.getattr("__set__")
                    ):
                        return
                except astroid.AttributeInferenceError:
                    pass

        # check if the method is hidden by an attribute
        try:
            overridden = klass.instance_attr(node.name)[0]
            overridden_frame = overridden.frame()
            if (
                isinstance(overridden_frame, astroid.FunctionDef)
                and overridden_frame.type == "method"
            ):
                overridden_frame = overridden_frame.parent.frame()
            if isinstance(overridden_frame, astroid.ClassDef) and klass.is_subtype_of(
                overridden_frame.qname()
            ):
                args = (overridden.root().name, overridden.fromlineno)
                self.add_message("method-hidden", args=args, node=node)
        except astroid.NotFoundError:
            pass

    # Async methods get the same treatment as regular methods.
    visit_asyncfunctiondef = visit_functiondef
+
    def _check_useless_super_delegation(self, function):
        """Check if the given function node is an useless method override

        We consider it *useless* if it uses the super() builtin, but having
        nothing additional whatsoever than not implementing the method at all.
        If the method uses super() to delegate an operation to the rest of the MRO,
        and if the method called is the same as the current one, the arguments
        passed to super() are the same as the parameters that were passed to
        this method, then the method could be removed altogether, by letting
        other implementation to take precedence.
        """

        if (
            not function.is_method()
            # With decorators is a change of use
            or function.decorators
        ):
            return

        body = function.body
        if len(body) != 1:
            # Multiple statements, which means this overridden method
            # could do multiple things we are not aware of.
            return

        statement = body[0]
        if not isinstance(statement, (astroid.Expr, astroid.Return)):
            # Doing something else than what we are interested into.
            return

        call = statement.value
        if (
            not isinstance(call, astroid.Call)
            # Not a super() attribute access.
            or not isinstance(call.func, astroid.Attribute)
        ):
            return

        # Should be a super call.
        try:
            super_call = next(call.func.expr.infer())
        except astroid.InferenceError:
            return
        else:
            if not isinstance(super_call, objects.Super):
                return

        # The name should be the same.
        if call.func.attrname != function.name:
            return

        # Should be a super call with the MRO pointer being the
        # current class and the type being the current instance.
        current_scope = function.parent.scope()
        if (
            super_call.mro_pointer != current_scope
            or not isinstance(super_call.type, astroid.Instance)
            or super_call.type.name != current_scope.name
        ):
            return

        #  Check values of default args
        klass = function.parent.frame()
        meth_node = None
        for overridden in klass.local_attr_ancestors(function.name):
            # get astroid for the searched method
            try:
                meth_node = overridden[function.name]
            except KeyError:
                # we have found the method but it's not in the local
                # dictionary.
                # This may happen with astroid build from living objects
                continue
            if (
                not isinstance(meth_node, astroid.FunctionDef)
                # If the method have an ancestor which is not a
                # function then it is legitimate to redefine it
                or _has_different_parameters_default_value(
                    meth_node.args, function.args
                )
            ):
                return
            break

        # Detect if the parameters are the same as the call's arguments.
        params = _signature_from_arguments(function.args)
        args = _signature_from_call(call)

        if meth_node is not None:
            # Diverging type annotations mean the override adds information
            # and is therefore not useless.
            def form_annotations(annotations):
                return [
                    annotation.as_string() for annotation in filter(None, annotations)
                ]

            called_annotations = form_annotations(function.args.annotations)
            overridden_annotations = form_annotations(meth_node.args.annotations)
            if called_annotations and overridden_annotations:
                if called_annotations != overridden_annotations:
                    return

        if _definition_equivalent_to_call(params, args):
            self.add_message(
                "useless-super-delegation", node=function, args=(function.name,)
            )
+
+ def _check_property_with_parameters(self, node):
+ if node.args.args and len(node.args.args) > 1 and decorated_with_property(node):
+ self.add_message("property-with-parameters", node=node)
+
+ def _check_invalid_overridden_method(self, function_node, parent_function_node):
+ parent_is_property = decorated_with_property(
+ parent_function_node
+ ) or is_property_setter_or_deleter(parent_function_node)
+ current_is_property = decorated_with_property(
+ function_node
+ ) or is_property_setter_or_deleter(function_node)
+ if parent_is_property and not current_is_property:
+ self.add_message(
+ "invalid-overridden-method",
+ args=(function_node.name, "property", function_node.type),
+ node=function_node,
+ )
+ elif not parent_is_property and current_is_property:
+ self.add_message(
+ "invalid-overridden-method",
+ args=(function_node.name, "method", "property"),
+ node=function_node,
+ )
+
    def _check_slots(self, node):
        """Validate every inferred value of the class ``__slots__``."""
        if "__slots__" not in node.locals:
            return
        for slots in node.igetattr("__slots__"):
            # check if __slots__ is a valid type
            if slots is astroid.Uninferable:
                continue
            if not is_iterable(slots) and not is_comprehension(slots):
                self.add_message("invalid-slots", node=node)
                continue

            if isinstance(slots, astroid.Const):
                # a string, ignore the following checks
                self.add_message("single-string-used-for-slots", node=node)
                continue
            if not hasattr(slots, "itered"):
                # we can't obtain the values, maybe a .deque?
                continue

            if isinstance(slots, astroid.Dict):
                # dict __slots__: the keys are the slot names.
                values = [item[0] for item in slots.items]
            else:
                values = slots.itered()
            if values is astroid.Uninferable:
                return
            for elt in values:
                try:
                    self._check_slots_elt(elt, node)
                except astroid.InferenceError:
                    continue
+
    def _check_slots_elt(self, elt, node):
        """Check a single __slots__ element: it must infer to a non-empty
        string and must not shadow an existing class variable."""
        for inferred in elt.infer():
            if inferred is astroid.Uninferable:
                continue
            if not isinstance(inferred, astroid.Const) or not isinstance(
                inferred.value, str
            ):
                self.add_message(
                    "invalid-slots-object", args=inferred.as_string(), node=elt
                )
                continue
            if not inferred.value:
                # Empty string is not a valid slot name.
                self.add_message(
                    "invalid-slots-object", args=inferred.as_string(), node=elt
                )

            # Check if we have a conflict with a class variable.
            class_variable = node.locals.get(inferred.value)
            if class_variable:
                # Skip annotated assignments which don't conflict at all with slots.
                if len(class_variable) == 1:
                    parent = class_variable[0].parent
                    if isinstance(parent, astroid.AnnAssign) and parent.value is None:
                        return
                self.add_message(
                    "class-variable-slots-conflict", args=(inferred.value,), node=elt
                )
+
    def leave_functiondef(self, node):
        """on method node, check if this method couldn't be a function

        ignore class, static and abstract methods, initializer,
        methods overridden from a parent class.
        """
        if node.is_method():
            if node.args.args is not None:
                # Pop the first-argument name pushed by
                # _check_first_arg_for_type for this method.
                self._first_attrs.pop()
            if not self.linter.is_message_enabled("no-self-use"):
                return
            class_node = node.parent.frame()
            # _meth_could_be_func was invalidated by visit_name if the method
            # ever referenced its first argument.
            if (
                self._meth_could_be_func
                and node.type == "method"
                and node.name not in PYMETHODS
                and not (
                    node.is_abstract()
                    or overrides_a_method(class_node, node.name)
                    or decorated_with_property(node)
                    or _has_bare_super_call(node)
                    or is_protocol_class(class_node)
                )
            ):
                self.add_message("no-self-use", node=node)
+
+ def visit_attribute(self, node):
+ """check if the getattr is an access to a class member
+ if so, register it. Also check for access to protected
+ class member from outside its class (but ignore __special__
+ methods)
+ """
+ # Check self
+ if self._uses_mandatory_method_param(node):
+ self._accessed.set_accessed(node)
+ return
+ if not self.linter.is_message_enabled("protected-access"):
+ return
+
+ self._check_protected_attribute_access(node)
+
+ def visit_assignattr(self, node):
+ if isinstance(
+ node.assign_type(), astroid.AugAssign
+ ) and self._uses_mandatory_method_param(node):
+ self._accessed.set_accessed(node)
+ self._check_in_slots(node)
+
    def _check_in_slots(self, node):
        """ Check that the given AssignAttr node
        is defined in the class slots.
        """
        inferred = safe_infer(node.expr)
        if not isinstance(inferred, astroid.Instance):
            return

        klass = inferred._proxied
        if not has_known_bases(klass):
            return
        if "__slots__" not in klass.locals or not klass.newstyle:
            return

        slots = klass.slots()
        if slots is None:
            return
        # If any ancestor doesn't use slots, the slots
        # defined for this class are superfluous.
        if any(
            "__slots__" not in ancestor.locals and ancestor.name != "object"
            for ancestor in klass.ancestors()
        ):
            return

        if not any(slot.value == node.attrname for slot in slots):
            # If we have a '__dict__' in slots, then
            # assigning any name is valid.
            if not any(slot.value == "__dict__" for slot in slots):
                if _is_attribute_property(node.attrname, klass):
                    # Properties circumvent the slots mechanism,
                    # so we should not emit a warning for them.
                    return
                if node.attrname in klass.locals and _has_data_descriptor(
                    klass, node.attrname
                ):
                    # Descriptors circumvent the slots mechanism as well.
                    return
                # Reassigning __class__ is fine if both classes share the
                # exact same slots layout.
                if node.attrname == "__class__" and _has_same_layout_slots(
                    slots, node.parent.value
                ):
                    return
                self.add_message("assigning-non-slot", args=(node.attrname,), node=node)
+
+ @check_messages(
+ "protected-access", "no-classmethod-decorator", "no-staticmethod-decorator"
+ )
+ def visit_assign(self, assign_node):
+ self._check_classmethod_declaration(assign_node)
+ node = assign_node.targets[0]
+ if not isinstance(node, astroid.AssignAttr):
+ return
+
+ if self._uses_mandatory_method_param(node):
+ return
+ self._check_protected_attribute_access(node)
+
+ def _check_classmethod_declaration(self, node):
+ """Checks for uses of classmethod() or staticmethod()
+
+ When a @classmethod or @staticmethod decorator should be used instead.
+ A message will be emitted only if the assignment is at a class scope
+ and only if the classmethod's argument belongs to the class where it
+ is defined.
+ `node` is an assign node.
+ """
+ if not isinstance(node.value, astroid.Call):
+ return
+
+ # check the function called is "classmethod" or "staticmethod"
+ func = node.value.func
+ if not isinstance(func, astroid.Name) or func.name not in (
+ "classmethod",
+ "staticmethod",
+ ):
+ return
+
+ msg = (
+ "no-classmethod-decorator"
+ if func.name == "classmethod"
+ else "no-staticmethod-decorator"
+ )
+ # assignment must be at a class scope
+ parent_class = node.scope()
+ if not isinstance(parent_class, astroid.ClassDef):
+ return
+
+ # Check if the arg passed to classmethod is a class member
+ classmeth_arg = node.value.args[0]
+ if not isinstance(classmeth_arg, astroid.Name):
+ return
+
+ method_name = classmeth_arg.name
+ if any(method_name == member.name for member in parent_class.mymethods()):
+ self.add_message(msg, node=node.targets[0])
+
    def _check_protected_attribute_access(self, node):
        """Given an attribute access node (set or get), check if attribute
        access is legitimate. Call _check_first_attr with node before calling
        this method. Valid cases are:
        * self._attr in a method or cls._attr in a classmethod. Checked by
        _check_first_attr.
        * Klass._attr inside "Klass" class.
        * Klass2._attr inside "Klass" class when Klass2 is a base class of
        Klass.
        """
        attrname = node.attrname

        if (
            is_attr_protected(attrname)
            and attrname not in self.config.exclude_protected
        ):

            klass = node_frame_class(node)

            # In classes, check we are not getting a parent method
            # through the class object or through super
            callee = node.expr.as_string()

            # We are not in a class, no remaining valid case
            if klass is None:
                self.add_message("protected-access", node=node, args=attrname)
                return

            # If the expression begins with a call to super, that's ok.
            if (
                isinstance(node.expr, astroid.Call)
                and isinstance(node.expr.func, astroid.Name)
                and node.expr.func.name == "super"
            ):
                return

            # If the expression begins with a call to type(self), that's ok.
            if self._is_type_self_call(node.expr):
                return

            # We are in a class, one remaining valid cases, Klass._attr inside
            # Klass
            if not (callee == klass.name or callee in klass.basenames):
                # Detect property assignments in the body of the class.
                # This is acceptable:
                #
                # class A:
                #     b = property(lambda: self._b)

                stmt = node.parent.statement()
                if (
                    isinstance(stmt, astroid.Assign)
                    and len(stmt.targets) == 1
                    and isinstance(stmt.targets[0], astroid.AssignName)
                ):
                    name = stmt.targets[0].name
                    if _is_attribute_property(name, klass):
                        return

                #  A licit use of protected member is inside a special method
                # (but not for dunder-named attributes themselves).
                if not attrname.startswith(
                    "__"
                ) and self._is_called_inside_special_method(node):
                    return

                self.add_message("protected-access", node=node, args=attrname)
+
+ @staticmethod
+ def _is_called_inside_special_method(node: astroid.node_classes.NodeNG) -> bool:
+ """
+ Returns true if the node is located inside a special (aka dunder) method
+ """
+ try:
+ frame_name = node.frame().name
+ except AttributeError:
+ return False
+ return frame_name and frame_name in PYMETHODS
+
+ def _is_type_self_call(self, expr):
+ return (
+ isinstance(expr, astroid.Call)
+ and isinstance(expr.func, astroid.Name)
+ and expr.func.name == "type"
+ and len(expr.args) == 1
+ and self._is_mandatory_method_param(expr.args[0])
+ )
+
+ def visit_name(self, node):
+ """check if the name handle an access to a class member
+ if so, register it
+ """
+ if self._first_attrs and (
+ node.name == self._first_attrs[-1] or not self._first_attrs[-1]
+ ):
+ self._meth_could_be_func = False
+
    def _check_accessed_members(self, node, accessed):
        """check that accessed members are defined"""
        # Exceptions under which access-before-definition is excused when the
        # access and the definition are in mutually exclusive branches.
        excs = ("AttributeError", "Exception", "BaseException")
        for attr, nodes in accessed.items():
            try:
                # is it a class attribute ?
                node.local_attr(attr)
                # yes, stop here
                continue
            except astroid.NotFoundError:
                pass
            # is it an instance attribute of a parent class ?
            try:
                next(node.instance_attr_ancestors(attr))
                # yes, stop here
                continue
            except StopIteration:
                pass
            # is it an instance attribute ?
            try:
                defstmts = node.instance_attr(attr)
            except astroid.NotFoundError:
                pass
            else:
                # filter out augment assignment nodes
                defstmts = [stmt for stmt in defstmts if stmt not in nodes]
                if not defstmts:
                    # only augment assignment for this node, no-member should be
                    # triggered by the typecheck checker
                    continue
                # filter defstmts to only pick the first one when there are
                # several assignments in the same scope
                scope = defstmts[0].scope()
                defstmts = [
                    stmt
                    for i, stmt in enumerate(defstmts)
                    if i == 0 or stmt.scope() is not scope
                ]
                # if there are still more than one, don't attempt to be smarter
                # than we can be
                if len(defstmts) == 1:
                    defstmt = defstmts[0]
                    # check that if the node is accessed in the same method as
                    # it's defined, it's accessed after the initial assignment
                    frame = defstmt.frame()
                    lno = defstmt.fromlineno
                    for _node in nodes:
                        if (
                            _node.frame() is frame
                            and _node.fromlineno < lno
                            and not astroid.are_exclusive(
                                _node.statement(), defstmt, excs
                            )
                        ):
                            self.add_message(
                                "access-member-before-definition",
                                node=_node,
                                args=(attr, lno),
                            )
+
    def _check_first_arg_for_type(self, node, metaclass=0):
        """check the name of first argument, expect:

        * 'self' for a regular method
        * 'cls' for a class method or a metaclass regular method (actually
          valid-classmethod-first-arg value)
        * 'mcs' for a metaclass class method (actually
          valid-metaclass-classmethod-first-arg)
        * not one of the above for a static method

        Also pushes the first-argument name on self._first_attrs, which
        leave_functiondef pops and visit_name consults.
        """
        # don't care about functions with unknown argument (builtins)
        if node.args.args is None:
            return
        if node.args.args:
            first_arg = node.argnames()[0]
        elif node.args.posonlyargs:
            first_arg = node.args.posonlyargs[0].name
        else:
            first_arg = None
        self._first_attrs.append(first_arg)
        first = self._first_attrs[-1]
        # static method
        if node.type == "staticmethod":
            if (
                first_arg == "self"
                or first_arg in self.config.valid_classmethod_first_arg
                or first_arg in self.config.valid_metaclass_classmethod_first_arg
            ):
                self.add_message("bad-staticmethod-argument", args=first, node=node)
                return
            # Static methods have no bound first argument; neutralize the
            # entry so visit_name never matches it.
            self._first_attrs[-1] = None
        # class / regular method with no args
        elif not node.args.args and not node.args.posonlyargs:
            self.add_message("no-method-argument", node=node)
        # metaclass
        elif metaclass:
            # metaclass __new__ or classmethod
            if node.type == "classmethod":
                self._check_first_arg_config(
                    first,
                    self.config.valid_metaclass_classmethod_first_arg,
                    node,
                    "bad-mcs-classmethod-argument",
                    node.name,
                )
            # metaclass regular method
            else:
                self._check_first_arg_config(
                    first,
                    self.config.valid_classmethod_first_arg,
                    node,
                    "bad-mcs-method-argument",
                    node.name,
                )
        # regular class
        else:
            # class method
            if node.type == "classmethod" or node.name == "__class_getitem__":
                self._check_first_arg_config(
                    first,
                    self.config.valid_classmethod_first_arg,
                    node,
                    "bad-classmethod-argument",
                    node.name,
                )
            # regular method without self as argument
            elif first != "self":
                self.add_message("no-self-argument", node=node)
+
+ def _check_first_arg_config(self, first, config, node, message, method_name):
+ if first not in config:
+ if len(config) == 1:
+ valid = repr(config[0])
+ else:
+ valid = ", ".join(repr(v) for v in config[:-1])
+ valid = "%s or %r" % (valid, config[-1])
+ self.add_message(message, args=(method_name, valid), node=node)
+
+ def _check_bases_classes(self, node):
+ """check that the given class node implements abstract methods from
+ base classes
+ """
+
+ def is_abstract(method):
+ return method.is_abstract(pass_is_abstract=False)
+
+ # check if this class abstract
+ if class_is_abstract(node):
+ return
+
+ methods = sorted(
+ unimplemented_abstract_methods(node, is_abstract).items(),
+ key=lambda item: item[0],
+ )
+ for name, method in methods:
+ owner = method.parent.frame()
+ if owner is node:
+ continue
+ # owner is not this class, it must be a parent class
+ # check that the ancestor's method is not abstract
+ if name in node.locals:
+ # it is redefined as an attribute or with a descriptor
+ continue
+ self.add_message("abstract-method", node=node, args=(name, owner.name))
+
    def _check_init(self, node):
        """check that the __init__ method calls super or ancestors' __init__
        method (unless it is used for type hinting with `typing.overload`)

        Emits non-parent-init-called for __init__ calls on unrelated classes
        and super-init-not-called for ancestors whose __init__ is never
        reached.
        """
        # both messages disabled: nothing to compute
        if not self.linter.is_message_enabled(
            "super-init-not-called"
        ) and not self.linter.is_message_enabled("non-parent-init-called"):
            return
        klass_node = node.parent.frame()
        to_call = _ancestors_to_call(klass_node)
        # ancestors still to be "checked off" as __init__ calls are found
        not_called_yet = dict(to_call)
        for stmt in node.nodes_of_class(astroid.Call):
            expr = stmt.func
            # only interested in <something>.__init__(...) calls
            if not isinstance(expr, astroid.Attribute) or expr.attrname != "__init__":
                continue
            # skip the test if using super
            if (
                isinstance(expr.expr, astroid.Call)
                and isinstance(expr.expr.func, astroid.Name)
                and expr.expr.func.name == "super"
            ):
                return
            try:
                for klass in expr.expr.infer():
                    if klass is astroid.Uninferable:
                        continue
                    # The inferred klass can be super(), which was
                    # assigned to a variable and the `__init__`
                    # was called later.
                    #
                    # base = super()
                    # base.__init__(...)

                    if (
                        isinstance(klass, astroid.Instance)
                        and isinstance(klass._proxied, astroid.ClassDef)
                        and is_builtin_object(klass._proxied)
                        and klass._proxied.name == "super"
                    ):
                        return
                    if isinstance(klass, objects.Super):
                        return
                    try:
                        # this ancestor's __init__ has been reached
                        del not_called_yet[klass]
                    except KeyError:
                        if klass not in to_call:
                            # __init__ call on a class that is not an ancestor
                            self.add_message(
                                "non-parent-init-called", node=expr, args=klass.name
                            )
            except astroid.InferenceError:
                continue
        for klass, method in not_called_yet.items():
            # typing.overload stubs are not expected to call super().__init__
            if decorated_with(node, ["typing.overload"]):
                continue
            cls = node_frame_class(method)
            # object.__init__ does not need to be called explicitly
            if klass.name == "object" or (cls and cls.name == "object"):
                continue
            self.add_message("super-init-not-called", args=klass.name, node=node)
+
+ def _check_signature(self, method1, refmethod, class_type, cls):
+ """check that the signature of the two given methods match
+ """
+ if not (
+ isinstance(method1, astroid.FunctionDef)
+ and isinstance(refmethod, astroid.FunctionDef)
+ ):
+ self.add_message(
+ "method-check-failed", args=(method1, refmethod), node=method1
+ )
+ return
+
+ instance = cls.instantiate_class()
+ method1 = function_to_method(method1, instance)
+ refmethod = function_to_method(refmethod, instance)
+
+ # Don't care about functions with unknown argument (builtins).
+ if method1.args.args is None or refmethod.args.args is None:
+ return
+
+ # Ignore private to class methods.
+ if is_attr_private(method1.name):
+ return
+ # Ignore setters, they have an implicit extra argument,
+ # which shouldn't be taken in consideration.
+ if is_property_setter(method1):
+ return
+
+ if _different_parameters(
+ refmethod, method1, dummy_parameter_regex=self._dummy_rgx
+ ):
+ self.add_message(
+ "arguments-differ", args=(class_type, method1.name), node=method1
+ )
+ elif len(method1.args.defaults) < len(refmethod.args.defaults):
+ self.add_message(
+ "signature-differs", args=(class_type, method1.name), node=method1
+ )
+
    def _uses_mandatory_method_param(self, node):
        """Return whether the attribute access *node* goes through the
        method's mandatory first parameter.

        Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
        """
        # node is an Attribute; check its base expression
        return self._is_mandatory_method_param(node.expr)
+
    def _is_mandatory_method_param(self, node):
        """Check if astroid.Name corresponds to first attribute variable name

        Name is `self` for method, `cls` for classmethod and `mcs` for metaclass.
        """
        # NOTE: this returns the last evaluated operand of the `and` chain
        # (e.g. an empty list when no method is being visited), so callers
        # must only rely on its truthiness, not on it being a bool.
        return (
            self._first_attrs
            and isinstance(node, astroid.Name)
            and node.name == self._first_attrs[-1]
        )
+
+
class SpecialMethodsChecker(BaseChecker):
    """Checker which verifies that special methods
    are implemented correctly.

    Covers __iter__ return values (E0301), special-method arity (E0302)
    and __len__ return values (E0303).
    """

    __implements__ = (IAstroidChecker,)
    # shares the "classes" section with ClassChecker
    name = "classes"
    msgs = {
        "E0301": (
            "__iter__ returns non-iterator",
            "non-iterator-returned",
            "Used when an __iter__ method returns something which is not an "
            "iterable (i.e. has no `%s` method)" % NEXT_METHOD,
            {
                "old_names": [
                    ("W0234", "old-non-iterator-returned-1"),
                    ("E0234", "old-non-iterator-returned-2"),
                ]
            },
        ),
        "E0302": (
            "The special method %r expects %s param(s), %d %s given",
            "unexpected-special-method-signature",
            "Emitted when a special method was defined with an "
            "invalid number of parameters. If it has too few or "
            "too many, it might not work at all.",
            {"old_names": [("E0235", "bad-context-manager")]},
        ),
        "E0303": (
            "__len__ does not return non-negative integer",
            "invalid-length-returned",
            "Used when a __len__ method returns something which is not a "
            "non-negative integer",
            {},
        ),
    }
    priority = -2

    @check_messages(
        "unexpected-special-method-signature",
        "non-iterator-returned",
        "invalid-length-returned",
    )
    def visit_functiondef(self, node):
        # only methods can be special methods; ignore plain functions
        if not node.is_method():
            return
        if node.name == "__iter__":
            self._check_iter(node)
        if node.name == "__len__":
            self._check_len(node)
        if node.name in PYMETHODS:
            self._check_unexpected_method_signature(node)

    # async special methods get the exact same checks
    visit_asyncfunctiondef = visit_functiondef

    def _check_unexpected_method_signature(self, node):
        """Verify that special method *node* accepts the number of
        parameters Python will call it with (per SPECIAL_METHODS_PARAMS).
        """
        expected_params = SPECIAL_METHODS_PARAMS[node.name]

        if expected_params is None:
            # This can support a variable number of parameters.
            return
        if not node.args.args and not node.args.vararg:
            # Method has no parameter, will be caught
            # by no-method-argument.
            return

        if decorated_with(node, [BUILTINS + ".staticmethod"]):
            # We expect to not take in consideration self.
            all_args = node.args.args
        else:
            all_args = node.args.args[1:]
        mandatory = len(all_args) - len(node.args.defaults)
        optional = len(node.args.defaults)
        current_params = mandatory + optional

        if isinstance(expected_params, tuple):
            # The expected number of parameters can be any value from this
            # tuple, although the user should implement the method
            # to take all of them in consideration.
            emit = mandatory not in expected_params
            # NOTE(review): wording "between %d or %d" looks odd but is the
            # message text actually emitted; do not "fix" without updating
            # expectations downstream.
            expected_params = "between %d or %d" % expected_params
        else:
            # If the number of mandatory parameters doesn't
            # suffice, the expected parameters for this
            # function will be deduced from the optional
            # parameters.
            rest = expected_params - mandatory
            # the three branches below are exhaustive (==0, <0, >0), so
            # `emit` is always bound before being read
            if rest == 0:
                emit = False
            elif rest < 0:
                emit = True
            elif rest > 0:
                emit = not ((optional - rest) >= 0 or node.args.vararg)

        if emit:
            verb = "was" if current_params <= 1 else "were"
            self.add_message(
                "unexpected-special-method-signature",
                args=(node.name, expected_params, current_params, verb),
                node=node,
            )

    @staticmethod
    def _is_iterator(node):
        """Return True if the inferred value *node* looks like an iterator,
        i.e. provides the next method directly or through its metaclass.
        """
        if node is astroid.Uninferable:
            # Just ignore Uninferable objects.
            return True
        if isinstance(node, Generator):
            # Generators can be itered.
            return True

        if isinstance(node, astroid.Instance):
            try:
                node.local_attr(NEXT_METHOD)
                return True
            except astroid.NotFoundError:
                pass
        elif isinstance(node, astroid.ClassDef):
            # a class is an iterator if its *metaclass* defines next
            metaclass = node.metaclass()
            if metaclass and isinstance(metaclass, astroid.ClassDef):
                try:
                    metaclass.local_attr(NEXT_METHOD)
                    return True
                except astroid.NotFoundError:
                    pass
        return False

    def _check_iter(self, node):
        """Emit non-iterator-returned when __iter__'s inferred result is
        not an iterator."""
        inferred = _safe_infer_call_result(node, node)
        if inferred is not None:
            # None means inference failed; stay silent in that case
            if not self._is_iterator(inferred):
                self.add_message("non-iterator-returned", node=node)

    def _check_len(self, node):
        """Emit invalid-length-returned when __len__'s inferred result is
        not a non-negative integer."""
        inferred = _safe_infer_call_result(node, node)
        if not inferred or inferred is astroid.Uninferable:
            return

        if (
            isinstance(inferred, astroid.Instance)
            and inferred.name == "int"
            and not isinstance(inferred, astroid.Const)
        ):
            # Assume it's good enough, since the int() call might wrap
            # something that's uninferable for us
            return

        if not isinstance(inferred, astroid.Const):
            self.add_message("invalid-length-returned", node=node)
            return

        value = inferred.value
        if not isinstance(value, int) or value < 0:
            self.add_message("invalid-length-returned", node=node)
+
+
+def _ancestors_to_call(klass_node, method="__init__"):
+ """return a dictionary where keys are the list of base classes providing
+ the queried method, and so that should/may be called from the method node
+ """
+ to_call = {}
+ for base_node in klass_node.ancestors(recurs=False):
+ try:
+ to_call[base_node] = next(base_node.igetattr(method))
+ except astroid.InferenceError:
+ continue
+ return to_call
+
+
def register(linter):
    """Plugin hook: auto-register this module's checkers on *linter*."""
    for checker_cls in (ClassChecker, SpecialMethodsChecker):
        linter.register_checker(checker_cls(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/design_analysis.py b/venv/Lib/site-packages/pylint/checkers/design_analysis.py
new file mode 100644
index 0000000..50d8eaa
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/design_analysis.py
@@ -0,0 +1,496 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006, 2009-2010, 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012, 2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Mark Miller <725mrm@gmail.com>
+# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Jakub Wilk <jwilk@jwilk.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""check for signs of poor design"""
+
+import re
+from collections import defaultdict
+
+import astroid
+from astroid import BoolOp, If, decorators
+
+from pylint import utils
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker
+
+MSGS = {
+ "R0901": (
+ "Too many ancestors (%s/%s)",
+ "too-many-ancestors",
+ "Used when class has too many parent classes, try to reduce "
+ "this to get a simpler (and so easier to use) class.",
+ ),
+ "R0902": (
+ "Too many instance attributes (%s/%s)",
+ "too-many-instance-attributes",
+ "Used when class has too many instance attributes, try to reduce "
+ "this to get a simpler (and so easier to use) class.",
+ ),
+ "R0903": (
+ "Too few public methods (%s/%s)",
+ "too-few-public-methods",
+ "Used when class has too few public methods, so be sure it's "
+ "really worth it.",
+ ),
+ "R0904": (
+ "Too many public methods (%s/%s)",
+ "too-many-public-methods",
+ "Used when class has too many public methods, try to reduce "
+ "this to get a simpler (and so easier to use) class.",
+ ),
+ "R0911": (
+ "Too many return statements (%s/%s)",
+ "too-many-return-statements",
+ "Used when a function or method has too many return statement, "
+ "making it hard to follow.",
+ ),
+ "R0912": (
+ "Too many branches (%s/%s)",
+ "too-many-branches",
+ "Used when a function or method has too many branches, "
+ "making it hard to follow.",
+ ),
+ "R0913": (
+ "Too many arguments (%s/%s)",
+ "too-many-arguments",
+ "Used when a function or method takes too many arguments.",
+ ),
+ "R0914": (
+ "Too many local variables (%s/%s)",
+ "too-many-locals",
+ "Used when a function or method has too many local variables.",
+ ),
+ "R0915": (
+ "Too many statements (%s/%s)",
+ "too-many-statements",
+ "Used when a function or method has too many statements. You "
+ "should then split it in smaller functions / methods.",
+ ),
+ "R0916": (
+ "Too many boolean expressions in if statement (%s/%s)",
+ "too-many-boolean-expressions",
+ "Used when an if statement contains too many boolean expressions.",
+ ),
+}
+SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
+DATACLASSES_DECORATORS = frozenset({"dataclass", "attrs"})
+DATACLASS_IMPORT = "dataclasses"
+TYPING_NAMEDTUPLE = "typing.NamedTuple"
+
+
def _is_exempt_from_public_methods(node: astroid.ClassDef) -> bool:
    """Check if a class is exempt from too-few-public-methods.

    Enum subclasses, typing.NamedTuple subclasses and dataclasses
    (stdlib or attrs) legitimately carry few or no public methods.
    """
    # typing.NamedTuple or Enum ancestry exempts the class outright
    for ancestor in node.ancestors():
        if ancestor.name == "Enum" and ancestor.root().name == "enum":
            return True
        if ancestor.qname() == TYPING_NAMEDTUPLE:
            return True

    # a dataclass is recognised by its decorator...
    if not node.decorators:
        return False

    # ...combined with the module actually importing a dataclass machinery
    root_locals = set(node.root().locals)
    dataclass_imported = bool(root_locals.intersection(DATACLASSES_DECORATORS)) or (
        DATACLASS_IMPORT in root_locals
    )
    for decorator in node.decorators.nodes:
        target = decorator.func if isinstance(decorator, astroid.Call) else decorator
        if isinstance(target, astroid.Name):
            name = target.name
        elif isinstance(target, astroid.Attribute):
            name = target.attrname
        else:
            continue
        if name in DATACLASSES_DECORATORS and dataclass_imported:
            return True
    return False
+
+
def _count_boolean_expressions(bool_op):
    """Counts the number of boolean expressions in BoolOp `bool_op` (recursive)

    example: a and (b or c or (d and e)) ==> 5 boolean expressions
    """
    # nested BoolOp children contribute their own leaf count; every other
    # child is a single leaf expression
    return sum(
        _count_boolean_expressions(child) if isinstance(child, BoolOp) else 1
        for child in bool_op.get_children()
    )
+
+
def _count_methods_in_class(node):
    """Return the number of public methods reachable on class *node*.

    Every own or inherited method whose name does not start with an
    underscore counts; dunder methods defined on the class itself also
    count as public, except ``__init__``.
    """
    public_methods = sum(
        1 for method in node.methods() if not method.name.startswith("_")
    )
    # Special methods count towards the number of public methods,
    # but don't count towards there being too many methods.
    for method in node.mymethods():
        if method.name != "__init__" and SPECIAL_OBJ.search(method.name):
            public_methods += 1
    return public_methods
+
+
class MisdesignChecker(BaseChecker):
    """checks for sign of poor/misdesign:
    * number of methods, attributes, local variables...
    * size, complexity of functions, methods
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "design"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = (
        (
            "max-args",
            {
                "default": 5,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of arguments for function / method.",
            },
        ),
        (
            "max-locals",
            {
                "default": 15,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of locals for function / method body.",
            },
        ),
        (
            "max-returns",
            {
                "default": 6,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of return / yield for function / "
                "method body.",
            },
        ),
        (
            "max-branches",
            {
                "default": 12,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of branch for function / method body.",
            },
        ),
        (
            "max-statements",
            {
                "default": 50,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of statements in function / method " "body.",
            },
        ),
        (
            "max-parents",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of parents for a class (see R0901).",
            },
        ),
        (
            "max-attributes",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of attributes for a class \
(see R0902).",
            },
        ),
        (
            "min-public-methods",
            {
                "default": 2,
                "type": "int",
                "metavar": "<num>",
                "help": "Minimum number of public methods for a class \
(see R0903).",
            },
        ),
        (
            "max-public-methods",
            {
                "default": 20,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of public methods for a class \
(see R0904).",
            },
        ),
        (
            "max-bool-expr",
            {
                "default": 5,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of boolean expressions in an if "
                "statement (see R0916).",
            },
        ),
    )

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # linter statistics dict, populated in open()
        self.stats = None
        # stack of return-statement counters, one entry per nested function
        self._returns = None
        # branch counters, keyed by enclosing scope
        self._branches = None
        # stack of statement counters, one entry per nested function
        self._stmts = None

    def open(self):
        """initialize visit variables"""
        self.stats = self.linter.add_stats()
        self._returns = []
        self._branches = defaultdict(int)
        self._stmts = []

    def _inc_all_stmts(self, amount):
        # a statement is counted against every function currently on the
        # stack (nested functions contribute to their enclosing ones)
        for i in range(len(self._stmts)):
            self._stmts[i] += amount

    @decorators.cachedproperty
    def _ignored_argument_names(self):
        # compiled regex (or None) of argument names excluded from the
        # too-many-arguments / too-many-locals counts
        return utils.get_global_option(self, "ignored-argument-names", default=None)

    @check_messages(
        "too-many-ancestors",
        "too-many-instance-attributes",
        "too-few-public-methods",
        "too-many-public-methods",
    )
    def visit_classdef(self, node):
        """check size of inheritance hierarchy and number of instance attributes
        """
        nb_parents = len(list(node.ancestors()))
        if nb_parents > self.config.max_parents:
            self.add_message(
                "too-many-ancestors",
                node=node,
                args=(nb_parents, self.config.max_parents),
            )

        if len(node.instance_attrs) > self.config.max_attributes:
            self.add_message(
                "too-many-instance-attributes",
                node=node,
                args=(len(node.instance_attrs), self.config.max_attributes),
            )

    @check_messages("too-few-public-methods", "too-many-public-methods")
    def leave_classdef(self, node):
        """check number of public methods"""
        my_methods = sum(
            1 for method in node.mymethods() if not method.name.startswith("_")
        )

        # Does the class contain less than n public methods ?
        # This checks only the methods defined in the current class,
        # since the user might not have control over the classes
        # from the ancestors. It avoids some false positives
        # for classes such as unittest.TestCase, which provides
        # a lot of assert methods. It doesn't make sense to warn
        # when the user subclasses TestCase to add his own tests.
        if my_methods > self.config.max_public_methods:
            self.add_message(
                "too-many-public-methods",
                node=node,
                args=(my_methods, self.config.max_public_methods),
            )

        # Stop here for exception, metaclass, interface classes and other
        # classes for which we don't need to count the methods.
        if node.type != "class" or _is_exempt_from_public_methods(node):
            return

        # Does the class contain more than n public methods ?
        # This checks all the methods defined by ancestors and
        # by the current class.
        all_methods = _count_methods_in_class(node)
        if all_methods < self.config.min_public_methods:
            self.add_message(
                "too-few-public-methods",
                node=node,
                args=(all_methods, self.config.min_public_methods),
            )

    @check_messages(
        "too-many-return-statements",
        "too-many-branches",
        "too-many-arguments",
        "too-many-locals",
        "too-many-statements",
        "keyword-arg-before-vararg",
    )
    def visit_functiondef(self, node):
        """check function name, docstring, arguments, redefinition,
        variable names, max locals
        """
        # init branch and returns counters
        self._returns.append(0)
        # check number of arguments
        args = node.args.args
        ignored_argument_names = self._ignored_argument_names
        if args is not None:
            ignored_args_num = 0
            if ignored_argument_names:
                ignored_args_num = sum(
                    1 for arg in args if ignored_argument_names.match(arg.name)
                )

            argnum = len(args) - ignored_args_num
            if argnum > self.config.max_args:
                # NOTE(review): the message reports len(args) (all arguments)
                # while the threshold is compared against argnum (ignored
                # names excluded) -- confirm this asymmetry is intended.
                self.add_message(
                    "too-many-arguments",
                    node=node,
                    args=(len(args), self.config.max_args),
                )
        else:
            ignored_args_num = 0
        # check number of local variables
        locnum = len(node.locals) - ignored_args_num
        if locnum > self.config.max_locals:
            self.add_message(
                "too-many-locals", node=node, args=(locnum, self.config.max_locals)
            )
        # init new statements counter
        self._stmts.append(1)

    visit_asyncfunctiondef = visit_functiondef

    @check_messages(
        "too-many-return-statements",
        "too-many-branches",
        "too-many-arguments",
        "too-many-locals",
        "too-many-statements",
    )
    def leave_functiondef(self, node):
        """most of the work is done here on close:
        checks for max returns, branch, return in __init__
        """
        returns = self._returns.pop()
        if returns > self.config.max_returns:
            self.add_message(
                "too-many-return-statements",
                node=node,
                args=(returns, self.config.max_returns),
            )
        branches = self._branches[node]
        if branches > self.config.max_branches:
            self.add_message(
                "too-many-branches",
                node=node,
                args=(branches, self.config.max_branches),
            )
        # check number of statements
        stmts = self._stmts.pop()
        if stmts > self.config.max_statements:
            self.add_message(
                "too-many-statements",
                node=node,
                args=(stmts, self.config.max_statements),
            )

    leave_asyncfunctiondef = leave_functiondef

    def visit_return(self, _):
        """count number of returns"""
        if not self._returns:
            return  # return outside function, reported by the base checker
        self._returns[-1] += 1

    def visit_default(self, node):
        """default visit method -> increments the statements counter if
        necessary
        """
        if node.is_statement:
            self._inc_all_stmts(1)

    def visit_tryexcept(self, node):
        """increments the branches counter"""
        # one branch per handler, plus one for the else clause if present
        branches = len(node.handlers)
        if node.orelse:
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)

    def visit_tryfinally(self, node):
        """increments the branches counter"""
        # try/finally always contributes two branches
        self._inc_branch(node, 2)
        self._inc_all_stmts(2)

    @check_messages("too-many-boolean-expressions")
    def visit_if(self, node):
        """increments the branches counter and checks boolean expressions"""
        self._check_boolean_expressions(node)
        branches = 1
        # don't double count If nodes coming from some 'elif'
        if node.orelse and (len(node.orelse) > 1 or not isinstance(node.orelse[0], If)):
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)

    def _check_boolean_expressions(self, node):
        """Go through "if" node `node` and counts its boolean expressions

        if the "if" node test is a BoolOp node
        """
        condition = node.test
        if not isinstance(condition, BoolOp):
            return
        nb_bool_expr = _count_boolean_expressions(condition)
        if nb_bool_expr > self.config.max_bool_expr:
            self.add_message(
                "too-many-boolean-expressions",
                node=condition,
                args=(nb_bool_expr, self.config.max_bool_expr),
            )

    def visit_while(self, node):
        """increments the branches counter"""
        # NOTE: unlike visit_if/visit_tryexcept, loop branches are not
        # also counted as statements (no _inc_all_stmts call here)
        branches = 1
        if node.orelse:
            branches += 1
        self._inc_branch(node, branches)

    visit_for = visit_while

    def _inc_branch(self, node, branchesnum=1):
        """increments the branches counter"""
        self._branches[node.scope()] += branchesnum
+
+
def register(linter):
    """Plugin hook: auto-register the design checker on *linter*."""
    checker = MisdesignChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/exceptions.py b/venv/Lib/site-packages/pylint/checkers/exceptions.py
new file mode 100644
index 0000000..360e1d1
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/exceptions.py
@@ -0,0 +1,546 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2011-2014 Google, Inc.
+# Copyright (c) 2012 Tim Hatch <tim@timhatch.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Martin von Gagern <gagern@google.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checks for various exception related errors."""
+import builtins
+import inspect
+import typing
+
+import astroid
+from astroid.node_classes import NodeNG
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+
+
+def _builtin_exceptions():
+ def predicate(obj):
+ return isinstance(obj, type) and issubclass(obj, BaseException)
+
+ members = inspect.getmembers(builtins, predicate)
+ return {exc.__name__ for (_, exc) in members}
+
+
def _annotated_unpack_infer(stmt, context=None):
    """Yield ``(original_node, inferred_node)`` pairs for *stmt*.

    List and tuple literals are unpacked element by element; any other
    statement is inferred directly.  Uninferable results are skipped.
    """
    if isinstance(stmt, (astroid.List, astroid.Tuple)):
        for element in stmt.elts:
            inferred_elt = utils.safe_infer(element)
            if inferred_elt and inferred_elt is not astroid.Uninferable:
                yield element, inferred_elt
        return
    for inferred_stmt in stmt.infer(context):
        if inferred_stmt is not astroid.Uninferable:
            yield stmt, inferred_stmt
+
+
def _is_raising(body: typing.List) -> bool:
    """Return True if any statement in *body* is a ``raise``."""
    return any(isinstance(stmt, astroid.Raise) for stmt in body)
+
+
# Exception names considered too generic to catch; drives "broad-except"
# (configurable via the overgeneral-exceptions option below).
OVERGENERAL_EXCEPTIONS = ("BaseException", "Exception")
# Name of the builtins module ("builtins" on Python 3).
BUILTINS_NAME = builtins.__name__
+
+MSGS = {
+ "E0701": (
+ "Bad except clauses order (%s)",
+ "bad-except-order",
+ "Used when except clauses are not in the correct order (from the "
+ "more specific to the more generic). If you don't fix the order, "
+ "some exceptions may not be caught by the most specific handler.",
+ ),
+ "E0702": (
+ "Raising %s while only classes or instances are allowed",
+ "raising-bad-type",
+ "Used when something which is neither a class, an instance or a "
+ "string is raised (i.e. a `TypeError` will be raised).",
+ ),
+ "E0703": (
+ "Exception context set to something which is not an exception, nor None",
+ "bad-exception-context",
+ 'Used when using the syntax "raise ... from ...", '
+ "where the exception context is not an exception, "
+ "nor None.",
+ ),
+ "E0704": (
+ "The raise statement is not inside an except clause",
+ "misplaced-bare-raise",
+ "Used when a bare raise is not used inside an except clause. "
+ "This generates an error, since there are no active exceptions "
+ "to be reraised. An exception to this rule is represented by "
+ "a bare raise inside a finally clause, which might work, as long "
+ "as an exception is raised inside the try block, but it is "
+ "nevertheless a code smell that must not be relied upon.",
+ ),
+ "E0710": (
+ "Raising a new style class which doesn't inherit from BaseException",
+ "raising-non-exception",
+ "Used when a new style class which doesn't inherit from "
+ "BaseException is raised.",
+ ),
+ "E0711": (
+ "NotImplemented raised - should raise NotImplementedError",
+ "notimplemented-raised",
+ "Used when NotImplemented is raised instead of NotImplementedError",
+ ),
+ "E0712": (
+ "Catching an exception which doesn't inherit from Exception: %s",
+ "catching-non-exception",
+ "Used when a class which doesn't inherit from "
+ "Exception is used as an exception in an except clause.",
+ ),
+ "W0702": (
+ "No exception type(s) specified",
+ "bare-except",
+ "Used when an except clause doesn't specify exceptions type to catch.",
+ ),
+ "W0703": (
+ "Catching too general exception %s",
+ "broad-except",
+ "Used when an except catches a too general exception, "
+ "possibly burying unrelated errors.",
+ ),
+ "W0705": (
+ "Catching previously caught exception type %s",
+ "duplicate-except",
+ "Used when an except catches a type that was already caught by "
+ "a previous handler.",
+ ),
+ "W0706": (
+ "The except handler raises immediately",
+ "try-except-raise",
+ "Used when an except handler uses raise as its first or only "
+ "operator. This is useless because it raises back the exception "
+ "immediately. Remove the raise operator or the entire "
+ "try-except-raise block!",
+ ),
+ "W0711": (
+ 'Exception to catch is the result of a binary "%s" operation',
+ "binary-op-exception",
+ "Used when the exception to catch is of the form "
+ '"except A or B:". If intending to catch multiple, '
+ 'rewrite as "except (A, B):"',
+ ),
+ "W0715": (
+ "Exception arguments suggest string formatting might be intended",
+ "raising-format-tuple",
+ "Used when passing multiple arguments to an exception "
+ "constructor, the first of them a string literal containing what "
+ "appears to be placeholders intended for formatting",
+ ),
+ "W0716": (
+ "Invalid exception operation. %s",
+ "wrong-exception-operation",
+ "Used when an operation is done against an exception, but the operation "
+ "is not valid for the exception in question. Usually emitted when having "
+ "binary operations between exceptions in except handlers.",
+ ),
+}
+
+
class BaseVisitor:
    """Base class for the small visitors defined in this module.

    ``visit(node)`` dispatches to ``visit_<lowercased class name>`` when such
    a method exists and falls back to :meth:`visit_default` otherwise.
    """

    def __init__(self, checker, node):
        self._checker = checker
        self._node = node

    def visit(self, node):
        method_name = "visit_" + type(node).__name__.lower()
        handler = getattr(self, method_name, self.visit_default)
        handler(node)

    def visit_default(self, node):  # pylint: disable=unused-argument
        """Fallback for node types without a dedicated visit method."""
+
+
class ExceptionRaiseRefVisitor(BaseVisitor):
    """Visit the non-leaf parts of a raised expression (names and calls)."""

    def visit_name(self, name):
        # ``raise NotImplemented`` is almost always a typo for
        # ``raise NotImplementedError``.
        if name.name == "NotImplemented":
            self._checker.add_message("notimplemented-raised", node=self._node)

    def visit_call(self, call):
        if not isinstance(call.func, astroid.Name):
            return
        self.visit_name(call.func)
        if len(call.args) <= 1:
            return
        first_arg = call.args[0]
        if isinstance(first_arg, astroid.Const) and isinstance(first_arg.value, str):
            message = first_arg.value
            # A literal with %-placeholders or {}-placeholders plus extra
            # positional args suggests the author meant to format the string.
            if "%" in message or ("{" in message and "}" in message):
                self._checker.add_message("raising-format-tuple", node=self._node)
+
+
class ExceptionRaiseLeafVisitor(BaseVisitor):
    """Visit the leaf (fully inferred) value of a raised expression."""

    def visit_const(self, const):
        # Raised string literals are reported by the python3 porting
        # checker; every other constant is a plain type error.
        if isinstance(const.value, str):
            return
        self._checker.add_message(
            "raising-bad-type", node=self._node, args=const.value.__class__.__name__
        )

    def visit_instance(self, instance):
        # An instance is checked through its class.
        # pylint: disable=protected-access
        self.visit_classdef(instance._proxied)

    # Exception instances have a particular class type
    visit_exceptioninstance = visit_instance

    def visit_classdef(self, cls):
        if (
            not utils.inherit_from_std_ex(cls)
            and utils.has_known_bases(cls)
            and cls.newstyle
        ):
            self._checker.add_message("raising-non-exception", node=self._node)

    def visit_tuple(self, _):
        self._checker.add_message("raising-bad-type", node=self._node, args="tuple")

    def visit_default(self, node):
        display_name = getattr(node, "name", node.__class__.__name__)
        self._checker.add_message(
            "raising-bad-type", node=self._node, args=display_name
        )
+
+
class ExceptionsChecker(checkers.BaseChecker):
    """Exception related checks.

    Covers raise statements (bad types, misplaced bare raise, bad context)
    and except clauses (ordering, duplicates, bare/broad excepts,
    non-exception classes, binary operations between exceptions).
    """

    __implements__ = interfaces.IAstroidChecker

    # Checker identity and registered messages.
    name = "exceptions"
    msgs = MSGS
    priority = -4
    options = (
        (
            "overgeneral-exceptions",
            {
                "default": OVERGENERAL_EXCEPTIONS,
                "type": "csv",
                "metavar": "<comma-separated class names>",
                "help": "Exceptions that will emit a warning "
                'when being caught. Defaults to "%s".'
                % (", ".join(OVERGENERAL_EXCEPTIONS),),
            },
        ),
    )

    def open(self):
        # Cache the names of the builtin exception classes once per run.
        self._builtin_exceptions = _builtin_exceptions()
        super(ExceptionsChecker, self).open()

    @utils.check_messages(
        "misplaced-bare-raise",
        "raising-bad-type",
        "raising-non-exception",
        "notimplemented-raised",
        "bad-exception-context",
        "raising-format-tuple",
    )
    def visit_raise(self, node):
        """Check a ``raise`` statement: bare raise, raised value, context."""
        if node.exc is None:
            # Bare ``raise``: only valid inside an except clause.
            self._check_misplaced_bare_raise(node)
            return

        if node.cause:
            # ``raise ... from <cause>``
            self._check_bad_exception_context(node)

        expr = node.exc
        # Check the syntactic form of the raised expression (names, calls).
        ExceptionRaiseRefVisitor(self, node).visit(expr)

        try:
            inferred_value = expr.inferred()[-1]
        except astroid.InferenceError:
            pass
        else:
            if inferred_value:
                # Check the inferred (leaf) value of the raised expression.
                ExceptionRaiseLeafVisitor(self, node).visit(inferred_value)

    def _check_misplaced_bare_raise(self, node):
        """Emit misplaced-bare-raise for a bare ``raise`` outside an except
        clause (``__exit__`` methods are exempt).
        """
        # Filter out if it's present in __exit__.
        scope = node.scope()
        if (
            isinstance(scope, astroid.FunctionDef)
            and scope.is_method()
            and scope.name == "__exit__"
        ):
            return

        current = node
        # Stop when a new scope is generated or when the raise
        # statement is found inside a TryFinally.
        ignores = (astroid.ExceptHandler, astroid.FunctionDef)
        while current and not isinstance(current.parent, ignores):
            current = current.parent

        expected = (astroid.ExceptHandler,)
        if not current or not isinstance(current.parent, expected):
            self.add_message("misplaced-bare-raise", node=node)

    def _check_bad_exception_context(self, node):
        """Verify that the exception context is properly set.

        An exception context can be only `None` or an exception.
        """
        cause = utils.safe_infer(node.cause)
        if cause in (astroid.Uninferable, None):
            # Inference failed: don't emit on uncertainty.
            return

        if isinstance(cause, astroid.Const):
            # ``raise ... from None`` is the only valid constant context.
            if cause.value is not None:
                self.add_message("bad-exception-context", node=node)
        elif not isinstance(cause, astroid.ClassDef) and not utils.inherit_from_std_ex(
            cause
        ):
            self.add_message("bad-exception-context", node=node)

    def _check_catching_non_exception(self, handler, exc, part):
        """Emit catching-non-exception when *exc* (caught by *handler*)
        is not an exception class.
        """
        if isinstance(exc, astroid.Tuple):
            # Check if it is a tuple of exceptions.
            inferred = [utils.safe_infer(elt) for elt in exc.elts]
            if any(node is astroid.Uninferable for node in inferred):
                # Don't emit if we don't know every component.
                return
            if all(
                node
                and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
                for node in inferred
            ):
                return

        if not isinstance(exc, astroid.ClassDef):
            # Don't emit the warning if the inferred stmt
            # is None, but the exception handler is something else,
            # maybe it was redefined.
            if isinstance(exc, astroid.Const) and exc.value is None:
                if (
                    isinstance(handler.type, astroid.Const)
                    and handler.type.value is None
                ) or handler.type.parent_of(exc):
                    # If the exception handler catches None or
                    # the exception component, which is None, is
                    # defined by the entire exception handler, then
                    # emit a warning.
                    self.add_message(
                        "catching-non-exception",
                        node=handler.type,
                        args=(part.as_string(),),
                    )
            else:
                self.add_message(
                    "catching-non-exception",
                    node=handler.type,
                    args=(part.as_string(),),
                )
            return

        if (
            not utils.inherit_from_std_ex(exc)
            and exc.name not in self._builtin_exceptions
        ):
            # Only emit when the class hierarchy is fully known; unknown
            # bases might still lead to BaseException.
            if utils.has_known_bases(exc):
                self.add_message(
                    "catching-non-exception", node=handler.type, args=(exc.name,)
                )

    def _check_try_except_raise(self, node):
        """Emit try-except-raise for handlers that immediately re-raise the
        caught exception, unless a later handler catches a subclass of it.
        """

        def gather_exceptions_from_handler(
            handler
        ) -> typing.Optional[typing.List[NodeNG]]:
            # Returns the exception nodes caught by *handler*, or None when
            # they cannot be inferred reliably.
            exceptions = []  # type: typing.List[NodeNG]
            if handler.type:
                exceptions_in_handler = utils.safe_infer(handler.type)
                if isinstance(exceptions_in_handler, astroid.Tuple):
                    exceptions = list(
                        {
                            exception
                            for exception in exceptions_in_handler.elts
                            if isinstance(exception, astroid.Name)
                        }
                    )
                elif exceptions_in_handler:
                    exceptions = [exceptions_in_handler]
                else:
                    # Break when we cannot infer anything reliably.
                    return None
            return exceptions

        bare_raise = False
        handler_having_bare_raise = None
        excs_in_bare_handler = []
        for handler in node.handlers:
            if bare_raise:
                # check that subsequent handler is not parent of handler which had bare raise.
                # since utils.safe_infer can fail for bare except, check it before.
                # also break early if bare except is followed by bare except.

                excs_in_current_handler = gather_exceptions_from_handler(handler)

                if not excs_in_current_handler:
                    bare_raise = False
                    break
                if excs_in_bare_handler is None:
                    # It can be `None` when the inference failed
                    break

                for exc_in_current_handler in excs_in_current_handler:
                    inferred_current = utils.safe_infer(exc_in_current_handler)
                    if any(
                        utils.is_subclass_of(
                            utils.safe_infer(exc_in_bare_handler), inferred_current
                        )
                        for exc_in_bare_handler in excs_in_bare_handler
                    ):
                        bare_raise = False
                        break

            # `raise` as the first operator inside the except handler
            if _is_raising([handler.body[0]]):
                # flags when there is a bare raise
                if handler.body[0].exc is None:
                    bare_raise = True
                    handler_having_bare_raise = handler
                    excs_in_bare_handler = gather_exceptions_from_handler(handler)
        else:
            # Only emit when every handler was examined without an early break.
            if bare_raise:
                self.add_message("try-except-raise", node=handler_having_bare_raise)

    @utils.check_messages("wrong-exception-operation")
    def visit_binop(self, node):
        """Flag binary operations used as an exception type, e.g. except A | B."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V | A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                node.right.as_string(),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages("wrong-exception-operation")
    def visit_compare(self, node):
        """Flag comparisons used as an exception type, e.g. except A < B."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V < A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                ", ".join(operand.as_string() for _, operand in node.ops),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages(
        "bare-except",
        "broad-except",
        "try-except-raise",
        "binary-op-exception",
        "bad-except-order",
        "catching-non-exception",
        "duplicate-except",
    )
    def visit_tryexcept(self, node):
        """Check the except handlers of a try/except: ordering, duplicates,
        bare/broad excepts and non-exception classes.
        """
        self._check_try_except_raise(node)
        exceptions_classes = []
        nb_handlers = len(node.handlers)
        for index, handler in enumerate(node.handlers):
            if handler.type is None:
                # An ``except:`` with no re-raise swallows everything.
                if not _is_raising(handler.body):
                    self.add_message("bare-except", node=handler)

                # check if an "except:" is followed by some other
                # except
                if index < (nb_handlers - 1):
                    msg = "empty except clause should always appear last"
                    self.add_message("bad-except-order", node=node, args=msg)

            elif isinstance(handler.type, astroid.BoolOp):
                # ``except A or B:`` never does what the author intended.
                self.add_message(
                    "binary-op-exception", node=handler, args=handler.type.op
                )
            else:
                try:
                    excs = list(_annotated_unpack_infer(handler.type))
                except astroid.InferenceError:
                    continue

                for part, exc in excs:
                    if exc is astroid.Uninferable:
                        continue
                    if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(
                        exc
                    ):
                        # Check exception instances through their class.
                        # pylint: disable=protected-access
                        exc = exc._proxied

                    self._check_catching_non_exception(handler, exc, part)

                    if not isinstance(exc, astroid.ClassDef):
                        continue

                    exc_ancestors = [
                        anc
                        for anc in exc.ancestors()
                        if isinstance(anc, astroid.ClassDef)
                    ]

                    # An earlier handler catching an ancestor class makes this
                    # handler unreachable.
                    for previous_exc in exceptions_classes:
                        if previous_exc in exc_ancestors:
                            msg = "%s is an ancestor class of %s" % (
                                previous_exc.name,
                                exc.name,
                            )
                            self.add_message(
                                "bad-except-order", node=handler.type, args=msg
                            )
                    if (
                        exc.name in self.config.overgeneral_exceptions
                        and exc.root().name == utils.EXCEPTIONS_MODULE
                        and not _is_raising(handler.body)
                    ):
                        self.add_message(
                            "broad-except", args=exc.name, node=handler.type
                        )

                    if exc in exceptions_classes:
                        self.add_message(
                            "duplicate-except", args=exc.name, node=handler.type
                        )

                exceptions_classes += [exc for _, exc in excs]
+
+
def register(linter):
    """Required plugin entry point: attach this module's checker to *linter*."""
    checker = ExceptionsChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/format.py b/venv/Lib/site-packages/pylint/checkers/format.py
new file mode 100644
index 0000000..c4cad31
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/format.py
@@ -0,0 +1,1332 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2015 Google, Inc.
+# Copyright (c) 2013 moxian <aleftmail@inbox.ru>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 frost-nzcr4 <frost.nzcr4@jagmort.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
+# Copyright (c) 2015 Fabio Natali <me@fabionatali.com>
+# Copyright (c) 2015 Harut <yes@harutune.name>
+# Copyright (c) 2015 Mihai Balint <balint.mihai@gmail.com>
+# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Petr Pulc <petrpulc@gmail.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Krzysztof Czapla <k.czapla68@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 James M. Allen <james.m.allen@gmail.com>
+# Copyright (c) 2017 vinnyrose <vinnyrose@users.noreply.github.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.guinta@protonmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Fureigh <rhys.fureigh@gsa.gov>
+# Copyright (c) 2018 Pierre Sassoulas <pierre.sassoulas@wisebim.fr>
+# Copyright (c) 2018 Andreas Freimuth <andreas.freimuth@united-bits.de>
+# Copyright (c) 2018 Jakub Wilk <jwilk@jwilk.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Python code format's checker.
+
+By default try to follow Guido's style guide :
+
+https://www.python.org/doc/essays/styleguide/
+
+Some parts of the process_token method is based from The Tab Nanny std module.
+"""
+
+import keyword
+import tokenize
+from functools import reduce # pylint: disable=redefined-builtin
+
+from astroid import nodes
+
+from pylint.checkers import BaseTokenChecker
+from pylint.checkers.utils import check_messages
+from pylint.constants import OPTION_RGX, WarningScope
+from pylint.interfaces import IAstroidChecker, IRawChecker, ITokenChecker
+
+_ASYNC_TOKEN = "async"
+_CONTINUATION_BLOCK_OPENERS = [
+ "elif",
+ "except",
+ "for",
+ "if",
+ "while",
+ "def",
+ "class",
+ "with",
+]
+_KEYWORD_TOKENS = [
+ "assert",
+ "del",
+ "elif",
+ "except",
+ "for",
+ "if",
+ "in",
+ "not",
+ "raise",
+ "return",
+ "while",
+ "yield",
+ "with",
+]
+
+_SPACED_OPERATORS = [
+ "==",
+ "<",
+ ">",
+ "!=",
+ "<>",
+ "<=",
+ ">=",
+ "+=",
+ "-=",
+ "*=",
+ "**=",
+ "/=",
+ "//=",
+ "&=",
+ "|=",
+ "^=",
+ "%=",
+ ">>=",
+ "<<=",
+]
+_OPENING_BRACKETS = ["(", "[", "{"]
+_CLOSING_BRACKETS = [")", "]", "}"]
+_TAB_LENGTH = 8
+
+_EOL = frozenset([tokenize.NEWLINE, tokenize.NL, tokenize.COMMENT])
+_JUNK_TOKENS = (tokenize.COMMENT, tokenize.NL)
+
+# Whitespace checking policy constants
+_MUST = 0
+_MUST_NOT = 1
+_IGNORE = 2
+
+# Whitespace checking config constants
+_DICT_SEPARATOR = "dict-separator"
+_TRAILING_COMMA = "trailing-comma"
+_EMPTY_LINE = "empty-line"
+_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR, _EMPTY_LINE]
+_DEFAULT_NO_SPACE_CHECK_CHOICES = [_TRAILING_COMMA, _DICT_SEPARATOR]
+
+MSGS = {
+ "C0301": (
+ "Line too long (%s/%s)",
+ "line-too-long",
+ "Used when a line is longer than a given number of characters.",
+ ),
+ "C0302": (
+ "Too many lines in module (%s/%s)", # was W0302
+ "too-many-lines",
+ "Used when a module has too many lines, reducing its readability.",
+ ),
+ "C0303": (
+ "Trailing whitespace",
+ "trailing-whitespace",
+ "Used when there is whitespace between the end of a line and the newline.",
+ ),
+ "C0304": (
+ "Final newline missing",
+ "missing-final-newline",
+ "Used when the last line in a file is missing a newline.",
+ ),
+ "C0305": (
+ "Trailing newlines",
+ "trailing-newlines",
+ "Used when there are trailing blank lines in a file.",
+ ),
+ "W0311": (
+ "Bad indentation. Found %s %s, expected %s",
+ "bad-indentation",
+ "Used when an unexpected number of indentation's tabulations or "
+ "spaces has been found.",
+ ),
+ "C0330": ("Wrong %s indentation%s%s.\n%s%s", "bad-continuation", "TODO"),
+ "W0312": (
+ "Found indentation with %ss instead of %ss",
+ "mixed-indentation",
+ "Used when there are some mixed tabs and spaces in a module.",
+ ),
+ "W0301": (
+ "Unnecessary semicolon", # was W0106
+ "unnecessary-semicolon",
+ 'Used when a statement is ended by a semi-colon (";"), which '
+ "isn't necessary (that's python, not C ;).",
+ ),
+ "C0321": (
+ "More than one statement on a single line",
+ "multiple-statements",
+ "Used when more than on statement are found on the same line.",
+ {"scope": WarningScope.NODE},
+ ),
+ "C0325": (
+ "Unnecessary parens after %r keyword",
+ "superfluous-parens",
+ "Used when a single item in parentheses follows an if, for, or "
+ "other keyword.",
+ ),
+ "C0326": (
+ "%s space %s %s %s\n%s",
+ "bad-whitespace",
+ (
+ "Used when a wrong number of spaces is used around an operator, "
+ "bracket or block opener."
+ ),
+ {
+ "old_names": [
+ ("C0323", "no-space-after-operator"),
+ ("C0324", "no-space-after-comma"),
+ ("C0322", "no-space-before-operator"),
+ ]
+ },
+ ),
+ "C0327": (
+ "Mixed line endings LF and CRLF",
+ "mixed-line-endings",
+ "Used when there are mixed (LF and CRLF) newline signs in a file.",
+ ),
+ "C0328": (
+ "Unexpected line ending format. There is '%s' while it should be '%s'.",
+ "unexpected-line-ending-format",
+ "Used when there is different newline than expected.",
+ ),
+}
+
+
+def _underline_token(token):
+ length = token[3][1] - token[2][1]
+ offset = token[2][1]
+ referenced_line = token[4]
+ # If the referenced line does not end with a newline char, fix it
+ if referenced_line[-1] != "\n":
+ referenced_line += "\n"
+ return referenced_line + (" " * offset) + ("^" * length)
+
+
+def _column_distance(token1, token2):
+ if token1 == token2:
+ return 0
+ if token2[3] < token1[3]:
+ token1, token2 = token2, token1
+ if token1[3][0] != token2[2][0]:
+ return None
+ return token2[2][1] - token1[3][1]
+
+
+def _last_token_on_line_is(tokens, line_end, token):
+ return (
+ line_end > 0
+ and tokens.token(line_end - 1) == token
+ or line_end > 1
+ and tokens.token(line_end - 2) == token
+ and tokens.type(line_end - 1) == tokenize.COMMENT
+ )
+
+
+def _token_followed_by_eol(tokens, position):
+ return (
+ tokens.type(position + 1) == tokenize.NL
+ or tokens.type(position + 1) == tokenize.COMMENT
+ and tokens.type(position + 2) == tokenize.NL
+ )
+
+
+def _get_indent_string(line):
+ """Return the indention string of the given line."""
+ result = ""
+ for char in line:
+ if char in " \t":
+ result += char
+ else:
+ break
+ return result
+
+
def _get_indent_length(line):
    """Return the indentation width of *line*, counting a tab as _TAB_LENGTH columns."""
    width = 0
    for char in line:
        if char not in " \t":
            break
        width += _TAB_LENGTH if char == "\t" else 1
    return width
+
+
def _get_indent_hint_line(bar_positions, bad_position):
    """Build a visual hint marking valid (``|``) and actual (``^``) indent columns.

    Returns a ``(marker_line, delta_message)`` tuple; *delta_message* is a
    human-readable fix hint ("add/remove N spaces") emitted only when there
    is exactly one valid position.
    """
    if not bar_positions:
        return "", ""

    valid_columns = [_get_indent_length(indent) for indent in bar_positions]
    actual_column = _get_indent_length(bad_position)
    markers = [(column, "|") for column in valid_columns]
    delta_message = ""
    if len(markers) == 1:
        # With a single valid position we can tell the user exactly how to fix it.
        expected_column = markers[0][0]
        delta = abs(expected_column - actual_column)
        direction = "add" if expected_column > actual_column else "remove"
        delta_message = _CONTINUATION_HINT_MESSAGE % (
            direction,
            delta,
            "s" if delta > 1 else "",
        )
    markers.append((actual_column, "^"))
    markers.sort()
    hint = [" "] * (markers[-1][0] + 1)
    for column, marker in markers:
        hint[column] = marker
    return "".join(hint), delta_message
+
+
+class _ContinuedIndent:
+ __slots__ = (
+ "valid_outdent_strings",
+ "valid_continuation_strings",
+ "context_type",
+ "token",
+ "position",
+ )
+
+ def __init__(
+ self,
+ context_type,
+ token,
+ position,
+ valid_outdent_strings,
+ valid_continuation_strings,
+ ):
+ self.valid_outdent_strings = valid_outdent_strings
+ self.valid_continuation_strings = valid_continuation_strings
+ self.context_type = context_type
+ self.position = position
+ self.token = token
+
+
# The contexts for hanging indents.
# A hanging indented dictionary value after :
HANGING_DICT_VALUE = "dict-value"
# Hanging indentation in an expression.
HANGING = "hanging"
# Hanging indentation in a block header.
HANGING_BLOCK = "hanging-block"
# Continued indentation inside an expression.
CONTINUED = "continued"
# Continued indentation in a block header.
CONTINUED_BLOCK = "continued-block"

# Markers used by _BeforeBlockIndentations to distinguish a statement that
# fits on a single logical line from one followed by an indented body.
SINGLE_LINE = "single"
WITH_BODY = "multi"

# (adjective, context suffix) pairs used when building the text of
# bad-continuation messages for each context type above.
_CONTINUATION_MSG_PARTS = {
    HANGING_DICT_VALUE: ("hanging", " in dict value"),
    HANGING: ("hanging", ""),
    HANGING_BLOCK: ("hanging", " before block"),
    CONTINUED: ("continued", ""),
    CONTINUED_BLOCK: ("continued", " before block"),
}

_CONTINUATION_HINT_MESSAGE = " (%s %d space%s)"  # Ex: (remove 2 spaces)
+
+
+def _Indentations(*args):
+ """Valid indentation strings for a continued line."""
+ return {a: None for a in args}
+
+
def _BeforeBlockIndentations(single, with_body):
    """Valid alternative indentation strings for continued lines before blocks.

    :param single: valid indentation string when the statement fits a single
        logical line.
    :param with_body: valid indentation string when the statement is spread
        over several lines.

    :returns: dict mapping each indent string to its line/block marker
        (SINGLE_LINE or WITH_BODY).
    :rtype: dict
    """
    return {single: SINGLE_LINE, with_body: WITH_BODY}
+
+
class TokenWrapper:
    """Readable, index-based access to a raw tokenize token stream."""

    def __init__(self, tokens):
        self._tokens = tokens

    def token(self, idx):
        """The token's string content."""
        return self._tokens[idx][1]

    def type(self, idx):
        """The token's tokenize type."""
        return self._tokens[idx][0]

    def start_line(self, idx):
        """Physical line number on which the token starts."""
        return self._tokens[idx][2][0]

    def start_col(self, idx):
        """Column at which the token starts."""
        return self._tokens[idx][2][1]

    def line(self, idx):
        """Full text of the physical line containing the token."""
        return self._tokens[idx][4]

    def line_indent(self, idx):
        """Get the string of TABs and Spaces used for indentation of the line of this token"""
        return _get_indent_string(self.line(idx))

    def token_indent(self, idx):
        """Hanging-indent string for this token: the line indent padded with
        spaces up to the token's starting column.

        e.g. the token indent for foo
        in "<TAB><TAB>print(foo)"
        is "<TAB><TAB>      "
        """
        prefix = self.line_indent(idx)
        padding = " " * (self.start_col(idx) - len(prefix))
        return prefix + padding
+
+
+class ContinuedLineState:
+ """Tracker for continued indentation inside a logical line."""
+
+ def __init__(self, tokens, config):
+ self._line_start = -1
+ self._cont_stack = []
+ self._is_block_opener = False
+ self.retained_warnings = []
+ self._config = config
+ self._tokens = TokenWrapper(tokens)
+
+ @property
+ def has_content(self):
+ return bool(self._cont_stack)
+
+ @property
+ def _block_indent_string(self):
+ return self._config.indent_string.replace("\\t", "\t")
+
+ @property
+ def _continuation_string(self):
+ return self._block_indent_string[0] * self._config.indent_after_paren
+
+ @property
+ def _continuation_size(self):
+ return self._config.indent_after_paren
+
+ def handle_line_start(self, pos):
+ """Record the first non-junk token at the start of a line."""
+ if self._line_start > -1:
+ return
+
+ check_token_position = pos
+ if self._tokens.token(pos) == _ASYNC_TOKEN:
+ check_token_position += 1
+ self._is_block_opener = (
+ self._tokens.token(check_token_position) in _CONTINUATION_BLOCK_OPENERS
+ )
+ self._line_start = pos
+
+ def next_physical_line(self):
+ """Prepares the tracker for a new physical line (NL)."""
+ self._line_start = -1
+ self._is_block_opener = False
+
+ def next_logical_line(self):
+ """Prepares the tracker for a new logical line (NEWLINE).
+
+ A new logical line only starts with block indentation.
+ """
+ self.next_physical_line()
+ self.retained_warnings = []
+ self._cont_stack = []
+
+ def add_block_warning(self, token_position, state, valid_indentations):
+ self.retained_warnings.append((token_position, state, valid_indentations))
+
+ def get_valid_indentations(self, idx):
+ """Returns the valid offsets for the token at the given position."""
+ # The closing brace on a dict or the 'for' in a dict comprehension may
+ # reset two indent levels because the dict value is ended implicitly
+ stack_top = -1
+ if (
+ self._tokens.token(idx) in ("}", "for")
+ and self._cont_stack[-1].token == ":"
+ ):
+ stack_top = -2
+ indent = self._cont_stack[stack_top]
+ if self._tokens.token(idx) in _CLOSING_BRACKETS:
+ valid_indentations = indent.valid_outdent_strings
+ else:
+ valid_indentations = indent.valid_continuation_strings
+ return indent, valid_indentations.copy()
+
    def _hanging_indent_after_bracket(self, bracket, position):
        """Extracts indentation information for a hanging indent

        Case of hanging indent after a bracket (including parenthesis):
        the bracket is the last token on its line, so following lines are
        indented relative to the line that opened the bracket.

        :param str bracket: bracket in question
        :param int position: Position of bracket in self._tokens

        :returns: the state and valid positions for hanging indentation
        :rtype: _ContinuedIndent
        """
        indentation = self._tokens.line_indent(position)
        if (
            self._is_block_opener
            and self._continuation_string == self._block_indent_string
        ):
            # The bracket opens a block statement (if/for/def ...): the
            # continued line may stay level or add one continuation unit,
            # and the block body afterwards needs one or two units.
            return _ContinuedIndent(
                HANGING_BLOCK,
                bracket,
                position,
                _Indentations(indentation + self._continuation_string, indentation),
                _BeforeBlockIndentations(
                    indentation + self._continuation_string,
                    indentation + self._continuation_string * 2,
                ),
            )
        if bracket == ":":
            # If the dict key was on the same line as the open brace, the new
            # correct indent should be relative to the key instead of the
            # current indent level
            paren_align = self._cont_stack[-1].valid_outdent_strings
            next_align = self._cont_stack[-1].valid_continuation_strings.copy()
            next_align_keys = list(next_align.keys())
            next_align[next_align_keys[0] + self._continuation_string] = True
            # Note that the continuation of
            # d = {
            #     'a': 'b'
            #         'c'
            # }
            # is handled by the special-casing for hanging continued string indents.
            return _ContinuedIndent(
                HANGING_DICT_VALUE, bracket, position, paren_align, next_align
            )
        # Plain hanging bracket: outdent back to the bracket's line, or
        # continue one unit further in.
        return _ContinuedIndent(
            HANGING,
            bracket,
            position,
            _Indentations(indentation, indentation + self._continuation_string),
            _Indentations(indentation + self._continuation_string),
        )
+
    def _continuation_inside_bracket(self, bracket, position):
        """Extracts indentation information for a continued indent.

        Case of a token following the bracket on the same line: later lines
        must align with the bracket itself or with the first token after it.

        :param str bracket: bracket in question
        :param int position: Position of bracket in self._tokens

        :returns: the state and valid positions for continued indentation
        :rtype: _ContinuedIndent
        """
        indentation = self._tokens.line_indent(position)
        token_indent = self._tokens.token_indent(position)
        next_token_indent = self._tokens.token_indent(position + 1)
        if (
            self._is_block_opener
            and next_token_indent == indentation + self._block_indent_string
        ):
            # Continued line that also opens a block: the decision about the
            # body indent is deferred (see CONTINUED_BLOCK retained warnings).
            return _ContinuedIndent(
                CONTINUED_BLOCK,
                bracket,
                position,
                _Indentations(token_indent),
                _BeforeBlockIndentations(
                    next_token_indent, next_token_indent + self._continuation_string
                ),
            )
        return _ContinuedIndent(
            CONTINUED,
            bracket,
            position,
            _Indentations(token_indent, next_token_indent),
            _Indentations(next_token_indent),
        )
+
+ def pop_token(self):
+ self._cont_stack.pop()
+
+ def push_token(self, token, position):
+ """Pushes a new token for continued indentation on the stack.
+
+ Tokens that can modify continued indentation offsets are:
+ * opening brackets
+ * 'lambda'
+ * : inside dictionaries
+
+ push_token relies on the caller to filter out those
+ interesting tokens.
+
+ :param int token: The concrete token
+ :param int position: The position of the token in the stream.
+ """
+ if _token_followed_by_eol(self._tokens, position):
+ self._cont_stack.append(self._hanging_indent_after_bracket(token, position))
+ else:
+ self._cont_stack.append(self._continuation_inside_bracket(token, position))
+
+
+class FormatChecker(BaseTokenChecker):
+ """checks for :
+ * unauthorized constructions
+ * strict indentation
+ * line length
+ """
+
+ __implements__ = (ITokenChecker, IAstroidChecker, IRawChecker)
+
+ # configuration section name
+ name = "format"
+ # messages
+ msgs = MSGS
+ # configuration options
+ # for available dict keys/values see the optik parser 'add_option' method
+ options = (
+ (
+ "max-line-length",
+ {
+ "default": 100,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of characters on a single line.",
+ },
+ ),
+ (
+ "ignore-long-lines",
+ {
+ "type": "regexp",
+ "metavar": "<regexp>",
+ "default": r"^\s*(# )?<?https?://\S+>?$",
+ "help": (
+ "Regexp for a line that is allowed to be longer than " "the limit."
+ ),
+ },
+ ),
+ (
+ "single-line-if-stmt",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": (
+ "Allow the body of an if to be on the same "
+ "line as the test if there is no else."
+ ),
+ },
+ ),
+ (
+ "single-line-class-stmt",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": (
+ "Allow the body of a class to be on the same "
+ "line as the declaration if body contains "
+ "single statement."
+ ),
+ },
+ ),
+ (
+ "no-space-check",
+ {
+ "default": ",".join(_DEFAULT_NO_SPACE_CHECK_CHOICES),
+ "metavar": ",".join(_NO_SPACE_CHECK_CHOICES),
+ "type": "multiple_choice",
+ "choices": _NO_SPACE_CHECK_CHOICES,
+ "help": (
+ "List of optional constructs for which whitespace "
+ "checking is disabled. "
+ "`" + _DICT_SEPARATOR + "` is used to allow tabulation "
+ "in dicts, etc.: {1 : 1,\\n222: 2}. "
+ "`" + _TRAILING_COMMA + "` allows a space between comma "
+ "and closing bracket: (a, ). "
+ "`" + _EMPTY_LINE + "` allows space-only lines."
+ ),
+ },
+ ),
+ (
+ "max-module-lines",
+ {
+ "default": 1000,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Maximum number of lines in a module.",
+ },
+ ),
+ (
+ "indent-string",
+ {
+ "default": " ",
+ "type": "non_empty_string",
+ "metavar": "<string>",
+ "help": "String used as indentation unit. This is usually "
+ '" " (4 spaces) or "\\t" (1 tab).',
+ },
+ ),
+ (
+ "indent-after-paren",
+ {
+ "type": "int",
+ "metavar": "<int>",
+ "default": 4,
+ "help": "Number of spaces of indent required inside a hanging "
+ "or continued line.",
+ },
+ ),
+ (
+ "expected-line-ending-format",
+ {
+ "type": "choice",
+ "metavar": "<empty or LF or CRLF>",
+ "default": "",
+ "choices": ["", "LF", "CRLF"],
+ "help": (
+ "Expected format of line ending, "
+ "e.g. empty (any line ending), LF or CRLF."
+ ),
+ },
+ ),
+ )
+
+ def __init__(self, linter=None):
+ BaseTokenChecker.__init__(self, linter)
+ self._lines = None
+ self._visited_lines = None
+ self._bracket_stack = [None]
+
    def _pop_token(self):
        # Close the innermost bracket context in both the raw bracket stack
        # and the continued-indentation state machine.
        self._bracket_stack.pop()
        self._current_line.pop_token()
+
    def _push_token(self, token, idx):
        # Open a new bracket context in both the raw bracket stack and the
        # continued-indentation state machine.
        self._bracket_stack.append(token)
        self._current_line.push_token(token, idx)
+
    def new_line(self, tokens, line_end, line_start):
        """a new line has been encountered, process it if necessary"""
        # A ';' as the last token of a line is always redundant.
        if _last_token_on_line_is(tokens, line_end, ";"):
            self.add_message("unnecessary-semicolon", line=tokens.start_line(line_end))

        line_num = tokens.start_line(line_start)
        line = tokens.line(line_start)
        if tokens.type(line_start) not in _JUNK_TOKENS:
            # Remember the first physical line of the statement so
            # visit_default can inspect it later.
            self._lines[line_num] = line.split("\n")[0]
        self.check_lines(line, line_num)
+
    def process_module(self, _module):
        # Reset per-module state consulted by _check_keyword_parentheses.
        self._keywords_with_parens = set()
+
    def _check_keyword_parentheses(self, tokens, start):
        """Check that there are not unnecessary parens after a keyword.

        Parens are unnecessary if there is exactly one balanced outer pair on a
        line, and it is followed by a colon, and contains no commas (i.e. is not a
        tuple).

        Args:
            tokens: list of Tokens; the entire list of Tokens.
            start: int; the position of the keyword in the token list.
        """
        # A 'for' inside a dict display ends an implicit dict-value context
        # (dict comprehension), so close it before any paren analysis.
        if self._inside_brackets(":") and tokens[start][1] == "for":
            self._pop_token()
        # If the next token is not a paren, we're fine.
        if tokens[start + 1][1] != "(":
            return

        found_and_or = False
        depth = 0
        keyword_token = str(tokens[start][1])
        line_num = tokens[start][2][0]

        for i in range(start, len(tokens) - 1):
            token = tokens[i]

            # If we hit a newline, then assume any parens were for continuation.
            if token[0] == tokenize.NL:
                return

            if token[1] == "(":
                depth += 1
            elif token[1] == ")":
                depth -= 1
                if depth:
                    continue
                # ')' can't happen after if (foo), since it would be a syntax error.
                if tokens[i + 1][1] in (":", ")", "]", "}", "in") or tokens[i + 1][
                    0
                ] in (tokenize.NEWLINE, tokenize.ENDMARKER, tokenize.COMMENT):
                    # The empty tuple () is always accepted.
                    if i == start + 2:
                        return
                    if keyword_token == "not":
                        if not found_and_or:
                            self.add_message(
                                "superfluous-parens", line=line_num, args=keyword_token
                            )
                    elif keyword_token in ("return", "yield"):
                        self.add_message(
                            "superfluous-parens", line=line_num, args=keyword_token
                        )
                    elif keyword_token not in self._keywords_with_parens:
                        if not found_and_or:
                            self.add_message(
                                "superfluous-parens", line=line_num, args=keyword_token
                            )
                return
            elif depth == 1:
                # This is a tuple, which is always acceptable.
                if token[1] == ",":
                    return
                # 'and' and 'or' are the only boolean operators with lower precedence
                # than 'not', so parens are only required when they are found.
                if token[1] in ("and", "or"):
                    found_and_or = True
                # A yield inside an expression must always be in parentheses,
                # quit early without error.
                elif token[1] == "yield":
                    return
                # A generator expression always has a 'for' token in it, and
                # the 'for' token is only legal inside parens when it is in a
                # generator expression.  The parens are necessary here, so bail
                # without an error.
                elif token[1] == "for":
                    return
+
    def _opening_bracket(self, tokens, i):
        """Check spacing around an opening bracket and track the nesting."""
        self._push_token(tokens[i][1], i)
        # Special case: ignore slices
        if tokens[i][1] == "[" and tokens[i + 1][1] == ":":
            return

        # A bracket that directly follows a name or a closing bracket is a
        # call/subscript: no space allowed on either side. Otherwise only
        # the inside is constrained.
        if i > 0 and (
            tokens[i - 1][0] == tokenize.NAME
            and not (keyword.iskeyword(tokens[i - 1][1]))
            or tokens[i - 1][1] in _CLOSING_BRACKETS
        ):
            self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
        else:
            self._check_space(tokens, i, (_IGNORE, _MUST_NOT))
+
    def _closing_bracket(self, tokens, i):
        """Check spacing before a closing bracket and unwind the nesting."""
        # A closing brace also terminates a pending dict-value (':') context,
        # so two stack entries may need popping.
        if self._inside_brackets(":"):
            self._pop_token()
        self._pop_token()
        # Special case: ignore slices
        if tokens[i - 1][1] == ":" and tokens[i][1] == "]":
            return
        policy_before = _MUST_NOT
        if tokens[i][1] in _CLOSING_BRACKETS and tokens[i - 1][1] == ",":
            # Optionally tolerate "(a, )" when the trailing-comma check is off.
            if _TRAILING_COMMA in self.config.no_space_check:
                policy_before = _IGNORE

        self._check_space(tokens, i, (policy_before, _IGNORE))
+
    def _has_valid_type_annotation(self, tokens, i):
        """Extended check of PEP-484 type hint presence"""
        if not self._inside_brackets("("):
            return False
        # token_info
        # type string start end line
        #  0     1     2     3    4
        # Scan backwards from the '=' looking for the ':' that introduces
        # an annotation; bail out at anything that proves this is a plain
        # keyword argument instead.
        bracket_level = 0
        for token in tokens[i - 1 :: -1]:
            if token[1] == ":":
                return True
            if token[1] == "(":
                return False
            if token[1] == "]":
                bracket_level += 1
            elif token[1] == "[":
                bracket_level -= 1
            elif token[1] == ",":
                # A top-level comma separates arguments: no annotation seen.
                if not bracket_level:
                    return False
            elif token[1] in (".", "..."):
                continue
            elif token[0] not in (tokenize.NAME, tokenize.STRING, tokenize.NL):
                return False
        return False
+
    def _check_equals_spacing(self, tokens, i):
        """Check the spacing of a single equals sign."""
        if self._has_valid_type_annotation(tokens, i):
            # Annotated default: PEP 8 wants spaces around '='.
            self._check_space(tokens, i, (_MUST, _MUST))
        elif self._inside_brackets("(") or self._inside_brackets("lambda"):
            # Keyword argument / lambda default: no spaces around '='.
            self._check_space(tokens, i, (_MUST_NOT, _MUST_NOT))
        else:
            # Ordinary assignment: exactly one space on each side.
            self._check_space(tokens, i, (_MUST, _MUST))
+
    def _open_lambda(self, tokens, i):  # pylint:disable=unused-argument
        # Track the lambda so its parameter-list '=' and closing ':' are
        # handled specially.
        self._push_token("lambda", i)
+
    def _handle_colon(self, tokens, i):
        """Check spacing around ':' and update lambda/dict nesting state."""
        # Special case: ignore slices
        if self._inside_brackets("["):
            return
        if self._inside_brackets("{") and _DICT_SEPARATOR in self.config.no_space_check:
            policy = (_IGNORE, _IGNORE)
        else:
            policy = (_MUST_NOT, _MUST)
        self._check_space(tokens, i, policy)

        if self._inside_brackets("lambda"):
            # ':' ends the lambda parameter list.
            self._pop_token()
        elif self._inside_brackets("{"):
            # ':' starts a dict value; tracked for continuation indents.
            self._push_token(":", i)
+
    def _handle_comma(self, tokens, i):
        """Check spacing around ',' and close any pending dict-value state."""
        # Only require a following whitespace if this is
        # not a hanging comma before a closing bracket.
        if tokens[i + 1][1] in _CLOSING_BRACKETS:
            self._check_space(tokens, i, (_MUST_NOT, _IGNORE))
        else:
            self._check_space(tokens, i, (_MUST_NOT, _MUST))
        if self._inside_brackets(":"):
            # A comma terminates the current dict value.
            self._pop_token()
+
    def _check_surrounded_by_space(self, tokens, i):
        """Check that a binary operator is surrounded by exactly one space."""
        self._check_space(tokens, i, (_MUST, _MUST))
+
    def _check_space(self, tokens, i, policies):
        """Check whitespace around token i against the (before, after) policies.

        Each policy is one of _MUST (exactly one space), _MUST_NOT (no space)
        or _IGNORE; violations emit 'bad-whitespace'.
        """

        def _policy_string(policy):
            # Human-readable fragments for the message.
            if policy == _MUST:
                return "Exactly one", "required"
            return "No", "allowed"

        def _name_construct(token):
            # Classify the token for the message text.
            if token[1] == ",":
                return "comma"
            if token[1] == ":":
                return ":"
            if token[1] in "()[]{}":
                return "bracket"
            if token[1] in ("<", ">", "<=", ">=", "!=", "=="):
                return "comparison"
            if self._inside_brackets("("):
                return "keyword argument assignment"
            return "assignment"

        good_space = [True, True]
        token = tokens[i]
        pairs = [(tokens[i - 1], token), (token, tokens[i + 1])]

        for other_idx, (policy, token_pair) in enumerate(zip(policies, pairs)):
            if token_pair[other_idx][0] in _EOL or policy == _IGNORE:
                continue

            distance = _column_distance(*token_pair)
            if distance is None:
                # The pair spans multiple lines; no spacing to check.
                continue
            good_space[other_idx] = (policy == _MUST and distance == 1) or (
                policy == _MUST_NOT and distance == 0
            )

        warnings = []
        if not any(good_space) and policies[0] == policies[1]:
            # Both sides wrong with the same policy: report once as "around".
            warnings.append((policies[0], "around"))
        else:
            for ok, policy, position in zip(good_space, policies, ("before", "after")):
                if not ok:
                    warnings.append((policy, position))
        for policy, position in warnings:
            construct = _name_construct(token)
            count, state = _policy_string(policy)
            self.add_message(
                "bad-whitespace",
                line=token[2][0],
                args=(count, state, position, construct, _underline_token(token)),
                col_offset=token[2][1],
            )
+
    def _inside_brackets(self, left):
        # True when the innermost open context was opened by `left`.
        return self._bracket_stack[-1] == left
+
+ def _prepare_token_dispatcher(self):
+ raw = [
+ (_KEYWORD_TOKENS, self._check_keyword_parentheses),
+ (_OPENING_BRACKETS, self._opening_bracket),
+ (_CLOSING_BRACKETS, self._closing_bracket),
+ (["="], self._check_equals_spacing),
+ (_SPACED_OPERATORS, self._check_surrounded_by_space),
+ ([","], self._handle_comma),
+ ([":"], self._handle_colon),
+ (["lambda"], self._open_lambda),
+ ]
+
+ dispatch = {}
+ for tokens, handler in raw:
+ for token in tokens:
+ dispatch[token] = handler
+ return dispatch
+
    def process_tokens(self, tokens):
        """process tokens and search for :

        _ non strict indentation (i.e. not always using the <indent> parameter as
          indent unit)
        _ too long lines (i.e. longer than <max_chars>)
        _ optionally bad construct (if given, bad_construct must be a compiled
          regular expression).

        Single pass over the token stream; per-token handlers from
        _prepare_token_dispatcher do the whitespace/bracket checks while this
        loop tracks lines, block indentation and continuation state.
        """
        self._bracket_stack = [None]
        indents = [0]
        check_equal = False
        line_num = 0
        self._lines = {}
        self._visited_lines = {}
        token_handlers = self._prepare_token_dispatcher()
        self._last_line_ending = None
        last_blank_line_num = 0

        self._current_line = ContinuedLineState(tokens, self.config)
        for idx, (tok_type, token, start, _, line) in enumerate(tokens):
            if start[0] != line_num:
                line_num = start[0]
                # A tokenizer oddity: if an indented line contains a multi-line
                # docstring, the line member of the INDENT token does not contain
                # the full line; therefore we check the next token on the line.
                if tok_type == tokenize.INDENT:
                    self.new_line(TokenWrapper(tokens), idx - 1, idx + 1)
                else:
                    self.new_line(TokenWrapper(tokens), idx - 1, idx)

            if tok_type == tokenize.NEWLINE:
                # a program statement, or ENDMARKER, will eventually follow,
                # after some (possibly empty) run of tokens of the form
                #     (NL | COMMENT)* (INDENT | DEDENT+)?
                # If an INDENT appears, setting check_equal is wrong, and will
                # be undone when we see the INDENT.
                check_equal = True
                self._process_retained_warnings(TokenWrapper(tokens), idx)
                self._current_line.next_logical_line()
                self._check_line_ending(token, line_num)
            elif tok_type == tokenize.INDENT:
                check_equal = False
                self.check_indent_level(token, indents[-1] + 1, line_num)
                indents.append(indents[-1] + 1)
            elif tok_type == tokenize.DEDENT:
                # there's nothing we need to check here!  what's important is
                # that when the run of DEDENTs ends, the indentation of the
                # program statement (or ENDMARKER) that triggered the run is
                # equal to what's left at the top of the indents stack
                check_equal = True
                if len(indents) > 1:
                    del indents[-1]
            elif tok_type == tokenize.NL:
                if not line.strip("\r\n"):
                    last_blank_line_num = line_num
                self._check_continued_indentation(TokenWrapper(tokens), idx + 1)
                self._current_line.next_physical_line()
            elif tok_type not in (tokenize.COMMENT, tokenize.ENCODING):
                self._current_line.handle_line_start(idx)
                # This is the first concrete token following a NEWLINE, so it
                # must be the first token of the next program statement, or an
                # ENDMARKER; the "line" argument exposes the leading whitespace
                # for this statement; in the case of ENDMARKER, line is an empty
                # string, so will properly match the empty string with which the
                # "indents" stack was seeded
                if check_equal:
                    check_equal = False
                    self.check_indent_level(line, indents[-1], line_num)

            if tok_type == tokenize.NUMBER and token.endswith("l"):
                self.add_message("lowercase-l-suffix", line=line_num)

            try:
                handler = token_handlers[token]
            except KeyError:
                pass
            else:
                handler(tokens, idx)

        line_num -= 1  # to be ok with "wc -l"
        if line_num > self.config.max_module_lines:
            # Get the line where the too-many-lines (or its message id)
            # was disabled or default to 1.
            message_definition = self.linter.msgs_store.get_message_definitions(
                "too-many-lines"
            )[0]
            names = (message_definition.msgid, "too-many-lines")
            line = next(filter(None, map(self.linter._pragma_lineno.get, names)), 1)
            self.add_message(
                "too-many-lines",
                args=(line_num, self.config.max_module_lines),
                line=line,
            )

        # See if there are any trailing lines.  Do not complain about empty
        # files like __init__.py markers.
        if line_num == last_blank_line_num and line_num > 0:
            self.add_message("trailing-newlines", line=line_num)
+
    def _check_line_ending(self, line_ending, line_num):
        """Check the newline characters of one logical line.

        Emits 'mixed-line-endings' when a file alternates between LF and
        CRLF, and 'unexpected-line-ending-format' when a configured format
        is violated.
        """
        # check if line endings are mixed
        if self._last_line_ending is not None:
            # line_ending == "" indicates a synthetic newline added at
            # the end of a file that does not, in fact, end with a
            # newline.
            if line_ending and line_ending != self._last_line_ending:
                self.add_message("mixed-line-endings", line=line_num)

        self._last_line_ending = line_ending

        # check if line ending is as expected
        expected = self.config.expected_line_ending_format
        if expected:
            # reduce multiple \n\n\n\n to one \n
            line_ending = reduce(lambda x, y: x + y if x != y else x, line_ending, "")
            line_ending = "LF" if line_ending == "\n" else "CRLF"
            if line_ending != expected:
                self.add_message(
                    "unexpected-line-ending-format",
                    args=(line_ending, expected),
                    line=line_num,
                )
+
    def _process_retained_warnings(self, tokens, current_pos):
        """Emit continuation warnings deferred until the block opener is seen.

        A continued line before a block opener can only be judged once we
        know whether the block body shares the opener's line.
        """
        single_line_block_stmt = not _last_token_on_line_is(tokens, current_pos, ":")

        for indent_pos, state, indentations in self._current_line.retained_warnings:
            block_type = indentations[tokens.token_indent(indent_pos)]
            hints = {k: v for k, v in indentations.items() if v != block_type}
            if single_line_block_stmt and block_type == WITH_BODY:
                # Indent assumed a separate body line, but the body is inline.
                self._add_continuation_message(state, hints, tokens, indent_pos)
            elif not single_line_block_stmt and block_type == SINGLE_LINE:
                # Indent assumed an inline body, but the body is on its own line.
                self._add_continuation_message(state, hints, tokens, indent_pos)
+
    def _check_continued_indentation(self, tokens, next_idx):
        """Validate the indentation of the continuation line starting at next_idx."""

        def same_token_around_nl(token_type):
            # True when the tokens just before and after the NL share token_type.
            return (
                tokens.type(next_idx) == token_type
                and tokens.type(next_idx - 2) == token_type
            )

        # Do not issue any warnings if the next line is empty.
        if not self._current_line.has_content or tokens.type(next_idx) == tokenize.NL:
            return

        state, valid_indentations = self._current_line.get_valid_indentations(next_idx)
        # Special handling for hanging comments and strings. If the last line ended
        # with a comment (string) and the new line contains only a comment, the line
        # may also be indented to the start of the previous token.
        if same_token_around_nl(tokenize.COMMENT) or same_token_around_nl(
            tokenize.STRING
        ):
            valid_indentations[tokens.token_indent(next_idx - 2)] = True

        # We can only decide if the indentation of a continued line before opening
        # a new block is valid once we know of the body of the block is on the
        # same line as the block opener. Since the token processing is single-pass,
        # emitting those warnings is delayed until the block opener is processed.
        if (
            state.context_type in (HANGING_BLOCK, CONTINUED_BLOCK)
            and tokens.token_indent(next_idx) in valid_indentations
        ):
            self._current_line.add_block_warning(next_idx, state, valid_indentations)
        elif tokens.token_indent(next_idx) not in valid_indentations:
            # Tolerate double-width indentation (e.g. tab-based mixed files)
            # before reporting.
            length_indentation = len(tokens.token_indent(next_idx))
            if not any(
                length_indentation == 2 * len(indentation)
                for indentation in valid_indentations
            ):
                self._add_continuation_message(
                    state, valid_indentations, tokens, next_idx
                )
+
    def _add_continuation_message(self, state, indentations, tokens, position):
        """Emit a 'bad-continuation' message with a visual indent hint."""
        readable_type, readable_position = _CONTINUATION_MSG_PARTS[state.context_type]
        hint_line, delta_message = _get_indent_hint_line(
            indentations, tokens.token_indent(position)
        )
        self.add_message(
            "bad-continuation",
            line=tokens.start_line(position),
            args=(
                readable_type,
                readable_position,
                delta_message,
                tokens.line(position),
                hint_line,
            ),
        )
+
+ @check_messages("multiple-statements")
+ def visit_default(self, node):
+ """check the node line number and check it if not yet done"""
+ if not node.is_statement:
+ return
+ if not node.root().pure_python:
+ return
+ prev_sibl = node.previous_sibling()
+ if prev_sibl is not None:
+ prev_line = prev_sibl.fromlineno
+ else:
+ # The line on which a finally: occurs in a try/finally
+ # is not directly represented in the AST. We infer it
+ # by taking the last line of the body and adding 1, which
+ # should be the line of finally:
+ if (
+ isinstance(node.parent, nodes.TryFinally)
+ and node in node.parent.finalbody
+ ):
+ prev_line = node.parent.body[0].tolineno + 1
+ else:
+ prev_line = node.parent.statement().fromlineno
+ line = node.fromlineno
+ assert line, node
+ if prev_line == line and self._visited_lines.get(line) != 2:
+ self._check_multi_statement_line(node, line)
+ return
+ if line in self._visited_lines:
+ return
+ try:
+ tolineno = node.blockstart_tolineno
+ except AttributeError:
+ tolineno = node.tolineno
+ assert tolineno, node
+ lines = []
+ for line in range(line, tolineno + 1):
+ self._visited_lines[line] = 1
+ try:
+ lines.append(self._lines[line].rstrip())
+ except KeyError:
+ lines.append("")
+
    def _check_multi_statement_line(self, node, line):
        """Check for lines containing multiple statements."""
        # Do not warn about multiple nested context managers
        # in with statements.
        if isinstance(node, nodes.With):
            return
        # For try... except... finally..., the two nodes
        # appear to be on the same line due to how the AST is built.
        if isinstance(node, nodes.TryExcept) and isinstance(
            node.parent, nodes.TryFinally
        ):
            return
        # Optionally allow "if x: y" with no else clause.
        if (
            isinstance(node.parent, nodes.If)
            and not node.parent.orelse
            and self.config.single_line_if_stmt
        ):
            return
        # Optionally allow "class C: pass"-style one-liners.
        if (
            isinstance(node.parent, nodes.ClassDef)
            and len(node.parent.body) == 1
            and self.config.single_line_class_stmt
        ):
            return
        self.add_message("multiple-statements", node=node)
        # 2 == already reported; suppresses duplicate messages for this line.
        self._visited_lines[line] = 2
+
    def check_lines(self, lines, i):
        """check lines have less than a maximum number of characters

        Also reports missing final newlines and trailing whitespace.
        `lines` is the raw text of one or more physical lines; `i` is the
        1-based number of the first of them.
        """
        max_chars = self.config.max_line_length
        ignore_long_line = self.config.ignore_long_lines

        def check_line(line, i):
            # Check a single physical line; returns the next line number,
            # or None to abort (when a pragma disables line-too-long).
            if not line.endswith("\n"):
                self.add_message("missing-final-newline", line=i)
            else:
                # exclude \f (formfeed) from the rstrip
                stripped_line = line.rstrip("\t\n\r\v ")
                if not stripped_line and _EMPTY_LINE in self.config.no_space_check:
                    # allow empty lines
                    pass
                elif line[len(stripped_line) :] not in ("\n", "\r\n"):
                    self.add_message(
                        "trailing-whitespace", line=i, col_offset=len(stripped_line)
                    )
                # Don't count excess whitespace in the line length.
                line = stripped_line
            mobj = OPTION_RGX.search(line)
            if mobj and "=" in line:
                front_of_equal, _, back_of_equal = mobj.group(1).partition("=")
                if front_of_equal.strip() == "disable":
                    if "line-too-long" in {
                        _msg_id.strip() for _msg_id in back_of_equal.split(",")
                    }:
                        return None
                # The pragma comment itself does not count towards the length.
                line = line.rsplit("#", 1)[0].rstrip()

            if len(line) > max_chars and not ignore_long_line.search(line):
                self.add_message("line-too-long", line=i, args=(len(line), max_chars))
            return i + 1

        # Characters that str.splitlines treats as line breaks but that do
        # not end a physical line for our purposes; lines ending with one of
        # these are glued back to the following fragment.
        unsplit_ends = {
            "\v",
            "\x0b",
            "\f",
            "\x0c",
            "\x1c",
            "\x1d",
            "\x1e",
            "\x85",
            "\u2028",
            "\u2029",
        }
        unsplit = []
        for line in lines.splitlines(True):
            if line[-1] in unsplit_ends:
                unsplit.append(line)
                continue

            if unsplit:
                unsplit.append(line)
                line = "".join(unsplit)
                unsplit = []

            i = check_line(line, i)
            if i is None:
                break

        if unsplit:
            check_line("".join(unsplit), i)
+
    def check_indent_level(self, string, expected, line_num):
        """return the indent level of the string

        Compares the leading whitespace of `string` against `expected`
        indentation units; emits 'mixed-indentation' or 'bad-indentation'
        on mismatch.
        """
        indent = self.config.indent_string
        if indent == "\\t":  # \t is not interpreted in the configuration file
            indent = "\t"
        level = 0
        unit_size = len(indent)
        # Count whole indentation units.
        while string[:unit_size] == indent:
            string = string[unit_size:]
            level += 1
        suppl = ""
        # Collect any leftover whitespace that is not a full unit.
        while string and string[0] in " \t":
            if string[0] != indent[0]:
                if string[0] == "\t":
                    args = ("tab", "space")
                else:
                    args = ("space", "tab")
                self.add_message("mixed-indentation", args=args, line=line_num)
                return level
            suppl += string[0]
            string = string[1:]
        if level != expected or suppl:
            i_type = "spaces"
            if indent[0] == "\t":
                i_type = "tabs"
            self.add_message(
                "bad-indentation",
                line=line_num,
                args=(level * unit_size + len(suppl), i_type, expected * unit_size),
            )
        return None
+
+
def register(linter):
    """required method to auto register this checker """
    checker = FormatChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/imports.py b/venv/Lib/site-packages/pylint/checkers/imports.py
new file mode 100644
index 0000000..42d4362
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/imports.py
@@ -0,0 +1,981 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Noam Yorav-Raphael <noamraph@gmail.com>
+# Copyright (c) 2015 James Morgensen <james.morgensen@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
+# Copyright (c) 2016 Maik Röder <maikroeder@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Michka Popoff <michkapopoff@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Erik Wright <erik.wright@shopify.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Marianna Polatoglou <mpolatoglou@bloomberg.net>
+# Copyright (c) 2019 Paul Renvoise <renvoisepaul@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""imports checkers for Python code"""
+
+import collections
+import copy
+import os
+import sys
+from distutils import sysconfig
+
+import astroid
+import isort
+from astroid import modutils
+from astroid.decorators import cached
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import (
+ check_messages,
+ is_from_fallback_block,
+ node_ignores_exception,
+)
+from pylint.exceptions import EmptyReportError
+from pylint.graph import DotBackend, get_cycles
+from pylint.interfaces import IAstroidChecker
+from pylint.reporters.ureports.nodes import Paragraph, VerbatimText
+from pylint.utils import get_global_option
+
+
+def _qualified_names(modname):
+ """Split the names of the given module into subparts
+
+ For example,
+ _qualified_names('pylint.checkers.ImportsChecker')
+ returns
+ ['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
+ """
+ names = modname.split(".")
+ return [".".join(names[0 : i + 1]) for i in range(len(names))]
+
+
def _get_import_name(importnode, modname):
    """Get a prepared module name from the given import node

    In the case of relative imports, this will return the
    absolute qualified module name, which might be useful
    for debugging. Otherwise, the initial module name
    is returned unchanged.
    """
    if isinstance(importnode, astroid.ImportFrom):
        if importnode.level:
            # Relative import: resolve against the enclosing module.
            root = importnode.root()
            if isinstance(root, astroid.Module):
                modname = root.relative_to_absolute_name(
                    modname, level=importnode.level
                )
    return modname
+
+
def _get_first_import(node, context, name, base, level, alias):
    """return the node where [base.]<name> is imported or None if not found

    Used to detect re-imports: scans `context.body` for an earlier import
    of the same name that is not mutually exclusive with `node`.
    """
    fullname = "%s.%s" % (base, name) if base else name

    first = None
    found = False
    for first in context.body:
        if first is node:
            continue
        # Ignore statements in the same scope that come after `node`.
        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
            continue
        if isinstance(first, astroid.Import):
            if any(fullname == iname[0] for iname in first.names):
                found = True
                break
        elif isinstance(first, astroid.ImportFrom):
            if level == first.level:
                for imported_name, imported_alias in first.names:
                    if fullname == "%s.%s" % (first.modname, imported_name):
                        found = True
                        break
                    # Aliased imports are intentional renames, not re-imports.
                    if (
                        name != "*"
                        and name == imported_name
                        and not (alias or imported_alias)
                    ):
                        found = True
                        break
                if found:
                    break
    if found and not astroid.are_exclusive(first, node):
        return first
    return None
+
+
def _ignore_import_failure(node, modname, ignored_modules):
    """Return True when a failed import of *modname* should not be reported."""
    # Any qualified prefix appearing in ignored-modules silences the failure.
    if any(name in ignored_modules for name in _qualified_names(modname)):
        return True
    # Otherwise, honour explicit try/except ImportError guards around the node.
    return node_ignores_exception(node, ImportError)
+
+
+# utilities to represents import dependencies as tree and dot graph ###########
+
+
+def _make_tree_defs(mod_files_list):
+ """get a list of 2-uple (module, list_of_files_which_import_this_module),
+ it will return a dictionary to represent this as a tree
+ """
+ tree_defs = {}
+ for mod, files in mod_files_list:
+ node = (tree_defs, ())
+ for prefix in mod.split("."):
+ node = node[0].setdefault(prefix, [{}, []])
+ node[1] += files
+ return tree_defs
+
+
def _repr_tree_defs(data, indent_str=None):
    """return a string which represents imports as a tree

    `data` is the nested dict produced by _make_tree_defs; recursion passes
    `indent_str` to draw the ASCII tree branches.
    """
    lines = []
    nodes = data.items()
    for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
        if not files:
            files = ""
        else:
            files = "(%s)" % ",".join(sorted(files))
        if indent_str is None:
            # Root level: no branch decoration.
            lines.append("%s %s" % (mod, files))
            sub_indent_str = " "
        else:
            lines.append(r"%s\-%s %s" % (indent_str, mod, files))
            # Last sibling ends the branch; others continue it with '|'.
            if i == len(nodes) - 1:
                sub_indent_str = "%s " % indent_str
            else:
                sub_indent_str = "%s| " % indent_str
        if sub:
            lines.append(_repr_tree_defs(sub, sub_indent_str))
    return "\n".join(lines)
+
+
def _dependencies_graph(filename, dep_info):
    """write dependencies as a dot (graphviz) file

    `dep_info` maps a module name to the list of modules that import it;
    edges are therefore drawn importer -> imported.
    """
    done = {}
    printer = DotBackend(filename[:-4], rankdir="LR")
    printer.emit('URL="." node[shape="box"]')
    # First pass: declare every node exactly once.
    for modname, dependencies in sorted(dep_info.items()):
        done[modname] = 1
        printer.emit_node(modname)
        for depmodname in dependencies:
            if depmodname not in done:
                done[depmodname] = 1
                printer.emit_node(depmodname)
    # Second pass: emit the edges.
    for depmodname, dependencies in sorted(dep_info.items()):
        for modname in dependencies:
            printer.emit_edge(modname, depmodname)
    printer.generate(filename)
+
+
def _make_graph(filename, dep_info, sect, gtype):
    """generate a dependencies graph and add some information about it in the
    report's section
    """
    _dependencies_graph(filename, dep_info)
    sect.append(Paragraph("%simports graph has been written to %s" % (gtype, filename)))
+
+
+# the import checker itself ###################################################
+
# Message definitions for the import checker.  Message ids and symbols are
# part of pylint's user-facing API and must stay stable; only the W0402
# description text is corrected here ("Used a module marked as deprecated is
# imported." was ungrammatical).
MSGS = {
    "E0401": (
        "Unable to import %s",
        "import-error",
        "Used when pylint has been unable to import a module.",
        {"old_names": [("F0401", "old-import-error")]},
    ),
    "E0402": (
        "Attempted relative import beyond top-level package",
        "relative-beyond-top-level",
        "Used when a relative import tries to access too many levels "
        "in the current package.",
    ),
    "R0401": (
        "Cyclic import (%s)",
        "cyclic-import",
        "Used when a cyclic import between two or more modules is detected.",
    ),
    "W0401": (
        "Wildcard import %s",
        "wildcard-import",
        "Used when `from module import *` is detected.",
    ),
    "W0402": (
        "Uses of a deprecated module %r",
        "deprecated-module",
        "Used when a module marked as deprecated is imported.",
    ),
    "W0404": (
        "Reimport %r (imported line %s)",
        "reimported",
        "Used when a module is reimported multiple times.",
    ),
    "W0406": (
        "Module import itself",
        "import-self",
        "Used when a module is importing itself.",
    ),
    "W0407": (
        "Prefer importing %r instead of %r",
        "preferred-module",
        "Used when a module imported has a preferred replacement module.",
    ),
    "W0410": (
        "__future__ import is not the first non docstring statement",
        "misplaced-future",
        "Python 2.5 and greater require __future__ import to be the "
        "first non docstring statement in the module.",
    ),
    "C0410": (
        "Multiple imports on one line (%s)",
        "multiple-imports",
        "Used when import statement importing multiple modules is detected.",
    ),
    "C0411": (
        "%s should be placed before %s",
        "wrong-import-order",
        "Used when PEP8 import order is not respected (standard imports "
        "first, then third-party libraries, then local imports)",
    ),
    "C0412": (
        "Imports from package %s are not grouped",
        "ungrouped-imports",
        "Used when imports are not grouped by packages",
    ),
    "C0413": (
        'Import "%s" should be placed at the top of the module',
        "wrong-import-position",
        "Used when code and imports are mixed",
    ),
    "C0414": (
        "Import alias does not rename original package",
        "useless-import-alias",
        "Used when an import alias is same as original package."
        "e.g using import numpy as numpy instead of import numpy as np",
    ),
    "C0415": (
        "Import outside toplevel (%s)",
        "import-outside-toplevel",
        "Used when an import statement is used anywhere other than the module "
        "toplevel. Move this import to the top of the file.",
    ),
}
+
+
# Default values for the import-order configuration options declared on
# ImportsChecker below ("known-standard-library", "known-third-party",
# "preferred-modules").
DEFAULT_STANDARD_LIBRARY = ()
DEFAULT_KNOWN_THIRD_PARTY = ("enchant",)
DEFAULT_PREFERRED_MODULES = ()
+
+
class ImportsChecker(BaseChecker):
    """checks for
    * external modules dependencies
    * relative / wildcard imports
    * cyclic imports
    * uses of deprecated modules
    * uses of modules instead of preferred modules
    """

    __implements__ = IAstroidChecker

    # Checker registration data: configuration-section name, message table,
    # and run priority (negative runs late).
    name = "imports"
    msgs = MSGS
    priority = -2
    # Default value for the --deprecated-modules option declared below.
    deprecated_modules = ("optparse", "tkinter.tix")

    # Command-line / rcfile options, as (name, optparse-style dict) pairs.
    options = (
        (
            "deprecated-modules",
            {
                "default": deprecated_modules,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Deprecated modules which should not be used,"
                " separated by a comma.",
            },
        ),
        (
            "preferred-modules",
            {
                "default": DEFAULT_PREFERRED_MODULES,
                "type": "csv",
                "metavar": "<module:preferred-module>",
                "help": "Couples of modules and preferred modules,"
                " separated by a comma.",
            },
        ),
        (
            "import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of every (i.e. internal and"
                " external) dependencies in the given file"
                " (report RP0402 must not be disabled).",
            },
        ),
        (
            "ext-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of external dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "int-import-graph",
            {
                "default": "",
                "type": "string",
                "metavar": "<file.dot>",
                "help": "Create a graph of internal dependencies in the"
                " given file (report RP0402 must not be disabled).",
            },
        ),
        (
            "known-standard-library",
            {
                "default": DEFAULT_STANDARD_LIBRARY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "the standard compatibility libraries.",
            },
        ),
        (
            "known-third-party",
            {
                "default": DEFAULT_KNOWN_THIRD_PARTY,
                "type": "csv",
                "metavar": "<modules>",
                "help": "Force import order to recognize a module as part of "
                "a third party library.",
            },
        ),
        (
            "allow-any-import-level",
            {
                "default": (),
                "type": "csv",
                "metavar": "<modules>",
                "help": (
                    "List of modules that can be imported at any level, not just "
                    "the top level one."
                ),
            },
        ),
        (
            "analyse-fallback-blocks",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Analyse import fallback blocks. This can be used to "
                "support both Python 2 and 3 compatible code, which "
                "means that the block might have code that exists "
                "only in one or another interpreter, leading to false "
                "positives when analysed.",
            },
        ),
        (
            "allow-wildcard-with-all",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Allow wildcard imports from modules that define __all__.",
            },
        ),
    )
+
    def __init__(self, linter=None):
        """Initialize per-run state and register the dependency reports."""
        BaseChecker.__init__(self, linter)
        # Statistics dict and import graph; (re)initialized in open().
        self.stats = None
        self.import_graph = None
        # Imports seen in the current module, in source order, as
        # (node, imported name) pairs; reset in leave_module().
        self._imports_stack = []
        self._first_non_import_node = None
        self._module_pkg = {}  # mapping of modules to the pkg they belong in
        self._allow_any_import_level = set()
        # Reports registered with the linter: (id, title, callback).
        self.reports = (
            ("RP0401", "External dependencies", self._report_external_dependencies),
            ("RP0402", "Modules dependencies graph", self._report_dependencies_graph),
        )

        self._site_packages = self._compute_site_packages()
+
    @staticmethod
    def _compute_site_packages():
        """Return the set of normalized site/dist-packages paths.

        NOTE(review): ``sysconfig.get_python_lib`` is not in the stdlib
        ``sysconfig`` module — this presumably is ``distutils.sysconfig``
        imported under that name at the top of the file (not visible in
        this chunk); confirm at the import block.
        """

        def _normalized_path(path):
            # Case-normalized absolute path, so membership tests are
            # reliable on case-insensitive filesystems (Windows).
            return os.path.normcase(os.path.abspath(path))

        paths = set()
        # sys.real_prefix is set inside a virtualenv and points at the base
        # interpreter prefix; include both it and the current prefix.
        real_prefix = getattr(sys, "real_prefix", None)
        for prefix in filter(None, (real_prefix, sys.prefix)):
            path = sysconfig.get_python_lib(prefix=prefix)
            path = _normalized_path(path)
            paths.add(path)

        # Handle Debian's derivatives /usr/local.
        if os.path.isfile("/etc/debian_version"):
            for prefix in filter(None, (real_prefix, sys.prefix)):
                libpython = os.path.join(
                    prefix,
                    "local",
                    "lib",
                    "python" + sysconfig.get_python_version(),
                    "dist-packages",
                )
                paths.add(libpython)
        return paths
+
    def open(self):
        """called before visiting project (i.e set of modules)"""
        # Reset global statistics and the import graph for this run.
        self.linter.add_stats(dependencies={})
        self.linter.add_stats(cycles=[])
        self.stats = self.linter.stats
        self.import_graph = collections.defaultdict(set)
        self._module_pkg = {}  # mapping of modules to the pkg they belong in
        # Edges excluded from cyclic-import detection (disabled on the
        # importing line); see _add_imported_module().
        self._excluded_edges = collections.defaultdict(set)
        self._ignored_modules = get_global_option(self, "ignored-modules", default=[])
        # Build a mapping {'module': 'preferred-module'}
        self.preferred_modules = dict(
            module.split(":")
            for module in self.config.preferred_modules
            if ":" in module
        )
        self._allow_any_import_level = set(self.config.allow_any_import_level)
+
+ def _import_graph_without_ignored_edges(self):
+ filtered_graph = copy.deepcopy(self.import_graph)
+ for node in filtered_graph:
+ filtered_graph[node].difference_update(self._excluded_edges[node])
+ return filtered_graph
+
    def close(self):
        """Called after the whole project (i.e. set of modules) was visited.

        Emits one cyclic-import message per cycle found in the filtered
        import graph.  (The original docstring said "before visiting",
        which contradicts the method name and its use of accumulated
        per-module data.)
        """
        if self.linter.is_message_enabled("cyclic-import"):
            graph = self._import_graph_without_ignored_edges()
            vertices = list(graph)
            for cycle in get_cycles(graph, vertices=vertices):
                self.add_message("cyclic-import", args=" -> ".join(cycle))
+
    @check_messages(*MSGS)
    def visit_import(self, node):
        """triggered when an import statement is seen"""
        self._check_reimport(node)
        self._check_import_as_rename(node)
        self._check_toplevel(node)

        # C0410: several modules on a single "import" statement.
        names = [name for name, _ in node.names]
        if len(names) >= 2:
            self.add_message("multiple-imports", args=", ".join(names), node=node)

        for name in names:
            self._check_deprecated_module(node, name)
            self._check_preferred_module(node, name)
            imported_module = self._get_imported_module(node, name)
            if isinstance(node.parent, astroid.Module):
                # Allow imports nested
                self._check_position(node)
            if isinstance(node.scope(), astroid.Module):
                self._record_import(node, imported_module)

            if imported_module is None:
                # Import failed or was deliberately ignored; nothing to record.
                continue

            self._add_imported_module(node, imported_module.name)

    @check_messages(*MSGS)
    def visit_importfrom(self, node):
        """triggered when a from statement is seen"""
        basename = node.modname
        imported_module = self._get_imported_module(node, basename)

        self._check_import_as_rename(node)
        self._check_misplaced_future(node)
        self._check_deprecated_module(node, basename)
        self._check_preferred_module(node, basename)
        self._check_wildcard_imports(node, imported_module)
        self._check_same_line_imports(node)
        self._check_reimport(node, basename=basename, level=node.level)
        self._check_toplevel(node)

        if isinstance(node.parent, astroid.Module):
            # Allow imports nested
            self._check_position(node)
        if isinstance(node.scope(), astroid.Module):
            self._record_import(node, imported_module)
        if imported_module is None:
            return
        for name, _ in node.names:
            if name != "*":
                # Record the dotted path of each imported attribute.
                self._add_imported_module(node, "%s.%s" % (imported_module.name, name))
            else:
                self._add_imported_module(node, imported_module.name)

    @check_messages(*MSGS)
    def leave_module(self, node):
        # Check imports are grouped by category (standard, 3rd party, local)
        std_imports, ext_imports, loc_imports = self._check_imports_order(node)

        # Check that imports are grouped by package within a given category
        met_import = set()  # set for 'import x' style
        met_from = set()  # set for 'from x import y' style
        current_package = None
        for import_node, import_name in std_imports + ext_imports + loc_imports:
            if not self.linter.is_message_enabled(
                "ungrouped-imports", import_node.fromlineno
            ):
                continue
            # Track "import x" and "from x import y" groupings separately.
            if isinstance(import_node, astroid.node_classes.ImportFrom):
                met = met_from
            else:
                met = met_import
            package, _, _ = import_name.partition(".")
            # Package seen earlier, with a different package in between:
            # its imports are not contiguous.
            if current_package and current_package != package and package in met:
                self.add_message("ungrouped-imports", node=import_node, args=package)
            current_package = package
            met.add(package)

        # Reset per-module state for the next module.
        self._imports_stack = []
        self._first_non_import_node = None
+
    def compute_first_non_import_node(self, node):
        """Remember the first module-level statement that is not an import.

        Later imports are flagged as wrong-import-position relative to this
        node (see _check_position).
        """
        if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
            return
        # if the node does not contain an import instruction, and if it is the
        # first node of the module, keep a track of it (all the import positions
        # of the module will be compared to the position of this first
        # instruction)
        if self._first_non_import_node:
            return
        if not isinstance(node.parent, astroid.Module):
            return
        # try/except and try/finally blocks that contain imports are allowed
        # before other imports (conditional-import idiom).
        nested_allowed = [astroid.TryExcept, astroid.TryFinally]
        is_nested_allowed = [
            allowed for allowed in nested_allowed if isinstance(node, allowed)
        ]
        if is_nested_allowed and any(
            node.nodes_of_class((astroid.Import, astroid.ImportFrom))
        ):
            return
        if isinstance(node, astroid.Assign):
            # Add compatibility for module level dunder names
            # https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
            valid_targets = [
                isinstance(target, astroid.AssignName)
                and target.name.startswith("__")
                and target.name.endswith("__")
                for target in node.targets
            ]
            if all(valid_targets):
                return
        self._first_non_import_node = node

    # Any of these statement kinds may mark the first non-import position;
    # they all share the recording logic above.
    visit_tryfinally = (
        visit_tryexcept
    ) = (
        visit_assignattr
    ) = (
        visit_assign
    ) = (
        visit_ifexp
    ) = visit_comprehension = visit_expr = visit_if = compute_first_non_import_node

    def visit_functiondef(self, node):
        """Possibly record a function definition as the first non-import node."""
        if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
            return
        # If it is the first non import instruction of the module, record it.
        if self._first_non_import_node:
            return

        # Check if the node belongs to an `If` or a `Try` block. If they
        # contain imports, skip recording this node.
        if not isinstance(node.parent.scope(), astroid.Module):
            return

        root = node
        while not isinstance(root.parent, astroid.Module):
            root = root.parent

        if isinstance(root, (astroid.If, astroid.TryFinally, astroid.TryExcept)):
            if any(root.nodes_of_class((astroid.Import, astroid.ImportFrom))):
                return

        self._first_non_import_node = node

    visit_classdef = visit_for = visit_while = visit_functiondef
+
+ def _check_misplaced_future(self, node):
+ basename = node.modname
+ if basename == "__future__":
+ # check if this is the first non-docstring statement in the module
+ prev = node.previous_sibling()
+ if prev:
+ # consecutive future statements are possible
+ if not (
+ isinstance(prev, astroid.ImportFrom)
+ and prev.modname == "__future__"
+ ):
+ self.add_message("misplaced-future", node=node)
+ return
+
+ def _check_same_line_imports(self, node):
+ # Detect duplicate imports on the same line.
+ names = (name for name, _ in node.names)
+ counter = collections.Counter(names)
+ for name, count in counter.items():
+ if count > 1:
+ self.add_message("reimported", node=node, args=(name, node.fromlineno))
+
+ def _check_position(self, node):
+ """Check `node` import or importfrom node position is correct
+
+ Send a message if `node` comes before another instruction
+ """
+ # if a first non-import instruction has already been encountered,
+ # it means the import comes after it and therefore is not well placed
+ if self._first_non_import_node:
+ self.add_message("wrong-import-position", node=node, args=node.as_string())
+
    def _record_import(self, node, importedmodnode):
        """Record the package `node` imports from"""
        if isinstance(node, astroid.ImportFrom):
            importedname = node.modname
        else:
            importedname = importedmodnode.name if importedmodnode else None
        if not importedname:
            # Fall back to the first dotted component of the first name
            # imported by the statement.
            importedname = node.names[0][0].split(".")[0]

        if isinstance(node, astroid.ImportFrom) and (node.level or 0) >= 1:
            # We need the importedname with first point to detect local package
            # Example of node:
            # 'from .my_package1 import MyClass1'
            # the output should be '.my_package1' instead of 'my_package1'
            # Example of node:
            # 'from . import my_package2'
            # the output should be '.my_package2' instead of '{pyfile}'
            importedname = "." + importedname

        # Consumed (and cleared) by _check_imports_order / leave_module.
        self._imports_stack.append((node, importedname))
+
+ @staticmethod
+ def _is_fallback_import(node, imports):
+ imports = [import_node for (import_node, _) in imports]
+ return any(astroid.are_exclusive(import_node, node) for import_node in imports)
+
    def _check_imports_order(self, _module_node):
        """Checks imports of module `node` are grouped by category

        Imports must follow this order: standard, 3rd party, local
        """
        std_imports = []
        third_party_imports = []
        first_party_imports = []
        # need of a list that holds third or first party ordered import
        external_imports = []
        local_imports = []
        # "*_not_ignored" lists only hold top-level imports for which
        # wrong-import-order is enabled; they are what later imports are
        # compared against.
        third_party_not_ignored = []
        first_party_not_ignored = []
        local_not_ignored = []
        # Delegate categorization (FUTURE/STDLIB/THIRDPARTY/...) to isort,
        # honoring the user's known-third-party / known-standard-library.
        isort_obj = isort.SortImports(
            file_contents="",
            known_third_party=self.config.known_third_party,
            known_standard_library=self.config.known_standard_library,
        )
        for node, modname in self._imports_stack:
            if modname.startswith("."):
                # Relative import: keep the leading dot plus first component.
                package = "." + modname.split(".")[1]
            else:
                package = modname.split(".")[0]
            nested = not isinstance(node.parent, astroid.Module)
            ignore_for_import_order = not self.linter.is_message_enabled(
                "wrong-import-order", node.fromlineno
            )
            import_category = isort_obj.place_module(package)
            node_and_package_import = (node, package)
            if import_category in ("FUTURE", "STDLIB"):
                std_imports.append(node_and_package_import)
                # A stdlib import is misplaced if any lower-priority import
                # was already seen.
                wrong_import = (
                    third_party_not_ignored
                    or first_party_not_ignored
                    or local_not_ignored
                )
                if self._is_fallback_import(node, wrong_import):
                    continue
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'standard import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "THIRDPARTY":
                third_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested and not ignore_for_import_order:
                    third_party_not_ignored.append(node_and_package_import)
                wrong_import = first_party_not_ignored or local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'third party import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "FIRSTPARTY":
                first_party_imports.append(node_and_package_import)
                external_imports.append(node_and_package_import)
                if not nested and not ignore_for_import_order:
                    first_party_not_ignored.append(node_and_package_import)
                wrong_import = local_not_ignored
                if wrong_import and not nested:
                    self.add_message(
                        "wrong-import-order",
                        node=node,
                        args=(
                            'first party import "%s"' % node.as_string(),
                            '"%s"' % wrong_import[0][0].as_string(),
                        ),
                    )
            elif import_category == "LOCALFOLDER":
                local_imports.append((node, package))
                if not nested and not ignore_for_import_order:
                    local_not_ignored.append((node, package))
        return std_imports, external_imports, local_imports
+
    def _get_imported_module(self, importnode, modname):
        """Return the astroid module imported by `importnode`, or None.

        Emits relative-beyond-top-level, syntax-error or import-error as
        appropriate; returns None whenever the module cannot be imported.
        """
        try:
            return importnode.do_import_module(modname)
        except astroid.TooManyLevelsError:
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None

            self.add_message("relative-beyond-top-level", node=importnode)
        except astroid.AstroidSyntaxError as exc:
            message = "Cannot import {!r} due to syntax error {!r}".format(
                modname, str(exc.error)  # pylint: disable=no-member; false positive
            )
            self.add_message("syntax-error", line=importnode.lineno, args=message)

        except astroid.AstroidBuildingException:
            if not self.linter.is_message_enabled("import-error"):
                return None
            if _ignore_import_failure(importnode, modname, self._ignored_modules):
                return None
            # Imports inside try/except fallback blocks are skipped unless
            # the user asked for them to be analysed.
            if not self.config.analyse_fallback_blocks and is_from_fallback_block(
                importnode
            ):
                return None

            dotted_modname = _get_import_name(importnode, modname)
            self.add_message("import-error", args=repr(dotted_modname), node=importnode)
+
    def _add_imported_module(self, node, importedmodname):
        """notify an imported module, used to analyze dependencies"""
        module_file = node.root().file
        context_name = node.root().name
        base = os.path.splitext(os.path.basename(module_file))[0]

        try:
            # Reduce e.g. "package.module.attribute" to "package.module".
            importedmodname = modutils.get_module_part(importedmodname, module_file)
        except ImportError:
            pass

        if context_name == importedmodname:
            self.add_message("import-self", node=node)

        elif not modutils.is_standard_module(importedmodname):
            # if this is not a package __init__ module
            if base != "__init__" and context_name not in self._module_pkg:
                # record the module's parent, or the module itself if this is
                # a top level module, as the package it belongs to
                self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]

            # handle dependencies
            importedmodnames = self.stats["dependencies"].setdefault(
                importedmodname, set()
            )
            if context_name not in importedmodnames:
                importedmodnames.add(context_name)

            # update import graph
            self.import_graph[context_name].add(importedmodname)
            # Edges whose importing line disables cyclic-import are kept out
            # of the cycle detection (see close()).
            if not self.linter.is_message_enabled("cyclic-import", line=node.lineno):
                self._excluded_edges[context_name].add(importedmodname)
+
+ def _check_deprecated_module(self, node, mod_path):
+ """check if the module is deprecated"""
+ for mod_name in self.config.deprecated_modules:
+ if mod_path == mod_name or mod_path.startswith(mod_name + "."):
+ self.add_message("deprecated-module", node=node, args=mod_path)
+
+ def _check_preferred_module(self, node, mod_path):
+ """check if the module has a preferred replacement"""
+ if mod_path in self.preferred_modules:
+ self.add_message(
+ "preferred-module",
+ node=node,
+ args=(self.preferred_modules[mod_path], mod_path),
+ )
+
+ def _check_import_as_rename(self, node):
+ names = node.names
+ for name in names:
+ if not all(name):
+ return
+
+ real_name = name[0]
+ splitted_packages = real_name.rsplit(".")
+ real_name = splitted_packages[-1]
+ imported_name = name[1]
+ # consider only following cases
+ # import x as x
+ # and ignore following
+ # import x.y.z as z
+ if real_name == imported_name and len(splitted_packages) == 1:
+ self.add_message("useless-import-alias", node=node)
+
    def _check_reimport(self, node, basename=None, level=None):
        """check if the import is necessary (i.e. not already done)"""
        if not self.linter.is_message_enabled("reimported"):
            return

        frame = node.frame()
        root = node.root()
        # Look for an earlier import of the same name first in the enclosing
        # frame, then (if different) at module level.
        contexts = [(frame, level)]
        if root is not frame:
            contexts.append((root, None))

        for known_context, known_level in contexts:
            for name, alias in node.names:
                first = _get_first_import(
                    node, known_context, name, basename, known_level, alias
                )
                if first is not None:
                    self.add_message(
                        "reimported", node=node, args=(name, first.fromlineno)
                    )
+
+ def _report_external_dependencies(self, sect, _, _dummy):
+ """return a verbatim layout for displaying dependencies"""
+ dep_info = _make_tree_defs(self._external_dependencies_info().items())
+ if not dep_info:
+ raise EmptyReportError()
+ tree_str = _repr_tree_defs(dep_info)
+ sect.append(VerbatimText(tree_str))
+
+ def _report_dependencies_graph(self, sect, _, _dummy):
+ """write dependencies as a dot (graphviz) file"""
+ dep_info = self.stats["dependencies"]
+ if not dep_info or not (
+ self.config.import_graph
+ or self.config.ext_import_graph
+ or self.config.int_import_graph
+ ):
+ raise EmptyReportError()
+ filename = self.config.import_graph
+ if filename:
+ _make_graph(filename, dep_info, sect, "")
+ filename = self.config.ext_import_graph
+ if filename:
+ _make_graph(filename, self._external_dependencies_info(), sect, "external ")
+ filename = self.config.int_import_graph
+ if filename:
+ _make_graph(filename, self._internal_dependencies_info(), sect, "internal ")
+
+ def _filter_dependencies_graph(self, internal):
+ """build the internal or the external dependency graph"""
+ graph = collections.defaultdict(set)
+ for importee, importers in self.stats["dependencies"].items():
+ for importer in importers:
+ package = self._module_pkg.get(importer, importer)
+ is_inside = importee.startswith(package)
+ if is_inside and internal or not is_inside and not internal:
+ graph[importee].add(importer)
+ return graph
+
    # @cached memoizes the result on the instance, so the graph is only
    # built once per run even though the reports call these repeatedly.
    @cached
    def _external_dependencies_info(self):
        """return cached external dependencies information or build and
        cache them
        """
        return self._filter_dependencies_graph(internal=False)

    @cached
    def _internal_dependencies_info(self):
        """return cached internal dependencies information or build and
        cache them
        """
        return self._filter_dependencies_graph(internal=True)
+
+ def _check_wildcard_imports(self, node, imported_module):
+ if node.root().package:
+ # Skip the check if in __init__.py issue #2026
+ return
+
+ wildcard_import_is_allowed = self._wildcard_import_is_allowed(imported_module)
+ for name, _ in node.names:
+ if name == "*" and not wildcard_import_is_allowed:
+ self.add_message("wildcard-import", args=node.modname, node=node)
+
+ def _wildcard_import_is_allowed(self, imported_module):
+ return (
+ self.config.allow_wildcard_with_all
+ and imported_module is not None
+ and "__all__" in imported_module.locals
+ )
+
+ def _check_toplevel(self, node):
+ """Check whether the import is made outside the module toplevel.
+ """
+ # If the scope of the import is a module, then obviously it is
+ # not outside the module toplevel.
+ if isinstance(node.scope(), astroid.Module):
+ return
+
+ if isinstance(node, astroid.ImportFrom):
+ module_names = [node.modname]
+ else:
+ module_names = [name[0] for name in node.names]
+
+ # Get the full names of all the imports that are not whitelisted.
+ scoped_imports = [
+ name for name in module_names if name not in self._allow_any_import_level
+ ]
+
+ if scoped_imports:
+ self.add_message(
+ "import-outside-toplevel", args=", ".join(scoped_imports), node=node
+ )
+
+
def register(linter):
    """Required method to auto-register this checker."""
    checker = ImportsChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/logging.py b/venv/Lib/site-packages/pylint/checkers/logging.py
new file mode 100644
index 0000000..5ad0e76
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/logging.py
@@ -0,0 +1,384 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009, 2012, 2014 Google, Inc.
+# Copyright (c) 2012 Mike Bryant <leachim@leachim.info>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Chris Murray <chris@chrismurray.scot>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2017 guillaume2 <guillaume.peillex@gmail.col>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Mariatta Wijaya <mariatta@python.org>
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""checker for use of Python logging
+"""
+import string
+
+import astroid
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+from pylint.checkers.utils import check_messages
+
# Message definitions for the logging checker.  Ids and symbols are part of
# pylint's user-facing API and must stay stable; only the W1202 description
# is corrected here (it contained a stray mid-sentence period and the
# ungrammatical "Use another way for format the string").
MSGS = {
    "W1201": (
        "Specify string format arguments as logging function parameters",
        "logging-not-lazy",
        "Used when a logging statement has a call form of "
        '"logging.<logging method>(format_string % (format_args...))". '
        "Such calls should leave string interpolation to the logging "
        "method itself and be written "
        '"logging.<logging method>(format_string, format_args...)" '
        "so that the program may avoid incurring the cost of the "
        "interpolation in those cases in which no message will be "
        "logged. For more, see "
        "http://www.python.org/dev/peps/pep-0282/.",
    ),
    "W1202": (
        "Use %s formatting in logging functions%s",
        "logging-format-interpolation",
        "Used when a logging statement has a call form of "
        '"logging.<logging method>(<string formatting>)" '
        "with invalid string formatting. "
        "Use another way to format the string instead.",
    ),
    "E1200": (
        "Unsupported logging format character %r (%#02x) at index %d",
        "logging-unsupported-format",
        "Used when an unsupported format character is used in a logging "
        "statement format string.",
    ),
    "E1201": (
        "Logging format string ends in middle of conversion specifier",
        "logging-format-truncated",
        "Used when a logging statement format string terminates before "
        "the end of a conversion specifier.",
    ),
    "E1205": (
        "Too many arguments for logging format string",
        "logging-too-many-args",
        "Used when a logging format string is given too many arguments.",
    ),
    "E1206": (
        "Not enough arguments for logging format string",
        "logging-too-few-args",
        "Used when a logging format string is given too few arguments.",
    ),
}
+
+
# Logger convenience methods whose first positional argument is the format
# string (as opposed to "log", where the format string is the second).
CHECKED_CONVENIENCE_FUNCTIONS = set(
    "critical debug error exception fatal info warn warning".split()
)
+
+
def is_method_call(func, types=(), methods=()):
    """Determine whether *func* represents a method call.

    Args:
        func (astroid.BoundMethod): The BoundMethod AST node to check.
        types (Optional[String]): Optional sequence of caller type names to restrict check.
        methods (Optional[String]): Optional sequence of method names to restrict check.

    Returns:
        bool: True if the node represents a method call matching the given
        type and method name restrictions, False otherwise.
    """
    if not isinstance(func, astroid.BoundMethod):
        return False
    if not isinstance(func.bound, astroid.Instance):
        return False
    if types and func.bound.name not in types:
        return False
    if methods and func.name not in methods:
        return False
    return True
+
+
class LoggingChecker(checkers.BaseChecker):
    """Checks use of the logging module."""

    __implements__ = interfaces.IAstroidChecker
    # Checker registration data: configuration-section name and message table.
    name = "logging"
    msgs = MSGS

    # Command-line / rcfile options, as (name, optparse-style dict) pairs.
    options = (
        (
            "logging-modules",
            {
                "default": ("logging",),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "Logging modules to check that the string format "
                "arguments are in logging function parameter format.",
            },
        ),
        (
            "logging-format-style",
            {
                "default": "old",
                "type": "choice",
                "metavar": "<old (%) or new ({) or fstr (f'')>",
                "choices": ["old", "new", "fstr"],
                "help": "Format style used to check logging format string. "
                "`old` means using % formatting, `new` is for `{}` formatting,"
                "and `fstr` is for f-strings.",
            },
        ),
    )
+
    def visit_module(self, node):  # pylint: disable=unused-argument
        """Clears any state left in this checker from last module checked."""
        # The code being checked can just as easily "import logging as foo",
        # so it is necessary to process the imports and store in this field
        # what name the logging module is actually given.
        self._logging_names = set()
        logging_mods = self.config.logging_modules

        self._format_style = self.config.logging_format_style
        format_styles = {"old": "%", "new": "{", "fstr": "f-string"}
        format_style_help = ""
        if self._format_style == "old":
            format_style_help = " and pass the % parameters as arguments"

        # Pre-built args tuple for logging-format-interpolation messages.
        self._format_style_args = (format_styles[self._format_style], format_style_help)

        self._logging_modules = set(logging_mods)
        # For dotted configured modules ("package.module"), map package name
        # to module name so "from package import module" is recognized.
        self._from_imports = {}
        for logging_mod in logging_mods:
            parts = logging_mod.rsplit(".", 1)
            if len(parts) > 1:
                self._from_imports[parts[0]] = parts[1]
+
+ def visit_importfrom(self, node):
+ """Checks to see if a module uses a non-Python logging module."""
+ try:
+ logging_name = self._from_imports[node.modname]
+ for module, as_name in node.names:
+ if module == logging_name:
+ self._logging_names.add(as_name or module)
+ except KeyError:
+ pass
+
+ def visit_import(self, node):
+ """Checks to see if this module uses Python's built-in logging."""
+ for module, as_name in node.names:
+ if module in self._logging_modules:
+ self._logging_names.add(as_name or module)
+
    @check_messages(*MSGS)
    def visit_call(self, node):
        """Checks calls to logging methods."""

        def is_logging_name():
            # True for "<alias>.method(...)" where <alias> is a recorded
            # name of a logging module.
            return (
                isinstance(node.func, astroid.Attribute)
                and isinstance(node.func.expr, astroid.Name)
                and node.func.expr.name in self._logging_names
            )

        def is_logger_class():
            # (True, method_name) when the call is a bound method of
            # logging.Logger or one of its subclasses; (False, None) otherwise.
            try:
                for inferred in node.func.infer():
                    if isinstance(inferred, astroid.BoundMethod):
                        parent = inferred._proxied.parent
                        if isinstance(parent, astroid.ClassDef) and (
                            parent.qname() == "logging.Logger"
                            or any(
                                ancestor.qname() == "logging.Logger"
                                for ancestor in parent.ancestors()
                            )
                        ):
                            return True, inferred._proxied.name
            except astroid.exceptions.InferenceError:
                pass
            return False, None

        if is_logging_name():
            name = node.func.attrname
        else:
            result, name = is_logger_class()
            if not result:
                return
        self._check_log_method(node, name)

    def _check_log_method(self, node, name):
        """Checks calls to logging.log(level, format, *format_args)."""
        if name == "log":
            if node.starargs or node.kwargs or len(node.args) < 2:
                # Either a malformed call, star args, or double-star args. Beyond
                # the scope of this checker.
                return
            # log(level, format, ...): the format string is the second argument.
            format_pos = 1
        elif name in CHECKED_CONVENIENCE_FUNCTIONS:
            if node.starargs or node.kwargs or not node.args:
                # Either no args, star args, or double-star args. Beyond the
                # scope of this checker.
                return
            format_pos = 0
        else:
            return

        if isinstance(node.args[format_pos], astroid.BinOp):
            binop = node.args[format_pos]
            # "fmt % args" as the message defeats lazy interpolation.
            emit = binop.op == "%"
            if binop.op == "+":
                # "a" + b: only flag when at least one operand is inferred
                # to be a literal string.
                total_number_of_strings = sum(
                    1
                    for operand in (binop.left, binop.right)
                    if self._is_operand_literal_str(utils.safe_infer(operand))
                )
                emit = total_number_of_strings > 0
            if emit:
                self.add_message("logging-not-lazy", node=node)
        elif isinstance(node.args[format_pos], astroid.Call):
            self._check_call_func(node.args[format_pos])
        elif isinstance(node.args[format_pos], astroid.Const):
            self._check_format_string(node, format_pos)
        elif isinstance(
            node.args[format_pos], (astroid.FormattedValue, astroid.JoinedStr)
        ):
            # f-string message: only acceptable when the configured format
            # style is "fstr".
            if self._format_style != "fstr":
                self.add_message(
                    "logging-format-interpolation",
                    node=node,
                    args=self._format_style_args,
                )
+
    @staticmethod
    def _is_operand_literal_str(operand):
        """
        Return True if the operand in argument is a literal string
        """
        # NOTE(review): this relies on astroid.Const exposing a ``name``
        # attribute equal to the builtin type name ("str") — confirm against
        # the astroid version this vendored pylint is pinned to.
        return isinstance(operand, astroid.Const) and operand.name == "str"
+
    def _check_call_func(self, node):
        """Checks that function call is not format_string.format().

        Args:
            node (astroid.node_classes.Call):
                Call AST node to be checked.
        """
        func = utils.safe_infer(node.func)
        types = ("str", "unicode")
        methods = ("format",)
        # str.format() in a logging call defeats lazy interpolation; skip
        # format strings with complex specs, which cannot be rewritten to
        # logging-style % formatting.
        if is_method_call(func, types, methods) and not is_complex_format_str(
            func.bound
        ):
            self.add_message(
                "logging-format-interpolation", node=node, args=self._format_style_args
            )

    def _check_format_string(self, node, format_arg):
        """Checks that format string tokens match the supplied arguments.

        Args:
            node (astroid.node_classes.NodeNG): AST node to be checked.
            format_arg (int): Index of the format string in the node arguments.
        """
        num_args = _count_supplied_tokens(node.args[format_arg + 1 :])
        if not num_args:
            # If no args were supplied the string is not interpolated and can contain
            # formatting characters - it's used verbatim. Don't check any further.
            return

        format_string = node.args[format_arg].value
        required_num_args = 0
        if isinstance(format_string, bytes):
            format_string = format_string.decode()
        if isinstance(format_string, str):
            try:
                if self._format_style == "old":
                    # %-style: count positional conversion specifiers.
                    keyword_args, required_num_args, _, _ = utils.parse_format_string(
                        format_string
                    )
                    if keyword_args:
                        # Keyword checking on logging strings is complicated by
                        # special keywords - out of scope.
                        return
                elif self._format_style == "new":
                    # {}-style: count implicit, explicit and keyword fields.
                    keyword_arguments, implicit_pos_args, explicit_pos_args = utils.parse_format_method_string(
                        format_string
                    )

                    keyword_args_cnt = len(
                        set(k for k, l in keyword_arguments if not isinstance(k, int))
                    )
                    required_num_args = (
                        keyword_args_cnt + implicit_pos_args + explicit_pos_args
                    )
                else:
                    # "fstr" style: a plain constant message with supplied
                    # args is the wrong interpolation style.
                    self.add_message(
                        "logging-format-interpolation",
                        node=node,
                        args=self._format_style_args,
                    )
            except utils.UnsupportedFormatCharacter as ex:
                char = format_string[ex.index]
                self.add_message(
                    "logging-unsupported-format",
                    node=node,
                    args=(char, ord(char), ex.index),
                )
                return
            except utils.IncompleteFormatString:
                self.add_message("logging-format-truncated", node=node)
                return
        if num_args > required_num_args:
            self.add_message("logging-too-many-args", node=node)
        elif num_args < required_num_args:
            self.add_message("logging-too-few-args", node=node)
+
+
def is_complex_format_str(node):
    """Checks if node represents a string with complex formatting specs.

    Args:
        node (astroid.node_classes.NodeNG): AST node to check
    Returns:
        bool: True if inferred string uses complex formatting, False otherwise
    """
    inferred = utils.safe_infer(node)
    # Anything that does not infer to a literal str is treated as complex
    # (conservative: the checker will not suggest rewriting it).
    if not isinstance(inferred, astroid.Const) or not isinstance(inferred.value, str):
        return True
    try:
        fields = string.Formatter().parse(inferred.value)
        # Complex iff any replacement field carries a format spec.
        return any(format_spec for _, _, format_spec, _ in fields)
    except ValueError:
        # This format string is invalid
        return False
+
+
def _count_supplied_tokens(args):
    """Counts the number of tokens in an args list.

    The Python log functions allow for special keyword arguments: func,
    exc_info and extra. To handle these cases correctly, we only count
    arguments that aren't keywords.

    Args:
        args (list): AST nodes that are arguments for a log format string.

    Returns:
        int: Number of AST nodes that aren't keywords.
    """
    non_keywords = [arg for arg in args if not isinstance(arg, astroid.Keyword)]
    return len(non_keywords)
+
+
def register(linter):
    """Required method to auto-register this checker."""
    checker = LoggingChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/misc.py b/venv/Lib/site-packages/pylint/checkers/misc.py
new file mode 100644
index 0000000..dcf7a3e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/misc.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006, 2009-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Alexandru Coman <fcoman@bitdefender.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 glegoux <gilles.legoux@gmail.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+
+"""Check source code is ascii only or has an encoding declaration (PEP 263)"""
+
+import re
+import tokenize
+
+from pylint.checkers import BaseChecker
+from pylint.constants import OPTION_RGX
+from pylint.interfaces import IRawChecker, ITokenChecker
+from pylint.message import MessagesHandlerMixIn
+
+
class ByIdManagedMessagesChecker(BaseChecker):

    """checks for messages that are enabled or disabled by id instead of symbol."""

    __implements__ = IRawChecker

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "I0023": (
            "%s",
            "use-symbolic-message-instead",
            "Used when a message is enabled or disabled by id.",
        )
    }

    options = ()

    def process_module(self, module):
        """inspect the source file to find messages activated or deactivated by id."""
        managed_msgs = MessagesHandlerMixIn.get_by_id_managed_msgs()
        for mod_name, msg_id, msg_symbol, lineno, is_disabled in managed_msgs:
            # Only report entries that belong to the module being checked.
            if mod_name != module.name:
                continue
            if is_disabled:
                template = "Id '{ident}' is used to disable '{symbol}' message emission"
            else:
                template = "Id '{ident}' is used to enable '{symbol}' message emission"
            txt = template.format(ident=msg_id, symbol=msg_symbol)
            self.add_message("use-symbolic-message-instead", line=lineno, args=txt)
        MessagesHandlerMixIn.clear_by_id_managed_msgs()
+
+
class EncodingChecker(BaseChecker):

    """checks for:
    * warning notes in the code like FIXME, XXX
    * encoding issues.
    """

    __implements__ = (IRawChecker, ITokenChecker)

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "W0511": (
            "%s",
            "fixme",
            "Used when a warning note as FIXME or XXX is detected.",
        )
    }

    options = (
        (
            "notes",
            {
                "type": "csv",
                "metavar": "<comma separated values>",
                "default": ("FIXME", "XXX", "TODO"),
                "help": (
                    "List of note tags to take in consideration, "
                    "separated by a comma."
                ),
            },
        ),
    )

    def open(self):
        """Compile the note-tag pattern once per run."""
        super().open()
        # Tags are escaped so user-configured values cannot inject regex
        # metacharacters; matching is case-insensitive.
        self._fixme_pattern = re.compile(
            r"#\s*(%s)\b" % "|".join(map(re.escape, self.config.notes)), re.I
        )

    def _check_encoding(self, lineno, line, file_encoding):
        """Try to decode one raw source line with the module's encoding.

        Args:
            lineno (int): 1-based line number, used for reporting.
            line (bytes): raw line read from the module's binary stream.
            file_encoding (str): declared (or assumed) module encoding.

        Returns:
            str or None: the decoded line, or None when decoding failed.
        """
        try:
            return line.decode(file_encoding)
        except UnicodeDecodeError:
            pass
        except LookupError:
            # Bug fix: ``line`` is bytes here (see process_module), so the
            # previous str-based tests (line.startswith("#"),
            # ``file_encoding in line``) raised TypeError on Python 3
            # instead of reporting the bad coding declaration.
            # Compare bytes with bytes instead.
            if (
                line.startswith(b"#")
                and b"coding" in line
                and file_encoding.encode("utf-8") in line
            ):
                self.add_message(
                    "syntax-error",
                    line=lineno,
                    args='Cannot decode using encoding "{}",'
                    " bad encoding".format(file_encoding),
                )
        return None

    def process_module(self, module):
        """inspect the source file to find encoding problem"""
        # Fall back to ASCII when the module declares no encoding (PEP 263).
        if module.file_encoding:
            encoding = module.file_encoding
        else:
            encoding = "ascii"

        with module.stream() as stream:
            for lineno, line in enumerate(stream):
                self._check_encoding(lineno + 1, line, encoding)

    def process_tokens(self, tokens):
        """inspect the source to find fixme problems"""
        if not self.config.notes:
            return
        comments = (
            token_info for token_info in tokens if token_info.type == tokenize.COMMENT
        )
        for comment in comments:
            comment_text = comment.string[1:].lstrip()  # trim '#' and whitespaces

            # handle pylint disable clauses
            disable_option_match = OPTION_RGX.search(comment_text)
            if disable_option_match:
                try:
                    _, value = disable_option_match.group(1).split("=", 1)
                    values = [_val.strip().upper() for _val in value.split(",")]
                    # A pylint pragma naming one of our own note tags is not
                    # a real FIXME comment; skip it.
                    if set(values) & set(self.config.notes):
                        continue
                except ValueError:
                    self.add_message(
                        "bad-inline-option",
                        args=disable_option_match.group(1).strip(),
                        line=comment.start[0],
                    )
                    continue

            # emit warnings if necessary; re-prepend '#' because the pattern
            # anchors on it.
            match = self._fixme_pattern.search("#" + comment_text.lower())
            if match:
                note = match.group(1)
                self.add_message(
                    "fixme",
                    col_offset=comment.string.lower().index(note.lower()),
                    args=comment_text,
                    line=comment.start[0],
                )
+
+
def register(linter):
    """required method to auto register this checker"""
    # Registration order matters only for reporting order; keep it stable.
    for checker_class in (EncodingChecker, ByIdManagedMessagesChecker):
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/newstyle.py b/venv/Lib/site-packages/pylint/checkers/newstyle.py
new file mode 100644
index 0000000..46f4e4e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/newstyle.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2006, 2008-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""check for new / old style related problems
+"""
+import astroid
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages, has_known_bases, node_frame_class
+from pylint.interfaces import IAstroidChecker
+
# Message definitions for NewStyleConflictChecker, keyed by message id:
# (template, symbol, description).
MSGS = {
    "E1003": (
        "Bad first argument %r given to super()",
        "bad-super-call",
        "Used when another argument than the current class is given as "
        "first argument of the super builtin.",
    )
}
+
+
class NewStyleConflictChecker(BaseChecker):
    """checks for usage of new style capabilities on old style classes and
    other new/old styles conflicts problems
    * use of property, __slots__, super
    * "super" usage
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "newstyle"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = ()

    @check_messages("bad-super-call")
    def visit_functiondef(self, node):
        """check use of super

        Walks every call inside a method body and emits bad-super-call when
        super() is given a first argument other than the enclosing class
        (type(self), self.__class__, or an unrelated class).
        """
        # ignore actual functions or method within a new style class
        if not node.is_method():
            return
        klass = node.parent.frame()
        for stmt in node.nodes_of_class(astroid.Call):
            if node_frame_class(stmt) != node_frame_class(node):
                # Don't look down in other scopes.
                continue

            expr = stmt.func
            if not isinstance(expr, astroid.Attribute):
                continue

            # Only calls of the shape super(...).something() are of interest.
            call = expr.expr
            # skip the test if using super
            if not (
                isinstance(call, astroid.Call)
                and isinstance(call.func, astroid.Name)
                and call.func.name == "super"
            ):
                continue

            # super should not be used on an old style class
            if klass.newstyle or not has_known_bases(klass):
                # super first arg should not be the class
                if not call.args:
                    continue

                # calling super(type(self), self) can lead to recursion loop
                # in derived classes
                arg0 = call.args[0]
                if (
                    isinstance(arg0, astroid.Call)
                    and isinstance(arg0.func, astroid.Name)
                    and arg0.func.name == "type"
                ):
                    self.add_message("bad-super-call", node=call, args=("type",))
                    continue

                # calling super(self.__class__, self) can lead to recursion loop
                # in derived classes
                if (
                    len(call.args) >= 2
                    and isinstance(call.args[1], astroid.Name)
                    and call.args[1].name == "self"
                    and isinstance(arg0, astroid.Attribute)
                    and arg0.attrname == "__class__"
                ):
                    self.add_message(
                        "bad-super-call", node=call, args=("self.__class__",)
                    )
                    continue

                try:
                    # call.args is known non-empty here (guarded above); the
                    # ``and`` is defensive. next(..., None) takes the first
                    # inferred value, or None when inference yields nothing.
                    supcls = call.args and next(call.args[0].infer(), None)
                except astroid.InferenceError:
                    continue

                if klass is not supcls:
                    name = None
                    # if supcls is not Uninferable, then supcls was inferred
                    # and use its name. Otherwise, try to look
                    # for call.args[0].name
                    if supcls:
                        name = supcls.name
                    elif call.args and hasattr(call.args[0], "name"):
                        name = call.args[0].name
                    if name:
                        self.add_message("bad-super-call", node=call, args=(name,))

    # Async methods get exactly the same treatment.
    visit_asyncfunctiondef = visit_functiondef
+
+
def register(linter):
    """required method to auto register this checker"""
    checker = NewStyleConflictChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/python3.py b/venv/Lib/site-packages/pylint/checkers/python3.py
new file mode 100644
index 0000000..583b1c2
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/python3.py
@@ -0,0 +1,1398 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014-2015 Brett Cannon <brett@python.org>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2015 Cosmin Poieana <cmin@ropython.org>
+# Copyright (c) 2015 Viorel Stirbu <viorels@gmail.com>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016-2017 Roy Williams <roy.williams.iii@gmail.com>
+# Copyright (c) 2016 Roy Williams <rwilliams@lyft.com>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017 Daniel Miller <millerdev@gmail.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 gaurikholkar <f2013002@goa.bits-pilani.ac.in>
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Check Python 2 code for Python 2/3 source-compatible issues."""
+import re
+import tokenize
+from collections import namedtuple
+
+import astroid
+from astroid import bases
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+from pylint.checkers.utils import find_try_except_wrapper_node, node_ignores_exception
+from pylint.constants import WarningScope
+from pylint.interfaces import INFERENCE, INFERENCE_FAILURE
+
+_ZERO = re.compile("^0+$")
+
+
+def _is_old_octal(literal):
+ if _ZERO.match(literal):
+ return False
+ if re.match(r"0\d+", literal):
+ try:
+ int(literal, 8)
+ except ValueError:
+ return False
+ return True
+ return None
+
+
def _inferred_value_is_dict(value):
    """Return True when *value* is a dict node or an instance of a dict subclass."""
    if isinstance(value, astroid.Dict):
        return True
    if not isinstance(value, astroid.Instance):
        return False
    return "dict" in value.basenames
+
+
+def _is_builtin(node):
+ return getattr(node, "name", None) in ("__builtin__", "builtins")
+
+
# Builtins (plus OrderedDict) that consume any iterable, so passing them an
# iterator such as map/filter/zip behaves the same on Python 2 and 3.
_ACCEPTS_ITERATOR = {
    "iter",
    "list",
    "tuple",
    "sorted",
    "set",
    "sum",
    "any",
    "all",
    "enumerate",
    "dict",
    "filter",
    "reversed",
    "max",
    "min",
    "frozenset",
    "OrderedDict",
}
# Attribute (method) names that accept an iterable argument, e.g. str.join
# and itertools.chain.from_iterable.
ATTRIBUTES_ACCEPTS_ITERATOR = {"join", "from_iterable"}
# Fully-qualified builtin methods that accept an iterable argument; matched
# against inferred qnames in _in_iterating_context.
_BUILTIN_METHOD_ACCEPTS_ITERATOR = {
    "builtins.list.extend",
    "builtins.dict.update",
    "builtins.set.update",
}
# dict methods whose return type changed from list/view in Python 2 to a
# view/iterator in Python 3.
DICT_METHODS = {"items", "keys", "values"}
+
+
def _in_iterating_context(node):
    """Check if the node is being used as an iterator.

    Definition is taken from lib2to3.fixer_util.in_special_context().

    Args:
        node (astroid.node_classes.NodeNG): node whose value may be an
            iterator on Python 3 (e.g. a map/zip/dict.keys call).

    Returns:
        bool: True when the value is immediately consumed by iteration —
        a for loop, comprehension iterable, iterator-accepting builtin or
        method, multi-target unpacking, ``in`` containment test,
        ``yield from``, or a starred expression.
    """
    parent = node.parent
    # Since a call can't be the loop variant we only need to know if the node's
    # parent is a 'for' loop to know it's being used as the iterator for the
    # loop.
    if isinstance(parent, astroid.For):
        return True
    # Need to make sure the use of the node is in the iterator part of the
    # comprehension.
    if isinstance(parent, astroid.Comprehension):
        if parent.iter == node:
            return True
    # Various built-ins can take in an iterable or list and lead to the same
    # value.
    elif isinstance(parent, astroid.Call):
        if isinstance(parent.func, astroid.Name):
            if parent.func.name in _ACCEPTS_ITERATOR:
                return True
        elif isinstance(parent.func, astroid.Attribute):
            if parent.func.attrname in ATTRIBUTES_ACCEPTS_ITERATOR:
                return True

        # Fall back to inference for bound methods (list.extend, dict.update,
        # set.update) and any function defined in the itertools module.
        inferred = utils.safe_infer(parent.func)
        if inferred:
            if inferred.qname() in _BUILTIN_METHOD_ACCEPTS_ITERATOR:
                return True
            root = inferred.root()
            if root and root.name == "itertools":
                return True
    # If the call is in an unpacking, there's no need to warn,
    # since it can be considered iterating.
    elif isinstance(parent, astroid.Assign) and isinstance(
        parent.targets[0], (astroid.List, astroid.Tuple)
    ):
        if len(parent.targets[0].elts) > 1:
            return True
    # If the call is in a containment check, we consider that to
    # be an iterating context
    elif (
        isinstance(parent, astroid.Compare)
        and len(parent.ops) == 1
        and parent.ops[0][0] == "in"
    ):
        return True
    # Also if it's an `yield from`, that's fair
    elif isinstance(parent, astroid.YieldFrom):
        return True
    if isinstance(parent, astroid.Starred):
        return True
    return False
+
+
def _is_conditional_import(node):
    """Checks if an import node is in the context of a conditional."""
    conditional_parents = (
        astroid.TryExcept,
        astroid.ExceptHandler,
        astroid.If,
        astroid.IfExp,
    )
    return isinstance(node.parent, conditional_parents)
+
+
+Branch = namedtuple("Branch", ["node", "is_py2_only"])
+
+
+class Python3Checker(checkers.BaseChecker):
+
+ __implements__ = interfaces.IAstroidChecker
+ enabled = False
+ name = "python3"
+
+ msgs = {
+ # Errors for what will syntactically break in Python 3, warnings for
+ # everything else.
+ "E1601": (
+ "print statement used",
+ "print-statement",
+ "Used when a print statement is used "
+ "(`print` is a function in Python 3)",
+ ),
+ "E1602": (
+ "Parameter unpacking specified",
+ "parameter-unpacking",
+ "Used when parameter unpacking is specified for a function"
+ "(Python 3 doesn't allow it)",
+ ),
+ "E1603": (
+ "Implicit unpacking of exceptions is not supported in Python 3",
+ "unpacking-in-except",
+ "Python3 will not allow implicit unpacking of "
+ "exceptions in except clauses. "
+ "See http://www.python.org/dev/peps/pep-3110/",
+ {"old_names": [("W0712", "old-unpacking-in-except")]},
+ ),
+ "E1604": (
+ "Use raise ErrorClass(args) instead of raise ErrorClass, args.",
+ "old-raise-syntax",
+ "Used when the alternate raise syntax "
+ "'raise foo, bar' is used "
+ "instead of 'raise foo(bar)'.",
+ {"old_names": [("W0121", "old-old-raise-syntax")]},
+ ),
+ "E1605": (
+ "Use of the `` operator",
+ "backtick",
+ 'Used when the deprecated "``" (backtick) operator is used '
+ "instead of the str() function.",
+ {"scope": WarningScope.NODE, "old_names": [("W0333", "old-backtick")]},
+ ),
+ "E1609": (
+ "Import * only allowed at module level",
+ "import-star-module-level",
+ "Used when the import star syntax is used somewhere "
+ "else than the module level.",
+ {"maxversion": (3, 0)},
+ ),
+ "W1601": (
+ "apply built-in referenced",
+ "apply-builtin",
+ "Used when the apply built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1602": (
+ "basestring built-in referenced",
+ "basestring-builtin",
+ "Used when the basestring built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1603": (
+ "buffer built-in referenced",
+ "buffer-builtin",
+ "Used when the buffer built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1604": (
+ "cmp built-in referenced",
+ "cmp-builtin",
+ "Used when the cmp built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1605": (
+ "coerce built-in referenced",
+ "coerce-builtin",
+ "Used when the coerce built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1606": (
+ "execfile built-in referenced",
+ "execfile-builtin",
+ "Used when the execfile built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1607": (
+ "file built-in referenced",
+ "file-builtin",
+ "Used when the file built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1608": (
+ "long built-in referenced",
+ "long-builtin",
+ "Used when the long built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1609": (
+ "raw_input built-in referenced",
+ "raw_input-builtin",
+ "Used when the raw_input built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1610": (
+ "reduce built-in referenced",
+ "reduce-builtin",
+ "Used when the reduce built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1611": (
+ "StandardError built-in referenced",
+ "standarderror-builtin",
+ "Used when the StandardError built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1612": (
+ "unicode built-in referenced",
+ "unicode-builtin",
+ "Used when the unicode built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1613": (
+ "xrange built-in referenced",
+ "xrange-builtin",
+ "Used when the xrange built-in function is referenced "
+ "(missing from Python 3)",
+ ),
+ "W1614": (
+ "__coerce__ method defined",
+ "coerce-method",
+ "Used when a __coerce__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1615": (
+ "__delslice__ method defined",
+ "delslice-method",
+ "Used when a __delslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1616": (
+ "__getslice__ method defined",
+ "getslice-method",
+ "Used when a __getslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1617": (
+ "__setslice__ method defined",
+ "setslice-method",
+ "Used when a __setslice__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1618": (
+ "import missing `from __future__ import absolute_import`",
+ "no-absolute-import",
+ "Used when an import is not accompanied by "
+ "``from __future__ import absolute_import`` "
+ "(default behaviour in Python 3)",
+ ),
+ "W1619": (
+ "division w/o __future__ statement",
+ "old-division",
+ "Used for non-floor division w/o a float literal or "
+ "``from __future__ import division`` "
+ "(Python 3 returns a float for int division unconditionally)",
+ ),
+ "W1620": (
+ "Calling a dict.iter*() method",
+ "dict-iter-method",
+ "Used for calls to dict.iterkeys(), itervalues() or iteritems() "
+ "(Python 3 lacks these methods)",
+ ),
+ "W1621": (
+ "Calling a dict.view*() method",
+ "dict-view-method",
+ "Used for calls to dict.viewkeys(), viewvalues() or viewitems() "
+ "(Python 3 lacks these methods)",
+ ),
+ "W1622": (
+ "Called a next() method on an object",
+ "next-method-called",
+ "Used when an object's next() method is called "
+ "(Python 3 uses the next() built-in function)",
+ ),
+ "W1623": (
+ "Assigning to a class's __metaclass__ attribute",
+ "metaclass-assignment",
+ "Used when a metaclass is specified by assigning to __metaclass__ "
+ "(Python 3 specifies the metaclass as a class statement argument)",
+ ),
+ "W1624": (
+ "Indexing exceptions will not work on Python 3",
+ "indexing-exception",
+ "Indexing exceptions will not work on Python 3. Use "
+ "`exception.args[index]` instead.",
+ {"old_names": [("W0713", "old-indexing-exception")]},
+ ),
+ "W1625": (
+ "Raising a string exception",
+ "raising-string",
+ "Used when a string exception is raised. This will not "
+ "work on Python 3.",
+ {"old_names": [("W0701", "old-raising-string")]},
+ ),
+ "W1626": (
+ "reload built-in referenced",
+ "reload-builtin",
+ "Used when the reload built-in function is referenced "
+ "(missing from Python 3). You can use instead imp.reload "
+ "or importlib.reload.",
+ ),
+ "W1627": (
+ "__oct__ method defined",
+ "oct-method",
+ "Used when an __oct__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1628": (
+ "__hex__ method defined",
+ "hex-method",
+ "Used when a __hex__ method is defined (method is not used by Python 3)",
+ ),
+ "W1629": (
+ "__nonzero__ method defined",
+ "nonzero-method",
+ "Used when a __nonzero__ method is defined "
+ "(method is not used by Python 3)",
+ ),
+ "W1630": (
+ "__cmp__ method defined",
+ "cmp-method",
+ "Used when a __cmp__ method is defined (method is not used by Python 3)",
+ ),
+ # 'W1631': replaced by W1636
+ "W1632": (
+ "input built-in referenced",
+ "input-builtin",
+ "Used when the input built-in is referenced "
+ "(backwards-incompatible semantics in Python 3)",
+ ),
+ "W1633": (
+ "round built-in referenced",
+ "round-builtin",
+ "Used when the round built-in is referenced "
+ "(backwards-incompatible semantics in Python 3)",
+ ),
+ "W1634": (
+ "intern built-in referenced",
+ "intern-builtin",
+ "Used when the intern built-in is referenced "
+ "(Moved to sys.intern in Python 3)",
+ ),
+ "W1635": (
+ "unichr built-in referenced",
+ "unichr-builtin",
+ "Used when the unichr built-in is referenced (Use chr in Python 3)",
+ ),
+ "W1636": (
+ "map built-in referenced when not iterating",
+ "map-builtin-not-iterating",
+ "Used when the map built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ {"old_names": [("W1631", "implicit-map-evaluation")]},
+ ),
+ "W1637": (
+ "zip built-in referenced when not iterating",
+ "zip-builtin-not-iterating",
+ "Used when the zip built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1638": (
+ "range built-in referenced when not iterating",
+ "range-builtin-not-iterating",
+ "Used when the range built-in is referenced in a non-iterating "
+ "context (returns a range in Python 3)",
+ ),
+ "W1639": (
+ "filter built-in referenced when not iterating",
+ "filter-builtin-not-iterating",
+ "Used when the filter built-in is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1640": (
+ "Using the cmp argument for list.sort / sorted",
+ "using-cmp-argument",
+ "Using the cmp argument for list.sort or the sorted "
+ "builtin should be avoided, since it was removed in "
+ "Python 3. Using either `key` or `functools.cmp_to_key` "
+ "should be preferred.",
+ ),
+ "W1641": (
+ "Implementing __eq__ without also implementing __hash__",
+ "eq-without-hash",
+ "Used when a class implements __eq__ but not __hash__. In Python 2, objects "
+ "get object.__hash__ as the default implementation, in Python 3 objects get "
+ "None as their default __hash__ implementation if they also implement __eq__.",
+ ),
+ "W1642": (
+ "__div__ method defined",
+ "div-method",
+ "Used when a __div__ method is defined. Using `__truediv__` and setting"
+ "__div__ = __truediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1643": (
+ "__idiv__ method defined",
+ "idiv-method",
+ "Used when an __idiv__ method is defined. Using `__itruediv__` and setting"
+ "__idiv__ = __itruediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1644": (
+ "__rdiv__ method defined",
+ "rdiv-method",
+ "Used when a __rdiv__ method is defined. Using `__rtruediv__` and setting"
+ "__rdiv__ = __rtruediv__ should be preferred."
+ "(method is not used by Python 3)",
+ ),
+ "W1645": (
+ "Exception.message removed in Python 3",
+ "exception-message-attribute",
+ "Used when the message attribute is accessed on an Exception. Use "
+ "str(exception) instead.",
+ ),
+ "W1646": (
+ "non-text encoding used in str.decode",
+ "invalid-str-codec",
+ "Used when using str.encode or str.decode with a non-text encoding. Use "
+ "codecs module to handle arbitrary codecs.",
+ ),
+ "W1647": (
+ "sys.maxint removed in Python 3",
+ "sys-max-int",
+ "Used when accessing sys.maxint. Use sys.maxsize instead.",
+ ),
+ "W1648": (
+ "Module moved in Python 3",
+ "bad-python3-import",
+ "Used when importing a module that no longer exists in Python 3.",
+ ),
+ "W1649": (
+ "Accessing a deprecated function on the string module",
+ "deprecated-string-function",
+ "Used when accessing a string function that has been deprecated in Python 3.",
+ ),
+ "W1650": (
+ "Using str.translate with deprecated deletechars parameters",
+ "deprecated-str-translate-call",
+ "Used when using the deprecated deletechars parameters from str.translate. Use "
+ "re.sub to remove the desired characters ",
+ ),
+ "W1651": (
+ "Accessing a deprecated function on the itertools module",
+ "deprecated-itertools-function",
+ "Used when accessing a function on itertools that has been removed in Python 3.",
+ ),
+ "W1652": (
+ "Accessing a deprecated fields on the types module",
+ "deprecated-types-field",
+ "Used when accessing a field on types that has been removed in Python 3.",
+ ),
+ "W1653": (
+ "next method defined",
+ "next-method-defined",
+ "Used when a next method is defined that would be an iterator in Python 2 but "
+ "is treated as a normal function in Python 3.",
+ ),
+ "W1654": (
+ "dict.items referenced when not iterating",
+ "dict-items-not-iterating",
+ "Used when dict.items is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1655": (
+ "dict.keys referenced when not iterating",
+ "dict-keys-not-iterating",
+ "Used when dict.keys is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1656": (
+ "dict.values referenced when not iterating",
+ "dict-values-not-iterating",
+ "Used when dict.values is referenced in a non-iterating "
+ "context (returns an iterator in Python 3)",
+ ),
+ "W1657": (
+ "Accessing a removed attribute on the operator module",
+ "deprecated-operator-function",
+ "Used when accessing a field on operator module that has been "
+ "removed in Python 3.",
+ ),
+ "W1658": (
+ "Accessing a removed attribute on the urllib module",
+ "deprecated-urllib-function",
+ "Used when accessing a field on urllib module that has been "
+ "removed or moved in Python 3.",
+ ),
+ "W1659": (
+ "Accessing a removed xreadlines attribute",
+ "xreadlines-attribute",
+ "Used when accessing the xreadlines() function on a file stream, "
+ "removed in Python 3.",
+ ),
+ "W1660": (
+ "Accessing a removed attribute on the sys module",
+ "deprecated-sys-function",
+ "Used when accessing a field on sys module that has been "
+ "removed in Python 3.",
+ ),
+ "W1661": (
+ "Using an exception object that was bound by an except handler",
+ "exception-escape",
+ "Emitted when using an exception, that was bound in an except "
+ "handler, outside of the except handler. On Python 3 these "
+ "exceptions will be deleted once they get out "
+ "of the except handler.",
+ ),
+ "W1662": (
+ "Using a variable that was bound inside a comprehension",
+ "comprehension-escape",
+ "Emitted when using a variable, that was bound in a comprehension "
+ "handler, outside of the comprehension itself. On Python 3 these "
+ "variables will be deleted outside of the "
+ "comprehension.",
+ ),
+ }
+
+ _bad_builtins = frozenset(
+ [
+ "apply",
+ "basestring",
+ "buffer",
+ "cmp",
+ "coerce",
+ "execfile",
+ "file",
+ "input", # Not missing, but incompatible semantics
+ "intern",
+ "long",
+ "raw_input",
+ "reduce",
+ "round", # Not missing, but incompatible semantics
+ "StandardError",
+ "unichr",
+ "unicode",
+ "xrange",
+ "reload",
+ ]
+ )
+
+ _unused_magic_methods = frozenset(
+ [
+ "__coerce__",
+ "__delslice__",
+ "__getslice__",
+ "__setslice__",
+ "__oct__",
+ "__hex__",
+ "__nonzero__",
+ "__cmp__",
+ "__div__",
+ "__idiv__",
+ "__rdiv__",
+ ]
+ )
+
+ _invalid_encodings = frozenset(
+ [
+ "base64_codec",
+ "base64",
+ "base_64",
+ "bz2_codec",
+ "bz2",
+ "hex_codec",
+ "hex",
+ "quopri_codec",
+ "quopri",
+ "quotedprintable",
+ "quoted_printable",
+ "uu_codec",
+ "uu",
+ "zlib_codec",
+ "zlib",
+ "zip",
+ "rot13",
+ "rot_13",
+ ]
+ )
+
+ _bad_python3_module_map = {
+ "sys-max-int": {"sys": frozenset(["maxint"])},
+ "deprecated-itertools-function": {
+ "itertools": frozenset(
+ ["izip", "ifilter", "imap", "izip_longest", "ifilterfalse"]
+ )
+ },
+ "deprecated-types-field": {
+ "types": frozenset(
+ [
+ "EllipsisType",
+ "XRangeType",
+ "ComplexType",
+ "StringType",
+ "TypeType",
+ "LongType",
+ "UnicodeType",
+ "ClassType",
+ "BufferType",
+ "StringTypes",
+ "NotImplementedType",
+ "NoneType",
+ "InstanceType",
+ "FloatType",
+ "SliceType",
+ "UnboundMethodType",
+ "ObjectType",
+ "IntType",
+ "TupleType",
+ "ListType",
+ "DictType",
+ "FileType",
+ "DictionaryType",
+ "BooleanType",
+ "DictProxyType",
+ ]
+ )
+ },
+ "bad-python3-import": frozenset(
+ [
+ "anydbm",
+ "BaseHTTPServer",
+ "__builtin__",
+ "CGIHTTPServer",
+ "ConfigParser",
+ "copy_reg",
+ "cPickle",
+ "cStringIO",
+ "Cookie",
+ "cookielib",
+ "dbhash",
+ "dumbdbm",
+ "dumbdb",
+ "Dialog",
+ "DocXMLRPCServer",
+ "FileDialog",
+ "FixTk",
+ "gdbm",
+ "htmlentitydefs",
+ "HTMLParser",
+ "httplib",
+ "markupbase",
+ "Queue",
+ "repr",
+ "robotparser",
+ "ScrolledText",
+ "SimpleDialog",
+ "SimpleHTTPServer",
+ "SimpleXMLRPCServer",
+ "StringIO",
+ "dummy_thread",
+ "SocketServer",
+ "test.test_support",
+ "Tkinter",
+ "Tix",
+ "Tkconstants",
+ "tkColorChooser",
+ "tkCommonDialog",
+ "Tkdnd",
+ "tkFileDialog",
+ "tkFont",
+ "tkMessageBox",
+ "tkSimpleDialog",
+ "UserList",
+ "UserString",
+ "whichdb",
+ "_winreg",
+ "xmlrpclib",
+ "audiodev",
+ "Bastion",
+ "bsddb185",
+ "bsddb3",
+ "Canvas",
+ "cfmfile",
+ "cl",
+ "commands",
+ "compiler",
+ "dircache",
+ "dl",
+ "exception",
+ "fpformat",
+ "htmllib",
+ "ihooks",
+ "imageop",
+ "imputil",
+ "linuxaudiodev",
+ "md5",
+ "mhlib",
+ "mimetools",
+ "MimeWriter",
+ "mimify",
+ "multifile",
+ "mutex",
+ "new",
+ "popen2",
+ "posixfile",
+ "pure",
+ "rexec",
+ "rfc822",
+ "sets",
+ "sha",
+ "sgmllib",
+ "sre",
+ "stringold",
+ "sunaudio",
+ "sv",
+ "test.testall",
+ "thread",
+ "timing",
+ "toaiff",
+ "user",
+ "urllib2",
+ "urlparse",
+ ]
+ ),
+ "deprecated-string-function": {
+ "string": frozenset(
+ [
+ "maketrans",
+ "atof",
+ "atoi",
+ "atol",
+ "capitalize",
+ "expandtabs",
+ "find",
+ "rfind",
+ "index",
+ "rindex",
+ "count",
+ "lower",
+ "letters",
+ "split",
+ "rsplit",
+ "splitfields",
+ "join",
+ "joinfields",
+ "lstrip",
+ "rstrip",
+ "strip",
+ "swapcase",
+ "translate",
+ "upper",
+ "ljust",
+ "rjust",
+ "center",
+ "zfill",
+ "replace",
+ "lowercase",
+ "letters",
+ "uppercase",
+ "atol_error",
+ "atof_error",
+ "atoi_error",
+ "index_error",
+ ]
+ )
+ },
+ "deprecated-operator-function": {"operator": frozenset({"div"})},
+ "deprecated-urllib-function": {
+ "urllib": frozenset(
+ {
+ "addbase",
+ "addclosehook",
+ "addinfo",
+ "addinfourl",
+ "always_safe",
+ "basejoin",
+ "ftpcache",
+ "ftperrors",
+ "ftpwrapper",
+ "getproxies",
+ "getproxies_environment",
+ "getproxies_macosx_sysconf",
+ "main",
+ "noheaders",
+ "pathname2url",
+ "proxy_bypass",
+ "proxy_bypass_environment",
+ "proxy_bypass_macosx_sysconf",
+ "quote",
+ "quote_plus",
+ "reporthook",
+ "splitattr",
+ "splithost",
+ "splitnport",
+ "splitpasswd",
+ "splitport",
+ "splitquery",
+ "splittag",
+ "splittype",
+ "splituser",
+ "splitvalue",
+ "unquote",
+ "unquote_plus",
+ "unwrap",
+ "url2pathname",
+ "urlcleanup",
+ "urlencode",
+ "urlopen",
+ "urlretrieve",
+ }
+ )
+ },
+ "deprecated-sys-function": {"sys": frozenset({"exc_clear"})},
+ }
+
+ _python_2_tests = frozenset(
+ [
+ astroid.extract_node(x).repr_tree()
+ for x in [
+ "sys.version_info[0] == 2",
+ "sys.version_info[0] < 3",
+ "sys.version_info == (2, 7)",
+ "sys.version_info <= (2, 7)",
+ "sys.version_info < (3, 0)",
+ ]
+ ]
+ )
+
    def __init__(self, *args, **kwargs):
        """Initialize per-run checker state, then delegate to the base class."""
        # Set when `from __future__ import division` is seen in the module.
        self._future_division = False
        # Set when `from __future__ import absolute_import` is seen.
        self._future_absolute_import = False
        # Modules already reported as deprecated, to avoid duplicate messages.
        self._modules_warned_about = set()
        # Stack of Branch records for the if/ifexp nodes currently visited.
        self._branch_stack = []
        super(Python3Checker, self).__init__(*args, **kwargs)
+
    # pylint: disable=keyword-arg-before-vararg, arguments-differ
    def add_message(self, msg_id, always_warn=False, *args, **kwargs):
        """Emit a message unless we are inside a known Python-2-only branch.

        ``always_warn=True`` bypasses the branch suppression (used for
        constructs worth reporting regardless, e.g. print statements).
        """
        if always_warn or not (
            self._branch_stack and self._branch_stack[-1].is_py2_only
        ):
            super(Python3Checker, self).add_message(msg_id, *args, **kwargs)
+
    def _is_py2_test(self, node):
        """Return True if the node's test guards a Python-2-only branch.

        Recognizes ``six.PY2`` and the ``sys.version_info`` comparisons
        snapshotted in ``_python_2_tests``.
        """
        if isinstance(node.test, astroid.Attribute) and isinstance(
            node.test.expr, astroid.Name
        ):
            if node.test.expr.name == "six" and node.test.attrname == "PY2":
                return True
        elif (
            isinstance(node.test, astroid.Compare)
            and node.test.repr_tree() in self._python_2_tests
        ):
            return True
        return False
+
    def visit_if(self, node):
        # Track whether this `if` opens a Python-2-only branch.
        self._branch_stack.append(Branch(node, self._is_py2_test(node)))
+
    def leave_if(self, node):
        # Pop the branch pushed by visit_if; sanity-check pairing.
        assert self._branch_stack.pop().node == node
+
    def visit_ifexp(self, node):
        # Conditional expressions can also guard Python-2-only code.
        self._branch_stack.append(Branch(node, self._is_py2_test(node)))
+
    def leave_ifexp(self, node):
        # Pop the branch pushed by visit_ifexp; sanity-check pairing.
        assert self._branch_stack.pop().node == node
+
    def visit_module(self, node):  # pylint: disable=unused-argument
        """Clear checker state after previous module."""
        # NOTE(review): `_modules_warned_about` is intentionally NOT reset
        # here — presumably deprecated-module warnings are deduplicated
        # across the whole run; confirm before changing.
        self._future_division = False
        self._future_absolute_import = False
+
    def visit_functiondef(self, node):
        """Flag Python-2-only magic methods and `next` method definitions."""
        if node.is_method():
            if node.name in self._unused_magic_methods:
                # Strip dunder underscores so e.g. `__cmp__` maps to the
                # `cmp-method` message symbol.
                method_name = node.name
                if node.name.startswith("__"):
                    method_name = node.name[2:-2]
                self.add_message(method_name + "-method", node=node)
            elif node.name == "next":
                # If there is a method named `next` declared, if it is invokable
                # with zero arguments then it implements the Iterator protocol.
                # This means if the method is an instance method or a
                # classmethod 1 argument should cause a failure, if it is a
                # staticmethod 0 arguments should cause a failure.
                failing_arg_count = 1
                if utils.decorated_with(node, [bases.BUILTINS + ".staticmethod"]):
                    failing_arg_count = 0
                if len(node.args.args) == failing_arg_count:
                    self.add_message("next-method-defined", node=node)
+
+ @utils.check_messages("parameter-unpacking")
+ def visit_arguments(self, node):
+ for arg in node.args:
+ if isinstance(arg, astroid.Tuple):
+ self.add_message("parameter-unpacking", node=arg)
+
    @utils.check_messages("comprehension-escape")
    def visit_listcomp(self, node):
        """Flag uses of a list-comprehension loop variable after the
        comprehension (it leaked in Python 2 but is scoped in Python 3).
        """
        # Loop variables bound by this comprehension's generators.
        names = {
            generator.target.name
            for generator in node.generators
            if isinstance(generator.target, astroid.AssignName)
        }
        scope = node.parent.scope()
        scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef)
        # If the same name is re-assigned later in the enclosing scope,
        # later reads refer to that assignment, not the leaked variable.
        has_redefined_assign_name = any(
            assign_name
            for assign_name in scope.nodes_of_class(
                astroid.AssignName, skip_klass=astroid.FunctionDef
            )
            if assign_name.name in names and assign_name.lineno > node.lineno
        )
        if has_redefined_assign_name:
            return

        # Report at most once per leaked name, and only for reads that occur
        # after the comprehension and outside of it.
        emitted_for_names = set()
        scope_names = list(scope_names)
        for scope_name in scope_names:
            if (
                scope_name.name not in names
                or scope_name.lineno <= node.lineno
                or scope_name.name in emitted_for_names
                or scope_name.scope() == node
            ):
                continue

            emitted_for_names.add(scope_name.name)
            self.add_message("comprehension-escape", node=scope_name)
+
    def visit_name(self, node):
        """Detect when a "bad" built-in is referenced."""
        found_node, _ = node.lookup(node.name)
        if not _is_builtin(found_node):
            return
        if node.name not in self._bad_builtins:
            return
        # Suppress when the usage is wrapped in exception handling that
        # would catch the Python-3 failure.
        if node_ignores_exception(node) or isinstance(
            find_try_except_wrapper_node(node), astroid.ExceptHandler
        ):
            return

        # Message symbols follow the `<builtin>-builtin` naming scheme.
        message = node.name.lower() + "-builtin"
        self.add_message(message, node=node)
+
    @utils.check_messages("print-statement")
    def visit_print(self, node):
        """Flag Python-2 print statements, even inside py2-only branches."""
        self.add_message("print-statement", node=node, always_warn=True)
+
    def _warn_if_deprecated(self, node, module, attributes, report_on_modules=True):
        """Emit deprecation messages for *module* / its *attributes*.

        Entries in ``_bad_python3_module_map`` are either a frozenset of
        module names (whole module deprecated) or a dict mapping a module
        to its deprecated attribute names.
        """
        for message, module_map in self._bad_python3_module_map.items():
            if module in module_map and module not in self._modules_warned_about:
                if isinstance(module_map, frozenset):
                    if report_on_modules:
                        # Whole-module warning: remember it to avoid duplicates.
                        self._modules_warned_about.add(module)
                        self.add_message(message, node=node)
                elif attributes and module_map[module].intersection(attributes):
                    self.add_message(message, node=node)
+
    def visit_importfrom(self, node):
        """Track __future__ flags and flag deprecated / star imports."""
        if node.modname == "__future__":
            for name, _ in node.names:
                if name == "division":
                    self._future_division = True
                elif name == "absolute_import":
                    self._future_absolute_import = True
        else:
            if not self._future_absolute_import:
                if self.linter.is_message_enabled("no-absolute-import"):
                    # Report only once per module.
                    self.add_message("no-absolute-import", node=node)
                    self._future_absolute_import = True
            # Relative (level > 0) and conditionally-guarded imports are
            # excluded from deprecation warnings.
            if not _is_conditional_import(node) and not node.level:
                self._warn_if_deprecated(node, node.modname, {x[0] for x in node.names})

        if node.names[0][0] == "*":
            if self.linter.is_message_enabled("import-star-module-level"):
                # `from x import *` inside a function/class body is an error
                # in Python 3.
                if not isinstance(node.scope(), astroid.Module):
                    self.add_message("import-star-module-level", node=node)
+
    def visit_import(self, node):
        """Flag missing absolute_import and deprecated module imports."""
        if not self._future_absolute_import:
            if self.linter.is_message_enabled("no-absolute-import"):
                # Report only once per module.
                self.add_message("no-absolute-import", node=node)
                self._future_absolute_import = True
        if not _is_conditional_import(node):
            for name, _ in node.names:
                self._warn_if_deprecated(node, name, None)
+
+ @utils.check_messages("metaclass-assignment")
+ def visit_classdef(self, node):
+ if "__metaclass__" in node.locals:
+ self.add_message("metaclass-assignment", node=node)
+ locals_and_methods = set(node.locals).union(x.name for x in node.mymethods())
+ if "__eq__" in locals_and_methods and "__hash__" not in locals_and_methods:
+ self.add_message("eq-without-hash", node=node)
+
    @utils.check_messages("old-division")
    def visit_binop(self, node):
        """Flag `/` divisions that change meaning under true division."""
        if not self._future_division and node.op == "/":
            for arg in (node.left, node.right):
                inferred = utils.safe_infer(arg)
                # If we can infer the object and that object is not an int, bail out.
                if inferred and not (
                    (
                        isinstance(inferred, astroid.Const)
                        and isinstance(inferred.value, int)
                    )
                    or (
                        isinstance(inferred, astroid.Instance)
                        and inferred.name == "int"
                    )
                ):
                    break
            else:
                # for/else: both operands look like ints (or are uninferable),
                # so `/` would truncate under Python 2 but not under Python 3.
                self.add_message("old-division", node=node)
+
    def _check_cmp_argument(self, node):
        # Check that the `cmp` argument is used
        # Applies to `list.sort(cmp=...)` and `sorted(..., cmp=...)`,
        # both removed in Python 3 (use `key=` instead).
        kwargs = []
        if isinstance(node.func, astroid.Attribute) and node.func.attrname == "sort":
            inferred = utils.safe_infer(node.func.expr)
            if not inferred:
                return

            builtins_list = "{}.list".format(bases.BUILTINS)
            if isinstance(inferred, astroid.List) or inferred.qname() == builtins_list:
                kwargs = node.keywords

        elif isinstance(node.func, astroid.Name) and node.func.name == "sorted":
            inferred = utils.safe_infer(node.func)
            if not inferred:
                return

            builtins_sorted = "{}.sorted".format(bases.BUILTINS)
            if inferred.qname() == builtins_sorted:
                kwargs = node.keywords

        for kwarg in kwargs or []:
            if kwarg.arg == "cmp":
                self.add_message("using-cmp-argument", node=node)
                return
+
+ @staticmethod
+ def _is_constant_string_or_name(node):
+ if isinstance(node, astroid.Const):
+ return isinstance(node.value, str)
+ return isinstance(node, astroid.Name)
+
+ @staticmethod
+ def _is_none(node):
+ return isinstance(node, astroid.Const) and node.value is None
+
+ @staticmethod
+ def _has_only_n_positional_args(node, number_of_args):
+ return len(node.args) == number_of_args and all(node.args) and not node.keywords
+
    @staticmethod
    def _could_be_string(inferred_types):
        """Return a confidence level if every inferred type could be a string.

        Returns INFERENCE when all types are string constants, INFERENCE_FAILURE
        when inference was empty/uninferable, and None when a definitely
        non-string type is present.
        """
        confidence = INFERENCE if inferred_types else INFERENCE_FAILURE
        for inferred_type in inferred_types:
            if inferred_type is astroid.Uninferable:
                confidence = INFERENCE_FAILURE
            elif not (
                isinstance(inferred_type, astroid.Const)
                and isinstance(inferred_type.value, str)
            ):
                return None
        return confidence
+
    def visit_call(self, node):
        """Dispatch the many call-site checks: cmp= usage, deprecated module
        functions, dict iteration methods, str.encode/translate pitfalls and
        non-iterating uses of filter/map/range/zip.
        """
        self._check_cmp_argument(node)

        if isinstance(node.func, astroid.Attribute):
            # Method call: infer the receiver to decide which checks apply.
            inferred_types = set()
            try:
                for inferred_receiver in node.func.expr.infer():
                    if inferred_receiver is astroid.Uninferable:
                        continue
                    inferred_types.add(inferred_receiver)
                    if isinstance(inferred_receiver, astroid.Module):
                        self._warn_if_deprecated(
                            node,
                            inferred_receiver.name,
                            {node.func.attrname},
                            report_on_modules=False,
                        )
                    # dict.keys()/values()/items() return views in Python 3;
                    # only fine when immediately iterated.
                    if (
                        _inferred_value_is_dict(inferred_receiver)
                        and node.func.attrname in DICT_METHODS
                    ):
                        if not _in_iterating_context(node):
                            checker = "dict-{}-not-iterating".format(node.func.attrname)
                            self.add_message(checker, node=node)
            except astroid.InferenceError:
                pass
            if node.args:
                is_str_confidence = self._could_be_string(inferred_types)
                if is_str_confidence:
                    if (
                        node.func.attrname in ("encode", "decode")
                        and len(node.args) >= 1
                        and node.args[0]
                    ):
                        first_arg = node.args[0]
                        self._validate_encoding(first_arg, node)
                    if (
                        node.func.attrname == "translate"
                        and self._has_only_n_positional_args(node, 2)
                        and self._is_none(node.args[0])
                        and self._is_constant_string_or_name(node.args[1])
                    ):
                        # The above statement looking for calls of the form:
                        #
                        # foo.translate(None, 'abc123')
                        #
                        # or
                        #
                        # foo.translate(None, some_variable)
                        #
                        # This check is somewhat broad and _may_ have some false positives, but
                        # after checking several large codebases it did not have any false
                        # positives while finding several real issues. This call pattern seems
                        # rare enough that the trade off is worth it.
                        self.add_message(
                            "deprecated-str-translate-call",
                            node=node,
                            confidence=is_str_confidence,
                        )
                return
            if node.keywords:
                return
            # Zero-argument method calls only below this point.
            if node.func.attrname == "next":
                self.add_message("next-method-called", node=node)
            else:
                if node.func.attrname in ("iterkeys", "itervalues", "iteritems"):
                    self.add_message("dict-iter-method", node=node)
                elif node.func.attrname in ("viewkeys", "viewvalues", "viewitems"):
                    self.add_message("dict-view-method", node=node)
        elif isinstance(node.func, astroid.Name):
            found_node = node.func.lookup(node.func.name)[0]
            if _is_builtin(found_node):
                if node.func.name in ("filter", "map", "range", "zip"):
                    if not _in_iterating_context(node):
                        checker = "{}-builtin-not-iterating".format(node.func.name)
                        self.add_message(checker, node=node)
                if node.func.name == "open" and node.keywords:
                    kwargs = node.keywords
                    for kwarg in kwargs or []:
                        if kwarg.arg == "encoding":
                            self._validate_encoding(kwarg.value, node)
                            break
+
+ def _validate_encoding(self, encoding, node):
+ if isinstance(encoding, astroid.Const):
+ value = encoding.value
+ if value in self._invalid_encodings:
+ self.add_message("invalid-str-codec", node=node)
+
    @utils.check_messages("indexing-exception")
    def visit_subscript(self, node):
        """ Look for indexing exceptions. """
        # `exc[0]` worked in Python 2 because exceptions were iterable;
        # in Python 3 this raises TypeError.
        try:
            for inferred in node.value.infer():
                if not isinstance(inferred, astroid.Instance):
                    continue
                if utils.inherit_from_std_ex(inferred):
                    self.add_message("indexing-exception", node=node)
        except astroid.InferenceError:
            return
+
    def visit_assignattr(self, node):
        # Augmented assignment (`obj.attr += x`) reads the attribute,
        # so run the attribute checks on it as well.
        if isinstance(node.assign_type(), astroid.AugAssign):
            self.visit_attribute(node)
+
    def visit_delattr(self, node):
        # `del obj.attr` references the attribute; reuse the same checks.
        self.visit_attribute(node)
+
    @utils.check_messages("exception-message-attribute", "xreadlines-attribute")
    def visit_attribute(self, node):
        """Look for removed attributes"""
        # file.xreadlines() was removed in Python 3.
        if node.attrname == "xreadlines":
            self.add_message("xreadlines-attribute", node=node)
            return

        # BaseException.message was deprecated in 2.6 and removed in 3.0.
        exception_message = "message"
        try:
            for inferred in node.expr.infer():
                if isinstance(inferred, astroid.Instance) and utils.inherit_from_std_ex(
                    inferred
                ):
                    if node.attrname == exception_message:

                        # Exceptions with .message clearly defined are an exception
                        if exception_message in inferred.instance_attrs:
                            continue
                        self.add_message("exception-message-attribute", node=node)
                if isinstance(inferred, astroid.Module):
                    # Module attribute access can hit deprecated functions.
                    self._warn_if_deprecated(
                        node, inferred.name, {node.attrname}, report_on_modules=False
                    )
        except astroid.InferenceError:
            return
+
+ @utils.check_messages("unpacking-in-except", "comprehension-escape")
+ def visit_excepthandler(self, node):
+ """Visit an except handler block and check for exception unpacking."""
+
+ def _is_used_in_except_block(node):
+ scope = node.scope()
+ current = node
+ while (
+ current
+ and current != scope
+ and not isinstance(current, astroid.ExceptHandler)
+ ):
+ current = current.parent
+ return isinstance(current, astroid.ExceptHandler) and current.type != node
+
+ if isinstance(node.name, (astroid.Tuple, astroid.List)):
+ self.add_message("unpacking-in-except", node=node)
+ return
+
+ if not node.name:
+ return
+
+ # Find any names
+ scope = node.parent.scope()
+ scope_names = scope.nodes_of_class(astroid.Name, skip_klass=astroid.FunctionDef)
+ scope_names = list(scope_names)
+ potential_leaked_names = [
+ scope_name
+ for scope_name in scope_names
+ if scope_name.name == node.name.name
+ and scope_name.lineno > node.lineno
+ and not _is_used_in_except_block(scope_name)
+ ]
+ reassignments_for_same_name = {
+ assign_name.lineno
+ for assign_name in scope.nodes_of_class(
+ astroid.AssignName, skip_klass=astroid.FunctionDef
+ )
+ if assign_name.name == node.name.name
+ }
+ for leaked_name in potential_leaked_names:
+ if any(
+ node.lineno < elem < leaked_name.lineno
+ for elem in reassignments_for_same_name
+ ):
+ continue
+ self.add_message("exception-escape", node=leaked_name)
+
    @utils.check_messages("backtick")
    def visit_repr(self, node):
        """Flag the Python-2 backtick repr syntax, removed in Python 3."""
        self.add_message("backtick", node=node)
+
    @utils.check_messages("raising-string", "old-raise-syntax")
    def visit_raise(self, node):
        """Visit a raise statement and check for raising
        strings or old-raise-syntax.
        """

        # Ignore empty raise.
        if node.exc is None:
            return
        expr = node.exc
        # First check the literal expression; fall back to inference
        # when the raised value is not a direct constant.
        if self._check_raise_value(node, expr):
            return
        try:
            value = next(astroid.unpack_infer(expr))
        except astroid.InferenceError:
            return
        self._check_raise_value(node, value)
+
+ def _check_raise_value(self, node, expr):
+ if isinstance(expr, astroid.Const):
+ value = expr.value
+ if isinstance(value, str):
+ self.add_message("raising-string", node=node)
+ return True
+ return None
+
+
class Python3TokenChecker(checkers.BaseTokenChecker):
    """Token-level porting checks that cannot be expressed on the AST
    (long suffix, `<>`, old octal literals, non-ascii bytes literals)."""

    __implements__ = interfaces.ITokenChecker
    name = "python3"
    # Disabled by default; enabled explicitly via --py3k style runs.
    enabled = False

    msgs = {
        "E1606": (
            "Use of long suffix",
            "long-suffix",
            'Used when "l" or "L" is used to mark a long integer. '
            "This will not work in Python 3, since `int` and `long` "
            "types have merged.",
            {"maxversion": (3, 0)},
        ),
        "E1607": (
            "Use of the <> operator",
            "old-ne-operator",
            'Used when the deprecated "<>" operator is used instead '
            'of "!=". This is removed in Python 3.',
            {"maxversion": (3, 0), "old_names": [("W0331", "old-old-ne-operator")]},
        ),
        "E1608": (
            "Use of old octal literal",
            "old-octal-literal",
            "Used when encountering the old octal syntax, "
            "removed in Python 3. To use the new syntax, "
            "prepend 0o on the number.",
            {"maxversion": (3, 0)},
        ),
        "E1610": (
            "Non-ascii bytes literals not supported in 3.x",
            "non-ascii-bytes-literal",
            "Used when non-ascii bytes literals are found in a program. "
            "They are no longer supported in Python 3.",
            {"maxversion": (3, 0)},
        ),
    }

    def process_tokens(self, tokens):
        """Scan the raw token stream for Python-2-only literals/operators."""
        for idx, (tok_type, token, start, _, _) in enumerate(tokens):
            if tok_type == tokenize.NUMBER:
                if token.lower().endswith("l"):
                    # This has a different semantic than lowercase-l-suffix.
                    self.add_message("long-suffix", line=start[0])
                elif _is_old_octal(token):
                    self.add_message("old-octal-literal", line=start[0])
            if tokens[idx][1] == "<>":
                self.add_message("old-ne-operator", line=tokens[idx][2][0])
            if tok_type == tokenize.STRING and token.startswith("b"):
                # Bytes literals may only contain ASCII in Python 3.
                if any(elem for elem in token if ord(elem) > 127):
                    self.add_message("non-ascii-bytes-literal", line=start[0])
+
+
def register(linter):
    """Required entry point: auto-register this module's checkers."""
    linter.register_checker(Python3Checker(linter))
    linter.register_checker(Python3TokenChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/raw_metrics.py b/venv/Lib/site-packages/pylint/checkers/raw_metrics.py
new file mode 100644
index 0000000..0564398
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/raw_metrics.py
@@ -0,0 +1,119 @@
+# Copyright (c) 2007, 2010, 2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013 Google, Inc.
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
+ http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+Raw metrics checker
+"""
+
+import tokenize
+from typing import Any
+
+from pylint.checkers import BaseTokenChecker
+from pylint.exceptions import EmptyReportError
+from pylint.interfaces import ITokenChecker
+from pylint.reporters.ureports.nodes import Table
+
+
def report_raw_stats(sect, stats, _):
    """Build the raw-metrics table: percentage of code / docstring /
    comment / empty lines. Raises EmptyReportError when nothing was analyzed.
    """
    total_lines = stats["total_lines"]
    if not total_lines:
        raise EmptyReportError()
    sect.description = "%s lines have been analyzed" % total_lines
    header = ("type", "number", "%", "previous", "difference")
    cells = []
    for node_type in ("code", "docstring", "comment", "empty"):
        total = stats[node_type + "_lines"]
        percent = float(total * 100) / total_lines
        # "NC" placeholders: previous run / difference are not computed here.
        cells.extend((node_type, str(total), "%.2f" % percent, "NC", "NC"))
    sect.append(Table(children=header + tuple(cells), cols=5, rheaders=1))
+
+
class RawMetricsChecker(BaseTokenChecker):
    """does not check anything but gives some raw metrics :
    * total number of lines
    * total number of code lines
    * total number of docstring lines
    * total number of comments lines
    * total number of empty lines
    """

    __implements__ = (ITokenChecker,)

    # configuration section name
    name = "metrics"
    # configuration options
    options = ()
    # messages
    msgs = {}  # type: Any
    # reports
    reports = (("RP0701", "Raw metrics", report_raw_stats),)

    def __init__(self, linter):
        """Initialize with empty stats; real counters are set in open()."""
        BaseTokenChecker.__init__(self, linter)
        self.stats = None

    def open(self):
        """init statistics"""
        self.stats = self.linter.add_stats(
            total_lines=0,
            code_lines=0,
            empty_lines=0,
            docstring_lines=0,
            comment_lines=0,
        )

    def process_tokens(self, tokens):
        """update stats"""
        # Walk the token stream line-group by line-group; get_type returns
        # the next index, how many physical lines were consumed and the
        # classification key to increment.
        i = 0
        tokens = list(tokens)
        while i < len(tokens):
            i, lines_number, line_type = get_type(tokens, i)
            self.stats["total_lines"] += lines_number
            self.stats[line_type] += lines_number
+
+
# Token kinds that never determine a line's classification.
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)


def get_type(tokens, start_index):
    """return the line type : docstring, comment, code, empty"""
    index = start_index
    start_pos = tokens[index][2]
    end_pos = start_pos
    line_type = None
    # Consume every token starting on the same physical row as the first
    # one, classifying the group by the first significant token found.
    while index < len(tokens) and tokens[index][2][0] == start_pos[0]:
        current_type = tokens[index][0]
        end_pos = tokens[index][3]
        if line_type is None and current_type not in JUNK:
            if current_type == tokenize.STRING:
                line_type = "docstring_lines"
            elif current_type == tokenize.COMMENT:
                line_type = "comment_lines"
            else:
                line_type = "code_lines"
        index += 1
    if line_type is None:
        line_type = "empty_lines"
    elif index < len(tokens) and tokens[index][0] == tokenize.NEWLINE:
        # Fold the trailing logical NEWLINE into this group.
        index += 1
    # A multi-line token (e.g. triple-quoted string) spans several rows.
    return index, end_pos[0] - start_pos[0] + 1, line_type
+
+
def register(linter):
    """Required entry point: auto-register this checker with pylint."""
    linter.register_checker(RawMetricsChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/refactoring.py b/venv/Lib/site-packages/pylint/checkers/refactoring.py
new file mode 100644
index 0000000..2831343
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/refactoring.py
@@ -0,0 +1,1510 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
+# Copyright (c) 2017 Łukasz Sznuk <ls@rdprojekt.pl>
+# Copyright (c) 2017 Alex Hearn <alex.d.hearn@gmail.com>
+# Copyright (c) 2017 Antonio Ossa <aaossa@uc.cl>
+# Copyright (c) 2018 Konstantin Manna <Konstantin@Manna.uno>
+# Copyright (c) 2018 Konstantin <Github@pheanex.de>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Matej Marušák <marusak.matej@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Mr. Senko <atodorov@mrsenko.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Looks for code which can be refactored."""
+import builtins
+import collections
+import itertools
+import tokenize
+from functools import reduce
+
+import astroid
+from astroid import decorators
+
+from pylint import checkers, interfaces
+from pylint import utils as lint_utils
+from pylint.checkers import utils
+
# Qualified names of callables known to yield endlessly.
KNOWN_INFINITE_ITERATORS = {"itertools.count"}
# Builtin exit helpers that consider-using-sys-exit suggests replacing.
BUILTIN_EXIT_FUNCS = frozenset(("quit", "exit"))
+
+
def _if_statement_is_always_returning(if_node, returning_node_class):
    """Return True if any top-level statement of the if body is an instance
    of *returning_node_class* (e.g. a Return or Raise node)."""
    return any(
        isinstance(statement, returning_node_class) for statement in if_node.body
    )
+
+
def _is_len_call(node):
    """Checks if node is len(SOMETHING)."""
    if not isinstance(node, astroid.Call):
        return False
    func = node.func
    return isinstance(func, astroid.Name) and func.name == "len"
+
+
def _is_constant_zero(node):
    """Return True for a literal constant that equals 0."""
    if not isinstance(node, astroid.Const):
        return False
    return node.value == 0
+
+
def _node_is_test_condition(node):
    """ Checks if node is an if, while, assert or if expression statement."""
    condition_node_types = (astroid.If, astroid.While, astroid.Assert, astroid.IfExp)
    return isinstance(node, condition_node_types)
+
+
def _is_trailing_comma(tokens, index):
    """Check if the given token is a trailing comma

    :param tokens: Sequence of modules tokens
    :type tokens: list[tokenize.TokenInfo]
    :param int index: Index of token under check in tokens
    :returns: True if the token is a comma which trails an expression
    :rtype: bool
    """
    token = tokens[index]
    if token.exact_type != tokenize.COMMA:
        return False
    # Must have remaining tokens on the same line such as NEWLINE
    left_tokens = itertools.islice(tokens, index + 1, None)
    same_line_remaining_tokens = list(
        itertools.takewhile(
            lambda other_token, _token=token: other_token.start[0] == _token.start[0],
            left_tokens,
        )
    )
    # Note: If the newline is tokenize.NEWLINE and not tokenize.NL
    # then the newline denotes the end of expression
    is_last_element = all(
        other_token.type in (tokenize.NEWLINE, tokenize.COMMENT)
        for other_token in same_line_remaining_tokens
    )
    if not same_line_remaining_tokens or not is_last_element:
        return False

    def get_curline_index_start():
        """Get the index denoting the start of the current line"""
        # Walk backwards to the previous NEWLINE/NL (or the file start).
        for subindex, token in enumerate(reversed(tokens[:index])):
            # See Lib/tokenize.py and Lib/token.py in cpython for more info
            if token.type in (tokenize.NEWLINE, tokenize.NL):
                return index - subindex
        return 0

    curline_start = get_curline_index_start()
    expected_tokens = {"return", "yield"}
    # Only commas trailing a return/yield or an assignment on this line
    # are the accidental-tuple pattern this check targets.
    for prevtoken in tokens[curline_start:index]:
        if "=" in prevtoken.string or prevtoken.string in expected_tokens:
            return True
    return False
+
+
+class RefactoringChecker(checkers.BaseTokenChecker):
+ """Looks for code which can be refactored
+
+ This checker also mixes the astroid and the token approaches
+ in order to create knowledge about whether an "else if" node
+ is a true "else if" node, or an "elif" node.
+ """
+
+ __implements__ = (interfaces.ITokenChecker, interfaces.IAstroidChecker)
+
+ name = "refactoring"
+
+ msgs = {
+ "R1701": (
+ "Consider merging these isinstance calls to isinstance(%s, (%s))",
+ "consider-merging-isinstance",
+ "Used when multiple consecutive isinstance calls can be merged into one.",
+ ),
+ "R1706": (
+ "Consider using ternary (%s)",
+ "consider-using-ternary",
+ "Used when one of known pre-python 2.5 ternary syntax is used.",
+ ),
+ "R1709": (
+ "Boolean expression may be simplified to %s",
+ "simplify-boolean-expression",
+ "Emitted when redundant pre-python 2.5 ternary syntax is used.",
+ ),
+ "R1702": (
+ "Too many nested blocks (%s/%s)",
+ "too-many-nested-blocks",
+ "Used when a function or a method has too many nested "
+ "blocks. This makes the code less understandable and "
+ "maintainable.",
+ {"old_names": [("R0101", "old-too-many-nested-blocks")]},
+ ),
+ "R1703": (
+ "The if statement can be replaced with %s",
+ "simplifiable-if-statement",
+ "Used when an if statement can be replaced with 'bool(test)'. ",
+ {"old_names": [("R0102", "old-simplifiable-if-statement")]},
+ ),
+ "R1704": (
+ "Redefining argument with the local name %r",
+ "redefined-argument-from-local",
+ "Used when a local name is redefining an argument, which might "
+ "suggest a potential error. This is taken in account only for "
+ "a handful of name binding operations, such as for iteration, "
+ "with statement assignment and exception handler assignment.",
+ ),
+ "R1705": (
+ 'Unnecessary "%s" after "return"',
+ "no-else-return",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a return statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "return statement.",
+ ),
+ "R1707": (
+ "Disallow trailing comma tuple",
+ "trailing-comma-tuple",
+ "In Python, a tuple is actually created by the comma symbol, "
+ "not by the parentheses. Unfortunately, one can actually create a "
+ "tuple by misplacing a trailing comma, which can lead to potential "
+ "weird bugs in your code. You should always use parentheses "
+ "explicitly for creating a tuple.",
+ ),
+ "R1708": (
+ "Do not raise StopIteration in generator, use return statement instead",
+ "stop-iteration-return",
+ "According to PEP479, the raise of StopIteration to end the loop of "
+ "a generator may lead to hard to find bugs. This PEP specify that "
+ "raise StopIteration has to be replaced by a simple return statement",
+ ),
+ "R1710": (
+ "Either all return statements in a function should return an expression, "
+ "or none of them should.",
+ "inconsistent-return-statements",
+ "According to PEP8, if any return statement returns an expression, "
+ "any return statements where no value is returned should explicitly "
+ "state this as return None, and an explicit return statement "
+ "should be present at the end of the function (if reachable)",
+ ),
+ "R1711": (
+ "Useless return at end of function or method",
+ "useless-return",
+ 'Emitted when a single "return" or "return None" statement is found '
+ "at the end of function or method definition. This statement can safely be "
+ "removed because Python will implicitly return None",
+ ),
+ "R1712": (
+ "Consider using tuple unpacking for swapping variables",
+ "consider-swap-variables",
+ "You do not have to use a temporary variable in order to "
+ 'swap variables. Using "tuple unpacking" to directly swap '
+ "variables makes the intention more clear.",
+ ),
+ "R1713": (
+ "Consider using str.join(sequence) for concatenating "
+ "strings from an iterable",
+ "consider-using-join",
+ "Using str.join(sequence) is faster, uses less memory "
+ "and increases readability compared to for-loop iteration.",
+ ),
+ "R1714": (
+ 'Consider merging these comparisons with "in" to %r',
+ "consider-using-in",
+ "To check if a variable is equal to one of many values,"
+ 'combine the values into a tuple and check if the variable is contained "in" it '
+ "instead of checking for equality against each of the values."
+ "This is faster and less verbose.",
+ ),
+ "R1715": (
+ "Consider using dict.get for getting values from a dict "
+ "if a key is present or a default if not",
+ "consider-using-get",
+ "Using the builtin dict.get for getting a value from a dictionary "
+ "if a key is present or a default if not, is simpler and considered "
+ "more idiomatic, although sometimes a bit slower",
+ ),
+ "R1716": (
+ "Simplify chained comparison between the operands",
+ "chained-comparison",
+ "This message is emitted when pylint encounters boolean operation like"
+ '"a < b and b < c", suggesting instead to refactor it to "a < b < c"',
+ ),
+ "R1717": (
+ "Consider using a dictionary comprehension",
+ "consider-using-dict-comprehension",
+ "Emitted when we detect the creation of a dictionary "
+ "using the dict() callable and a transient list. "
+ "Although there is nothing syntactically wrong with this code, "
+ "it is hard to read and can be simplified to a dict comprehension."
+ "Also it is faster since you don't need to create another "
+ "transient list",
+ ),
+ "R1718": (
+ "Consider using a set comprehension",
+ "consider-using-set-comprehension",
+ "Although there is nothing syntactically wrong with this code, "
+ "it is hard to read and can be simplified to a set comprehension."
+ "Also it is faster since you don't need to create another "
+ "transient list",
+ ),
+ "R1719": (
+ "The if expression can be replaced with %s",
+ "simplifiable-if-expression",
+ "Used when an if expression can be replaced with 'bool(test)'. ",
+ ),
+ "R1720": (
+ 'Unnecessary "%s" after "raise"',
+ "no-else-raise",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a raise statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "raise statement.",
+ ),
+ "R1721": (
+ "Unnecessary use of a comprehension",
+ "unnecessary-comprehension",
+ "Instead of using an identitiy comprehension, "
+ "consider using the list, dict or set constructor. "
+ "It is faster and simpler.",
+ ),
+ "R1722": (
+ "Consider using sys.exit()",
+ "consider-using-sys-exit",
+ "Instead of using exit() or quit(), consider using the sys.exit().",
+ ),
+ "R1723": (
+ 'Unnecessary "%s" after "break"',
+ "no-else-break",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a break statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "break statement.",
+ ),
+ "R1724": (
+ 'Unnecessary "%s" after "continue"',
+ "no-else-continue",
+ "Used in order to highlight an unnecessary block of "
+ "code following an if containing a continue statement. "
+ "As such, it will warn when it encounters an else "
+ "following a chain of ifs, all of them containing a "
+ "continue statement.",
+ ),
+ }
    # User-configurable options for this checker.
    options = (
        (
            "max-nested-blocks",
            {
                "default": 5,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of nested blocks for function / method body",
            },
        ),
        (
            "never-returning-functions",
            {
                "default": ("sys.exit",),
                "type": "csv",
                "help": "Complete name of functions that never returns. When checking "
                "for inconsistent-return-statements if a never returning function is "
                "called then it will be considered as an explicit return statement "
                "and no message will be printed.",
            },
        ),
    )

    # Checker priority used when registering with the linter.
    priority = 0
+
    def __init__(self, linter=None):
        """Set up per-run state; option-derived state is finished in open()."""
        checkers.BaseTokenChecker.__init__(self, linter)
        # Collected Return nodes (populated while visiting functions).
        self._return_nodes = {}
        self._init()
        # Filled from configuration in open().
        self._never_returning_functions = None
+
    def _init(self):
        """Reset the per-module bookkeeping for nesting/elif tracking."""
        self._nested_blocks = []
        self._elifs = []
        self._nested_blocks_msg = None
        self._reported_swap_nodes = set()
+
def open(self):
    """Cache config-derived state once option parsing has completed."""
    # do this in open since config not fully initialized in __init__
    self._never_returning_functions = set(self.config.never_returning_functions)
+
@decorators.cachedproperty
def _dummy_rgx(self):
    """Regex matching "dummy" variable names (the dummy-variables-rgx option), or None."""
    return lint_utils.get_global_option(self, "dummy-variables-rgx", default=None)
+
@staticmethod
def _is_bool_const(node):
    """Return True if node.value is a literal True/False constant."""
    return isinstance(node.value, astroid.Const) and isinstance(
        node.value.value, bool
    )
+
def _is_actual_elif(self, node):
    """Check if the given node is an actual elif.

    This is a problem we're having with the builtin ast module,
    which splits `elif` branches into a separate if statement.
    Unfortunately we need to know the exact type in certain
    cases.  The token positions collected in process_tokens()
    (self._elifs) tell a real `elif` apart from an `else:`
    containing a nested `if`.

    Returns:
        bool: True if `node` is a genuine `elif` branch.
    """
    if isinstance(node.parent, astroid.If):
        orelse = node.parent.orelse
        # current if node must directly follow an "else"
        if orelse and orelse == [node]:
            if (node.lineno, node.col_offset) in self._elifs:
                return True
    # Fix: the original fell off the end and implicitly returned None on
    # the non-If-parent path; return an explicit (equally falsy) bool.
    return False
+
def _check_simplifiable_if(self, node):
    """Check if the given if node can be simplified.

    The if statement can be reduced to a boolean expression
    in some cases. For instance, if there are two branches
    and both of them return a boolean value that depends on
    the result of the statement's test, then this can be reduced
    to `bool(test)` without losing any functionality.
    """

    if self._is_actual_elif(node):
        # Not interested in if statements with multiple branches.
        return
    if len(node.orelse) != 1 or len(node.body) != 1:
        return

    # Check if both branches can be reduced.
    first_branch = node.body[0]
    else_branch = node.orelse[0]
    if isinstance(first_branch, astroid.Return):
        # Pattern: `if test: return X` / `else: return Y`.
        if not isinstance(else_branch, astroid.Return):
            return
        first_branch_is_bool = self._is_bool_const(first_branch)
        else_branch_is_bool = self._is_bool_const(else_branch)
        reduced_to = "'return bool(test)'"
    elif isinstance(first_branch, astroid.Assign):
        # Pattern: `if test: var = X` / `else: var = Y`.
        if not isinstance(else_branch, astroid.Assign):
            return

        # Check if we assign to the same value
        first_branch_targets = [
            target.name
            for target in first_branch.targets
            if isinstance(target, astroid.AssignName)
        ]
        else_branch_targets = [
            target.name
            for target in else_branch.targets
            if isinstance(target, astroid.AssignName)
        ]
        if not first_branch_targets or not else_branch_targets:
            return
        if sorted(first_branch_targets) != sorted(else_branch_targets):
            return

        first_branch_is_bool = self._is_bool_const(first_branch)
        else_branch_is_bool = self._is_bool_const(else_branch)
        reduced_to = "'var = bool(test)'"
    else:
        return

    if not first_branch_is_bool or not else_branch_is_bool:
        return
    if not first_branch.value.value:
        # This is a case that can't be easily simplified and
        # if it can be simplified, it will usually result in a
        # code that's harder to understand and comprehend.
        # Let's take for instance `arg and arg <= 3`. This could theoretically be
        # reduced to `not arg or arg > 3`, but the net result is that now the
        # condition is harder to understand, because it requires understanding of
        # an extra clause:
        #   * first, there is the negation of truthness with `not arg`
        #   * the second clause is `arg > 3`, which occurs when arg has a
        #     truth value, but it implies that `arg > 3` is equivalent
        #     with `arg and arg > 3`, which means that the user must
        #     think about this assumption when evaluating `arg > 3`.
        #     The original form is easier to grasp.
        return

    self.add_message("simplifiable-if-statement", node=node, args=(reduced_to,))
+
def process_tokens(self, tokens):
    """Record `elif` token positions and flag trailing-comma tuples.

    The positions collected here feed _is_actual_elif(), since the AST
    alone cannot distinguish `elif` from `else:` + nested `if`.
    """
    # Process tokens and look for 'if' or 'elif'
    for index, token in enumerate(tokens):
        token_string = token[1]
        if token_string == "elif":
            # AST exists by the time process_tokens is called, so
            # it's safe to assume tokens[index+1]
            # exists. tokens[index+1][2] is the elif's position as
            # reported by CPython and PyPy,
            # tokens[index][2] is the actual position and also is
            # reported by IronPython.
            self._elifs.extend([tokens[index][2], tokens[index + 1][2]])
        elif _is_trailing_comma(tokens, index):
            if self.linter.is_message_enabled("trailing-comma-tuple"):
                self.add_message("trailing-comma-tuple", line=token.start[0])
+
def leave_module(self, _):
    """Reset per-module state when leaving a module."""
    self._init()
+
@utils.check_messages("too-many-nested-blocks")
def visit_tryexcept(self, node):
    """Track try/except as one more nesting level."""
    self._check_nested_blocks(node)

# try/finally and while statements count as nesting levels the same way.
visit_tryfinally = visit_tryexcept
visit_while = visit_tryexcept
+
def _check_redefined_argument_from_local(self, name_node):
    """Warn when a local binding (loop/with/except target) shadows an argument.

    :param name_node: the AssignName node introduced by the local construct.
    """
    # Dummy names (per dummy-variables-rgx) are deliberately throwaway.
    if self._dummy_rgx and self._dummy_rgx.match(name_node.name):
        return
    if not name_node.lineno:
        # Unknown position, maybe it is a manually built AST?
        return

    scope = name_node.scope()
    if not isinstance(scope, astroid.FunctionDef):
        return

    for defined_argument in scope.args.nodes_of_class(
        astroid.AssignName, skip_klass=(astroid.Lambda,)
    ):
        if defined_argument.name == name_node.name:
            self.add_message(
                "redefined-argument-from-local",
                node=name_node,
                args=(name_node.name,),
            )
+
@utils.check_messages("redefined-argument-from-local", "too-many-nested-blocks")
def visit_for(self, node):
    """Check nesting depth and argument shadowing by the loop target(s)."""
    self._check_nested_blocks(node)

    for name in node.target.nodes_of_class(astroid.AssignName):
        self._check_redefined_argument_from_local(name)
+
@utils.check_messages("redefined-argument-from-local")
def visit_excepthandler(self, node):
    """Check `except ... as name` for shadowing a function argument."""
    if node.name and isinstance(node.name, astroid.AssignName):
        self._check_redefined_argument_from_local(node.name)
+
@utils.check_messages("redefined-argument-from-local")
def visit_with(self, node):
    """Check `with ... as name` targets for shadowing a function argument."""
    for _, names in node.items:
        # `names` is None for context managers without an `as` clause.
        if not names:
            continue
        for name in names.nodes_of_class(astroid.AssignName):
            self._check_redefined_argument_from_local(name)
+
def _check_superfluous_else(self, node, msg_id, returning_node_class):
    """Shared body for the no-else-{return,raise,break,continue} checks.

    :param node: the If node under inspection.
    :param msg_id: message to emit when the else is superfluous.
    :param returning_node_class: node type (Return/Raise/Break/Continue)
        that makes every if-branch terminate.
    """
    if not node.orelse:
        # Not interested in if statements without else.
        return

    if self._is_actual_elif(node):
        # Not interested in elif nodes; only if
        return

    if _if_statement_is_always_returning(node, returning_node_class):
        orelse = node.orelse[0]
        # Report "elif" rather than "else" when the orelse is an elif branch.
        followed_by_elif = (orelse.lineno, orelse.col_offset) in self._elifs
        self.add_message(
            msg_id, node=node, args="elif" if followed_by_elif else "else"
        )
+
def _check_superfluous_else_return(self, node):
    """Emit no-else-return when every if-branch ends in `return`."""
    return self._check_superfluous_else(
        node, msg_id="no-else-return", returning_node_class=astroid.Return
    )
+
def _check_superfluous_else_raise(self, node):
    """Emit no-else-raise when every if-branch ends in `raise`."""
    return self._check_superfluous_else(
        node, msg_id="no-else-raise", returning_node_class=astroid.Raise
    )
+
def _check_superfluous_else_break(self, node):
    """Emit no-else-break when every if-branch ends in `break`."""
    return self._check_superfluous_else(
        node, msg_id="no-else-break", returning_node_class=astroid.Break
    )
+
def _check_superfluous_else_continue(self, node):
    """Emit no-else-continue when every if-branch ends in `continue`."""
    return self._check_superfluous_else(
        node, msg_id="no-else-continue", returning_node_class=astroid.Continue
    )
+
def _check_consider_get(self, node):
    """Emit consider-using-get for `if key in dct: x = dct[key]` patterns.

    Also covers the variant with a single-assignment else branch that
    provides a default (dict.get's second argument).
    """

    def type_and_name_are_equal(node_a, node_b):
        # Two nodes are "equal" when both are the same Name/AssignName
        # or both are Consts with the same value.
        for _type in [astroid.Name, astroid.AssignName]:
            if all(isinstance(_node, _type) for _node in [node_a, node_b]):
                return node_a.name == node_b.name
        if all(isinstance(_node, astroid.Const) for _node in [node_a, node_b]):
            return node_a.value == node_b.value
        return False

    # The if-body must be exactly `target = dct[key]` where the test is
    # `key in dct` and `dct` is inferred to be a dict.
    if_block_ok = (
        isinstance(node.test, astroid.Compare)
        and len(node.body) == 1
        and isinstance(node.body[0], astroid.Assign)
        and isinstance(node.body[0].value, astroid.Subscript)
        and type_and_name_are_equal(node.body[0].value.value, node.test.ops[0][1])
        and isinstance(node.body[0].value.slice, astroid.Index)
        and type_and_name_are_equal(node.body[0].value.slice.value, node.test.left)
        and len(node.body[0].targets) == 1
        and isinstance(node.body[0].targets[0], astroid.AssignName)
        and isinstance(utils.safe_infer(node.test.ops[0][1]), astroid.Dict)
    )

    if if_block_ok and not node.orelse:
        self.add_message("consider-using-get", node=node)
    elif (
        if_block_ok
        and len(node.orelse) == 1
        and isinstance(node.orelse[0], astroid.Assign)
        and type_and_name_are_equal(
            node.orelse[0].targets[0], node.body[0].targets[0]
        )
        and len(node.orelse[0].targets) == 1
    ):
        self.add_message("consider-using-get", node=node)
+
@utils.check_messages(
    "too-many-nested-blocks",
    "simplifiable-if-statement",
    "no-else-return",
    "no-else-raise",
    "no-else-break",
    "no-else-continue",
    "consider-using-get",
)
def visit_if(self, node):
    """Dispatch all if-statement refactoring checks for this node."""
    self._check_simplifiable_if(node)
    self._check_nested_blocks(node)
    self._check_superfluous_else_return(node)
    self._check_superfluous_else_raise(node)
    self._check_superfluous_else_break(node)
    self._check_superfluous_else_continue(node)
    self._check_consider_get(node)
+
@utils.check_messages("simplifiable-if-expression")
def visit_ifexp(self, node):
    """Check ternary expressions for trivially simplifiable forms."""
    self._check_simplifiable_ifexp(node)
+
def _check_simplifiable_ifexp(self, node):
    """Flag `True if test else False` (and the inverse) ternaries.

    `X if test else Y` with boolean literals reduces to `test`,
    `bool(test)` or `not test` depending on the literals and whether
    the test is already a comparison (hence already a bool).
    """
    if not isinstance(node.body, astroid.Const) or not isinstance(
        node.orelse, astroid.Const
    ):
        return

    if not isinstance(node.body.value, bool) or not isinstance(
        node.orelse.value, bool
    ):
        return

    if isinstance(node.test, astroid.Compare):
        # Comparisons already yield a bool; no bool() wrapper needed.
        test_reduced_to = "test"
    else:
        test_reduced_to = "bool(test)"

    if (node.body.value, node.orelse.value) == (True, False):
        reduced_to = "'{}'".format(test_reduced_to)
    elif (node.body.value, node.orelse.value) == (False, True):
        reduced_to = "'not test'"
    else:
        # (True, True) / (False, False): constant result; not our message.
        return

    self.add_message("simplifiable-if-expression", node=node, args=(reduced_to,))
+
@utils.check_messages(
    "too-many-nested-blocks", "inconsistent-return-statements", "useless-return"
)
def leave_functiondef(self, node):
    """Run the function-level checks once the whole body has been visited."""
    # check left-over nested blocks stack
    self._emit_nested_blocks_message_if_needed(self._nested_blocks)
    # new scope = reinitialize the stack of nested blocks
    self._nested_blocks = []
    # check consistent return statements
    self._check_consistent_returns(node)
    # check for single return or return None at the end
    self._check_return_at_the_end(node)
    # Clear the collected Return nodes for this function name.
    self._return_nodes[node.name] = []
+
@utils.check_messages("stop-iteration-return")
def visit_raise(self, node):
    """Check explicit raises of StopIteration inside generators."""
    self._check_stop_iteration_inside_generator(node)
+
def _check_stop_iteration_inside_generator(self, node):
    """Check if an exception of type StopIteration is raised inside a generator"""
    frame = node.frame()
    if not isinstance(frame, astroid.FunctionDef) or not frame.is_generator():
        return
    # Skip raises wrapped in a try/except that handles StopIteration.
    if utils.node_ignores_exception(node, StopIteration):
        return
    if not node.exc:
        # Bare `raise` re-raises; out of scope here.
        return
    exc = utils.safe_infer(node.exc)
    if exc is None or exc is astroid.Uninferable:
        return
    # Subclasses of StopIteration count too.
    if self._check_exception_inherit_from_stopiteration(exc):
        self.add_message("stop-iteration-return", node=node)
+
@staticmethod
def _check_exception_inherit_from_stopiteration(exc):
    """Return True if the exception node in argument inherit from StopIteration"""
    stopiteration_qname = "{}.StopIteration".format(utils.EXCEPTIONS_MODULE)
    # Walk the full MRO so indirect subclasses are detected as well.
    return any(_class.qname() == stopiteration_qname for _class in exc.mro())
+
def _check_consider_using_comprehension_constructor(self, node):
    """Flag `dict([...])` / `set([...])` fed by a list comprehension.

    These can be written directly as a dict/set comprehension.
    """
    if (
        isinstance(node.func, astroid.Name)
        and node.args
        and isinstance(node.args[0], astroid.ListComp)
    ):
        if node.func.name == "dict" and not isinstance(
            node.args[0].elt, astroid.Call
        ):
            # A Call elt may yield a non-pair, so only plain elts qualify.
            message_name = "consider-using-dict-comprehension"
            self.add_message(message_name, node=node)
        elif node.func.name == "set":
            message_name = "consider-using-set-comprehension"
            self.add_message(message_name, node=node)
+
@utils.check_messages(
    "stop-iteration-return",
    "consider-using-dict-comprehension",
    "consider-using-set-comprehension",
    "consider-using-sys-exit",
)
def visit_call(self, node):
    """Dispatch all call-expression refactoring checks for this node."""
    self._check_raising_stopiteration_in_generator_next_call(node)
    self._check_consider_using_comprehension_constructor(node)
    self._check_quit_exit_call(node)
+
@staticmethod
def _has_exit_in_scope(scope):
    """Return True if `exit` was imported into the given scope."""
    exit_func = scope.locals.get("exit")
    return bool(
        exit_func and isinstance(exit_func[0], (astroid.ImportFrom, astroid.Import))
    )
+
def _check_quit_exit_call(self, node):
    """Emit consider-using-sys-exit for calls to the exit()/quit() builtins."""
    if isinstance(node.func, astroid.Name) and node.func.name in BUILTIN_EXIT_FUNCS:
        # If we have `exit` imported from `sys` in the current or global scope, exempt this instance.
        local_scope = node.scope()
        if self._has_exit_in_scope(local_scope) or self._has_exit_in_scope(
            node.root()
        ):
            return
        self.add_message("consider-using-sys-exit", node=node)
+
def _check_raising_stopiteration_in_generator_next_call(self, node):
    """Check if a StopIteration exception is raised by the call to next function

    If the next value has a default value, then do not add message.

    :param node: Check to see if this Call node is a next function
    :type node: :class:`astroid.node_classes.Call`
    """

    def _looks_like_infinite_iterator(param):
        # Iterators known never to be exhausted (e.g. itertools.count)
        # cannot raise StopIteration from next().
        inferred = utils.safe_infer(param)
        if inferred:
            return inferred.qname() in KNOWN_INFINITE_ITERATORS
        return False

    if isinstance(node.func, astroid.Attribute):
        # A next() method, which is not what we want.
        return

    inferred = utils.safe_infer(node.func)
    if getattr(inferred, "name", "") == "next":
        frame = node.frame()
        # The next builtin can only have up to two
        # positional arguments and no keyword arguments
        has_sentinel_value = len(node.args) > 1
        if (
            isinstance(frame, astroid.FunctionDef)
            and frame.is_generator()
            and not has_sentinel_value
            and not utils.node_ignores_exception(node, StopIteration)
            and not _looks_like_infinite_iterator(node.args[0])
        ):
            self.add_message("stop-iteration-return", node=node)
+
def _check_nested_blocks(self, node):
    """Update and check the number of nested blocks

    Maintains self._nested_blocks as a stack of the currently-open
    nesting statements and emits too-many-nested-blocks when a closed
    group exceeded the configured maximum.
    """
    # only check block levels inside functions or methods
    if not isinstance(node.scope(), astroid.FunctionDef):
        return
    # messages are triggered on leaving the nested block. Here we save the
    # stack in case the current node isn't nested in the previous one
    nested_blocks = self._nested_blocks[:]
    if node.parent == node.scope():
        self._nested_blocks = [node]
    else:
        # go through ancestors from the most nested to the less
        for ancestor_node in reversed(self._nested_blocks):
            if ancestor_node == node.parent:
                break
            self._nested_blocks.pop()
        # if the node is an elif, this should not be another nesting level
        if isinstance(node, astroid.If) and self._is_actual_elif(node):
            if self._nested_blocks:
                self._nested_blocks.pop()
        self._nested_blocks.append(node)

    # send message only once per group of nested blocks
    if len(nested_blocks) > len(self._nested_blocks):
        self._emit_nested_blocks_message_if_needed(nested_blocks)
+
def _emit_nested_blocks_message_if_needed(self, nested_blocks):
    """Emit too-many-nested-blocks if the stack exceeds max-nested-blocks."""
    if len(nested_blocks) > self.config.max_nested_blocks:
        self.add_message(
            "too-many-nested-blocks",
            # Report on the outermost block of the group.
            node=nested_blocks[0],
            args=(len(nested_blocks), self.config.max_nested_blocks),
        )
+
@staticmethod
def _duplicated_isinstance_types(node):
    """Get the duplicated types from the underlying isinstance calls.

    :param astroid.BoolOp node: Node which should contain a bunch of isinstance calls.
    :returns: Dictionary of the comparison objects from the isinstance calls,
              to duplicate values from consecutive calls.
    :rtype: dict
    """
    duplicated_objects = set()
    all_types = collections.defaultdict(set)

    for call in node.values:
        # Only consider two-argument calls that infer to the real
        # isinstance builtin (not a shadowing redefinition).
        if not isinstance(call, astroid.Call) or len(call.args) != 2:
            continue

        inferred = utils.safe_infer(call.func)
        if not inferred or not utils.is_builtin_object(inferred):
            continue

        if inferred.name != "isinstance":
            continue

        # Key duplicates by source text of the first argument.
        isinstance_object = call.args[0].as_string()
        isinstance_types = call.args[1]

        if isinstance_object in all_types:
            duplicated_objects.add(isinstance_object)

        if isinstance(isinstance_types, astroid.Tuple):
            elems = [
                class_type.as_string() for class_type in isinstance_types.itered()
            ]
        else:
            elems = [isinstance_types.as_string()]
        all_types[isinstance_object].update(elems)

    # Remove all keys which are not duplicated
    return {
        key: value for key, value in all_types.items() if key in duplicated_objects
    }
+
def _check_consider_merging_isinstance(self, node):
    """Check isinstance calls which can be merged together.

    Emits consider-merging-isinstance for each object appearing in more
    than one isinstance() call of an `or`-joined boolean expression,
    suggesting a single call with a tuple of types.

    :param astroid.BoolOp node: the boolean expression to inspect.
    """
    if node.op != "or":
        return

    first_args = self._duplicated_isinstance_types(node)
    for duplicated_name, class_names in first_args.items():
        # sorted() takes any iterable; the original wrapped the set in a
        # pointless identity generator expression.
        names = sorted(class_names)
        self.add_message(
            "consider-merging-isinstance",
            node=node,
            args=(duplicated_name, ", ".join(names)),
        )
+
def _check_consider_using_in(self, node):
    """Flag `x == a or x == b` / `x != a and x != b` chains.

    Suggests rewriting them as `x in (a, b)` / `x not in (a, b)`.
    """
    # `or` must pair with `==`, `and` with `!=` for the rewrite to hold.
    allowed_ops = {"or": "==", "and": "!="}

    if node.op not in allowed_ops or len(node.values) < 2:
        return

    for value in node.values:
        if (
            not isinstance(value, astroid.Compare)
            or len(value.ops) != 1
            or value.ops[0][0] not in allowed_ops[node.op]
        ):
            return
        for comparable in value.left, value.ops[0][1]:
            if isinstance(comparable, astroid.Call):
                # Calls may have side effects; rewriting would change
                # how often they execute.
                return

    # Gather variables and values from comparisons
    variables, values = [], []
    for value in node.values:
        variable_set = set()
        for comparable in value.left, value.ops[0][1]:
            if isinstance(comparable, astroid.Name):
                variable_set.add(comparable.as_string())
            values.append(comparable.as_string())
        variables.append(variable_set)

    # Look for (common-)variables that occur in all comparisons
    common_variables = reduce(lambda a, b: a.intersection(b), variables)

    if not common_variables:
        return

    # Gather information for the suggestion
    common_variable = sorted(list(common_variables))[0]
    comprehension = "in" if node.op == "or" else "not in"
    # Deduplicate while preserving first-seen order.
    values = list(collections.OrderedDict.fromkeys(values))
    values.remove(common_variable)
    # A one-element tuple needs the trailing comma.
    values_string = ", ".join(values) if len(values) != 1 else values[0] + ","
    suggestion = "%s %s (%s)" % (common_variable, comprehension, values_string)

    self.add_message("consider-using-in", node=node, args=(suggestion,))
+
def _check_chained_comparison(self, node):
    """Check if there is any chained comparison in the expression.

    Add a refactoring message if a boolOp contains comparison like a < b and b < c,
    which can be chained as a < b < c.

    Care is taken to avoid simplifying a < b < c and b < d.
    """
    if node.op != "and" or len(node.values) < 2:
        return

    def _find_lower_upper_bounds(comparison_node, uses):
        # For each operand (a name or constant) record which comparison
        # nodes bound it from below and from above.
        left_operand = comparison_node.left
        for operator, right_operand in comparison_node.ops:
            for operand in (left_operand, right_operand):
                value = None
                if isinstance(operand, astroid.Name):
                    value = operand.name
                elif isinstance(operand, astroid.Const):
                    value = operand.value

                if value is None:
                    continue

                if operator in ("<", "<="):
                    if operand is left_operand:
                        uses[value]["lower_bound"].add(comparison_node)
                    elif operand is right_operand:
                        uses[value]["upper_bound"].add(comparison_node)
                elif operator in (">", ">="):
                    if operand is left_operand:
                        uses[value]["upper_bound"].add(comparison_node)
                    elif operand is right_operand:
                        uses[value]["lower_bound"].add(comparison_node)
            left_operand = right_operand

    uses = collections.defaultdict(
        lambda: {"lower_bound": set(), "upper_bound": set()}
    )
    for comparison_node in node.values:
        if isinstance(comparison_node, astroid.Compare):
            _find_lower_upper_bounds(comparison_node, uses)

    for _, bounds in uses.items():
        num_shared = len(bounds["lower_bound"].intersection(bounds["upper_bound"]))
        num_lower_bounds = len(bounds["lower_bound"])
        num_upper_bounds = len(bounds["upper_bound"])
        # A value bounded from both sides by *different* comparisons can
        # be merged into one chained comparison.
        if num_shared < num_lower_bounds and num_shared < num_upper_bounds:
            self.add_message("chained-comparison", node=node)
            break
+
@utils.check_messages(
    "consider-merging-isinstance", "consider-using-in", "chained-comparison"
)
def visit_boolop(self, node):
    """Dispatch all boolean-operator refactoring checks for this node."""
    self._check_consider_merging_isinstance(node)
    self._check_consider_using_in(node)
    self._check_chained_comparison(node)
+
@staticmethod
def _is_simple_assignment(node):
    """Return True for a plain `name = other_name` assignment."""
    return (
        isinstance(node, astroid.Assign)
        and len(node.targets) == 1
        and isinstance(node.targets[0], astroid.node_classes.AssignName)
        and isinstance(node.value, astroid.node_classes.Name)
    )
+
def _check_swap_variables(self, node):
    """Emit consider-swap-variables for `t = a; a = b; b = t` sequences."""
    sibling = node.next_sibling()
    if not sibling or not sibling.next_sibling():
        return
    assignments = [node, sibling, sibling.next_sibling()]
    if not all(self._is_simple_assignment(stmt) for stmt in assignments):
        return
    if any(stmt in self._reported_swap_nodes for stmt in assignments):
        return
    targets = [stmt.targets[0].name for stmt in assignments]
    sources = [stmt.value.name for stmt in assignments]
    # A swap chain reads each name from the previous assignment's target,
    # wrapping around at the ends.
    if targets[0] == sources[-1] and targets[1:] == sources[:-1]:
        self._reported_swap_nodes.update(assignments)
        self.add_message("consider-swap-variables", node=node)
+
@utils.check_messages(
    "simplify-boolean-expression",
    "consider-using-ternary",
    "consider-swap-variables",
)
def visit_assign(self, node):
    """Check assignments for swap patterns and `cond and a or b` idioms."""
    self._check_swap_variables(node)
    if self._is_and_or_ternary(node.value):
        cond, truth_value, false_value = self._and_or_ternary_arguments(node.value)
    else:
        return

    if all(
        isinstance(value, astroid.Compare) for value in (truth_value, false_value)
    ):
        return

    inferred_truth_value = utils.safe_infer(truth_value)
    if inferred_truth_value in (None, astroid.Uninferable):
        # Cannot prove it falsy; assume truthy so the ternary rewrite holds.
        truth_boolean_value = True
    else:
        truth_boolean_value = truth_value.bool_value()

    if truth_boolean_value is False:
        # `cond and False or X` always yields X.
        message = "simplify-boolean-expression"
        suggestion = false_value.as_string()
    else:
        message = "consider-using-ternary"
        suggestion = "{truth} if {cond} else {false}".format(
            truth=truth_value.as_string(),
            cond=cond.as_string(),
            false=false_value.as_string(),
        )
    self.add_message(message, node=node, args=(suggestion,))

# The same patterns apply to returned expressions.
visit_return = visit_assign
+
def _check_consider_using_join(self, aug_assign):
    """
    We start with the augmented assignment and work our way upwards.
    Names of variables for nodes if match successful:
        result = ''                    # assign
        for number in ['1', '2', '3']  # for_loop
            result += number           # aug_assign
    """
    for_loop = aug_assign.parent
    if not isinstance(for_loop, astroid.For) or len(for_loop.body) > 1:
        return
    # The accumulator must be initialized right before the loop.
    assign = for_loop.previous_sibling()
    if not isinstance(assign, astroid.Assign):
        return
    result_assign_names = {
        target.name
        for target in assign.targets
        if isinstance(target, astroid.AssignName)
    }

    # String `+=` of the loop variable onto a str-initialized accumulator.
    is_concat_loop = (
        aug_assign.op == "+="
        and isinstance(aug_assign.target, astroid.AssignName)
        and len(for_loop.body) == 1
        and aug_assign.target.name in result_assign_names
        and isinstance(assign.value, astroid.Const)
        and isinstance(assign.value.value, str)
        and isinstance(aug_assign.value, astroid.Name)
        and aug_assign.value.name == for_loop.target.name
    )
    if is_concat_loop:
        self.add_message("consider-using-join", node=aug_assign)
+
@utils.check_messages("consider-using-join")
def visit_augassign(self, node):
    """Check augmented assignments for string-concatenation loops."""
    self._check_consider_using_join(node)
+
@utils.check_messages("unnecessary-comprehension")
def visit_comprehension(self, node):
    """Check comprehensions that merely copy their iterable."""
    self._check_unnecessary_comprehension(node)
+
def _check_unnecessary_comprehension(self, node):
    """Flag identity comprehensions like `[x for x in xs]` or `{k: v for k, v in d}`.

    These can be replaced by the list/set/dict constructor.
    """
    # Only single-generator, filter-free, synchronous comprehensions can
    # be identity copies; generator expressions are excluded.
    if (
        isinstance(node.parent, astroid.GeneratorExp)
        or len(node.ifs) != 0
        or len(node.parent.generators) != 1
        or node.is_async
    ):
        return

    if (
        isinstance(node.parent, astroid.DictComp)
        and isinstance(node.parent.key, astroid.Name)
        and isinstance(node.parent.value, astroid.Name)
        and isinstance(node.target, astroid.Tuple)
        and all(isinstance(elt, astroid.AssignName) for elt in node.target.elts)
    ):
        # Dict case: `{k: v for k, v in it}`.
        expr_list = [node.parent.key.name, node.parent.value.name]
        target_list = [elt.name for elt in node.target.elts]

    elif isinstance(node.parent, (astroid.ListComp, astroid.SetComp)):
        expr = node.parent.elt
        if isinstance(expr, astroid.Name):
            # NOTE: a single name yields a plain str here; the final
            # equality then compares str == str, which is intended.
            expr_list = expr.name
        elif isinstance(expr, astroid.Tuple):
            if any(not isinstance(elt, astroid.Name) for elt in expr.elts):
                return
            expr_list = [elt.name for elt in expr.elts]
        else:
            expr_list = []
        target = node.parent.generators[0].target
        target_list = (
            target.name
            if isinstance(target, astroid.AssignName)
            else (
                [
                    elt.name
                    for elt in target.elts
                    if isinstance(elt, astroid.AssignName)
                ]
                if isinstance(target, astroid.Tuple)
                else []
            )
        )
    else:
        return
    # Identity comprehension: expression mirrors the target exactly.
    if expr_list == target_list != []:
        self.add_message("unnecessary-comprehension", node=node)
+
@staticmethod
def _is_and_or_ternary(node):
    """
    Returns true if node is 'condition and true_value or false_value' form.

    All of: condition, true_value and false_value should not be a complex boolean expression
    """
    return (
        isinstance(node, astroid.BoolOp)
        and node.op == "or"
        and len(node.values) == 2
        and isinstance(node.values[0], astroid.BoolOp)
        and not isinstance(node.values[1], astroid.BoolOp)
        and node.values[0].op == "and"
        and not isinstance(node.values[0].values[1], astroid.BoolOp)
        and len(node.values[0].values) == 2
    )
+
@staticmethod
def _and_or_ternary_arguments(node):
    """Split `cond and true_value or false_value` into its three parts."""
    false_value = node.values[1]
    condition, true_value = node.values[0].values
    return condition, true_value, false_value
+
def visit_functiondef(self, node):
    """Collect the function's Return nodes, skipping nested functions."""
    self._return_nodes[node.name] = list(
        node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef)
    )
+
def _check_consistent_returns(self, node):
    """Check that all return statements inside a function are consistent.

    Return statements are consistent if:
        - all returns are explicit and if there is no implicit return;
        - all returns are empty and if there is, possibly, an implicit return.

    Args:
        node (astroid.FunctionDef): the function holding the return statements.

    """
    # explicit return statements are those with a not None value
    explicit_returns = [
        _node for _node in self._return_nodes[node.name] if _node.value is not None
    ]
    if not explicit_returns:
        return
    # Consistent iff every return is explicit AND every execution path
    # ends with an explicit terminator.
    if len(explicit_returns) == len(
        self._return_nodes[node.name]
    ) and self._is_node_return_ended(node):
        return
    self.add_message("inconsistent-return-statements", node=node)
+
def _is_node_return_ended(self, node):
    """Check if the node ends with an explicit return statement.

    Args:
        node (astroid.NodeNG): node to be checked.

    Returns:
        bool: True if the node ends with an explicit statement, False otherwise.

    """
    # Recursion base case
    if isinstance(node, astroid.Return):
        return True
    if isinstance(node, astroid.Call):
        # A call to a function that never returns (per the
        # never-returning-functions option) also terminates the path.
        try:
            funcdef_node = node.func.inferred()[0]
            if self._is_function_def_never_returning(funcdef_node):
                return True
        except astroid.InferenceError:
            pass
    # Avoid the check inside while loop as we don't know
    # if they will be completed
    if isinstance(node, astroid.While):
        return True
    if isinstance(node, astroid.Raise):
        # a Raise statement doesn't need to end with a return statement
        # but if the exception raised is handled, then the handler has to
        # ends with a return statement
        if not node.exc:
            # Ignore bare raises
            return True
        if not utils.is_node_inside_try_except(node):
            # If the raise statement is not inside a try/except statement
            # then the exception is raised and cannot be caught. No need
            # to infer it.
            return True
        exc = utils.safe_infer(node.exc)
        if exc is None or exc is astroid.Uninferable:
            return False
        exc_name = exc.pytype().split(".")[-1]
        handlers = utils.get_exception_handlers(node, exc_name)
        handlers = list(handlers) if handlers is not None else []
        if handlers:
            # among all the handlers handling the exception at least one
            # must end with a return statement
            return any(
                self._is_node_return_ended(_handler) for _handler in handlers
            )
        # if no handlers handle the exception then it's ok
        return True
    if isinstance(node, astroid.If):
        # if statement is returning if there are exactly two return statements in its
        # children : one for the body part, the other for the orelse part
        # Do not check if inner function definition are return ended.
        is_orelse_returning = any(
            self._is_node_return_ended(_ore)
            for _ore in node.orelse
            if not isinstance(_ore, astroid.FunctionDef)
        )
        is_if_returning = any(
            self._is_node_return_ended(_ifn)
            for _ifn in node.body
            if not isinstance(_ifn, astroid.FunctionDef)
        )
        return is_if_returning and is_orelse_returning
    # recurses on the children of the node except for those which are except handler
    # because one cannot be sure that the handler will really be used
    return any(
        self._is_node_return_ended(_child)
        for _child in node.get_children()
        if not isinstance(_child, astroid.ExceptHandler)
    )
+
def _is_function_def_never_returning(self, node):
    """Return True if the function never returns. False otherwise.

    Args:
        node (astroid.FunctionDef): function definition node to be analyzed.

    Returns:
        bool: True if the function never returns, False otherwise.
    """
    try:
        return node.qname() in self._never_returning_functions
    except TypeError:
        # qname() may not be available on every inferred object.
        return False
+
def _check_return_at_the_end(self, node):
    """Check for presence of a *single* return statement at the end of a
    function. "return" or "return None" are useless because None is the
    default return type if they are missing.

    NOTE: produces a message only if there is a single return statement
    in the function body. Otherwise _check_consistent_returns() is called!
    Per its implementation and PEP8 we can have a "return None" at the end
    of the function body if there are other return statements before that!
    """
    if len(self._return_nodes[node.name]) > 1:
        return
    if len(node.body) <= 1:
        # A single-statement body would become empty without the return.
        return

    last = node.body[-1]
    if isinstance(last, astroid.Return):
        # e.g. "return"
        if last.value is None:
            self.add_message("useless-return", node=node)
        # e.g. "return None"
        elif isinstance(last.value, astroid.Const) and (last.value.value is None):
            self.add_message("useless-return", node=node)
+
+
class RecommandationChecker(checkers.BaseChecker):
    """Checks for loops and calls with a more idiomatic alternative.

    NOTE: the (misspelled) class name is kept as-is because it is the
    historical public name of this checker.
    """

    __implements__ = (interfaces.IAstroidChecker,)
    name = "refactoring"
    msgs = {
        "C0200": (
            "Consider using enumerate instead of iterating with range and len",
            "consider-using-enumerate",
            "Emitted when code that iterates with range and len is "
            "encountered. Such code can be simplified by using the "
            "enumerate builtin.",
        ),
        "C0201": (
            "Consider iterating the dictionary directly instead of calling .keys()",
            "consider-iterating-dictionary",
            "Emitted when the keys of a dictionary are iterated through the .keys() "
            "method. It is enough to just iterate through the dictionary itself, as "
            'in "for key in dictionary".',
        ),
    }

    @staticmethod
    def _is_builtin(node, function):
        """Return True if `node` infers to the builtin named `function`."""
        inferred = utils.safe_infer(node)
        if not inferred:
            return False
        return utils.is_builtin_object(inferred) and inferred.name == function

    @utils.check_messages("consider-iterating-dictionary")
    def visit_call(self, node):
        """Emit consider-iterating-dictionary for `for k in d.keys()`."""
        if not isinstance(node.func, astroid.Attribute):
            return
        if node.func.attrname != "keys":
            return
        if not isinstance(node.parent, (astroid.For, astroid.Comprehension)):
            return

        # Only flag .keys() bound to an actual dict.
        inferred = utils.safe_infer(node.func)
        if not isinstance(inferred, astroid.BoundMethod) or not isinstance(
            inferred.bound, astroid.Dict
        ):
            return

        # Fix: the original re-tested isinstance(node.parent, (For,
        # Comprehension)) here although the guard above already
        # guaranteed it; the dead re-check is removed.
        self.add_message("consider-iterating-dictionary", node=node)

    @utils.check_messages("consider-using-enumerate")
    def visit_for(self, node):
        """Emit a convention whenever range and len are used for indexing."""
        # Verify that we have a `range([start], len(...), [stop])` call and
        # that the object which is iterated is used as a subscript in the
        # body of the for.

        # Is it a proper range call?
        if not isinstance(node.iter, astroid.Call):
            return
        if not self._is_builtin(node.iter.func, "range"):
            return
        if len(node.iter.args) == 2 and not _is_constant_zero(node.iter.args[0]):
            return
        if len(node.iter.args) > 2:
            return

        # Is it a proper len call?
        if not isinstance(node.iter.args[-1], astroid.Call):
            return
        second_func = node.iter.args[-1].func
        if not self._is_builtin(second_func, "len"):
            return
        len_args = node.iter.args[-1].args
        if not len_args or len(len_args) != 1:
            return
        iterating_object = len_args[0]
        if not isinstance(iterating_object, astroid.Name):
            return
        # If we're defining __iter__ on self, enumerate won't work
        scope = node.scope()
        if iterating_object.name == "self" and scope.name == "__iter__":
            return

        # Verify that the body of the for loop uses a subscript
        # with the object that was iterated. This uses some heuristics
        # in order to make sure that the same object is used in the
        # for body.
        for child in node.body:
            for subscript in child.nodes_of_class(astroid.Subscript):
                if not isinstance(subscript.value, astroid.Name):
                    continue
                if not isinstance(subscript.slice, astroid.Index):
                    continue
                if not isinstance(subscript.slice.value, astroid.Name):
                    continue
                if subscript.slice.value.name != node.target.name:
                    continue
                if iterating_object.name != subscript.value.name:
                    continue
                if subscript.value.scope() != node.scope():
                    # Ignore this subscript if it's not in the same
                    # scope. This means that in the body of the for
                    # loop, another scope was created, where the same
                    # name for the iterating object was used.
                    continue
                self.add_message("consider-using-enumerate", node=node)
                return
+
+
+class NotChecker(checkers.BaseChecker):
+ """checks for too many not in comparison expressions
+
+ - "not not" should trigger a warning
+ - "not" followed by a comparison should trigger a warning
+ """
+
+ __implements__ = (interfaces.IAstroidChecker,)
+ msgs = {
+ "C0113": (
+ 'Consider changing "%s" to "%s"',
+ "unneeded-not",
+ "Used when a boolean expression contains an unneeded negation.",
+ )
+ }
+ name = "refactoring"
+ # Map of each comparison operator to its logical negation, used to
+ # build the suggested replacement text.
+ reverse_op = {
+ "<": ">=",
+ "<=": ">",
+ ">": "<=",
+ ">=": "<",
+ "==": "!=",
+ "!=": "==",
+ "in": "not in",
+ "is": "is not",
+ }
+ # sets are not ordered, so for example "not set(LEFT_VALS) <= set(RIGHT_VALS)" is
+ # not equivalent to "set(LEFT_VALS) > set(RIGHT_VALS)"
+ skipped_nodes = (astroid.Set,)
+ # 'builtins' py3, '__builtin__' py2
+ skipped_classnames = [
+ "%s.%s" % (builtins.__name__, qname) for qname in ("set", "frozenset")
+ ]
+
+ @utils.check_messages("unneeded-not")
+ def visit_unaryop(self, node):
+ # Only `not <operand>` is of interest (not unary minus etc.).
+ if node.op != "not":
+ return
+ operand = node.operand
+
+ if isinstance(operand, astroid.UnaryOp) and operand.op == "not":
+ # Double negation: suggest the inner operand directly.
+ self.add_message(
+ "unneeded-not",
+ node=node,
+ args=(node.as_string(), operand.operand.as_string()),
+ )
+ elif isinstance(operand, astroid.Compare):
+ left = operand.left
+ # ignore multiple comparisons
+ if len(operand.ops) > 1:
+ return
+ operator, right = operand.ops[0]
+ if operator not in self.reverse_op:
+ return
+ # Ignore __ne__ as function of __eq__
+ frame = node.frame()
+ if frame.name == "__ne__" and operator == "==":
+ return
+ # Skip operands whose type cannot be inferred, or whose ordering
+ # is partial (sets/frozensets), where negation is not reversible.
+ for _type in (utils.node_type(left), utils.node_type(right)):
+ if not _type:
+ return
+ if isinstance(_type, self.skipped_nodes):
+ return
+ if (
+ isinstance(_type, astroid.Instance)
+ and _type.qname() in self.skipped_classnames
+ ):
+ return
+ suggestion = "%s %s %s" % (
+ left.as_string(),
+ self.reverse_op[operator],
+ right.as_string(),
+ )
+ self.add_message(
+ "unneeded-not", node=node, args=(node.as_string(), suggestion)
+ )
+
+
+class LenChecker(checkers.BaseChecker):
+ """Checks for incorrect usage of len() inside conditions.
+ Pep8 states:
+ For sequences, (strings, lists, tuples), use the fact that empty sequences are false.
+
+ Yes: if not seq:
+ if seq:
+
+ No: if len(seq):
+ if not len(seq):
+
+ Problems detected:
+ * if len(sequence):
+ * if not len(sequence):
+ * elif len(sequence):
+ * elif not len(sequence):
+ * while len(sequence):
+ * while not len(sequence):
+ * assert len(sequence):
+ * assert not len(sequence):
+ """
+
+ __implements__ = (interfaces.IAstroidChecker,)
+
+ # configuration section name
+ name = "refactoring"
+ msgs = {
+ "C1801": (
+ "Do not use `len(SEQUENCE)` without comparison to determine if a sequence is empty",
+ "len-as-condition",
+ "Used when Pylint detects that len(sequence) is being used "
+ "without explicit comparison inside a condition to determine if a sequence is empty. "
+ "Instead of coercing the length to a boolean, either "
+ "rely on the fact that empty sequences are false or "
+ "compare the length against a scalar.",
+ )
+ }
+
+ priority = -2
+ options = ()
+
+ @utils.check_messages("len-as-condition")
+ def visit_call(self, node):
+ # a len(S) call is used inside a test condition
+ # could be if, while, assert or if expression statement
+ # e.g. `if len(S):`
+ if _is_len_call(node):
+ # the len() call could also be nested together with other
+ # boolean operations, e.g. `if z or len(x):`
+ parent = node.parent
+ while isinstance(parent, astroid.BoolOp):
+ parent = parent.parent
+
+ # we're finally out of any nested boolean operations so check if
+ # this len() call is part of a test condition
+ if not _node_is_test_condition(parent):
+ return
+ # The call must be inside (or be) the `.test` expression, not the
+ # body of the conditional statement.
+ if not (node is parent.test or parent.test.parent_of(node)):
+ return
+ self.add_message("len-as-condition", node=node)
+
+ @utils.check_messages("len-as-condition")
+ def visit_unaryop(self, node):
+ """`not len(S)` must become `not S` regardless if the parent block
+ is a test condition or something else (boolean expression)
+ e.g. `if not len(S):`"""
+ if (
+ isinstance(node, astroid.UnaryOp)
+ and node.op == "not"
+ and _is_len_call(node.operand)
+ ):
+ self.add_message("len-as-condition", node=node)
+
+
+def register(linter):
+ """Required method to auto register this checker."""
+ # Registers every checker defined in this module with the linter.
+ linter.register_checker(RefactoringChecker(linter))
+ linter.register_checker(NotChecker(linter))
+ linter.register_checker(RecommandationChecker(linter))
+ linter.register_checker(LenChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/similar.py b/venv/Lib/site-packages/pylint/checkers/similar.py
new file mode 100644
index 0000000..019b55f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/similar.py
@@ -0,0 +1,452 @@
+# Copyright (c) 2006, 2008-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012 Ry4an Brase <ry4an-hg@ry4an.org>
+# Copyright (c) 2012 Google, Inc.
+# Copyright (c) 2012 Anthony VEREZ <anthony.verez.external@cassidian.com>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2017 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+# pylint: disable=redefined-builtin
+"""a similarities / code duplication command line tool and pylint checker
+"""
+
+import sys
+from collections import defaultdict
+from getopt import getopt
+from itertools import groupby
+
+import astroid
+
+from pylint.checkers import BaseChecker, table_lines_from_stats
+from pylint.interfaces import IRawChecker
+from pylint.reporters.ureports.nodes import Table
+from pylint.utils import decoding_stream
+
+
+class Similar:
+ """finds copy-pasted lines of code in a project"""
+
+ def __init__(
+ self,
+ min_lines=4,
+ ignore_comments=False,
+ ignore_docstrings=False,
+ ignore_imports=False,
+ ):
+ # min_lines: minimum number of identical consecutive lines to report.
+ self.min_lines = min_lines
+ self.ignore_comments = ignore_comments
+ self.ignore_docstrings = ignore_docstrings
+ self.ignore_imports = ignore_imports
+ # One LineSet per appended stream/file.
+ self.linesets = []
+
+ def append_stream(self, streamid, stream, encoding=None):
+ """append a file to search for similarities"""
+ if encoding is None:
+ readlines = stream.readlines
+ else:
+ readlines = decoding_stream(stream, encoding).readlines
+ try:
+ self.linesets.append(
+ LineSet(
+ streamid,
+ readlines(),
+ self.ignore_comments,
+ self.ignore_docstrings,
+ self.ignore_imports,
+ )
+ )
+ except UnicodeDecodeError:
+ # Undecodable files are silently skipped (best effort).
+ pass
+
+ def run(self):
+ """start looking for similarities and display results on stdout"""
+ self._display_sims(self._compute_sims())
+
+ def _compute_sims(self):
+ """compute similarities in appended files
+
+ Returns a list of (num_lines, couples) sorted by size, largest first,
+ where couples is a set of (lineset, start_index) pairs.
+ """
+ no_duplicates = defaultdict(list)
+ for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
+ duplicate = no_duplicates[num]
+ # Merge overlapping pairs into a single group of locations.
+ for couples in duplicate:
+ if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
+ couples.add((lineset1, idx1))
+ couples.add((lineset2, idx2))
+ break
+ else:
+ duplicate.append({(lineset1, idx1), (lineset2, idx2)})
+ sims = []
+ for num, ensembles in no_duplicates.items():
+ for couples in ensembles:
+ sims.append((num, couples))
+ sims.sort()
+ sims.reverse()
+ return sims
+
+ def _display_sims(self, sims):
+ """display computed similarities on stdout"""
+ nb_lignes_dupliquees = 0
+ for num, couples in sims:
+ print()
+ print(num, "similar lines in", len(couples), "files")
+ couples = sorted(couples)
+ lineset = idx = None
+ for lineset, idx in couples:
+ print("==%s:%s" % (lineset.name, idx))
+ if lineset:
+ # Show the raw (unstripped) duplicated lines once.
+ for line in lineset._real_lines[idx : idx + num]:
+ print(" ", line.rstrip())
+ nb_lignes_dupliquees += num * (len(couples) - 1)
+ nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
+ print(
+ "TOTAL lines=%s duplicates=%s percent=%.2f"
+ % (
+ nb_total_lignes,
+ nb_lignes_dupliquees,
+ nb_lignes_dupliquees * 100.0 / nb_total_lignes,
+ )
+ )
+
+ def _find_common(self, lineset1, lineset2):
+ """find similarities in the two given linesets
+
+ Yields (num, lineset1, index1, lineset2, index2) tuples for each run
+ of more than self.min_lines matching non-blank stripped lines.
+ """
+ lines1 = lineset1.enumerate_stripped
+ lines2 = lineset2.enumerate_stripped
+ find = lineset2.find
+ index1 = 0
+ min_lines = self.min_lines
+ while index1 < len(lineset1):
+ skip = 1
+ num = 0
+ # For each position in lineset2 where the current line appears,
+ # extend the match forward as far as possible.
+ for index2 in find(lineset1[index1]):
+ non_blank = 0
+ for num, ((_, line1), (_, line2)) in enumerate(
+ zip(lines1(index1), lines2(index2))
+ ):
+ if line1 != line2:
+ if non_blank > min_lines:
+ yield num, lineset1, index1, lineset2, index2
+ skip = max(skip, num)
+ break
+ if line1:
+ non_blank += 1
+ else:
+ # we may have reach the end
+ num += 1
+ if non_blank > min_lines:
+ yield num, lineset1, index1, lineset2, index2
+ skip = max(skip, num)
+ # Advance past the longest match found at this position.
+ index1 += skip
+
+ def _iter_sims(self):
+ """iterate on similarities among all files, by making a cartesian
+ product
+ """
+ for idx, lineset in enumerate(self.linesets[:-1]):
+ for lineset2 in self.linesets[idx + 1 :]:
+ for sim in self._find_common(lineset, lineset2):
+ yield sim
+
+
+def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
+ """return lines with leading/trailing whitespace and any ignored code
+ features removed
+
+ Ignored lines are replaced with "" (not dropped) so indices still map
+ one-to-one onto the original lines.
+ """
+ if ignore_imports:
+ # Parse once to map each line number to "does this line start an
+ # import statement" (all nodes on that line must be imports).
+ tree = astroid.parse("".join(lines))
+ node_is_import_by_lineno = (
+ (node.lineno, isinstance(node, (astroid.Import, astroid.ImportFrom)))
+ for node in tree.body
+ )
+ line_begins_import = {
+ lineno: all(is_import for _, is_import in node_is_import_group)
+ for lineno, node_is_import_group in groupby(
+ node_is_import_by_lineno, key=lambda x: x[0]
+ )
+ }
+ current_line_is_import = False
+
+ strippedlines = []
+ docstring = None
+ for lineno, line in enumerate(lines, start=1):
+ line = line.strip()
+ if ignore_docstrings:
+ # Track the opening quote style so the closing line is detected.
+ if not docstring and any(
+ line.startswith(i) for i in ['"""', "'''", 'r"""', "r'''"]
+ ):
+ docstring = line[:3]
+ line = line[3:]
+ if docstring:
+ if line.endswith(docstring):
+ docstring = None
+ line = ""
+ if ignore_imports:
+ # Multi-line imports keep the state of the line that began them.
+ current_line_is_import = line_begins_import.get(
+ lineno, current_line_is_import
+ )
+ if current_line_is_import:
+ line = ""
+ if ignore_comments:
+ line = line.split("#", 1)[0].strip()
+ strippedlines.append(line)
+ return strippedlines
+
+
+class LineSet:
+ """Holds and indexes all the lines of a single source file"""
+
+ def __init__(
+ self,
+ name,
+ lines,
+ ignore_comments=False,
+ ignore_docstrings=False,
+ ignore_imports=False,
+ ):
+ self.name = name
+ # Original lines, used for display; stripped lines, used for matching.
+ self._real_lines = lines
+ self._stripped_lines = stripped_lines(
+ lines, ignore_comments, ignore_docstrings, ignore_imports
+ )
+ self._index = self._mk_index()
+
+ def __str__(self):
+ return "<Lineset for %s>" % self.name
+
+ def __len__(self):
+ return len(self._real_lines)
+
+ def __getitem__(self, index):
+ # Indexing yields the *stripped* line, matching find()/enumerate_stripped.
+ return self._stripped_lines[index]
+
+ def __lt__(self, other):
+ return self.name < other.name
+
+ def __hash__(self):
+ # Identity-based hash: linesets are placed in sets as distinct objects.
+ return id(self)
+
+ def enumerate_stripped(self, start_at=0):
+ """return an iterator on stripped lines, starting from a given index
+ if specified, else 0
+ """
+ idx = start_at
+ if start_at:
+ lines = self._stripped_lines[start_at:]
+ else:
+ lines = self._stripped_lines
+ for line in lines:
+ # if line:
+ yield idx, line
+ idx += 1
+
+ def find(self, stripped_line):
+ """return positions of the given stripped line in this set"""
+ return self._index.get(stripped_line, ())
+
+ def _mk_index(self):
+ """create the index for this set"""
+ # Maps each non-blank stripped line to the list of indices where it occurs.
+ index = defaultdict(list)
+ for line_no, line in enumerate(self._stripped_lines):
+ if line:
+ index[line].append(line_no)
+ return index
+
+
+# Message table used by SimilarChecker below.
+MSGS = {
+ "R0801": (
+ "Similar lines in %s files\n%s",
+ "duplicate-code",
+ "Indicates that a set of similar lines has been detected "
+ "among multiple file. This usually means that the code should "
+ "be refactored to avoid this duplication.",
+ )
+}
+
+
+def report_similarities(sect, stats, old_stats):
+ """make a layout with some stats about duplication
+
+ Appends a 4-column table (metric, now, previous, difference) to `sect`.
+ """
+ lines = ["", "now", "previous", "difference"]
+ lines += table_lines_from_stats(
+ stats, old_stats, ("nb_duplicated_lines", "percent_duplicated_lines")
+ )
+ sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
+
+
+# wrapper to get a pylint checker from the similar class
+# wrapper to get a pylint checker from the similar class
+class SimilarChecker(BaseChecker, Similar):
+ """checks for similarities and duplicated code. This computation may be
+ memory / CPU intensive, so you should disable it if you experiment some
+ problems.
+ """
+
+ __implements__ = (IRawChecker,)
+ # configuration section name
+ name = "similarities"
+ # messages
+ msgs = MSGS
+ # configuration options
+ # for available dict keys/values see the optik parser 'add_option' method
+ options = (
+ (
+ "min-similarity-lines", # type: ignore
+ {
+ "default": 4,
+ "type": "int",
+ "metavar": "<int>",
+ "help": "Minimum lines number of a similarity.",
+ },
+ ),
+ (
+ "ignore-comments",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Ignore comments when computing similarities.",
+ },
+ ),
+ (
+ "ignore-docstrings",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Ignore docstrings when computing similarities.",
+ },
+ ),
+ (
+ "ignore-imports",
+ {
+ "default": False,
+ "type": "yn",
+ "metavar": "<y or n>",
+ "help": "Ignore imports when computing similarities.",
+ },
+ ),
+ )
+ # reports
+ reports = (("RP0801", "Duplication", report_similarities),) # type: ignore
+
+ def __init__(self, linter=None):
+ BaseChecker.__init__(self, linter)
+ # Defaults here mirror the option defaults; set_option() overrides them
+ # once the configuration is read.
+ Similar.__init__(
+ self, min_lines=4, ignore_comments=True, ignore_docstrings=True
+ )
+ self.stats = None
+
+ def set_option(self, optname, value, action=None, optdict=None):
+ """method called to set an option (registered in the options list)
+
+ overridden to report options setting to Similar
+ """
+ BaseChecker.set_option(self, optname, value, action, optdict)
+ # Mirror each relevant option onto the Similar engine's attributes.
+ if optname == "min-similarity-lines":
+ self.min_lines = self.config.min_similarity_lines
+ elif optname == "ignore-comments":
+ self.ignore_comments = self.config.ignore_comments
+ elif optname == "ignore-docstrings":
+ self.ignore_docstrings = self.config.ignore_docstrings
+ elif optname == "ignore-imports":
+ self.ignore_imports = self.config.ignore_imports
+
+ def open(self):
+ """init the checkers: reset linesets and statistics information"""
+ self.linesets = []
+ self.stats = self.linter.add_stats(
+ nb_duplicated_lines=0, percent_duplicated_lines=0
+ )
+
+ def process_module(self, node):
+ """process a module
+
+ the module's content is accessible via the stream object
+
+ stream must implement the readlines method
+ """
+ with node.stream() as stream:
+ self.append_stream(self.linter.current_name, stream, node.file_encoding)
+
+ def close(self):
+ """compute and display similarities on closing (i.e. end of parsing)"""
+ total = sum(len(lineset) for lineset in self.linesets)
+ duplicated = 0
+ stats = self.stats
+ for num, couples in self._compute_sims():
+ msg = []
+ lineset = idx = None
+ for lineset, idx in couples:
+ msg.append("==%s:%s" % (lineset.name, idx))
+ msg.sort()
+
+ # Append the duplicated source lines from the last location seen.
+ if lineset:
+ for line in lineset._real_lines[idx : idx + num]:
+ msg.append(line.rstrip())
+
+ self.add_message("R0801", args=(len(couples), "\n".join(msg)))
+ duplicated += num * (len(couples) - 1)
+ stats["nb_duplicated_lines"] = duplicated
+ # `total and ...` guards against division by zero when no lines were seen.
+ stats["percent_duplicated_lines"] = total and duplicated * 100.0 / total
+
+
+def register(linter):
+ """required method to auto register this checker """
+ linter.register_checker(SimilarChecker(linter))
+
+
+def usage(status=0):
+ """display command line usage information and exit with `status`"""
+ print("finds copy pasted blocks in a set of files")
+ print()
+ print(
+ "Usage: symilar [-d|--duplicates min_duplicated_lines] \
+[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1..."
+ )
+ sys.exit(status)
+
+
+def Run(argv=None):
+ """standalone command line access point
+
+ Parses options, runs the Similar engine on the given files and exits.
+ """
+ if argv is None:
+ argv = sys.argv[1:]
+
+ s_opts = "hdi"
+ l_opts = (
+ "help",
+ "duplicates=",
+ "ignore-comments",
+ "ignore-imports",
+ "ignore-docstrings",
+ )
+ min_lines = 4
+ ignore_comments = False
+ ignore_docstrings = False
+ ignore_imports = False
+ opts, args = getopt(argv, s_opts, l_opts)
+ for opt, val in opts:
+ if opt in ("-d", "--duplicates"):
+ min_lines = int(val)
+ elif opt in ("-h", "--help"):
+ usage()
+ elif opt in ("-i", "--ignore-comments"):
+ ignore_comments = True
+ elif opt in ("--ignore-docstrings",):
+ ignore_docstrings = True
+ elif opt in ("--ignore-imports",):
+ ignore_imports = True
+ if not args:
+ # No files given: show usage and exit with an error status.
+ usage(1)
+ sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
+ for filename in args:
+ with open(filename) as stream:
+ sim.append_stream(filename, stream)
+ sim.run()
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ Run()
diff --git a/venv/Lib/site-packages/pylint/checkers/spelling.py b/venv/Lib/site-packages/pylint/checkers/spelling.py
new file mode 100644
index 0000000..b1a5334
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/spelling.py
@@ -0,0 +1,411 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016-2017 Pedro Algarvio <pedro@algarvio.me>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for spelling errors in comments and docstrings.
+"""
+
+import os
+import re
+import tokenize
+
+from pylint.checkers import BaseTokenChecker
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker, ITokenChecker
+
+# enchant is an optional dependency: when missing, provide stub base classes
+# so the filter/chunker subclasses below still import cleanly.
+try:
+ import enchant
+ from enchant.tokenize import ( # type: ignore
+ get_tokenizer,
+ Chunker,
+ Filter,
+ EmailFilter,
+ URLFilter,
+ WikiWordFilter,
+ )
+except ImportError:
+ enchant = None
+ # pylint: disable=no-init
+ class Filter: # type: ignore
+ def _skip(self, word):
+ raise NotImplementedError
+
+ class Chunker: # type: ignore
+ pass
+
+
+# Build the help text for the spelling-dict option: the list of available
+# dictionaries, plus an installation hint when enchant is absent.
+if enchant is not None:
+ br = enchant.Broker()
+ dicts = br.list_dicts()
+ dict_choices = [""] + [d[0] for d in dicts]
+ dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
+ dicts = ", ".join(dicts)
+ instr = ""
+else:
+ dicts = "none"
+ dict_choices = [""]
+ instr = " To make it work, install the python-enchant package."
+
+
+class WordsWithDigigtsFilter(Filter):
+ """Skips words with digits.
+
+ NOTE(review): class name keeps the historical misspelling ("Digigts")
+ because SpellingChecker.open() below refers to it by this name.
+ """
+
+ def _skip(self, word):
+ for char in word:
+ if char.isdigit():
+ return True
+ return False
+
+
+class WordsWithUnderscores(Filter):
+ """Skips words with underscores.
+
+ They are probably function parameter names.
+ """
+
+ def _skip(self, word):
+ # Any underscore anywhere in the token disqualifies it from spell check.
+ return "_" in word
+
+
+class CamelCasedWord(Filter):
+ r"""Filter skipping over camelCasedWords.
+ This filter skips any words matching the following regular expression:
+
+ ^([a-z]\w+[A-Z]+\w+)
+
+ That is, any words that are camelCasedWords.
+ """
+ # NOTE(review): the actual pattern also skips words with an embedded
+ # digit (e.g. "utf8"), which is broader than the docstring's regex.
+ _pattern = re.compile(r"^([a-z]+([\d]|[A-Z])(?:\w+)?)")
+
+ def _skip(self, word):
+ return bool(self._pattern.match(word))
+
+
+class SphinxDirectives(Filter):
+ r"""Filter skipping over Sphinx Directives.
+ This filter skips any words matching the following regular expression:
+
+ ^:([a-z]+):`([^`]+)(`)?
+
+ That is, for example, :class:`BaseQuery`
+ """
+ # The final ` in the pattern is optional because enchant strips it out
+ _pattern = re.compile(r"^:([a-z]+):`([^`]+)(`)?")
+
+ def _skip(self, word):
+ return bool(self._pattern.match(word))
+
+
+class ForwardSlashChunkder(Chunker):
+ """
+ This chunker allows splitting words like 'before/after' into 'before' and 'after'
+
+ NOTE(review): class name keeps the historical misspelling ("Chunkder")
+ because SpellingChecker.open() below refers to it by this name.
+ """
+
+ def next(self):
+ # Consume self._text, emitting one chunk per call; raise StopIteration
+ # when the text is exhausted (iterator protocol used by enchant).
+ while True:
+ if not self._text:
+ raise StopIteration()
+ if "/" not in self._text:
+ text = self._text
+ self._offset = 0
+ self._text = ""
+ return (text, 0)
+ pre_text, post_text = self._text.split("/", 1)
+ self._text = post_text
+ self._offset = 0
+ if (
+ not pre_text
+ or not post_text
+ or not pre_text[-1].isalpha()
+ or not post_text[0].isalpha()
+ ):
+ # Not a word/word pair (e.g. path-like or punctuation around the
+ # slash): emit the whole remaining text unsplit.
+ self._text = ""
+ self._offset = 0
+ return (pre_text + "/" + post_text, 0)
+ return (pre_text, 0)
+
+ def _next(self):
+ # NOTE(review): appears to be an unused/dead alternative to next();
+ # nothing in this module calls it — confirm before relying on it.
+ while True:
+ if "/" not in self._text:
+ return (self._text, 0)
+ pre_text, post_text = self._text.split("/", 1)
+ if not pre_text or not post_text:
+ break
+ if not pre_text[-1].isalpha() or not post_text[0].isalpha():
+ raise StopIteration()
+ self._text = pre_text + " " + post_text
+ raise StopIteration()
+
+
+class SpellingChecker(BaseTokenChecker):
+ """Check spelling in comments and docstrings"""
+
+ __implements__ = (ITokenChecker, IAstroidChecker)
+ name = "spelling"
+ msgs = {
+ "C0401": (
+ "Wrong spelling of a word '%s' in a comment:\n%s\n"
+ "%s\nDid you mean: '%s'?",
+ "wrong-spelling-in-comment",
+ "Used when a word in comment is not spelled correctly.",
+ ),
+ "C0402": (
+ "Wrong spelling of a word '%s' in a docstring:\n%s\n"
+ "%s\nDid you mean: '%s'?",
+ "wrong-spelling-in-docstring",
+ "Used when a word in docstring is not spelled correctly.",
+ ),
+ "C0403": (
+ "Invalid characters %r in a docstring",
+ "invalid-characters-in-docstring",
+ "Used when a word in docstring cannot be checked by enchant.",
+ ),
+ }
+ options = (
+ (
+ "spelling-dict",
+ {
+ "default": "",
+ "type": "choice",
+ "metavar": "<dict name>",
+ "choices": dict_choices,
+ "help": "Spelling dictionary name. "
+ "Available dictionaries: %s.%s" % (dicts, instr),
+ },
+ ),
+ (
+ "spelling-ignore-words",
+ {
+ "default": "",
+ "type": "string",
+ "metavar": "<comma separated words>",
+ "help": "List of comma separated words that " "should not be checked.",
+ },
+ ),
+ (
+ "spelling-private-dict-file",
+ {
+ "default": "",
+ "type": "string",
+ "metavar": "<path to file>",
+ "help": "A path to a file that contains the private "
+ "dictionary; one word per line.",
+ },
+ ),
+ (
+ "spelling-store-unknown-words",
+ {
+ "default": "n",
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Tells whether to store unknown words to the "
+ "private dictionary (see the "
+ "--spelling-private-dict-file option) instead of "
+ "raising a message.",
+ },
+ ),
+ (
+ "max-spelling-suggestions",
+ {
+ "default": 4,
+ "type": "int",
+ "metavar": "N",
+ "help": "Limits count of emitted suggestions for " "spelling mistakes.",
+ },
+ ),
+ )
+
+ def open(self):
+ """Initialize dictionary, ignore list and tokenizer from the config.
+
+ Leaves self.initialized False (checker disabled) when enchant is
+ missing or no spelling dictionary was configured.
+ """
+ self.initialized = False
+ self.private_dict_file = None
+
+ if enchant is None:
+ return
+ dict_name = self.config.spelling_dict
+ if not dict_name:
+ return
+
+ self.ignore_list = [
+ w.strip() for w in self.config.spelling_ignore_words.split(",")
+ ]
+ # "param" appears in docstring in param description and
+ # "pylint" appears in comments in pylint pragmas.
+ self.ignore_list.extend(["param", "pylint"])
+
+ # Expand tilde to allow e.g. spelling-private-dict-file = ~/.pylintdict
+ if self.config.spelling_private_dict_file:
+ self.config.spelling_private_dict_file = os.path.expanduser(
+ self.config.spelling_private_dict_file
+ )
+
+ if self.config.spelling_private_dict_file:
+ # Personal word list augments the main dictionary; the file is
+ # kept open in append mode for storing unknown words.
+ self.spelling_dict = enchant.DictWithPWL(
+ dict_name, self.config.spelling_private_dict_file
+ )
+ self.private_dict_file = open(self.config.spelling_private_dict_file, "a")
+ else:
+ self.spelling_dict = enchant.Dict(dict_name)
+
+ if self.config.spelling_store_unknown_words:
+ self.unknown_words = set()
+
+ self.tokenizer = get_tokenizer(
+ dict_name,
+ chunkers=[ForwardSlashChunkder],
+ filters=[
+ EmailFilter,
+ URLFilter,
+ WikiWordFilter,
+ WordsWithDigigtsFilter,
+ WordsWithUnderscores,
+ CamelCasedWord,
+ SphinxDirectives,
+ ],
+ )
+ self.initialized = True
+
+ def close(self):
+ """Release the private dictionary file, if one was opened."""
+ if self.private_dict_file:
+ self.private_dict_file.close()
+
+ def _check_spelling(self, msgid, line, line_num):
+ """Spell-check one line and emit `msgid` for each misspelled word."""
+ original_line = line
+ try:
+ # Width of the leading whitespace, used to map token offsets back
+ # to columns in the original line.
+ initial_space = re.search(r"^[^\S]\s*", line).regs[0][1]
+ except (IndexError, AttributeError):
+ initial_space = 0
+ if line.strip().startswith("#"):
+ line = line.strip()[1:]
+ starts_with_comment = True
+ else:
+ starts_with_comment = False
+ for word, word_start_at in self.tokenizer(line.strip()):
+ word_start_at += initial_space
+ lower_cased_word = word.casefold()
+
+ # Skip words from ignore list.
+ if word in self.ignore_list or lower_cased_word in self.ignore_list:
+ continue
+
+ # Strip starting u' from unicode literals and r' from raw strings.
+ if word.startswith(("u'", 'u"', "r'", 'r"')) and len(word) > 2:
+ word = word[2:]
+ lower_cased_word = lower_cased_word[2:]
+
+ # If it is a known word, then continue.
+ try:
+ if self.spelling_dict.check(lower_cased_word):
+ # The lower cased version of word passed spell checking
+ continue
+
+ # If we reached this far, it means there was a spelling mistake.
+ # Let's retry with the original work because 'unicode' is a
+ # spelling mistake but 'Unicode' is not
+ if self.spelling_dict.check(word):
+ continue
+ except enchant.errors.Error:
+ self.add_message(
+ "invalid-characters-in-docstring", line=line_num, args=(word,)
+ )
+ continue
+
+ # Store word to private dict or raise a message.
+ if self.config.spelling_store_unknown_words:
+ if lower_cased_word not in self.unknown_words:
+ self.private_dict_file.write("%s\n" % lower_cased_word)
+ self.unknown_words.add(lower_cased_word)
+ else:
+ # Present up to N suggestions.
+ suggestions = self.spelling_dict.suggest(word)
+ del suggestions[self.config.max_spelling_suggestions :]
+
+ # Locate the word in the line to position the ^^^ indicator.
+ line_segment = line[word_start_at:]
+ match = re.search(r"(\W|^)(%s)(\W|$)" % word, line_segment)
+ if match:
+ # Start position of second group in regex.
+ col = match.regs[2][0]
+ else:
+ col = line_segment.index(word)
+
+ col += word_start_at
+
+ if starts_with_comment:
+ # Account for the '#' stripped from the comment line.
+ col += 1
+ indicator = (" " * col) + ("^" * len(word))
+
+ self.add_message(
+ msgid,
+ line=line_num,
+ args=(
+ word,
+ original_line,
+ indicator,
+ "'{}'".format("' or '".join(suggestions)),
+ ),
+ )
+
+ def process_tokens(self, tokens):
+ """Spell-check all comment tokens of the module."""
+ if not self.initialized:
+ return
+
+ # Process tokens and look for comments.
+ for (tok_type, token, (start_row, _), _, _) in tokens:
+ if tok_type == tokenize.COMMENT:
+ if start_row == 1 and token.startswith("#!/"):
+ # Skip shebang lines
+ continue
+ if token.startswith("# pylint:"):
+ # Skip pylint enable/disable comments
+ continue
+ self._check_spelling("wrong-spelling-in-comment", token, start_row)
+
+ @check_messages("wrong-spelling-in-docstring")
+ def visit_module(self, node):
+ if not self.initialized:
+ return
+ self._check_docstring(node)
+
+ @check_messages("wrong-spelling-in-docstring")
+ def visit_classdef(self, node):
+ if not self.initialized:
+ return
+ self._check_docstring(node)
+
+ @check_messages("wrong-spelling-in-docstring")
+ def visit_functiondef(self, node):
+ if not self.initialized:
+ return
+ self._check_docstring(node)
+
+ visit_asyncfunctiondef = visit_functiondef
+
+ def _check_docstring(self, node):
+ """check the node has any spelling errors"""
+ docstring = node.doc
+ if not docstring:
+ return
+
+ # assumes the docstring starts on the line after the def/class line —
+ # TODO confirm for docstrings on the same line as the definition.
+ start_line = node.lineno + 1
+
+ # Go through lines of docstring
+ for idx, line in enumerate(docstring.splitlines()):
+ self._check_spelling("wrong-spelling-in-docstring", line, start_line + idx)
+
+
+def register(linter):
+ """required method to auto register this checker """
+ linter.register_checker(SpellingChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/stdlib.py b/venv/Lib/site-packages/pylint/checkers/stdlib.py
new file mode 100644
index 0000000..a945107
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/stdlib.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Cosmin Poieana <cmin@ropython.org>
+# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
+# Copyright (c) 2015 Chris Rebert <code@rebertia.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
+# Copyright (c) 2017 Renat Galimov <renat2017@gmail.com>
+# Copyright (c) 2017 Martin <MartinBasti@users.noreply.github.com>
+# Copyright (c) 2017 Christopher Zurcher <zurcher@users.noreply.github.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 Banjamin Freeman <befreeman@users.noreply.github.com>
+# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checkers for various standard library functions."""
+
+import sys
+
+import astroid
+from astroid.bases import Instance
+from astroid.node_classes import Const
+
+from pylint.checkers import BaseChecker, utils
+from pylint.interfaces import IAstroidChecker
+
+OPEN_FILES = {"open", "file"}
+UNITTEST_CASE = "unittest.case"
+THREADING_THREAD = "threading.Thread"
+COPY_COPY = "copy.copy"
+OS_ENVIRON = "os._Environ"
+ENV_GETTERS = {"os.getenv"}
+SUBPROCESS_POPEN = "subprocess.Popen"
+SUBPROCESS_RUN = "subprocess.run"
+OPEN_MODULE = "_io"
+
+
+def _check_mode_str(mode):
+ # check type
+ if not isinstance(mode, str):
+ return False
+ # check syntax
+ modes = set(mode)
+ _mode = "rwatb+Ux"
+ creating = "x" in modes
+ if modes - set(_mode) or len(mode) > len(modes):
+ return False
+ # check logic
+ reading = "r" in modes
+ writing = "w" in modes
+ appending = "a" in modes
+ text = "t" in modes
+ binary = "b" in modes
+ if "U" in modes:
+ if writing or appending or creating:
+ return False
+ reading = True
+ if text and binary:
+ return False
+ total = reading + writing + appending + creating
+ if total > 1:
+ return False
+ if not (reading or writing or appending or creating):
+ return False
+ return True
+
+
class StdlibChecker(BaseChecker):
    """Checker for problematic uses of the Python standard library.

    Flags invalid ``open()`` modes, boolean use of ``datetime.time``,
    redundant unittest assertions, calls to deprecated stdlib methods,
    unsafe ``threading.Thread`` / ``subprocess`` usage, shallow copies
    of ``os.environ`` and invalid ``os.getenv`` arguments.
    """

    __implements__ = (IAstroidChecker,)
    name = "stdlib"

    msgs = {
        "W1501": (
            '"%s" is not a valid mode for open.',
            "bad-open-mode",
            "Python supports: r, w, a[, x] modes with b, +, "
            "and U (only with r) options. "
            "See http://docs.python.org/2/library/functions.html#open",
        ),
        "W1502": (
            "Using datetime.time in a boolean context.",
            "boolean-datetime",
            "Using datetime.time in a boolean context can hide "
            "subtle bugs when the time they represent matches "
            "midnight UTC. This behaviour was fixed in Python 3.5. "
            "See http://bugs.python.org/issue13936 for reference.",
            {"maxversion": (3, 5)},
        ),
        "W1503": (
            "Redundant use of %s with constant value %r",
            "redundant-unittest-assert",
            "The first argument of assertTrue and assertFalse is "
            "a condition. If a constant is passed as parameter, that "
            "condition will be always true. In this case a warning "
            "should be emitted.",
        ),
        "W1505": (
            "Using deprecated method %s()",
            "deprecated-method",
            "The method is marked as deprecated and will be removed in "
            "a future version of Python. Consider looking for an "
            "alternative in the documentation.",
        ),
        "W1506": (
            "threading.Thread needs the target function",
            "bad-thread-instantiation",
            "The warning is emitted when a threading.Thread class "
            "is instantiated without the target function being passed. "
            "By default, the first parameter is the group param, not the target param. ",
        ),
        "W1507": (
            "Using copy.copy(os.environ). Use os.environ.copy() instead. ",
            "shallow-copy-environ",
            "os.environ is not a dict object but proxy object, so "
            "shallow copy has still effects on original object. "
            "See https://bugs.python.org/issue15373 for reference. ",
        ),
        "E1507": (
            "%s does not support %s type argument",
            "invalid-envvar-value",
            "Env manipulation functions support only string type arguments. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1508": (
            "%s default type is %s. Expected str or None.",
            "invalid-envvar-default",
            "Env manipulation functions return None or str values. "
            "Supplying anything different as a default may cause bugs. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1509": (
            "Using preexec_fn keyword which may be unsafe in the presence "
            "of threads",
            "subprocess-popen-preexec-fn",
            "The preexec_fn parameter is not safe to use in the presence "
            "of threads in your application. The child process could "
            "deadlock before exec is called. If you must use it, keep it "
            "trivial! Minimize the number of libraries you call into."
            "https://docs.python.org/3/library/subprocess.html#popen-constructor",
        ),
        "W1510": (
            "Using subprocess.run without explicitly set `check` is not recommended.",
            "subprocess-run-check",
            "The check parameter should always be used with explicitly set "
            "`check` keyword to make clear what the error-handling behavior is."
            # Fixed broken documentation anchor: "#subprocess.runs" does not
            # exist; the valid fragment is "#subprocess.run".
            "https://docs.python.org/3/library/subprocess.html#subprocess.run",
        ),
    }

    # Deprecated stdlib names. Key 0 applies to every Python version;
    # keys 2 and 3 group names by the release tuple that deprecated them.
    deprecated = {
        0: {
            "cgi.parse_qs",
            "cgi.parse_qsl",
            "ctypes.c_buffer",
            "distutils.command.register.register.check_metadata",
            "distutils.command.sdist.sdist.check_metadata",
            "tkinter.Misc.tk_menuBar",
            "tkinter.Menu.tk_bindForTraversal",
        },
        2: {
            (2, 6, 0): {
                "commands.getstatus",
                "os.popen2",
                "os.popen3",
                "os.popen4",
                "macostools.touched",
            },
            (2, 7, 0): {
                "unittest.case.TestCase.assertEquals",
                "unittest.case.TestCase.assertNotEquals",
                "unittest.case.TestCase.assertAlmostEquals",
                "unittest.case.TestCase.assertNotAlmostEquals",
                "unittest.case.TestCase.assert_",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
        },
        3: {
            (3, 0, 0): {
                "inspect.getargspec",
                "failUnlessEqual",
                "assertEquals",
                "failIfEqual",
                "assertNotEquals",
                "failUnlessAlmostEqual",
                "assertAlmostEquals",
                "failIfAlmostEqual",
                "assertNotAlmostEquals",
                "failUnless",
                "assert_",
                "failUnlessRaises",
                "failIf",
                "assertRaisesRegexp",
                "assertRegexpMatches",
                "assertNotRegexpMatches",
            },
            (3, 1, 0): {
                "base64.encodestring",
                "base64.decodestring",
                "ntpath.splitunc",
            },
            (3, 2, 0): {
                "cgi.escape",
                "configparser.RawConfigParser.readfp",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
            (3, 3, 0): {
                "inspect.getmoduleinfo",
                "logging.warn",
                "logging.Logger.warn",
                "logging.LoggerAdapter.warn",
                "nntplib._NNTPBase.xpath",
                "platform.popen",
            },
            (3, 4, 0): {
                "importlib.find_loader",
                "plistlib.readPlist",
                "plistlib.writePlist",
                "plistlib.readPlistFromBytes",
                "plistlib.writePlistToBytes",
            },
            (3, 4, 4): {"asyncio.tasks.async"},
            (3, 5, 0): {
                "fractions.gcd",
                "inspect.getargvalues",
                "inspect.formatargspec",
                "inspect.formatargvalues",
                "inspect.getcallargs",
                "platform.linux_distribution",
                "platform.dist",
            },
            (3, 6, 0): {"importlib._bootstrap_external.FileLoader.load_module"},
        },
    }

    def _check_bad_thread_instantiation(self, node):
        """Warn when threading.Thread is created without an explicit target.

        The first positional parameter of Thread is ``group``, so a single
        positional argument does not set the target function.
        """
        if not node.kwargs and not node.keywords and len(node.args) <= 1:
            self.add_message("bad-thread-instantiation", node=node)

    def _check_for_preexec_fn_in_popen(self, node):
        """Warn when subprocess.Popen is called with a preexec_fn keyword."""
        if node.keywords:
            for keyword in node.keywords:
                if keyword.arg == "preexec_fn":
                    self.add_message("subprocess-popen-preexec-fn", node=node)

    def _check_for_check_kw_in_run(self, node):
        """Warn when subprocess.run is called without an explicit ``check``."""
        kwargs = {keyword.arg for keyword in (node.keywords or ())}
        if "check" not in kwargs:
            self.add_message("subprocess-run-check", node=node)

    def _check_shallow_copy_environ(self, node):
        """Warn when copy.copy() is applied to os.environ.

        os.environ is a proxy object, so a shallow copy still writes
        through to the real environment.
        """
        arg = utils.get_argument_from_call(node, position=0)
        for inferred in arg.inferred():
            if inferred.qname() == OS_ENVIRON:
                self.add_message("shallow-copy-environ", node=node)
                break

    @utils.check_messages(
        "bad-open-mode",
        "redundant-unittest-assert",
        "deprecated-method",
        "bad-thread-instantiation",
        "shallow-copy-environ",
        "invalid-envvar-value",
        "invalid-envvar-default",
        "subprocess-popen-preexec-fn",
        "subprocess-run-check",
    )
    def visit_call(self, node):
        """Visit a Call node and dispatch to the per-API checks."""
        try:
            for inferred in node.func.infer():
                if inferred is astroid.Uninferable:
                    continue
                if inferred.root().name == OPEN_MODULE:
                    # open()/file() live in the _io module.
                    if getattr(node.func, "name", None) in OPEN_FILES:
                        self._check_open_mode(node)
                elif inferred.root().name == UNITTEST_CASE:
                    self._check_redundant_assert(node, inferred)
                elif isinstance(inferred, astroid.ClassDef):
                    if inferred.qname() == THREADING_THREAD:
                        self._check_bad_thread_instantiation(node)
                    elif inferred.qname() == SUBPROCESS_POPEN:
                        self._check_for_preexec_fn_in_popen(node)
                elif isinstance(inferred, astroid.FunctionDef):
                    name = inferred.qname()
                    if name == COPY_COPY:
                        self._check_shallow_copy_environ(node)
                    elif name in ENV_GETTERS:
                        self._check_env_function(node, inferred)
                    elif name == SUBPROCESS_RUN:
                        self._check_for_check_kw_in_run(node)
                self._check_deprecated_method(node, inferred)
        except astroid.InferenceError:
            return

    @utils.check_messages("boolean-datetime")
    def visit_unaryop(self, node):
        """Check ``not <datetime.time>`` expressions."""
        if node.op == "not":
            self._check_datetime(node.operand)

    @utils.check_messages("boolean-datetime")
    def visit_if(self, node):
        """Check datetime.time used as an ``if`` condition."""
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_ifexp(self, node):
        """Check datetime.time used as a conditional-expression test."""
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_boolop(self, node):
        """Check datetime.time used as an operand of ``and``/``or``."""
        for value in node.values:
            self._check_datetime(value)

    def _check_deprecated_method(self, node, inferred):
        """Emit deprecated-method if the called name is in the tables above."""
        py_vers = sys.version_info[0]

        if isinstance(node.func, astroid.Attribute):
            func_name = node.func.attrname
        elif isinstance(node.func, astroid.Name):
            func_name = node.func.name
        else:
            # Not interested in other nodes.
            return

        # Reject nodes which aren't of interest to us.
        acceptable_nodes = (
            astroid.BoundMethod,
            astroid.UnboundMethod,
            astroid.FunctionDef,
        )
        if not isinstance(inferred, acceptable_nodes):
            return

        qname = inferred.qname()
        if any(name in self.deprecated[0] for name in (qname, func_name)):
            self.add_message("deprecated-method", node=node, args=(func_name,))
        else:
            # Only names deprecated at or below the running interpreter
            # version are reported.
            for since_vers, func_list in self.deprecated[py_vers].items():
                if since_vers <= sys.version_info and any(
                    name in func_list for name in (qname, func_name)
                ):
                    self.add_message("deprecated-method", node=node, args=(func_name,))
                    break

    def _check_redundant_assert(self, node, infer):
        """Warn when assertTrue/assertFalse receives a constant condition."""
        if (
            isinstance(infer, astroid.BoundMethod)
            and node.args
            and isinstance(node.args[0], astroid.Const)
            and infer.name in ["assertTrue", "assertFalse"]
        ):
            self.add_message(
                "redundant-unittest-assert",
                args=(infer.name, node.args[0].value),
                node=node,
            )

    def _check_datetime(self, node):
        """ Check that a datetime was inferred.
        If so, emit boolean-datetime warning.
        """
        try:
            inferred = next(node.infer())
        except astroid.InferenceError:
            return
        if isinstance(inferred, Instance) and inferred.qname() == "datetime.time":
            self.add_message("boolean-datetime", node=node)

    def _check_open_mode(self, node):
        """Check that the mode argument of an open or file call is valid."""
        try:
            mode_arg = utils.get_argument_from_call(node, position=1, keyword="mode")
        except utils.NoSuchArgumentError:
            return
        if mode_arg:
            mode_arg = utils.safe_infer(mode_arg)
            if isinstance(mode_arg, astroid.Const) and not _check_mode_str(
                mode_arg.value
            ):
                self.add_message("bad-open-mode", node=node, args=mode_arg.value)

    def _check_env_function(self, node, infer):
        """Check the key and default arguments of an os.getenv-style call."""
        env_name_kwarg = "key"
        env_value_kwarg = "default"
        if node.keywords:
            kwargs = {keyword.arg: keyword.value for keyword in node.keywords}
        else:
            kwargs = None
        if node.args:
            env_name_arg = node.args[0]
        elif kwargs and env_name_kwarg in kwargs:
            env_name_arg = kwargs[env_name_kwarg]
        else:
            env_name_arg = None

        if env_name_arg:
            # The environment-variable name must be a string.
            self._check_invalid_envvar_value(
                node=node,
                message="invalid-envvar-value",
                call_arg=utils.safe_infer(env_name_arg),
                infer=infer,
                allow_none=False,
            )

        if len(node.args) == 2:
            env_value_arg = node.args[1]
        elif kwargs and env_value_kwarg in kwargs:
            env_value_arg = kwargs[env_value_kwarg]
        else:
            env_value_arg = None

        if env_value_arg:
            # The default may be a string or None, nothing else.
            self._check_invalid_envvar_value(
                node=node,
                infer=infer,
                message="invalid-envvar-default",
                call_arg=utils.safe_infer(env_value_arg),
                allow_none=True,
            )

    def _check_invalid_envvar_value(self, node, infer, message, call_arg, allow_none):
        """Emit *message* when *call_arg* is not a str (or allowed None)."""
        if call_arg in (astroid.Uninferable, None):
            return

        name = infer.qname()
        if isinstance(call_arg, Const):
            emit = False
            if call_arg.value is None:
                emit = not allow_none
            elif not isinstance(call_arg.value, str):
                emit = True
            if emit:
                self.add_message(message, node=node, args=(name, call_arg.pytype()))
        else:
            # Non-constant, non-str argument (e.g. a list literal).
            self.add_message(message, node=node, args=(name, call_arg.pytype()))
+
+
def register(linter):
    """Required method to auto-register this checker."""
    checker = StdlibChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/checkers/strings.py b/venv/Lib/site-packages/pylint/checkers/strings.py
new file mode 100644
index 0000000..9470f46
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/strings.py
@@ -0,0 +1,755 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009 Charles Hebert <charles.hebert@logilab.fr>
+# Copyright (c) 2010-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016 Peter Dawyndt <Peter.Dawyndt@UGent.be>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for string formatting operations.
+"""
+
+import builtins
+import numbers
+import tokenize
+from collections import Counter
+
+import astroid
+from astroid.arguments import CallSite
+from astroid.node_classes import Const
+
+from pylint.checkers import BaseChecker, BaseTokenChecker, utils
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker, IRawChecker, ITokenChecker
+
+_AST_NODE_STR_TYPES = ("__builtin__.unicode", "__builtin__.str", "builtins.str")
+
+MSGS = {
+ "E1300": (
+ "Unsupported format character %r (%#02x) at index %d",
+ "bad-format-character",
+ "Used when an unsupported format character is used in a format string.",
+ ),
+ "E1301": (
+ "Format string ends in middle of conversion specifier",
+ "truncated-format-string",
+ "Used when a format string terminates before the end of a "
+ "conversion specifier.",
+ ),
+ "E1302": (
+ "Mixing named and unnamed conversion specifiers in format string",
+ "mixed-format-string",
+ "Used when a format string contains both named (e.g. '%(foo)d') "
+ "and unnamed (e.g. '%d') conversion specifiers. This is also "
+ "used when a named conversion specifier contains * for the "
+ "minimum field width and/or precision.",
+ ),
+ "E1303": (
+ "Expected mapping for format string, not %s",
+ "format-needs-mapping",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with an argument that is not a mapping.",
+ ),
+ "W1300": (
+ "Format string dictionary key should be a string, not %s",
+ "bad-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary whose keys are not all strings.",
+ ),
+ "W1301": (
+ "Unused key %r in format string dictionary",
+ "unused-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary that contains keys not required by the "
+ "format string.",
+ ),
+ "E1304": (
+ "Missing key %r in format string dictionary",
+ "missing-format-string-key",
+ "Used when a format string that uses named conversion specifiers "
+ "is used with a dictionary that doesn't contain all the keys "
+ "required by the format string.",
+ ),
+ "E1305": (
+ "Too many arguments for format string",
+ "too-many-format-args",
+ "Used when a format string that uses unnamed conversion "
+ "specifiers is given too many arguments.",
+ ),
+ "E1306": (
+ "Not enough arguments for format string",
+ "too-few-format-args",
+ "Used when a format string that uses unnamed conversion "
+ "specifiers is given too few arguments",
+ ),
+ "E1307": (
+ "Argument %r does not match format type %r",
+ "bad-string-format-type",
+ "Used when a type required by format string "
+ "is not suitable for actual argument type",
+ ),
+ "E1310": (
+ "Suspicious argument in %s.%s call",
+ "bad-str-strip-call",
+ "The argument to a str.{l,r,}strip call contains a duplicate character, ",
+ ),
+ "W1302": (
+ "Invalid format string",
+ "bad-format-string",
+ "Used when a PEP 3101 format string is invalid.",
+ ),
+ "W1303": (
+ "Missing keyword argument %r for format string",
+ "missing-format-argument-key",
+ "Used when a PEP 3101 format string that uses named fields "
+ "doesn't receive one or more required keywords.",
+ ),
+ "W1304": (
+ "Unused format argument %r",
+ "unused-format-string-argument",
+ "Used when a PEP 3101 format string that uses named "
+ "fields is used with an argument that "
+ "is not required by the format string.",
+ ),
+ "W1305": (
+ "Format string contains both automatic field numbering "
+ "and manual field specification",
+ "format-combined-specification",
+ "Used when a PEP 3101 format string contains both automatic "
+ "field numbering (e.g. '{}') and manual field "
+ "specification (e.g. '{0}').",
+ ),
+ "W1306": (
+ "Missing format attribute %r in format specifier %r",
+ "missing-format-attribute",
+ "Used when a PEP 3101 format string uses an "
+ "attribute specifier ({0.length}), but the argument "
+ "passed for formatting doesn't have that attribute.",
+ ),
+ "W1307": (
+ "Using invalid lookup key %r in format specifier %r",
+ "invalid-format-index",
+ "Used when a PEP 3101 format string uses a lookup specifier "
+ "({a[1]}), but the argument passed for formatting "
+ "doesn't contain or doesn't have that key as an attribute.",
+ ),
+ "W1308": (
+ "Duplicate string formatting argument %r, consider passing as named argument",
+ "duplicate-string-formatting-argument",
+ "Used when we detect that a string formatting is "
+ "repeating an argument instead of using named string arguments",
+ ),
+}
+
+OTHER_NODES = (
+ astroid.Const,
+ astroid.List,
+ astroid.Lambda,
+ astroid.FunctionDef,
+ astroid.ListComp,
+ astroid.SetComp,
+ astroid.GeneratorExp,
+)
+
+BUILTINS_STR = builtins.__name__ + ".str"
+BUILTINS_FLOAT = builtins.__name__ + ".float"
+BUILTINS_INT = builtins.__name__ + ".int"
+
+
def get_access_path(key, parts):
    """Return the access path (e.g. ``a.b.c[0][1]``) described by *parts*.

    *parts* is a sequence of ``(is_attribute, specifier)`` pairs, as
    produced when parsing a PEP 3101 format field.
    """
    suffix = "".join(
        ".{}".format(spec) if is_attr else "[{!r}]".format(spec)
        for is_attr, spec in parts
    )
    return str(key) + suffix
+
+
def arg_matches_format_type(arg_type, format_type):
    """Return True when *arg_type* can be rendered by %-*format_type*."""
    if format_type in "sr":
        # %s and %r accept any object.
        return True
    if not isinstance(arg_type, astroid.Instance):
        # Anything that is not a concrete instance cannot be rejected.
        return True
    pytype = arg_type.pytype()
    if pytype == BUILTINS_STR:
        # Strings only work with the character conversion.
        return format_type == "c"
    if pytype == BUILTINS_FLOAT:
        return format_type in "deEfFgGn%"
    # Integers are accepted by every conversion; everything else by none.
    return pytype == BUILTINS_INT
+
+
class StringFormatChecker(BaseChecker):
    """Checks string formatting operations to ensure that the format string
    is valid and the arguments match the format string.
    """

    __implements__ = (IAstroidChecker,)
    name = "string"
    msgs = MSGS

    # pylint: disable=too-many-branches
    @check_messages(*MSGS)
    def visit_binop(self, node):
        """Check a %-style formatting operation (``format_string % args``)."""
        if node.op != "%":
            return
        left = node.left
        args = node.right

        # Only literal string constants on the left side can be checked.
        if not (isinstance(left, astroid.Const) and isinstance(left.value, str)):
            return
        format_string = left.value
        try:
            required_keys, required_num_args, required_key_types, required_arg_types = utils.parse_format_string(
                format_string
            )
        except utils.UnsupportedFormatCharacter as exc:
            formatted = format_string[exc.index]
            self.add_message(
                "bad-format-character",
                node=node,
                args=(formatted, ord(formatted), exc.index),
            )
            return
        except utils.IncompleteFormatString:
            self.add_message("truncated-format-string", node=node)
            return
        if required_keys and required_num_args:
            # The format string uses both named and unnamed format
            # specifiers.
            self.add_message("mixed-format-string", node=node)
        elif required_keys:
            # The format string uses only named format specifiers.
            # Check that the RHS of the % operator is a mapping object
            # that contains precisely the set of keys required by the
            # format string.
            if isinstance(args, astroid.Dict):
                keys = set()
                unknown_keys = False
                for k, _ in args.items:
                    if isinstance(k, astroid.Const):
                        key = k.value
                        if isinstance(key, str):
                            keys.add(key)
                        else:
                            self.add_message(
                                "bad-format-string-key", node=node, args=key
                            )
                    else:
                        # One of the keys was something other than a
                        # constant. Since we can't tell what it is,
                        # suppress checks for missing keys in the
                        # dictionary.
                        unknown_keys = True
                if not unknown_keys:
                    for key in required_keys:
                        if key not in keys:
                            self.add_message(
                                "missing-format-string-key", node=node, args=key
                            )
                for key in keys:
                    if key not in required_keys:
                        self.add_message(
                            "unused-format-string-key", node=node, args=key
                        )
                # Second pass: type-check each value against the conversion
                # its key requires.
                for key, arg in args.items:
                    if not isinstance(key, astroid.Const):
                        continue
                    format_type = required_key_types.get(key.value, None)
                    arg_type = utils.safe_infer(arg)
                    if (
                        format_type is not None
                        and arg_type not in (None, astroid.Uninferable)
                        and not arg_matches_format_type(arg_type, format_type)
                    ):
                        self.add_message(
                            "bad-string-format-type",
                            node=node,
                            args=(arg_type.pytype(), format_type),
                        )
            elif isinstance(args, (OTHER_NODES, astroid.Tuple)):
                # A literal that clearly is not a mapping.
                type_name = type(args).__name__
                self.add_message("format-needs-mapping", node=node, args=type_name)
            # else:
            # The RHS of the format specifier is a name or
            # expression. It may be a mapping object, so
            # there's nothing we can check.
        else:
            # The format string uses only unnamed format specifiers.
            # Check that the number of arguments passed to the RHS of
            # the % operator matches the number required by the format
            # string.
            args_elts = ()
            if isinstance(args, astroid.Tuple):
                rhs_tuple = utils.safe_infer(args)
                num_args = None
                if hasattr(rhs_tuple, "elts"):
                    args_elts = rhs_tuple.elts
                    num_args = len(args_elts)
            elif isinstance(args, (OTHER_NODES, (astroid.Dict, astroid.DictComp))):
                # A single non-tuple literal counts as one argument.
                args_elts = [args]
                num_args = 1
            else:
                # The RHS of the format specifier is a name or
                # expression. It could be a tuple of unknown size, so
                # there's nothing we can check.
                num_args = None
            if num_args is not None:
                if num_args > required_num_args:
                    self.add_message("too-many-format-args", node=node)
                elif num_args < required_num_args:
                    self.add_message("too-few-format-args", node=node)
            # Type-check each positional argument against its conversion.
            for arg, format_type in zip(args_elts, required_arg_types):
                if not arg:
                    continue
                arg_type = utils.safe_infer(arg)
                if arg_type not in (
                    None,
                    astroid.Uninferable,
                ) and not arg_matches_format_type(arg_type, format_type):
                    self.add_message(
                        "bad-string-format-type",
                        node=node,
                        args=(arg_type.pytype(), format_type),
                    )

    @check_messages(*MSGS)
    def visit_call(self, node):
        """Check str.strip-family calls and str.format calls."""
        func = utils.safe_infer(node.func)
        if (
            isinstance(func, astroid.BoundMethod)
            and isinstance(func.bound, astroid.Instance)
            and func.bound.name in ("str", "unicode", "bytes")
        ):
            if func.name in ("strip", "lstrip", "rstrip") and node.args:
                arg = utils.safe_infer(node.args[0])
                if not isinstance(arg, astroid.Const) or not isinstance(arg.value, str):
                    return
                # A duplicate character in the strip argument is almost
                # always a mistake (the argument is a set of characters).
                if len(arg.value) != len(set(arg.value)):
                    self.add_message(
                        "bad-str-strip-call",
                        node=node,
                        args=(func.bound.name, func.name),
                    )
            elif func.name == "format":
                self._check_new_format(node, func)

    def _detect_vacuous_formatting(self, node, positional_arguments):
        """Warn when the same name is passed positionally more than once."""
        counter = Counter(
            arg.name for arg in positional_arguments if isinstance(arg, astroid.Name)
        )
        for name, count in counter.items():
            if count == 1:
                continue
            self.add_message(
                "duplicate-string-formatting-argument", node=node, args=(name,)
            )

    def _check_new_format(self, node, func):
        """Check the new string formatting. """
        # Skip format nodes which don't have an explicit string on the
        # left side of the format operation.
        # We do this because our inference engine can't properly handle
        # redefinitions of the original string.
        # Note that there may not be any left side at all, if the format method
        # has been assigned to another variable. See issue 351. For example:
        #
        #    fmt = 'some string {}'.format
        #    fmt('arg')
        if isinstance(node.func, astroid.Attribute) and not isinstance(
            node.func.expr, astroid.Const
        ):
            return
        if node.starargs or node.kwargs:
            return
        try:
            strnode = next(func.bound.infer())
        except astroid.InferenceError:
            return
        if not (isinstance(strnode, astroid.Const) and isinstance(strnode.value, str)):
            return
        try:
            call_site = CallSite.from_call(node)
        except astroid.InferenceError:
            return

        try:
            fields, num_args, manual_pos = utils.parse_format_method_string(
                strnode.value
            )
        except utils.IncompleteFormatString:
            self.add_message("bad-format-string", node=node)
            return

        positional_arguments = call_site.positional_arguments
        named_arguments = call_site.keyword_arguments
        named_fields = {field[0] for field in fields if isinstance(field[0], str)}
        if num_args and manual_pos:
            self.add_message("format-combined-specification", node=node)
            return

        check_args = False
        # Consider "{[0]} {[1]}" as num_args.
        num_args += sum(1 for field in named_fields if field == "")
        if named_fields:
            for field in named_fields:
                if field and field not in named_arguments:
                    self.add_message(
                        "missing-format-argument-key", node=node, args=(field,)
                    )
            for field in named_arguments:
                if field not in named_fields:
                    self.add_message(
                        "unused-format-string-argument", node=node, args=(field,)
                    )
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if positional_arguments or num_args:
                empty = any(True for field in named_fields if field == "")
                if named_arguments or empty:
                    # Verify the required number of positional arguments
                    # only if the .format got at least one keyword argument.
                    # This means that the format strings accepts both
                    # positional and named fields and we should warn
                    # when one of the them is missing or is extra.
                    check_args = True
        else:
            check_args = True
        if check_args:
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if len(positional_arguments) > num_args:
                self.add_message("too-many-format-args", node=node)
            elif len(positional_arguments) < num_args:
                self.add_message("too-few-format-args", node=node)

        self._detect_vacuous_formatting(node, positional_arguments)
        self._check_new_format_specifiers(node, fields, named_arguments)

    def _check_new_format_specifiers(self, node, fields, named):
        """
        Check attribute and index access in the format
        string ("{0.a}" and "{0[a]}").
        """
        for key, specifiers in fields:
            # Obtain the argument. If it can't be obtained
            # or inferred, skip this check.
            if key == "":
                # {[0]} will have an unnamed argument, defaulting
                # to 0. It will not be present in `named`, so use the value
                # 0 for it.
                key = 0
            if isinstance(key, numbers.Number):
                try:
                    argname = utils.get_argument_from_call(node, key)
                except utils.NoSuchArgumentError:
                    continue
            else:
                if key not in named:
                    continue
                argname = named[key]
            if argname in (astroid.Uninferable, None):
                continue
            try:
                argument = utils.safe_infer(argname)
            except astroid.InferenceError:
                continue
            if not specifiers or not argument:
                # No need to check this key if it doesn't
                # use attribute / item access
                continue
            if argument.parent and isinstance(argument.parent, astroid.Arguments):
                # Ignore any object coming from an argument,
                # because we can't infer its value properly.
                continue
            previous = argument
            parsed = []
            # Walk the chain of attribute/index specifiers, re-inferring
            # the intermediate object at each step.
            for is_attribute, specifier in specifiers:
                if previous is astroid.Uninferable:
                    break
                parsed.append((is_attribute, specifier))
                if is_attribute:
                    try:
                        previous = previous.getattr(specifier)[0]
                    except astroid.NotFoundError:
                        if (
                            hasattr(previous, "has_dynamic_getattr")
                            and previous.has_dynamic_getattr()
                        ):
                            # Don't warn if the object has a custom __getattr__
                            break
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "missing-format-attribute",
                            args=(specifier, path),
                            node=node,
                        )
                        break
                else:
                    warn_error = False
                    if hasattr(previous, "getitem"):
                        try:
                            previous = previous.getitem(astroid.Const(specifier))
                        except (
                            astroid.AstroidIndexError,
                            astroid.AstroidTypeError,
                            astroid.AttributeInferenceError,
                        ):
                            warn_error = True
                        except astroid.InferenceError:
                            break
                        if previous is astroid.Uninferable:
                            break
                    else:
                        try:
                            # Lookup __getitem__ in the current node,
                            # but skip further checks, because we can't
                            # retrieve the looked object
                            previous.getattr("__getitem__")
                            break
                        except astroid.NotFoundError:
                            warn_error = True
                    if warn_error:
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "invalid-format-index", args=(specifier, path), node=node
                        )
                        break

                try:
                    previous = next(previous.infer())
                except astroid.InferenceError:
                    # can't check further if we can't infer it
                    break
+
+
class StringConstantChecker(BaseTokenChecker):
    """Check string literals.

    Operates on both the raw token stream (to see prefixes and quotes exactly
    as written) and on AST nodes (to relate string constants back to the
    tokens that produced them), emitting W1401/W1402/W1403.
    """

    __implements__ = (IAstroidChecker, ITokenChecker, IRawChecker)
    name = "string"
    msgs = {
        "W1401": (
            "Anomalous backslash in string: '%s'. "
            "String constant might be missing an r prefix.",
            "anomalous-backslash-in-string",
            "Used when a backslash is in a literal string but not as an escape.",
        ),
        "W1402": (
            "Anomalous Unicode escape in byte string: '%s'. "
            "String constant might be missing an r or u prefix.",
            "anomalous-unicode-escape-in-string",
            "Used when an escape like \\u is encountered in a byte "
            "string where it has no effect.",
        ),
        "W1403": (
            "Implicit string concatenation found in %s",
            "implicit-str-concat-in-sequence",
            "String literals are implicitly concatenated in a "
            "literal iterable definition : "
            "maybe a comma is missing ?",
        ),
    }
    options = (
        (
            "check-str-concat-over-line-jumps",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "This flag controls whether the "
                "implicit-str-concat-in-sequence should generate a warning "
                "on implicit string concatenation in sequences defined over "
                "several lines.",
            },
        ),
    )

    # Characters that have a special meaning after a backslash in either
    # Unicode or byte strings.
    ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567"

    # Characters that have a special meaning after a backslash but only in
    # Unicode strings.
    UNICODE_ESCAPE_CHARACTERS = "uUN"

    def __init__(self, *args, **kwargs):
        """Initialize the checker and its per-module token bookkeeping."""
        super(StringConstantChecker, self).__init__(*args, **kwargs)
        self.string_tokens = {}  # token position -> (token value, next token)

    def process_module(self, module):
        """Record whether the module imports ``unicode_literals``."""
        # NOTE(review): nothing in this class reads _unicode_literals; it is
        # presumably consumed elsewhere (or vestigial) -- confirm before removing.
        self._unicode_literals = "unicode_literals" in module.future_imports

    def process_tokens(self, tokens):
        """Scan the raw token stream, checking each STRING token and
        remembering its evaluated value plus the next significant token
        (used later to detect implicit concatenation)."""
        encoding = "ascii"
        for i, (tok_type, token, start, _, line) in enumerate(tokens):
            if tok_type == tokenize.ENCODING:
                # this is always the first token processed
                encoding = token
            elif tok_type == tokenize.STRING:
                # 'token' is the whole un-parsed token; we can look at the start
                # of it to see whether it's a raw or unicode string etc.
                self.process_string_token(token, start[0])
                # We figure the next token, ignoring comments & newlines:
                j = i + 1
                while j < len(tokens) and tokens[j].type in (
                    tokenize.NEWLINE,
                    tokenize.NL,
                    tokenize.COMMENT,
                ):
                    j += 1
                next_token = tokens[j] if j < len(tokens) else None
                if encoding != "ascii":
                    # We convert `tokenize` character count into a byte count,
                    # to match with astroid `.col_offset`
                    start = (start[0], len(line[: start[1]].encode(encoding)))
                self.string_tokens[start] = (str_eval(token), next_token)

    @check_messages(*(msgs.keys()))
    def visit_list(self, node):
        """Check list literals for implicit string concatenation."""
        self.check_for_concatenated_strings(node, "list")

    @check_messages(*(msgs.keys()))
    def visit_set(self, node):
        """Check set literals for implicit string concatenation."""
        self.check_for_concatenated_strings(node, "set")

    @check_messages(*(msgs.keys()))
    def visit_tuple(self, node):
        """Check tuple literals for implicit string concatenation."""
        self.check_for_concatenated_strings(node, "tuple")

    def check_for_concatenated_strings(self, iterable_node, iterable_type):
        """Emit implicit-str-concat-in-sequence when a string element of a
        literal iterable was produced by concatenating several tokens."""
        for elt in iterable_node.elts:
            if isinstance(elt, Const) and elt.pytype() in _AST_NODE_STR_TYPES:
                if elt.col_offset < 0:
                    # This can happen in case of escaped newlines
                    continue
                if (elt.lineno, elt.col_offset) not in self.string_tokens:
                    # This may happen with Latin1 encoding
                    # cf. https://github.com/PyCQA/pylint/issues/2610
                    continue
                matching_token, next_token = self.string_tokens[
                    (elt.lineno, elt.col_offset)
                ]
                # We detect string concatenation: the AST Const is the
                # combination of 2 string tokens
                if matching_token != elt.value and next_token is not None:
                    if next_token.type == tokenize.STRING and (
                        next_token.start[0] == elt.lineno
                        or self.config.check_str_concat_over_line_jumps
                    ):
                        self.add_message(
                            "implicit-str-concat-in-sequence",
                            line=elt.lineno,
                            args=(iterable_type,),
                        )

    def process_string_token(self, token, start_row):
        """Split a raw STRING token into prefix / quotes / body and run the
        escape checks on the body (raw strings are exempt)."""
        quote_char = None
        index = None
        for index, char in enumerate(token):
            if char in "'\"":
                quote_char = char
                break
        # No quote found: not a string literal we can analyse.
        if quote_char is None:
            return

        prefix = token[:index].lower()  # markers like u, b, r.
        after_prefix = token[index:]
        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
            string_body = after_prefix[3:-3]
        else:
            string_body = after_prefix[1:-1]  # Chop off quotes
        # No special checks on raw strings at the moment.
        if "r" not in prefix:
            self.process_non_raw_string_token(prefix, string_body, start_row)

    def process_non_raw_string_token(self, prefix, string_body, start_row):
        """check for bad escapes in a non-raw string.

        prefix: lowercase string of eg 'ur' string prefix markers.
        string_body: the un-parsed body of the string, not including the quote
        marks.
        start_row: integer line number in the source.
        """
        # Walk through the string; if we see a backslash then escape the next
        # character, and skip over it.  If we see a non-escaped character,
        # alert, and continue.
        #
        # Accept a backslash when it escapes a backslash, or a quote, or
        # end-of-line, or one of the letters that introduce a special escape
        # sequence <http://docs.python.org/reference/lexical_analysis.html>
        #
        index = 0
        while True:
            index = string_body.find("\\", index)
            if index == -1:
                break
            # There must be a next character; having a backslash at the end
            # of the string would be a SyntaxError.
            next_char = string_body[index + 1]
            match = string_body[index : index + 2]
            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
                if "u" in prefix:
                    pass
                elif "b" not in prefix:
                    pass  # unicode by default
                else:
                    # \u-style escape inside a byte string: has no effect.
                    self.add_message(
                        "anomalous-unicode-escape-in-string",
                        line=start_row,
                        args=(match,),
                        col_offset=index,
                    )
            elif next_char not in self.ESCAPE_CHARACTERS:
                self.add_message(
                    "anomalous-backslash-in-string",
                    line=start_row,
                    args=(match,),
                    col_offset=index,
                )
            # Whether it was a valid escape or not, backslash followed by
            # another character can always be consumed whole: the second
            # character can never be the start of a new backslash escape.
            index += 2
+
+
def register(linter):
    """required method to auto register this checker """
    for checker_class in (StringFormatChecker, StringConstantChecker):
        linter.register_checker(checker_class(linter))
+
+
def str_eval(token):
    """
    Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit.
    This supports f-strings, contrary to `ast.literal_eval`.
    We have to support all string literal notations:
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals

    Escape sequences are NOT interpreted: only the prefix and the quotes are
    stripped, and the raw body of the literal is returned.
    """
    # Skip every prefix letter (any of r/R, u/U, f/F, b/B in any combination);
    # the first quote character ends the prefix.  The previous implementation
    # only recognised "r", "u", "f", "fr" and "rf", so byte-string prefixes
    # such as "b", "br", "Rb" or "B" produced a wrong slice.
    first_quote = 0
    while token[first_quote] not in "'\"":
        first_quote += 1
    token = token[first_quote:]
    if token[0:3] in ('"""', "'''"):
        return token[3:-3]
    return token[1:-1]
diff --git a/venv/Lib/site-packages/pylint/checkers/typecheck.py b/venv/Lib/site-packages/pylint/checkers/typecheck.py
new file mode 100644
index 0000000..a288f49
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/typecheck.py
@@ -0,0 +1,1770 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 James Lingard <jchl@aristanetworks.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 David Shea <dshea@redhat.com>
+# Copyright (c) 2014 Steven Myint <hg@stevenmyint.com>
+# Copyright (c) 2014 Holger Peters <email@holger-peters.de>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Anentropic <ego@anentropic.com>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016 Jürgen Hermann <jh@web.de>
+# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016 Filipe Brandenburger <filbranden@google.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 Ben Green <benhgreen@icloud.com>
+# Copyright (c) 2018 Konstantin <Github@pheanex.de>
+# Copyright (c) 2018 Justin Li <justinnhli@users.noreply.github.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""try to find more bugs in the code using astroid inference capabilities
+"""
+
+import builtins
+import fnmatch
+import heapq
+import itertools
+import operator
+import re
+import shlex
+import sys
+import types
+from collections import deque
+from collections.abc import Sequence
+from functools import singledispatch
+
+import astroid
+import astroid.arguments
+import astroid.context
+import astroid.nodes
+from astroid import bases, decorators, exceptions, modutils, objects
+from astroid.interpreter import dunder_lookup
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import (
+ check_messages,
+ decorated_with,
+ decorated_with_property,
+ has_known_bases,
+ is_builtin_object,
+ is_comprehension,
+ is_inside_abstract_class,
+ is_iterable,
+ is_mapping,
+ is_overload_stub,
+ is_super,
+ node_ignores_exception,
+ safe_infer,
+ supports_delitem,
+ supports_getitem,
+ supports_membership_test,
+ supports_setitem,
+)
+from pylint.interfaces import INFERENCE, IAstroidChecker
+from pylint.utils import get_global_option
+
# Name of the builtins module ("builtins" on Python 3).
BUILTINS = builtins.__name__
# Qualified name(s) of str.format, used to recognise format-string calls.
STR_FORMAT = {"%s.str.format" % BUILTINS}
# Qualified name of the asyncio coroutine decorator.
ASYNCIO_COROUTINE = "asyncio.coroutines.coroutine"
+
+
+def _unflatten(iterable):
+ for index, elem in enumerate(iterable):
+ if isinstance(elem, Sequence) and not isinstance(elem, str):
+ for single_elem in _unflatten(elem):
+ yield single_elem
+ elif elem and not index:
+ # We're interested only in the first element.
+ yield elem
+
+
+def _flatten_container(iterable):
+ # Flatten nested containers into a single iterable
+ for item in iterable:
+ if isinstance(item, (list, tuple, types.GeneratorType)):
+ yield from _flatten_container(item)
+ else:
+ yield item
+
+
+def _is_owner_ignored(owner, attrname, ignored_classes, ignored_modules):
+ """Check if the given owner should be ignored
+
+ This will verify if the owner's module is in *ignored_modules*
+ or the owner's module fully qualified name is in *ignored_modules*
+ or if the *ignored_modules* contains a pattern which catches
+ the fully qualified name of the module.
+
+ Also, similar checks are done for the owner itself, if its name
+ matches any name from the *ignored_classes* or if its qualified
+ name can be found in *ignored_classes*.
+ """
+ ignored_modules = set(ignored_modules)
+ module_name = owner.root().name
+ module_qname = owner.root().qname()
+
+ for ignore in ignored_modules:
+ # Try to match the module name / fully qualified name directly
+ if module_qname in ignored_modules or module_name in ignored_modules:
+ return True
+
+ # Try to see if the ignores pattern match against the module name.
+ if fnmatch.fnmatch(module_qname, ignore):
+ return True
+
+ # Otherwise we might have a root module name being ignored,
+ # and the qualified owner has more levels of depth.
+ parts = deque(module_name.split("."))
+ current_module = ""
+
+ while parts:
+ part = parts.popleft()
+ if not current_module:
+ current_module = part
+ else:
+ current_module += ".{}".format(part)
+ if current_module in ignored_modules:
+ return True
+
+ # Match against ignored classes.
+ ignored_classes = set(ignored_classes)
+ if hasattr(owner, "qname"):
+ qname = owner.qname()
+ else:
+ qname = ""
+ return any(ignore in (attrname, qname) for ignore in ignored_classes)
+
+
+@singledispatch
+def _node_names(node):
+ if not hasattr(node, "locals"):
+ return []
+ return node.locals.keys()
+
+
@_node_names.register(astroid.ClassDef)
@_node_names.register(astroid.Instance)
def _(node):
    """Names of a class or instance: its own attributes plus everything
    reachable through the MRO (falling back to raw ancestors when the MRO
    cannot be computed)."""
    own_names = itertools.chain(node.instance_attrs.keys(), node.locals.keys())

    try:
        ancestry = node.mro()[1:]
    except (NotImplementedError, TypeError):
        ancestry = node.ancestors()

    inherited = [name for cls in ancestry for name in _node_names(cls)]
    return itertools.chain(own_names, inherited)
+
+
+def _string_distance(seq1, seq2):
+ seq2_length = len(seq2)
+
+ row = list(range(1, seq2_length + 1)) + [0]
+ for seq1_index, seq1_char in enumerate(seq1):
+ last_row = row
+ row = [0] * seq2_length + [seq1_index + 1]
+
+ for seq2_index, seq2_char in enumerate(seq2):
+ row[seq2_index] = min(
+ last_row[seq2_index] + 1,
+ row[seq2_index - 1] + 1,
+ last_row[seq2_index - 1] + (seq1_char != seq2_char),
+ )
+
+ return row[seq2_length - 1]
+
+
def _similar_names(owner, attrname, distance_threshold, max_choices):
    """Given an owner and a name, try to find similar names

    The similar names are searched given a distance metric and only
    a given number of choices will be returned.
    """
    candidates = []
    for name in _node_names(owner):
        if name == attrname:
            continue
        distance = _string_distance(attrname, name)
        if distance <= distance_threshold:
            candidates.append((name, distance))

    # Keep only the closest matches, up to the requested number of choices.
    closest = heapq.nsmallest(max_choices, candidates, key=operator.itemgetter(1))
    return sorted(name for name, _ in closest)
+
+
def _missing_member_hint(owner, attrname, distance_threshold, max_choices):
    """Return a hint such as "; maybe 'foo'?" for a missing member access,
    or an empty string when no similar name exists."""
    similar = _similar_names(owner, attrname, distance_threshold, max_choices)
    if not similar:
        # No similar name.
        return ""

    quoted = [repr(name) for name in similar]
    if len(quoted) == 1:
        suggestions = quoted[0]
    else:
        suggestions = "one of {} or {}".format(", ".join(quoted[:-1]), quoted[-1])

    return "; maybe {}?".format(suggestions)
+
+
# Message catalogue for the type checker: E11xx are errors found through
# astroid inference (missing members, bad call arguments, unsupported
# operations...), I1101 is informational, W111x flag suspicious-but-legal
# constructs.
MSGS = {
    "E1101": (
        "%s %r has no %r member%s",
        "no-member",
        "Used when a variable is accessed for an unexistent member.",
        {"old_names": [("E1103", "maybe-no-member")]},
    ),
    "I1101": (
        "%s %r has no %r member%s, but source is unavailable. Consider "
        "adding this module to extension-pkg-whitelist if you want "
        "to perform analysis based on run-time introspection of living objects.",
        "c-extension-no-member",
        "Used when a variable is accessed for non-existent member of C "
        "extension. Due to unavailability of source static analysis is impossible, "
        "but it may be performed by introspecting living objects in run-time.",
    ),
    "E1102": (
        "%s is not callable",
        "not-callable",
        "Used when an object being called has been inferred to a non "
        "callable object.",
    ),
    "E1111": (
        "Assigning result of a function call, where the function has no return",
        "assignment-from-no-return",
        "Used when an assignment is done on a function call but the "
        "inferred function doesn't return anything.",
    ),
    "E1120": (
        "No value for argument %s in %s call",
        "no-value-for-parameter",
        "Used when a function call passes too few arguments.",
    ),
    "E1121": (
        "Too many positional arguments for %s call",
        "too-many-function-args",
        "Used when a function call passes too many positional arguments.",
    ),
    "E1123": (
        "Unexpected keyword argument %r in %s call",
        "unexpected-keyword-arg",
        "Used when a function call passes a keyword argument that "
        "doesn't correspond to one of the function's parameter names.",
    ),
    "E1124": (
        "Argument %r passed by position and keyword in %s call",
        "redundant-keyword-arg",
        "Used when a function call would result in assigning multiple "
        "values to a function parameter, one value from a positional "
        "argument and one from a keyword argument.",
    ),
    "E1125": (
        "Missing mandatory keyword argument %r in %s call",
        "missing-kwoa",
        (
            "Used when a function call does not pass a mandatory"
            " keyword-only argument."
        ),
    ),
    "E1126": (
        "Sequence index is not an int, slice, or instance with __index__",
        "invalid-sequence-index",
        "Used when a sequence type is indexed with an invalid type. "
        "Valid types are ints, slices, and objects with an __index__ "
        "method.",
    ),
    "E1127": (
        "Slice index is not an int, None, or instance with __index__",
        "invalid-slice-index",
        "Used when a slice index is not an integer, None, or an object "
        "with an __index__ method.",
    ),
    "E1128": (
        "Assigning result of a function call, where the function returns None",
        "assignment-from-none",
        "Used when an assignment is done on a function call but the "
        "inferred function returns nothing but None.",
        {"old_names": [("W1111", "old-assignment-from-none")]},
    ),
    "E1129": (
        "Context manager '%s' doesn't implement __enter__ and __exit__.",
        "not-context-manager",
        "Used when an instance in a with statement doesn't implement "
        "the context manager protocol(__enter__/__exit__).",
    ),
    "E1130": (
        "%s",
        "invalid-unary-operand-type",
        "Emitted when a unary operand is used on an object which does not "
        "support this type of operation.",
    ),
    "E1131": (
        "%s",
        "unsupported-binary-operation",
        "Emitted when a binary arithmetic operation between two "
        "operands is not supported.",
    ),
    "E1132": (
        "Got multiple values for keyword argument %r in function call",
        "repeated-keyword",
        "Emitted when a function call got multiple values for a keyword.",
    ),
    "E1135": (
        "Value '%s' doesn't support membership test",
        "unsupported-membership-test",
        "Emitted when an instance in membership test expression doesn't "
        "implement membership protocol (__contains__/__iter__/__getitem__).",
    ),
    "E1136": (
        "Value '%s' is unsubscriptable",
        "unsubscriptable-object",
        "Emitted when a subscripted value doesn't support subscription "
        "(i.e. doesn't define __getitem__ method or __class_getitem__ for a class).",
    ),
    "E1137": (
        "%r does not support item assignment",
        "unsupported-assignment-operation",
        "Emitted when an object does not support item assignment "
        "(i.e. doesn't define __setitem__ method).",
    ),
    "E1138": (
        "%r does not support item deletion",
        "unsupported-delete-operation",
        "Emitted when an object does not support item deletion "
        "(i.e. doesn't define __delitem__ method).",
    ),
    "E1139": (
        "Invalid metaclass %r used",
        "invalid-metaclass",
        "Emitted whenever we can detect that a class is using, "
        "as a metaclass, something which might be invalid for using as "
        "a metaclass.",
    ),
    "E1140": (
        "Dict key is unhashable",
        "unhashable-dict-key",
        "Emitted when a dict key is not hashable "
        "(i.e. doesn't define __hash__ method).",
    ),
    "E1141": (
        "Unpacking a dictionary in iteration without calling .items()",
        "dict-iter-missing-items",
        "Emitted when trying to iterate through a dict without calling .items()",
    ),
    "W1113": (
        "Keyword argument before variable positional arguments list "
        "in the definition of %s function",
        "keyword-arg-before-vararg",
        "When defining a keyword argument before variable positional arguments, one can "
        "end up in having multiple values passed for the aforementioned parameter in "
        "case the method is called with keyword arguments.",
    ),
    "W1114": (
        "Positional arguments appear to be out of order",
        "arguments-out-of-order",
        "Emitted when the caller's argument names fully match the parameter "
        "names in the function signature but do not have the same order.",
    ),
}
+
# builtin sequence types in Python 2 and 3.
# NOTE(review): "unicode" and "xrange" exist only on Python 2; presumably kept
# so the set matches names regardless of the analysed code's Python version.
SEQUENCE_TYPES = {
    "str",
    "unicode",
    "list",
    "tuple",
    "bytearray",
    "xrange",
    "range",
    "bytes",
    "memoryview",
}
+
+
def _emit_no_member(node, owner, owner_name, ignored_mixins=True, ignored_none=True):
    """Try to see if no-member should be emitted for the given owner.

    The following cases are ignored:

    * the owner is a function and it has decorators.
    * the owner is an instance and it has __getattr__, __getattribute__ implemented
    * the module is explicitly ignored from no-member checks
    * the owner is a class and the name can be found in its metaclass.
    * The access node is protected by an except handler, which handles
      AttributeError, Exception or bare except.

    Returns True when the message should be emitted, False otherwise.
    """
    # pylint: disable=too-many-return-statements
    if node_ignores_exception(node, AttributeError):
        return False
    if ignored_none and isinstance(owner, astroid.Const) and owner.value is None:
        return False
    if is_super(owner) or getattr(owner, "type", None) == "metaclass":
        return False
    # Mixin classes (by naming convention) get their members via co-inheritance.
    if owner_name and ignored_mixins and owner_name[-5:].lower() == "mixin":
        return False
    # A decorated function may have had its attributes changed by the decorator.
    if isinstance(owner, astroid.FunctionDef) and owner.decorators:
        return False
    if isinstance(owner, (astroid.Instance, astroid.ClassDef)):
        if owner.has_dynamic_getattr():
            # Issue #2565: Don't ignore enums, as they have a `__getattr__` but it's not
            # invoked at this point.
            try:
                metaclass = owner.metaclass()
            except exceptions.MroError:
                return False
            if metaclass:
                return metaclass.qname() == "enum.EnumMeta"
            return False
        if not has_known_bases(owner):
            return False

        # Exclude typed annotations, since these might actually exist
        # at some point during the runtime of the program.
        attribute = owner.locals.get(node.attrname, [None])[0]
        if (
            attribute
            and isinstance(attribute, astroid.AssignName)
            and isinstance(attribute.parent, astroid.AnnAssign)
        ):
            return False
    if isinstance(owner, objects.Super):
        # Verify if we are dealing with an invalid Super object.
        # If it is invalid, then there's no point in checking that
        # it has the required attribute. Also, don't fail if the
        # MRO is invalid.
        try:
            owner.super_mro()
        except (exceptions.MroError, exceptions.SuperError):
            return False
        if not all(map(has_known_bases, owner.type.mro())):
            return False
    if isinstance(owner, astroid.Module):
        # A module-level __getattr__ can provide any attribute.
        try:
            owner.getattr("__getattr__")
            return False
        except astroid.NotFoundError:
            pass
    if owner_name and node.attrname.startswith("_" + owner_name):
        # Test if an attribute has been mangled ('private' attribute)
        unmangled_name = node.attrname.split("_" + owner_name)[-1]
        try:
            if owner.getattr(unmangled_name, context=None) is not None:
                return False
        except astroid.NotFoundError:
            return True
    return True
+
+
def _determine_callable(callable_obj):
    """Resolve *callable_obj* to the function that will actually run.

    Returns a ``(callable, implicit_parameter_count, callable_type_name)``
    tuple; for a class, the resolved callable is its ``__new__`` or
    ``__init__``.  Raises ValueError when no analysable callable is found.
    """
    # Ordering is important, since BoundMethod is a subclass of UnboundMethod,
    # and Function inherits Lambda.
    parameters = 0
    if hasattr(callable_obj, "implicit_parameters"):
        parameters = callable_obj.implicit_parameters()
    if isinstance(callable_obj, astroid.BoundMethod):
        # Bound methods have an extra implicit 'self' argument.
        return callable_obj, parameters, callable_obj.type
    if isinstance(callable_obj, astroid.UnboundMethod):
        return callable_obj, parameters, "unbound method"
    if isinstance(callable_obj, astroid.FunctionDef):
        return callable_obj, parameters, callable_obj.type
    if isinstance(callable_obj, astroid.Lambda):
        return callable_obj, parameters, "lambda"
    if isinstance(callable_obj, astroid.ClassDef):
        # Class instantiation, lookup __new__ instead.
        # If we only find object.__new__, we can safely check __init__
        # instead. If __new__ belongs to builtins, then we look
        # again for __init__ in the locals, since we won't have
        # argument information for the builtin __new__ function.
        try:
            # Use the last definition of __new__.
            new = callable_obj.local_attr("__new__")[-1]
        except exceptions.NotFoundError:
            new = None

        from_object = new and new.parent.scope().name == "object"
        from_builtins = new and new.root().name in sys.builtin_module_names

        if not new or from_object or from_builtins:
            try:
                # Use the last definition of __init__.
                callable_obj = callable_obj.local_attr("__init__")[-1]
            except exceptions.NotFoundError:
                # do nothing, covered by no-init.
                raise ValueError
        else:
            callable_obj = new

        if not isinstance(callable_obj, astroid.FunctionDef):
            raise ValueError
        # both have an extra implicit 'cls'/'self' argument.
        return callable_obj, parameters, "constructor"

    # Anything else is not recognised as callable.
    raise ValueError
+
+
+def _has_parent_of_type(node, node_type, statement):
+ """Check if the given node has a parent of the given type."""
+ parent = node.parent
+ while not isinstance(parent, node_type) and statement.parent_of(parent):
+ parent = parent.parent
+ return isinstance(parent, node_type)
+
+
def _no_context_variadic_keywords(node, scope):
    """Collect the keyword variadics in play for *node* and defer to
    :func:`_no_context_variadic` for the actual check."""
    statement = node.statement()
    variadics = ()

    in_lambda = isinstance(scope, astroid.Lambda) and not isinstance(
        scope, astroid.FunctionDef
    )
    if in_lambda:
        variadics = list(node.keywords or []) + node.kwargs
    elif isinstance(statement, (astroid.Return, astroid.Expr)) and isinstance(
        statement.value, astroid.Call
    ):
        inner_call = statement.value
        variadics = list(inner_call.keywords or []) + inner_call.kwargs

    return _no_context_variadic(node, scope.args.kwarg, astroid.Keyword, variadics)
+
+
def _no_context_variadic_positional(node, scope):
    """Collect the positional variadics in play for *node* and defer to
    :func:`_no_context_variadic` for the actual check."""
    variadics = ()
    in_lambda = isinstance(scope, astroid.Lambda) and not isinstance(
        scope, astroid.FunctionDef
    )
    if in_lambda:
        variadics = node.starargs + node.kwargs
    else:
        statement = node.statement()
        if isinstance(statement, (astroid.Expr, astroid.Return)) and isinstance(
            statement.value, astroid.Call
        ):
            inner_call = statement.value
            variadics = inner_call.starargs + inner_call.kwargs

    return _no_context_variadic(node, scope.args.vararg, astroid.Starred, variadics)
+
+
def _no_context_variadic(node, variadic_name, variadic_type, variadics):
    """Verify if the given call node has variadic nodes without context

    This is a workaround for handling cases of nested call functions
    which don't have the specific call context at hand.
    Variadic arguments (variable positional arguments and variable
    keyword arguments) are inferred, inherently wrong, by astroid
    as a Tuple, respectively a Dict with empty elements.
    This can lead pylint to believe that a function call receives
    too few arguments.

    Returns True when such a context-less variadic is detected.
    """
    scope = node.scope()
    is_in_lambda_scope = not isinstance(scope, astroid.FunctionDef) and isinstance(
        scope, astroid.Lambda
    )
    statement = node.statement()
    for name in statement.nodes_of_class(astroid.Name):
        # Only names referring to the variadic parameter are of interest.
        if name.name != variadic_name:
            continue

        inferred = safe_infer(name)
        if isinstance(inferred, (astroid.List, astroid.Tuple)):
            length = len(inferred.elts)
        elif isinstance(inferred, astroid.Dict):
            length = len(inferred.items)
        else:
            continue

        if is_in_lambda_scope and isinstance(inferred.parent, astroid.Arguments):
            # The statement of the variadic will be the assignment itself,
            # so we need to go the lambda instead
            inferred_statement = inferred.parent.parent
        else:
            inferred_statement = inferred.statement()

        # An empty inferred Tuple/Dict whose statement is a Lambda is the
        # tell-tale of a variadic inferred without its call context.
        if not length and isinstance(inferred_statement, astroid.Lambda):
            is_in_starred_context = _has_parent_of_type(node, variadic_type, statement)
            used_as_starred_argument = any(
                variadic.value == name or variadic.value.parent_of(name)
                for variadic in variadics
            )
            if is_in_starred_context or used_as_starred_argument:
                return True
    return False
+
+
def _is_invalid_metaclass(metaclass):
    """Return True when *metaclass* cannot be a valid metaclass, i.e. its
    MRO is unavailable or does not include the builtin ``type``."""
    try:
        mro = metaclass.mro()
    except NotImplementedError:
        # Cannot have a metaclass which is not a newstyle class.
        return True
    return not any(is_builtin_object(cls) and cls.name == "type" for cls in mro)
+
+
def _infer_from_metaclass_constructor(cls, func):
    """Try to infer what the given *func* constructor is building

    :param astroid.FunctionDef func:
        A metaclass constructor. Metaclass definitions can be
        functions, which should accept three arguments, the name of
        the class, the bases of the class and the attributes.
        The function could return anything, but usually it should
        be a proper metaclass.
    :param astroid.ClassDef cls:
        The class for which the *func* parameter should generate
        a metaclass.
    :returns:
        The class generated by the function or None,
        if we couldn't infer it.
    :rtype: astroid.ClassDef
    """
    context = astroid.context.InferenceContext()

    # Build synthetic (name, bases, attrs) arguments mirroring what the
    # interpreter would pass to a metaclass callable.
    class_bases = astroid.List()
    class_bases.postinit(elts=cls.bases)

    attrs = astroid.Dict()
    local_names = [(name, values[-1]) for name, values in cls.locals.items()]
    attrs.postinit(local_names)

    builder_args = astroid.Tuple()
    builder_args.postinit([cls.name, class_bases, attrs])

    # Simulate the call so that infer_call_result sees a call context.
    context.callcontext = astroid.context.CallContext(builder_args)
    try:
        inferred = next(func.infer_call_result(func, context), None)
    except astroid.InferenceError:
        return None
    return inferred or None
+
+
def _is_c_extension(module_node):
    """Return True when *module_node* is a non-stdlib module whose source
    is not fully available (typical of compiled C extensions)."""
    if modutils.is_standard_module(module_node.name):
        return False
    return not module_node.fully_defined()
+
+
+class TypeChecker(BaseChecker):
+ """try to find bugs in the code using type inference
+ """
+
+ __implements__ = (IAstroidChecker,)
+
+ # configuration section name
+ name = "typecheck"
+ # messages
+ msgs = MSGS
+ priority = -1
+ # configuration options
+ options = (
+ (
+ "ignore-on-opaque-inference",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "This flag controls whether pylint should warn about "
+ "no-member and similar checks whenever an opaque object "
+ "is returned when inferring. The inference can return "
+ "multiple potential results while evaluating a Python object, "
+ "but some branches might not be evaluated, which results in "
+ "partial inference. In that case, it might be useful to still emit "
+ "no-member and other checks for the rest of the inferred objects.",
+ },
+ ),
+ (
+ "ignore-mixin-members",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": 'Tells whether missing members accessed in mixin \
+class should be ignored. A mixin class is detected if its name ends with \
+"mixin" (case insensitive).',
+ },
+ ),
+ (
+ "ignore-none",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<y_or_n>",
+ "help": "Tells whether to warn about missing members when the owner "
+ "of the attribute is inferred to be None.",
+ },
+ ),
+ (
+ "ignored-modules",
+ {
+ "default": (),
+ "type": "csv",
+ "metavar": "<module names>",
+ "help": "List of module names for which member attributes "
+ "should not be checked (useful for modules/projects "
+ "where namespaces are manipulated during runtime and "
+ "thus existing member attributes cannot be "
+ "deduced by static analysis). It supports qualified "
+ "module names, as well as Unix pattern matching.",
+ },
+ ),
+ # the defaults here are *stdlib* names that (almost) always
+ # lead to false positives, since their idiomatic use is
+ # 'too dynamic' for pylint to grok.
+ (
+ "ignored-classes",
+ {
+ "default": ("optparse.Values", "thread._local", "_thread._local"),
+ "type": "csv",
+ "metavar": "<members names>",
+ "help": "List of class names for which member attributes "
+ "should not be checked (useful for classes with "
+ "dynamically set attributes). This supports "
+ "the use of qualified names.",
+ },
+ ),
+ (
+ "generated-members",
+ {
+ "default": (),
+ "type": "string",
+ "metavar": "<members names>",
+ "help": "List of members which are set dynamically and \
+missed by pylint inference system, and so shouldn't trigger E1101 when \
+accessed. Python regular expressions are accepted.",
+ },
+ ),
+ (
+ "contextmanager-decorators",
+ {
+ "default": ["contextlib.contextmanager"],
+ "type": "csv",
+ "metavar": "<decorator names>",
+ "help": "List of decorators that produce context managers, "
+ "such as contextlib.contextmanager. Add to this list "
+ "to register other decorators that produce valid "
+ "context managers.",
+ },
+ ),
+ (
+ "missing-member-hint-distance",
+ {
+ "default": 1,
+ "type": "int",
+ "metavar": "<member hint edit distance>",
+ "help": "The minimum edit distance a name should have in order "
+ "to be considered a similar match for a missing member name.",
+ },
+ ),
+ (
+ "missing-member-max-choices",
+ {
+ "default": 1,
+ "type": "int",
+ "metavar": "<member hint max choices>",
+ "help": "The total number of similar names that should be taken in "
+ "consideration when showing a hint for a missing member.",
+ },
+ ),
+ (
+ "missing-member-hint",
+ {
+ "default": True,
+ "type": "yn",
+ "metavar": "<missing member hint>",
+ "help": "Show a hint with possible names when a member name was not "
+ "found. The aspect of finding the hint is based on edit distance.",
+ },
+ ),
+ (
+ "signature-mutators",
+ {
+ "default": [],
+ "type": "csv",
+ "metavar": "<decorator names>",
+ "help": "List of decorators that change the signature of "
+ "a decorated function.",
+ },
+ ),
+ )
+
    @decorators.cachedproperty
    def _suggestion_mode(self):
        """Cached flag controlling `c-extension-no-member` suggestion behavior.

        Read lazily (and cached) because configuration is not fully
        initialized at construction time; defaults to True when the
        "suggestion-mode" option is unset.
        """
        return get_global_option(self, "suggestion-mode", default=True)
+
+ def open(self):
+ # do this in open since config not fully initialized in __init__
+ # generated_members may contain regular expressions
+ # (surrounded by quote `"` and followed by a comma `,`)
+ # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' =>
+ # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')
+ if isinstance(self.config.generated_members, str):
+ gen = shlex.shlex(self.config.generated_members)
+ gen.whitespace += ","
+ gen.wordchars += r"[]-+\.*?()|"
+ self.config.generated_members = tuple(tok.strip('"') for tok in gen)
+
+ @check_messages("keyword-arg-before-vararg")
+ def visit_functiondef(self, node):
+ # check for keyword arg before varargs
+ if node.args.vararg and node.args.defaults:
+ self.add_message("keyword-arg-before-vararg", node=node, args=(node.name))
+
+ visit_asyncfunctiondef = visit_functiondef
+
+ @check_messages("invalid-metaclass")
+ def visit_classdef(self, node):
+ def _metaclass_name(metaclass):
+ if isinstance(metaclass, (astroid.ClassDef, astroid.FunctionDef)):
+ return metaclass.name
+ return metaclass.as_string()
+
+ metaclass = node.declared_metaclass()
+ if not metaclass:
+ return
+
+ if isinstance(metaclass, astroid.FunctionDef):
+ # Try to infer the result.
+ metaclass = _infer_from_metaclass_constructor(node, metaclass)
+ if not metaclass:
+ # Don't do anything if we cannot infer the result.
+ return
+
+ if isinstance(metaclass, astroid.ClassDef):
+ if _is_invalid_metaclass(metaclass):
+ self.add_message(
+ "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+ )
+ else:
+ self.add_message(
+ "invalid-metaclass", node=node, args=(_metaclass_name(metaclass),)
+ )
+
+ def visit_assignattr(self, node):
+ if isinstance(node.assign_type(), astroid.AugAssign):
+ self.visit_attribute(node)
+
    def visit_delattr(self, node):
        # `del obj.attr` must resolve the attribute first, so it gets the
        # same no-member treatment as a plain attribute access.
        self.visit_attribute(node)
+
    @check_messages("no-member", "c-extension-no-member")
    def visit_attribute(self, node):
        """check that the accessed attribute exists

        to avoid too many false positives for now, we'll consider the code
        as correct if a single one of the inferred nodes has the accessed
        attribute.

        function/method, super call and metaclasses are ignored
        """
        # Skip anything matching the user-supplied generated-members
        # patterns, tested against both the bare attribute name and the
        # full dotted expression.
        for pattern in self.config.generated_members:
            # attribute is marked as generated, stop here
            if re.match(pattern, node.attrname):
                return
            if re.match(pattern, node.as_string()):
                return

        try:
            inferred = list(node.expr.infer())
        except exceptions.InferenceError:
            # Owner cannot be inferred at all; assume the access is fine.
            return

        # list of (node, nodename) which are missing the attribute
        missingattr = set()

        # Filter out opaque results (Uninferable / Unknown nodes).
        non_opaque_inference_results = [
            owner
            for owner in inferred
            if owner is not astroid.Uninferable
            and not isinstance(owner, astroid.nodes.Unknown)
        ]
        if (
            len(non_opaque_inference_results) != len(inferred)
            and self.config.ignore_on_opaque_inference
        ):
            # There is an ambiguity in the inference. Since we can't
            # make sure that we won't emit a false positive, we just stop
            # whenever the inference returns an opaque inference object.
            return
        for owner in non_opaque_inference_results:
            name = getattr(owner, "name", None)
            if _is_owner_ignored(
                owner, name, self.config.ignored_classes, self.config.ignored_modules
            ):
                continue

            try:
                # AugAssign statements are excluded: `x.a += 1` does not
                # prove the attribute exists, it requires it to.
                if not [
                    n
                    for n in owner.getattr(node.attrname)
                    if not isinstance(n.statement(), astroid.AugAssign)
                ]:
                    missingattr.add((owner, name))
                    continue
            except AttributeError:
                # Owner does not even support getattr lookup; skip it.
                continue
            except exceptions.NotFoundError:
                # This can't be moved before the actual .getattr call,
                # because there can be more values inferred and we are
                # stopping after the first one which has the attribute in question.
                # The problem is that if the first one has the attribute,
                # but we continue to the next values which doesn't have the
                # attribute, then we'll have a false positive.
                # So call this only after the call has been made.
                if not _emit_no_member(
                    node,
                    owner,
                    name,
                    ignored_mixins=self.config.ignore_mixin_members,
                    ignored_none=self.config.ignore_none,
                ):
                    continue
                missingattr.add((owner, name))
                continue
            # stop on the first found
            break
        else:
            # we have not found any node with the attributes, display the
            # message for inferred nodes
            done = set()
            for owner, name in missingattr:
                # Deduplicate by the underlying class so one bad class
                # yields a single message.
                if isinstance(owner, astroid.Instance):
                    actual = owner._proxied
                else:
                    actual = owner
                if actual in done:
                    continue
                done.add(actual)

                msg, hint = self._get_nomember_msgid_hint(node, owner)
                self.add_message(
                    msg,
                    node=node,
                    args=(owner.display_type(), name, node.attrname, hint),
                    confidence=INFERENCE,
                )
+
+ def _get_nomember_msgid_hint(self, node, owner):
+ suggestions_are_possible = self._suggestion_mode and isinstance(
+ owner, astroid.Module
+ )
+ if suggestions_are_possible and _is_c_extension(owner):
+ msg = "c-extension-no-member"
+ hint = ""
+ else:
+ msg = "no-member"
+ if self.config.missing_member_hint:
+ hint = _missing_member_hint(
+ owner,
+ node.attrname,
+ self.config.missing_member_hint_distance,
+ self.config.missing_member_max_choices,
+ )
+ else:
+ hint = ""
+ return msg, hint
+
    @check_messages("assignment-from-no-return", "assignment-from-none")
    def visit_assign(self, node):
        """check that if assigning to a function call, the function is
        possibly returning something valuable
        """
        if not isinstance(node.value, astroid.Call):
            return

        function_node = safe_infer(node.value.func)
        funcs = (astroid.FunctionDef, astroid.UnboundMethod, astroid.BoundMethod)
        if not isinstance(function_node, funcs):
            return

        # Unwrap to get the actual function object
        if isinstance(function_node, astroid.BoundMethod) and isinstance(
            function_node._proxied, astroid.UnboundMethod
        ):
            function_node = function_node._proxied._proxied

        # Make sure that it's a valid function that we can analyze.
        # Ordered from less expensive to more expensive checks.
        # pylint: disable=too-many-boolean-expressions
        if (
            not function_node.is_function
            or isinstance(function_node, astroid.AsyncFunctionDef)
            or function_node.decorators
            or function_node.is_generator()
            or function_node.is_abstract(pass_is_abstract=False)
            or not function_node.root().fully_defined()
        ):
            return

        # Collect returns that belong to this function only; skip_klass
        # keeps us from descending into nested function definitions.
        returns = list(
            function_node.nodes_of_class(astroid.Return, skip_klass=astroid.FunctionDef)
        )
        if not returns:
            self.add_message("assignment-from-no-return", node=node)
        else:
            for rnode in returns:
                if not (
                    isinstance(rnode.value, astroid.Const)
                    and rnode.value.value is None
                    or rnode.value is None
                ):
                    break
            else:
                # Every return is a bare `return` or an explicit
                # `return None`: assigning the result is suspicious.
                self.add_message("assignment-from-none", node=node)
+
    def _check_uninferable_call(self, node):
        """
        Check that the given uninferable Call node does not
        call an actual function.

        Emits not-callable when the attribute being called resolves to a
        property whose computed values are all non-callable.
        """
        if not isinstance(node.func, astroid.Attribute):
            return

        # Look for properties. First, obtain
        # the lhs of the Attribute node and search the attribute
        # there. If that attribute is a property or a subclass of properties,
        # then most likely it's not callable.

        expr = node.func.expr
        klass = safe_infer(expr)
        if (
            klass is None
            or klass is astroid.Uninferable
            or not isinstance(klass, astroid.Instance)
        ):
            return

        try:
            attrs = klass._proxied.getattr(node.func.attrname)
        except exceptions.NotFoundError:
            return

        for attr in attrs:
            if attr is astroid.Uninferable:
                continue
            if not isinstance(attr, astroid.FunctionDef):
                continue

            # Decorated, see if it is decorated with a property.
            # Also, check the returns and see if they are callable.
            if decorated_with_property(attr):

                try:
                    all_returns_are_callable = all(
                        return_node.callable() or return_node is astroid.Uninferable
                        for return_node in attr.infer_call_result(node)
                    )
                except astroid.InferenceError:
                    continue

                if not all_returns_are_callable:
                    self.add_message(
                        "not-callable", node=node, args=node.func.as_string()
                    )
                    # One message per call is enough.
                    break
+
    def _check_argument_order(self, node, call_site, called, called_param_names):
        """Match the supplied argument names against the function parameters.
        Warn if some argument names are not in the same order as they are in
        the function signature.
        """
        # Check for called function being an object instance function
        # If so, ignore the initial 'self' argument in the signature
        try:
            is_classdef = isinstance(called.parent, astroid.scoped_nodes.ClassDef)
            if is_classdef and called_param_names[0] == "self":
                called_param_names = called_param_names[1:]
        except IndexError:
            # No parameters at all; nothing to compare.
            return

        try:
            # extract argument names, if they have names
            calling_parg_names = [p.name for p in call_site.positional_arguments]

            # Additionally get names of keyword arguments to use in a full match
            # against parameters
            calling_kwarg_names = [
                arg.name for arg in call_site.keyword_arguments.values()
            ]
        except AttributeError:
            # the type of arg does not provide a `.name`. In this case we
            # stop checking for out-of-order arguments because it is only relevant
            # for named variables.
            return

        # Don't check for ordering if there is an unmatched arg or param
        arg_set = set(calling_parg_names) | set(calling_kwarg_names)
        param_set = set(called_param_names)
        if arg_set != param_set:
            return

        # Warn based on the equality of argument ordering
        if calling_parg_names != called_param_names[: len(calling_parg_names)]:
            self.add_message("arguments-out-of-order", node=node, args=())
+
    # pylint: disable=too-many-branches,too-many-locals
    @check_messages(*(list(MSGS.keys())))
    def visit_call(self, node):
        """check that called functions/methods are inferred to callable objects,
        and that the arguments passed to the function match the parameters in
        the inferred function's definition
        """
        called = safe_infer(node.func)
        # only function, generator and object defining __call__ are allowed
        # Ignore instances of descriptors since astroid cannot properly handle them
        # yet
        if called and not called.callable():
            if isinstance(called, astroid.Instance) and (
                not has_known_bases(called)
                or (
                    called.parent is not None
                    and isinstance(called.scope(), astroid.ClassDef)
                    and "__get__" in called.locals
                )
            ):
                # Don't emit if we can't make sure this object is callable.
                pass
            else:
                self.add_message("not-callable", node=node, args=node.func.as_string())

        self._check_uninferable_call(node)
        try:
            called, implicit_args, callable_name = _determine_callable(called)
        except ValueError:
            # Any error occurred during determining the function type, most of
            # those errors are handled by different warnings.
            return

        if called.args.args is None:
            # Built-in functions have no argument information.
            return

        if len(called.argnames()) != len(set(called.argnames())):
            # Duplicate parameter name (see duplicate-argument). We can't really
            # make sense of the function call in this case, so just return.
            return

        # Build the set of keyword arguments, checking for duplicate keywords,
        # and count the positional arguments.
        call_site = astroid.arguments.CallSite.from_call(node)

        # Warn about duplicated keyword arguments, such as `f=24, **{'f': 24}`
        for keyword in call_site.duplicated_keywords:
            self.add_message("repeated-keyword", node=node, args=(keyword,))

        if call_site.has_invalid_arguments() or call_site.has_invalid_keywords():
            # Can't make sense of this.
            return

        # Has the function signature changed in ways we cannot reliably detect?
        if hasattr(called, "decorators") and decorated_with(
            called, self.config.signature_mutators
        ):
            return

        num_positional_args = len(call_site.positional_arguments)
        keyword_args = list(call_site.keyword_arguments.keys())
        overload_function = is_overload_stub(called)

        # Determine if we don't have a context for our call and we use variadics.
        node_scope = node.scope()
        if isinstance(node_scope, (astroid.Lambda, astroid.FunctionDef)):
            has_no_context_positional_variadic = _no_context_variadic_positional(
                node, node_scope
            )
            has_no_context_keywords_variadic = _no_context_variadic_keywords(
                node, node_scope
            )
        else:
            has_no_context_positional_variadic = (
                has_no_context_keywords_variadic
            ) = False

        # These are coming from the functools.partial implementation in astroid
        already_filled_positionals = getattr(called, "filled_positionals", 0)
        already_filled_keywords = getattr(called, "filled_keywords", {})

        keyword_args += list(already_filled_keywords)
        num_positional_args += implicit_args + already_filled_positionals

        # Analyze the list of formal parameters.
        # `parameters` holds [(name, default), assigned] pairs; `assigned`
        # flips to True once a call argument is matched to that slot.
        args = list(itertools.chain(called.args.posonlyargs or (), called.args.args))
        num_mandatory_parameters = len(args) - len(called.args.defaults)
        parameters = []
        parameter_name_to_index = {}
        for i, arg in enumerate(args):
            if isinstance(arg, astroid.Tuple):
                name = None
                # Don't store any parameter names within the tuple, since those
                # are not assignable from keyword arguments.
            else:
                assert isinstance(arg, astroid.AssignName)
                # This occurs with:
                # def f( (a), (b) ): pass
                name = arg.name
                parameter_name_to_index[name] = i
            if i >= num_mandatory_parameters:
                defval = called.args.defaults[i - num_mandatory_parameters]
            else:
                defval = None
            parameters.append([(name, defval), False])

        # Keyword-only parameters, tracked separately from positional ones.
        kwparams = {}
        for i, arg in enumerate(called.args.kwonlyargs):
            if isinstance(arg, astroid.Keyword):
                name = arg.arg
            else:
                assert isinstance(arg, astroid.AssignName)
                name = arg.name
            kwparams[name] = [called.args.kw_defaults[i], False]

        self._check_argument_order(
            node, call_site, called, [p[0][0] for p in parameters]
        )

        # 1. Match the positional arguments.
        for i in range(num_positional_args):
            if i < len(parameters):
                parameters[i][1] = True
            elif called.args.vararg is not None:
                # The remaining positional arguments get assigned to the *args
                # parameter.
                break
            else:
                if not overload_function:
                    # Too many positional arguments.
                    self.add_message(
                        "too-many-function-args", node=node, args=(callable_name,)
                    )
                break

        # 2. Match the keyword arguments.
        for keyword in keyword_args:
            if keyword in parameter_name_to_index:
                i = parameter_name_to_index[keyword]
                if parameters[i][1]:
                    # Duplicate definition of function parameter.

                    # Might be too hardcoded, but this can actually
                    # happen when using str.format and `self` is passed
                    # by keyword argument, as in `.format(self=self)`.
                    # It's perfectly valid to so, so we're just skipping
                    # it if that's the case.
                    if not (keyword == "self" and called.qname() in STR_FORMAT):
                        self.add_message(
                            "redundant-keyword-arg",
                            node=node,
                            args=(keyword, callable_name),
                        )
                else:
                    parameters[i][1] = True
            elif keyword in kwparams:
                if kwparams[keyword][1]:
                    # Duplicate definition of function parameter.
                    self.add_message(
                        "redundant-keyword-arg",
                        node=node,
                        args=(keyword, callable_name),
                    )
                else:
                    kwparams[keyword][1] = True
            elif called.args.kwarg is not None:
                # The keyword argument gets assigned to the **kwargs parameter.
                pass
            elif not overload_function:
                # Unexpected keyword argument.
                self.add_message(
                    "unexpected-keyword-arg", node=node, args=(keyword, callable_name)
                )

        # 3. Match the **kwargs, if any.
        if node.kwargs:
            for i, [(name, defval), assigned] in enumerate(parameters):
                # Assume that *kwargs provides values for all remaining
                # unassigned named parameters.
                if name is not None:
                    parameters[i][1] = True
                else:
                    # **kwargs can't assign to tuples.
                    pass

        # Check that any parameters without a default have been assigned
        # values.
        for [(name, defval), assigned] in parameters:
            if (defval is None) and not assigned:
                if name is None:
                    display_name = "<tuple>"
                else:
                    display_name = repr(name)
                if not has_no_context_positional_variadic and not overload_function:
                    self.add_message(
                        "no-value-for-parameter",
                        node=node,
                        args=(display_name, callable_name),
                    )

        # Same check for keyword-only parameters without defaults.
        for name in kwparams:
            defval, assigned = kwparams[name]
            if defval is None and not assigned and not has_no_context_keywords_variadic:
                self.add_message("missing-kwoa", node=node, args=(name, callable_name))
+
    @check_messages("invalid-sequence-index")
    def visit_extslice(self, node):
        # Check extended slice objects as if they were used as a sequence
        # index to check if the object being sliced can support them.
        # Delegates entirely to visit_index.
        return self.visit_index(node)
+
    @check_messages("invalid-sequence-index")
    def visit_index(self, node):
        """Emit invalid-sequence-index for non-int indices on builtin sequences."""
        if not node.parent or not hasattr(node.parent, "value"):
            return None
        # Look for index operations where the parent is a sequence type.
        # If the types can be determined, only allow indices to be int,
        # slice or instances with __index__.
        parent_type = safe_infer(node.parent.value)
        if not isinstance(
            parent_type, (astroid.ClassDef, astroid.Instance)
        ) or not has_known_bases(parent_type):
            return None

        # Determine what method on the parent this index will use
        # The parent of this node will be a Subscript, and the parent of that
        # node determines if the Subscript is a get, set, or delete operation.
        if node.parent.ctx is astroid.Store:
            methodname = "__setitem__"
        elif node.parent.ctx is astroid.Del:
            methodname = "__delitem__"
        else:
            methodname = "__getitem__"

        # Check if this instance's __getitem__, __setitem__, or __delitem__, as
        # appropriate to the statement, is implemented in a builtin sequence
        # type. This way we catch subclasses of sequence types but skip classes
        # that override __getitem__ and which may allow non-integer indices.
        try:
            methods = dunder_lookup.lookup(parent_type, methodname)
            if methods is astroid.Uninferable:
                return None
            itemmethod = methods[0]
        except (
            exceptions.NotFoundError,
            exceptions.AttributeInferenceError,
            IndexError,
        ):
            return None

        if (
            not isinstance(itemmethod, astroid.FunctionDef)
            or itemmethod.root().name != BUILTINS
            or not itemmethod.parent
            or itemmethod.parent.name not in SEQUENCE_TYPES
        ):
            return None

        # For ExtSlice objects coming from visit_extslice, no further
        # inference is necessary, since if we got this far the ExtSlice
        # is an error.
        if isinstance(node, astroid.ExtSlice):
            index_type = node
        else:
            index_type = safe_infer(node)
        if index_type is None or index_type is astroid.Uninferable:
            return None
        # Constants must be of type int
        if isinstance(index_type, astroid.Const):
            if isinstance(index_type.value, int):
                return None
        # Instance values must be int, slice, or have an __index__ method
        elif isinstance(index_type, astroid.Instance):
            if index_type.pytype() in (BUILTINS + ".int", BUILTINS + ".slice"):
                return None
            try:
                # Anything exposing __index__ is a valid index.
                index_type.getattr("__index__")
                return None
            except exceptions.NotFoundError:
                pass
        elif isinstance(index_type, astroid.Slice):
            # Delegate to visit_slice. A slice can be present
            # here after inferring the index node, which could
            # be a `slice(...)` call for instance.
            return self.visit_slice(index_type)

        # Anything else is an error
        self.add_message("invalid-sequence-index", node=node)
        return None
+
    @check_messages("invalid-slice-index")
    def visit_slice(self, node):
        """Emit invalid-slice-index for slice bounds that cannot index."""
        # Check the type of each part of the slice
        invalid_slices = 0
        for index in (node.lower, node.upper, node.step):
            if index is None:
                continue

            index_type = safe_infer(index)
            if index_type is None or index_type is astroid.Uninferable:
                continue

            # Constants must of type int or None
            if isinstance(index_type, astroid.Const):
                if isinstance(index_type.value, (int, type(None))):
                    continue
            # Instance values must be of type int, None or an object
            # with __index__
            elif isinstance(index_type, astroid.Instance):
                if index_type.pytype() in (BUILTINS + ".int", BUILTINS + ".NoneType"):
                    continue

                try:
                    index_type.getattr("__index__")
                    # NOTE(review): this `return` aborts checking the whole
                    # slice as soon as one bound has __index__ (a `continue`
                    # would check the remaining bounds) -- confirm intended.
                    return
                except exceptions.NotFoundError:
                    pass
            invalid_slices += 1

        if not invalid_slices:
            return

        # Anything else is an error, unless the object that is indexed
        # is a custom object, which knows how to handle this kind of slices
        parent = node.parent
        if isinstance(parent, astroid.ExtSlice):
            parent = parent.parent
        if isinstance(parent, astroid.Subscript):
            inferred = safe_infer(parent.value)
            if inferred is None or inferred is astroid.Uninferable:
                # Don't know what this is
                return
            known_objects = (
                astroid.List,
                astroid.Dict,
                astroid.Tuple,
                astroid.objects.FrozenSet,
                astroid.Set,
            )
            if not isinstance(inferred, known_objects):
                # Might be an instance that knows how to handle this slice object
                return
        # One message per offending slice part.
        for _ in range(invalid_slices):
            self.add_message("invalid-slice-index", node=node)
+
    @check_messages("not-context-manager")
    def visit_with(self, node):
        """Emit not-context-manager for `with` expressions lacking the protocol."""
        for ctx_mgr, _ in node.items:
            context = astroid.context.InferenceContext()
            inferred = safe_infer(ctx_mgr, context=context)
            if inferred is None or inferred is astroid.Uninferable:
                continue

            if isinstance(inferred, bases.Generator):
                # Check if we are dealing with a function decorated
                # with contextlib.contextmanager.
                if decorated_with(
                    inferred.parent, self.config.contextmanager_decorators
                ):
                    continue
                # If the parent of the generator is not the context manager itself,
                # that means that it could have been returned from another
                # function which was the real context manager.
                # The following approach is more of a hack rather than a real
                # solution: walk all the inferred statements for the
                # given *ctx_mgr* and if you find one function scope
                # which is decorated, consider it to be the real
                # manager and give up, otherwise emit not-context-manager.
                # See the test file for not_context_manager for a couple
                # of self explaining tests.

                # Retrieve node from all previously visited nodes in the inference history
                context_path_names = filter(None, _unflatten(context.path))
                inferred_paths = _flatten_container(
                    safe_infer(path) for path in context_path_names
                )
                for inferred_path in inferred_paths:
                    if not inferred_path:
                        continue
                    scope = inferred_path.scope()
                    if not isinstance(scope, astroid.FunctionDef):
                        continue
                    if decorated_with(scope, self.config.contextmanager_decorators):
                        break
                else:
                    self.add_message(
                        "not-context-manager", node=node, args=(inferred.name,)
                    )
            else:
                # Non-generator: require the __enter__/__exit__ protocol.
                try:
                    inferred.getattr("__enter__")
                    inferred.getattr("__exit__")
                except exceptions.NotFoundError:
                    if isinstance(inferred, astroid.Instance):
                        # If we do not know the bases of this class,
                        # just skip it.
                        if not has_known_bases(inferred):
                            continue
                        # Just ignore mixin classes.
                        if self.config.ignore_mixin_members:
                            if inferred.name[-5:].lower() == "mixin":
                                continue

                    self.add_message(
                        "not-context-manager", node=node, args=(inferred.name,)
                    )
+
+ @check_messages("invalid-unary-operand-type")
+ def visit_unaryop(self, node):
+ """Detect TypeErrors for unary operands."""
+
+ for error in node.type_errors():
+ # Let the error customize its output.
+ self.add_message("invalid-unary-operand-type", args=str(error), node=node)
+
    @check_messages("unsupported-binary-operation")
    def _visit_binop(self, node):
        """Detect TypeErrors for binary arithmetic operands.

        NOTE(review): the leading underscore keeps this out of the visitor
        dispatch -- presumably disabled deliberately; confirm before renaming.
        """
        self._check_binop_errors(node)
+
    @check_messages("unsupported-binary-operation")
    def _visit_augassign(self, node):
        """Detect TypeErrors for augmented binary arithmetic operands.

        NOTE(review): the leading underscore keeps this out of the visitor
        dispatch -- presumably disabled deliberately; confirm before renaming.
        """
        self._check_binop_errors(node)
+
+ def _check_binop_errors(self, node):
+ for error in node.type_errors():
+ # Let the error customize its output.
+ if any(
+ isinstance(obj, astroid.ClassDef) and not has_known_bases(obj)
+ for obj in (error.left_type, error.right_type)
+ ):
+ continue
+ self.add_message("unsupported-binary-operation", args=str(error), node=node)
+
+ def _check_membership_test(self, node):
+ if is_inside_abstract_class(node):
+ return
+ if is_comprehension(node):
+ return
+ inferred = safe_infer(node)
+ if inferred is None or inferred is astroid.Uninferable:
+ return
+ if not supports_membership_test(inferred):
+ self.add_message(
+ "unsupported-membership-test", args=node.as_string(), node=node
+ )
+
+ @check_messages("unsupported-membership-test")
+ def visit_compare(self, node):
+ if len(node.ops) != 1:
+ return
+
+ op, right = node.ops[0]
+ if op in ["in", "not in"]:
+ self._check_membership_test(right)
+
    @check_messages(
        "unsubscriptable-object",
        "unsupported-assignment-operation",
        "unsupported-delete-operation",
        "unhashable-dict-key",
    )
    def visit_subscript(self, node):
        """Check subscript usage: protocol support and dict-key hashability."""
        supported_protocol = None
        if isinstance(node.value, (astroid.ListComp, astroid.DictComp)):
            return

        if isinstance(node.value, astroid.Dict):
            # Assert dict key is hashable
            inferred = safe_infer(node.slice.value)
            if inferred not in (None, astroid.Uninferable):
                try:
                    hash_fn = next(inferred.igetattr("__hash__"))
                except astroid.InferenceError:
                    pass
                else:
                    # __hash__ = None is how classes opt out of hashing.
                    if getattr(hash_fn, "value", True) is None:
                        self.add_message("unhashable-dict-key", node=node.value)

        # Pick the protocol check matching the access kind.
        # NOTE(review): `msg` is only bound in these branches -- relies on
        # ctx always being Load/Store/Del; confirm no other contexts reach here.
        if node.ctx == astroid.Load:
            supported_protocol = supports_getitem
            msg = "unsubscriptable-object"
        elif node.ctx == astroid.Store:
            supported_protocol = supports_setitem
            msg = "unsupported-assignment-operation"
        elif node.ctx == astroid.Del:
            supported_protocol = supports_delitem
            msg = "unsupported-delete-operation"

        if isinstance(node.value, astroid.SetComp):
            self.add_message(msg, args=node.value.as_string(), node=node.value)
            return

        if is_inside_abstract_class(node):
            return

        inferred = safe_infer(node.value)
        if inferred is None or inferred is astroid.Uninferable:
            return

        if not supported_protocol(inferred):
            self.add_message(msg, args=node.value.as_string(), node=node.value)
+
+ @check_messages("dict-items-missing-iter")
+ def visit_for(self, node):
+ if not isinstance(node.target, astroid.node_classes.Tuple):
+ # target is not a tuple
+ return
+ if not len(node.target.elts) == 2:
+ # target is not a tuple of two elements
+ return
+
+ iterable = node.iter
+ if not isinstance(iterable, astroid.node_classes.Name):
+ # it's not a bare variable
+ return
+
+ inferred = safe_infer(iterable)
+ if not inferred:
+ return
+ if not isinstance(inferred, astroid.node_classes.Dict):
+ # the iterable is not a dict
+ return
+
+ self.add_message("dict-iter-missing-items", node=node)
+
+
class IterableChecker(BaseChecker):
    """
    Checks for non-iterables used in an iterable context.
    Contexts include:
    - for-statement
    - starargs in function call
    - `yield from`-statement
    - list, dict and set comprehensions
    - generator expressions
    Also checks for non-mappings in function call kwargs.
    """

    __implements__ = (IAstroidChecker,)
    name = "typecheck"

    msgs = {
        "E1133": (
            "Non-iterable value %s is used in an iterating context",
            "not-an-iterable",
            "Used when a non-iterable value is used in place where "
            "iterable is expected",
        ),
        "E1134": (
            "Non-mapping value %s is used in a mapping context",
            "not-a-mapping",
            "Used when a non-mapping value is used in place where "
            "mapping is expected",
        ),
    }

    @staticmethod
    def _is_asyncio_coroutine(node):
        """Return True when *node* calls a function decorated with
        asyncio.coroutine."""
        # Only call expressions can produce asyncio coroutine objects.
        if not isinstance(node, astroid.Call):
            return False

        called = safe_infer(node.func)
        if not isinstance(called, astroid.FunctionDef):
            return False
        if not called.decorators:
            return False
        for decorator_node in called.decorators.nodes:
            decorator = safe_infer(decorator_node)
            if (
                isinstance(decorator, astroid.FunctionDef)
                and decorator.qname() == ASYNCIO_COROUTINE
            ):
                return True
        return False

    def _check_iterable(self, node, check_async=False):
        """Emit not-an-iterable when *node* cannot be iterated over."""
        if is_inside_abstract_class(node) or is_comprehension(node):
            return
        inferred = safe_infer(node)
        if inferred and not is_iterable(inferred, check_async=check_async):
            self.add_message("not-an-iterable", args=node.as_string(), node=node)

    def _check_mapping(self, node):
        """Emit not-a-mapping when *node* cannot serve as a **kwargs source."""
        if is_inside_abstract_class(node) or isinstance(node, astroid.DictComp):
            return
        inferred = safe_infer(node)
        if inferred is None or inferred is astroid.Uninferable:
            return
        if not is_mapping(inferred):
            self.add_message("not-a-mapping", args=node.as_string(), node=node)

    @check_messages("not-an-iterable")
    def visit_for(self, node):
        self._check_iterable(node.iter)

    @check_messages("not-an-iterable")
    def visit_asyncfor(self, node):
        self._check_iterable(node.iter, check_async=True)

    @check_messages("not-an-iterable")
    def visit_yieldfrom(self, node):
        # `yield from` over an asyncio coroutine is legitimate even though
        # the coroutine object itself is not a regular iterable.
        if self._is_asyncio_coroutine(node.value):
            return
        self._check_iterable(node.value)

    @check_messages("not-an-iterable", "not-a-mapping")
    def visit_call(self, node):
        for star_arg in node.starargs:
            self._check_iterable(star_arg.value)
        for double_star_arg in node.kwargs:
            self._check_mapping(double_star_arg.value)

    @check_messages("not-an-iterable")
    def visit_listcomp(self, node):
        # Every generator clause (`for ... in ...`) needs an iterable source.
        for generator in node.generators:
            self._check_iterable(generator.iter, check_async=generator.is_async)

    # Dict/set comprehensions and generator expressions share the exact same
    # generator-clause structure, so reuse the one implementation.
    visit_dictcomp = visit_listcomp
    visit_setcomp = visit_listcomp
    visit_generatorexp = visit_listcomp
+
+
def register(linter):
    """required method to auto register this checker """
    # Register both checkers defined in this module.
    for checker_class in (TypeChecker, IterableChecker):
        linter.register_checker(checker_class(linter))
diff --git a/venv/Lib/site-packages/pylint/checkers/utils.py b/venv/Lib/site-packages/pylint/checkers/utils.py
new file mode 100644
index 0000000..2a6820a
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/utils.py
@@ -0,0 +1,1253 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2007, 2009-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016-2017 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Brian C. Lane <bcl@redhat.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 ttenhoeve-aa <ttenhoeve@appannie.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.guinta@protonmail.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Brian Shaginaw <brian.shaginaw@warbyparker.com>
+# Copyright (c) 2018 Caio Carrara <ccarrara@redhat.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""some functions that may be useful for various checkers
+"""
+import builtins
+import itertools
+import numbers
+import re
+import string
+from functools import lru_cache, partial
+from typing import Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
+
+import astroid
+from astroid import bases as _bases
+from astroid import helpers, scoped_nodes
+from astroid.exceptions import _NonDeducibleTypeHierarchy
+
+import _string # pylint: disable=wrong-import-position, wrong-import-order
+
BUILTINS_NAME = builtins.__name__
# AST node classes representing the four kinds of comprehension.
COMP_NODE_TYPES = (
    astroid.ListComp,
    astroid.SetComp,
    astroid.DictComp,
    astroid.GeneratorExp,
)
# Module holding the standard exceptions in Python 3.
EXCEPTIONS_MODULE = "builtins"
# Qualified names of the ``abc`` decorators that mark methods abstract.
ABC_METHODS = {
    "abc.abstractproperty",
    "abc.abstractmethod",
    "abc.abstractclassmethod",
    "abc.abstractstaticmethod",
}
# Qualified names of the structural-typing Protocol base classes.
TYPING_PROTOCOLS = frozenset({"typing.Protocol", "typing_extensions.Protocol"})
# Special-method names used when checking runtime protocol support.
ITER_METHOD = "__iter__"
AITER_METHOD = "__aiter__"
NEXT_METHOD = "__next__"
GETITEM_METHOD = "__getitem__"
CLASS_GETITEM_METHOD = "__class_getitem__"
SETITEM_METHOD = "__setitem__"
DELITEM_METHOD = "__delitem__"
CONTAINS_METHOD = "__contains__"
KEYS_METHOD = "keys"
+# Dictionary which maps the number of expected parameters a
+# special method can have to a set of special methods.
+# The following keys are used to denote the parameters restrictions:
+#
+# * None: variable number of parameters
+# * number: exactly that number of parameters
+# * tuple: this are the odd ones. Basically it means that the function
+# can work with any number of arguments from that tuple,
+# although it's best to implement it in order to accept
+# all of them.
+_SPECIAL_METHODS_PARAMS = {
+ None: ("__new__", "__init__", "__call__"),
+ 0: (
+ "__del__",
+ "__repr__",
+ "__str__",
+ "__bytes__",
+ "__hash__",
+ "__bool__",
+ "__dir__",
+ "__len__",
+ "__length_hint__",
+ "__iter__",
+ "__reversed__",
+ "__neg__",
+ "__pos__",
+ "__abs__",
+ "__invert__",
+ "__complex__",
+ "__int__",
+ "__float__",
+ "__neg__",
+ "__pos__",
+ "__abs__",
+ "__complex__",
+ "__int__",
+ "__float__",
+ "__index__",
+ "__enter__",
+ "__aenter__",
+ "__getnewargs_ex__",
+ "__getnewargs__",
+ "__getstate__",
+ "__reduce__",
+ "__copy__",
+ "__unicode__",
+ "__nonzero__",
+ "__await__",
+ "__aiter__",
+ "__anext__",
+ "__fspath__",
+ ),
+ 1: (
+ "__format__",
+ "__lt__",
+ "__le__",
+ "__eq__",
+ "__ne__",
+ "__gt__",
+ "__ge__",
+ "__getattr__",
+ "__getattribute__",
+ "__delattr__",
+ "__delete__",
+ "__instancecheck__",
+ "__subclasscheck__",
+ "__getitem__",
+ "__missing__",
+ "__delitem__",
+ "__contains__",
+ "__add__",
+ "__sub__",
+ "__mul__",
+ "__truediv__",
+ "__floordiv__",
+ "__rfloordiv__",
+ "__mod__",
+ "__divmod__",
+ "__lshift__",
+ "__rshift__",
+ "__and__",
+ "__xor__",
+ "__or__",
+ "__radd__",
+ "__rsub__",
+ "__rmul__",
+ "__rtruediv__",
+ "__rmod__",
+ "__rdivmod__",
+ "__rpow__",
+ "__rlshift__",
+ "__rrshift__",
+ "__rand__",
+ "__rxor__",
+ "__ror__",
+ "__iadd__",
+ "__isub__",
+ "__imul__",
+ "__itruediv__",
+ "__ifloordiv__",
+ "__imod__",
+ "__ilshift__",
+ "__irshift__",
+ "__iand__",
+ "__ixor__",
+ "__ior__",
+ "__ipow__",
+ "__setstate__",
+ "__reduce_ex__",
+ "__deepcopy__",
+ "__cmp__",
+ "__matmul__",
+ "__rmatmul__",
+ "__div__",
+ ),
+ 2: ("__setattr__", "__get__", "__set__", "__setitem__", "__set_name__"),
+ 3: ("__exit__", "__aexit__"),
+ (0, 1): ("__round__",),
+}
+
+SPECIAL_METHODS_PARAMS = {
+ name: params
+ for params, methods in _SPECIAL_METHODS_PARAMS.items()
+ for name in methods # type: ignore
+}
+PYMETHODS = set(SPECIAL_METHODS_PARAMS)
+
+
class NoSuchArgumentError(Exception):
    """Raised when a requested argument cannot be found on a call node."""
+
+
def is_inside_except(node):
    """Returns true if node is inside the name of an except handler."""
    current = node
    # Climb until the direct parent is an ExceptHandler (or we fall off the tree).
    while current and not isinstance(current.parent, astroid.ExceptHandler):
        current = current.parent

    # Truthy only when the climbed-to node is the handler's bound name itself.
    # NOTE: returns the node (truthy) or None/False, not a strict bool.
    return current and current is current.parent.name
+
+
def is_inside_lambda(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* has a ``Lambda`` among its ancestors."""
    ancestor = node.parent
    found = False
    while ancestor is not None and not found:
        found = isinstance(ancestor, astroid.Lambda)
        ancestor = ancestor.parent
    return found
+
+
def get_all_elements(
    node: astroid.node_classes.NodeNG
) -> Iterable[astroid.node_classes.NodeNG]:
    """Recursively yield every atom contained in nested lists and tuples."""
    if not isinstance(node, (astroid.Tuple, astroid.List)):
        # Leaf node: yield it as-is.
        yield node
    else:
        for element in node.elts:
            yield from get_all_elements(element)
+
+
def clobber_in_except(
    node: astroid.node_classes.NodeNG
) -> Tuple[bool, Optional[Tuple[str, str]]]:
    """Checks if an assignment node in an except handler clobbers an existing
    variable.

    Returns (True, args for W0623) if assignment clobbers an existing variable,
    (False, None) otherwise.
    """
    if isinstance(node, astroid.AssignAttr):
        # Assigning to an attribute always overwrites whatever it held.
        return True, (node.attrname, "object %r" % (node.expr.as_string(),))
    if isinstance(node, astroid.AssignName):
        name = node.name
        if is_builtin(name):
            # Rebinding a builtin name always counts as clobbering.
            return True, (name, "builtins")

        # Look up earlier bindings of the name; bindings created by plain
        # assignments, augmented assignments or other except handlers are
        # not considered clobbered.
        stmts = node.lookup(name)[1]
        if stmts and not isinstance(
            stmts[0].assign_type(),
            (astroid.Assign, astroid.AugAssign, astroid.ExceptHandler),
        ):
            return True, (name, "outer scope (line %s)" % stmts[0].fromlineno)
    return False, None
+
+
def is_super(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* references the ``super`` builtin function."""
    return (
        getattr(node, "name", None) == "super"
        and node.root().name == BUILTINS_NAME
    )
+
+
def is_error(node: astroid.node_classes.NodeNG) -> bool:
    """Return true if the function contains at least one raise statement and
    no return statement anywhere in its body."""
    saw_raise = saw_return = False
    for child in node.nodes_of_class((astroid.Raise, astroid.Return)):
        saw_raise = saw_raise or isinstance(child, astroid.Raise)
        saw_return = saw_return or isinstance(child, astroid.Return)
    return saw_raise and not saw_return
+
+
# NOTE: from here on, ``builtins`` shadows the module with a snapshot of its
# namespace dict, so later ``name in builtins`` checks are plain dict lookups.
builtins = builtins.__dict__.copy()  # type: ignore
SPECIAL_BUILTINS = ("__builtins__",)  # '__path__', '__file__')
+
+
def is_builtin_object(node: astroid.node_classes.NodeNG) -> bool:
    """Returns True if the given node is an object from the __builtin__ module.

    Fix: previously a falsy *node* (e.g. None) was returned as-is, violating
    the annotated ``bool`` return type; truthiness is unchanged.
    """
    return bool(node) and node.root().name == BUILTINS_NAME
+
+
def is_builtin(name: str) -> bool:
    """return true if <name> could be considered as a builtin defined by python
    """
    # ``builtins`` here is the dict snapshot taken above, not the module.
    return name in builtins or name in SPECIAL_BUILTINS  # type: ignore
+
+
def is_defined_in_scope(
    var_node: astroid.node_classes.NodeNG,
    varname: str,
    scope: astroid.node_classes.NodeNG,
) -> bool:
    """Return True if *varname* is bound by *scope* itself: an assignment or
    nonlocal in an if-body, a comprehension/for/with/except target, a
    function or lambda argument, or the function's own name."""
    if isinstance(scope, astroid.If):
        # Direct assignments or nonlocal declarations in the if-body.
        for node in scope.body:
            if (
                isinstance(node, astroid.Assign)
                and any(
                    isinstance(target, astroid.AssignName) and target.name == varname
                    for target in node.targets
                )
            ) or (isinstance(node, astroid.Nonlocal) and varname in node.names):
                return True
    elif isinstance(scope, (COMP_NODE_TYPES, astroid.For)):
        # Comprehensions and for-loops bind their assignment targets.
        for ass_node in scope.nodes_of_class(astroid.AssignName):
            if ass_node.name == varname:
                return True
    elif isinstance(scope, astroid.With):
        for expr, ids in scope.items:
            if expr.parent_of(var_node):
                # The name is used inside the context expression itself,
                # before the ``as`` targets are bound.
                break
            if ids and isinstance(ids, astroid.AssignName) and ids.name == varname:
                return True
    elif isinstance(scope, (astroid.Lambda, astroid.FunctionDef)):
        if scope.args.is_argument(varname):
            # If the name is found inside a default value
            # of a function, then let the search continue
            # in the parent's tree.
            if scope.args.parent_of(var_node):
                try:
                    scope.args.default_value(varname)
                    scope = scope.parent
                    # NOTE(review): this recursive result is discarded and the
                    # function returns True below regardless — confirm intent.
                    is_defined_in_scope(var_node, varname, scope)
                except astroid.NoDefault:
                    pass
            return True
        if getattr(scope, "name", None) == varname:
            # A function's own name is visible inside its body.
            return True
    elif isinstance(scope, astroid.ExceptHandler):
        if isinstance(scope.name, astroid.AssignName):
            ass_node = scope.name
            if ass_node.name == varname:
                return True
    return False
+
+
def is_defined_before(var_node: astroid.node_classes.NodeNG) -> bool:
    """return True if the variable node is defined by a parent node (list,
    set, dict, or generator comprehension, lambda) or in a previous sibling
    node on the same line (statement_defining ; statement_using)
    """
    varname = var_node.name
    # First, walk up the ancestor chain looking for a scope binding the name.
    _node = var_node.parent
    while _node:
        if is_defined_in_scope(var_node, varname, _node):
            return True
        _node = _node.parent
    # possibly multiple statements on the same line using semi colon separator
    stmt = var_node.statement()
    _node = stmt.previous_sibling()
    lineno = stmt.fromlineno
    while _node and _node.fromlineno == lineno:
        # An earlier statement on the same physical line may assign the name...
        for assign_node in _node.nodes_of_class(astroid.AssignName):
            if assign_node.name == varname:
                return True
        # ...or import it (name[1] is the alias, name[0] the imported name).
        for imp_node in _node.nodes_of_class((astroid.ImportFrom, astroid.Import)):
            if varname in [name[1] or name[0] for name in imp_node.names]:
                return True
        _node = _node.previous_sibling()
    return False
+
+
def is_default_argument(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* is a Name used inside a function or lambda
    default argument value."""
    enclosing = node.scope()
    if not isinstance(enclosing, (astroid.FunctionDef, astroid.Lambda)):
        return False
    # Identity check: the node itself must appear in one of the defaults.
    return any(
        name_node is node
        for default in enclosing.args.defaults
        for name_node in default.nodes_of_class(astroid.Name)
    )
+
+
def is_func_decorator(node: astroid.node_classes.NodeNG) -> bool:
    """Return true if the name is used in function decorator"""
    stop_types = (
        astroid.Lambda,
        scoped_nodes.ComprehensionScope,
        scoped_nodes.ListComp,
    )
    current = node.parent
    while current is not None:
        if isinstance(current, astroid.Decorators):
            return True
        # Stop at statement boundaries and nested expression scopes.
        if current.is_statement or isinstance(current, stop_types):
            return False
        current = current.parent
    return False
+
+
def is_ancestor_name(
    frame: astroid.node_classes.NodeNG, node: astroid.node_classes.NodeNG
) -> bool:
    """Return True if *frame* is a class node whose bases subtree contains *node*."""
    bases = getattr(frame, "bases", None)
    if bases is None:
        # Not a class-like node.
        return False
    return any(node in base.nodes_of_class(astroid.Name) for base in bases)
+
+
def assign_parent(node: astroid.node_classes.NodeNG) -> astroid.node_classes.NodeNG:
    """Return the closest ancestor that is not an AssignName, Tuple or List node."""
    skipped_types = (astroid.AssignName, astroid.Tuple, astroid.List)
    current = node
    while current and isinstance(current, skipped_types):
        current = current.parent
    return current
+
+
def overrides_a_method(class_node: astroid.node_classes.NodeNG, name: str) -> bool:
    """Return True if *name* is a method overridden from an ancestor of *class_node*."""
    return any(
        name in ancestor and isinstance(ancestor[name], astroid.FunctionDef)
        for ancestor in class_node.ancestors()
    )
+
+
def check_messages(*messages: str) -> Callable:
    """Decorator storing the message ids a checker method is in charge of."""

    def _annotate(func):
        # The attribute is read later to know which methods handle which
        # enabled messages.
        func.checks_msgs = messages
        return func

    return _annotate
+
+
class IncompleteFormatString(Exception):
    """Raised when a format string ends in the middle of a format specifier."""
+
+
class UnsupportedFormatCharacter(Exception):
    """Raised when a format string uses an unsupported conversion character.

    The offending position in the string is available as ``index``.
    """

    def __init__(self, index):
        super().__init__(index)
        # Position of the unsupported conversion character in the string.
        self.index = index
+
+
def parse_format_string(
    format_string: str
) -> Tuple[Set[str], int, Dict[str, str], List[str]]:
    """Parses a %-style format string.

    Returns a tuple (keys, num_args, key_types, pos_types) where *keys* is the
    set of mapping keys in the format string, *num_args* is the number of
    positional arguments required, *key_types* maps each key to its conversion
    character and *pos_types* lists the conversion characters of positional
    specifiers. Raises IncompleteFormatString or UnsupportedFormatCharacter if
    a parse error occurs.
    """
    keys = set()
    key_types = dict()
    pos_types = []
    num_args = 0

    def next_char(i):
        # Advance one character, failing if the specifier is cut short.
        i += 1
        if i == len(format_string):
            raise IncompleteFormatString
        return (i, format_string[i])

    i = 0
    while i < len(format_string):
        char = format_string[i]
        if char == "%":
            i, char = next_char(i)
            # Parse the mapping key (optional).
            key = None
            if char == "(":
                # Keys may contain balanced parentheses, hence the depth count.
                depth = 1
                i, char = next_char(i)
                key_start = i
                while depth != 0:
                    if char == "(":
                        depth += 1
                    elif char == ")":
                        depth -= 1
                    i, char = next_char(i)
                key_end = i - 1
                key = format_string[key_start:key_end]

            # Parse the conversion flags (optional).
            while char in "#0- +":
                i, char = next_char(i)
            # Parse the minimum field width (optional).
            if char == "*":
                # ``*`` consumes one extra positional argument for the width.
                num_args += 1
                i, char = next_char(i)
            else:
                while char in string.digits:
                    i, char = next_char(i)
            # Parse the precision (optional).
            if char == ".":
                i, char = next_char(i)
                if char == "*":
                    # ``.*`` consumes one extra positional argument too.
                    num_args += 1
                    i, char = next_char(i)
                else:
                    while char in string.digits:
                        i, char = next_char(i)
            # Parse the length modifier (optional).
            if char in "hlL":
                i, char = next_char(i)
            # Parse the conversion type (mandatory).
            flags = "diouxXeEfFgGcrs%a"
            if char not in flags:
                raise UnsupportedFormatCharacter(i)
            if key:
                keys.add(key)
                key_types[key] = char
            elif char != "%":
                # ``%%`` is a literal percent sign, not an argument.
                num_args += 1
                pos_types.append(char)
        i += 1
    return keys, num_args, key_types, pos_types
+
+
def split_format_field_names(format_string) -> Tuple[str, Iterable[Tuple[bool, str]]]:
    """Split a PEP 3101 field name into its first part and an iterator of
    accessors, wrapping parse failures in IncompleteFormatString."""
    try:
        return _string.formatter_field_name_split(format_string)
    except ValueError:
        raise IncompleteFormatString()
+
+
def collect_string_fields(format_string) -> Iterable[Optional[str]]:
    """Yield every valid replacement-field name in a PEP 3101 format string,
    including fields nested inside format specs."""
    formatter = string.Formatter()
    try:
        for _literal, field_name, format_spec, conversion in formatter.parse(
            format_string
        ):
            if field_name is None and format_spec is None and conversion is None:
                # Pure literal text, not a replacement field.
                continue
            yield field_name
            if format_spec:
                # Format specs may themselves contain replacement fields.
                yield from collect_string_fields(format_spec)
    except ValueError as exc:
        # Probably the format string is invalid.
        if exc.args[0].startswith("cannot switch from manual"):
            # On Jython, parsing a string mixing manual and automatic
            # positions fails with a ValueError, while CPython simply
            # returns the fields and validates at run time. Yield two
            # mixed fields so the format-combined-specification check
            # still triggers.
            yield ""
            yield "1"
            return
        raise IncompleteFormatString(format_string)
+
+
def parse_format_method_string(
    format_string: str
) -> Tuple[List[Tuple[str, List[Tuple[bool, str]]]], int, int]:
    """
    Parses a PEP 3101 format string, returning a tuple of
    (keyword_arguments, implicit_pos_args_cnt, explicit_pos_args),
    where keyword_arguments is the set of mapping keys in the format string, implicit_pos_args_cnt
    is the number of arguments required by the format string and
    explicit_pos_args is the number of arguments passed with the position.
    """
    keyword_arguments = []
    implicit_pos_args_cnt = 0
    explicit_pos_args = set()
    for name in collect_string_fields(format_string):
        if name and str(name).isdigit():
            # Explicitly numbered field, e.g. "{0}".
            explicit_pos_args.add(str(name))
        elif name:
            # Named field, possibly with attribute/index accessors.
            keyname, fielditerator = split_format_field_names(name)
            if isinstance(keyname, numbers.Number):
                # In Python 2 it will return long which will lead
                # to different output between 2 and 3
                explicit_pos_args.add(str(keyname))
                keyname = int(keyname)
            try:
                keyword_arguments.append((keyname, list(fielditerator)))
            except ValueError:
                raise IncompleteFormatString()
        else:
            # Auto-numbered field "{}".
            implicit_pos_args_cnt += 1
    return keyword_arguments, implicit_pos_args_cnt, len(explicit_pos_args)
+
+
def is_attr_protected(attrname: str) -> bool:
    """Return True if the attribute name is "protected": a leading underscore,
    but neither the lone underscore nor a dunder name.

    Fix: the previous ``attrname[0]`` check raised IndexError on an empty
    string; ``startswith`` handles that case and returns False.
    """
    if not attrname.startswith("_") or attrname == "_":
        return False
    is_dunder = attrname.startswith("__") and attrname.endswith("__")
    return not is_dunder
+
+
def node_frame_class(node: astroid.node_classes.NodeNG) -> Optional[astroid.ClassDef]:
    """Return the class that is wrapping the given node.

    A class is returned only for nodes defined inside one (methods,
    staticmethods, classmethods); otherwise ``None``.
    """
    frame = node.frame()
    while frame is not None and not isinstance(frame, astroid.ClassDef):
        # Climb to the next enclosing frame, or give up at the top.
        frame = frame.parent.frame() if frame.parent is not None else None
    return frame
+
+
def is_attr_private(attrname: str) -> Optional[Match[str]]:
    """Check that attribute name is private (at least two leading underscores,
    at most one trailing underscore).

    Returns the ``re.Match`` for private names, else ``None``.
    """
    # Fix: the pattern was compiled on every call; ``re.match`` on a literal
    # pattern uses the re module's internal cache, so it is compiled once.
    return re.match(r"^_{2,}.*[^_]+_?$", attrname)
+
+
def get_argument_from_call(
    call_node: astroid.Call, position: int = None, keyword: str = None
) -> astroid.Name:
    """Returns the specified argument from a function call.

    :param astroid.Call call_node: Node representing a function call to check.
    :param int position: position of the argument.
    :param str keyword: the keyword of the argument.

    :returns: The node representing the argument, None if the argument is not found.
    :rtype: astroid.Name
    :raises ValueError: if both position and keyword are None.
    :raises NoSuchArgumentError: if no argument at the provided position or with
        the provided keyword.
    """
    if position is None and keyword is None:
        raise ValueError("Must specify at least one of: position or keyword.")
    if position is not None:
        try:
            return call_node.args[position]
        except IndexError:
            # Fall through to the keyword lookup, if one was requested.
            pass
    if keyword:
        for arg in call_node.keywords or ():
            if arg.arg == keyword:
                return arg.value
    raise NoSuchArgumentError
+
+
def inherit_from_std_ex(node: astroid.node_classes.NodeNG) -> bool:
    """Return true if the given class node is (or inherits from) the builtin
    Exception or BaseException."""
    ancestors = node.ancestors() if hasattr(node, "ancestors") else []
    return any(
        candidate.name in ("Exception", "BaseException")
        and candidate.root().name == EXCEPTIONS_MODULE
        for candidate in itertools.chain([node], ancestors)
    )
+
+
def error_of_type(handler: astroid.ExceptHandler, error_type) -> bool:
    """
    Check if the given exception handler catches
    the given error_type.

    The *handler* parameter is a node, representing an ExceptHandler node.
    The *error_type* can be an exception, such as AttributeError,
    the name of an exception, or it can be a tuple of errors.
    The function will return True if the handler catches any of the
    given errors.
    """

    def stringify_error(error):
        # Accept both exception classes and their names.
        return error if isinstance(error, str) else error.__name__

    if not isinstance(error_type, tuple):
        error_type = (error_type,)  # type: ignore
    expected_errors = {stringify_error(error) for error in error_type}  # type: ignore
    if not handler.type:
        # A bare ``except:`` catches everything.
        return True
    return handler.catch(expected_errors)
+
+
def decorated_with_property(node: astroid.FunctionDef) -> bool:
    """Detect if the given function node is decorated with a property."""
    decorator_nodes = node.decorators.nodes if node.decorators else ()
    for decorator in decorator_nodes:
        try:
            if _is_property_decorator(decorator):
                return True
        except astroid.InferenceError:
            # Failing to infer one decorator does not rule out the others.
            continue
    return False
+
+
def _is_property_kind(node, *kinds):
    """Return True if *node* is a method decorated with ``x.<kind>`` for one of
    the given kinds (e.g. "setter", "deleter")."""
    if not isinstance(node, (astroid.UnboundMethod, astroid.FunctionDef)):
        return False
    decorator_nodes = node.decorators.nodes if node.decorators else ()
    return any(
        isinstance(dec, astroid.Attribute) and dec.attrname in kinds
        for dec in decorator_nodes
    )
+
+
def is_property_setter(node: astroid.FunctionDef) -> bool:
    """Check if the given node is a property setter (``@x.setter``)."""
    return _is_property_kind(node, "setter")


def is_property_setter_or_deleter(node: astroid.FunctionDef) -> bool:
    """Check if the given node is either a property setter or a deleter
    (``@x.setter`` or ``@x.deleter``)."""
    return _is_property_kind(node, "setter", "deleter")
+
+
def _is_property_decorator(decorator: astroid.Name) -> bool:
    """Return True if *decorator* infers to the builtin ``property`` class or
    to a subclass of it."""

    def _is_builtin_property(klass):
        return klass.name == "property" and klass.root().name == BUILTINS_NAME

    for inferred in decorator.infer():
        if not isinstance(inferred, astroid.ClassDef):
            continue
        if _is_builtin_property(inferred):
            return True
        if any(_is_builtin_property(ancestor) for ancestor in inferred.ancestors()):
            return True
    return False
+
+
def decorated_with(
    func: Union[astroid.FunctionDef, astroid.BoundMethod, astroid.UnboundMethod],
    qnames: Iterable[str],
) -> bool:
    """Determine if the `func` node has a decorator with the qualified name `qname`."""
    decorators = func.decorators.nodes if func.decorators else []
    for decorator_node in decorators:
        if isinstance(decorator_node, astroid.Call):
            # We only want to infer the function name
            decorator_node = decorator_node.func
        try:
            # Precedence: (i is not None and i.qname() in qnames) or
            # (i.name in qnames).
            # NOTE(review): if infer() ever yields None the ``i.name``
            # fallback would raise AttributeError — presumably it only
            # yields nodes or Uninferable; verify.
            if any(
                i is not None and i.qname() in qnames or i.name in qnames
                for i in decorator_node.infer()
            ):
                return True
        except astroid.InferenceError:
            continue
    return False
+
+
@lru_cache(maxsize=1024)
def unimplemented_abstract_methods(
    node: astroid.node_classes.NodeNG, is_abstract_cb: astroid.FunctionDef = None
) -> Dict[str, astroid.node_classes.NodeNG]:
    """
    Get the unimplemented abstract methods for the given *node*.

    A method can be considered abstract if the callback *is_abstract_cb*
    returns a ``True`` value. The check defaults to verifying that
    a method is decorated with abstract methods.
    The function will work only for new-style classes. For old-style
    classes, it will simply return an empty dictionary.
    For the rest of them, it will return a dictionary of abstract method
    names and their inferred objects.

    NOTE(review): the lru_cache keys on (node, is_abstract_cb) and thus keeps
    up to 1024 class nodes alive for the lifetime of the process.
    """
    if is_abstract_cb is None:
        is_abstract_cb = partial(decorated_with, qnames=ABC_METHODS)
    visited = {}  # type: Dict[str, astroid.node_classes.NodeNG]
    try:
        # Walk the MRO from the most generic class to the most specific one,
        # so later (overriding) definitions win.
        mro = reversed(node.mro())
    except NotImplementedError:
        # Old style class, it will not have a mro.
        return {}
    except astroid.ResolveError:
        # Probably inconsistent hierarchy, don't try
        # to figure this out here.
        return {}
    for ancestor in mro:
        for obj in ancestor.values():
            inferred = obj
            if isinstance(obj, astroid.AssignName):
                inferred = safe_infer(obj)
                if not inferred:
                    # Might be an abstract function,
                    # but since we don't have enough information
                    # in order to take this decision, we're taking
                    # the *safe* decision instead.
                    if obj.name in visited:
                        del visited[obj.name]
                    continue
                if not isinstance(inferred, astroid.FunctionDef):
                    # Non-function override cancels the abstract requirement.
                    if obj.name in visited:
                        del visited[obj.name]
            if isinstance(inferred, astroid.FunctionDef):
                # It's critical to use the original name,
                # since after inferring, an object can be something
                # else than expected, as in the case of the
                # following assignment.
                #
                # class A:
                #     def keys(self): pass
                #     __iter__ = keys
                abstract = is_abstract_cb(inferred)
                if abstract:
                    visited[obj.name] = inferred
                elif not abstract and obj.name in visited:
                    # A concrete override implements the abstract method.
                    del visited[obj.name]
    return visited
+
+
def find_try_except_wrapper_node(
    node: astroid.node_classes.NodeNG
) -> Optional[Union[astroid.ExceptHandler, astroid.TryExcept]]:
    """Return the ExceptHandler or the TryExcept node in which the node is,
    or ``None`` when it is wrapped by neither."""
    current = node
    ignores = (astroid.ExceptHandler, astroid.TryExcept)
    # Climb until the direct parent is a wrapper (or we reach the top).
    while current and not isinstance(current.parent, ignores):
        current = current.parent

    if current and isinstance(current.parent, ignores):
        return current.parent
    return None
+
+
def is_from_fallback_block(node: astroid.node_classes.NodeNG) -> bool:
    """Check if the given node is from a fallback import block."""
    context = find_try_except_wrapper_node(node)
    if not context:
        return False

    if isinstance(context, astroid.ExceptHandler):
        # The node lives in a handler: the "other" branch is the try body.
        other_body = context.parent.body
        handlers = context.parent.handlers
    else:
        # The node lives in the try body: the "other" branches are all
        # handler bodies, flattened.
        other_body = itertools.chain.from_iterable(
            handler.body for handler in context.handlers
        )
        handlers = context.handlers

    # A fallback block either has imports on the other side, or a handler
    # that swallows ImportError.
    has_fallback_imports = any(
        isinstance(import_node, (astroid.ImportFrom, astroid.Import))
        for import_node in other_body
    )
    ignores_import_error = _except_handlers_ignores_exception(handlers, ImportError)
    return ignores_import_error or has_fallback_imports
+
+
def _except_handlers_ignores_exception(
    handlers: astroid.ExceptHandler, exception
) -> bool:
    """Return True if any of *handlers* would catch *exception*."""
    return any(error_of_type(handler, (exception,)) for handler in handlers)
+
+
def get_exception_handlers(
    node: astroid.node_classes.NodeNG, exception=Exception
) -> Optional[List[astroid.ExceptHandler]]:
    """Return the collections of handlers handling the exception in arguments.

    Args:
        node (astroid.NodeNG): A node that is potentially wrapped in a try except.
        exception (builtin.Exception or str): exception or name of the exception.

    Returns:
        list: the handlers handling the exception; an empty list when the node
        is not wrapped by a TryExcept or no handler matches.

    """
    context = find_try_except_wrapper_node(node)
    if isinstance(context, astroid.TryExcept):
        return [
            handler for handler in context.handlers if error_of_type(handler, exception)
        ]
    return []
+
+
def is_node_inside_try_except(node: astroid.Raise) -> bool:
    """Check if the node is directly under a Try/Except statement
    (but not under an ExceptHandler!).

    Args:
        node (astroid.Raise): the node raising the exception.

    Returns:
        bool: True if the node is inside a try/except statement, False otherwise.
    """
    wrapper = find_try_except_wrapper_node(node)
    return isinstance(wrapper, astroid.TryExcept)
+
+
def node_ignores_exception(
    node: astroid.node_classes.NodeNG, exception=Exception
) -> bool:
    """Check if the node is in a TryExcept which handles the given exception.

    If the exception is not given, the function is going to look for bare
    excepts.
    """
    # Handler nodes are always truthy, so any non-empty handler list means
    # the exception is dealt with.
    return bool(get_exception_handlers(node, exception))
+
+
def class_is_abstract(node: astroid.ClassDef) -> bool:
    """Return true if the given class node should be considered as an abstract
    class."""
    # Only methods defined directly on this class count (not inherited ones).
    return any(
        method.is_abstract(pass_is_abstract=False)
        for method in node.methods()
        if method.parent.frame() is node
    )
+
+
def _supports_protocol_method(value: astroid.node_classes.NodeNG, attr: str) -> bool:
    """Return True if *value* defines attribute *attr* in a way that could
    satisfy a protocol."""
    try:
        attributes = value.getattr(attr)
    except astroid.NotFoundError:
        return False

    first = attributes[0]
    if isinstance(first, astroid.AssignName):
        # The attribute was bound by assignment; a constant value
        # (e.g. ``__iter__ = None``) cannot implement the protocol —
        # presumably this guards against such disabling assignments; verify.
        if isinstance(first.parent.value, astroid.Const):
            return False
    return True
+
+
def is_comprehension(node: astroid.node_classes.NodeNG) -> bool:
    """Return True for any comprehension node (list/set/dict/generator)."""
    return isinstance(
        node,
        (astroid.ListComp, astroid.SetComp, astroid.DictComp, astroid.GeneratorExp),
    )
+
+
def _supports_mapping_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Mappings must provide both ``__getitem__`` and ``keys``."""
    required = (GETITEM_METHOD, KEYS_METHOD)
    return all(_supports_protocol_method(value, method) for method in required)


def _supports_membership_test_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Membership tests (``in``) rely on ``__contains__``."""
    return _supports_protocol_method(value, CONTAINS_METHOD)


def _supports_iteration_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Iteration works through ``__iter__`` or the legacy ``__getitem__``."""
    candidates = (ITER_METHOD, GETITEM_METHOD)
    return any(_supports_protocol_method(value, method) for method in candidates)


def _supports_async_iteration_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Async iteration requires ``__aiter__``."""
    return _supports_protocol_method(value, AITER_METHOD)


def _supports_getitem_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Subscript reads require ``__getitem__``."""
    return _supports_protocol_method(value, GETITEM_METHOD)


def _supports_setitem_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Subscript writes require ``__setitem__``."""
    return _supports_protocol_method(value, SETITEM_METHOD)


def _supports_delitem_protocol(value: astroid.node_classes.NodeNG) -> bool:
    """Subscript deletes require ``__delitem__``."""
    return _supports_protocol_method(value, DELITEM_METHOD)
+
+
+def _is_abstract_class_name(name: str) -> bool:
+ lname = name.lower()
+ is_mixin = lname.endswith("mixin")
+ is_abstract = lname.startswith("abstract")
+ is_base = lname.startswith("base") or lname.endswith("base")
+ return is_mixin or is_abstract or is_base
+
+
def is_inside_abstract_class(node: astroid.node_classes.NodeNG) -> bool:
    """Return True if *node* is (transitively) inside a class that is abstract
    or merely *named* like an abstract/base/mixin class."""
    current = node
    while current is not None:
        if isinstance(current, astroid.ClassDef):
            if class_is_abstract(current):
                return True
            class_name = getattr(current, "name", None)
            if class_name is not None and _is_abstract_class_name(class_name):
                return True
        current = current.parent
    return False
+
+
def _supports_protocol(
    value: astroid.node_classes.NodeNG, protocol_callback: astroid.FunctionDef
) -> bool:
    """Return True if *value* satisfies the protocol probed by
    *protocol_callback*, being permissive when inference is incomplete."""
    if isinstance(value, astroid.ClassDef):
        if not has_known_bases(value):
            # Unknown bases: assume support rather than emit false positives.
            return True
        # classobj can only be iterable if it has an iterable metaclass
        meta = value.metaclass()
        if meta is not None:
            if protocol_callback(meta):
                return True
    if isinstance(value, astroid.BaseInstance):
        if not has_known_bases(value):
            return True
        if value.has_dynamic_getattr():
            # __getattr__/__getattribute__ may provide anything.
            return True
        if protocol_callback(value):
            return True

    if (
        isinstance(value, _bases.Proxy)
        and isinstance(value._proxied, astroid.BaseInstance)
        and has_known_bases(value._proxied)
    ):
        # Unwrap proxies (e.g. bound methods' underlying instance).
        value = value._proxied
        return protocol_callback(value)

    return False
+
+
def is_iterable(value: astroid.node_classes.NodeNG, check_async: bool = False) -> bool:
    """Return True if *value* supports (async) iteration."""
    protocol_check = (
        _supports_async_iteration_protocol
        if check_async
        else _supports_iteration_protocol
    )
    return _supports_protocol(value, protocol_check)
+
+
def is_mapping(value: astroid.node_classes.NodeNG) -> bool:
    """Return True if *value* can be used as a mapping (e.g. ``**`` unpacking)."""
    return _supports_protocol(value, _supports_mapping_protocol)


def supports_membership_test(value: astroid.node_classes.NodeNG) -> bool:
    """Return True if ``x in value`` is valid: __contains__ or plain iteration."""
    if _supports_protocol(value, _supports_membership_test_protocol):
        return True
    return is_iterable(value)


def supports_getitem(value: astroid.node_classes.NodeNG) -> bool:
    """Return True if *value* supports subscripting, including
    ``__class_getitem__`` on classes."""
    if isinstance(value, astroid.ClassDef) and _supports_protocol_method(
        value, CLASS_GETITEM_METHOD
    ):
        return True
    return _supports_protocol(value, _supports_getitem_protocol)


def supports_setitem(value: astroid.node_classes.NodeNG) -> bool:
    """Return True if ``value[x] = y`` is supported."""
    return _supports_protocol(value, _supports_setitem_protocol)


def supports_delitem(value: astroid.node_classes.NodeNG) -> bool:
    """Return True if ``del value[x]`` is supported."""
    return _supports_protocol(value, _supports_delitem_protocol)
+
+
@lru_cache(maxsize=1024)
def safe_infer(
    node: astroid.node_classes.NodeNG, context=None
) -> Optional[astroid.node_classes.NodeNG]:
    """Return the inferred value for the given node.

    Return None if inference failed or if there is some ambiguity (more than
    one node has been inferred).

    NOTE(review): the lru_cache keys on (node, context) and therefore keeps
    up to 1024 AST nodes alive for the life of the process.
    """
    try:
        inferit = node.infer(context=context)
        value = next(inferit)
    except astroid.InferenceError:
        return None
    try:
        # A second successful inference means the result is ambiguous.
        next(inferit)
        return None  # None if there is ambiguity on the inferred node
    except astroid.InferenceError:
        return None  # there is some kind of ambiguity
    except StopIteration:
        # Exactly one inferred value: unambiguous.
        return value
+
+
def has_known_bases(klass: astroid.ClassDef, context=None) -> bool:
    """Return true if all base classes of a class could be inferred.

    The answer is memoized on the node itself as ``_all_bases_known``.
    """
    cached = getattr(klass, "_all_bases_known", None)
    if cached is not None:
        return cached
    all_known = True
    for base in klass.bases:
        inferred_base = safe_infer(base, context=context)
        if (
            not isinstance(inferred_base, astroid.ClassDef)
            or inferred_base is klass
            or not has_known_bases(inferred_base, context=context)
        ):
            all_known = False
            break
    klass._all_bases_known = all_known
    return all_known
+
+
+def is_none(node: astroid.node_classes.NodeNG) -> bool:
+ return (
+ node is None
+ or (isinstance(node, astroid.Const) and node.value is None)
+ or (isinstance(node, astroid.Name) and node.name == "None")
+ )
+
+
def node_type(node: astroid.node_classes.NodeNG) -> Optional[type]:
    """Return the inferred type for `node`

    If there is more than one possible type, or if inferred type is Uninferable or None,
    return None
    """
    # Collect distinct inferred values; bail out as soon as a second distinct
    # one appears, since we only handle unambiguous assignments for now.
    seen = set()
    try:
        for inferred in node.infer():
            if inferred == astroid.Uninferable or is_none(inferred):
                continue
            seen.add(inferred)
            if len(seen) > 1:
                return None
    except astroid.InferenceError:
        return None
    if not seen:
        return None
    return seen.pop()
+
+
def is_registered_in_singledispatch_function(node: astroid.FunctionDef) -> bool:
    """Check if *node* is registered (via ``@func.register``) on a
    functools.singledispatch function."""
    singledispatch_qnames = (
        "functools.singledispatch",
        "singledispatch.singledispatch",
    )

    if not isinstance(node, astroid.FunctionDef):
        return False

    if not node.decorators:
        return False
    for decorator_node in node.decorators.nodes:
        # func.register are function calls
        if not isinstance(decorator_node, astroid.Call):
            continue

        called = decorator_node.func
        if not isinstance(called, astroid.Attribute) or called.attrname != "register":
            continue

        try:
            dispatch_func = next(called.expr.infer())
        except astroid.InferenceError:
            continue

        if isinstance(dispatch_func, astroid.FunctionDef):
            # pylint: disable=redundant-keyword-arg; some flow inference goes wrong here
            return decorated_with(dispatch_func, singledispatch_qnames)

    return False
+
+
+def get_node_last_lineno(node: astroid.node_classes.NodeNG) -> int:
+ """
+ Get the last lineno of the given node. For a simple statement this will just be node.lineno,
+ but for a node that has child statements (e.g. a method) this will be the lineno of the last
+ child statement recursively.
+ """
+ # 'finalbody' is always the last clause in a try statement, if present
+ if getattr(node, "finalbody", False):
+ return get_node_last_lineno(node.finalbody[-1])
+ # For if, while, and for statements 'orelse' is always the last clause.
+ # For try statements 'orelse' is the last in the absence of a 'finalbody'
+ if getattr(node, "orelse", False):
+ return get_node_last_lineno(node.orelse[-1])
+ # try statements have the 'handlers' last if there is no 'orelse' or 'finalbody'
+ if getattr(node, "handlers", False):
+ return get_node_last_lineno(node.handlers[-1])
+ # All compound statements have a 'body'
+ if getattr(node, "body", False):
+ return get_node_last_lineno(node.body[-1])
+ # Not a compound statement
+ return node.lineno
+
+
+def is_postponed_evaluation_enabled(node: astroid.node_classes.NodeNG) -> bool:
+ """Check if the postponed evaluation of annotations is enabled"""
+ name = "annotations"
+ module = node.root()
+ stmt = module.locals.get(name)
+ return (
+ stmt
+ and isinstance(stmt[0], astroid.ImportFrom)
+ and stmt[0].modname == "__future__"
+ )
+
+
def is_subclass_of(child: astroid.ClassDef, parent: astroid.ClassDef) -> bool:
    """
    Check if first node is a subclass of second node.
    :param child: Node to check for subclass.
    :param parent: Node to check for superclass.
    :returns: True if child is derived from parent. False otherwise.
    """
    if not (isinstance(child, astroid.ClassDef) and isinstance(parent, astroid.ClassDef)):
        return False

    for ancestor in child.ancestors():
        try:
            is_parent_type = helpers.is_subtype(ancestor, parent)
        except _NonDeducibleTypeHierarchy:
            # The hierarchy cannot be decided for this ancestor; keep looking.
            continue
        if is_parent_type:
            return True
    return False
+
+
+@lru_cache(maxsize=1024)
+def is_overload_stub(node: astroid.node_classes.NodeNG) -> bool:
+ """Check if a node if is a function stub decorated with typing.overload.
+
+ :param node: Node to check.
+ :returns: True if node is an overload function stub. False otherwise.
+ """
+ decorators = getattr(node, "decorators", None)
+ return bool(decorators and decorated_with(node, ["typing.overload", "overload"]))
+
+
def is_protocol_class(cls: astroid.node_classes.NodeNG) -> bool:
    """Check if the given node represents a protocol class

    :param cls: The node to check
    :returns: True if the node is a typing protocol class, false otherwise.
    """
    if not isinstance(cls, astroid.ClassDef):
        return False

    # Use .ancestors() since not all protocol classes can have
    # their mro deduced.
    ancestor_qnames = (ancestor.qname() for ancestor in cls.ancestors())
    return any(qname in TYPING_PROTOCOLS for qname in ancestor_qnames)
diff --git a/venv/Lib/site-packages/pylint/checkers/variables.py b/venv/Lib/site-packages/pylint/checkers/variables.py
new file mode 100644
index 0000000..e13f9b5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/checkers/variables.py
@@ -0,0 +1,1987 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
+# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
+# Copyright (c) 2011-2014, 2017 Google, Inc.
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
+# Copyright (c) 2015 Radu Ciorba <radu@devrandom.ro>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016, 2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
+# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Grant Welch <gwelch925+github@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Dan Garrette <dhgarrette@gmail.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.guinta@protonmail.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Marianna Polatoglou <mpolatoglou@bloomberg.net>
+# Copyright (c) 2018 mar-chi-pan <mar.polatoglou@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""variables checkers for Python code
+"""
+import collections
+import copy
+import itertools
+import os
+import re
+from functools import lru_cache
+
+import astroid
+from astroid import decorators, modutils, objects
+from astroid.context import InferenceContext
+
+from pylint.checkers import BaseChecker, utils
+from pylint.checkers.utils import is_postponed_evaluation_enabled
+from pylint.interfaces import HIGH, INFERENCE, INFERENCE_FAILURE, IAstroidChecker
+from pylint.utils import get_global_option
+
# Dunder names such as ``__init__`` (exactly two leading/trailing underscores).
SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
FUTURE = "__future__"
# regexp for ignored argument name
IGNORED_ARGUMENT_NAMES = re.compile("_.*|^ignored_|^unused_")
# In Python 3.7 abc has a Python implementation which is preferred
# by astroid. Unfortunately this also messes up our explicit checks
# for `abc`
METACLASS_NAME_TRANSFORMS = {"_py_abc": "abc"}
# Test expressions recognized as ``if TYPE_CHECKING:`` guards.
TYPING_TYPE_CHECKS_GUARDS = frozenset({"typing.TYPE_CHECKING", "TYPE_CHECKING"})
BUILTIN_RANGE = "builtins.range"
TYPING_MODULE = "typing"
# Names exported by the ``typing`` module; used to recognize typing-only uses.
TYPING_NAMES = frozenset(
    {
        "Any",
        "Callable",
        "ClassVar",
        "Generic",
        "Optional",
        "Tuple",
        "Type",
        "TypeVar",
        "Union",
        "AbstractSet",
        "ByteString",
        "Container",
        "ContextManager",
        "Hashable",
        "ItemsView",
        "Iterable",
        "Iterator",
        "KeysView",
        "Mapping",
        "MappingView",
        "MutableMapping",
        "MutableSequence",
        "MutableSet",
        "Sequence",
        "Sized",
        "ValuesView",
        "Awaitable",
        "AsyncIterator",
        "AsyncIterable",
        "Coroutine",
        "Collection",
        "AsyncGenerator",
        "AsyncContextManager",
        "Reversible",
        "SupportsAbs",
        "SupportsBytes",
        "SupportsComplex",
        "SupportsFloat",
        "SupportsInt",
        "SupportsRound",
        "Counter",
        "Deque",
        "Dict",
        "DefaultDict",
        "List",
        "Set",
        "FrozenSet",
        "NamedTuple",
        "Generator",
        "AnyStr",
        "Text",
        "Pattern",
    }
)
+
+
def _is_from_future_import(stmt, name):
    """Check if the name is a future import from another module."""
    try:
        imported_module = stmt.do_import_module(stmt.modname)
    except astroid.AstroidBuildingException:
        # The imported module could not be built; no answer either way.
        return None

    for local_node in imported_module.locals.get(name, []):
        is_future = (
            isinstance(local_node, astroid.ImportFrom)
            and local_node.modname == FUTURE
        )
        if is_future:
            return True
    return None
+
+
def in_for_else_branch(parent, stmt):
    """Returns True if stmt in inside the else branch for a parent For stmt."""
    if not isinstance(parent, astroid.For):
        return False
    return any(
        else_stmt.parent_of(stmt) or else_stmt == stmt for else_stmt in parent.orelse
    )
+
+
@lru_cache(maxsize=1000)
def overridden_method(klass, name):
    """get overridden method if any"""
    try:
        ancestor = next(klass.local_attr_ancestors(name))
    except (StopIteration, KeyError):
        return None
    try:
        candidate = ancestor[name]
    except KeyError:
        # We have found an ancestor defining <name> but it's not in the local
        # dictionary. This may happen with astroid built from living objects.
        return None
    return candidate if isinstance(candidate, astroid.FunctionDef) else None
+
+
+def _get_unpacking_extra_info(node, inferred):
+ """return extra information to add to the message for unpacking-non-sequence
+ and unbalanced-tuple-unpacking errors
+ """
+ more = ""
+ inferred_module = inferred.root().name
+ if node.root().name == inferred_module:
+ if node.lineno == inferred.lineno:
+ more = " %s" % inferred.as_string()
+ elif inferred.lineno:
+ more = " defined at line %s" % inferred.lineno
+ elif inferred.lineno:
+ more = " defined at line %s of %s" % (inferred.lineno, inferred_module)
+ return more
+
+
def _detect_global_scope(node, frame, defframe):
    """ Detect that the given frames shares a global
    scope.

    Two frames shares a global scope when neither
    of them are hidden under a function scope, as well
    as any of parent scope of them, until the root scope.
    In this case, depending from something defined later on
    will not work, because it is still undefined.

    Example:
        class A:
            # B has the same global scope as `C`, leading to a NameError.
            class B(C): ...
        class C: ...

    """
    def_scope = scope = None
    if frame and frame.parent:
        scope = frame.parent.scope()
    if defframe and defframe.parent:
        def_scope = defframe.parent.scope()
    if isinstance(frame, astroid.FunctionDef):
        # If the parent of the current node is a
        # function, then it can be under its scope
        # (defined in, which doesn't concern us) or
        # the `->` part of annotations. The same goes
        # for annotations of function arguments, they'll have
        # their parent the Arguments node.
        if not isinstance(node.parent, (astroid.FunctionDef, astroid.Arguments)):
            return False
    elif any(
        not isinstance(f, (astroid.ClassDef, astroid.Module)) for f in (frame, defframe)
    ):
        # Not interested in other frames, since they are already
        # not in a global scope.
        return False

    break_scopes = []
    for current_scope in (scope, def_scope):
        # Look for parent scopes. If there is anything different
        # than a module or a class scope, then they frames don't
        # share a global scope.
        parent_scope = current_scope
        while parent_scope:
            if not isinstance(parent_scope, (astroid.ClassDef, astroid.Module)):
                # Remember the non-global scope that broke the chain.
                break_scopes.append(parent_scope)
                break
            if parent_scope.parent:
                parent_scope = parent_scope.parent.scope()
            else:
                break
    if break_scopes and len(set(break_scopes)) != 1:
        # Store different scopes than expected.
        # If the stored scopes are, in fact, the very same, then it means
        # that the two frames (frame and defframe) shares the same scope,
        # and we could apply our lineno analysis over them.
        # For instance, this works when they are inside a function, the node
        # that uses a definition and the definition itself.
        return False
    # At this point, we are certain that frame and defframe shares a scope
    # and the definition of the first depends on the second.
    return frame.lineno < defframe.lineno
+
+
def _infer_name_module(node, name):
    """Infer *node* with the lookup restricted to the given *name*."""
    lookup_context = InferenceContext()
    lookup_context.lookupname = name
    return node.infer(lookup_context, asname=False)
+
+
def _fix_dot_imports(not_consumed):
    """ Try to fix imports with multiple dots, by returning a dictionary
    with the import names expanded. The function unflattens root imports,
    like 'xml' (when we have both 'xml.etree' and 'xml.sax'), to 'xml.etree'
    and 'xml.sax' respectively.
    """
    names = {}
    for name, stmts in not_consumed.items():
        # A name that is augmented-assigned somewhere is a real use; skip it.
        if any(
            isinstance(stmt, astroid.AssignName)
            and isinstance(stmt.assign_type(), astroid.AugAssign)
            for stmt in stmts
        ):
            continue
        for stmt in stmts:
            if not isinstance(stmt, (astroid.ImportFrom, astroid.Import)):
                continue
            for imports in stmt.names:
                second_name = None
                # ``imports`` is an (import-name, alias) pair.
                import_module_name = imports[0]
                if import_module_name == "*":
                    # In case of wildcard imports,
                    # pick the name from inside the imported module.
                    second_name = name
                else:
                    name_matches_dotted_import = False
                    if (
                        import_module_name.startswith(name)
                        and import_module_name.find(".") > -1
                    ):
                        name_matches_dotted_import = True

                    # NOTE: ``name in imports`` tests membership in the
                    # (name, alias) tuple, i.e. matches either component.
                    if name_matches_dotted_import or name in imports:
                        # Most likely something like 'xml.etree',
                        # which will appear in the .locals as 'xml'.
                        # Only pick the name if it wasn't consumed.
                        second_name = import_module_name
                if second_name and second_name not in names:
                    names[second_name] = stmt
    return sorted(names.items(), key=lambda a: a[1].fromlineno)
+
+
def _find_frame_imports(name, frame):
    """
    Detect imports in the frame, with the required
    *name*. Such imports can be considered assignments.
    Returns True if an import for the given name was found.
    """
    for import_node in frame.nodes_of_class((astroid.Import, astroid.ImportFrom)):
        for import_name, import_alias in import_node.names:
            # An alias shadows the original name, so it is the only binding
            # the import creates; otherwise the plain import name counts.
            if import_alias:
                if import_alias == name:
                    return True
            elif import_name and import_name == name:
                return True
    return None
+
+
+def _import_name_is_global(stmt, global_names):
+ for import_name, import_alias in stmt.names:
+ # If the import uses an alias, check only that.
+ # Otherwise, check only the import name.
+ if import_alias:
+ if import_alias in global_names:
+ return True
+ elif import_name in global_names:
+ return True
+ return False
+
+
+def _flattened_scope_names(iterator):
+ values = (set(stmt.names) for stmt in iterator)
+ return set(itertools.chain.from_iterable(values))
+
+
def _assigned_locally(name_node):
    """
    Checks if name_node has corresponding assign statement in same scope
    """
    for assign_node in name_node.scope().nodes_of_class(astroid.AssignName):
        if assign_node.name == name_node.name:
            return True
    return False
+
+
def _is_type_checking_import(node):
    """Return True if *node* sits directly under an ``if TYPE_CHECKING:`` guard."""
    guard = node.parent
    if not isinstance(guard, astroid.If):
        return False
    return guard.test.as_string() in TYPING_TYPE_CHECKS_GUARDS
+
+
def _has_locals_call_after_node(stmt, scope):
    """Return True if ``locals()`` is called in *scope* on a line after *stmt*."""
    # Nested scopes and imports cannot contain a relevant locals() call.
    skip_nodes = (
        astroid.FunctionDef,
        astroid.ClassDef,
        astroid.Import,
        astroid.ImportFrom,
    )
    for call in scope.nodes_of_class(astroid.Call, skip_klass=skip_nodes):
        inferred = utils.safe_infer(call.func)
        calls_locals_builtin = (
            utils.is_builtin_object(inferred)
            and getattr(inferred, "name", None) == "locals"
        )
        if calls_locals_builtin and stmt.lineno < call.lineno:
            return True
    return False
+
+
# Message table for this checker: message-id -> (template, symbolic-name,
# description[, extra options such as old_names]).  E-prefixed ids are errors,
# W-prefixed ids are warnings.
MSGS = {
    "E0601": (
        "Using variable %r before assignment",
        "used-before-assignment",
        "Used when a local variable is accessed before its assignment.",
    ),
    "E0602": (
        "Undefined variable %r",
        "undefined-variable",
        "Used when an undefined variable is accessed.",
    ),
    "E0603": (
        "Undefined variable name %r in __all__",
        "undefined-all-variable",
        "Used when an undefined variable name is referenced in __all__.",
    ),
    "E0604": (
        "Invalid object %r in __all__, must contain only strings",
        "invalid-all-object",
        "Used when an invalid (non-string) object occurs in __all__.",
    ),
    "E0611": (
        "No name %r in module %r",
        "no-name-in-module",
        "Used when a name cannot be found in a module.",
    ),
    "W0601": (
        "Global variable %r undefined at the module level",
        "global-variable-undefined",
        'Used when a variable is defined through the "global" statement '
        "but the variable is not defined in the module scope.",
    ),
    "W0602": (
        "Using global for %r but no assignment is done",
        "global-variable-not-assigned",
        'Used when a variable is defined through the "global" statement '
        "but no assignment to this variable is done.",
    ),
    "W0603": (
        "Using the global statement",  # W0121
        "global-statement",
        'Used when you use the "global" statement to update a global '
        "variable. Pylint just try to discourage this "
        "usage. That doesn't mean you cannot use it !",
    ),
    "W0604": (
        "Using the global statement at the module level",  # W0103
        "global-at-module-level",
        'Used when you use the "global" statement at the module level '
        "since it has no effect",
    ),
    "W0611": (
        "Unused %s",
        "unused-import",
        "Used when an imported module or variable is not used.",
    ),
    "W0612": (
        "Unused variable %r",
        "unused-variable",
        "Used when a variable is defined but not used.",
    ),
    "W0613": (
        "Unused argument %r",
        "unused-argument",
        "Used when a function or method argument is not used.",
    ),
    "W0614": (
        "Unused import %s from wildcard import",
        "unused-wildcard-import",
        "Used when an imported module or variable is not used from a "
        "`'from X import *'` style import.",
    ),
    "W0621": (
        "Redefining name %r from outer scope (line %s)",
        "redefined-outer-name",
        "Used when a variable's name hides a name defined in the outer scope.",
    ),
    "W0622": (
        "Redefining built-in %r",
        "redefined-builtin",
        "Used when a variable or function override a built-in.",
    ),
    "W0623": (
        "Redefining name %r from %s in exception handler",
        "redefine-in-handler",
        "Used when an exception handler assigns the exception to an existing name",
    ),
    "W0631": (
        "Using possibly undefined loop variable %r",
        "undefined-loop-variable",
        "Used when a loop variable (i.e. defined by a for loop or "
        "a list comprehension or a generator expression) is used outside "
        "the loop.",
    ),
    "W0632": (
        "Possible unbalanced tuple unpacking with "
        "sequence%s: "
        "left side has %d label(s), right side has %d value(s)",
        "unbalanced-tuple-unpacking",
        "Used when there is an unbalanced tuple unpacking in assignment",
        {"old_names": [("E0632", "old-unbalanced-tuple-unpacking")]},
    ),
    "E0633": (
        "Attempting to unpack a non-sequence%s",
        "unpacking-non-sequence",
        "Used when something which is not "
        "a sequence is used in an unpack assignment",
        {"old_names": [("W0633", "old-unpacking-non-sequence")]},
    ),
    "W0640": (
        "Cell variable %s defined in loop",
        "cell-var-from-loop",
        "A variable used in a closure is defined in a loop. "
        "This will result in all closures using the same value for "
        "the closed-over variable.",
    ),
    "W0641": (
        "Possibly unused variable %r",
        "possibly-unused-variable",
        "Used when a variable is defined but might not be used. "
        "The possibility comes from the fact that locals() might be used, "
        "which could consume or not the said variable",
    ),
    "W0642": (
        "Invalid assignment to %s in method",
        "self-cls-assignment",
        "Invalid assignment to self or cls in instance or class method "
        "respectively.",
    ),
}
+
+
# Immutable triple backing NamesConsumer: names still to consume, names
# already consumed, and the kind of scope ("module", "class", ...).
ScopeConsumer = collections.namedtuple(
    "ScopeConsumer", "to_consume consumed scope_type"
)


class NamesConsumer:
    """
    A simple class to handle consumed, to consume and scope type info of node locals
    """

    def __init__(self, node, scope_type):
        self._atomic = ScopeConsumer(copy.copy(node.locals), {}, scope_type)

    def __repr__(self):
        def _render(mapping):
            return ", ".join(
                "{}->{}".format(key, val) for key, val in mapping.items()
            )

        return (
            "\nto_consume : {:s}\n".format(_render(self._atomic.to_consume))
            + "consumed : {:s}\n".format(_render(self._atomic.consumed))
            + "scope_type : {:s}\n".format(self._atomic.scope_type)
        )

    def __iter__(self):
        return iter(self._atomic)

    @property
    def to_consume(self):
        return self._atomic.to_consume

    @property
    def consumed(self):
        return self._atomic.consumed

    @property
    def scope_type(self):
        return self._atomic.scope_type

    def mark_as_consumed(self, name, new_node):
        """
        Mark the name as consumed and delete it from
        the to_consume dictionary
        """
        self.consumed[name] = new_node
        del self.to_consume[name]

    def get_next_to_consume(self, node):
        """Return the consumable definitions for *node*'s name, if any.

        A name defined by the very assignment statement that uses it does not
        count as consumable.
        """
        name = node.name
        candidates = self.to_consume.get(name)
        if (
            candidates
            and isinstance(node.parent, astroid.Assign)
            and node.parent == candidates[0].parent
        ):
            lhs = candidates[0].parent.targets[0]
            if lhs.name == name:  # this name is defined in this very statement
                return None
        return candidates
+
+
+# pylint: disable=too-many-public-methods
+class VariablesChecker(BaseChecker):
+ """checks for
+ * unused variables / imports
+ * undefined variables
+ * redefinition of variable from builtins or from an outer scope
+ * use of variable before assignment
+ * __all__ consistency
+ * self/cls assignment
+ """
+
    # Checker registration metadata consumed by the pylint plugin machinery.
    __implements__ = IAstroidChecker

    name = "variables"
    msgs = MSGS
    priority = -1
    # User-configurable options for this checker (name, optparse-style spec).
    options = (
        (
            "init-import",
            {
                "default": 0,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether we should check for unused import in "
                "__init__ files.",
            },
        ),
        (
            "dummy-variables-rgx",
            {
                "default": "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_",
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "A regular expression matching the name of dummy "
                "variables (i.e. expected to not be used).",
            },
        ),
        (
            "additional-builtins",
            {
                "default": (),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "List of additional names supposed to be defined in "
                "builtins. Remember that you should avoid defining new builtins "
                "when possible.",
            },
        ),
        (
            "callbacks",
            {
                "default": ("cb_", "_cb"),
                "type": "csv",
                "metavar": "<callbacks>",
                "help": "List of strings which can identify a callback "
                "function by name. A callback name must start or "
                "end with one of those strings.",
            },
        ),
        (
            "redefining-builtins-modules",
            {
                "default": (
                    "six.moves",
                    "past.builtins",
                    "future.builtins",
                    "builtins",
                    "io",
                ),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "List of qualified module names which can have objects "
                "that can redefine builtins.",
            },
        ),
        (
            "ignored-argument-names",
            {
                "default": IGNORED_ARGUMENT_NAMES,
                "type": "regexp",
                "metavar": "<regexp>",
                "help": "Argument names that match this expression will be "
                "ignored. Default to name with leading underscore.",
            },
        ),
        (
            "allow-global-unused-variables",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether unused global variables should be treated as a violation.",
            },
        ),
    )
+
+ def __init__(self, linter=None):
+ BaseChecker.__init__(self, linter)
+ self._to_consume = (
+ None
+ ) # list of tuples: (to_consume:dict, consumed:dict, scope_type:str)
+ self._checking_mod_attr = None
+ self._loop_variables = []
+ self._type_annotation_names = []
+ self._postponed_evaluation_enabled = False
+
+ @utils.check_messages("redefined-outer-name")
+ def visit_for(self, node):
+ assigned_to = [
+ var.name for var in node.target.nodes_of_class(astroid.AssignName)
+ ]
+
+ # Only check variables that are used
+ dummy_rgx = self.config.dummy_variables_rgx
+ assigned_to = [var for var in assigned_to if not dummy_rgx.match(var)]
+
+ for variable in assigned_to:
+ for outer_for, outer_variables in self._loop_variables:
+ if variable in outer_variables and not in_for_else_branch(
+ outer_for, node
+ ):
+ self.add_message(
+ "redefined-outer-name",
+ args=(variable, outer_for.fromlineno),
+ node=node,
+ )
+ break
+
+ self._loop_variables.append((node, assigned_to))
+
+ @utils.check_messages("redefined-outer-name")
+ def leave_for(self, node):
+ self._loop_variables.pop()
+ self._store_type_annotation_names(node)
+
+ def visit_module(self, node):
+ """visit module : update consumption analysis variable
+ checks globals doesn't overrides builtins
+ """
+ self._to_consume = [NamesConsumer(node, "module")]
+ self._postponed_evaluation_enabled = is_postponed_evaluation_enabled(node)
+
+ for name, stmts in node.locals.items():
+ if utils.is_builtin(name) and not utils.is_inside_except(stmts[0]):
+ if self._should_ignore_redefined_builtin(stmts[0]) or name == "__doc__":
+ continue
+ self.add_message("redefined-builtin", args=name, node=stmts[0])
+
+ @utils.check_messages(
+ "unused-import",
+ "unused-wildcard-import",
+ "redefined-builtin",
+ "undefined-all-variable",
+ "invalid-all-object",
+ "unused-variable",
+ )
+ def leave_module(self, node):
+ """leave module: check globals
+ """
+ assert len(self._to_consume) == 1
+
+ self._check_metaclasses(node)
+ not_consumed = self._to_consume.pop().to_consume
+ # attempt to check for __all__ if defined
+ if "__all__" in node.locals:
+ self._check_all(node, not_consumed)
+
+ # check for unused globals
+ self._check_globals(not_consumed)
+
+ # don't check unused imports in __init__ files
+ if not self.config.init_import and node.package:
+ return
+
+ self._check_imports(not_consumed)
+
+ def visit_classdef(self, node):
+ """visit class: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "class"))
+
+ def leave_classdef(self, _):
+ """leave class: update consumption analysis variable
+ """
+ # do not check for not used locals here (no sense)
+ self._to_consume.pop()
+
+ def visit_lambda(self, node):
+ """visit lambda: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "lambda"))
+
+ def leave_lambda(self, _):
+ """leave lambda: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_generatorexp(self, node):
+ """visit genexpr: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_generatorexp(self, _):
+ """leave genexpr: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_dictcomp(self, node):
+ """visit dictcomp: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_dictcomp(self, _):
+ """leave dictcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def visit_setcomp(self, node):
+ """visit setcomp: update consumption analysis variable
+ """
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_setcomp(self, _):
+ """leave setcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
    def visit_functiondef(self, node):
        """visit function: update consumption analysis variable and check locals

        Opens a "function" scope, then flags function locals that shadow
        module globals or builtins.
        """
        self._to_consume.append(NamesConsumer(node, "function"))
        if not (
            self.linter.is_message_enabled("redefined-outer-name")
            or self.linter.is_message_enabled("redefined-builtin")
        ):
            return
        globs = node.root().globals
        for name, stmt in node.items():
            if utils.is_inside_except(stmt):
                continue
            if name in globs and not isinstance(stmt, astroid.Global):
                definition = globs[name][0]
                if (
                    isinstance(definition, astroid.ImportFrom)
                    and definition.modname == FUTURE
                ):
                    # It is a __future__ directive, not a symbol.
                    continue

                # Do not take into account names redefined only under an
                # ``if TYPE_CHECKING:`` guard (type-checking-only symbols).
                if any(
                    isinstance(definition.parent, astroid.If)
                    and definition.parent.test.as_string() in TYPING_TYPE_CHECKS_GUARDS
                    for definition in globs[name]
                ):
                    continue

                line = definition.fromlineno
                if not self._is_name_ignored(stmt, name):
                    self.add_message(
                        "redefined-outer-name", args=(name, line), node=stmt
                    )

            elif utils.is_builtin(name) and not self._should_ignore_redefined_builtin(
                stmt
            ):
                # do not print Redefining builtin for additional builtins
                self.add_message("redefined-builtin", args=name, node=stmt)
+
    def leave_functiondef(self, node):
        """leave function: check function's locals are consumed"""
        self._check_metaclasses(node)

        # Record names used in "# type:" comments so they count as uses.
        if node.type_comment_returns:
            self._store_type_annotation_node(node.type_comment_returns)
        if node.type_comment_args:
            for argument_annotation in node.type_comment_args:
                self._store_type_annotation_node(argument_annotation)

        not_consumed = self._to_consume.pop().to_consume
        if not (
            self.linter.is_message_enabled("unused-variable")
            or self.linter.is_message_enabled("possibly-unused-variable")
            or self.linter.is_message_enabled("unused-argument")
        ):
            return

        # Don't check arguments of function which are only raising an exception.
        if utils.is_error(node):
            return

        # Don't check arguments of abstract methods or within an interface.
        is_method = node.is_method()
        if is_method and node.is_abstract():
            return

        # Names declared global/nonlocal are not locals of this function.
        global_names = _flattened_scope_names(node.nodes_of_class(astroid.Global))
        nonlocal_names = _flattened_scope_names(node.nodes_of_class(astroid.Nonlocal))
        for name, stmts in not_consumed.items():
            self._check_is_unused(name, node, stmts[0], global_names, nonlocal_names)

    # Async functions get the same consumption analysis as sync ones.
    visit_asyncfunctiondef = visit_functiondef
    leave_asyncfunctiondef = leave_functiondef
+
    @utils.check_messages(
        "global-variable-undefined",
        "global-variable-not-assigned",
        "global-statement",
        "global-at-module-level",
        "redefined-builtin",
    )
    def visit_global(self, node):
        """check names imported exists in the global scope"""
        frame = node.frame()
        # ``global`` at module level is a no-op; report and stop.
        if isinstance(frame, astroid.Module):
            self.add_message("global-at-module-level", node=node)
            return

        module = frame.root()
        default_message = True
        locals_ = node.scope().locals
        for name in node.names:
            try:
                assign_nodes = module.getattr(name)
            except astroid.NotFoundError:
                # unassigned global, skip
                assign_nodes = []

            # An import of the name in this scope counts as a definition.
            not_defined_locally_by_import = not any(
                isinstance(local, astroid.node_classes.Import)
                for local in locals_.get(name, ())
            )
            if not assign_nodes and not_defined_locally_by_import:
                self.add_message("global-variable-not-assigned", args=name, node=node)
                default_message = False
                continue

            for anode in assign_nodes:
                if (
                    isinstance(anode, astroid.AssignName)
                    and anode.name in module.special_attributes
                ):
                    self.add_message("redefined-builtin", args=name, node=node)
                    break
                if anode.frame() is module:
                    # module level assignment
                    break
            else:
                # for/else: no module-level assignment was found above.
                if not_defined_locally_by_import:
                    # global undefined at the module scope
                    self.add_message("global-variable-undefined", args=name, node=node)
                    default_message = False

        # Fall back to the generic discouragement message when no more
        # specific diagnosis was emitted.
        if default_message:
            self.add_message("global-statement", node=node)
+
+ def visit_assignname(self, node):
+ # An augmented assignment (x += 1) also *reads* the name, so treat the
+ # target as a use and run the full name check.
+ if isinstance(node.assign_type(), astroid.AugAssign):
+ self.visit_name(node)
+
+ def visit_delname(self, node):
+ # `del x` requires x to be defined, so it is checked like a name use.
+ self.visit_name(node)
+
+ @utils.check_messages(*MSGS)
+ def visit_name(self, node):
+ """check that a name is defined if the current scope and doesn't
+ redefine a built-in
+ """
+ stmt = node.statement()
+ if stmt.fromlineno is None:
+ # name node from an astroid built from live code, skip
+ assert not stmt.root().file.endswith(".py")
+ return
+
+ name = node.name
+ frame = stmt.scope()
+ # if the name node is used as a function default argument's value or as
+ # a decorator, then start from the parent frame of the function instead
+ # of the function frame - and thus open an inner class scope
+ if (
+ utils.is_default_argument(node)
+ or utils.is_func_decorator(node)
+ or utils.is_ancestor_name(frame, node)
+ ):
+ start_index = len(self._to_consume) - 2
+ else:
+ start_index = len(self._to_consume) - 1
+ # iterates through parent scopes, from the inner to the outer
+ base_scope_type = self._to_consume[start_index].scope_type
+ # pylint: disable=too-many-nested-blocks; refactoring this block is a pain.
+ for i in range(start_index, -1, -1):
+ current_consumer = self._to_consume[i]
+ # if the current scope is a class scope but it's not the inner
+ # scope, ignore it. This prevents to access this scope instead of
+ # the globals one in function members when there are some common
+ # names. The only exception is when the starting scope is a
+ # comprehension and its direct outer scope is a class
+ if (
+ current_consumer.scope_type == "class"
+ and i != start_index
+ and not (base_scope_type == "comprehension" and i == start_index - 1)
+ ):
+ if self._ignore_class_scope(node):
+ continue
+
+ # the name has already been consumed, only check it's not a loop
+ # variable used outside the loop
+ # avoid the case where there are homonyms inside function scope and
+ #  comprehension current scope (avoid bug #1731)
+ if name in current_consumer.consumed and not (
+ current_consumer.scope_type == "comprehension"
+ and self._has_homonym_in_upper_function_scope(node, i)
+ ):
+ defnode = utils.assign_parent(current_consumer.consumed[name][0])
+ self._check_late_binding_closure(node, defnode)
+ self._loopvar_name(node, name)
+ break
+
+ found_node = current_consumer.get_next_to_consume(node)
+ if found_node is None:
+ continue
+
+ # checks for use before assignment
+ defnode = utils.assign_parent(current_consumer.to_consume[name][0])
+
+ if defnode is not None:
+ self._check_late_binding_closure(node, defnode)
+ defstmt = defnode.statement()
+ defframe = defstmt.frame()
+ # The class reuses itself in the class scope.
+ recursive_klass = (
+ frame is defframe
+ and defframe.parent_of(node)
+ and isinstance(defframe, astroid.ClassDef)
+ and node.name == defframe.name
+ )
+
+ if (
+ recursive_klass
+ and utils.is_inside_lambda(node)
+ and (
+ not utils.is_default_argument(node)
+ or node.scope().parent.scope() is not defframe
+ )
+ ):
+ # Self-referential class references are fine in lambda's --
+ # As long as they are not part of the default argument directly
+ # under the scope of the parent self-referring class.
+ # Example of valid default argument:
+ # class MyName3:
+ # myattr = 1
+ # mylambda3 = lambda: lambda a=MyName3: a
+ # Example of invalid default argument:
+ # class MyName4:
+ # myattr = 1
+ # mylambda4 = lambda a=MyName4: lambda: a
+
+ # If the above conditional is True,
+ # there is no possibility of undefined-variable
+ # Also do not consume class name
+ # (since consuming blocks subsequent checks)
+ # -- quit
+ break
+
+ # Delegate the intricate "is this a use-before-assignment?"
+ # decision to the big static helper below.
+ maybee0601, annotation_return, use_outer_definition = self._is_variable_violation(
+ node,
+ name,
+ defnode,
+ stmt,
+ defstmt,
+ frame,
+ defframe,
+ base_scope_type,
+ recursive_klass,
+ )
+
+ if use_outer_definition:
+ continue
+
+ if (
+ maybee0601
+ and not utils.is_defined_before(node)
+ and not astroid.are_exclusive(stmt, defstmt, ("NameError",))
+ ):
+
+ # Used and defined in the same place, e.g `x += 1` and `del x`
+ defined_by_stmt = defstmt is stmt and isinstance(
+ node, (astroid.DelName, astroid.AssignName)
+ )
+ if (
+ recursive_klass
+ or defined_by_stmt
+ or annotation_return
+ or isinstance(defstmt, astroid.Delete)
+ ):
+ if not utils.node_ignores_exception(node, NameError):
+
+ # Handle postponed evaluation of annotations
+ if not (
+ self._postponed_evaluation_enabled
+ and isinstance(
+ stmt,
+ (
+ astroid.AnnAssign,
+ astroid.FunctionDef,
+ astroid.Arguments,
+ ),
+ )
+ and name in node.root().locals
+ ):
+ self.add_message(
+ "undefined-variable", args=name, node=node
+ )
+ elif base_scope_type != "lambda":
+ # E0601 may *not* occurs in lambda scope.
+
+ # Handle postponed evaluation of annotations
+ if not (
+ self._postponed_evaluation_enabled
+ and isinstance(
+ stmt, (astroid.AnnAssign, astroid.FunctionDef)
+ )
+ ):
+ self.add_message(
+ "used-before-assignment", args=name, node=node
+ )
+ elif base_scope_type == "lambda":
+ # E0601 can occur in class-level scope in lambdas, as in
+ # the following example:
+ # class A:
+ # x = lambda attr: f + attr
+ # f = 42
+ if isinstance(frame, astroid.ClassDef) and name in frame.locals:
+ if isinstance(node.parent, astroid.Arguments):
+ if stmt.fromlineno <= defstmt.fromlineno:
+ # Doing the following is fine:
+ # class A:
+ # x = 42
+ # y = lambda attr=x: attr
+ self.add_message(
+ "used-before-assignment", args=name, node=node
+ )
+ else:
+ self.add_message(
+ "undefined-variable", args=name, node=node
+ )
+ elif current_consumer.scope_type == "lambda":
+ self.add_message("undefined-variable", node=node, args=name)
+
+ # Mark the name as consumed so sibling checks see it as used.
+ current_consumer.mark_as_consumed(name, found_node)
+ # check it's not a loop variable used outside the loop
+ self._loopvar_name(node, name)
+ break
+ else:
+ # we have not found the name, if it isn't a builtin, that's an
+ # undefined name !
+ if not (
+ name in astroid.Module.scope_attrs
+ or utils.is_builtin(name)
+ or name in self.config.additional_builtins
+ ):
+ if not utils.node_ignores_exception(node, NameError):
+ self.add_message("undefined-variable", args=name, node=node)
+
+ @utils.check_messages("no-name-in-module")
+ def visit_import(self, node):
+ """check modules attribute accesses"""
+ if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
+ # No need to verify this, since ImportError is already
+ # handled by the client code.
+ return
+
+ for name, _ in node.names:
+ # For "import a.b.c", verify that "b.c" is reachable through module "a".
+ parts = name.split(".")
+ try:
+ module = next(_infer_name_module(node, parts[0]))
+ except astroid.ResolveError:
+ # Top-level module could not be inferred; nothing to check.
+ continue
+ self._check_module_attrs(node, module, parts[1:])
+
+ @utils.check_messages("no-name-in-module")
+ def visit_importfrom(self, node):
+ """check modules attribute accesses"""
+ if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node):
+ # No need to verify this, since ImportError is already
+ # handled by the client code.
+ return
+
+ # First resolve the dotted module path of the "from X.Y import ..." part.
+ name_parts = node.modname.split(".")
+ try:
+ module = node.do_import_module(name_parts[0])
+ except astroid.AstroidBuildingException:
+ return
+ module = self._check_module_attrs(node, module, name_parts[1:])
+ if not module:
+ return
+ # Then verify each imported name exists in the resolved module.
+ for name, _ in node.names:
+ if name == "*":
+ continue
+ self._check_module_attrs(node, module, name.split("."))
+
+ @utils.check_messages(
+ "unbalanced-tuple-unpacking", "unpacking-non-sequence", "self-cls-assignment"
+ )
+ def visit_assign(self, node):
+ """Check unbalanced tuple unpacking for assignments
+ and unpacking non-sequences as well as in case self/cls
+ get assigned.
+ """
+ self._check_self_cls_assign(node)
+ # Unpacking checks only apply to tuple/list targets, e.g. "a, b = ...".
+ if not isinstance(node.targets[0], (astroid.Tuple, astroid.List)):
+ return
+
+ targets = node.targets[0].itered()
+ try:
+ inferred = utils.safe_infer(node.value)
+ if inferred is not None:
+ self._check_unpacking(inferred, node, targets)
+ except astroid.InferenceError:
+ # RHS can't be inferred; unpacking can't be validated.
+ return
+
+ # listcomp have now also their scope
+ def visit_listcomp(self, node):
+ """visit dictcomp: update consumption analysis variable
+ """
+ # Comprehensions get their own consumption scope (Python 3 semantics).
+ self._to_consume.append(NamesConsumer(node, "comprehension"))
+
+ def leave_listcomp(self, _):
+ """leave dictcomp: update consumption analysis variable
+ """
+ # do not check for not used locals here
+ self._to_consume.pop()
+
+ def leave_assign(self, node):
+ # Collect names used in "# type:" comments attached to the assignment.
+ self._store_type_annotation_names(node)
+
+ def leave_with(self, node):
+ # Collect names used in "# type:" comments attached to the with block.
+ self._store_type_annotation_names(node)
+
+ def visit_arguments(self, node):
+ # Collect names used in per-argument "# type:" comments.
+ for annotation in node.type_comment_args:
+ self._store_type_annotation_node(annotation)
+
+ # Relying on other checker's options, which might not have been initialized yet.
+ @decorators.cachedproperty
+ def _analyse_fallback_blocks(self):
+ # Lazily read (and cache) the global "analyse-fallback-blocks" option.
+ return get_global_option(self, "analyse-fallback-blocks", default=False)
+
+ @decorators.cachedproperty
+ def _ignored_modules(self):
+ # Lazily read (and cache) the global "ignored-modules" option.
+ return get_global_option(self, "ignored-modules", default=[])
+
+ @decorators.cachedproperty
+ def _allow_global_unused_variables(self):
+ # Lazily read (and cache) the global "allow-global-unused-variables" option.
+ return get_global_option(self, "allow-global-unused-variables", default=True)
+
+ @staticmethod
+ def _defined_in_function_definition(node, frame):
+ # Return True when `node` appears in the *header* of function `frame`:
+ # an annotation, a vararg/kwarg annotation, or anywhere inside the
+ # Arguments node (defaults). Such names resolve in the enclosing scope.
+ in_annotation_or_default = False
+ if isinstance(frame, astroid.FunctionDef) and node.statement() is frame:
+ in_annotation_or_default = (
+ node in frame.args.annotations
+ or node in frame.args.kwonlyargs_annotations
+ or node is frame.args.varargannotation
+ or node is frame.args.kwargannotation
+ ) or frame.args.parent_of(node)
+ return in_annotation_or_default
+
+ @staticmethod
+ def _is_variable_violation(
+ node,
+ name,
+ defnode,
+ stmt,
+ defstmt,
+ frame,
+ defframe,
+ base_scope_type,
+ recursive_klass,
+ ):
+ # pylint: disable=too-many-nested-blocks
+ # node: Node to check for violation
+ # name: name of node to check violation for
+ # frame: Scope of statement of node
+ # base_scope_type: local scope type
+ # Returns (maybee0601, annotation_return, use_outer_definition):
+ # maybee0601 - True if this use may be a use-before-assignment
+ # annotation_return - True for the return-annotation special case
+ # use_outer_definition - True if the caller should keep searching
+ # outer scopes instead of reporting here
+ maybee0601 = True
+ annotation_return = False
+ use_outer_definition = False
+ if frame is not defframe:
+ maybee0601 = _detect_global_scope(node, frame, defframe)
+ elif defframe.parent is None:
+ # we are at the module level, check the name is not
+ # defined in builtins
+ if name in defframe.scope_attrs or astroid.builtin_lookup(name)[1]:
+ maybee0601 = False
+ else:
+ # we are in a local scope, check the name is not
+ # defined in global or builtin scope
+ # skip this lookup if name is assigned later in function scope/lambda
+ # Note: the node.frame() is not the same as the `frame` argument which is
+ # equivalent to frame.statement().scope()
+ forbid_lookup = (
+ isinstance(frame, astroid.FunctionDef)
+ or isinstance(node.frame(), astroid.Lambda)
+ ) and _assigned_locally(node)
+ if not forbid_lookup and defframe.root().lookup(name)[1]:
+ maybee0601 = False
+ use_outer_definition = stmt == defstmt and not isinstance(
+ defnode, astroid.node_classes.Comprehension
+ )
+ else:
+ # check if we have a nonlocal
+ if name in defframe.locals:
+ maybee0601 = not any(
+ isinstance(child, astroid.Nonlocal) and name in child.names
+ for child in defframe.get_children()
+ )
+
+ if (
+ base_scope_type == "lambda"
+ and isinstance(frame, astroid.ClassDef)
+ and name in frame.locals
+ ):
+
+ # This rule verifies that if the definition node of the
+ # checked name is an Arguments node and if the name
+ # is used a default value in the arguments defaults
+ # and the actual definition of the variable label
+ # is happening before the Arguments definition.
+ #
+ # bar = None
+ # foo = lambda bar=bar: bar
+ #
+ # In this case, maybee0601 should be False, otherwise
+ # it should be True.
+ maybee0601 = not (
+ isinstance(defnode, astroid.Arguments)
+ and node in defnode.defaults
+ and frame.locals[name][0].fromlineno < defstmt.fromlineno
+ )
+ elif isinstance(defframe, astroid.ClassDef) and isinstance(
+ frame, astroid.FunctionDef
+ ):
+ # Special rule for function return annotations,
+ # which uses the same name as the class where
+ # the function lives.
+ if node is frame.returns and defframe.parent_of(frame.returns):
+ maybee0601 = annotation_return = True
+
+ # NOTE(review): guard tests `defframe.name in defframe.locals` but
+ # then indexes `defframe.locals[name]` — the two names may differ;
+ # looks like it could KeyError. Verify against upstream pylint.
+ if (
+ maybee0601
+ and defframe.name in defframe.locals
+ and defframe.locals[name][0].lineno < frame.lineno
+ ):
+ # Detect class assignments with the same
+ # name as the class. In this case, no warning
+ # should be raised.
+ maybee0601 = False
+ if isinstance(node.parent, astroid.Arguments):
+ maybee0601 = stmt.fromlineno <= defstmt.fromlineno
+ elif recursive_klass:
+ maybee0601 = True
+ else:
+ maybee0601 = maybee0601 and stmt.fromlineno <= defstmt.fromlineno
+ if maybee0601 and stmt.fromlineno == defstmt.fromlineno:
+ if (
+ isinstance(defframe, astroid.FunctionDef)
+ and frame is defframe
+ and defframe.parent_of(node)
+ and stmt is not defstmt
+ ):
+ # Single statement function, with the statement on the
+ # same line as the function definition
+ maybee0601 = False
+
+ # Look for type checking definitions inside a type checking guard.
+ if isinstance(defstmt, (astroid.Import, astroid.ImportFrom)):
+ defstmt_parent = defstmt.parent
+
+ if (
+ isinstance(defstmt_parent, astroid.If)
+ and defstmt_parent.test.as_string() in TYPING_TYPE_CHECKS_GUARDS
+ ):
+ # Exempt those definitions that are used inside the type checking
+ # guard or that are defined in both type checking guard branches.
+ used_in_branch = defstmt_parent.parent_of(node)
+ defined_in_or_else = False
+
+ for definition in defstmt_parent.orelse:
+ if isinstance(definition, astroid.Assign):
+ defined_in_or_else = any(
+ target.name == name for target in definition.targets
+ )
+ if defined_in_or_else:
+ break
+
+ if not used_in_branch and not defined_in_or_else:
+ maybee0601 = True
+
+ return maybee0601, annotation_return, use_outer_definition
+
+ def _ignore_class_scope(self, node):
+ """
+ Return True if the node is in a local class scope, as an assignment.
+
+ :param node: Node considered
+ :type node: astroid.Node
+ :return: True if the node is in a local class scope, as an assignment. False otherwise.
+ :rtype: bool
+ """
+ # Detect if we are in a local class scope, as an assignment.
+ # For example, the following is fair game.
+ #
+ # class A:
+ # b = 1
+ # c = lambda b=b: b * b
+ #
+ # class B:
+ # tp = 1
+ # def func(self, arg: tp):
+ # ...
+ # class C:
+ # tp = 2
+ # def func(self, arg=tp):
+ # ...
+
+ name = node.name
+ frame = node.statement().scope()
+ in_annotation_or_default = self._defined_in_function_definition(node, frame)
+ if in_annotation_or_default:
+ # Names in a function header resolve in the scope *enclosing* the
+ # function, so look the name up there instead.
+ frame_locals = frame.parent.scope().locals
+ else:
+ frame_locals = frame.locals
+ return not (
+ (isinstance(frame, astroid.ClassDef) or in_annotation_or_default)
+ and name in frame_locals
+ )
+
+ def _loopvar_name(self, node, name):
+ # Emit undefined-loop-variable when `name` is a loop variable that is
+ # used outside its loop and the iterated object may be empty.
+ # filter variables according to node's scope
+ if not self.linter.is_message_enabled("undefined-loop-variable"):
+ return
+ astmts = [stmt for stmt in node.lookup(name)[1] if hasattr(stmt, "assign_type")]
+ # If this variable usage exists inside a function definition
+ # that exists in the same loop,
+ # the usage is safe because the function will not be defined either if
+ # the variable is not defined.
+ scope = node.scope()
+ if isinstance(scope, astroid.FunctionDef) and any(
+ asmt.statement().parent_of(scope) for asmt in astmts
+ ):
+ return
+
+ # filter variables according their respective scope test is_statement
+ # and parent to avoid #74747. This is not a total fix, which would
+ # introduce a mechanism similar to special attribute lookup in
+ # modules. Also, in order to get correct inference in this case, the
+ # scope lookup rules would need to be changed to return the initial
+ # assignment (which does not exist in code per se) as well as any later
+ # modifications.
+ if (
+ not astmts
+ or (astmts[0].is_statement or astmts[0].parent)
+ and astmts[0].statement().parent_of(node)
+ ):
+ _astmts = []
+ else:
+ _astmts = astmts[:1]
+ for i, stmt in enumerate(astmts[1:]):
+ if astmts[i].statement().parent_of(stmt) and not in_for_else_branch(
+ astmts[i].statement(), stmt
+ ):
+ continue
+ _astmts.append(stmt)
+ astmts = _astmts
+ # The check only applies when exactly one assignment remains.
+ if len(astmts) != 1:
+ return
+
+ assign = astmts[0].assign_type()
+ if not (
+ isinstance(
+ assign, (astroid.For, astroid.Comprehension, astroid.GeneratorExp)
+ )
+ and assign.statement() is not node.statement()
+ ):
+ return
+
+ # For functions we can do more by inferring the length of the itered object
+ if not isinstance(assign, astroid.For):
+ self.add_message("undefined-loop-variable", args=name, node=node)
+ return
+
+ try:
+ inferred = next(assign.iter.infer())
+ except astroid.InferenceError:
+ self.add_message("undefined-loop-variable", args=name, node=node)
+ else:
+ if (
+ isinstance(inferred, astroid.Instance)
+ and inferred.qname() == BUILTIN_RANGE
+ ):
+ # Consider range() objects safe, even if they might not yield any results.
+ return
+
+ # Consider sequences.
+ sequences = (
+ astroid.List,
+ astroid.Tuple,
+ astroid.Dict,
+ astroid.Set,
+ objects.FrozenSet,
+ )
+ if not isinstance(inferred, sequences):
+ self.add_message("undefined-loop-variable", args=name, node=node)
+ return
+
+ # A literal sequence with at least one element is guaranteed to bind
+ # the loop variable, so only empty literals are reported.
+ elements = getattr(inferred, "elts", getattr(inferred, "items", []))
+ if not elements:
+ self.add_message("undefined-loop-variable", args=name, node=node)
+
+ def _check_is_unused(self, name, node, stmt, global_names, nonlocal_names):
+ # Decide whether the unconsumed `name` in function `node` warrants an
+ # unused-variable / unused-argument / unused-import message at `stmt`.
+ # pylint: disable=too-many-branches
+ # Ignore some special names specified by user configuration.
+ if self._is_name_ignored(stmt, name):
+ return
+ # Ignore names that were added dynamically to the Function scope
+ if (
+ isinstance(node, astroid.FunctionDef)
+ and name == "__class__"
+ and len(node.locals["__class__"]) == 1
+ and isinstance(node.locals["__class__"][0], astroid.ClassDef)
+ ):
+ return
+
+ # Ignore names imported by the global statement.
+ if isinstance(stmt, (astroid.Global, astroid.Import, astroid.ImportFrom)):
+ # Detect imports, assigned to global statements.
+ if global_names and _import_name_is_global(stmt, global_names):
+ return
+
+ argnames = list(
+ itertools.chain(node.argnames(), [arg.name for arg in node.args.kwonlyargs])
+ )
+ # Care about functions with unknown argument (builtins)
+ if name in argnames:
+ self._check_unused_arguments(name, node, stmt, argnames)
+ else:
+ if stmt.parent and isinstance(
+ stmt.parent, (astroid.Assign, astroid.AnnAssign)
+ ):
+ if name in nonlocal_names:
+ return
+
+ qname = asname = None
+ if isinstance(stmt, (astroid.Import, astroid.ImportFrom)):
+ # Need the complete name, which we don't have in .locals.
+ if len(stmt.names) > 1:
+ import_names = next(
+ (names for names in stmt.names if name in names), None
+ )
+ else:
+ import_names = stmt.names[0]
+ if import_names:
+ qname, asname = import_names
+ name = asname or qname
+
+ if _has_locals_call_after_node(stmt, node.scope()):
+ # locals() later in the scope may read the name dynamically,
+ # so only a "possibly" unused message is justified.
+ message_name = "possibly-unused-variable"
+ else:
+ if isinstance(stmt, astroid.Import):
+ if asname is not None:
+ msg = "%s imported as %s" % (qname, asname)
+ else:
+ msg = "import %s" % name
+ self.add_message("unused-import", args=msg, node=stmt)
+ return
+ if isinstance(stmt, astroid.ImportFrom):
+ if asname is not None:
+ msg = "%s imported from %s as %s" % (
+ qname,
+ stmt.modname,
+ asname,
+ )
+ else:
+ msg = "%s imported from %s" % (name, stmt.modname)
+ self.add_message("unused-import", args=msg, node=stmt)
+ return
+ message_name = "unused-variable"
+
+ # Don't check function stubs created only for type information
+ if utils.is_overload_stub(node):
+ return
+
+ self.add_message(message_name, args=name, node=stmt)
+
+ def _is_name_ignored(self, stmt, name):
+ # True when `name` matches the user-configured "ignore" regex:
+ # ignored-argument-names for function arguments,
+ # dummy-variables-rgx for everything else.
+ authorized_rgx = self.config.dummy_variables_rgx
+ if (
+ isinstance(stmt, astroid.AssignName)
+ and isinstance(stmt.parent, astroid.Arguments)
+ or isinstance(stmt, astroid.Arguments)
+ ):
+ regex = self.config.ignored_argument_names
+ else:
+ regex = authorized_rgx
+ return regex and regex.match(name)
+
+ def _check_unused_arguments(self, name, node, stmt, argnames):
+ # Emit unused-argument for `name` unless one of the exemptions below
+ # applies (self/cls, overridden methods, dunders, callbacks, stubs...).
+ is_method = node.is_method()
+ klass = node.parent.frame()
+ if is_method and isinstance(klass, astroid.ClassDef):
+ # Confidence is lower when the class hierarchy can't be resolved.
+ confidence = (
+ INFERENCE if utils.has_known_bases(klass) else INFERENCE_FAILURE
+ )
+ else:
+ confidence = HIGH
+
+ if is_method:
+ # Don't warn for the first argument of a (non static) method
+ if node.type != "staticmethod" and name == argnames[0]:
+ return
+ # Don't warn for argument of an overridden method
+ overridden = overridden_method(klass, node.name)
+ if overridden is not None and name in overridden.argnames():
+ return
+ if node.name in utils.PYMETHODS and node.name not in (
+ "__init__",
+ "__new__",
+ ):
+ return
+ # Don't check callback arguments
+ if any(
+ node.name.startswith(cb) or node.name.endswith(cb)
+ for cb in self.config.callbacks
+ ):
+ return
+ # Don't check arguments of singledispatch.register function.
+ if utils.is_registered_in_singledispatch_function(node):
+ return
+
+ # Don't check function stubs created only for type information
+ if utils.is_overload_stub(node):
+ return
+
+ # Don't check protocol classes
+ if utils.is_protocol_class(klass):
+ return
+
+ self.add_message("unused-argument", args=name, node=stmt, confidence=confidence)
+
+ def _check_late_binding_closure(self, node, assignment_node):
+ # Warn (cell-var-from-loop) when a closure captures a loop variable:
+ # the lambda/function will see the variable's *final* value, not the
+ # value at definition time.
+ def _is_direct_lambda_call():
+ # Lambda invoked immediately, e.g. (lambda: x)() -- no late binding.
+ return (
+ isinstance(node_scope.parent, astroid.Call)
+ and node_scope.parent.func is node_scope
+ )
+
+ node_scope = node.scope()
+ if not isinstance(node_scope, (astroid.Lambda, astroid.FunctionDef)):
+ return
+ if isinstance(node.parent, astroid.Arguments):
+ # Default-argument values are evaluated eagerly; no late binding.
+ return
+
+ if isinstance(assignment_node, astroid.Comprehension):
+ if assignment_node.parent.parent_of(node.scope()):
+ self.add_message("cell-var-from-loop", node=node, args=node.name)
+ else:
+ # Walk up from the assignment looking for an enclosing For loop.
+ assign_scope = assignment_node.scope()
+ maybe_for = assignment_node
+ while not isinstance(maybe_for, astroid.For):
+ if maybe_for is assign_scope:
+ break
+ maybe_for = maybe_for.parent
+ else:
+ if (
+ maybe_for.parent_of(node_scope)
+ and not _is_direct_lambda_call()
+ and not isinstance(node_scope.statement(), astroid.Return)
+ ):
+ self.add_message("cell-var-from-loop", node=node, args=node.name)
+
+ def _should_ignore_redefined_builtin(self, stmt):
+ # Imports from user-whitelisted modules (redefining-builtins-modules)
+ # may legitimately shadow builtins, e.g. "from six.moves import input".
+ if not isinstance(stmt, astroid.ImportFrom):
+ return False
+ return stmt.modname in self.config.redefining_builtins_modules
+
+ def _has_homonym_in_upper_function_scope(self, node, index):
+ """
+ Return True if there is a node with the same name in the to_consume dict of an upper scope
+ and if that scope is a function
+
+ :param node: node to check for
+ :type node: astroid.Node
+ :param index: index of the current consumer inside self._to_consume
+ :type index: int
+ :return: True if there is a node with the same name in the to_consume dict of an upper scope
+ and if that scope is a function
+ :rtype: bool
+ """
+ # Walk outward from the scope just above `index` toward the outermost.
+ for _consumer in self._to_consume[index - 1 :: -1]:
+ if _consumer.scope_type == "function" and node.name in _consumer.to_consume:
+ return True
+ return False
+
+ def _store_type_annotation_node(self, type_annotation):
+ """Given a type annotation, store all the name nodes it refers to"""
+ if isinstance(type_annotation, astroid.Name):
+ self._type_annotation_names.append(type_annotation.name)
+ return
+
+ # Only Name and Subscript (e.g. List[int]) annotations are handled.
+ if not isinstance(type_annotation, astroid.Subscript):
+ return
+
+ if (
+ isinstance(type_annotation.value, astroid.Attribute)
+ and isinstance(type_annotation.value.expr, astroid.Name)
+ and type_annotation.value.expr.name == TYPING_MODULE
+ ):
+ # e.g. "typing.List[...]": record the "typing" module name itself.
+ self._type_annotation_names.append(TYPING_MODULE)
+ return
+
+ # Otherwise record every bare name inside the subscript expression.
+ self._type_annotation_names.extend(
+ annotation.name
+ for annotation in type_annotation.nodes_of_class(astroid.Name)
+ )
+
+ def _store_type_annotation_names(self, node):
+ # Record names from a statement's "# type:" comment, if any.
+ type_annotation = node.type_annotation
+ if not type_annotation:
+ return
+ self._store_type_annotation_node(node.type_annotation)
+
+ def _check_self_cls_assign(self, node):
+ """Check that self/cls don't get assigned"""
+ assign_names = {
+ target.name
+ for target in node.targets
+ if isinstance(target, astroid.AssignName)
+ }
+ scope = node.scope()
+ # If the assigned name is declared nonlocal, the relevant method is
+ # the enclosing one, not the current scope.
+ nonlocals_with_same_name = any(
+ child
+ for child in scope.body
+ if isinstance(child, astroid.Nonlocal) and assign_names & set(child.names)
+ )
+ if nonlocals_with_same_name:
+ scope = node.scope().parent.scope()
+
+ # Only non-static methods have a self/cls first argument to protect.
+ if not (
+ isinstance(scope, astroid.scoped_nodes.FunctionDef)
+ and scope.is_method()
+ and "builtins.staticmethod" not in scope.decoratornames()
+ ):
+ return
+ argument_names = scope.argnames()
+ if not argument_names:
+ return
+ self_cls_name = argument_names[0]
+ target_assign_names = (
+ target.name
+ for target in node.targets
+ if isinstance(target, astroid.node_classes.AssignName)
+ )
+ if self_cls_name in target_assign_names:
+ self.add_message("self-cls-assignment", node=node, args=(self_cls_name))
+
+ def _check_unpacking(self, inferred, node, targets):
+ """ Check for unbalanced tuple unpacking
+ and unpacking non sequences.
+ """
+ if utils.is_inside_abstract_class(node):
+ return
+ if utils.is_comprehension(node):
+ return
+ if inferred is astroid.Uninferable:
+ return
+ if (
+ isinstance(inferred.parent, astroid.Arguments)
+ and isinstance(node.value, astroid.Name)
+ and node.value.name == inferred.parent.vararg
+ ):
+ # Variable-length argument, we can't determine the length.
+ return
+ if isinstance(inferred, (astroid.Tuple, astroid.List)):
+ # attempt to check unpacking is properly balanced
+ values = inferred.itered()
+ if len(targets) != len(values):
+ # Check if we have starred nodes.
+ if any(isinstance(target, astroid.Starred) for target in targets):
+ # A starred target absorbs the imbalance; nothing to report.
+ return
+ self.add_message(
+ "unbalanced-tuple-unpacking",
+ node=node,
+ args=(
+ _get_unpacking_extra_info(node, inferred),
+ len(targets),
+ len(values),
+ ),
+ )
+ # attempt to check unpacking may be possible (ie RHS is iterable)
+ else:
+ if not utils.is_iterable(inferred):
+ self.add_message(
+ "unpacking-non-sequence",
+ node=node,
+ args=(_get_unpacking_extra_info(node, inferred),),
+ )
+
+ def _check_module_attrs(self, node, module, module_names):
+ """check that module_names (list of string) are accessible through the
+ given module
+ if the latest access name corresponds to a module, return it
+ """
+ assert isinstance(module, astroid.Module), module
+ # Walk the dotted path one component at a time, re-inferring `module`.
+ while module_names:
+ name = module_names.pop(0)
+ if name == "__dict__":
+ # __dict__ access ends meaningful attribute resolution.
+ module = None
+ break
+ try:
+ module = next(module.getattr(name)[0].infer())
+ if module is astroid.Uninferable:
+ return None
+ except astroid.NotFoundError:
+ if module.name in self._ignored_modules:
+ return None
+ self.add_message(
+ "no-name-in-module", args=(name, module.name), node=node
+ )
+ return None
+ except astroid.InferenceError:
+ return None
+ if module_names:
+ # Loop broke early (on __dict__) with components left unresolved.
+ modname = module.name if module else "__dict__"
+ self.add_message(
+ "no-name-in-module", node=node, args=(".".join(module_names), modname)
+ )
+ return None
+ if isinstance(module, astroid.Module):
+ return module
+ return None
+
+ def _check_all(self, node, not_consumed):
+ # Validate the module's __all__: entries must be strings naming
+ # objects defined in the module (or its submodules for packages).
+ # Names listed in __all__ count as consumed.
+ assigned = next(node.igetattr("__all__"))
+ if assigned is astroid.Uninferable:
+ return
+
+ for elt in getattr(assigned, "elts", ()):
+ try:
+ elt_name = next(elt.infer())
+ except astroid.InferenceError:
+ continue
+ if elt_name is astroid.Uninferable:
+ continue
+ if not elt_name.parent:
+ continue
+
+ if not isinstance(elt_name, astroid.Const) or not isinstance(
+ elt_name.value, str
+ ):
+ self.add_message("invalid-all-object", args=elt.as_string(), node=elt)
+ continue
+
+ elt_name = elt_name.value
+ # If elt is in not_consumed, remove it from not_consumed
+ if elt_name in not_consumed:
+ del not_consumed[elt_name]
+ continue
+
+ if elt_name not in node.locals:
+ if not node.package:
+ self.add_message(
+ "undefined-all-variable", args=(elt_name,), node=elt
+ )
+ else:
+ # For packages, the name may refer to a submodule; check
+ # whether such a module file exists before reporting.
+ basename = os.path.splitext(node.file)[0]
+ if os.path.basename(basename) == "__init__":
+ name = node.name + "." + elt_name
+ try:
+ modutils.file_from_modpath(name.split("."))
+ except ImportError:
+ self.add_message(
+ "undefined-all-variable", args=(elt_name,), node=elt
+ )
+ except SyntaxError:
+ # don't yield a syntax-error warning,
+ # because it will be later yielded
+ # when the file will be checked
+ pass
+
+ def _check_globals(self, not_consumed):
+ # Report unused module-level names, unless the user opted out via
+ # the allow-global-unused-variables option.
+ if self._allow_global_unused_variables:
+ return
+ for name, nodes in not_consumed.items():
+ for node in nodes:
+ self.add_message("unused-variable", args=(name,), node=node)
+
+ def _check_imports(self, not_consumed):
+ # Report unused-import / unused-wildcard-import for imported names
+ # that were never consumed at module level.
+ local_names = _fix_dot_imports(not_consumed)
+ checked = set()
+ for name, stmt in local_names:
+ for imports in stmt.names:
+ real_name = imported_name = imports[0]
+ if imported_name == "*":
+ real_name = name
+ as_name = imports[1]
+ if real_name in checked:
+ continue
+ if name not in (real_name, as_name):
+ continue
+ checked.add(real_name)
+
+ # Plain "import x" or relative "from . import x".
+ if isinstance(stmt, astroid.Import) or (
+ isinstance(stmt, astroid.ImportFrom) and not stmt.modname
+ ):
+ if isinstance(stmt, astroid.ImportFrom) and SPECIAL_OBJ.search(
+ imported_name
+ ):
+ # Filter special objects (__doc__, __all__) etc.,
+ # because they can be imported for exporting.
+ continue
+
+ if imported_name in self._type_annotation_names:
+ # Most likely a typing import if it wasn't used so far.
+ continue
+
+ if as_name == "_":
+ continue
+ if as_name is None:
+ msg = "import %s" % imported_name
+ else:
+ msg = "%s imported as %s" % (imported_name, as_name)
+ if not _is_type_checking_import(stmt):
+ self.add_message("unused-import", args=msg, node=stmt)
+ elif isinstance(stmt, astroid.ImportFrom) and stmt.modname != FUTURE:
+ if SPECIAL_OBJ.search(imported_name):
+ # Filter special objects (__doc__, __all__) etc.,
+ # because they can be imported for exporting.
+ continue
+
+ if _is_from_future_import(stmt, name):
+ # Check if the name is in fact loaded from a
+ # __future__ import in another module.
+ continue
+
+ if imported_name in self._type_annotation_names:
+ # Most likely a typing import if it wasn't used so far.
+ continue
+
+ if imported_name == "*":
+ self.add_message("unused-wildcard-import", args=name, node=stmt)
+ else:
+ if as_name is None:
+ msg = "%s imported from %s" % (imported_name, stmt.modname)
+ else:
+ fields = (imported_name, stmt.modname, as_name)
+ msg = "%s imported from %s as %s" % fields
+ if not _is_type_checking_import(stmt):
+ self.add_message("unused-import", args=msg, node=stmt)
+ # Module analysis is complete; drop the consumption stack.
+ del self._to_consume
+
+ def _check_metaclasses(self, node):
+ """ Update consumption analysis for metaclasses. """
+ consumed = [] # [(scope_locals, consumed_key)]
+
+ for child_node in node.get_children():
+ if isinstance(child_node, astroid.ClassDef):
+ consumed.extend(self._check_classdef_metaclasses(child_node, node))
+
+ # Pop the consumed items, in order to avoid having
+ # unused-import and unused-variable false positives
+ for scope_locals, name in consumed:
+ scope_locals.pop(name, None)
+
+ def _check_classdef_metaclasses(self, klass, parent_node):
+ # Mark the metaclass name of `klass` as consumed (so it isn't flagged
+ # unused) and report undefined-variable for unresolvable metaclasses.
+ # Returns a list of (scope_locals, name) pairs consumed here.
+ if not klass._metaclass:
+ # Skip if this class doesn't use explicitly a metaclass, but inherits it from ancestors
+ return []
+
+ consumed = [] # [(scope_locals, consumed_key)]
+ metaclass = klass.metaclass()
+
+ name = None
+ if isinstance(klass._metaclass, astroid.Name):
+ name = klass._metaclass.name
+ elif metaclass:
+ name = metaclass.root().name
+
+ found = None
+ name = METACLASS_NAME_TRANSFORMS.get(name, name)
+ if name:
+ # check enclosing scopes starting from most local
+ for scope_locals, _, _ in self._to_consume[::-1]:
+ found = scope_locals.get(name)
+ if found:
+ consumed.append((scope_locals, name))
+ break
+
+ if found is None and not metaclass:
+ # Metaclass name wasn't found in any scope and couldn't be inferred.
+ name = None
+ if isinstance(klass._metaclass, astroid.Name):
+ name = klass._metaclass.name
+ elif isinstance(klass._metaclass, astroid.Attribute):
+ name = klass._metaclass.as_string()
+
+ if name is not None:
+ if not (
+ name in astroid.Module.scope_attrs
+ or utils.is_builtin(name)
+ or name in self.config.additional_builtins
+ or name in parent_node.locals
+ ):
+ self.add_message("undefined-variable", node=klass, args=(name,))
+
+ return consumed
+
+
+def register(linter):
+ """required method to auto register this checker"""
+ # Called by pylint's plugin loader with the active linter instance.
+ linter.register_checker(VariablesChecker(linter))
diff --git a/venv/Lib/site-packages/pylint/config.py b/venv/Lib/site-packages/pylint/config.py
new file mode 100644
index 0000000..0925575
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/config.py
@@ -0,0 +1,913 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2010, 2012-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2008 pyves@crater.logilab.fr <pyves@crater.logilab.fr>
+# Copyright (c) 2010 Julien Jehannet <julien.jehannet@logilab.fr>
+# Copyright (c) 2013 Google, Inc.
+# Copyright (c) 2013 John McGehee <jmcgehee@altera.com>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Aru Sahni <arusahni@gmail.com>
+# Copyright (c) 2015 John Kirkham <jakirkham@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017-2018 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Gary Tyler McLeod <mail@garytyler.com>
+# Copyright (c) 2018 Konstantin <Github@pheanex.de>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""utilities for Pylint configuration :
+
+* pylintrc
+* pylint.d (PYLINTHOME)
+"""
+import collections
+import configparser
+import contextlib
+import copy
+import io
+import optparse
+import os
+import pickle
+import re
+import sys
+import time
+from typing import Any, Dict, Tuple
+
+from pylint import utils
+
# Resolve the directory used for persistent run data (PYLINT_HOME):
# the PYLINTHOME environment variable wins; otherwise ~/.pylint.d, with a
# relative ".pylint.d" fallback when the home directory cannot be expanded
# (os.path.expanduser returned "~" unchanged).
USER_HOME = os.path.expanduser("~")
if "PYLINTHOME" in os.environ:
    PYLINT_HOME = os.environ["PYLINTHOME"]
    if USER_HOME == "~":
        # No usable home directory: derive USER_HOME from PYLINT_HOME.
        USER_HOME = os.path.dirname(PYLINT_HOME)
elif USER_HOME == "~":
    PYLINT_HOME = ".pylint.d"
else:
    PYLINT_HOME = os.path.join(USER_HOME, ".pylint.d")
+
+
def _get_pdata_path(base_name, recurs):
    """Return the ``.stats`` file path for *base_name* inside PYLINT_HOME."""
    # Flatten path separators so the whole module path becomes one file name.
    flattened = base_name.replace(os.sep, "_")
    return os.path.join(PYLINT_HOME, "{}{}.stats".format(flattened, recurs))
+
+
def load_results(base):
    """Return the persisted stats for *base*, or {} if missing/unreadable."""
    stats_file = _get_pdata_path(base, 1)
    try:
        with open(stats_file, "rb") as handle:
            return pickle.load(handle)
    except Exception:  # pylint: disable=broad-except
        # A missing or corrupt cache is not an error: start from scratch.
        return {}
+
+
def save_results(results, base):
    """Persist *results* (pickled) for *base* inside PYLINT_HOME.

    Saving is best-effort: failures are reported on stderr, never raised.
    """
    if not os.path.exists(PYLINT_HOME):
        try:
            os.mkdir(PYLINT_HOME)
        except OSError:
            print("Unable to create directory %s" % PYLINT_HOME, file=sys.stderr)
    target = _get_pdata_path(base, 1)
    try:
        with open(target, "wb") as handle:
            pickle.dump(results, handle)
    except (IOError, OSError) as ex:
        print("Unable to create file %s: %s" % (target, ex), file=sys.stderr)
+
+
def find_pylintrc():
    """Search the pylint rc file and return its path if it find it, else None

    Lookup order: current directory, enclosing package directories, the
    PYLINTRC environment variable, per-user configuration files, and
    finally /etc/pylintrc.
    """
    # rc file directly in the current working directory?
    for local_name in ("pylintrc", ".pylintrc"):
        if os.path.exists(local_name):
            return os.path.abspath(local_name)
    # inside a package: walk up through parent packages looking for one
    if os.path.isfile("__init__.py"):
        current = os.path.abspath(os.getcwd())
        while os.path.isfile(os.path.join(current, "__init__.py")):
            current = os.path.abspath(os.path.join(current, ".."))
            for local_name in ("pylintrc", ".pylintrc"):
                candidate = os.path.join(current, local_name)
                if os.path.isfile(candidate):
                    return candidate
    # explicit location from the environment, else per-user configuration
    if "PYLINTRC" in os.environ and os.path.exists(os.environ["PYLINTRC"]):
        rc_file = os.environ["PYLINTRC"]
    else:
        home = os.path.expanduser("~")
        if home in ("~", "/root"):
            rc_file = ".pylintrc"
        else:
            rc_file = os.path.join(home, ".pylintrc")
            if not os.path.isfile(rc_file):
                rc_file = os.path.join(home, ".config", "pylintrc")
    # fall back to the system-wide configuration, or give up
    if not os.path.isfile(rc_file):
        rc_file = "/etc/pylintrc" if os.path.isfile("/etc/pylintrc") else None
    return rc_file
+
+
# Configuration file discovered at import time (may be None).
PYLINTRC = find_pylintrc()

# User-facing help text about the supported environment variables.  The
# "% globals()" interpolation is effectively a no-op today: the string
# contains no %(name)s placeholders.
ENV_HELP = (
    """
The following environment variables are used:
    * PYLINTHOME
    Path to the directory where persistent data for the run will be stored. If
not found, it defaults to ~/.pylint.d/ or .pylint.d (in the current working
directory).
    * PYLINTRC
    Path to the configuration file. See the documentation for the method used
to search for configuration file.
"""
    % globals()  # type: ignore
)
+
+
class UnsupportedAction(Exception):
    """Raised by set_option when it doesn't know what to do for an action."""
+
+
def _multiple_choice_validator(choices, name, value):
    """Validate a comma-separated *value*: every item must be in *choices*."""
    parsed = utils._check_csv(value)
    rejected = [item for item in parsed if item not in choices]
    if rejected:
        msg = "option %s: invalid value: %r, should be in %s"
        raise optparse.OptionValueError(msg % (name, rejected[0], choices))
    return parsed
+
+
+def _choice_validator(choices, name, value):
+ if value not in choices:
+ msg = "option %s: invalid value: %r, should be in %s"
+ raise optparse.OptionValueError(msg % (name, value, choices))
+ return value
+
+
# pylint: disable=unused-argument
def _csv_validator(_, name, value):
    """Split a comma-separated *value* into a list (delegates to utils)."""
    return utils._check_csv(value)
+
+
+# pylint: disable=unused-argument
+def _regexp_validator(_, name, value):
+ if hasattr(value, "pattern"):
+ return value
+ return re.compile(value)
+
+
# pylint: disable=unused-argument
def _regexp_csv_validator(_, name, value):
    """Compile each element of a comma-separated *value* into a regex."""
    return [_regexp_validator(_, name, val) for val in _csv_validator(_, name, value)]
+
+
+def _yn_validator(opt, _, value):
+ if isinstance(value, int):
+ return bool(value)
+ if value in ("y", "yes"):
+ return True
+ if value in ("n", "no"):
+ return False
+ msg = "option %s: invalid yn value %r, should be in (y, yes, n, no)"
+ raise optparse.OptionValueError(msg % (opt, value))
+
+
+def _non_empty_string_validator(opt, _, value):
+ if not value:
+ msg = "indent string can't be empty."
+ raise optparse.OptionValueError(msg)
+ return utils._unquote(value)
+
+
# Map option "type" names to callables that validate/coerce raw values.
# Entries are invoked as f(optdict, name, value), with a fallback to a
# plain f(value) call for simple converters -- see _call_validator.
VALIDATORS = {
    "string": utils._unquote,
    "int": int,
    "regexp": re.compile,
    "regexp_csv": _regexp_csv_validator,
    "csv": _csv_validator,
    "yn": _yn_validator,
    "choice": lambda opt, name, value: _choice_validator(opt["choices"], name, value),
    "multiple_choice": lambda opt, name, value: _multiple_choice_validator(
        opt["choices"], name, value
    ),
    "non_empty_string": _non_empty_string_validator,
}
+
+
def _call_validator(opttype, optdict, option, value):
    """Run the VALIDATORS entry registered for *opttype* on *value*.

    Tries the three-argument calling convention first and, on TypeError,
    falls back to a single-argument call (plain converters such as ``int``).
    Any failure in the fallback is reported as an option value error.
    """
    if opttype not in VALIDATORS:
        raise Exception('Unsupported type "%s"' % opttype)
    try:
        return VALIDATORS[opttype](optdict, option, value)
    except TypeError:
        try:
            return VALIDATORS[opttype](value)
        except Exception:
            raise optparse.OptionValueError(
                "%s value (%r) should be of type %s" % (option, value, opttype)
            )
+
+
+def _validate(value, optdict, name=""):
+ """return a validated value for an option according to its type
+
+ optional argument name is only used for error message formatting
+ """
+ try:
+ _type = optdict["type"]
+ except KeyError:
+ return value
+ return _call_validator(_type, optdict, name, value)
+
+
+def _level_options(group, outputlevel):
+ return [
+ option
+ for option in group.option_list
+ if (getattr(option, "level", 0) or 0) <= outputlevel
+ and option.help is not optparse.SUPPRESS_HELP
+ ]
+
+
def _expand_default(self, option):
    """Patch OptionParser.expand_default with custom behaviour

    This will handle defaults to avoid overriding values in the
    configuration file.
    """
    if self.parser is None or not self.default_tag:
        return option.help
    # Resolve the provider registered for this option so we can show the
    # *currently configured* value instead of optparse's static default.
    optname = option._long_opts[0][2:]
    try:
        provider = self.parser.options_manager._all_options[optname]
    except KeyError:
        value = None
    else:
        optdict = provider.get_option_def(optname)
        optname = provider.option_attrname(optname, optdict)
        value = getattr(provider.config, optname, optdict)
        value = utils._format_option_value(optdict, value)
    if value is optparse.NO_DEFAULT or not value:
        value = self.NO_DEFAULT_VALUE
    return option.help.replace(self.default_tag, str(value))
+
+
@contextlib.contextmanager
def _patch_optparse():
    """Temporarily install our ``_expand_default`` on optparse.HelpFormatter.

    Bug fix: the previous code saved ``optparse.HelpFormatter`` (the class
    itself) instead of its ``expand_default`` method, so on exit the
    attribute was "restored" to the class object, permanently breaking
    default expansion in help output after the first use.
    """
    orig_default = optparse.HelpFormatter.expand_default
    try:
        optparse.HelpFormatter.expand_default = _expand_default
        yield
    finally:
        # restore the genuine original method, whatever happened inside
        optparse.HelpFormatter.expand_default = orig_default
+
+
def _multiple_choices_validating_option(opt, name, value):
    """TYPE_CHECKER adapter for multiple-choice values.

    Unlike the VALIDATORS entry, *opt* here is an optparse.Option instance,
    so the allowed choices come from its ``choices`` attribute rather than
    from an option dict.
    """
    return _multiple_choice_validator(opt.choices, name, value)
+
+
# pylint: disable=no-member
class Option(optparse.Option):
    """optparse.Option subclass adding pylint-specific types and attributes.

    Registers the custom option types (regexp, csv, yn, ...) with their
    validators, plus the extra ``hide`` and ``level`` attributes used to
    filter help output.
    """

    TYPES = optparse.Option.TYPES + (
        "regexp",
        "regexp_csv",
        "csv",
        "yn",
        "multiple_choice",
        "non_empty_string",
    )
    ATTRS = optparse.Option.ATTRS + ["hide", "level"]
    TYPE_CHECKER = copy.copy(optparse.Option.TYPE_CHECKER)
    TYPE_CHECKER["regexp"] = _regexp_validator
    TYPE_CHECKER["regexp_csv"] = _regexp_csv_validator
    TYPE_CHECKER["csv"] = _csv_validator
    TYPE_CHECKER["yn"] = _yn_validator
    TYPE_CHECKER["multiple_choice"] = _multiple_choices_validating_option
    TYPE_CHECKER["non_empty_string"] = _non_empty_string_validator

    def __init__(self, *opts, **attrs):
        optparse.Option.__init__(self, *opts, **attrs)
        # hidden options keep working but disappear from --help output
        if hasattr(self, "hide") and self.hide:
            self.help = optparse.SUPPRESS_HELP

    def _check_choice(self):
        # extends the base check to also require choices for multiple_choice
        if self.type in ("choice", "multiple_choice"):
            if self.choices is None:
                raise optparse.OptionError(
                    "must supply a list of choices for type 'choice'", self
                )
            if not isinstance(self.choices, (tuple, list)):
                raise optparse.OptionError(
                    "choices must be a list of strings ('%s' supplied)"
                    % str(type(self.choices)).split("'")[1],
                    self,
                )
        elif self.choices is not None:
            raise optparse.OptionError(
                "must not supply choices for type %r" % self.type, self
            )

    # Replace optparse's stock choice check with ours, globally.
    # NOTE(review): this mutates optparse.Option.CHECK_METHODS at class
    # creation time, affecting every Option in the process — confirm this
    # side effect is intended before touching it.
    # pylint: disable=unsupported-assignment-operation
    optparse.Option.CHECK_METHODS[2] = _check_choice  # type: ignore

    def process(self, opt, value, values, parser):
        # First, convert the value(s) to the right type. Howl if any
        # value(s) are bogus.
        value = self.convert_value(opt, value)
        if self.type == "named":
            # merge into any previously accumulated mapping
            existent = getattr(values, self.dest)
            if existent:
                existent.update(value)
                value = existent
        # And then take whatever action is expected of us.
        # This is a separate method to make life easier for
        # subclasses to add new actions.
        return self.take_action(self.action, self.dest, opt, value, values, parser)
+
+
class OptionParser(optparse.OptionParser):
    """optparse.OptionParser tuned for pylint.

    Filters help output by the formatter's verbosity level and disables
    long-option abbreviation matching.
    """

    def __init__(self, option_class, *args, **kwargs):
        # Bug fix: forward the caller-supplied option class instead of
        # hard-coding ``Option`` — the parameter used to be accepted but
        # silently ignored.  All existing callers pass ``Option``, so
        # behaviour is unchanged for them.
        optparse.OptionParser.__init__(self, option_class=option_class, *args, **kwargs)

    def format_option_help(self, formatter=None):
        """Render help, keeping only groups and options whose level is
        visible at the formatter's ``output_level``."""
        if formatter is None:
            formatter = self.formatter
        outputlevel = getattr(formatter, "output_level", 0)
        formatter.store_option_strings(self)
        result = []
        result.append(formatter.format_heading("Options"))
        formatter.indent()
        if self.option_list:
            result.append(optparse.OptionContainer.format_option_help(self, formatter))
            result.append("\n")
        for group in self.option_groups:
            if group.level <= outputlevel and (
                group.description or _level_options(group, outputlevel)
            ):
                result.append(group.format_help(formatter))
                result.append("\n")
        formatter.dedent()
        # Drop the last "\n", or the header if no options or option groups:
        return "".join(result[:-1])

    def _match_long_opt(self, opt):
        """Disable abbreviations."""
        if opt not in self._long_opt:
            raise optparse.BadOptionError(opt)
        return opt
+
+
# pylint: disable=abstract-method; by design?
class _ManHelpFormatter(optparse.HelpFormatter):
    """Help formatter that emits troff (man page) markup instead of text."""

    def __init__(
        self, indent_increment=0, max_help_position=24, width=79, short_first=0
    ):
        optparse.HelpFormatter.__init__(
            self, indent_increment, max_help_position, width, short_first
        )

    def format_heading(self, heading):
        # section heading -> .SH macro
        return ".SH %s\n" % heading.upper()

    def format_description(self, description):
        return description

    def format_option(self, option):
        """Render one option as an .IP paragraph."""
        try:
            optstring = option.option_strings
        except AttributeError:
            optstring = self.format_option_strings(option)
        if option.help:
            help_text = self.expand_default(option)
            # collapse the help onto one line and escape troff backslashes
            help_string = " ".join([l.strip() for l in help_text.splitlines()])
            help_string = help_string.replace("\\", "\\\\")
            help_string = help_string.replace("[current:", "[default:")
        else:
            help_string = ""
        return """.IP "%s"
%s
""" % (
            optstring,
            help_string,
        )

    def format_head(self, optparser, pkginfo, section=1):
        """Render the page header: title, name, synopsis, description."""
        long_desc = ""
        try:
            pgm = optparser._get_prog_name()
        except AttributeError:
            # py >= 2.4.X (dunno which X exactly, at least 2)
            pgm = optparser.get_prog_name()
        short_desc = self.format_short_description(pgm, pkginfo.description)
        if hasattr(pkginfo, "long_desc"):
            long_desc = self.format_long_description(pgm, pkginfo.long_desc)
        return "%s\n%s\n%s\n%s" % (
            self.format_title(pgm, section),
            short_desc,
            self.format_synopsis(pgm),
            long_desc,
        )

    @staticmethod
    def format_title(pgm, section):
        # .TH line carries program name, man section and current date
        date = "%d-%02d-%02d" % time.localtime()[:3]
        return '.TH %s %s "%s" %s' % (pgm, section, date, pgm)

    @staticmethod
    def format_short_description(pgm, short_desc):
        return """.SH NAME
.B %s
\\- %s
""" % (
            pgm,
            short_desc.strip(),
        )

    @staticmethod
    def format_synopsis(pgm):
        return (
            """.SH SYNOPSIS
.B %s
[
.I OPTIONS
] [
.I <arguments>
]
"""
            % pgm
        )

    @staticmethod
    def format_long_description(pgm, long_desc):
        long_desc = "\n".join(line.lstrip() for line in long_desc.splitlines())
        long_desc = long_desc.replace("\n.\n", "\n\n")
        # drop a leading program name: the .B macro repeats it already
        if long_desc.lower().startswith(pgm):
            long_desc = long_desc[len(pgm) :]
        return """.SH DESCRIPTION
.B %s
%s
""" % (
            pgm,
            long_desc.strip(),
        )

    @staticmethod
    def format_tail(pkginfo):
        """Render the trailing SEE ALSO / BUGS / AUTHOR / COPYRIGHT sections."""
        tail = """.SH SEE ALSO
/usr/share/doc/pythonX.Y-%s/

.SH BUGS
Please report bugs on the project\'s mailing list:
%s

.SH AUTHOR
%s <%s>
""" % (
            getattr(pkginfo, "debian_name", pkginfo.modname),
            pkginfo.mailinglist,
            pkginfo.author,
            pkginfo.author_email,
        )

        if hasattr(pkginfo, "copyright"):
            tail += (
                """
.SH COPYRIGHT
%s
"""
                % pkginfo.copyright
            )

        return tail
+
+
class OptionsManagerMixIn:
    """Handle configuration from both a configuration file and command line options"""

    def __init__(self, usage, config_file=None, version=None):
        self.config_file = config_file
        self.reset_parsers(usage, version=version)
        # list of registered options providers
        self.options_providers = []
        # dictionary associating option name to checker
        self._all_options = collections.OrderedDict()
        self._short_options = {}
        self._nocallback_options = {}
        self._mygroups = {}
        # verbosity
        self._maxlevel = 0

    def reset_parsers(self, usage="", version=None):
        """(Re)create the config-file and command-line parsers."""
        # configuration file parser
        self.cfgfile_parser = configparser.ConfigParser(
            inline_comment_prefixes=("#", ";")
        )
        # command line parser
        self.cmdline_parser = OptionParser(Option, usage=usage, version=version)
        self.cmdline_parser.options_manager = self
        self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)

    def register_options_provider(self, provider, own_group=True):
        """register an options provider"""
        assert provider.priority <= 0, "provider's priority can't be >= 0"
        # keep providers sorted by decreasing priority
        for i in range(len(self.options_providers)):
            if provider.priority > self.options_providers[i].priority:
                self.options_providers.insert(i, provider)
                break
        else:
            self.options_providers.append(provider)
        non_group_spec_options = [
            option for option in provider.options if "group" not in option[1]
        ]
        groups = getattr(provider, "option_groups", ())
        if own_group and non_group_spec_options:
            # ungrouped options go into a group named after the provider
            self.add_option_group(
                provider.name.upper(),
                provider.__doc__,
                non_group_spec_options,
                provider,
            )
        else:
            for opt, optdict in non_group_spec_options:
                self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
        for gname, gdoc in groups:
            gname = gname.upper()
            goptions = [
                option
                for option in provider.options
                if option[1].get("group", "").upper() == gname
            ]
            self.add_option_group(gname, gdoc, goptions, provider)

    def add_option_group(self, group_name, _, options, provider):
        """Create (or reuse) an option group and register *options* in it."""
        # add option group to the command line parser
        if group_name in self._mygroups:
            group = self._mygroups[group_name]
        else:
            group = optparse.OptionGroup(
                self.cmdline_parser, title=group_name.capitalize()
            )
            self.cmdline_parser.add_option_group(group)
            group.level = provider.level
            self._mygroups[group_name] = group
            # add section to the config file
            if (
                group_name != "DEFAULT"
                and group_name not in self.cfgfile_parser._sections
            ):
                self.cfgfile_parser.add_section(group_name)
        # add provider's specific options
        for opt, optdict in options:
            self.add_optik_option(provider, group, opt, optdict)

    def add_optik_option(self, provider, optikcontainer, opt, optdict):
        """Register a single option on the given optparse container."""
        args, optdict = self.optik_option(provider, opt, optdict)
        option = optikcontainer.add_option(*args, **optdict)
        self._all_options[opt] = provider
        self._maxlevel = max(self._maxlevel, option.level or 0)

    def optik_option(self, provider, opt, optdict):
        """get our personal option definition and return a suitable form for
        use with optik/optparse
        """
        optdict = copy.copy(optdict)
        if "action" in optdict:
            self._nocallback_options[provider] = opt
        else:
            optdict["action"] = "callback"
            optdict["callback"] = self.cb_set_provider_option
        # default is handled here and *must not* be given to optik if you
        # want the whole machinery to work
        if "default" in optdict:
            if (
                "help" in optdict
                and optdict.get("default") is not None
                and optdict["action"] not in ("store_true", "store_false")
            ):
                optdict["help"] += " [current: %default]"
            del optdict["default"]
        args = ["--" + str(opt)]
        if "short" in optdict:
            self._short_options[optdict["short"]] = opt
            args.append("-" + optdict["short"])
            del optdict["short"]
        # cleanup option definition dict before giving it to optik
        for key in list(optdict.keys()):
            if key not in self._optik_option_attrs:
                optdict.pop(key)
        return args, optdict

    def cb_set_provider_option(self, option, opt, value, parser):
        """optik callback for option setting"""
        if opt.startswith("--"):
            # remove -- on long option
            opt = opt[2:]
        else:
            # short option, get its long equivalent
            opt = self._short_options[opt[1:]]
        # trick since we can't set action='store_true' on options
        if value is None:
            value = 1
        self.global_set_option(opt, value)

    def global_set_option(self, opt, value):
        """set option on the correct option provider"""
        self._all_options[opt].set_option(opt, value)

    def generate_config(self, stream=None, skipsections=(), encoding=None):
        """write a configuration file according to the current configuration
        into the given stream or stdout
        """
        options_by_section = {}
        sections = []
        for provider in self.options_providers:
            for section, options in provider.options_by_section():
                if section is None:
                    section = provider.name
                if section in skipsections:
                    continue
                # only typed, non-deprecated options belong in the rc file
                options = [
                    (n, d, v)
                    for (n, d, v) in options
                    if d.get("type") is not None and not d.get("deprecated")
                ]
                if not options:
                    continue
                if section not in sections:
                    sections.append(section)
                alloptions = options_by_section.setdefault(section, [])
                alloptions += options
        stream = stream or sys.stdout
        printed = False
        for section in sections:
            if printed:
                print("\n", file=stream)
            utils.format_section(
                stream, section.upper(), sorted(options_by_section[section])
            )
            printed = True

    def generate_manpage(self, pkginfo, section=1, stream=None):
        """Write a man page describing every registered option."""
        with _patch_optparse():
            _generate_manpage(
                self.cmdline_parser,
                pkginfo,
                section,
                stream=stream or sys.stdout,
                level=self._maxlevel,
            )

    def load_provider_defaults(self):
        """initialize configuration using default values"""
        for provider in self.options_providers:
            provider.load_defaults()

    def read_config_file(self, config_file=None, verbose=None):
        """read the configuration file but do not load it (i.e. dispatching
        values to each options provider)
        """
        # first register one --long-help option per verbosity level
        helplevel = 1
        while helplevel <= self._maxlevel:
            opt = "-".join(["long"] * helplevel) + "-help"
            if opt in self._all_options:
                break  # already processed
            # pylint: disable=unused-argument
            def helpfunc(option, opt, val, p, level=helplevel):
                print(self.help(level))
                sys.exit(0)

            helpmsg = "%s verbose help." % " ".join(["more"] * helplevel)
            optdict = {"action": "callback", "callback": helpfunc, "help": helpmsg}
            provider = self.options_providers[0]
            self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
            provider.options += ((opt, optdict),)
            helplevel += 1
        if config_file is None:
            config_file = self.config_file
        if config_file is not None:
            config_file = os.path.expanduser(config_file)
            if not os.path.exists(config_file):
                raise IOError("The config file {:s} doesn't exist!".format(config_file))

        use_config_file = config_file and os.path.exists(config_file)
        if use_config_file:
            parser = self.cfgfile_parser

            # Use this encoding in order to strip the BOM marker, if any.
            with io.open(config_file, "r", encoding="utf_8_sig") as fp:
                parser.read_file(fp)

            # normalize sections'title
            for sect, values in list(parser._sections.items()):
                if not sect.isupper() and values:
                    parser._sections[sect.upper()] = values

        if not verbose:
            return

        if use_config_file:
            msg = "Using config file {}".format(os.path.abspath(config_file))
        else:
            msg = "No config file found, using default configuration"
        print(msg, file=sys.stderr)

    def load_config_file(self):
        """dispatch values previously read from a configuration file to each
        options provider)
        """
        parser = self.cfgfile_parser
        for section in parser.sections():
            for option, value in parser.items(section):
                try:
                    self.global_set_option(option, value)
                except (KeyError, optparse.OptionError):
                    # unknown option in the file: silently ignored
                    continue

    def load_configuration(self, **kwargs):
        """override configuration according to given parameters"""
        return self.load_configuration_from_config(kwargs)

    def load_configuration_from_config(self, config):
        """Override configuration from an option-name -> value mapping."""
        for opt, opt_value in config.items():
            opt = opt.replace("_", "-")
            provider = self._all_options[opt]
            provider.set_option(opt, opt_value)

    def load_command_line_configuration(self, args=None):
        """Override configuration according to command line parameters

        return additional arguments
        """
        with _patch_optparse():
            if args is None:
                args = sys.argv[1:]
            else:
                args = list(args)
            (options, args) = self.cmdline_parser.parse_args(args=args)
            # copy parsed values onto providers whose options bypass the
            # cb_set_provider_option callback
            for provider in self._nocallback_options:
                config = provider.config
                for attr in config.__dict__.keys():
                    value = getattr(options, attr, None)
                    if value is None:
                        continue
                    setattr(config, attr, value)
            return args

    def add_help_section(self, title, description, level=0):
        """add a dummy option section for help purpose """
        group = optparse.OptionGroup(
            self.cmdline_parser, title=title.capitalize(), description=description
        )
        group.level = level
        self._maxlevel = max(self._maxlevel, level)
        self.cmdline_parser.add_option_group(group)

    def help(self, level=0):
        """return the usage string for available options """
        self.cmdline_parser.formatter.output_level = level
        with _patch_optparse():
            return self.cmdline_parser.format_help()
+
+
class OptionsProviderMixIn:
    """Mixin to provide options to an OptionsManager"""

    # those attributes should be overridden
    priority = -1
    name = "default"
    options = ()  # type: Tuple[Tuple[str, Dict[str, Any]], ...]
    level = 0

    def __init__(self):
        self.config = optparse.Values()
        self.load_defaults()

    def load_defaults(self):
        """initialize the provider using default values"""
        for opt, optdict in self.options:
            action = optdict.get("action")
            if action != "callback":
                # callback action have no default
                if optdict is None:
                    optdict = self.get_option_def(opt)
                default = optdict.get("default")
                self.set_option(opt, default, action, optdict)

    def option_attrname(self, opt, optdict=None):
        """get the config attribute corresponding to opt"""
        if optdict is None:
            optdict = self.get_option_def(opt)
        return optdict.get("dest", opt.replace("-", "_"))

    def option_value(self, opt):
        """get the current value for the given option"""
        return getattr(self.config, self.option_attrname(opt), None)

    def set_option(self, optname, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)"""
        if optdict is None:
            optdict = self.get_option_def(optname)
        if value is not None:
            value = _validate(value, optdict, optname)
        if action is None:
            action = optdict.get("action", "store")
        if action == "store":
            setattr(self.config, self.option_attrname(optname, optdict), value)
        # NOTE(review): store_true initialises to 0 and store_false to 1
        # (ints, not bools) regardless of *value* — presumably the real
        # value is applied later by optparse; confirm before changing.
        elif action in ("store_true", "count"):
            setattr(self.config, self.option_attrname(optname, optdict), 0)
        elif action == "store_false":
            setattr(self.config, self.option_attrname(optname, optdict), 1)
        elif action == "append":
            optname = self.option_attrname(optname, optdict)
            _list = getattr(self.config, optname, None)
            if _list is None:
                # first value: start a list (or adopt the given sequence)
                if isinstance(value, (list, tuple)):
                    _list = value
                elif value is not None:
                    _list = []
                    _list.append(value)
                setattr(self.config, optname, _list)
            elif isinstance(_list, tuple):
                setattr(self.config, optname, _list + (value,))
            else:
                _list.append(value)
        elif action == "callback":
            optdict["callback"](None, optname, value, None)
        else:
            raise UnsupportedAction(action)

    def get_option_def(self, opt):
        """return the dictionary defining an option given its name"""
        assert self.options
        for option in self.options:
            if option[0] == opt:
                return option[1]
        raise optparse.OptionError(
            "no such option %s in section %r" % (opt, self.name), opt
        )

    def options_by_section(self):
        """return an iterator on options grouped by section

        (section, [list of (optname, optdict, optvalue)])
        """
        sections = {}
        for optname, optdict in self.options:
            sections.setdefault(optdict.get("group"), []).append(
                (optname, optdict, self.option_value(optname))
            )
        # ungrouped options come first, then groups in sorted order
        if None in sections:
            yield None, sections.pop(None)
        for section, options in sorted(sections.items()):
            yield section.upper(), options

    def options_and_values(self, options=None):
        """Yield (optname, optdict, current value) for *options* (default: all)."""
        if options is None:
            options = self.options
        for optname, optdict in options:
            yield (optname, optdict, self.option_value(optname))
+
+
class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
    """basic mixin for simple configurations which don't need the
    manager / providers model
    """

    def __init__(self, *args, **kwargs):
        if not args:
            kwargs.setdefault("usage", "")
        OptionsManagerMixIn.__init__(self, *args, **kwargs)
        OptionsProviderMixIn.__init__(self)
        # derive option groups from the options' "group" keys when the
        # subclass did not declare any explicitly
        if not getattr(self, "option_groups", None):
            self.option_groups = []
            for _, optdict in self.options:
                try:
                    gdef = (optdict["group"].upper(), "")
                except KeyError:
                    continue
                if gdef not in self.option_groups:
                    self.option_groups.append(gdef)
        # this object is both the manager and its only provider
        self.register_options_provider(self, own_group=False)
+
+
def _generate_manpage(optparser, pkginfo, section=1, stream=sys.stdout, level=0):
    """Write a man page (head, options, tail) for *optparser* to *stream*."""
    formatter = _ManHelpFormatter()
    formatter.output_level = level
    # the formatter needs the parser to expand option default markers
    formatter.parser = optparser
    print(formatter.format_head(optparser, pkginfo, section), file=stream)
    print(optparser.format_option_help(formatter), file=stream)
    print(formatter.format_tail(pkginfo), file=stream)
diff --git a/venv/Lib/site-packages/pylint/constants.py b/venv/Lib/site-packages/pylint/constants.py
new file mode 100644
index 0000000..852fc15
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/constants.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import re
+
# Allow stopping after the first semicolon/hash encountered,
# so that an option can be continued with the reasons
# why it is active or disabled.
OPTION_RGX = re.compile(r"\s*#.*\bpylint:\s*([^;#]+)[;#]{0,1}")

# File extensions pylint treats as Python(-related) modules.
PY_EXTS = (".py", ".pyc", ".pyo", ".pyw", ".so", ".dll")

MSG_STATE_CONFIDENCE = 2
# Message-category letters (keys of MSG_TYPES); the ordering is relied on
# by consumers elsewhere — TODO confirm the exact use before reordering.
_MSG_ORDER = "EWRCIF"
MSG_STATE_SCOPE_CONFIG = 0
MSG_STATE_SCOPE_MODULE = 1

# The line/node distinction does not apply to fatal errors and reports.
_SCOPE_EXEMPT = "FR"

# One-letter message-category code to its long name.
MSG_TYPES = {
    "I": "info",
    "C": "convention",
    "R": "refactor",
    "W": "warning",
    "E": "error",
    "F": "fatal",
}
# Reverse mapping: long name back to its one-letter code.
MSG_TYPES_LONG = {v: k for k, v in MSG_TYPES.items()}

# Per-category status bits (powers of two except "I"); presumably OR-ed
# into pylint's exit code — confirm against the lint runner.
MSG_TYPES_STATUS = {"I": 0, "C": 16, "R": 8, "W": 4, "E": 2, "F": 1}

# You probably don't want to change the MAIN_CHECKER_NAME
# This would affect rcfile generation and retro-compatibility
# on all project using [MASTER] in their rcfile.
MAIN_CHECKER_NAME = "master"
+
+
class WarningScope:
    """Constants naming the scope a warning message applies to."""

    LINE = "line-based-msg"
    NODE = "node-based-msg"
diff --git a/venv/Lib/site-packages/pylint/epylint.py b/venv/Lib/site-packages/pylint/epylint.py
new file mode 100644
index 0000000..85f1c86
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/epylint.py
@@ -0,0 +1,197 @@
+# -*- coding: utf-8;
+# mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4
+# -*- vim:fenc=utf-8:ft=python:et:sw=4:ts=4:sts=4
+
+# Copyright (c) 2008-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2014 Jakob Normark <jakobnormark@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Manuel Vázquez Acosta <mva.led@gmail.com>
+# Copyright (c) 2014 Derek Harland <derek.harland@finq.co.nz>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Mihai Balint <balint.mihai@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Daniela Plascencia <daplascen@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Ryan McGuire <ryan@enigmacurry.com>
+# Copyright (c) 2018 thernstig <30827238+thernstig@users.noreply.github.com>
+# Copyright (c) 2018 Radostin Stoyanov <rst0git@users.noreply.github.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Emacs and Flymake compatible Pylint.
+
+This script is for integration with emacs and is compatible with flymake mode.
+
+epylint walks out of python packages before invoking pylint. This avoids
+reporting import errors that occur when a module within a package uses the
+absolute import path to get another module within this package.
+
+For example:
+ - Suppose a package is structured as
+
+ a/__init__.py
+ a/b/x.py
+ a/c/y.py
+
+ - Then if y.py imports x as "from a.b import x" the following produces pylint
+ errors
+
+ cd a/c; pylint y.py
+
+ - The following obviously doesn't
+
+ pylint a/c/y.py
+
+ - As this script will be invoked by emacs within the directory of the file
+ we are checking we need to go out of it to avoid these false positives.
+
+
+You may also use py_run to run pylint with desired options and get back (or not)
+its output.
+"""
+import os
+import os.path as osp
+import shlex
+import sys
+from io import StringIO
+from subprocess import PIPE, Popen
+
+
+def _get_env():
+ """Extracts the environment PYTHONPATH and appends the current sys.path to
+ those."""
+ env = dict(os.environ)
+ env["PYTHONPATH"] = os.pathsep.join(sys.path)
+ return env
+
+
+def lint(filename, options=()):
+ """Pylint the given file.
+
+ When run from emacs we will be in the directory of a file, and passed its
+ filename. If this file is part of a package and is trying to import other
+ modules from within its own package or another package rooted in a directory
+ below it, pylint will classify it as a failed import.
+
+ To get around this, we traverse down the directory tree to find the root of
+ the package this module is in. We then invoke pylint from this directory.
+
+ Finally, we must correct the filenames in the output generated by pylint so
+ Emacs doesn't become confused (it will expect just the original filename,
+ while pylint may extend it with extra directories if we've traversed down
+ the tree)
+ """
+ # traverse downwards until we are out of a python package
+ full_path = osp.abspath(filename)
+ parent_path = osp.dirname(full_path)
+ child_path = osp.basename(full_path)
+
+ while parent_path != "/" and osp.exists(osp.join(parent_path, "__init__.py")):
+ child_path = osp.join(osp.basename(parent_path), child_path)
+ parent_path = osp.dirname(parent_path)
+
+ # Start pylint
+ # Ensure we use the python and pylint associated with the running epylint
+ run_cmd = "import sys; from pylint.lint import Run; Run(sys.argv[1:])"
+ cmd = (
+ [sys.executable, "-c", run_cmd]
+ + [
+ "--msg-template",
+ "{path}:{line}: {category} ({msg_id}, {symbol}, {obj}) {msg}",
+ "-r",
+ "n",
+ child_path,
+ ]
+ + list(options)
+ )
+ process = Popen(
+ cmd, stdout=PIPE, cwd=parent_path, env=_get_env(), universal_newlines=True
+ )
+
+ for line in process.stdout:
+ # remove pylintrc warning
+ if line.startswith("No config file found"):
+ continue
+
+ # modify the file name thats output to reverse the path traversal we made
+ parts = line.split(":")
+ if parts and parts[0] == child_path:
+ line = ":".join([filename] + parts[1:])
+ print(line, end=" ")
+
+ process.wait()
+ return process.returncode
+
+
+def py_run(command_options="", return_std=False, stdout=None, stderr=None):
+ """Run pylint from python
+
+ ``command_options`` is a string containing ``pylint`` command line options;
+ ``return_std`` (boolean) indicates return of created standard output
+ and error (see below);
+ ``stdout`` and ``stderr`` are 'file-like' objects in which standard output
+ could be written.
+
+ Calling agent is responsible for stdout/err management (creation, close).
+ Default standard output and error are those from sys,
+ or standalone ones (``subprocess.PIPE``) are used
+ if they are not set and ``return_std``.
+
+ If ``return_std`` is set to ``True``, this function returns a 2-uple
+ containing standard output and error related to created process,
+ as follows: ``(stdout, stderr)``.
+
+ To silently run Pylint on a module, and get its standard output and error:
+ >>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True)
+ """
+ # Detect if we use Python as executable or not, else default to `python`
+ executable = sys.executable if "python" in sys.executable else "python"
+
+ # Create command line to call pylint
+ epylint_part = [executable, "-c", "from pylint import epylint;epylint.Run()"]
+ options = shlex.split(command_options, posix=not sys.platform.startswith("win"))
+ cli = epylint_part + options
+
+ # Providing standard output and/or error if not set
+ if stdout is None:
+ if return_std:
+ stdout = PIPE
+ else:
+ stdout = sys.stdout
+ if stderr is None:
+ if return_std:
+ stderr = PIPE
+ else:
+ stderr = sys.stderr
+ # Call pylint in a subprocess
+ process = Popen(
+ cli,
+ shell=False,
+ stdout=stdout,
+ stderr=stderr,
+ env=_get_env(),
+ universal_newlines=True,
+ )
+ proc_stdout, proc_stderr = process.communicate()
+ # Return standard output and error
+ if return_std:
+ return StringIO(proc_stdout), StringIO(proc_stderr)
+ return None
+
+
+def Run():
+ if len(sys.argv) == 1:
+ print("Usage: %s <filename> [options]" % sys.argv[0])
+ sys.exit(1)
+ elif not osp.exists(sys.argv[1]):
+ print("%s does not exist" % sys.argv[1])
+ sys.exit(1)
+ else:
+ sys.exit(lint(sys.argv[1], sys.argv[2:]))
+
+
+if __name__ == "__main__":
+ Run()
diff --git a/venv/Lib/site-packages/pylint/exceptions.py b/venv/Lib/site-packages/pylint/exceptions.py
new file mode 100644
index 0000000..d5dd17f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/exceptions.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Exception classes raised by various operations within pylint."""
+
+
+class InvalidMessageError(Exception):
+ """raised when a message creation, registration or addition is rejected"""
+
+
+class UnknownMessageError(Exception):
+ """raised when an unregistered message id is encountered"""
+
+
+class EmptyReportError(Exception):
+ """raised when a report is empty and so should not be displayed"""
+
+
+class InvalidReporterError(Exception):
+ """raised when selected reporter is invalid (e.g. not found)"""
+
+
+class InvalidArgsError(ValueError):
+ """raised when passed arguments are invalid, e.g., have the wrong length"""
diff --git a/venv/Lib/site-packages/pylint/extensions/__init__.py b/venv/Lib/site-packages/pylint/extensions/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__init__.py
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..03323e7
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/_check_docs_utils.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/_check_docs_utils.cpython-37.pyc
new file mode 100644
index 0000000..271e216
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/_check_docs_utils.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/bad_builtin.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/bad_builtin.cpython-37.pyc
new file mode 100644
index 0000000..bb50903
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/bad_builtin.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/broad_try_clause.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/broad_try_clause.cpython-37.pyc
new file mode 100644
index 0000000..cd3cd71
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/broad_try_clause.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/check_docs.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/check_docs.cpython-37.pyc
new file mode 100644
index 0000000..9730100
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/check_docs.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/check_elif.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/check_elif.cpython-37.pyc
new file mode 100644
index 0000000..030378b
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/check_elif.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/comparetozero.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/comparetozero.cpython-37.pyc
new file mode 100644
index 0000000..83eaae3
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/comparetozero.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/docparams.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/docparams.cpython-37.pyc
new file mode 100644
index 0000000..3d447e1
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/docparams.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/docstyle.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/docstyle.cpython-37.pyc
new file mode 100644
index 0000000..e6d0d7d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/docstyle.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/emptystring.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/emptystring.cpython-37.pyc
new file mode 100644
index 0000000..f5f4892
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/emptystring.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/mccabe.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/mccabe.cpython-37.pyc
new file mode 100644
index 0000000..cb64a4d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/mccabe.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/overlapping_exceptions.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/overlapping_exceptions.cpython-37.pyc
new file mode 100644
index 0000000..f099683
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/overlapping_exceptions.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/__pycache__/redefined_variable_type.cpython-37.pyc b/venv/Lib/site-packages/pylint/extensions/__pycache__/redefined_variable_type.cpython-37.pyc
new file mode 100644
index 0000000..eb897a3
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/__pycache__/redefined_variable_type.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/extensions/_check_docs_utils.py b/venv/Lib/site-packages/pylint/extensions/_check_docs_utils.py
new file mode 100644
index 0000000..fe1603f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/_check_docs_utils.py
@@ -0,0 +1,792 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016-2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016 Yuri Bochkarev <baltazar.bz@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Mitar <mitar.github@tnode.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Mitchell T.H. Young <mitchelly@gmail.com>
+# Copyright (c) 2018 Adrian Chirieac <chirieacam@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Utility methods for docstring checking."""
+
+import re
+
+import astroid
+
+from pylint.checkers import utils
+
+
+def space_indentation(s):
+ """The number of leading spaces in a string
+
+ :param str s: input string
+
+ :rtype: int
+ :return: number of leading spaces
+ """
+ return len(s) - len(s.lstrip(" "))
+
+
+def get_setters_property_name(node):
+ """Get the name of the property that the given node is a setter for.
+
+ :param node: The node to get the property name for.
+ :type node: str
+
+ :rtype: str or None
+ :returns: The name of the property that the node is a setter for,
+ or None if one could not be found.
+ """
+ decorators = node.decorators.nodes if node.decorators else []
+ for decorator in decorators:
+ if (
+ isinstance(decorator, astroid.Attribute)
+ and decorator.attrname == "setter"
+ and isinstance(decorator.expr, astroid.Name)
+ ):
+ return decorator.expr.name
+ return None
+
+
+def get_setters_property(node):
+ """Get the property node for the given setter node.
+
+ :param node: The node to get the property for.
+ :type node: astroid.FunctionDef
+
+ :rtype: astroid.FunctionDef or None
+ :returns: The node relating to the property of the given setter node,
+ or None if one could not be found.
+ """
+ property_ = None
+
+ property_name = get_setters_property_name(node)
+ class_node = utils.node_frame_class(node)
+ if property_name and class_node:
+ class_attrs = class_node.getattr(node.name)
+ for attr in class_attrs:
+ if utils.decorated_with_property(attr):
+ property_ = attr
+ break
+
+ return property_
+
+
+def returns_something(return_node):
+ """Check if a return node returns a value other than None.
+
+ :param return_node: The return node to check.
+ :type return_node: astroid.Return
+
+ :rtype: bool
+ :return: True if the return node returns a value other than None,
+ False otherwise.
+ """
+ returns = return_node.value
+
+ if returns is None:
+ return False
+
+ return not (isinstance(returns, astroid.Const) and returns.value is None)
+
+
+def _get_raise_target(node):
+ if isinstance(node.exc, astroid.Call):
+ func = node.exc.func
+ if isinstance(func, (astroid.Name, astroid.Attribute)):
+ return utils.safe_infer(func)
+ return None
+
+
+def possible_exc_types(node):
+ """
+ Gets all of the possible raised exception types for the given raise node.
+
+ .. note::
+
+ Caught exception types are ignored.
+
+
+ :param node: The raise node to find exception types for.
+ :type node: astroid.node_classes.NodeNG
+
+ :returns: A list of exception types possibly raised by :param:`node`.
+ :rtype: set(str)
+ """
+ excs = []
+ if isinstance(node.exc, astroid.Name):
+ inferred = utils.safe_infer(node.exc)
+ if inferred:
+ excs = [inferred.name]
+ elif node.exc is None:
+ handler = node.parent
+ while handler and not isinstance(handler, astroid.ExceptHandler):
+ handler = handler.parent
+
+ if handler and handler.type:
+ inferred_excs = astroid.unpack_infer(handler.type)
+ excs = (exc.name for exc in inferred_excs if exc is not astroid.Uninferable)
+ else:
+ target = _get_raise_target(node)
+ if isinstance(target, astroid.ClassDef):
+ excs = [target.name]
+ elif isinstance(target, astroid.FunctionDef):
+ for ret in target.nodes_of_class(astroid.Return):
+ if ret.frame() != target:
+ # return from inner function - ignore it
+ continue
+
+ val = utils.safe_infer(ret.value)
+ if (
+ val
+ and isinstance(val, (astroid.Instance, astroid.ClassDef))
+ and utils.inherit_from_std_ex(val)
+ ):
+ excs.append(val.name)
+
+ try:
+ return {exc for exc in excs if not utils.node_ignores_exception(node, exc)}
+ except astroid.InferenceError:
+ return set()
+
+
+def docstringify(docstring, default_type="default"):
+ for docstring_type in [
+ SphinxDocstring,
+ EpytextDocstring,
+ GoogleDocstring,
+ NumpyDocstring,
+ ]:
+ instance = docstring_type(docstring)
+ if instance.is_valid():
+ return instance
+
+ docstring_type = DOCSTRING_TYPES.get(default_type, Docstring)
+ return docstring_type(docstring)
+
+
+class Docstring:
+ re_for_parameters_see = re.compile(
+ r"""
+ For\s+the\s+(other)?\s*parameters\s*,\s+see
+ """,
+ re.X | re.S,
+ )
+
+ supports_yields = None
+ """True if the docstring supports a "yield" section.
+
+ False if the docstring uses the returns section to document generators.
+ """
+
+ # These methods are designed to be overridden
+ # pylint: disable=no-self-use
+ def __init__(self, doc):
+ doc = doc or ""
+ self.doc = doc.expandtabs()
+
+ def is_valid(self):
+ return False
+
+ def exceptions(self):
+ return set()
+
+ def has_params(self):
+ return False
+
+ def has_returns(self):
+ return False
+
+ def has_rtype(self):
+ return False
+
+ def has_property_returns(self):
+ return False
+
+ def has_property_type(self):
+ return False
+
+ def has_yields(self):
+ return False
+
+ def has_yields_type(self):
+ return False
+
+ def match_param_docs(self):
+ return set(), set()
+
+ def params_documented_elsewhere(self):
+ return self.re_for_parameters_see.search(self.doc) is not None
+
+
+class SphinxDocstring(Docstring):
+ re_type = r"""
+ [~!.]? # Optional link style prefix
+ \w(?:\w|\.[^\.])* # Valid python name
+ """
+
+ re_simple_container_type = r"""
+ {type} # a container type
+ [\(\[] [^\n\s]+ [\)\]] # with the contents of the container
+ """.format(
+ type=re_type
+ )
+
+ re_xref = r"""
+ (?::\w+:)? # optional tag
+ `{}` # what to reference
+ """.format(
+ re_type
+ )
+
+ re_param_raw = r"""
+ : # initial colon
+ (?: # Sphinx keywords
+ param|parameter|
+ arg|argument|
+ key|keyword
+ )
+ \s+ # whitespace
+
+ (?: # optional type declaration
+ ({type}|{container_type})
+ \s+
+ )?
+
+ (\w+) # Parameter name
+ \s* # whitespace
+ : # final colon
+ """.format(
+ type=re_type, container_type=re_simple_container_type
+ )
+ re_param_in_docstring = re.compile(re_param_raw, re.X | re.S)
+
+ re_type_raw = r"""
+ :type # Sphinx keyword
+ \s+ # whitespace
+ ({type}) # Parameter name
+ \s* # whitespace
+ : # final colon
+ """.format(
+ type=re_type
+ )
+ re_type_in_docstring = re.compile(re_type_raw, re.X | re.S)
+
+ re_property_type_raw = r"""
+ :type: # Sphinx keyword
+ \s+ # whitespace
+ {type} # type declaration
+ """.format(
+ type=re_type
+ )
+ re_property_type_in_docstring = re.compile(re_property_type_raw, re.X | re.S)
+
+ re_raise_raw = r"""
+ : # initial colon
+ (?: # Sphinx keyword
+ raises?|
+ except|exception
+ )
+ \s+ # whitespace
+ ({type}) # exception type
+ \s* # whitespace
+ : # final colon
+ """.format(
+ type=re_type
+ )
+ re_raise_in_docstring = re.compile(re_raise_raw, re.X | re.S)
+
+ re_rtype_in_docstring = re.compile(r":rtype:")
+
+ re_returns_in_docstring = re.compile(r":returns?:")
+
+ supports_yields = False
+
+ def is_valid(self):
+ return bool(
+ self.re_param_in_docstring.search(self.doc)
+ or self.re_raise_in_docstring.search(self.doc)
+ or self.re_rtype_in_docstring.search(self.doc)
+ or self.re_returns_in_docstring.search(self.doc)
+ or self.re_property_type_in_docstring.search(self.doc)
+ )
+
+ def exceptions(self):
+ types = set()
+
+ for match in re.finditer(self.re_raise_in_docstring, self.doc):
+ raise_type = match.group(1)
+ types.add(raise_type)
+
+ return types
+
+ def has_params(self):
+ if not self.doc:
+ return False
+
+ return self.re_param_in_docstring.search(self.doc) is not None
+
+ def has_returns(self):
+ if not self.doc:
+ return False
+
+ return bool(self.re_returns_in_docstring.search(self.doc))
+
+ def has_rtype(self):
+ if not self.doc:
+ return False
+
+ return bool(self.re_rtype_in_docstring.search(self.doc))
+
+ def has_property_returns(self):
+ if not self.doc:
+ return False
+
+ # The summary line is the return doc,
+ # so the first line must not be a known directive.
+ return not self.doc.lstrip().startswith(":")
+
+ def has_property_type(self):
+ if not self.doc:
+ return False
+
+ return bool(self.re_property_type_in_docstring.search(self.doc))
+
+ def match_param_docs(self):
+ params_with_doc = set()
+ params_with_type = set()
+
+ for match in re.finditer(self.re_param_in_docstring, self.doc):
+ name = match.group(2)
+ params_with_doc.add(name)
+ param_type = match.group(1)
+ if param_type is not None:
+ params_with_type.add(name)
+
+ params_with_type.update(re.findall(self.re_type_in_docstring, self.doc))
+ return params_with_doc, params_with_type
+
+
+class EpytextDocstring(SphinxDocstring):
+ """
+ Epytext is similar to Sphinx. See the docs:
+ http://epydoc.sourceforge.net/epytext.html
+ http://epydoc.sourceforge.net/fields.html#fields
+
+ It's used in PyCharm:
+ https://www.jetbrains.com/help/pycharm/2016.1/creating-documentation-comments.html#d848203e314
+ https://www.jetbrains.com/help/pycharm/2016.1/using-docstrings-to-specify-types.html
+ """
+
+ re_param_in_docstring = re.compile(
+ SphinxDocstring.re_param_raw.replace(":", "@", 1), re.X | re.S
+ )
+
+ re_type_in_docstring = re.compile(
+ SphinxDocstring.re_type_raw.replace(":", "@", 1), re.X | re.S
+ )
+
+ re_property_type_in_docstring = re.compile(
+ SphinxDocstring.re_property_type_raw.replace(":", "@", 1), re.X | re.S
+ )
+
+ re_raise_in_docstring = re.compile(
+ SphinxDocstring.re_raise_raw.replace(":", "@", 1), re.X | re.S
+ )
+
+ re_rtype_in_docstring = re.compile(
+ r"""
+ @ # initial "at" symbol
+ (?: # Epytext keyword
+ rtype|returntype
+ )
+ : # final colon
+ """,
+ re.X | re.S,
+ )
+
+ re_returns_in_docstring = re.compile(r"@returns?:")
+
+ def has_property_returns(self):
+ if not self.doc:
+ return False
+
+ # If this is a property docstring, the summary is the return doc.
+ if self.has_property_type():
+ # The summary line is the return doc,
+ # so the first line must not be a known directive.
+ return not self.doc.lstrip().startswith("@")
+
+ return False
+
+
+class GoogleDocstring(Docstring):
+ re_type = SphinxDocstring.re_type
+
+ re_xref = SphinxDocstring.re_xref
+
+ re_container_type = r"""
+ (?:{type}|{xref}) # a container type
+ [\(\[] [^\n]+ [\)\]] # with the contents of the container
+ """.format(
+ type=re_type, xref=re_xref
+ )
+
+ re_multiple_type = r"""
+ (?:{container_type}|{type}|{xref})
+ (?:\s+(?:of|or)\s+(?:{container_type}|{type}|{xref}))*
+ """.format(
+ type=re_type, xref=re_xref, container_type=re_container_type
+ )
+
+ _re_section_template = r"""
+ ^([ ]*) {0} \s*: \s*$ # Google parameter header
+ ( .* ) # section
+ """
+
+ re_param_section = re.compile(
+ _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
+ re.X | re.S | re.M,
+ )
+
+ re_keyword_param_section = re.compile(
+ _re_section_template.format(r"Keyword\s(?:Args|Arguments|Parameters)"),
+ re.X | re.S | re.M,
+ )
+
+ re_param_line = re.compile(
+ r"""
+ \s* \*{{0,2}}(\w+) # identifier potentially with asterisks
+ \s* ( [(]
+ {type}
+ (?:,\s+optional)?
+ [)] )? \s* : # optional type declaration
+ \s* (.*) # beginning of optional description
+ """.format(
+ type=re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
+
+ re_raise_section = re.compile(
+ _re_section_template.format(r"Raises"), re.X | re.S | re.M
+ )
+
+ re_raise_line = re.compile(
+ r"""
+ \s* ({type}) \s* : # identifier
+ \s* (.*) # beginning of optional description
+ """.format(
+ type=re_type
+ ),
+ re.X | re.S | re.M,
+ )
+
+ re_returns_section = re.compile(
+ _re_section_template.format(r"Returns?"), re.X | re.S | re.M
+ )
+
+ re_returns_line = re.compile(
+ r"""
+ \s* ({type}:)? # identifier
+ \s* (.*) # beginning of description
+ """.format(
+ type=re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
+
+ re_property_returns_line = re.compile(
+ r"""
+ ^{type}: # indentifier
+ \s* (.*) # Summary line / description
+ """.format(
+ type=re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
+
+ re_yields_section = re.compile(
+ _re_section_template.format(r"Yields?"), re.X | re.S | re.M
+ )
+
+ re_yields_line = re_returns_line
+
+ supports_yields = True
+
+ def is_valid(self):
+ return bool(
+ self.re_param_section.search(self.doc)
+ or self.re_raise_section.search(self.doc)
+ or self.re_returns_section.search(self.doc)
+ or self.re_yields_section.search(self.doc)
+ or self.re_property_returns_line.search(self._first_line())
+ )
+
+ def has_params(self):
+ if not self.doc:
+ return False
+
+ return self.re_param_section.search(self.doc) is not None
+
+ def has_returns(self):
+ if not self.doc:
+ return False
+
+ entries = self._parse_section(self.re_returns_section)
+ for entry in entries:
+ match = self.re_returns_line.match(entry)
+ if not match:
+ continue
+
+ return_desc = match.group(2)
+ if return_desc:
+ return True
+
+ return False
+
+ def has_rtype(self):
+ if not self.doc:
+ return False
+
+ entries = self._parse_section(self.re_returns_section)
+ for entry in entries:
+ match = self.re_returns_line.match(entry)
+ if not match:
+ continue
+
+ return_type = match.group(1)
+ if return_type:
+ return True
+
+ return False
+
+ def has_property_returns(self):
+ # The summary line is the return doc,
+ # so the first line must not be a known directive.
+ first_line = self._first_line()
+ return not bool(
+ self.re_param_section.search(first_line)
+ or self.re_raise_section.search(first_line)
+ or self.re_returns_section.search(first_line)
+ or self.re_yields_section.search(first_line)
+ )
+
+ def has_property_type(self):
+ if not self.doc:
+ return False
+
+ return bool(self.re_property_returns_line.match(self._first_line()))
+
+ def has_yields(self):
+ if not self.doc:
+ return False
+
+ entries = self._parse_section(self.re_yields_section)
+ for entry in entries:
+ match = self.re_yields_line.match(entry)
+ if not match:
+ continue
+
+ yield_desc = match.group(2)
+ if yield_desc:
+ return True
+
+ return False
+
+ def has_yields_type(self):
+ if not self.doc:
+ return False
+
+ entries = self._parse_section(self.re_yields_section)
+ for entry in entries:
+ match = self.re_yields_line.match(entry)
+ if not match:
+ continue
+
+ yield_type = match.group(1)
+ if yield_type:
+ return True
+
+ return False
+
+ def exceptions(self):
+ types = set()
+
+ entries = self._parse_section(self.re_raise_section)
+ for entry in entries:
+ match = self.re_raise_line.match(entry)
+ if not match:
+ continue
+
+ exc_type = match.group(1)
+ exc_desc = match.group(2)
+ if exc_desc:
+ types.add(exc_type)
+
+ return types
+
+ def match_param_docs(self):
+ params_with_doc = set()
+ params_with_type = set()
+
+ entries = self._parse_section(self.re_param_section)
+ entries.extend(self._parse_section(self.re_keyword_param_section))
+ for entry in entries:
+ match = self.re_param_line.match(entry)
+ if not match:
+ continue
+
+ param_name = match.group(1)
+ param_type = match.group(2)
+ param_desc = match.group(3)
+ if param_type:
+ params_with_type.add(param_name)
+
+ if param_desc:
+ params_with_doc.add(param_name)
+
+ return params_with_doc, params_with_type
+
+ def _first_line(self):
+ return self.doc.lstrip().split("\n", 1)[0]
+
+ @staticmethod
+ def min_section_indent(section_match):
+ return len(section_match.group(1)) + 1
+
+ @staticmethod
+ def _is_section_header(_):
+ # Google parsing does not need to detect section headers,
+ # because it works off of indentation level only
+ return False
+
+ def _parse_section(self, section_re):
+ section_match = section_re.search(self.doc)
+ if section_match is None:
+ return []
+
+ min_indentation = self.min_section_indent(section_match)
+
+ entries = []
+ entry = []
+ is_first = True
+ for line in section_match.group(2).splitlines():
+ if not line.strip():
+ continue
+ indentation = space_indentation(line)
+ if indentation < min_indentation:
+ break
+
+ # The first line after the header defines the minimum
+ # indentation.
+ if is_first:
+ min_indentation = indentation
+ is_first = False
+
+ if indentation == min_indentation:
+ if self._is_section_header(line):
+ break
+ # Lines with minimum indentation must contain the beginning
+ # of a new parameter documentation.
+ if entry:
+ entries.append("\n".join(entry))
+ entry = []
+
+ entry.append(line)
+
+ if entry:
+ entries.append("\n".join(entry))
+
+ return entries
+
+
+class NumpyDocstring(GoogleDocstring):
+ _re_section_template = r"""
+ ^([ ]*) {0} \s*?$ # Numpy parameters header
+ \s* [-=]+ \s*?$ # underline
+ ( .* ) # section
+ """
+
+ re_param_section = re.compile(
+ _re_section_template.format(r"(?:Args|Arguments|Parameters)"),
+ re.X | re.S | re.M,
+ )
+
+ re_param_line = re.compile(
+ r"""
+ \s* (\w+) # identifier
+ \s* :
+ \s* (?:({type})(?:,\s+optional)?)? # optional type declaration
+ \n # description starts on a new line
+ \s* (.*) # description
+ """.format(
+ type=GoogleDocstring.re_multiple_type
+ ),
+ re.X | re.S,
+ )
+
+ re_raise_section = re.compile(
+ _re_section_template.format(r"Raises"), re.X | re.S | re.M
+ )
+
+ re_raise_line = re.compile(
+ r"""
+ \s* ({type})$ # type declaration
+ \s* (.*) # optional description
+ """.format(
+ type=GoogleDocstring.re_type
+ ),
+ re.X | re.S | re.M,
+ )
+
+ re_returns_section = re.compile(
+ _re_section_template.format(r"Returns?"), re.X | re.S | re.M
+ )
+
+ re_returns_line = re.compile(
+ r"""
+ \s* (?:\w+\s+:\s+)? # optional name
+ ({type})$ # type declaration
+ \s* (.*) # optional description
+ """.format(
+ type=GoogleDocstring.re_multiple_type
+ ),
+ re.X | re.S | re.M,
+ )
+
+ re_yields_section = re.compile(
+ _re_section_template.format(r"Yields?"), re.X | re.S | re.M
+ )
+
+ re_yields_line = re_returns_line
+
+ supports_yields = True
+
+ @staticmethod
+ def min_section_indent(section_match):
+ return len(section_match.group(1))
+
+ @staticmethod
+ def _is_section_header(line):
+ return bool(re.match(r"\s*-+$", line))
+
+
+DOCSTRING_TYPES = {
+ "sphinx": SphinxDocstring,
+ "epytext": EpytextDocstring,
+ "google": GoogleDocstring,
+ "numpy": NumpyDocstring,
+ "default": Docstring,
+}
+"""A map of the name of the docstring type to its class.
+
+:type: dict(str, type)
+"""
diff --git a/venv/Lib/site-packages/pylint/extensions/bad_builtin.py b/venv/Lib/site-packages/pylint/extensions/bad_builtin.py
new file mode 100644
index 0000000..754c409
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/bad_builtin.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Checker for deprecated builtins."""
+import astroid
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker
+
# Builtins flagged by default (overridable via the bad-functions option).
BAD_FUNCTIONS = ["map", "filter"]
# Remediation hints shown next to the warning; map and filter share the
# same advice, so the text is defined once.
_COMPREHENSION_HINT = "Using a list comprehension can be clearer."
BUILTIN_HINTS = {"map": _COMPREHENSION_HINT, "filter": _COMPREHENSION_HINT}
+
+
class BadBuiltinChecker(BaseChecker):
    """Warn about calls to blacklisted builtin functions (map/filter by default)."""

    __implements__ = (IAstroidChecker,)
    name = "deprecated_builtins"
    msgs = {
        "W0141": (
            "Used builtin function %s",
            "bad-builtin",
            "Used when a black listed builtin function is used (see the "
            "bad-function option). Usual black listed functions are the ones "
            "like map, or filter , where Python offers now some cleaner "
            "alternative like list comprehension.",
        )
    }

    options = (
        (
            "bad-functions",
            {
                "default": BAD_FUNCTIONS,
                "type": "csv",
                "metavar": "<builtin function names>",
                "help": "List of builtins function names that should not be "
                "used, separated by a comma",
            },
        ),
    )

    @check_messages("bad-builtin")
    def visit_call(self, node):
        """Flag a call whose callee name is one of the configured bad builtins."""
        func = node.func
        if not isinstance(func, astroid.Name):
            return
        name = func.name
        # A name defined in the enclosing frame or at module scope shadows
        # the builtin, so the call is not the builtin being used.
        if name in node.frame() or name in node.root():
            return
        if name not in self.config.bad_functions:
            return
        hint = BUILTIN_HINTS.get(name)
        args = "%r. %s" % (name, hint) if hint else repr(name)
        self.add_message("bad-builtin", node=node, args=args)
+
+
def register(linter):
    """Auto-registration hook called by pylint when loading this plugin.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = BadBuiltinChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/broad_try_clause.py b/venv/Lib/site-packages/pylint/extensions/broad_try_clause.py
new file mode 100644
index 0000000..9a61fb6
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/broad_try_clause.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Tyler N. Thieding <python@thieding.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Looks for try/except statements with too much code in the try clause."""
+
+from pylint import checkers, interfaces
+
+
class BroadTryClauseChecker(checkers.BaseChecker):
    """Checks for try clauses with too many lines.

    According to PEP 8, ``try`` clauses shall contain the absolute minimum
    amount of code. This checker enforces a maximum number of statements within
    ``try`` clauses.

    """

    __implements__ = interfaces.IAstroidChecker

    # configuration section name
    name = "broad_try_clause"
    msgs = {
        "W0717": (
            "%s",
            "too-many-try-statements",
            "Try clause contains too many statements.",
        )
    }

    priority = -2
    options = (
        (
            "max-try-statements",
            {
                "default": 1,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of statements allowed in a try clause",
            },
        ),
    )

    def visit_tryexcept(self, node):
        """Report when the ``try`` body has more statements than allowed."""
        limit = self.config.max_try_statements
        statement_count = len(node.body)
        if statement_count <= limit:
            return
        msg = "try clause contains {0} statements, expected at most {1}".format(
            statement_count, limit
        )
        # node.lineno is passed positionally as the message's `line` argument.
        self.add_message("too-many-try-statements", node.lineno, node=node, args=msg)
+
+
def register(linter):
    """Auto-registration hook: attach the broad-try-clause checker to *linter*."""
    checker = BroadTryClauseChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/check_docs.py b/venv/Lib/site-packages/pylint/extensions/check_docs.py
new file mode 100644
index 0000000..7f7f643
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/check_docs.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2014-2015 Bruno Daniel <bruno.daniel@blue-yonder.com>
+# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import warnings
+
+from pylint.extensions import docparams
+
+
def register(linter):
    """Auto-registration hook for this deprecated shim plugin.

    Emits a DeprecationWarning and delegates to pylint.extensions.docparams.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    warnings.warn(
        "This plugin is deprecated, use pylint.extensions.docparams instead.",
        DeprecationWarning,
    )
    checker = docparams.DocstringParameterChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/check_elif.py b/venv/Lib/site-packages/pylint/extensions/check_elif.py
new file mode 100644
index 0000000..67555b1
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/check_elif.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import astroid
+
+from pylint.checkers import BaseTokenChecker
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import IAstroidChecker, ITokenChecker
+
+
class ElseifUsedChecker(BaseTokenChecker):
    """Checks for use of "else if" when an "elif" could be used
    """

    __implements__ = (ITokenChecker, IAstroidChecker)
    name = "else_if_used"
    msgs = {
        "R5501": (
            'Consider using "elif" instead of "else if"',
            "else-if-used",
            "Used when an else statement is immediately followed by "
            "an if statement and does not contain statements that "
            "would be unrelated to it.",
        )
    }

    def __init__(self, linter=None):
        BaseTokenChecker.__init__(self, linter)
        self._init()

    def _init(self):
        # _elifs[i] is True when the i-th "if"-like keyword seen in the token
        # stream was spelled "elif".  _if_counter walks this list during the
        # AST visit, so token order and visit order must stay synchronized —
        # do not reorder the visit_* bookkeeping below.
        self._elifs = []
        self._if_counter = 0

    def process_tokens(self, tokens):
        # Process tokens and look for 'if' or 'elif'
        for _, token, _, _, _ in tokens:
            if token == "elif":
                self._elifs.append(True)
            elif token == "if":
                self._elifs.append(False)

    def leave_module(self, _):
        # Reset per-module state so counters never leak into the next module.
        self._init()

    def visit_ifexp(self, node):
        # Conditional expressions consume an "if" token without producing an
        # astroid.If node, so advance the counter to stay aligned with _elifs.
        # NOTE(review): the FormattedValue early-return presumably skips "if"
        # inside f-strings, which never reaches the token stream — confirm.
        if isinstance(node.parent, astroid.FormattedValue):
            return
        self._if_counter += 1

    def visit_comprehension(self, node):
        # Each `if` filter clause in a comprehension is one "if" token.
        self._if_counter += len(node.ifs)

    @check_messages("else-if-used")
    def visit_if(self, node):
        if isinstance(node.parent, astroid.If):
            orelse = node.parent.orelse
            # current if node must directly follow an "else"
            if orelse and orelse == [node]:
                if not self._elifs[self._if_counter]:
                    self.add_message("else-if-used", node=node)
        self._if_counter += 1
+
+
def register(linter):
    """Auto-registration hook called by pylint when loading this plugin.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = ElseifUsedChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/comparetozero.py b/venv/Lib/site-packages/pylint/extensions/comparetozero.py
new file mode 100644
index 0000000..e31f488
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/comparetozero.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Looks for comparisons to empty string."""
+
+import itertools
+
+import astroid
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+
+
def _is_constant_zero(node):
    """Return True for an astroid Const node whose value compares equal to 0."""
    if not isinstance(node, astroid.Const):
        return False
    return node.value == 0
+
+
class CompareToZeroChecker(checkers.BaseChecker):
    """Checks for comparisons to zero.
    Most of the times you should use the fact that integers with a value of 0 are false.
    An exception to this rule is when 0 is allowed in the program and has a
    different meaning than None!
    """

    __implements__ = (interfaces.IAstroidChecker,)

    # configuration section name
    name = "compare-to-zero"
    msgs = {
        "C2001": (
            "Avoid comparisons to zero",
            "compare-to-zero",
            "Used when Pylint detects comparison to a 0 constant.",
        )
    }

    priority = -2
    options = ()

    @utils.check_messages("compare-to-zero")
    def visit_compare(self, node):
        """Flag every ==/!=/is/is not step of a comparison chain against 0."""
        flagged_operators = frozenset(("!=", "==", "is not", "is"))
        # astroid stores `a == b == c` as left=a plus ops=[("==", b), ("==", c)];
        # walk the chain one (left, operator, right) window at a time, carrying
        # the previous right operand forward as the next left operand.
        left_operand = node.left
        for operator, right_operand in node.ops:
            if operator in flagged_operators and (
                _is_constant_zero(left_operand) or _is_constant_zero(right_operand)
            ):
                self.add_message("compare-to-zero", node=node)
            left_operand = right_operand
+
+
def register(linter):
    """Auto-registration hook: attach the compare-to-zero checker to *linter*."""
    checker = CompareToZeroChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/docparams.py b/venv/Lib/site-packages/pylint/extensions/docparams.py
new file mode 100644
index 0000000..d5a15a4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/docparams.py
@@ -0,0 +1,536 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2015 Bruno Daniel <bruno.daniel@blue-yonder.com>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016-2018 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017 John Paraskevopoulos <io.paraskev@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Adam Dangoor <adamdangoor@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Pylint plugin for checking in Sphinx, Google, or Numpy style docstrings
+"""
+import astroid
+
+import pylint.extensions._check_docs_utils as utils
+from pylint.checkers import BaseChecker
+from pylint.checkers import utils as checker_utils
+from pylint.interfaces import IAstroidChecker
+
+
class DocstringParameterChecker(BaseChecker):
    """Checker for Sphinx, Google, or Numpy style docstrings

    * Check that all function, method and constructor parameters are mentioned
      in the params and types part of the docstring. Constructor parameters
      can be documented in either the class docstring or ``__init__`` docstring,
      but not both.
    * Check that there are no naming inconsistencies between the signature and
      the documentation, i.e. also report documented parameters that are missing
      in the signature. This is important to find cases where parameters are
      renamed only in the code, not in the documentation.
    * Check that all explicitly raised exceptions in a function are documented
      in the function docstring. Caught exceptions are ignored.

    Activate this checker by adding the line::

        load-plugins=pylint.extensions.docparams

    to the ``MASTER`` section of your ``.pylintrc``.

    :param linter: linter object
    :type linter: :class:`pylint.lint.PyLinter`
    """

    __implements__ = IAstroidChecker

    name = "parameter_documentation"
    msgs = {
        "W9005": (
            '"%s" has constructor parameters documented in class and __init__',
            "multiple-constructor-doc",
            "Please remove parameter declarations in the class or constructor.",
        ),
        "W9006": (
            '"%s" not documented as being raised',
            "missing-raises-doc",
            "Please document exceptions for all raised exception types.",
        ),
        "W9008": (
            "Redundant returns documentation",
            "redundant-returns-doc",
            "Please remove the return/rtype documentation from this method.",
        ),
        "W9010": (
            "Redundant yields documentation",
            "redundant-yields-doc",
            "Please remove the yields documentation from this method.",
        ),
        "W9011": (
            "Missing return documentation",
            "missing-return-doc",
            "Please add documentation about what this method returns.",
            {"old_names": [("W9007", "old-missing-returns-doc")]},
        ),
        "W9012": (
            "Missing return type documentation",
            "missing-return-type-doc",
            "Please document the type returned by this method.",
            # we can't use the same old_name for two different warnings
            # {'old_names': [('W9007', 'missing-returns-doc')]},
        ),
        "W9013": (
            "Missing yield documentation",
            "missing-yield-doc",
            "Please add documentation about what this generator yields.",
            {"old_names": [("W9009", "old-missing-yields-doc")]},
        ),
        "W9014": (
            "Missing yield type documentation",
            "missing-yield-type-doc",
            "Please document the type yielded by this method.",
            # we can't use the same old_name for two different warnings
            # {'old_names': [('W9009', 'missing-yields-doc')]},
        ),
        "W9015": (
            '"%s" missing in parameter documentation',
            "missing-param-doc",
            "Please add parameter declarations for all parameters.",
            {"old_names": [("W9003", "old-missing-param-doc")]},
        ),
        "W9016": (
            '"%s" missing in parameter type documentation',
            "missing-type-doc",
            "Please add parameter type declarations for all parameters.",
            {"old_names": [("W9004", "old-missing-type-doc")]},
        ),
        "W9017": (
            '"%s" differing in parameter documentation',
            "differing-param-doc",
            "Please check parameter names in declarations.",
        ),
        "W9018": (
            '"%s" differing in parameter type documentation',
            "differing-type-doc",
            "Please check parameter names in type declarations.",
        ),
    }

    options = (
        (
            "accept-no-param-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing parameter "
                "documentation in the docstring of a function that has "
                "parameters.",
            },
        ),
        (
            "accept-no-raise-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing raises "
                "documentation in the docstring of a function that "
                "raises an exception.",
            },
        ),
        (
            "accept-no-return-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing return "
                "documentation in the docstring of a function that "
                "returns a statement.",
            },
        ),
        (
            "accept-no-yields-doc",
            {
                "default": True,
                "type": "yn",
                "metavar": "<y or n>",
                "help": "Whether to accept totally missing yields "
                "documentation in the docstring of a generator.",
            },
        ),
        (
            "default-docstring-type",
            {
                "type": "choice",
                "default": "default",
                "choices": list(utils.DOCSTRING_TYPES),
                "help": "If the docstring type cannot be guessed "
                "the specified docstring type will be used.",
            },
        ),
    )

    priority = -2

    # Method names whose parameters may be documented on the class instead.
    constructor_names = {"__init__", "__new__"}
    # Implicit parameters that never require documentation.
    not_needed_param_in_docstring = {"self", "cls"}

    def visit_functiondef(self, node):
        """Called for function and method definitions (def).

        :param node: Node for a function or method definition in the AST
        :type node: :class:`astroid.scoped_nodes.Function`
        """
        node_doc = utils.docstringify(node.doc, self.config.default_docstring_type)
        self.check_functiondef_params(node, node_doc)
        self.check_functiondef_returns(node, node_doc)
        self.check_functiondef_yields(node, node_doc)

    def check_functiondef_params(self, node, node_doc):
        """Check parameter documentation of *node* against its signature.

        For constructors the class docstring and the __init__/__new__
        docstring are cross-checked: each may omit parameters documented
        by the other, but not both declare them.
        """
        node_allow_no_param = None
        if node.name in self.constructor_names:
            class_node = checker_utils.node_frame_class(node)
            if class_node is not None:
                class_doc = utils.docstringify(
                    class_node.doc, self.config.default_docstring_type
                )
                self.check_single_constructor_params(class_doc, node_doc, class_node)

                # __init__ or class docstrings can have no parameters documented
                # as long as the other documents them.
                node_allow_no_param = (
                    class_doc.has_params()
                    or class_doc.params_documented_elsewhere()
                    or None
                )
                class_allow_no_param = (
                    node_doc.has_params()
                    or node_doc.params_documented_elsewhere()
                    or None
                )

                self.check_arguments_in_docstring(
                    class_doc, node.args, class_node, class_allow_no_param
                )

        self.check_arguments_in_docstring(
            node_doc, node.args, node, node_allow_no_param
        )

    def check_functiondef_returns(self, node, node_doc):
        """Flag return documentation on functions that never return a value."""
        if (not node_doc.supports_yields and node.is_generator()) or node.is_abstract():
            return

        return_nodes = node.nodes_of_class(astroid.Return)
        if (node_doc.has_returns() or node_doc.has_rtype()) and not any(
            utils.returns_something(ret_node) for ret_node in return_nodes
        ):
            self.add_message("redundant-returns-doc", node=node)

    def check_functiondef_yields(self, node, node_doc):
        """Flag yield documentation on functions that are not generators."""
        if not node_doc.supports_yields or node.is_abstract():
            return

        if (
            node_doc.has_yields() or node_doc.has_yields_type()
        ) and not node.is_generator():
            self.add_message("redundant-yields-doc", node=node)

    def visit_raise(self, node):
        """Check that the exception possibly raised here is documented."""
        func_node = node.frame()
        if not isinstance(func_node, astroid.FunctionDef):
            return

        expected_excs = utils.possible_exc_types(node)

        if not expected_excs:
            return

        if not func_node.doc:
            # If this is a property setter,
            # the property should have the docstring instead.
            property_ = utils.get_setters_property(func_node)
            if property_:
                func_node = property_

        doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
        if not doc.is_valid():
            if doc.doc:
                self._handle_no_raise_doc(expected_excs, func_node)
            return

        found_excs_full_names = doc.exceptions()

        # Extract just the class name, e.g. "error" from "re.error"
        found_excs_class_names = {exc.split(".")[-1] for exc in found_excs_full_names}
        missing_excs = expected_excs - found_excs_class_names
        self._add_raise_message(missing_excs, func_node)

    def visit_return(self, node):
        """Check that a value-returning function documents return value/type."""
        if not utils.returns_something(node):
            return

        func_node = node.frame()
        if not isinstance(func_node, astroid.FunctionDef):
            return

        doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
        if not doc.is_valid() and self.config.accept_no_return_doc:
            return

        is_property = checker_utils.decorated_with_property(func_node)

        if not (doc.has_returns() or (doc.has_property_returns() and is_property)):
            self.add_message("missing-return-doc", node=func_node)

        # A return annotation counts as type documentation.
        if func_node.returns:
            return

        if not (doc.has_rtype() or (doc.has_property_type() and is_property)):
            self.add_message("missing-return-type-doc", node=func_node)

    def visit_yield(self, node):
        """Check that a generator documents what it yields.

        Docstring styles without a dedicated yields section fall back to
        the returns documentation.
        """
        func_node = node.frame()
        if not isinstance(func_node, astroid.FunctionDef):
            return

        doc = utils.docstringify(func_node.doc, self.config.default_docstring_type)
        if not doc.is_valid() and self.config.accept_no_yields_doc:
            return

        if doc.supports_yields:
            doc_has_yields = doc.has_yields()
            doc_has_yields_type = doc.has_yields_type()
        else:
            doc_has_yields = doc.has_returns()
            doc_has_yields_type = doc.has_rtype()

        if not doc_has_yields:
            self.add_message("missing-yield-doc", node=func_node)

        if not (doc_has_yields_type or func_node.returns):
            self.add_message("missing-yield-type-doc", node=func_node)

    def visit_yieldfrom(self, node):
        """``yield from`` is checked exactly like a plain ``yield``."""
        self.visit_yield(node)

    def _compare_missing_args(
        self,
        found_argument_names,
        message_id,
        not_needed_names,
        expected_argument_names,
        warning_node,
    ):
        """Compare the found argument names with the expected ones and
        generate a message if there are arguments missing.

        :param set found_argument_names: argument names found in the
            docstring

        :param str message_id: pylint message id

        :param not_needed_names: names that may be omitted
        :type not_needed_names: set of str

        :param set expected_argument_names: Expected argument names
        :param NodeNG warning_node: The node to be analyzed
        """
        missing_argument_names = (
            expected_argument_names - found_argument_names
        ) - not_needed_names
        if missing_argument_names:
            self.add_message(
                message_id,
                args=(", ".join(sorted(missing_argument_names)),),
                node=warning_node,
            )

    def _compare_different_args(
        self,
        found_argument_names,
        message_id,
        not_needed_names,
        expected_argument_names,
        warning_node,
    ):
        """Compare the found argument names with the expected ones and
        generate a message if there are extra arguments found.

        :param set found_argument_names: argument names found in the
            docstring

        :param str message_id: pylint message id

        :param not_needed_names: names that may be omitted
        :type not_needed_names: set of str

        :param set expected_argument_names: Expected argument names
        :param NodeNG warning_node: The node to be analyzed
        """
        # Symmetric difference minus the expected set leaves exactly the
        # names that appear only in the docstring.
        differing_argument_names = (
            (expected_argument_names ^ found_argument_names)
            - not_needed_names
            - expected_argument_names
        )

        if differing_argument_names:
            self.add_message(
                message_id,
                args=(", ".join(sorted(differing_argument_names)),),
                node=warning_node,
            )

    def check_arguments_in_docstring(
        self, doc, arguments_node, warning_node, accept_no_param_doc=None
    ):
        """Check that all parameters in a function, method or class constructor
        on the one hand and the parameters mentioned in the parameter
        documentation (e.g. the Sphinx tags 'param' and 'type') on the other
        hand are consistent with each other.

        * Undocumented parameters except 'self' are noticed.
        * Undocumented parameter types except for 'self' and the ``*<args>``
          and ``**<kwargs>`` parameters are noticed.
        * Parameters mentioned in the parameter documentation that don't or no
          longer exist in the function parameter list are noticed.
        * If the text "For the parameters, see" or "For the other parameters,
          see" (ignoring additional whitespace) is mentioned in the docstring,
          missing parameter documentation is tolerated.
        * If there's no Sphinx style, Google style or NumPy style parameter
          documentation at all, i.e. ``:param`` is never mentioned etc., the
          checker assumes that the parameters are documented in another format
          and the absence is tolerated.

        :param doc: Docstring for the function, method or class.
        :type doc: :class:`Docstring`

        :param arguments_node: Arguments node for the function, method or
            class constructor.
        :type arguments_node: :class:`astroid.scoped_nodes.Arguments`

        :param warning_node: The node to assign the warnings to
        :type warning_node: :class:`astroid.scoped_nodes.Node`

        :param accept_no_param_doc: Whether or not to allow no parameters
            to be documented.
            If None then this value is read from the configuration.
        :type accept_no_param_doc: bool or None
        """
        # Tolerate missing param or type declarations if there is a link to
        # another method carrying the same name.
        if not doc.doc:
            return

        if accept_no_param_doc is None:
            accept_no_param_doc = self.config.accept_no_param_doc
        tolerate_missing_params = doc.params_documented_elsewhere()

        # Collect the function arguments.
        expected_argument_names = {arg.name for arg in arguments_node.args}
        expected_argument_names.update(arg.name for arg in arguments_node.kwonlyargs)
        not_needed_type_in_docstring = self.not_needed_param_in_docstring.copy()

        # *args / **kwargs must be documented by name but never by type.
        if arguments_node.vararg is not None:
            expected_argument_names.add(arguments_node.vararg)
            not_needed_type_in_docstring.add(arguments_node.vararg)
        if arguments_node.kwarg is not None:
            expected_argument_names.add(arguments_node.kwarg)
            not_needed_type_in_docstring.add(arguments_node.kwarg)
        params_with_doc, params_with_type = doc.match_param_docs()

        # Tolerate no parameter documentation at all.
        if not params_with_doc and not params_with_type and accept_no_param_doc:
            tolerate_missing_params = True

        if not tolerate_missing_params:
            self._compare_missing_args(
                params_with_doc,
                "missing-param-doc",
                self.not_needed_param_in_docstring,
                expected_argument_names,
                warning_node,
            )

        # An inline type annotation also satisfies the type requirement.
        for index, arg_name in enumerate(arguments_node.args):
            if arguments_node.annotations[index]:
                params_with_type.add(arg_name.name)
        for index, arg_name in enumerate(arguments_node.kwonlyargs):
            if arguments_node.kwonlyargs_annotations[index]:
                params_with_type.add(arg_name.name)

        if not tolerate_missing_params:
            self._compare_missing_args(
                params_with_type,
                "missing-type-doc",
                not_needed_type_in_docstring,
                expected_argument_names,
                warning_node,
            )

        self._compare_different_args(
            params_with_doc,
            "differing-param-doc",
            self.not_needed_param_in_docstring,
            expected_argument_names,
            warning_node,
        )
        self._compare_different_args(
            params_with_type,
            "differing-type-doc",
            not_needed_type_in_docstring,
            expected_argument_names,
            warning_node,
        )

    def check_single_constructor_params(self, class_doc, init_doc, class_node):
        """Warn when both the class and __init__ document the parameters."""
        if class_doc.has_params() and init_doc.has_params():
            self.add_message(
                "multiple-constructor-doc", args=(class_node.name,), node=class_node
            )

    def _handle_no_raise_doc(self, excs, node):
        """Report undocumented exceptions unless configuration tolerates it."""
        if self.config.accept_no_raise_doc:
            return

        self._add_raise_message(excs, node)

    def _add_raise_message(self, missing_excs, node):
        """
        Adds a message on :param:`node` for the missing exception type.

        :param missing_excs: A list of missing exception types.
        :type missing_excs: set(str)

        :param node: The node show the message on.
        :type node: astroid.node_classes.NodeNG
        """
        # Abstract methods conventionally raise NotImplementedError; it does
        # not need documenting.
        if node.is_abstract():
            try:
                missing_excs.remove("NotImplementedError")
            except KeyError:
                pass

        if not missing_excs:
            return

        self.add_message(
            "missing-raises-doc", args=(", ".join(sorted(missing_excs)),), node=node
        )
+
+
def register(linter):
    """Auto-registration hook called by pylint when loading this plugin.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = DocstringParameterChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/docstyle.py b/venv/Lib/site-packages/pylint/extensions/docstyle.py
new file mode 100644
index 0000000..36f506f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/docstyle.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Luis Escobar <lescobar@vauxoo.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import linecache
+
+from pylint import checkers
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import HIGH, IAstroidChecker
+
+
class DocStringStyleChecker(checkers.BaseChecker):
    """Checks format of docstrings based on PEP 0257"""

    __implements__ = IAstroidChecker
    name = "docstyle"

    msgs = {
        "C0198": (
            'Bad docstring quotes in %s, expected """, given %s',
            "bad-docstring-quotes",
            "Used when a docstring does not have triple double quotes.",
        ),
        "C0199": (
            "First line empty in %s docstring",
            "docstring-first-line-empty",
            "Used when a blank line is found at the beginning of a docstring.",
        ),
    }

    @check_messages("docstring-first-line-empty", "bad-docstring-quotes")
    def visit_module(self, node):
        self._check_docstring("module", node)

    def visit_classdef(self, node):
        self._check_docstring("class", node)

    def visit_functiondef(self, node):
        node_type = "method" if node.is_method() else "function"
        self._check_docstring(node_type, node)

    visit_asyncfunctiondef = visit_functiondef

    def _check_docstring(self, node_type, node):
        """Check a docstring's first line and its quoting style."""
        docstring = node.doc
        if not docstring:
            return

        if docstring[0] == "\n":
            self.add_message(
                "docstring-first-line-empty",
                node=node,
                args=(node_type,),
                confidence=HIGH,
            )

        # Use "linecache", instead of node.as_string(), because the latter
        # looses the original form of the docstrings.
        first_line = linecache.getline(node.root().file, node.fromlineno + 1).lstrip()
        if first_line.startswith('"""'):
            return

        if "'''" in first_line:
            quotes = "'''"
        elif first_line.startswith('"'):
            quotes = '"'
        elif first_line.startswith("'"):
            quotes = "'"
        else:
            quotes = False
        if quotes:
            self.add_message(
                "bad-docstring-quotes",
                node=node,
                args=(node_type, quotes),
                confidence=HIGH,
            )
+
+
def register(linter):
    """Auto-registration hook called by pylint when loading this plugin.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = DocStringStyleChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/emptystring.py b/venv/Lib/site-packages/pylint/extensions/emptystring.py
new file mode 100644
index 0000000..04021d5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/emptystring.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
+# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Looks for comparisons to empty string."""
+
+import itertools
+
+import astroid
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+
+
def _is_constant_empty_str(node):
    """Return True for an astroid Const node whose value equals the empty string."""
    if not isinstance(node, astroid.Const):
        return False
    return node.value == ""
+
+
class CompareToEmptyStringChecker(checkers.BaseChecker):
    """Checks for comparisons to empty string.
    Most of the times you should use the fact that empty strings are false.
    An exception to this rule is when an empty string value is allowed in the program
    and has a different meaning than None!
    """

    __implements__ = (interfaces.IAstroidChecker,)

    # configuration section name
    name = "compare-to-empty-string"
    msgs = {
        "C1901": (
            "Avoid comparisons to empty string",
            "compare-to-empty-string",
            "Used when Pylint detects comparison to an empty string constant.",
        )
    }

    priority = -2
    options = ()

    @utils.check_messages("compare-to-empty-string")
    def visit_compare(self, node):
        """Flag any ==/!=/is/is not comparison against an empty-string constant."""
        _operators = ["!=", "==", "is not", "is"]
        # note: astroid.Compare has the left most operand in node.left
        # while the rest are a list of tuples in node.ops
        # the format of the tuple is ('compare operator sign', node)
        # here we squash everything into `ops` to make it easier for processing later
        ops = [("", node.left)]
        ops.extend(node.ops)
        ops = list(itertools.chain(*ops))

        for ops_idx in range(len(ops) - 2):
            op_1 = ops[ops_idx]
            op_2 = ops[ops_idx + 1]
            op_3 = ops[ops_idx + 2]
            error_detected = False

            # "" ?? X  -- op_1 (the left operand) is the empty constant.
            # (The original comments had the two branches swapped.)
            if _is_constant_empty_str(op_1) and op_2 in _operators:
                error_detected = True
            # X ?? ""  -- op_3 (the right operand) is the empty constant.
            elif op_2 in _operators and _is_constant_empty_str(op_3):
                error_detected = True

            if error_detected:
                self.add_message("compare-to-empty-string", node=node)
+
+
def register(linter):
    """Auto-registration hook: attach the empty-string checker to *linter*."""
    checker = CompareToEmptyStringChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/mccabe.py b/venv/Lib/site-packages/pylint/extensions/mccabe.py
new file mode 100644
index 0000000..cafac97
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/mccabe.py
@@ -0,0 +1,196 @@
+# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Module to add McCabe checker class for pylint. """
+
+from mccabe import PathGraph as Mccabe_PathGraph
+from mccabe import PathGraphingAstVisitor as Mccabe_PathGraphingAstVisitor
+
+from pylint import checkers
+from pylint.checkers.utils import check_messages
+from pylint.interfaces import HIGH, IAstroidChecker
+
+
class PathGraph(Mccabe_PathGraph):
    """mccabe PathGraph that remembers the astroid node it was built from."""

    def __init__(self, node):
        # name/entity/lineno are placeholders here; pylint reads `.root`
        # instead to locate the message (see McCabeMethodChecker).
        super().__init__(name="", entity="", lineno=1)
        self.root = node
+
+
class PathGraphingAstVisitor(Mccabe_PathGraphingAstVisitor):
    """mccabe's path-graph visitor adapted to walk astroid nodes.

    Builds one PathGraph per function/method; each graph's complexity()
    is later read by McCabeMethodChecker.
    """

    def __init__(self):
        super(PathGraphingAstVisitor, self).__init__()
        # generates unique ids for the synthetic "join" nodes appended
        # after branching constructs
        self._bottom_counter = 0

    def default(self, node, *args):
        """Fallback visit: recurse into all children of an unhandled node."""
        for child in node.get_children():
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        """Dispatch to ``visit<ClassName>`` when defined, else to default().

        The resolved method is memoized per node class in self._cache.
        """
        self.node = node
        klass = node.__class__
        meth = self._cache.get(klass)
        if meth is None:
            class_name = klass.__name__
            meth = getattr(self.visitor, "visit" + class_name, self.default)
            self._cache[klass] = meth
        return meth(node, *args)

    def visitFunctionDef(self, node):
        """Open a new path graph for a function; nested functions (closures)
        extend the graph currently being built instead."""
        if self.graph is not None:
            # closure
            pathnode = self._append_node(node)
            self.tail = pathnode
            self.dispatch_list(node.body)
            bottom = "%s" % self._bottom_counter
            self._bottom_counter += 1
            self.graph.connect(self.tail, bottom)
            self.graph.connect(node, bottom)
            self.tail = bottom
        else:
            self.graph = PathGraph(node)
            self.tail = node
            self.dispatch_list(node.body)
            self.graphs["%s%s" % (self.classname, node.name)] = self.graph
            self.reset()

    visitAsyncFunctionDef = visitFunctionDef

    def visitSimpleStatement(self, node):
        """A plain statement contributes a single node to the current path."""
        self._append_node(node)

    # every one of these statement kinds is handled as a simple statement:
    # it adds one node to the path without branching
    visitAssert = (
        visitAssign
    ) = (
        visitAugAssign
    ) = (
        visitDelete
    ) = (
        visitPrint
    ) = (
        visitRaise
    ) = (
        visitYield
    ) = (
        visitImport
    ) = (
        visitCall
    ) = (
        visitSubscript
    ) = (
        visitPass
    ) = (
        visitContinue
    ) = (
        visitBreak
    ) = visitGlobal = visitReturn = visitExpr = visitAwait = visitSimpleStatement

    def visitWith(self, node):
        """``with`` adds one node, then its body is walked linearly."""
        self._append_node(node)
        self.dispatch_list(node.body)

    visitAsyncWith = visitWith

    def _append_node(self, node):
        """Connect *node* after the current tail and make it the new tail."""
        if not self.tail:
            return None
        self.graph.connect(self.tail, node)
        self.tail = node
        return node

    def _subgraph(self, node, name, extra_blocks=()):
        """create the subgraphs representing any `if` and `for` statements"""
        if self.graph is None:
            # global loop
            self.graph = PathGraph(node)
            self._subgraph_parse(node, node, extra_blocks)
            self.graphs["%s%s" % (self.classname, name)] = self.graph
            self.reset()
        else:
            self._append_node(node)
            self._subgraph_parse(node, node, extra_blocks)

    def _subgraph_parse(self, node, pathnode, extra_blocks):
        """parse the body and any `else` block of `if` and `for` statements"""
        loose_ends = []
        self.tail = node
        self.dispatch_list(node.body)
        loose_ends.append(self.tail)
        # extra_blocks: additional branch bodies (e.g. except handlers)
        for extra in extra_blocks:
            self.tail = node
            self.dispatch_list(extra.body)
            loose_ends.append(self.tail)
        if node.orelse:
            self.tail = node
            self.dispatch_list(node.orelse)
            loose_ends.append(self.tail)
        else:
            loose_ends.append(node)
        if node:
            # join every branch end on one synthetic bottom node
            bottom = "%s" % self._bottom_counter
            self._bottom_counter += 1
            for end in loose_ends:
                self.graph.connect(end, bottom)
            self.tail = bottom
+
+
class McCabeMethodChecker(checkers.BaseChecker):
    """Checks McCabe complexity cyclomatic threshold in methods and functions
    to validate a too complex code.
    """

    __implements__ = IAstroidChecker
    # shares the "design" section name so its option groups with the
    # builtin design-analysis checker
    name = "design"

    msgs = {
        "R1260": (
            "%s is too complex. The McCabe rating is %d",
            "too-complex",
            "Used when a method or function is too complex based on "
            "McCabe Complexity Cyclomatic",
        )
    }
    options = (
        (
            "max-complexity",
            {
                "default": 10,
                "type": "int",
                "metavar": "<int>",
                "help": "McCabe complexity cyclomatic threshold",
            },
        ),
    )

    @check_messages("too-complex")
    def visit_module(self, node):
        """visit an astroid.Module node to check too complex rating and
        add message if is greater than max_complexity stored from options"""
        visitor = PathGraphingAstVisitor()
        for child in node.body:
            visitor.preorder(child, visitor)
        for graph in visitor.graphs.values():
            complexity = graph.complexity()
            node = graph.root
            if hasattr(node, "name"):
                node_name = "'%s'" % node.name
            else:
                # module-level loops etc. have no name attribute
                node_name = "This '%s'" % node.__class__.__name__.lower()
            if complexity <= self.config.max_complexity:
                continue
            self.add_message(
                "too-complex", node=node, confidence=HIGH, args=(node_name, complexity)
            )
+
+
def register(linter):
    """Auto-register the McCabe complexity checker on *linter*.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = McCabeMethodChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/overlapping_exceptions.py b/venv/Lib/site-packages/pylint/extensions/overlapping_exceptions.py
new file mode 100644
index 0000000..be2208c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/overlapping_exceptions.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Looks for overlapping exceptions."""
+
+import astroid
+
+from pylint import checkers, interfaces
+from pylint.checkers import utils
+from pylint.checkers.exceptions import _annotated_unpack_infer
+
+
class OverlappingExceptionsChecker(checkers.BaseChecker):
    """Checks for two or more exceptions in the same exception handler
    clause that are identical or parts of the same inheritance hierarchy
    (i.e. overlapping)."""

    __implements__ = interfaces.IAstroidChecker

    name = "overlap-except"
    msgs = {
        "W0714": (
            "Overlapping exceptions (%s)",
            "overlapping-except",
            "Used when exceptions in handler overlap or are identical",
        )
    }
    priority = -2
    options = ()

    @utils.check_messages("overlapping-except")
    def visit_tryexcept(self, node):
        """Check each handler clause for exception types that overlap."""
        for handler in node.handlers:
            if handler.type is None:
                continue
            # boolean expressions as handler types are out of scope here
            if isinstance(handler.type, astroid.BoolOp):
                continue
            try:
                excs = list(_annotated_unpack_infer(handler.type))
            except astroid.InferenceError:
                continue

            handled_in_clause = []
            for part, exc in excs:
                if exc is astroid.Uninferable:
                    continue
                if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(exc):
                    # pylint: disable=protected-access
                    exc = exc._proxied

                if not isinstance(exc, astroid.ClassDef):
                    continue

                exc_ancestors = [
                    anc for anc in exc.ancestors() if isinstance(anc, astroid.ClassDef)
                ]

                # compare against every exception already seen in this clause
                for prev_part, prev_exc in handled_in_clause:
                    prev_exc_ancestors = [
                        anc
                        for anc in prev_exc.ancestors()
                        if isinstance(anc, astroid.ClassDef)
                    ]
                    if exc == prev_exc:
                        self.add_message(
                            "overlapping-except",
                            node=handler.type,
                            args="%s and %s are the same"
                            % (prev_part.as_string(), part.as_string()),
                        )
                    elif prev_exc in exc_ancestors or exc in prev_exc_ancestors:
                        ancestor = part if exc in prev_exc_ancestors else prev_part
                        descendant = part if prev_exc in exc_ancestors else prev_part
                        self.add_message(
                            "overlapping-except",
                            node=handler.type,
                            args="%s is an ancestor class of %s"
                            % (ancestor.as_string(), descendant.as_string()),
                        )
                handled_in_clause += [(part, exc)]
+
+
def register(linter):
    """Required method to auto register this checker."""
    checker = OverlappingExceptionsChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/extensions/redefined_variable_type.py b/venv/Lib/site-packages/pylint/extensions/redefined_variable_type.py
new file mode 100644
index 0000000..cfe4754
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/extensions/redefined_variable_type.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import astroid
+
+from pylint.checkers import BaseChecker
+from pylint.checkers.utils import check_messages, is_none, node_type
+from pylint.interfaces import IAstroidChecker
+
+BUILTINS = "builtins"
+
+
class MultipleTypesChecker(BaseChecker):
    """Checks for variable type redefinitions (NoneType excepted)

    At a function, method, class or module scope

    This rule could be improved:

    - Currently, if an attribute is set to different types in 2 methods of a
      same class, it won't be detected (see functional test)
    - One could improve the support for inference on assignment with tuples,
      ifexpr, etc. Also it would be great to have support for inference on
      str.split()
    """

    __implements__ = IAstroidChecker

    name = "multiple_types"
    msgs = {
        "R0204": (
            "Redefinition of %s type from %s to %s",
            "redefined-variable-type",
            "Used when the type of a variable changes inside a "
            "method or a function.",
        )
    }

    def visit_classdef(self, _):
        # push a fresh scope for assignments seen inside this class/function
        self._assigns.append({})

    @check_messages("redefined-variable-type")
    def leave_classdef(self, _):
        # pop the innermost scope and report redefinitions found in it
        self._check_and_add_messages()

    # functions open/close scopes exactly like classes do
    visit_functiondef = visit_classdef
    leave_functiondef = leave_module = leave_classdef

    def visit_module(self, _):
        # module scope is the bottom of the scope stack
        self._assigns = [{}]

    def _check_and_add_messages(self):
        """Emit redefined-variable-type for each name whose inferred type changed
        within the scope being left."""
        assigns = self._assigns.pop()
        for name, args in assigns.items():
            if len(args) <= 1:
                continue
            orig_node, orig_type = args[0]
            # Check if there is a type in the following nodes that would be
            # different from orig_type.
            for redef_node, redef_type in args[1:]:
                if redef_type == orig_type:
                    continue
                # if a variable is defined to several types in an if node,
                # this is not actually redefining.
                orig_parent = orig_node.parent
                redef_parent = redef_node.parent
                if isinstance(orig_parent, astroid.If):
                    if orig_parent == redef_parent:
                        if (
                            redef_node in orig_parent.orelse
                            and orig_node not in orig_parent.orelse
                        ):
                            orig_node, orig_type = redef_node, redef_type
                        continue
                    elif isinstance(
                        redef_parent, astroid.If
                    ) and redef_parent in orig_parent.nodes_of_class(astroid.If):
                        orig_node, orig_type = redef_node, redef_type
                        continue
                # strip the "builtins." prefix for readable messages
                orig_type = orig_type.replace(BUILTINS + ".", "")
                redef_type = redef_type.replace(BUILTINS + ".", "")
                self.add_message(
                    "redefined-variable-type",
                    node=redef_node,
                    args=(name, orig_type, redef_type),
                )
                break

    def visit_assign(self, node):
        """Record the inferred type of single-target, non-None assignments."""
        # we don't handle multiple assignment nor slice assignment
        target = node.targets[0]
        if isinstance(target, (astroid.Tuple, astroid.Subscript)):
            return
        # ignore NoneType
        if is_none(node):
            return
        _type = node_type(node.value)
        if _type:
            self._assigns[-1].setdefault(target.as_string(), []).append(
                (node, _type.pytype())
            )
+
+
def register(linter):
    """Auto-register the variable-type-redefinition checker on *linter*.

    :param linter: Main interface object for Pylint plugins
    :type linter: Pylint object
    """
    checker = MultipleTypesChecker(linter)
    linter.register_checker(checker)
diff --git a/venv/Lib/site-packages/pylint/graph.py b/venv/Lib/site-packages/pylint/graph.py
new file mode 100644
index 0000000..0dc7a14
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/graph.py
@@ -0,0 +1,197 @@
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Graph manipulation utilities.
+
+(dot generation adapted from pypy/translator/tool/make_dot.py)
+"""
+
+import codecs
+import os
+import os.path as osp
+import subprocess
+import sys
+import tempfile
+
+
def target_info_from_filename(filename):
    """Transforms /some/path/foo.png into ('/some/path', 'foo.png', 'png').

    :param filename: path to the output file
    :return: (directory, basename, extension) tuple
    """
    basename = osp.basename(filename)
    storedir = osp.dirname(osp.abspath(filename))
    # Parse the extension out of the basename, not the full path: splitting
    # the whole filename breaks on dotted directories ("/v1.2/graph" would
    # previously yield "2/graph" as the target format).
    target = basename.split(".")[-1]
    return storedir, basename, target
+
+
class DotBackend:
    """Dot File backend."""

    def __init__(
        self,
        graphname,
        rankdir=None,
        size=None,
        ratio=None,
        charset="utf-8",
        renderer="dot",
        additional_param=None,
    ):
        """Open the digraph and emit its graph-level attributes.

        :param graphname: name used for the digraph id and default file names
        :param rankdir: optional graphviz rankdir attribute
        :param size: optional graphviz size attribute
        :param ratio: optional graphviz ratio attribute
        :param charset: output charset; must be utf-8, iso-8859-1 or latin1
        :param renderer: graphviz executable invoked by generate()
        :param additional_param: mapping of extra "key=value" graph attributes
        """
        if additional_param is None:
            additional_param = {}
        self.graphname = graphname
        self.renderer = renderer
        self.lines = []
        self._source = None
        self.emit("digraph %s {" % normalize_node_id(graphname))
        if rankdir:
            self.emit("rankdir=%s" % rankdir)
        if ratio:
            self.emit("ratio=%s" % ratio)
        if size:
            self.emit('size="%s"' % size)
        if charset:
            assert charset.lower() in ("utf-8", "iso-8859-1", "latin1"), (
                "unsupported charset %s" % charset
            )
            self.emit('charset="%s"' % charset)
        for param in additional_param.items():
            self.emit("=".join(param))

    def get_source(self):
        """returns self._source"""
        if self._source is None:
            # close the digraph and freeze the source; emit() is unusable
            # afterwards because self.lines is deleted
            self.emit("}\n")
            self._source = "\n".join(self.lines)
            del self.lines
        return self._source

    source = property(get_source)

    def generate(self, outputfile=None, dotfile=None, mapfile=None):
        """Generates a graph file.

        :param str outputfile: filename and path [defaults to graphname.png]
        :param str dotfile: filename and path [defaults to graphname.dot]
        :param str mapfile: filename and path

        :rtype: str
        :return: a path to the generated file
        """
        name = self.graphname
        if not dotfile:
            # if 'outputfile' is a dot file use it as 'dotfile'
            if outputfile and outputfile.endswith(".dot"):
                dotfile = outputfile
            else:
                dotfile = "%s.dot" % name
        if outputfile is not None:
            storedir, _, target = target_info_from_filename(outputfile)
            if target != "dot":
                # rendering is needed: write the dot source to a temp file
                pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
                os.close(pdot)
            else:
                dot_sourcepath = osp.join(storedir, dotfile)
        else:
            # no output requested: default to a temporary png
            target = "png"
            pdot, dot_sourcepath = tempfile.mkstemp(".dot", name)
            ppng, outputfile = tempfile.mkstemp(".png", name)
            os.close(pdot)
            os.close(ppng)
        pdot = codecs.open(dot_sourcepath, "w", encoding="utf8")
        pdot.write(self.source)
        pdot.close()
        if target != "dot":
            # invoke graphviz to render (and optionally emit a cmapx map file)
            use_shell = sys.platform == "win32"
            if mapfile:
                subprocess.call(
                    [
                        self.renderer,
                        "-Tcmapx",
                        "-o",
                        mapfile,
                        "-T",
                        target,
                        dot_sourcepath,
                        "-o",
                        outputfile,
                    ],
                    shell=use_shell,
                )
            else:
                subprocess.call(
                    [self.renderer, "-T", target, dot_sourcepath, "-o", outputfile],
                    shell=use_shell,
                )
            os.unlink(dot_sourcepath)
        return outputfile

    def emit(self, line):
        """Adds <line> to final output."""
        self.lines.append(line)

    def emit_edge(self, name1, name2, **props):
        """emit an edge from <name1> to <name2>.
        edge properties: see http://www.graphviz.org/doc/info/attrs.html
        """
        attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
        n_from, n_to = normalize_node_id(name1), normalize_node_id(name2)
        self.emit("%s -> %s [%s];" % (n_from, n_to, ", ".join(sorted(attrs))))

    def emit_node(self, name, **props):
        """emit a node with given properties.
        node properties: see http://www.graphviz.org/doc/info/attrs.html
        """
        attrs = ['%s="%s"' % (prop, value) for prop, value in props.items()]
        self.emit("%s [%s];" % (normalize_node_id(name), ", ".join(sorted(attrs))))
+
+
def normalize_node_id(nid):
    """Returns a suitable DOT node id for `nid`."""
    return f'"{nid}"'
+
+
def get_cycles(graph_dict, vertices=None):
    """Return the list of cycles found in an ordered graph.

    *graph_dict* maps each vertex to the list of vertices its edges point
    to; *vertices* optionally restricts which start vertices are explored.
    """
    if not graph_dict:
        return ()
    result = []
    roots = graph_dict.keys() if vertices is None else vertices
    for vertice in roots:
        _get_cycles(graph_dict, [], set(), result, vertice)
    return result
+
+
+def _get_cycles(graph_dict, path, visited, result, vertice):
+ """recursive function doing the real work for get_cycles"""
+ if vertice in path:
+ cycle = [vertice]
+ for node in path[::-1]:
+ if node == vertice:
+ break
+ cycle.insert(0, node)
+ # make a canonical representation
+ start_from = min(cycle)
+ index = cycle.index(start_from)
+ cycle = cycle[index:] + cycle[0:index]
+ # append it to result if not already in
+ if cycle not in result:
+ result.append(cycle)
+ return
+ path.append(vertice)
+ try:
+ for node in graph_dict[vertice]:
+ # don't check already visited nodes again
+ if node not in visited:
+ _get_cycles(graph_dict, path, visited, result, node)
+ visited.add(node)
+ except KeyError:
+ pass
+ path.pop()
diff --git a/venv/Lib/site-packages/pylint/interfaces.py b/venv/Lib/site-packages/pylint/interfaces.py
new file mode 100644
index 0000000..378585c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/interfaces.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009-2010, 2012-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Interfaces for Pylint objects"""
+from collections import namedtuple
+
# A confidence level attached to emitted messages: a name plus a
# human-readable description of how certain the warning is.
Confidence = namedtuple("Confidence", ["name", "description"])
# Warning Certainties
HIGH = Confidence("HIGH", "No false positive possible.")
INFERENCE = Confidence("INFERENCE", "Warning based on inference result.")
INFERENCE_FAILURE = Confidence(
    "INFERENCE_FAILURE", "Warning based on inference with failures."
)
UNDEFINED = Confidence("UNDEFINED", "Warning without any associated confidence level.")

# Ordered from most to least certain; used e.g. as the valid choices of
# PyLinter's --confidence option.
CONFIDENCE_LEVELS = [HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED]
+
+
class Interface:
    """Base class for interfaces."""

    @classmethod
    def is_implemented_by(cls, instance):
        """Return True if *instance* declares this interface in __implements__."""
        return implements(instance, cls)
+
+
def implements(obj, interface):
    """Return true if the given object (maybe an instance or class) implements
    the interface.
    """
    declared = getattr(obj, "__implements__", ())
    if not isinstance(declared, (list, tuple)):
        declared = (declared,)
    return any(issubclass(itf, interface) for itf in declared)
+
+
class IChecker(Interface):
    """This is a base interface, not designed to be used elsewhere than for
    sub interfaces definition.
    """

    def open(self):
        """Called before visiting a project (i.e. a set of modules)."""

    def close(self):
        """Called after visiting a project (i.e. a set of modules)."""
+
+
class IRawChecker(IChecker):
    """Interface for checkers which need to parse the raw file."""

    def process_module(self, astroid):
        """Process a module.

        The module's content is accessible via ``astroid.stream``.
        """
+
+
class ITokenChecker(IChecker):
    """Interface for checkers that need access to the token list."""

    def process_tokens(self, tokens):
        """Process a module.

        ``tokens`` is a list of all source code tokens in the file.
        """
+
+
class IAstroidChecker(IChecker):
    """Interface for checkers which prefer to receive events according to
    statement type (visit_/leave_ callbacks on astroid nodes).
    """
+
+
class IReporter(Interface):
    """Reporters collect messages and display results encapsulated in a layout."""

    def handle_message(self, msg):
        """Handle the given message object."""

    def display_reports(self, layout):
        """Display results encapsulated in the layout tree."""
+
+
# Public API of this module.
__all__ = ("IRawChecker", "IAstroidChecker", "ITokenChecker", "IReporter")
diff --git a/venv/Lib/site-packages/pylint/lint.py b/venv/Lib/site-packages/pylint/lint.py
new file mode 100644
index 0000000..a98970b
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/lint.py
@@ -0,0 +1,1817 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2008 Fabrice Douchant <Fabrice.Douchant@logilab.fr>
+# Copyright (c) 2009 Vincent
+# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
+# Copyright (c) 2011-2014 Google, Inc.
+# Copyright (c) 2012 David Pursehouse <david.pursehouse@sonymobile.com>
+# Copyright (c) 2012 Kevin Jing Qiu <kevin.jing.qiu@gmail.com>
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2012 JT Olds <jtolds@xnet5.com>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014-2015 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Alexandru Coman <fcoman@bitdefender.com>
+# Copyright (c) 2014 Daniel Harding <dharding@living180.net>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2014 Dan Goldsmith <djgoldsmith@googlemail.com>
+# Copyright (c) 2015-2016 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Aru Sahni <arusahni@gmail.com>
+# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Mihai Balint <balint.mihai@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Alan Evangelista <alanoe@linux.vnet.ibm.com>
+# Copyright (c) 2017-2018 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Daniel Miller <millerdev@gmail.com>
+# Copyright (c) 2017 Roman Ivanov <me@roivanov.com>
+# Copyright (c) 2017 Ned Batchelder <ned@nedbatchelder.com>
+# Copyright (c) 2018 Randall Leeds <randall@bleeds.info>
+# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2018 Jason Owen <jason.a.owen@gmail.com>
+# Copyright (c) 2018 Gary Tyler McLeod <mail@garytyler.com>
+# Copyright (c) 2018 Yuval Langer <yuvallanger@mail.tau.ac.il>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+# Copyright (c) 2018 kapsh <kapsh@kap.sh>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+# pylint: disable=broad-except
+
+""" pylint [options] modules_or_packages
+
+ Check that module(s) satisfy a coding standard (and more !).
+
+ pylint --help
+
+ Display this help message and exit.
+
+ pylint --help-msg <msg-id>[,<msg-id>]
+
+ Display help messages about given message identifiers and exit.
+"""
+import collections
+import contextlib
+import operator
+import os
+import sys
+import tokenize
+import traceback
+import warnings
+from io import TextIOWrapper
+
+import astroid
+from astroid import modutils
+from astroid.__pkginfo__ import version as astroid_version
+from astroid.builder import AstroidBuilder
+
+from pylint import __pkginfo__, checkers, config, exceptions, interfaces, reporters
+from pylint.__pkginfo__ import version
+from pylint.constants import MAIN_CHECKER_NAME, MSG_TYPES, OPTION_RGX
+from pylint.message import Message, MessageDefinitionStore, MessagesHandlerMixIn
+from pylint.reporters.ureports import nodes as report_nodes
+from pylint.utils import ASTWalker, FileState, utils
+
+try:
+ import multiprocessing
+except ImportError:
+ multiprocessing = None # type: ignore
+
+
+MANAGER = astroid.MANAGER
+
+
def _ast_from_string(data, filepath, modname):
    """Build the astroid AST for *data*, reusing the astroid cache when the
    cached module for *modname* was built from the same *filepath*."""
    cached = MANAGER.astroid_cache.get(modname)
    if cached and cached.file == filepath:
        return cached

    return AstroidBuilder(MANAGER).string_build(data, modname, filepath)
+
+
def _read_stdin():
    """Read all of stdin, forcing utf-8 decoding of the underlying buffer.

    Note: rebinds sys.stdin to the re-wrapped stream as a side effect.
    """
    # https://mail.python.org/pipermail/python-list/2012-November/634424.html
    sys.stdin = TextIOWrapper(sys.stdin.detach(), encoding="utf-8")
    return sys.stdin.read()
+
+
+def _get_new_args(message):
+ location = (
+ message.abspath,
+ message.path,
+ message.module,
+ message.obj,
+ message.line,
+ message.column,
+ )
+ return (message.msg_id, message.symbol, location, message.msg, message.confidence)
+
+
+def _get_python_path(filepath):
+ dirname = os.path.realpath(os.path.expanduser(filepath))
+ if not os.path.isdir(dirname):
+ dirname = os.path.dirname(dirname)
+ while True:
+ if not os.path.exists(os.path.join(dirname, "__init__.py")):
+ return dirname
+ old_dirname = dirname
+ dirname = os.path.dirname(dirname)
+ if old_dirname == dirname:
+ return os.getcwd()
+ return None
+
+
+def _merge_stats(stats):
+ merged = {}
+ by_msg = collections.Counter()
+ for stat in stats:
+ message_stats = stat.pop("by_msg", {})
+ by_msg.update(message_stats)
+
+ for key, item in stat.items():
+ if key not in merged:
+ merged[key] = item
+ else:
+ if isinstance(item, dict):
+ merged[key].update(item)
+ else:
+ merged[key] = merged[key] + item
+
+ merged["by_msg"] = by_msg
+ return merged
+
+
+# Python Linter class #########################################################
+
# Messages emitted by the PyLinter itself (fatal analysis failures,
# inline-option diagnostics, suppression bookkeeping) rather than by a
# specific checker.
MSGS = {
    "F0001": (
        "%s",
        "fatal",
        "Used when an error occurred preventing the analysis of a \
              module (unable to find it for instance).",
    ),
    "F0002": (
        "%s: %s",
        "astroid-error",
        "Used when an unexpected error occurred while building the "
        "Astroid representation. This is usually accompanied by a "
        "traceback. Please report such errors !",
    ),
    "F0010": (
        "error while code parsing: %s",
        "parse-error",
        "Used when an exception occurred while building the Astroid "
        "representation which could be handled by astroid.",
    ),
    "I0001": (
        "Unable to run raw checkers on built-in module %s",
        "raw-checker-failed",
        "Used to inform that a built-in module has not been checked "
        "using the raw checkers.",
    ),
    "I0010": (
        "Unable to consider inline option %r",
        "bad-inline-option",
        "Used when an inline option is either badly formatted or can't "
        "be used inside modules.",
    ),
    "I0011": (
        "Locally disabling %s (%s)",
        "locally-disabled",
        "Used when an inline option disables a message or a messages category.",
    ),
    "I0013": (
        "Ignoring entire file",
        "file-ignored",
        "Used to inform that the file will not be checked",
    ),
    "I0020": (
        "Suppressed %s (from line %d)",
        "suppressed-message",
        "A message was triggered on a line, but suppressed explicitly "
        "by a disable= comment in the file. This message is not "
        "generated for messages that are ignored due to configuration "
        "settings.",
    ),
    "I0021": (
        "Useless suppression of %s",
        "useless-suppression",
        "Reported when a message is explicitly disabled for a line or "
        "a block of code, but never triggered.",
    ),
    "I0022": (
        'Pragma "%s" is deprecated, use "%s" instead',
        "deprecated-pragma",
        "Some inline pylint options have been renamed or reworked, "
        "only the most recent form should be used. "
        "NOTE:skip-all is only available with pylint >= 0.26",
        {"old_names": [("I0014", "deprecated-disable-all")]},
    ),
    "E0001": ("%s", "syntax-error", "Used when a syntax error is raised for a module."),
    "E0011": (
        "Unrecognized file option %r",
        "unrecognized-inline-option",
        "Used when an unknown inline option is encountered.",
    ),
    "E0012": (
        "Bad option value %r",
        "bad-option-value",
        "Used when a bad value for an inline option is encountered.",
    ),
}
+
+
+def _cpu_count() -> int:
+ """Use sched_affinity if available for virtualized or containerized environments."""
+ sched_getaffinity = getattr(os, "sched_getaffinity", None)
+ # pylint: disable=not-callable,using-constant-test
+ if sched_getaffinity:
+ return len(sched_getaffinity(0))
+ if multiprocessing:
+ return multiprocessing.cpu_count()
+ return 1
+
+
if multiprocessing is not None:

    class ChildLinter(multiprocessing.Process):
        """Worker process used by parallel (-j) runs.

        Receives file/module names over a task queue, lints each one with
        its own PyLinter, and pushes the collected messages and stats back
        over a results queue.
        """

        def run(self):
            # pylint: disable=no-member, unbalanced-tuple-unpacking
            tasks_queue, results_queue, self._config = self._args

            self._config["jobs"] = 1  # Child does not parallelize any further.
            self._python3_porting_mode = self._config.pop("python3_porting_mode", None)
            self._plugins = self._config.pop("plugins", None)

            # Run linter for received files/modules.
            for file_or_module in iter(tasks_queue.get, "STOP"):
                try:
                    result = self._run_linter(file_or_module[0])
                    results_queue.put(result)
                except Exception as ex:
                    print(
                        "internal error with sending report for module %s"
                        % file_or_module,
                        file=sys.stderr,
                    )
                    print(ex, file=sys.stderr)
                    # send an empty result so the parent is not left
                    # waiting for this module
                    results_queue.put({})

        def _run_linter(self, file_or_module):
            """Lint one file/module and return its results as a plain tuple."""
            linter = PyLinter()

            # Register standard checkers.
            linter.load_default_plugins()
            # Load command line plugins.
            if self._plugins:
                linter.load_plugin_modules(self._plugins)

            linter.load_configuration_from_config(self._config)

            # Load plugin specific configuration
            linter.load_plugin_configuration()

            # collect messages in memory instead of printing them
            linter.set_reporter(reporters.CollectingReporter())

            # Enable the Python 3 checker mode. This option is
            # passed down from the parent linter up to here, since
            # the Python 3 porting flag belongs to the Run class,
            # instead of the Linter class.
            if self._python3_porting_mode:
                linter.python3_porting_mode()

            # Run the checks.
            linter.check(file_or_module)

            msgs = [_get_new_args(m) for m in linter.reporter.messages]
            return (
                file_or_module,
                linter.file_state.base_name,
                linter.current_name,
                msgs,
                linter.stats,
                linter.msg_status,
            )
+
+
# pylint: disable=too-many-instance-attributes,too-many-public-methods
class PyLinter(
    config.OptionsManagerMixIn,
    MessagesHandlerMixIn,
    reporters.ReportsHandlerMixIn,
    checkers.BaseTokenChecker,
):
    """lint Python modules using external checkers.

    This is the main checker controlling the other ones and the reports
    generation. It is itself both a raw checker and an astroid checker in order
    to:
    * handle message activation / deactivation at the module level
    * handle some basic but necessary stats'data (number of classes, methods...)

    IDE plugin developers: you may have to call
    `astroid.builder.MANAGER.astroid_cache.clear()` across runs if you want
    to ensure the latest code version is actually checked.
    """

    __implements__ = (interfaces.ITokenChecker,)

    name = MAIN_CHECKER_NAME
    # 0 is the highest priority allowed: register_checker() asserts that
    # every checker's priority is <= 0.
    priority = 0
    level = 0
    msgs = MSGS

    @staticmethod
    def make_options():
        """Return the tuple of option definitions for the linter itself."""
        return (
            (
                "ignore",
                {
                    "type": "csv",
                    "metavar": "<file>[,<file>...]",
                    "dest": "black_list",
                    "default": ("CVS",),
                    "help": "Add files or directories to the blacklist. "
                    "They should be base names, not paths.",
                },
            ),
            (
                "ignore-patterns",
                {
                    "type": "regexp_csv",
                    "metavar": "<pattern>[,<pattern>...]",
                    "dest": "black_list_re",
                    "default": (),
                    "help": "Add files or directories matching the regex patterns to the"
                    " blacklist. The regex matches against base names, not paths.",
                },
            ),
            (
                "persistent",
                {
                    "default": True,
                    "type": "yn",
                    "metavar": "<y_or_n>",
                    "level": 1,
                    "help": "Pickle collected data for later comparisons.",
                },
            ),
            (
                "load-plugins",
                {
                    "type": "csv",
                    "metavar": "<modules>",
                    "default": (),
                    "level": 1,
                    "help": "List of plugins (as comma separated values of "
                    "python module names) to load, usually to register "
                    "additional checkers.",
                },
            ),
            (
                "output-format",
                {
                    "default": "text",
                    "type": "string",
                    "metavar": "<format>",
                    "short": "f",
                    "group": "Reports",
                    "help": "Set the output format. Available formats are text,"
                    " parseable, colorized, json and msvs (visual studio)."
                    " You can also give a reporter class, e.g. mypackage.mymodule."
                    "MyReporterClass.",
                },
            ),
            (
                "reports",
                {
                    "default": False,
                    "type": "yn",
                    "metavar": "<y_or_n>",
                    "short": "r",
                    "group": "Reports",
                    "help": "Tells whether to display a full report or only the "
                    "messages.",
                },
            ),
            (
                "evaluation",
                {
                    "type": "string",
                    "metavar": "<python_expression>",
                    "group": "Reports",
                    "level": 1,
                    "default": "10.0 - ((float(5 * error + warning + refactor + "
                    "convention) / statement) * 10)",
                    "help": "Python expression which should return a score less "
                    "than or equal to 10. You have access to the variables "
                    "'error', 'warning', 'refactor', and 'convention' which "
                    "contain the number of messages in each category, as well as "
                    "'statement' which is the total number of statements "
                    "analyzed. This score is used by the global "
                    "evaluation report (RP0004).",
                },
            ),
            (
                "score",
                {
                    "default": True,
                    "type": "yn",
                    "metavar": "<y_or_n>",
                    "short": "s",
                    "group": "Reports",
                    "help": "Activate the evaluation score.",
                },
            ),
            (
                "confidence",
                {
                    "type": "multiple_choice",
                    "metavar": "<levels>",
                    "default": "",
                    "choices": [c.name for c in interfaces.CONFIDENCE_LEVELS],
                    "group": "Messages control",
                    "help": "Only show warnings with the listed confidence levels."
                    " Leave empty to show all. Valid levels: %s."
                    % (", ".join(c.name for c in interfaces.CONFIDENCE_LEVELS),),
                },
            ),
            (
                "enable",
                {
                    "type": "csv",
                    "metavar": "<msg ids>",
                    "short": "e",
                    "group": "Messages control",
                    "help": "Enable the message, report, category or checker with the "
                    "given id(s). You can either give multiple identifier "
                    "separated by comma (,) or put this option multiple time "
                    "(only on the command line, not in the configuration file "
                    "where it should appear only once). "
                    'See also the "--disable" option for examples.',
                },
            ),
            (
                "disable",
                {
                    "type": "csv",
                    "metavar": "<msg ids>",
                    "short": "d",
                    "group": "Messages control",
                    "help": "Disable the message, report, category or checker "
                    "with the given id(s). You can either give multiple identifiers "
                    "separated by comma (,) or put this option multiple times "
                    "(only on the command line, not in the configuration file "
                    "where it should appear only once). "
                    'You can also use "--disable=all" to disable everything first '
                    "and then reenable specific checks. For example, if you want "
                    "to run only the similarities checker, you can use "
                    '"--disable=all --enable=similarities". '
                    "If you want to run only the classes checker, but have no "
                    "Warning level messages displayed, use "
                    '"--disable=all --enable=classes --disable=W".',
                },
            ),
            (
                "msg-template",
                {
                    "type": "string",
                    "metavar": "<template>",
                    "group": "Reports",
                    "help": (
                        "Template used to display messages. "
                        "This is a python new-style format string "
                        "used to format the message information. "
                        "See doc for all details."
                    ),
                },
            ),
            (
                "jobs",
                {
                    "type": "int",
                    "metavar": "<n-processes>",
                    "short": "j",
                    "default": 1,
                    "help": "Use multiple processes to speed up Pylint. Specifying 0 will "
                    "auto-detect the number of processors available to use.",
                },
            ),
            (
                "unsafe-load-any-extension",
                {
                    "type": "yn",
                    "metavar": "<yn>",
                    "default": False,
                    "hide": True,
                    "help": (
                        "Allow loading of arbitrary C extensions. Extensions"
                        " are imported into the active Python interpreter and"
                        " may run arbitrary code."
                    ),
                },
            ),
            (
                "limit-inference-results",
                {
                    "type": "int",
                    "metavar": "<number-of-results>",
                    "default": 100,
                    "help": (
                        "Control the amount of potential inferred values when inferring "
                        "a single object. This can help the performance when dealing with "
                        "large functions or complex, nested conditions. "
                    ),
                },
            ),
            (
                "extension-pkg-whitelist",
                {
                    "type": "csv",
                    "metavar": "<pkg[,pkg]>",
                    "default": [],
                    "help": (
                        "A comma-separated list of package or module names"
                        " from where C extensions may be loaded. Extensions are"
                        " loading into the active Python interpreter and may run"
                        " arbitrary code."
                    ),
                },
            ),
            (
                "suggestion-mode",
                {
                    "type": "yn",
                    "metavar": "<yn>",
                    "default": True,
                    "help": (
                        "When enabled, pylint would attempt to guess common "
                        "misconfiguration and emit user-friendly hints instead "
                        "of false-positive error messages."
                    ),
                },
            ),
            (
                "exit-zero",
                {
                    "action": "store_true",
                    "help": (
                        "Always return a 0 (non-error) status code, even if "
                        "lint errors are found. This is primarily useful in "
                        "continuous integration scripts."
                    ),
                },
            ),
            (
                "from-stdin",
                {
                    "action": "store_true",
                    "help": (
                        "Interpret the stdin as a python script, whose filename "
                        "needs to be passed as the module_or_package argument."
                    ),
                },
            ),
        )

    option_groups = (
        ("Messages control", "Options controlling analysis messages"),
        ("Reports", "Options related to output formatting and reporting"),
    )

    def __init__(self, options=(), reporter=None, option_groups=(), pylintrc=None):
        """Initialize the linter, registering itself as the first checker.

        :param tuple options: extra option definitions, prepended to the
            linter's own (see make_options()).
        :param reporter: reporter instance to use; when None, the reporter is
            created later from the "output-format" option.
        :param tuple option_groups: extra option groups, prepended to the
            class-level ones.
        :param pylintrc: path to the configuration file; falls back to
            config.PYLINTRC when not given.
        """
        # some stuff has to be done before ancestors initialization...
        #
        # messages store / checkers / reporter / astroid manager
        self.msgs_store = MessageDefinitionStore()
        self.reporter = None
        self._reporter_name = None
        self._reporters = {}
        self._checkers = collections.defaultdict(list)
        # line number of the last control pragma ("disable"/"enable") seen
        # for each msgid; filled in by process_tokens().
        self._pragma_lineno = {}
        self._ignore_file = False
        # visit variables
        self.file_state = FileState()
        self.current_name = None
        self.current_file = None
        self.stats = None
        # init options
        self._external_opts = options
        self.options = options + PyLinter.make_options()
        self.option_groups = option_groups + PyLinter.option_groups
        self._options_methods = {"enable": self.enable, "disable": self.disable}
        # backward-compatible spellings of the enable/disable pragmas; their
        # use triggers a DeprecationWarning in set_option().
        self._bw_options_methods = {
            "disable-msg": self.disable,
            "enable-msg": self.enable,
        }
        full_version = "pylint %s\nastroid %s\nPython %s" % (
            version,
            astroid_version,
            sys.version,
        )
        MessagesHandlerMixIn.__init__(self)
        reporters.ReportsHandlerMixIn.__init__(self)
        super(PyLinter, self).__init__(
            usage=__doc__, version=full_version, config_file=pylintrc or config.PYLINTRC
        )
        checkers.BaseTokenChecker.__init__(self)
        # provided reports
        self.reports = (
            ("RP0001", "Messages by category", report_total_messages_stats),
            (
                "RP0002",
                "% errors / warnings by module",
                report_messages_by_module_stats,
            ),
            ("RP0003", "Messages", report_messages_stats),
        )
        # the linter is itself a checker: register it like any other
        self.register_checker(self)
        self._dynamic_plugins = set()
        self._python3_porting_mode = False
        self._error_mode = False
        self.load_provider_defaults()
        if reporter:
            self.set_reporter(reporter)

    def load_default_plugins(self):
        """Initialize the standard checkers and reporters."""
        checkers.initialize(self)
        reporters.initialize(self)
        # Make sure to load the default reporter, because
        # the option has been set before the plugins had been loaded.
        if not self.reporter:
            self._load_reporter()

    def load_plugin_modules(self, modnames):
        """take a list of module names which are pylint plugins and load
        and register them
        """
        for modname in modnames:
            if modname in self._dynamic_plugins:
                # already loaded: do not register a plugin twice
                continue
            self._dynamic_plugins.add(modname)
            module = modutils.load_module_from_name(modname)
            module.register(self)

    def load_plugin_configuration(self):
        """Call the configuration hook for plugins

        This walks through the list of plugins, grabs the "load_configuration"
        hook, if exposed, and calls it to allow plugins to configure specific
        settings.
        """
        for modname in self._dynamic_plugins:
            module = modutils.load_module_from_name(modname)
            if hasattr(module, "load_configuration"):
                module.load_configuration(self)

    def _load_reporter(self):
        """Instantiate and activate the reporter named by self._reporter_name.

        Known reporter names are resolved from the registry; otherwise the
        name is treated as a dotted path to a reporter class.

        :raises exceptions.InvalidReporterError: if the dotted path cannot
            be imported or the class is missing.
        """
        name = self._reporter_name.lower()
        if name in self._reporters:
            self.set_reporter(self._reporters[name]())
        else:
            try:
                reporter_class = self._load_reporter_class()
            except (ImportError, AttributeError):
                raise exceptions.InvalidReporterError(name)
            else:
                self.set_reporter(reporter_class())

    def _load_reporter_class(self):
        """Import and return the reporter class named by its qualified name."""
        qname = self._reporter_name
        module = modutils.load_module_from_name(modutils.get_module_part(qname))
        class_name = qname.split(".")[-1]
        reporter_class = getattr(module, class_name)
        return reporter_class

    def set_reporter(self, reporter):
        """set the reporter used to display messages and reports"""
        self.reporter = reporter
        # the reporter needs a back-reference for module change notifications
        reporter.linter = self

    def set_option(self, optname, value, action=None, optdict=None):
        """overridden from config.OptionsProviderMixin to handle some
        special options
        """
        if optname in self._options_methods or optname in self._bw_options_methods:
            if value:
                try:
                    meth = self._options_methods[optname]
                except KeyError:
                    # old "(dis|en)able-msg" spelling: still honored, but warn
                    meth = self._bw_options_methods[optname]
                    warnings.warn(
                        "%s is deprecated, replace it by %s"
                        % (optname, optname.split("-")[0]),
                        DeprecationWarning,
                    )
                value = utils._check_csv(value)
                if isinstance(value, (list, tuple)):
                    for _id in value:
                        meth(_id, ignore_unknown=True)
                else:
                    meth(value)
            return  # no need to call set_option, disable/enable methods do it
        elif optname == "output-format":
            self._reporter_name = value
            # If the reporters are already available, load
            # the reporter class.
            if self._reporters:
                self._load_reporter()

        try:
            checkers.BaseTokenChecker.set_option(self, optname, value, action, optdict)
        except config.UnsupportedAction:
            print("option %s can't be read from config file" % optname, file=sys.stderr)

    def register_reporter(self, reporter_class):
        """Register a reporter class under its ``name`` attribute."""
        self._reporters[reporter_class.name] = reporter_class

    def report_order(self):
        """Return report providers sorted by name, with this linter last."""
        reports = sorted(self._reports, key=lambda x: getattr(x, "name", ""))
        try:
            # Remove the current reporter and add it
            # at the end of the list.
            reports.pop(reports.index(self))
        except ValueError:
            pass
        else:
            reports.append(self)
        return reports

    # checkers manipulation methods ############################################

    def register_checker(self, checker):
        """register a new checker

        checker is an object implementing IRawChecker or / and IAstroidChecker
        """
        assert checker.priority <= 0, "checker priority can't be >= 0"
        self._checkers[checker.name].append(checker)
        for r_id, r_title, r_cb in checker.reports:
            self.register_report(r_id, r_title, r_cb, checker)
        self.register_options_provider(checker)
        if hasattr(checker, "msgs"):
            self.msgs_store.register_messages_from_checker(checker)
        checker.load_defaults()

        # Register the checker, but disable all of its messages.
        if not getattr(checker, "enabled", True):
            self.disable(checker.name)

    def disable_noerror_messages(self):
        """Enable only error ('E') and fatal ('F') messages, disable the rest."""
        for msgcat, msgids in self.msgs_store._msgs_by_category.items():
            # enable only messages with 'error' severity and above ('fatal')
            if msgcat in ["E", "F"]:
                for msgid in msgids:
                    self.enable(msgid)
            else:
                for msgid in msgids:
                    self.disable(msgid)

    def disable_reporters(self):
        """disable all reporters"""
        for _reporters in self._reports.values():
            for report_id, _, _ in _reporters:
                self.disable_report(report_id)

    def error_mode(self):
        """error mode: enable only errors; no reports, no persistent"""
        self._error_mode = True
        self.disable_noerror_messages()
        self.disable("miscellaneous")
        if self._python3_porting_mode:
            # porting mode active too: keep only python3 checker errors
            self.disable("all")
            for msg_id in self._checker_messages("python3"):
                if msg_id.startswith("E"):
                    self.enable(msg_id)
            # re-apply explicit disables from the configuration file, which
            # would otherwise be lost by the blanket disable("all") above
            config_parser = self.cfgfile_parser
            if config_parser.has_option("MESSAGES CONTROL", "disable"):
                value = config_parser.get("MESSAGES CONTROL", "disable")
                self.global_set_option("disable", value)
        else:
            self.disable("python3")
        self.set_option("reports", False)
        self.set_option("persistent", False)
        self.set_option("score", False)

    def python3_porting_mode(self):
        """Disable all other checkers and enable Python 3 warnings."""
        self.disable("all")
        self.enable("python3")
        if self._error_mode:
            # The error mode was activated, using the -E flag.
            # So we'll need to enable only the errors from the
            # Python 3 porting checker.
            for msg_id in self._checker_messages("python3"):
                if msg_id.startswith("E"):
                    self.enable(msg_id)
                else:
                    self.disable(msg_id)
        # re-apply explicit disables from the configuration file, which
        # would otherwise be lost by the blanket disable("all") above
        config_parser = self.cfgfile_parser
        if config_parser.has_option("MESSAGES CONTROL", "disable"):
            value = config_parser.get("MESSAGES CONTROL", "disable")
            self.global_set_option("disable", value)
        self._python3_porting_mode = True

    def list_messages_enabled(self):
        """Print the currently enabled and disabled messages to stdout."""
        enabled = [
            "  %s (%s)" % (message.symbol, message.msgid)
            for message in self.msgs_store.messages
            if self.is_message_enabled(message.msgid)
        ]
        disabled = [
            "  %s (%s)" % (message.symbol, message.msgid)
            for message in self.msgs_store.messages
            if not self.is_message_enabled(message.msgid)
        ]
        print("Enabled messages:")
        for msg in sorted(enabled):
            print(msg)
        print("\nDisabled messages:")
        for msg in sorted(disabled):
            print(msg)
        print("")

    # block level option handling #############################################
    #
    # see func_block_disable_msg.py test case for expected behaviour

    def process_tokens(self, tokens):
        """process tokens from the current module to search for module/block
        level options
        """
        control_pragmas = {"disable", "enable"}
        prev_line = None
        # saw_newline: whether the previous physical line ended with a real
        # NEWLINE/NL token (False means a backslash continuation).
        saw_newline = True
        seen_newline = True
        for (tok_type, content, start, _, _) in tokens:
            if prev_line and prev_line != start[0]:
                saw_newline = seen_newline
                seen_newline = False

            prev_line = start[0]
            if tok_type in (tokenize.NL, tokenize.NEWLINE):
                seen_newline = True

            if tok_type != tokenize.COMMENT:
                continue
            match = OPTION_RGX.search(content)
            if match is None:
                continue

            first_group = match.group(1)
            if (
                first_group.strip() == "disable-all"
                or first_group.strip() == "skip-file"
            ):
                if first_group.strip() == "disable-all":
                    self.add_message(
                        "deprecated-pragma",
                        line=start[0],
                        args=("disable-all", "skip-file"),
                    )
                self.add_message("file-ignored", line=start[0])
                self._ignore_file = True
                return
            try:
                opt, value = first_group.split("=", 1)
            except ValueError:
                self.add_message(
                    "bad-inline-option", args=first_group.strip(), line=start[0]
                )
                continue
            opt = opt.strip()
            if opt in self._options_methods or opt in self._bw_options_methods:
                try:
                    meth = self._options_methods[opt]
                except KeyError:
                    meth = self._bw_options_methods[opt]
                    # found a "(dis|en)able-msg" pragma deprecated suppression
                    self.add_message(
                        "deprecated-pragma",
                        line=start[0],
                        args=(opt, opt.replace("-msg", "")),
                    )
                for msgid in utils._splitstrip(value):
                    # Add the line where a control pragma was encountered.
                    if opt in control_pragmas:
                        self._pragma_lineno[msgid] = start[0]

                    try:
                        if (opt, msgid) == ("disable", "all"):
                            self.add_message(
                                "deprecated-pragma",
                                line=start[0],
                                args=("disable=all", "skip-file"),
                            )
                            self.add_message("file-ignored", line=start[0])
                            self._ignore_file = True
                            return
                        # If we did not see a newline between the previous line and now,
                        # we saw a backslash so treat the two lines as one.
                        if not saw_newline:
                            meth(msgid, "module", start[0] - 1)
                        meth(msgid, "module", start[0])
                    except exceptions.UnknownMessageError:
                        self.add_message("bad-option-value", args=msgid, line=start[0])
            else:
                self.add_message("unrecognized-inline-option", args=opt, line=start[0])

    # code checking methods ###################################################

    def get_checkers(self):
        """return all available checkers as a list"""
        return [self] + [
            c
            for _checkers in self._checkers.values()
            for c in _checkers
            if c is not self
        ]

    def get_checker_names(self):
        """Get all the checker names that this linter knows about."""
        current_checkers = self.get_checkers()
        return sorted(
            {
                checker.name
                for checker in current_checkers
                if checker.name != MAIN_CHECKER_NAME
            }
        )

    def prepare_checkers(self):
        """return checkers needed for activated messages and reports"""
        if not self.config.reports:
            self.disable_reporters()
        # get needed checkers
        needed_checkers = [self]
        for checker in self.get_checkers()[1:]:
            messages = {msg for msg in checker.msgs if self.is_message_enabled(msg)}
            if messages or any(self.report_is_enabled(r[0]) for r in checker.reports):
                needed_checkers.append(checker)
        # Sort checkers by priority
        needed_checkers = sorted(
            needed_checkers, key=operator.attrgetter("priority"), reverse=True
        )
        return needed_checkers

    # pylint: disable=unused-argument
    @staticmethod
    def should_analyze_file(modname, path, is_argument=False):
        """Returns whether or not a module should be checked.

        This implementation returns True for all python source file, indicating
        that all files should be linted.

        Subclasses may override this method to indicate that modules satisfying
        certain conditions should not be linted.

        :param str modname: The name of the module to be checked.
        :param str path: The full path to the source code of the module.
        :param bool is_argument: Whether the file is an argument to pylint or not.
                                 Files which respect this property are always
                                 checked, since the user requested it explicitly.
        :returns: True if the module should be checked.
        :rtype: bool
        """
        if is_argument:
            return True
        return path.endswith(".py")

    # pylint: enable=unused-argument

    def check(self, files_or_modules):
        """main checking entry: check a list of files or modules from their
        name.
        """
        # initialize msgs_state now that all messages have been registered into
        # the store
        for msg in self.msgs_store.messages:
            if not msg.may_be_emitted():
                self._msgs_state[msg.msgid] = False

        # accept a single name as well as a sequence of names
        if not isinstance(files_or_modules, (list, tuple)):
            files_or_modules = (files_or_modules,)

        if self.config.jobs == 1:
            self._do_check(files_or_modules)
        else:
            self._parallel_check(files_or_modules)

    def _get_jobs_config(self):
        """Build the option snapshot to pass to child linters in parallel runs."""
        child_config = collections.OrderedDict()
        # options handled only by the parent process are filtered out
        filter_options = {"long-help"}
        filter_options.update((opt_name for opt_name, _ in self._external_opts))
        for opt_providers in self._all_options.values():
            for optname, optdict, val in opt_providers.options_and_values():
                if optdict.get("deprecated"):
                    continue

                if optname not in filter_options:
                    child_config[optname] = utils._format_option_value(optdict, val)
        child_config["python3_porting_mode"] = self._python3_porting_mode
        child_config["plugins"] = self._dynamic_plugins
        return child_config

    def _parallel_task(self, files_or_modules):
        """Spawn child linters, feed them files and yield per-module results.

        Generator used by _parallel_check(); exits the process with status 32
        if receiving a result fails.
        """
        # Prepare configuration for child linters.
        child_config = self._get_jobs_config()

        children = []
        manager = multiprocessing.Manager()
        tasks_queue = manager.Queue()
        results_queue = manager.Queue()

        # Send files to child linters.
        expanded_files = []
        for descr in self.expand_files(files_or_modules):
            modname, filepath, is_arg = descr["name"], descr["path"], descr["isarg"]
            if self.should_analyze_file(modname, filepath, is_argument=is_arg):
                expanded_files.append(descr)

        # do not start more jobs than needed
        for _ in range(min(self.config.jobs, len(expanded_files))):
            child_linter = ChildLinter(args=(tasks_queue, results_queue, child_config))
            child_linter.start()
            children.append(child_linter)

        for files_or_module in expanded_files:
            path = files_or_module["path"]
            tasks_queue.put([path])

        # collect results from child linters
        failed = False
        for _ in expanded_files:
            try:
                result = results_queue.get()
            except Exception as ex:
                print(
                    "internal error while receiving results from child linter",
                    file=sys.stderr,
                )
                print(ex, file=sys.stderr)
                failed = True
                break
            yield result

        # Stop child linters and wait for their completion.
        for _ in range(self.config.jobs):
            tasks_queue.put("STOP")
        for child in children:
            child.join()

        if failed:
            print("Error occurred, stopping the linter.", file=sys.stderr)
            sys.exit(32)

    def _parallel_check(self, files_or_modules):
        """Lint the given files with multiple processes and merge the results."""
        # Reset stats.
        self.open()

        all_stats = []
        module = None
        for result in self._parallel_task(files_or_modules):
            if not result:
                continue
            (_, self.file_state.base_name, module, messages, stats, msg_status) = result

            for msg in messages:
                msg = Message(*msg)
                self.set_current_module(module)
                self.reporter.handle_message(msg)

            all_stats.append(stats)
            self.msg_status |= msg_status

        self.stats = _merge_stats(all_stats)
        self.current_name = module

        # Insert stats data to local checkers.
        for checker in self.get_checkers():
            if checker is not self:
                checker.stats = self.stats

    def _do_check(self, files_or_modules):
        """Lint the given files sequentially in this process."""
        walker = ASTWalker(self)
        _checkers = self.prepare_checkers()
        tokencheckers = [
            c
            for c in _checkers
            if interfaces.implements(c, interfaces.ITokenChecker) and c is not self
        ]
        rawcheckers = [
            c for c in _checkers if interfaces.implements(c, interfaces.IRawChecker)
        ]
        # notify global begin
        for checker in _checkers:
            checker.open()
            if interfaces.implements(checker, interfaces.IAstroidChecker):
                walker.add_checker(checker)
        # build ast and check modules or packages
        if self.config.from_stdin:
            if len(files_or_modules) != 1:
                raise exceptions.InvalidArgsError(
                    "Missing filename required for --from-stdin"
                )

            filepath = files_or_modules[0]
            try:
                # Note that this function does not really perform an
                # __import__ but may raise an ImportError exception, which
                # we want to catch here.
                modname = ".".join(modutils.modpath_from_file(filepath))
            except ImportError:
                modname = os.path.splitext(os.path.basename(filepath))[0]

            self.set_current_module(modname, filepath)

            # get the module representation
            ast_node = _ast_from_string(_read_stdin(), filepath, modname)

            if ast_node is not None:
                self.file_state = FileState(filepath)
                self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers)
                # warn about spurious inline messages handling
                spurious_messages = self.file_state.iter_spurious_suppression_messages(
                    self.msgs_store
                )
                for msgid, line, args in spurious_messages:
                    self.add_message(msgid, line, None, args)
        else:
            for descr in self.expand_files(files_or_modules):
                modname, filepath, is_arg = descr["name"], descr["path"], descr["isarg"]
                if not self.should_analyze_file(modname, filepath, is_argument=is_arg):
                    continue

                self.set_current_module(modname, filepath)
                # get the module representation
                ast_node = self.get_ast(filepath, modname)
                if ast_node is None:
                    continue

                self.file_state = FileState(descr["basename"])
                self._ignore_file = False
                # fix the current file (if the source file was not available or
                # if it's actually a c extension)
                self.current_file = ast_node.file  # pylint: disable=maybe-no-member
                before_check_statements = walker.nbstatements
                self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers)
                self.stats["by_module"][modname]["statement"] = (
                    walker.nbstatements - before_check_statements
                )
                # warn about spurious inline messages handling
                spurious_messages = self.file_state.iter_spurious_suppression_messages(
                    self.msgs_store
                )
                for msgid, line, args in spurious_messages:
                    self.add_message(msgid, line, None, args)
        # notify global end
        self.stats["statement"] = walker.nbstatements
        for checker in reversed(_checkers):
            checker.close()

    def expand_files(self, modules):
        """get modules and errors from a list of modules and handle errors
        """
        result, errors = utils.expand_modules(
            modules, self.config.black_list, self.config.black_list_re
        )
        for error in errors:
            message = modname = error["mod"]
            key = error["key"]
            self.set_current_module(modname)
            if key == "fatal":
                # strip the working directory from the error message
                message = str(error["ex"]).replace(os.getcwd() + os.sep, "")
            self.add_message(key, args=message)
        return result

    def set_current_module(self, modname, filepath=None):
        """set the name of the currently analyzed module and
        init statistics for it
        """
        if not modname and filepath is None:
            return
        self.reporter.on_set_current_module(modname, filepath)
        self.current_name = modname
        self.current_file = filepath or modname
        self.stats["by_module"][modname] = {}
        self.stats["by_module"][modname]["statement"] = 0
        for msg_cat in MSG_TYPES.values():
            self.stats["by_module"][modname][msg_cat] = 0

    def get_ast(self, filepath, modname):
        """return an ast(roid) representation for a module"""
        try:
            return MANAGER.ast_from_file(filepath, modname, source=True)
        except astroid.AstroidSyntaxError as ex:
            # pylint: disable=no-member
            self.add_message(
                "syntax-error",
                line=getattr(ex.error, "lineno", 0),
                col_offset=getattr(ex.error, "offset", None),
                args=str(ex.error),
            )
        except astroid.AstroidBuildingException as ex:
            self.add_message("parse-error", args=ex)
        except Exception as ex:
            traceback.print_exc()
            self.add_message("astroid-error", args=(ex.__class__, ex))

    def check_astroid_module(self, ast_node, walker, rawcheckers, tokencheckers):
        """Check a module from its astroid representation."""
        try:
            tokens = utils.tokenize_module(ast_node)
        except tokenize.TokenError as ex:
            self.add_message("syntax-error", line=ex.args[1][0], args=ex.args[0])
            return None

        if not ast_node.pure_python:
            self.add_message("raw-checker-failed", args=ast_node.name)
        else:
            # assert astroid.file.endswith('.py')
            # invoke ITokenChecker interface on self to fetch module/block
            # level options
            self.process_tokens(tokens)
            if self._ignore_file:
                return False
            # walk ast to collect line numbers
            self.file_state.collect_block_lines(self.msgs_store, ast_node)
            # run raw and tokens checkers
            for checker in rawcheckers:
                checker.process_module(ast_node)
            for checker in tokencheckers:
                checker.process_tokens(tokens)
        # generate events to astroid checkers
        walker.walk(ast_node)
        return True

    # IAstroidChecker interface #################################################

    def open(self):
        """initialize counters"""
        self.stats = {"by_module": {}, "by_msg": {}}
        MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
        MANAGER.max_inferable_values = self.config.limit_inference_results
        MANAGER.extension_package_whitelist.update(self.config.extension_pkg_whitelist)
        for msg_cat in MSG_TYPES.values():
            self.stats[msg_cat] = 0

    def generate_reports(self):
        """close the whole package /module, it's time to make reports !

        if persistent run, pickle results for later comparison
        """
        # Display whatever messages are left on the reporter.
        self.reporter.display_messages(report_nodes.Section())

        if self.file_state.base_name is not None:
            # load previous results if any
            previous_stats = config.load_results(self.file_state.base_name)
            self.reporter.on_close(self.stats, previous_stats)
            if self.config.reports:
                sect = self.make_reports(self.stats, previous_stats)
            else:
                sect = report_nodes.Section()

            if self.config.reports:
                self.reporter.display_reports(sect)
            self._report_evaluation()
            # save results if persistent run
            if self.config.persistent:
                config.save_results(self.stats, self.file_state.base_name)
        else:
            self.reporter.on_close(self.stats, {})

    def _report_evaluation(self):
        """make the global evaluation report"""
        # only rate the code when at least one statement was analyzed
        # (0 usually means a syntax error prevented further processing)
        previous_stats = config.load_results(self.file_state.base_name)
        if self.stats["statement"] == 0:
            return

        # get a global note for the code
        evaluation = self.config.evaluation
        try:
            note = eval(evaluation, {}, self.stats)  # pylint: disable=eval-used
        except Exception as ex:
            msg = "An exception occurred while rating: %s" % ex
        else:
            self.stats["global_note"] = note
            msg = "Your code has been rated at %.2f/10" % note
            pnote = previous_stats.get("global_note")
            if pnote is not None:
                msg += " (previous run: %.2f/10, %+.2f)" % (pnote, note - pnote)

        if self.config.score:
            sect = report_nodes.EvaluationSection(msg)
            self.reporter.display_reports(sect)
+
+
+# some reporting functions ####################################################
+
+
def report_total_messages_stats(sect, stats, previous_stats):
    """Build the total errors / warnings table, comparing with the last run."""
    categories = ("convention", "refactor", "warning", "error")
    rows = ["type", "number", "previous", "difference"]
    rows += checkers.table_lines_from_stats(stats, previous_stats, categories)
    sect.append(report_nodes.Table(children=rows, cols=4, rheaders=1))
+
+
def report_messages_stats(sect, stats, _):
    """Build the per-message-id occurrence table, most frequent first."""
    counts = stats["by_msg"]
    if not counts:
        # no message was emitted: skip this report entirely
        raise exceptions.EmptyReportError()
    # informational ('I') messages are excluded from the ranking
    ranked = sorted(
        (
            (count, msg_id)
            for msg_id, count in counts.items()
            if not msg_id.startswith("I")
        ),
        reverse=True,
    )
    rows = ("message id", "occurrences")
    for count, msg_id in ranked:
        rows += (msg_id, str(count))
    sect.append(report_nodes.Table(children=rows, cols=2, rheaders=1))
+
+
def report_messages_by_module_stats(sect, stats, _):
    """Build the errors / warnings percentage-by-module table."""
    if len(stats["by_module"]) == 1:
        # a single analysed module makes this breakdown pointless
        raise exceptions.EmptyReportError()
    # percentage of each message category attributable to each module
    percentages = collections.defaultdict(dict)
    for category in ("fatal", "error", "warning", "refactor", "convention"):
        overall = stats[category]
        for module_name, module_stats in stats["by_module"].items():
            count = module_stats[category]
            if overall == 0:
                share = 0
            else:
                share = float(count * 100) / overall
            percentages[module_name][category] = share
    # rank modules by (error, warning, refactor, convention), descending
    ordered = sorted(
        (
            (
                info["error"],
                info["warning"],
                info["refactor"],
                info["convention"],
                module_name,
            )
            for module_name, info in percentages.items()
        ),
        reverse=True,
    )
    rows = ["module", "error", "warning", "refactor", "convention"]
    for entry in ordered:
        # Don't report clean modules.
        if all(value == 0 for value in entry[:-1]):
            continue
        rows.append(entry[-1])
        rows.extend("%.2f" % value for value in entry[:-1])
    if len(rows) == 5:
        # only the header row is left: every module was clean
        raise exceptions.EmptyReportError()
    sect.append(report_nodes.Table(children=rows, cols=5, rheaders=1))
+
+
+# utilities ###################################################################
+
+
class ArgumentPreprocessingError(Exception):
    """Error raised when preprocessing the command-line arguments fails."""
+
+
def preprocess_options(args, search_for):
    """Process a subset of options ahead of the regular option parsing.

    Scan *args* for long options whose name is a key of *search_for*;
    each match is removed from *args* (in place) and its callback is
    invoked immediately with the option name and its value.

    Fix: the "takes no value" error message read "doesn't expects a value";
    corrected to "doesn't expect a value".

    :param list args: command-line arguments; matched options (and their
        values) are deleted from this list in place.
    :param dict search_for: maps an option name to a ``(callback, takearg)``
        pair, where ``callback(option, value)`` is called on a match and
        ``takearg`` tells whether the option expects a value.
    :raises ArgumentPreprocessingError: if an option expecting a value has
        none, or a value is given to an option taking none.
    """
    i = 0
    while i < len(args):
        arg = args[i]
        if arg.startswith("--"):
            try:
                option, val = arg[2:].split("=", 1)
            except ValueError:
                option, val = arg[2:], None
            try:
                cb, takearg = search_for[option]
            except KeyError:
                i += 1
            else:
                del args[i]
                if takearg and val is None:
                    # "--opt value" form: the value is the next argument
                    if i >= len(args) or args[i].startswith("-"):
                        msg = "Option %s expects a value" % option
                        raise ArgumentPreprocessingError(msg)
                    val = args[i]
                    del args[i]
                elif not takearg and val is not None:
                    msg = "Option %s doesn't expect a value" % option
                    raise ArgumentPreprocessingError(msg)
                cb(option, val)
        else:
            i += 1
+
+
@contextlib.contextmanager
def fix_import_path(args):
    """Prepare sys.path for running the linter checks.

    Within this context, each of the given arguments is importable.
    Paths are added to sys.path in corresponding order to the arguments.
    We avoid adding duplicate directories to sys.path.
    `sys.path` is reset to its original value upon exiting this context.
    """
    original = sys.path[:]
    additions = []
    for arg in args:
        python_path = _get_python_path(arg)
        if python_path not in additions:
            additions.append(python_path)
    sys.path[:] = additions + ["."] + sys.path
    try:
        yield
    finally:
        sys.path[:] = original
+
+
+class Run:
+ """helper class to use as main for pylint :
+
+ run(*sys.argv[1:])
+ """
+
+ LinterClass = PyLinter
+ option_groups = (
+ (
+ "Commands",
+ "Options which are actually commands. Options in this \
+group are mutually exclusive.",
+ ),
+ )
+
+ def __init__(self, args, reporter=None, do_exit=True):
+ self._rcfile = None
+ self._plugins = []
+ self.verbose = None
+ try:
+ preprocess_options(
+ args,
+ {
+ # option: (callback, takearg)
+ "init-hook": (cb_init_hook, True),
+ "rcfile": (self.cb_set_rcfile, True),
+ "load-plugins": (self.cb_add_plugins, True),
+ "verbose": (self.cb_verbose_mode, False),
+ },
+ )
+ except ArgumentPreprocessingError as ex:
+ print(ex, file=sys.stderr)
+ sys.exit(32)
+
+ self.linter = linter = self.LinterClass(
+ (
+ (
+ "rcfile",
+ {
+ "action": "callback",
+ "callback": lambda *args: 1,
+ "type": "string",
+ "metavar": "<file>",
+ "help": "Specify a configuration file.",
+ },
+ ),
+ (
+ "init-hook",
+ {
+ "action": "callback",
+ "callback": lambda *args: 1,
+ "type": "string",
+ "metavar": "<code>",
+ "level": 1,
+ "help": "Python code to execute, usually for sys.path "
+ "manipulation such as pygtk.require().",
+ },
+ ),
+ (
+ "help-msg",
+ {
+ "action": "callback",
+ "type": "string",
+ "metavar": "<msg-id>",
+ "callback": self.cb_help_message,
+ "group": "Commands",
+ "help": "Display a help message for the given message id and "
+ "exit. The value may be a comma separated list of message ids.",
+ },
+ ),
+ (
+ "list-msgs",
+ {
+ "action": "callback",
+ "metavar": "<msg-id>",
+ "callback": self.cb_list_messages,
+ "group": "Commands",
+ "level": 1,
+ "help": "Generate pylint's messages.",
+ },
+ ),
+ (
+ "list-msgs-enabled",
+ {
+ "action": "callback",
+ "metavar": "<msg-id>",
+ "callback": self.cb_list_messages_enabled,
+ "group": "Commands",
+ "level": 1,
+ "help": "Display a list of what messages are enabled "
+ "and disabled with the given configuration.",
+ },
+ ),
+ (
+ "list-groups",
+ {
+ "action": "callback",
+ "metavar": "<msg-id>",
+ "callback": self.cb_list_groups,
+ "group": "Commands",
+ "level": 1,
+ "help": "List pylint's message groups.",
+ },
+ ),
+ (
+ "list-conf-levels",
+ {
+ "action": "callback",
+ "callback": cb_list_confidence_levels,
+ "group": "Commands",
+ "level": 1,
+ "help": "Generate pylint's confidence levels.",
+ },
+ ),
+ (
+ "full-documentation",
+ {
+ "action": "callback",
+ "metavar": "<msg-id>",
+ "callback": self.cb_full_documentation,
+ "group": "Commands",
+ "level": 1,
+ "help": "Generate pylint's full documentation.",
+ },
+ ),
+ (
+ "generate-rcfile",
+ {
+ "action": "callback",
+ "callback": self.cb_generate_config,
+ "group": "Commands",
+ "help": "Generate a sample configuration file according to "
+ "the current configuration. You can put other options "
+ "before this one to get them in the generated "
+ "configuration.",
+ },
+ ),
+ (
+ "generate-man",
+ {
+ "action": "callback",
+ "callback": self.cb_generate_manpage,
+ "group": "Commands",
+ "help": "Generate pylint's man page.",
+ "hide": True,
+ },
+ ),
+ (
+ "errors-only",
+ {
+ "action": "callback",
+ "callback": self.cb_error_mode,
+ "short": "E",
+ "help": "In error mode, checkers without error messages are "
+ "disabled and for others, only the ERROR messages are "
+ "displayed, and no reports are done by default.",
+ },
+ ),
+ (
+ "py3k",
+ {
+ "action": "callback",
+ "callback": self.cb_python3_porting_mode,
+ "help": "In Python 3 porting mode, all checkers will be "
+ "disabled and only messages emitted by the porting "
+ "checker will be displayed.",
+ },
+ ),
+ (
+ "verbose",
+ {
+ "action": "callback",
+ "callback": self.cb_verbose_mode,
+ "short": "v",
+ "help": "In verbose mode, extra non-checker-related info "
+ "will be displayed.",
+ },
+ ),
+ ),
+ option_groups=self.option_groups,
+ pylintrc=self._rcfile,
+ )
+ # register standard checkers
+ linter.load_default_plugins()
+ # load command line plugins
+ linter.load_plugin_modules(self._plugins)
+ # add some help section
+ linter.add_help_section("Environment variables", config.ENV_HELP, level=1)
+ # pylint: disable=bad-continuation
+ linter.add_help_section(
+ "Output",
+ "Using the default text output, the message format is : \n"
+ " \n"
+ " MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n"
+ " \n"
+ "There are 5 kind of message types : \n"
+ " * (C) convention, for programming standard violation \n"
+ " * (R) refactor, for bad code smell \n"
+ " * (W) warning, for python specific problems \n"
+ " * (E) error, for probable bugs in the code \n"
+ " * (F) fatal, if an error occurred which prevented pylint from doing further\n"
+ "processing.\n",
+ level=1,
+ )
+ linter.add_help_section(
+ "Output status code",
+ "Pylint should leave with following status code: \n"
+ " * 0 if everything went fine \n"
+ " * 1 if a fatal message was issued \n"
+ " * 2 if an error message was issued \n"
+ " * 4 if a warning message was issued \n"
+ " * 8 if a refactor message was issued \n"
+ " * 16 if a convention message was issued \n"
+ " * 32 on usage error \n"
+ " \n"
+ "status 1 to 16 will be bit-ORed so you can know which different categories has\n"
+ "been issued by analysing pylint output status code\n",
+ level=1,
+ )
+ # read configuration
+ linter.disable("I")
+ linter.enable("c-extension-no-member")
+ linter.read_config_file(verbose=self.verbose)
+ config_parser = linter.cfgfile_parser
+ # run init hook, if present, before loading plugins
+ if config_parser.has_option("MASTER", "init-hook"):
+ cb_init_hook(
+ "init-hook", utils._unquote(config_parser.get("MASTER", "init-hook"))
+ )
+ # is there some additional plugins in the file configuration, in
+ if config_parser.has_option("MASTER", "load-plugins"):
+ plugins = utils._splitstrip(config_parser.get("MASTER", "load-plugins"))
+ linter.load_plugin_modules(plugins)
+ # now we can load file config and command line, plugins (which can
+ # provide options) have been registered
+ linter.load_config_file()
+
+ if reporter:
+ # if a custom reporter is provided as argument, it may be overridden
+ # by file parameters, so re-set it here, but before command line
+ # parsing so it's still overrideable by command line option
+ linter.set_reporter(reporter)
+ try:
+ args = linter.load_command_line_configuration(args)
+ except SystemExit as exc:
+ if exc.code == 2: # bad options
+ exc.code = 32
+ raise
+ if not args:
+ print(linter.help())
+ sys.exit(32)
+
+ if linter.config.jobs < 0:
+ print(
+ "Jobs number (%d) should be greater than or equal to 0"
+ % linter.config.jobs,
+ file=sys.stderr,
+ )
+ sys.exit(32)
+ if linter.config.jobs > 1 or linter.config.jobs == 0:
+ if multiprocessing is None:
+ print(
+ "Multiprocessing library is missing, " "fallback to single process",
+ file=sys.stderr,
+ )
+ linter.set_option("jobs", 1)
+ else:
+ if linter.config.jobs == 0:
+ linter.config.jobs = _cpu_count()
+
+ # We have loaded configuration from config file and command line. Now, we can
+ # load plugin specific configuration.
+ linter.load_plugin_configuration()
+
+ # insert current working directory to the python path to have a correct
+ # behaviour
+ with fix_import_path(args):
+ linter.check(args)
+ linter.generate_reports()
+ if do_exit:
+ if linter.config.exit_zero:
+ sys.exit(0)
+ else:
+ sys.exit(self.linter.msg_status)
+
+ def cb_set_rcfile(self, name, value):
+ """callback for option preprocessing (i.e. before option parsing)"""
+ self._rcfile = value
+
+ def cb_add_plugins(self, name, value):
+ """callback for option preprocessing (i.e. before option parsing)"""
+ self._plugins.extend(utils._splitstrip(value))
+
+ def cb_error_mode(self, *args, **kwargs):
+ """error mode:
+ * disable all but error messages
+ * disable the 'miscellaneous' checker which can be safely deactivated in
+ debug
+ * disable reports
+ * do not save execution information
+ """
+ self.linter.error_mode()
+
+ def cb_generate_config(self, *args, **kwargs):
+ """optik callback for sample config file generation"""
+ self.linter.generate_config(skipsections=("COMMANDS",))
+ sys.exit(0)
+
+ def cb_generate_manpage(self, *args, **kwargs):
+ """optik callback for sample config file generation"""
+ self.linter.generate_manpage(__pkginfo__)
+ sys.exit(0)
+
+ def cb_help_message(self, option, optname, value, parser):
+ """optik callback for printing some help about a particular message"""
+ self.linter.msgs_store.help_message(utils._splitstrip(value))
+ sys.exit(0)
+
+ def cb_full_documentation(self, option, optname, value, parser):
+ """optik callback for printing full documentation"""
+ self.linter.print_full_documentation()
+ sys.exit(0)
+
+ def cb_list_messages(self, option, optname, value, parser):
+ """optik callback for printing available messages"""
+ self.linter.msgs_store.list_messages()
+ sys.exit(0)
+
+ def cb_list_messages_enabled(self, option, optname, value, parser):
+ """optik callback for printing available messages"""
+ self.linter.list_messages_enabled()
+ sys.exit(0)
+
+ def cb_list_groups(self, *args, **kwargs):
+ """List all the check groups that pylint knows about
+
+ These should be useful to know what check groups someone can disable
+ or enable.
+ """
+ for check in self.linter.get_checker_names():
+ print(check)
+ sys.exit(0)
+
+ def cb_python3_porting_mode(self, *args, **kwargs):
+ """Activate only the python3 porting checker."""
+ self.linter.python3_porting_mode()
+
+ def cb_verbose_mode(self, *args, **kwargs):
+ self.verbose = True
+
+
+def cb_list_confidence_levels(option, optname, value, parser):
+ for level in interfaces.CONFIDENCE_LEVELS:
+ print("%-18s: %s" % level)
+ sys.exit(0)
+
+
+def cb_init_hook(optname, value):
+ """exec arbitrary code to set sys.path for instance"""
+ exec(value) # pylint: disable=exec-used
+
+
+if __name__ == "__main__":
+ Run(sys.argv[1:])
diff --git a/venv/Lib/site-packages/pylint/message/__init__.py b/venv/Lib/site-packages/pylint/message/__init__.py
new file mode 100644
index 0000000..5ac8411
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/__init__.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 Vincent
+# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014-2015 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 LCD 47 <lcd047@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2014 Damien Nozay <damien.nozay@gmail.com>
+# Copyright (c) 2015 Aru Sahni <arusahni@gmail.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016 xmo-odoo <xmo-odoo@users.noreply.github.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Pierre Sassoulas <pierre.sassoulas@cea.fr>
+# Copyright (c) 2017 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2017 Chris Lamb <chris@chris-lamb.co.uk>
+# Copyright (c) 2017 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2017 Thomas Hisch <t.hisch@gmail.com>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2017 Craig Citro <craigcitro@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Pierre Sassoulas <pierre.sassoulas@wisebim.fr>
+# Copyright (c) 2018 Reverb C <reverbc@users.noreply.github.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""All the classes related to Message handling."""
+
+from pylint.message.message import Message
+from pylint.message.message_definition import MessageDefinition
+from pylint.message.message_definition_store import MessageDefinitionStore
+from pylint.message.message_handler_mix_in import MessagesHandlerMixIn
+from pylint.message.message_id_store import MessageIdStore
+
+__all__ = [
+ "Message",
+ "MessageDefinition",
+ "MessageDefinitionStore",
+ "MessagesHandlerMixIn",
+ "MessageIdStore",
+]
diff --git a/venv/Lib/site-packages/pylint/message/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/message/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..f3462f1
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/message/__pycache__/message.cpython-37.pyc b/venv/Lib/site-packages/pylint/message/__pycache__/message.cpython-37.pyc
new file mode 100644
index 0000000..6c89577
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/__pycache__/message.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/message/__pycache__/message_definition.cpython-37.pyc b/venv/Lib/site-packages/pylint/message/__pycache__/message_definition.cpython-37.pyc
new file mode 100644
index 0000000..952803b
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/__pycache__/message_definition.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/message/__pycache__/message_definition_store.cpython-37.pyc b/venv/Lib/site-packages/pylint/message/__pycache__/message_definition_store.cpython-37.pyc
new file mode 100644
index 0000000..ce6f867
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/__pycache__/message_definition_store.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/message/__pycache__/message_handler_mix_in.cpython-37.pyc b/venv/Lib/site-packages/pylint/message/__pycache__/message_handler_mix_in.cpython-37.pyc
new file mode 100644
index 0000000..23cc65a
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/__pycache__/message_handler_mix_in.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/message/__pycache__/message_id_store.cpython-37.pyc b/venv/Lib/site-packages/pylint/message/__pycache__/message_id_store.cpython-37.pyc
new file mode 100644
index 0000000..f132b88
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/__pycache__/message_id_store.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/message/message.py b/venv/Lib/site-packages/pylint/message/message.py
new file mode 100644
index 0000000..e2b0320
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/message.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+
+import collections
+
+from pylint.constants import MSG_TYPES
+
+_MsgBase = collections.namedtuple(
+ "_MsgBase",
+ [
+ "msg_id",
+ "symbol",
+ "msg",
+ "C",
+ "category",
+ "confidence",
+ "abspath",
+ "path",
+ "module",
+ "obj",
+ "line",
+ "column",
+ ],
+)
+
+
+class Message(_MsgBase):
+ """This class represent a message to be issued by the reporters"""
+
+ def __new__(cls, msg_id, symbol, location, msg, confidence):
+ return _MsgBase.__new__(
+ cls,
+ msg_id,
+ symbol,
+ msg,
+ msg_id[0],
+ MSG_TYPES[msg_id[0]],
+ confidence,
+ *location
+ )
+
+ def format(self, template):
+ """Format the message according to the given template.
+
+ The template format is the one of the format method :
+ cf. http://docs.python.org/2/library/string.html#formatstrings
+ """
+ # For some reason, _asdict on derived namedtuples does not work with
+ # Python 3.4. Needs some investigation.
+ return template.format(**dict(zip(self._fields, self)))
diff --git a/venv/Lib/site-packages/pylint/message/message_definition.py b/venv/Lib/site-packages/pylint/message/message_definition.py
new file mode 100644
index 0000000..e54c15a
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/message_definition.py
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import sys
+
+from pylint.constants import MSG_TYPES
+from pylint.exceptions import InvalidMessageError
+from pylint.utils import normalize_text
+
+
+class MessageDefinition:
+ def __init__(
+ self,
+ checker,
+ msgid,
+ msg,
+ description,
+ symbol,
+ scope,
+ minversion=None,
+ maxversion=None,
+ old_names=None,
+ ):
+ self.checker = checker
+ self.check_msgid(msgid)
+ self.msgid = msgid
+ self.symbol = symbol
+ self.msg = msg
+ self.description = description
+ self.scope = scope
+ self.minversion = minversion
+ self.maxversion = maxversion
+ self.old_names = []
+ if old_names:
+ for old_msgid, old_symbol in old_names:
+ self.check_msgid(old_msgid)
+ self.old_names.append([old_msgid, old_symbol])
+
+ @staticmethod
+ def check_msgid(msgid: str) -> None:
+ if len(msgid) != 5:
+ raise InvalidMessageError("Invalid message id %r" % msgid)
+ if msgid[0] not in MSG_TYPES:
+ raise InvalidMessageError("Bad message type %s in %r" % (msgid[0], msgid))
+
+ def __repr__(self):
+ return "MessageDefinition:%s (%s)" % (self.symbol, self.msgid)
+
+ def __str__(self):
+ return "%s:\n%s %s" % (repr(self), self.msg, self.description)
+
+ def may_be_emitted(self):
+ """return True if message may be emitted using the current interpreter"""
+ if self.minversion is not None and self.minversion > sys.version_info:
+ return False
+ if self.maxversion is not None and self.maxversion <= sys.version_info:
+ return False
+ return True
+
+ def format_help(self, checkerref=False):
+ """return the help string for the given message id"""
+ desc = self.description
+ if checkerref:
+ desc += " This message belongs to the %s checker." % self.checker.name
+ title = self.msg
+ if self.minversion or self.maxversion:
+ restr = []
+ if self.minversion:
+ restr.append("< %s" % ".".join([str(n) for n in self.minversion]))
+ if self.maxversion:
+ restr.append(">= %s" % ".".join([str(n) for n in self.maxversion]))
+ restr = " or ".join(restr)
+ if checkerref:
+ desc += " It can't be emitted when using Python %s." % restr
+ else:
+ desc += " This message can't be emitted when using Python %s." % restr
+ msg_help = normalize_text(" ".join(desc.split()), indent=" ")
+ message_id = "%s (%s)" % (self.symbol, self.msgid)
+ if title != "%s":
+ title = title.splitlines()[0]
+ return ":%s: *%s*\n%s" % (message_id, title.rstrip(" "), msg_help)
+ return ":%s:\n%s" % (message_id, msg_help)
diff --git a/venv/Lib/site-packages/pylint/message/message_definition_store.py b/venv/Lib/site-packages/pylint/message/message_definition_store.py
new file mode 100644
index 0000000..f7d87b6
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/message_definition_store.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import collections
+
+from pylint.exceptions import UnknownMessageError
+from pylint.message.message_id_store import MessageIdStore
+
+
+class MessageDefinitionStore:
+
+ """The messages store knows information about every possible message definition but has
+ no particular state during analysis.
+ """
+
+ def __init__(self):
+ self.message_id_store = MessageIdStore()
+ # Primary registry for all active messages definitions.
+ # It contains the 1:1 mapping from msgid to MessageDefinition.
+ # Keys are msgid, values are MessageDefinition
+ self._messages_definitions = {}
+ # MessageDefinition kept by category
+ self._msgs_by_category = collections.defaultdict(list)
+
+ @property
+ def messages(self) -> list:
+ """The list of all active messages."""
+ return self._messages_definitions.values()
+
+ def register_messages_from_checker(self, checker):
+ """Register all messages definitions from a checker.
+
+ :param BaseChecker checker:
+ """
+ checker.check_consistency()
+ for message in checker.messages:
+ self.register_message(message)
+
+ def register_message(self, message):
+ """Register a MessageDefinition with consistency in mind.
+
+ :param MessageDefinition message: The message definition being added.
+ """
+ self.message_id_store.register_message_definition(message)
+ self._messages_definitions[message.msgid] = message
+ self._msgs_by_category[message.msgid[0]].append(message.msgid)
+
+ def get_message_definitions(self, msgid_or_symbol: str) -> list:
+ """Returns the Message object for this message.
+ :param str msgid_or_symbol: msgid_or_symbol may be either a numeric or symbolic id.
+ :raises UnknownMessageError: if the message id is not defined.
+ :rtype: List of MessageDefinition
+ :return: A message definition corresponding to msgid_or_symbol
+ """
+ return [
+ self._messages_definitions[m]
+ for m in self.message_id_store.get_active_msgids(msgid_or_symbol)
+ ]
+
+ def get_msg_display_string(self, msgid_or_symbol: str):
+ """Generates a user-consumable representation of a message. """
+ message_definitions = self.get_message_definitions(msgid_or_symbol)
+ if len(message_definitions) == 1:
+ return repr(message_definitions[0].symbol)
+ return repr([md.symbol for md in message_definitions])
+
+ def help_message(self, msgids_or_symbols: list):
+ """Display help messages for the given message identifiers"""
+ for msgids_or_symbol in msgids_or_symbols:
+ try:
+ for message_definition in self.get_message_definitions(
+ msgids_or_symbol
+ ):
+ print(message_definition.format_help(checkerref=True))
+ print("")
+ except UnknownMessageError as ex:
+ print(ex)
+ print("")
+ continue
+
+ def list_messages(self):
+ """Output full messages list documentation in ReST format. """
+ messages = sorted(self._messages_definitions.values(), key=lambda m: m.msgid)
+ for message in messages:
+ if not message.may_be_emitted():
+ continue
+ print(message.format_help(checkerref=False))
+ print("")
diff --git a/venv/Lib/site-packages/pylint/message/message_handler_mix_in.py b/venv/Lib/site-packages/pylint/message/message_handler_mix_in.py
new file mode 100644
index 0000000..813cdd7
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/message_handler_mix_in.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import sys
+
+from pylint.constants import (
+ _SCOPE_EXEMPT,
+ MAIN_CHECKER_NAME,
+ MSG_STATE_CONFIDENCE,
+ MSG_STATE_SCOPE_CONFIG,
+ MSG_STATE_SCOPE_MODULE,
+ MSG_TYPES,
+ MSG_TYPES_LONG,
+ MSG_TYPES_STATUS,
+ WarningScope,
+)
+from pylint.exceptions import InvalidMessageError, UnknownMessageError
+from pylint.interfaces import UNDEFINED
+from pylint.message.message import Message
+from pylint.utils import get_module_and_frameid, get_rst_section, get_rst_title
+
+
+class MessagesHandlerMixIn:
+ """a mix-in class containing all the messages related methods for the main
+ lint class
+ """
+
+ __by_id_managed_msgs = [] # type: ignore
+
+ def __init__(self):
+ self._msgs_state = {}
+ self.msg_status = 0
+
+ def _checker_messages(self, checker):
+ for known_checker in self._checkers[checker.lower()]:
+ for msgid in known_checker.msgs:
+ yield msgid
+
+ @classmethod
+ def clear_by_id_managed_msgs(cls):
+ cls.__by_id_managed_msgs.clear()
+
+ @classmethod
+ def get_by_id_managed_msgs(cls):
+ return cls.__by_id_managed_msgs
+
+ def _register_by_id_managed_msg(self, msgid, line, is_disabled=True):
+ """If the msgid is a numeric one, then register it to inform the user
+ it could furnish instead a symbolic msgid."""
+ try:
+ message_definitions = self.msgs_store.get_message_definitions(msgid)
+ for message_definition in message_definitions:
+ if msgid == message_definition.msgid:
+ MessagesHandlerMixIn.__by_id_managed_msgs.append(
+ (
+ self.current_name,
+ message_definition.msgid,
+ message_definition.symbol,
+ line,
+ is_disabled,
+ )
+ )
+ except UnknownMessageError:
+ pass
+
+ def disable(self, msgid, scope="package", line=None, ignore_unknown=False):
+ """don't output message of the given id"""
+ self._set_msg_status(
+ msgid, enable=False, scope=scope, line=line, ignore_unknown=ignore_unknown
+ )
+ self._register_by_id_managed_msg(msgid, line)
+
+ def enable(self, msgid, scope="package", line=None, ignore_unknown=False):
+ """reenable message of the given id"""
+ self._set_msg_status(
+ msgid, enable=True, scope=scope, line=line, ignore_unknown=ignore_unknown
+ )
+ self._register_by_id_managed_msg(msgid, line, is_disabled=False)
+
+ def _set_msg_status(
+ self, msgid, enable, scope="package", line=None, ignore_unknown=False
+ ):
+ assert scope in ("package", "module")
+
+ if msgid == "all":
+ for _msgid in MSG_TYPES:
+ self._set_msg_status(_msgid, enable, scope, line, ignore_unknown)
+ if enable and not self._python3_porting_mode:
+ # Don't activate the python 3 porting checker if it wasn't activated explicitly.
+ self.disable("python3")
+ return
+
+ # msgid is a category?
+ category_id = msgid.upper()
+ if category_id not in MSG_TYPES:
+ category_id = MSG_TYPES_LONG.get(category_id)
+ if category_id is not None:
+ for _msgid in self.msgs_store._msgs_by_category.get(category_id):
+ self._set_msg_status(_msgid, enable, scope, line)
+ return
+
+ # msgid is a checker name?
+ if msgid.lower() in self._checkers:
+ for checker in self._checkers[msgid.lower()]:
+ for _msgid in checker.msgs:
+ self._set_msg_status(_msgid, enable, scope, line)
+ return
+
+ # msgid is report id?
+ if msgid.lower().startswith("rp"):
+ if enable:
+ self.enable_report(msgid)
+ else:
+ self.disable_report(msgid)
+ return
+
+ try:
+ # msgid is a symbolic or numeric msgid.
+ message_definitions = self.msgs_store.get_message_definitions(msgid)
+ except UnknownMessageError:
+ if ignore_unknown:
+ return
+ raise
+ for message_definition in message_definitions:
+ self._set_one_msg_status(scope, message_definition, line, enable)
+
+ def _set_one_msg_status(self, scope, msg, line, enable):
+ if scope == "module":
+ self.file_state.set_msg_status(msg, line, enable)
+ if not enable and msg.symbol != "locally-disabled":
+ self.add_message(
+ "locally-disabled", line=line, args=(msg.symbol, msg.msgid)
+ )
+ else:
+ msgs = self._msgs_state
+ msgs[msg.msgid] = enable
+ # sync configuration object
+ self.config.enable = [
+ self._message_symbol(mid) for mid, val in sorted(msgs.items()) if val
+ ]
+ self.config.disable = [
+ self._message_symbol(mid)
+ for mid, val in sorted(msgs.items())
+ if not val
+ ]
+
+ def _message_symbol(self, msgid):
+ """Get the message symbol of the given message id
+
+ Return the original message id if the message does not
+ exist.
+ """
+ try:
+ return [md.symbol for md in self.msgs_store.get_message_definitions(msgid)]
+ except UnknownMessageError:
+ return msgid
+
+ def get_message_state_scope(self, msgid, line=None, confidence=UNDEFINED):
+ """Returns the scope at which a message was enabled/disabled."""
+ if self.config.confidence and confidence.name not in self.config.confidence:
+ return MSG_STATE_CONFIDENCE
+ try:
+ if line in self.file_state._module_msgs_state[msgid]:
+ return MSG_STATE_SCOPE_MODULE
+ except (KeyError, TypeError):
+ return MSG_STATE_SCOPE_CONFIG
+ return None
+
+ def is_message_enabled(self, msg_descr, line=None, confidence=None):
+ """return true if the message associated to the given message id is
+ enabled
+
+ msgid may be either a numeric or symbolic message id.
+ """
+ if self.config.confidence and confidence:
+ if confidence.name not in self.config.confidence:
+ return False
+ try:
+ message_definitions = self.msgs_store.get_message_definitions(msg_descr)
+ msgids = [md.msgid for md in message_definitions]
+ except UnknownMessageError:
+ # The linter checks for messages that are not registered
+ # due to version mismatch, just treat them as message IDs
+ # for now.
+ msgids = [msg_descr]
+ for msgid in msgids:
+ if self.is_one_message_enabled(msgid, line):
+ return True
+ return False
+
+ def is_one_message_enabled(self, msgid, line):
+ if line is None:
+ return self._msgs_state.get(msgid, True)
+ try:
+ return self.file_state._module_msgs_state[msgid][line]
+ except KeyError:
+ # Check if the message's line is after the maximum line existing in ast tree.
+ # This line won't appear in the ast tree and won't be referred in
+ #  self.file_state._module_msgs_state
+ # This happens for example with a commented line at the end of a module.
+ max_line_number = self.file_state.get_effective_max_line_number()
+ if max_line_number and line > max_line_number:
+ fallback = True
+ lines = self.file_state._raw_module_msgs_state.get(msgid, {})
+
+ # Doesn't consider scopes, as a disable can be in a different scope
+ # than that of the current line.
+ closest_lines = reversed(
+ [
+ (message_line, enable)
+ for message_line, enable in lines.items()
+ if message_line <= line
+ ]
+ )
+ last_line, is_enabled = next(closest_lines, (None, None))
+ if last_line is not None:
+ fallback = is_enabled
+
+ return self._msgs_state.get(msgid, fallback)
+ return self._msgs_state.get(msgid, True)
+
+ def add_message(
+ self, msgid, line=None, node=None, args=None, confidence=None, col_offset=None
+ ):
+ """Adds a message given by ID or name.
+
+ If provided, the message string is expanded using args.
+
+ AST checkers must provide the node argument (but may optionally
+ provide line if the line number is different), raw and token checkers
+ must provide the line argument.
+ """
+ if confidence is None:
+ confidence = UNDEFINED
+ message_definitions = self.msgs_store.get_message_definitions(msgid)
+ for message_definition in message_definitions:
+ self.add_one_message(
+ message_definition, line, node, args, confidence, col_offset
+ )
+
+ @staticmethod
+ def check_message_definition(message_definition, line, node):
+ if message_definition.msgid[0] not in _SCOPE_EXEMPT:
+ # Fatal messages and reports are special, the node/scope distinction
+ # does not apply to them.
+ if message_definition.scope == WarningScope.LINE:
+ if line is None:
+ raise InvalidMessageError(
+ "Message %s must provide line, got None"
+ % message_definition.msgid
+ )
+ if node is not None:
+ raise InvalidMessageError(
+ "Message %s must only provide line, "
+ "got line=%s, node=%s" % (message_definition.msgid, line, node)
+ )
+ elif message_definition.scope == WarningScope.NODE:
+ # Node-based warnings may provide an override line.
+ if node is None:
+ raise InvalidMessageError(
+ "Message %s must provide Node, got None"
+ % message_definition.msgid
+ )
+
+ def add_one_message(
+ self, message_definition, line, node, args, confidence, col_offset
+ ):
+ self.check_message_definition(message_definition, line, node)
+ if line is None and node is not None:
+ line = node.fromlineno
+ if col_offset is None and hasattr(node, "col_offset"):
+ col_offset = node.col_offset
+
+ # should this message be displayed
+ if not self.is_message_enabled(message_definition.msgid, line, confidence):
+ self.file_state.handle_ignored_message(
+ self.get_message_state_scope(
+ message_definition.msgid, line, confidence
+ ),
+ message_definition.msgid,
+ line,
+ node,
+ args,
+ confidence,
+ )
+ return
+ # update stats
+ msg_cat = MSG_TYPES[message_definition.msgid[0]]
+ self.msg_status |= MSG_TYPES_STATUS[message_definition.msgid[0]]
+ self.stats[msg_cat] += 1
+ self.stats["by_module"][self.current_name][msg_cat] += 1
+ try:
+ self.stats["by_msg"][message_definition.symbol] += 1
+ except KeyError:
+ self.stats["by_msg"][message_definition.symbol] = 1
+ # expand message ?
+ msg = message_definition.msg
+ if args:
+ msg %= args
+ # get module and object
+ if node is None:
+ module, obj = self.current_name, ""
+ abspath = self.current_file
+ else:
+ module, obj = get_module_and_frameid(node)
+ abspath = node.root().file
+ path = abspath.replace(self.reporter.path_strip_prefix, "", 1)
+ # add the message
+ self.reporter.handle_message(
+ Message(
+ message_definition.msgid,
+ message_definition.symbol,
+ (abspath, path, module, obj, line or 1, col_offset or 0),
+ msg,
+ confidence,
+ )
+ )
+
+ def _get_checkers_infos(self):
+ by_checker = {}
+ for checker in self.get_checkers():
+ name = checker.name
+ if name != "master":
+ try:
+ by_checker[name]["checker"] = checker
+ by_checker[name]["options"] += checker.options_and_values()
+ by_checker[name]["msgs"].update(checker.msgs)
+ by_checker[name]["reports"] += checker.reports
+ except KeyError:
+ by_checker[name] = {
+ "checker": checker,
+ "options": list(checker.options_and_values()),
+ "msgs": dict(checker.msgs),
+ "reports": list(checker.reports),
+ }
+ return by_checker
+
+ def get_checkers_documentation(self):
+ result = get_rst_title("Pylint global options and switches", "-")
+ result += """
+Pylint provides global options and switches.
+
+"""
+ for checker in self.get_checkers():
+ name = checker.name
+ if name == MAIN_CHECKER_NAME:
+ if checker.options:
+ for section, options in checker.options_by_section():
+ if section is None:
+ title = "General options"
+ else:
+ title = "%s options" % section.capitalize()
+ result += get_rst_title(title, "~")
+ result += "%s\n" % get_rst_section(None, options)
+ result += get_rst_title("Pylint checkers' options and switches", "-")
+ result += """\
+
+Pylint checkers can provide three set of features:
+
+* options that control their execution,
+* messages that they can raise,
+* reports that they can generate.
+
+Below is a list of all checkers and their features.
+
+"""
+ by_checker = self._get_checkers_infos()
+ for checker in sorted(by_checker):
+ information = by_checker[checker]
+ checker = information["checker"]
+ del information["checker"]
+ result += checker.get_full_documentation(**information)
+ return result
+
+ def print_full_documentation(self, stream=None):
+ """output a full documentation in ReST format"""
+ if not stream:
+ stream = sys.stdout
+ print(self.get_checkers_documentation()[:-1], file=stream)
+
+ @staticmethod
+ def _print_checker_doc(information, stream=None):
+ """Helper method for print_full_documentation.
+
+ Also used by doc/exts/pylint_extensions.py.
+ """
+ if not stream:
+ stream = sys.stdout
+ checker = information["checker"]
+ del information["checker"]
+ print(checker.get_full_documentation(**information)[:-1], file=stream)
diff --git a/venv/Lib/site-packages/pylint/message/message_id_store.py b/venv/Lib/site-packages/pylint/message/message_id_store.py
new file mode 100644
index 0000000..756888a
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/message/message_id_store.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+from typing import List
+
+from pylint.exceptions import InvalidMessageError, UnknownMessageError
+
+
+class MessageIdStore:
+
+ """The MessageIdStore store MessageId and make sure that there is a 1-1 relation between msgid and symbol."""
+
+ def __init__(self):
+ self.__msgid_to_symbol = {}
+ self.__symbol_to_msgid = {}
+ self.__old_names = {}
+
+ def __len__(self):
+ return len(self.__msgid_to_symbol)
+
+ def __repr__(self):
+ result = "MessageIdStore: [\n"
+ for msgid, symbol in self.__msgid_to_symbol.items():
+ result += " - {msgid} ({symbol})\n".format(msgid=msgid, symbol=symbol)
+ result += "]"
+ return result
+
+ def get_symbol(self, msgid: str) -> str:
+ return self.__msgid_to_symbol[msgid]
+
+ def get_msgid(self, symbol: str) -> str:
+ return self.__symbol_to_msgid[symbol]
+
+ def register_message_definition(self, message_definition):
+ self.check_msgid_and_symbol(message_definition.msgid, message_definition.symbol)
+ self.add_msgid_and_symbol(message_definition.msgid, message_definition.symbol)
+ for old_msgid, old_symbol in message_definition.old_names:
+ self.check_msgid_and_symbol(old_msgid, old_symbol)
+ self.add_legacy_msgid_and_symbol(
+ old_msgid, old_symbol, message_definition.msgid
+ )
+
+ def add_msgid_and_symbol(self, msgid: str, symbol: str) -> None:
+ """Add valid message id.
+
+ There is a little duplication with add_legacy_msgid_and_symbol to avoid a function call,
+ this is called a lot at initialization."""
+ self.__msgid_to_symbol[msgid] = symbol
+ self.__symbol_to_msgid[symbol] = msgid
+
+ def add_legacy_msgid_and_symbol(self, msgid: str, symbol: str, new_msgid: str):
+ """Add valid legacy message id.
+
+ There is a little duplication with add_msgid_and_symbol to avoid a function call,
+ this is called a lot at initialization."""
+ self.__msgid_to_symbol[msgid] = symbol
+ self.__symbol_to_msgid[symbol] = msgid
+ existing_old_names = self.__old_names.get(msgid, [])
+ existing_old_names.append(new_msgid)
+ self.__old_names[msgid] = existing_old_names
+
+ def check_msgid_and_symbol(self, msgid: str, symbol: str) -> None:
+ existing_msgid = self.__symbol_to_msgid.get(symbol)
+ existing_symbol = self.__msgid_to_symbol.get(msgid)
+ if existing_symbol is None and existing_msgid is None:
+ return
+ if existing_msgid is not None:
+ if existing_msgid != msgid:
+ self._raise_duplicate_msgid(symbol, msgid, existing_msgid)
+ if existing_symbol != symbol:
+ self._raise_duplicate_symbol(msgid, symbol, existing_symbol)
+
+ @staticmethod
+ def _raise_duplicate_symbol(msgid, symbol, other_symbol):
+ """Raise an error when a symbol is duplicated.
+
+ :param str msgid: The msgid corresponding to the symbols
+ :param str symbol: Offending symbol
+ :param str other_symbol: Other offending symbol
+ :raises InvalidMessageError:"""
+ symbols = [symbol, other_symbol]
+ symbols.sort()
+ error_message = "Message id '{msgid}' cannot have both ".format(msgid=msgid)
+ error_message += "'{other_symbol}' and '{symbol}' as symbolic name.".format(
+ other_symbol=symbols[0], symbol=symbols[1]
+ )
+ raise InvalidMessageError(error_message)
+
+ @staticmethod
+ def _raise_duplicate_msgid(symbol, msgid, other_msgid):
+ """Raise an error when a msgid is duplicated.
+
+ :param str symbol: The symbol corresponding to the msgids
+ :param str msgid: Offending msgid
+ :param str other_msgid: Other offending msgid
+ :raises InvalidMessageError:"""
+ msgids = [msgid, other_msgid]
+ msgids.sort()
+ error_message = (
+ "Message symbol '{symbol}' cannot be used for "
+ "'{other_msgid}' and '{msgid}' at the same time."
+ " If you're creating an 'old_names' use 'old-{symbol}' as the old symbol."
+ ).format(symbol=symbol, other_msgid=msgids[0], msgid=msgids[1])
+ raise InvalidMessageError(error_message)
+
+ def get_active_msgids(self, msgid_or_symbol: str) -> List[str]:
+ """Return msgids but the input can be a symbol."""
+ # Only msgid can have a digit as second letter
+ is_msgid = msgid_or_symbol[1:].isdigit()
+ if is_msgid:
+ msgid = msgid_or_symbol.upper()
+ symbol = self.__msgid_to_symbol.get(msgid)
+ else:
+ msgid = self.__symbol_to_msgid.get(msgid_or_symbol)
+ symbol = msgid_or_symbol
+ if not msgid or not symbol:
+ error_msg = "No such message id or symbol '{msgid_or_symbol}'.".format(
+ msgid_or_symbol=msgid_or_symbol
+ )
+ raise UnknownMessageError(error_msg)
+ # logging.debug(
+ # "Return for {} and msgid {} is {}".format(
+ # msgid_or_symbol, msgid, self.__old_names.get(msgid, [msgid])
+ # )
+ # )
+ return self.__old_names.get(msgid, [msgid])
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__init__.py b/venv/Lib/site-packages/pylint/pyreverse/__init__.py
new file mode 100644
index 0000000..9ca1da5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__init__.py
@@ -0,0 +1,8 @@
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""
+pyreverse.extensions
+"""
+
+__revision__ = "$Id $"
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..6054dd9
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diadefslib.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diadefslib.cpython-37.pyc
new file mode 100644
index 0000000..64bdd6b
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diadefslib.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diagrams.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diagrams.cpython-37.pyc
new file mode 100644
index 0000000..cd5a663
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diagrams.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/inspector.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/inspector.cpython-37.pyc
new file mode 100644
index 0000000..0bcfb4d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/inspector.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/main.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/main.cpython-37.pyc
new file mode 100644
index 0000000..c8f9398
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/main.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/utils.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 0000000..1711f15
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/utils.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/vcgutils.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/vcgutils.cpython-37.pyc
new file mode 100644
index 0000000..f1a93f5
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/vcgutils.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/__pycache__/writer.cpython-37.pyc b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/writer.cpython-37.pyc
new file mode 100644
index 0000000..a0ac15c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/__pycache__/writer.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/pyreverse/diadefslib.py b/venv/Lib/site-packages/pylint/pyreverse/diadefslib.py
new file mode 100644
index 0000000..de4e9fd
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/diadefslib.py
@@ -0,0 +1,238 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006, 2008-2010, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""handle diagram generation options for class diagram or default diagrams
+"""
+
+import astroid
+
+from pylint.pyreverse.diagrams import ClassDiagram, PackageDiagram
+from pylint.pyreverse.utils import LocalsVisitor
+
+BUILTINS_NAME = "builtins"
+
+# diagram generators ##########################################################
+
+
+class DiaDefGenerator:
+ """handle diagram generation options"""
+
+ def __init__(self, linker, handler):
+ """common Diagram Handler initialization"""
+ self.config = handler.config
+ self._set_default_options()
+ self.linker = linker
+ self.classdiagram = None # defined by subclasses
+
+ def get_title(self, node):
+ """get title for objects"""
+ title = node.name
+ if self.module_names:
+ title = "%s.%s" % (node.root().name, title)
+ return title
+
+ def _set_option(self, option):
+ """activate some options if not explicitly deactivated"""
+ # if we have a class diagram, we want more information by default;
+ # so if the option is None, we return True
+ if option is None:
+ return bool(self.config.classes)
+ return option
+
+ def _set_default_options(self):
+ """set different default options with _default dictionary"""
+ self.module_names = self._set_option(self.config.module_names)
+ all_ancestors = self._set_option(self.config.all_ancestors)
+ all_associated = self._set_option(self.config.all_associated)
+ anc_level, association_level = (0, 0)
+ if all_ancestors:
+ anc_level = -1
+ if all_associated:
+ association_level = -1
+ if self.config.show_ancestors is not None:
+ anc_level = self.config.show_ancestors
+ if self.config.show_associated is not None:
+ association_level = self.config.show_associated
+ self.anc_level, self.association_level = anc_level, association_level
+
+ def _get_levels(self):
+ """help function for search levels"""
+ return self.anc_level, self.association_level
+
+ def show_node(self, node):
+ """true if builtins and not show_builtins"""
+ if self.config.show_builtin:
+ return True
+ return node.root().name != BUILTINS_NAME
+
+ def add_class(self, node):
+ """visit one class and add it to diagram"""
+ self.linker.visit(node)
+ self.classdiagram.add_object(self.get_title(node), node)
+
+ def get_ancestors(self, node, level):
+ """return ancestor nodes of a class node"""
+ if level == 0:
+ return
+ for ancestor in node.ancestors(recurs=False):
+ if not self.show_node(ancestor):
+ continue
+ yield ancestor
+
+ def get_associated(self, klass_node, level):
+ """return associated nodes of a class node"""
+ if level == 0:
+ return
+ for association_nodes in list(klass_node.instance_attrs_type.values()) + list(
+ klass_node.locals_type.values()
+ ):
+ for node in association_nodes:
+ if isinstance(node, astroid.Instance):
+ node = node._proxied
+ if not (isinstance(node, astroid.ClassDef) and self.show_node(node)):
+ continue
+ yield node
+
+ def extract_classes(self, klass_node, anc_level, association_level):
+ """extract recursively classes related to klass_node"""
+ if self.classdiagram.has_node(klass_node) or not self.show_node(klass_node):
+ return
+ self.add_class(klass_node)
+
+ for ancestor in self.get_ancestors(klass_node, anc_level):
+ self.extract_classes(ancestor, anc_level - 1, association_level)
+
+ for node in self.get_associated(klass_node, association_level):
+ self.extract_classes(node, anc_level, association_level - 1)
+
+
+class DefaultDiadefGenerator(LocalsVisitor, DiaDefGenerator):
+ """generate minimum diagram definition for the project :
+
+ * a package diagram including project's modules
+ * a class diagram including project's classes
+ """
+
+ def __init__(self, linker, handler):
+ DiaDefGenerator.__init__(self, linker, handler)
+ LocalsVisitor.__init__(self)
+
+ def visit_project(self, node):
+ """visit a pyreverse.utils.Project node
+
+ create a diagram definition for packages
+ """
+ mode = self.config.mode
+ if len(node.modules) > 1:
+ self.pkgdiagram = PackageDiagram("packages %s" % node.name, mode)
+ else:
+ self.pkgdiagram = None
+ self.classdiagram = ClassDiagram("classes %s" % node.name, mode)
+
+ def leave_project(self, node): # pylint: disable=unused-argument
+ """leave the pyreverse.utils.Project node
+
+ return the generated diagram definition
+ """
+ if self.pkgdiagram:
+ return self.pkgdiagram, self.classdiagram
+ return (self.classdiagram,)
+
+ def visit_module(self, node):
+ """visit an astroid.Module node
+
+ add this class to the package diagram definition
+ """
+ if self.pkgdiagram:
+ self.linker.visit(node)
+ self.pkgdiagram.add_object(node.name, node)
+
+ def visit_classdef(self, node):
+ """visit an astroid.Class node
+
+ add this class to the class diagram definition
+ """
+ anc_level, association_level = self._get_levels()
+ self.extract_classes(node, anc_level, association_level)
+
+ def visit_importfrom(self, node):
+ """visit astroid.ImportFrom and catch modules for package diagram
+ """
+ if self.pkgdiagram:
+ self.pkgdiagram.add_from_depend(node, node.modname)
+
+
+class ClassDiadefGenerator(DiaDefGenerator):
+ """generate a class diagram definition including all classes related to a
+ given class
+ """
+
+ def __init__(self, linker, handler):
+ DiaDefGenerator.__init__(self, linker, handler)
+
+ def class_diagram(self, project, klass):
+ """return a class diagram definition for the given klass and its
+ related klasses
+ """
+
+ self.classdiagram = ClassDiagram(klass, self.config.mode)
+ if len(project.modules) > 1:
+ module, klass = klass.rsplit(".", 1)
+ module = project.get_module(module)
+ else:
+ module = project.modules[0]
+ klass = klass.split(".")[-1]
+ klass = next(module.ilookup(klass))
+
+ anc_level, association_level = self._get_levels()
+ self.extract_classes(klass, anc_level, association_level)
+ return self.classdiagram
+
+
+# diagram handler #############################################################
+
+
+class DiadefsHandler:
+ """handle diagram definitions :
+
+ get it from user (i.e. xml files) or generate them
+ """
+
+ def __init__(self, config):
+ self.config = config
+
+ def get_diadefs(self, project, linker):
+ """Get the diagrams configuration data
+
+ :param project:The pyreverse project
+ :type project: pyreverse.utils.Project
+ :param linker: The linker
+ :type linker: pyreverse.inspector.Linker(IdGeneratorMixIn, LocalsVisitor)
+
+ :returns: The list of diagram definitions
+ :rtype: list(:class:`pylint.pyreverse.diagrams.ClassDiagram`)
+ """
+
+ # read and interpret diagram definitions (Diadefs)
+ diagrams = []
+ generator = ClassDiadefGenerator(linker, self)
+ for klass in self.config.classes:
+ diagrams.append(generator.class_diagram(project, klass))
+ if not diagrams:
+ diagrams = DefaultDiadefGenerator(linker, self).visit(project)
+ for diagram in diagrams:
+ diagram.extract_relationships()
+ return diagrams
diff --git a/venv/Lib/site-packages/pylint/pyreverse/diagrams.py b/venv/Lib/site-packages/pylint/pyreverse/diagrams.py
new file mode 100644
index 0000000..b53b845
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/diagrams.py
@@ -0,0 +1,268 @@
+# Copyright (c) 2006, 2008-2010, 2012-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""diagram objects
+"""
+
+import astroid
+
+from pylint.checkers.utils import decorated_with_property
+from pylint.pyreverse.utils import FilterMixIn, is_interface
+
+
+class Figure:
+ """base class for counter handling"""
+
+
+class Relationship(Figure):
+ """a relation ship from an object in the diagram to another
+ """
+
+ def __init__(self, from_object, to_object, relation_type, name=None):
+ Figure.__init__(self)
+ self.from_object = from_object
+ self.to_object = to_object
+ self.type = relation_type
+ self.name = name
+
+
+class DiagramEntity(Figure):
+ """a diagram object, i.e. a label associated to an astroid node
+ """
+
+ def __init__(self, title="No name", node=None):
+ Figure.__init__(self)
+ self.title = title
+ self.node = node
+
+
+class ClassDiagram(Figure, FilterMixIn):
+ """main class diagram handling
+ """
+
+ TYPE = "class"
+
+ def __init__(self, title, mode):
+ FilterMixIn.__init__(self, mode)
+ Figure.__init__(self)
+ self.title = title
+ self.objects = []
+ self.relationships = {}
+ self._nodes = {}
+ self.depends = []
+
+ def get_relationships(self, role):
+ # sorted to get predictable (hence testable) results
+ return sorted(
+ self.relationships.get(role, ()),
+ key=lambda x: (x.from_object.fig_id, x.to_object.fig_id),
+ )
+
+ def add_relationship(self, from_object, to_object, relation_type, name=None):
+ """create a relation ship
+ """
+ rel = Relationship(from_object, to_object, relation_type, name)
+ self.relationships.setdefault(relation_type, []).append(rel)
+
+ def get_relationship(self, from_object, relation_type):
+ """return a relation ship or None
+ """
+ for rel in self.relationships.get(relation_type, ()):
+ if rel.from_object is from_object:
+ return rel
+ raise KeyError(relation_type)
+
+ def get_attrs(self, node):
+ """return visible attributes, possibly with class name"""
+ attrs = []
+ properties = [
+ (n, m)
+ for n, m in node.items()
+ if isinstance(m, astroid.FunctionDef) and decorated_with_property(m)
+ ]
+ for node_name, associated_nodes in (
+ list(node.instance_attrs_type.items())
+ + list(node.locals_type.items())
+ + properties
+ ):
+ if not self.show_attr(node_name):
+ continue
+ names = self.class_names(associated_nodes)
+ if names:
+ node_name = "%s : %s" % (node_name, ", ".join(names))
+ attrs.append(node_name)
+ return sorted(attrs)
+
+ def get_methods(self, node):
+ """return visible methods"""
+ methods = [
+ m
+ for m in node.values()
+ if isinstance(m, astroid.FunctionDef)
+ and not decorated_with_property(m)
+ and self.show_attr(m.name)
+ ]
+ return sorted(methods, key=lambda n: n.name)
+
+ def add_object(self, title, node):
+ """create a diagram object
+ """
+ assert node not in self._nodes
+ ent = DiagramEntity(title, node)
+ self._nodes[node] = ent
+ self.objects.append(ent)
+
+ def class_names(self, nodes):
+ """return class names if needed in diagram"""
+ names = []
+ for node in nodes:
+ if isinstance(node, astroid.Instance):
+ node = node._proxied
+ if (
+ isinstance(node, astroid.ClassDef)
+ and hasattr(node, "name")
+ and not self.has_node(node)
+ ):
+ if node.name not in names:
+ node_name = node.name
+ names.append(node_name)
+ return names
+
+ def nodes(self):
+ """return the list of underlying nodes
+ """
+ return self._nodes.keys()
+
+ def has_node(self, node):
+ """return true if the given node is included in the diagram
+ """
+ return node in self._nodes
+
+ def object_from_node(self, node):
+ """return the diagram object mapped to node
+ """
+ return self._nodes[node]
+
+ def classes(self):
+ """return all class nodes in the diagram"""
+ return [o for o in self.objects if isinstance(o.node, astroid.ClassDef)]
+
+ def classe(self, name):
+ """return a class by its name, raise KeyError if not found
+ """
+ for klass in self.classes():
+ if klass.node.name == name:
+ return klass
+ raise KeyError(name)
+
+ def extract_relationships(self):
+ """extract relation ships between nodes in the diagram
+ """
+ for obj in self.classes():
+ node = obj.node
+ obj.attrs = self.get_attrs(node)
+ obj.methods = self.get_methods(node)
+ # shape
+ if is_interface(node):
+ obj.shape = "interface"
+ else:
+ obj.shape = "class"
+ # inheritance link
+ for par_node in node.ancestors(recurs=False):
+ try:
+ par_obj = self.object_from_node(par_node)
+ self.add_relationship(obj, par_obj, "specialization")
+ except KeyError:
+ continue
+ # implements link
+ for impl_node in node.implements:
+ try:
+ impl_obj = self.object_from_node(impl_node)
+ self.add_relationship(obj, impl_obj, "implements")
+ except KeyError:
+ continue
+ # associations link
+ for name, values in list(node.instance_attrs_type.items()) + list(
+ node.locals_type.items()
+ ):
+ for value in values:
+ if value is astroid.Uninferable:
+ continue
+ if isinstance(value, astroid.Instance):
+ value = value._proxied
+ try:
+ associated_obj = self.object_from_node(value)
+ self.add_relationship(associated_obj, obj, "association", name)
+ except KeyError:
+ continue
+
+
+class PackageDiagram(ClassDiagram):
+ """package diagram handling
+ """
+
+ TYPE = "package"
+
+ def modules(self):
+ """return all module nodes in the diagram"""
+ return [o for o in self.objects if isinstance(o.node, astroid.Module)]
+
+ def module(self, name):
+ """return a module by its name, raise KeyError if not found
+ """
+ for mod in self.modules():
+ if mod.node.name == name:
+ return mod
+ raise KeyError(name)
+
+ def get_module(self, name, node):
+ """return a module by its name, looking also for relative imports;
+ raise KeyError if not found
+ """
+ for mod in self.modules():
+ mod_name = mod.node.name
+ if mod_name == name:
+ return mod
+ # search for fullname of relative import modules
+ package = node.root().name
+ if mod_name == "%s.%s" % (package, name):
+ return mod
+ if mod_name == "%s.%s" % (package.rsplit(".", 1)[0], name):
+ return mod
+ raise KeyError(name)
+
+ def add_from_depend(self, node, from_module):
+ """add dependencies created by from-imports
+ """
+ mod_name = node.root().name
+ obj = self.module(mod_name)
+ if from_module not in obj.node.depends:
+ obj.node.depends.append(from_module)
+
+ def extract_relationships(self):
+ """extract relation ships between nodes in the diagram
+ """
+ ClassDiagram.extract_relationships(self)
+ for obj in self.classes():
+ # ownership
+ try:
+ mod = self.object_from_node(obj.node.root())
+ self.add_relationship(obj, mod, "ownership")
+ except KeyError:
+ continue
+ for obj in self.modules():
+ obj.shape = "package"
+ # dependencies
+ for dep_name in obj.node.depends:
+ try:
+ dep = self.get_module(dep_name, obj.node)
+ except KeyError:
+ continue
+ self.add_relationship(obj, dep, "depends")
diff --git a/venv/Lib/site-packages/pylint/pyreverse/inspector.py b/venv/Lib/site-packages/pylint/pyreverse/inspector.py
new file mode 100644
index 0000000..702b108
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/inspector.py
@@ -0,0 +1,357 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""
+Visitor doing some postprocessing on the astroid tree.
+Try to resolve definitions (namespace) dictionary, relationship...
+"""
+import collections
+import os
+import traceback
+
+import astroid
+from astroid import bases, exceptions, manager, modutils, node_classes
+
+from pylint.pyreverse import utils
+
+
+def _iface_hdlr(_):
+ """Handler used by interfaces to handle suspicious interface nodes."""
+ return True
+
+
+def _astroid_wrapper(func, modname):
+ print("parsing %s..." % modname)
+ try:
+ return func(modname)
+ except exceptions.AstroidBuildingException as exc:
+ print(exc)
+ except Exception as exc: # pylint: disable=broad-except
+ traceback.print_exc()
+
+
+def interfaces(node, herited=True, handler_func=_iface_hdlr):
+ """Return an iterator on interfaces implemented by the given class node."""
+ try:
+ implements = bases.Instance(node).getattr("__implements__")[0]
+ except exceptions.NotFoundError:
+ return
+ if not herited and implements.frame() is not node:
+ return
+ found = set()
+ missing = False
+ for iface in node_classes.unpack_infer(implements):
+ if iface is astroid.Uninferable:
+ missing = True
+ continue
+ if iface not in found and handler_func(iface):
+ found.add(iface)
+ yield iface
+ if missing:
+ raise exceptions.InferenceError()
+
+
+class IdGeneratorMixIn:
+ """Mixin adding the ability to generate integer uid."""
+
+ def __init__(self, start_value=0):
+ self.id_count = start_value
+
+ def init_counter(self, start_value=0):
+ """init the id counter
+ """
+ self.id_count = start_value
+
+ def generate_id(self):
+ """generate a new identifier
+ """
+ self.id_count += 1
+ return self.id_count
+
+
+class Linker(IdGeneratorMixIn, utils.LocalsVisitor):
+ """Walk on the project tree and resolve relationships.
+
+ According to options the following attributes may be
+ added to visited nodes:
+
+ * uid,
+ a unique identifier for the node (on astroid.Project, astroid.Module,
+ astroid.Class and astroid.locals_type). Only if the linker
+ has been instantiated with tag=True parameter (False by default).
+
+ * Function
+ a mapping from locals names to their bounded value, which may be a
+ constant like a string or an integer, or an astroid node
+ (on astroid.Module, astroid.Class and astroid.Function).
+
+ * instance_attrs_type
+ as locals_type but for klass member attributes (only on astroid.Class)
+
+ * implements,
+ list of implemented interface _objects_ (only on astroid.Class nodes)
+ """
+
+ def __init__(self, project, inherited_interfaces=0, tag=False):
+ IdGeneratorMixIn.__init__(self)
+ utils.LocalsVisitor.__init__(self)
+ # take inherited interface in consideration or not
+ self.inherited_interfaces = inherited_interfaces
+ # tag nodes or not
+ self.tag = tag
+ # visited project
+ self.project = project
+
+ def visit_project(self, node):
+ """visit a pyreverse.utils.Project node
+
+ * optionally tag the node with a unique id
+ """
+ if self.tag:
+ node.uid = self.generate_id()
+ for module in node.modules:
+ self.visit(module)
+
+ def visit_package(self, node):
+ """visit an astroid.Package node
+
+ * optionally tag the node with a unique id
+ """
+ if self.tag:
+ node.uid = self.generate_id()
+ for subelmt in node.values():
+ self.visit(subelmt)
+
+ def visit_module(self, node):
+ """visit an astroid.Module node
+
+ * set the locals_type mapping
+ * set the depends mapping
+ * optionally tag the node with a unique id
+ """
+ if hasattr(node, "locals_type"):
+ return
+ node.locals_type = collections.defaultdict(list)
+ node.depends = []
+ if self.tag:
+ node.uid = self.generate_id()
+
+ def visit_classdef(self, node):
+ """visit an astroid.Class node
+
+ * set the locals_type and instance_attrs_type mappings
+ * set the implements list and build it
+ * optionally tag the node with a unique id
+ """
+ if hasattr(node, "locals_type"):
+ return
+ node.locals_type = collections.defaultdict(list)
+ if self.tag:
+ node.uid = self.generate_id()
+ # resolve ancestors
+ for baseobj in node.ancestors(recurs=False):
+ specializations = getattr(baseobj, "specializations", [])
+ specializations.append(node)
+ baseobj.specializations = specializations
+ # resolve instance attributes
+ node.instance_attrs_type = collections.defaultdict(list)
+ for assignattrs in node.instance_attrs.values():
+ for assignattr in assignattrs:
+ self.handle_assignattr_type(assignattr, node)
+ # resolve implemented interface
+ try:
+ node.implements = list(interfaces(node, self.inherited_interfaces))
+ except astroid.InferenceError:
+ node.implements = ()
+
+ def visit_functiondef(self, node):
+ """visit an astroid.Function node
+
+ * set the locals_type mapping
+ * optionally tag the node with a unique id
+ """
+ if hasattr(node, "locals_type"):
+ return
+ node.locals_type = collections.defaultdict(list)
+ if self.tag:
+ node.uid = self.generate_id()
+
+ link_project = visit_project
+ link_module = visit_module
+ link_class = visit_classdef
+ link_function = visit_functiondef
+
+ def visit_assignname(self, node):
+ """visit an astroid.AssignName node
+
+ handle locals_type
+ """
+ # avoid double parsing done by different Linkers.visit
+ # running over the same project:
+ if hasattr(node, "_handled"):
+ return
+ node._handled = True
+ if node.name in node.frame():
+ frame = node.frame()
+ else:
+ # the name has been defined as 'global' in the frame and belongs
+ # there.
+ frame = node.root()
+ try:
+ if not hasattr(frame, "locals_type"):
+ # If the frame doesn't have a locals_type yet,
+ # it means it wasn't yet visited. Visit it now
+ # to add what's missing from it.
+ if isinstance(frame, astroid.ClassDef):
+ self.visit_classdef(frame)
+ elif isinstance(frame, astroid.FunctionDef):
+ self.visit_functiondef(frame)
+ else:
+ self.visit_module(frame)
+
+ current = frame.locals_type[node.name]
+ values = set(node.infer())
+ frame.locals_type[node.name] = list(set(current) | values)
+ except astroid.InferenceError:
+ pass
+
+ @staticmethod
+ def handle_assignattr_type(node, parent):
+ """handle an astroid.assignattr node
+
+ handle instance_attrs_type
+ """
+ try:
+ values = set(node.infer())
+ current = set(parent.instance_attrs_type[node.attrname])
+ parent.instance_attrs_type[node.attrname] = list(current | values)
+ except astroid.InferenceError:
+ pass
+
+ def visit_import(self, node):
+ """visit an astroid.Import node
+
+ resolve module dependencies
+ """
+ context_file = node.root().file
+ for name in node.names:
+ relative = modutils.is_relative(name[0], context_file)
+ self._imported_module(node, name[0], relative)
+
+ def visit_importfrom(self, node):
+ """visit an astroid.ImportFrom node
+
+ resolve module dependencies
+ """
+ basename = node.modname
+ context_file = node.root().file
+ if context_file is not None:
+ relative = modutils.is_relative(basename, context_file)
+ else:
+ relative = False
+ for name in node.names:
+ if name[0] == "*":
+ continue
+ # analyze dependencies
+ fullname = "%s.%s" % (basename, name[0])
+ if fullname.find(".") > -1:
+ try:
+ fullname = modutils.get_module_part(fullname, context_file)
+ except ImportError:
+ continue
+ if fullname != basename:
+ self._imported_module(node, fullname, relative)
+
+ def compute_module(self, context_name, mod_path):
+ """return true if the module should be added to dependencies"""
+ package_dir = os.path.dirname(self.project.path)
+ if context_name == mod_path:
+ return 0
+ if modutils.is_standard_module(mod_path, (package_dir,)):
+ return 1
+ return 0
+
+ def _imported_module(self, node, mod_path, relative):
+ """Notify an imported module, used to analyze dependencies"""
+ module = node.root()
+ context_name = module.name
+ if relative:
+ mod_path = "%s.%s" % (".".join(context_name.split(".")[:-1]), mod_path)
+ if self.compute_module(context_name, mod_path):
+ # handle dependencies
+ if not hasattr(module, "depends"):
+ module.depends = []
+ mod_paths = module.depends
+ if mod_path not in mod_paths:
+ mod_paths.append(mod_path)
+
+
+class Project:
+ """a project handle a set of modules / packages"""
+
+ def __init__(self, name=""):
+ self.name = name
+ self.path = None
+ self.modules = []
+ self.locals = {}
+ self.__getitem__ = self.locals.__getitem__
+ self.__iter__ = self.locals.__iter__
+ self.values = self.locals.values
+ self.keys = self.locals.keys
+ self.items = self.locals.items
+
+ def add_module(self, node):
+ self.locals[node.name] = node
+ self.modules.append(node)
+
+ def get_module(self, name):
+ return self.locals[name]
+
+ def get_children(self):
+ return self.modules
+
+ def __repr__(self):
+ return "<Project %r at %s (%s modules)>" % (
+ self.name,
+ id(self),
+ len(self.modules),
+ )
+
+
+def project_from_files(
+ files, func_wrapper=_astroid_wrapper, project_name="no name", black_list=("CVS",)
+):
+ """return a Project from a list of files or modules"""
+ # build the project representation
+ astroid_manager = manager.AstroidManager()
+ project = Project(project_name)
+ for something in files:
+ if not os.path.exists(something):
+ fpath = modutils.file_from_modpath(something.split("."))
+ elif os.path.isdir(something):
+ fpath = os.path.join(something, "__init__.py")
+ else:
+ fpath = something
+ ast = func_wrapper(astroid_manager.ast_from_file, fpath)
+ if ast is None:
+ continue
+ project.path = project.path or ast.file
+ project.add_module(ast)
+ base_name = ast.name
+ # recurse in package except if __init__ was explicitly given
+ if ast.package and something.find("__init__") == -1:
+ # recurse on others packages / modules if this is a package
+ for fpath in modutils.get_module_files(
+ os.path.dirname(ast.file), black_list
+ ):
+ ast = func_wrapper(astroid_manager.ast_from_file, fpath)
+ if ast is None or ast.name == base_name:
+ continue
+ project.add_module(ast)
+ return project
diff --git a/venv/Lib/site-packages/pylint/pyreverse/main.py b/venv/Lib/site-packages/pylint/pyreverse/main.py
new file mode 100644
index 0000000..652b954
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/main.py
@@ -0,0 +1,214 @@
+# Copyright (c) 2008-2010, 2012-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Alexander Pervakov <frost.nzcr4@jagmort.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""
+ %prog [options] <packages>
+
+ create UML diagrams for classes and modules in <packages>
+"""
+import os
+import subprocess
+import sys
+
+from pylint.config import ConfigurationMixIn
+from pylint.pyreverse import writer
+from pylint.pyreverse.diadefslib import DiadefsHandler
+from pylint.pyreverse.inspector import Linker, project_from_files
+from pylint.pyreverse.utils import insert_default_options
+
# command line option table consumed by ConfigurationMixIn: each entry is a
# (long-name, optparse-style option dict) pair, optionally with a "short" flag
OPTIONS = (
    (
        "filter-mode",
        dict(
            short="f",
            default="PUB_ONLY",
            dest="mode",
            type="string",
            action="store",
            metavar="<mode>",
            help="""filter attributes and functions according to
    <mode>. Correct modes are :
                            'PUB_ONLY' filter all non public attributes
                                [DEFAULT], equivalent to PRIVATE+SPECIAL_A
                            'ALL' no filter
                            'SPECIAL' filter Python special functions
                                except constructor
                            'OTHER' filter protected and private
                                attributes""",
        ),
    ),
    (
        "class",
        dict(
            short="c",
            action="append",
            metavar="<class>",
            dest="classes",
            default=[],
            help="create a class diagram with all classes related to <class>;\
 this uses by default the options -ASmy",
        ),
    ),
    (
        "show-ancestors",
        dict(
            short="a",
            action="store",
            metavar="<ancestor>",
            type="int",
            help="show <ancestor> generations of ancestor classes not in <projects>",
        ),
    ),
    (
        "all-ancestors",
        dict(
            short="A",
            default=None,
            help="show all ancestors off all classes in <projects>",
        ),
    ),
    (
        "show-associated",
        dict(
            short="s",
            action="store",
            metavar="<association_level>",
            type="int",
            help="show <association_level> levels of associated classes not in <projects>",
        ),
    ),
    (
        "all-associated",
        dict(
            short="S",
            default=None,
            help="show recursively all associated off all associated classes",
        ),
    ),
    (
        "show-builtin",
        dict(
            short="b",
            action="store_true",
            default=False,
            help="include builtin objects in representation of classes",
        ),
    ),
    (
        "module-names",
        dict(
            short="m",
            default=None,
            type="yn",
            metavar="[yn]",
            help="include module name in representation of classes",
        ),
    ),
    (
        "only-classnames",
        dict(
            short="k",
            action="store_true",
            default=False,
            help="don't show attributes and methods in the class boxes; \
this disables -f values",
        ),
    ),
    (
        "output",
        dict(
            short="o",
            dest="output_format",
            action="store",
            default="dot",
            metavar="<format>",
            help="create a *.<format> output file if format available.",
        ),
    ),
    (
        "ignore",
        {
            "type": "csv",
            "metavar": "<file[,file...]>",
            "dest": "black_list",
            "default": ("CVS",),
            "help": "Add files or directories to the blacklist. They "
            "should be base names, not paths.",
        },
    ),
    (
        "project",
        {
            "default": "",
            "type": "string",
            "short": "p",
            "metavar": "<project name>",
            "help": "set the project name.",
        },
    ),
)
+
+
def _check_graphviz_available(output_format):
    """Exit with status 32 when the Graphviz ``dot`` binary cannot be run.

    Formats other than 'dot'/'vcg' require Graphviz; running ``dot -V`` is
    the cheapest availability probe.
    """
    try:
        subprocess.call(["dot", "-V"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        return
    except OSError:
        pass
    print(
        "The output format '%s' is currently not available.\n"
        "Please install 'Graphviz' to have other output formats "
        "than 'dot' or 'vcg'." % output_format
    )
    sys.exit(32)
+
+
class Run(ConfigurationMixIn):
    """base class providing common behaviour for pyreverse commands"""

    options = OPTIONS  # type: ignore

    def __init__(self, args):
        ConfigurationMixIn.__init__(self, usage=__doc__)
        # merge the user's ~/.pyreverserc defaults into sys.argv before parsing
        insert_default_options()
        args = self.load_command_line_configuration()
        if self.config.output_format not in ("dot", "vcg"):
            # any other format needs the Graphviz 'dot' binary
            _check_graphviz_available(self.config.output_format)

        # NOTE: __init__ never returns normally — it exits with run()'s status
        sys.exit(self.run(args))

    def run(self, args):
        """checking arguments and run project"""
        if not args:
            print(self.help())
            return 1
        # insert current working directory to the python path to recognize
        # dependencies to local modules even if cwd is not in the PYTHONPATH
        sys.path.insert(0, os.getcwd())
        try:
            project = project_from_files(
                args,
                project_name=self.config.project,
                black_list=self.config.black_list,
            )
            linker = Linker(project, tag=True)
            handler = DiadefsHandler(self.config)
            diadefs = handler.get_diadefs(project, linker)
        finally:
            # always restore sys.path, even if parsing/linking fails
            sys.path.pop(0)

        if self.config.output_format == "vcg":
            writer.VCGWriter(self.config).write(diadefs)
        else:
            writer.DotWriter(self.config).write(diadefs)
        return 0
+
+
if __name__ == "__main__":
    # command-line entry point; Run() parses argv and calls sys.exit itself
    Run(sys.argv[1:])
diff --git a/venv/Lib/site-packages/pylint/pyreverse/utils.py b/venv/Lib/site-packages/pylint/pyreverse/utils.py
new file mode 100644
index 0000000..5a1e7e2
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/utils.py
@@ -0,0 +1,220 @@
+# Copyright (c) 2006, 2008, 2010, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""
+generic classes/functions for pyreverse core/extensions
+"""
+import os
+import re
+import sys
+
+########### pyreverse option utils ##############################
+
+
# name of the per-user pyreverse configuration file, looked up in $HOME
RCFILE = ".pyreverserc"


def get_default_options():
    """Read the config file and return the list of options found there.

    Returns an empty list when $HOME is unset or the file does not exist.
    """
    options = []
    home = os.environ.get("HOME", "")
    if home:
        rcfile = os.path.join(home, RCFILE)
        try:
            # use a context manager so the file handle is always closed
            # (the original `open(rcfile).read()` leaked the handle)
            with open(rcfile) as handle:
                options = handle.read().split()
        except IOError:
            pass  # ignore if no config file found
    return options
+
+
def insert_default_options():
    """prepend the user's default options to sys.argv, right after argv[0]"""
    # splicing preserves the options' original relative order, exactly like
    # inserting them one by one at index 1 in reversed order
    sys.argv[1:1] = get_default_options()
+
+
+# astroid utilities ###########################################################
+
SPECIAL = re.compile("^__[A-Za-z0-9]+[A-Za-z0-9_]*__$")
PRIVATE = re.compile("^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$")
PROTECTED = re.compile("^_[_A-Za-z0-9]*$")


def get_visibility(name):
    """return the visibility from a name: public, protected, private or special"""
    # the patterns overlap, so they must be tried from most to least specific
    for visibility, pattern in (
        ("special", SPECIAL),
        ("private", PRIVATE),
        ("protected", PROTECTED),
    ):
        if pattern.match(name):
            return visibility
    return "public"
+
+
# a class is considered "abstract" when its name contains 'Abstract'
ABSTRACT = re.compile("^.*Abstract.*")
# a name is considered "final" when it is all upper case / underscores
FINAL = re.compile("^[A-Z_]*$")


def is_abstract(node):
    """return a truthy value (a regex match object, else None) if the given
    class node corresponds to an abstract class definition
    """
    return ABSTRACT.match(node.name)


def is_final(node):
    """return a truthy value (a regex match object, else None) if the given
    class/function node corresponds to a final definition
    """
    return FINAL.match(node.name)


def is_interface(node):
    # bw compat; relies on the node's `type` attribute being "interface"
    return node.type == "interface"


def is_exception(node):
    # bw compat; relies on the node's `type` attribute being "exception"
    return node.type == "exception"
+
+
+# Helpers #####################################################################
+
# visibility filter bits, combined into the bitmask modes below
_CONSTRUCTOR = 1
_SPECIAL = 2
_PROTECTED = 4
_PRIVATE = 8
# mode name -> bitmask of visibilities to filter out
MODES = {
    "ALL": 0,
    "PUB_ONLY": _SPECIAL + _PROTECTED + _PRIVATE,
    "SPECIAL": _SPECIAL,
    "OTHER": _PROTECTED + _PRIVATE,
}
# visibility string (as returned by get_visibility) -> its filter bit
VIS_MOD = {
    "special": _SPECIAL,
    "protected": _PROTECTED,
    "private": _PRIVATE,
    "public": 0,
}
+
+
class FilterMixIn:
    """filter nodes according to a mode and nodes' visibility"""

    def __init__(self, mode):
        """init filter modes"""
        computed_mode = 0
        # mode is a '+'-separated list of MODES keys, e.g. "SPECIAL+OTHER"
        for mode_name in mode.split("+"):
            try:
                computed_mode += MODES[mode_name]
            except KeyError as ex:
                print("Unknown filter mode %s" % ex, file=sys.stderr)
        self.__mode = computed_mode

    def show_attr(self, node):
        """return true if the node should be treated"""
        visibility = get_visibility(getattr(node, "name", node))
        return not self.__mode & VIS_MOD[visibility]
+
+
class ASTWalker:
    """a walker visiting a tree in preorder, calling on the handler:

    * visit_<class name> on entering a node, where class name is the class of
      the node in lower case

    * leave_<class name> on leaving a node, where class name is the class of
      the node in lower case
    """

    def __init__(self, handler):
        self.handler = handler
        # node class -> (enter callback, leave callback), filled lazily
        self._cache = {}

    def walk(self, node, _done=None):
        """walk on the tree from <node>, getting callbacks from handler"""
        _done = set() if _done is None else _done
        if node in _done:
            # a node reachable twice means the tree is actually a cycle
            raise AssertionError((id(node), node, node.parent))
        _done.add(node)
        self.visit(node)
        for child in node.get_children():
            assert child is not node
            self.walk(child, _done)
        self.leave(node)
        assert node.parent is not node

    def get_callbacks(self, node):
        """get callbacks from handler for the visited node"""
        klass = node.__class__
        if klass not in self._cache:
            kid = klass.__name__.lower()
            # fall back to visit_default/leave_default when no specific
            # callback exists on the handler
            enter_cb = getattr(
                self.handler, "visit_%s" % kid, getattr(self.handler, "visit_default", None)
            )
            leave_cb = getattr(
                self.handler, "leave_%s" % kid, getattr(self.handler, "leave_default", None)
            )
            self._cache[klass] = (enter_cb, leave_cb)
        return self._cache[klass]

    def visit(self, node):
        """call the handler's enter callback for <node>, if any"""
        enter_cb = self.get_callbacks(node)[0]
        if enter_cb is not None:
            enter_cb(node)

    def leave(self, node):
        """call the handler's leave callback for <node>, if any"""
        leave_cb = self.get_callbacks(node)[1]
        if leave_cb is not None:
            leave_cb(node)
+
+
class LocalsVisitor(ASTWalker):
    """visit a project by traversing the locals dictionary

    The visitor is its own handler: subclasses provide visit_<klass> /
    leave_<klass> methods, resolved dynamically by get_callbacks().
    """

    def __init__(self):
        ASTWalker.__init__(self, self)
        # nodes already visited, to break cycles in the locals graph
        self._visited = set()

    def visit(self, node):
        """launch the visit starting from the given node"""
        if node in self._visited:
            return None

        self._visited.add(node)
        methods = self.get_callbacks(node)
        if methods[0] is not None:
            methods[0](node)
        if hasattr(node, "locals"):  # skip Instance and other proxy
            # recurse through the node's locals (node.values() iterates them)
            for local_node in node.values():
                self.visit(local_node)
        if methods[1] is not None:
            # the leave callback's return value is propagated to the caller
            return methods[1](node)
        return None
diff --git a/venv/Lib/site-packages/pylint/pyreverse/vcgutils.py b/venv/Lib/site-packages/pylint/pyreverse/vcgutils.py
new file mode 100644
index 0000000..89c6911
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/vcgutils.py
@@ -0,0 +1,229 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Functions to generate files readable with Georg Sander's vcg
+(Visualization of Compiler Graphs).
+
+You can download vcg at http://rw4.cs.uni-sb.de/~sander/html/gshome.html
+Note that vcg exists as a debian package.
+
+See vcg's documentation for explanation about the different values that
+maybe used for the functions parameters.
+"""
+
# legal values for enumerated vcg attributes, keyed by value family
ATTRS_VAL = {
    "algos": (
        "dfs",
        "tree",
        "minbackward",
        "left_to_right",
        "right_to_left",
        "top_to_bottom",
        "bottom_to_top",
        "maxdepth",
        "maxdepthslow",
        "mindepth",
        "mindepthslow",
        "mindegree",
        "minindegree",
        "minoutdegree",
        "maxdegree",
        "maxindegree",
        "maxoutdegree",
    ),
    "booleans": ("yes", "no"),
    "colors": (
        "black",
        "white",
        "blue",
        "red",
        "green",
        "yellow",
        "magenta",
        "lightgrey",
        "cyan",
        "darkgrey",
        "darkblue",
        "darkred",
        "darkgreen",
        "darkyellow",
        "darkmagenta",
        "darkcyan",
        "gold",
        "lightblue",
        "lightred",
        "lightgreen",
        "lightyellow",
        "lightmagenta",
        "lightcyan",
        "lilac",
        "turquoise",
        "aquamarine",
        "khaki",
        "purple",
        "yellowgreen",
        "pink",
        "orange",
        "orchid",
    ),
    "shapes": ("box", "ellipse", "rhomb", "triangle"),
    "textmodes": ("center", "left_justify", "right_justify"),
    "arrowstyles": ("solid", "line", "none"),
    "linestyles": ("continuous", "dashed", "dotted", "invisible"),
}

# meaning of possible values:
#   0 -> string (quoted on output)
#   1 -> int
#   list -> value must be in the list
GRAPH_ATTRS = {
    "title": 0,
    "label": 0,
    "color": ATTRS_VAL["colors"],
    "textcolor": ATTRS_VAL["colors"],
    "bordercolor": ATTRS_VAL["colors"],
    "width": 1,
    "height": 1,
    "borderwidth": 1,
    "textmode": ATTRS_VAL["textmodes"],
    "shape": ATTRS_VAL["shapes"],
    "shrink": 1,
    "stretch": 1,
    "orientation": ATTRS_VAL["algos"],
    "vertical_order": 1,
    "horizontal_order": 1,
    "xspace": 1,
    "yspace": 1,
    "layoutalgorithm": ATTRS_VAL["algos"],
    "late_edge_labels": ATTRS_VAL["booleans"],
    "display_edge_labels": ATTRS_VAL["booleans"],
    "dirty_edge_labels": ATTRS_VAL["booleans"],
    "finetuning": ATTRS_VAL["booleans"],
    "manhattan_edges": ATTRS_VAL["booleans"],
    "smanhattan_edges": ATTRS_VAL["booleans"],
    "port_sharing": ATTRS_VAL["booleans"],
    "edges": ATTRS_VAL["booleans"],
    "nodes": ATTRS_VAL["booleans"],
    "splines": ATTRS_VAL["booleans"],
}
NODE_ATTRS = {
    "title": 0,
    "label": 0,
    "color": ATTRS_VAL["colors"],
    "textcolor": ATTRS_VAL["colors"],
    "bordercolor": ATTRS_VAL["colors"],
    "width": 1,
    "height": 1,
    "borderwidth": 1,
    "textmode": ATTRS_VAL["textmodes"],
    "shape": ATTRS_VAL["shapes"],
    "shrink": 1,
    "stretch": 1,
    "vertical_order": 1,
    "horizontal_order": 1,
}
EDGE_ATTRS = {
    "sourcename": 0,
    "targetname": 0,
    "label": 0,
    "linestyle": ATTRS_VAL["linestyles"],
    "class": 1,
    "thickness": 0,
    "color": ATTRS_VAL["colors"],
    "textcolor": ATTRS_VAL["colors"],
    "arrowcolor": ATTRS_VAL["colors"],
    "backarrowcolor": ATTRS_VAL["colors"],
    "arrowsize": 1,
    "backarrowsize": 1,
    "arrowstyle": ATTRS_VAL["arrowstyles"],
    "backarrowstyle": ATTRS_VAL["arrowstyles"],
    "textmode": ATTRS_VAL["textmodes"],
    "priority": 1,
    "anchor": 1,
    "horizontal_order": 1,
}


# Misc utilities ###############################################################


class VCGPrinter:
    """A vcg graph writer emitting to an arbitrary text stream."""

    def __init__(self, output_stream):
        self._stream = output_stream
        self._indent = ""

    def open_graph(self, **args):
        """open a vcg graph"""
        self._stream.write("%sgraph:{\n" % self._indent)
        self._inc_indent()
        self._write_attributes(GRAPH_ATTRS, **args)

    def close_graph(self):
        """close a vcg graph"""
        self._dec_indent()
        self._stream.write("%s}\n" % self._indent)

    def node(self, title, **args):
        """draw a node"""
        self._stream.write('%snode: {title:"%s"' % (self._indent, title))
        self._write_attributes(NODE_ATTRS, **args)
        self._stream.write("}\n")

    def edge(self, from_node, to_node, edge_type="", **args):
        """draw an edge from a node to another"""
        self._stream.write(
            '%s%sedge: {sourcename:"%s" targetname:"%s"'
            % (self._indent, edge_type, from_node, to_node)
        )
        self._write_attributes(EDGE_ATTRS, **args)
        self._stream.write("}\n")

    # private ##################################################################

    def _write_attributes(self, attributes_dict, **args):
        """write graph, node or edge attributes

        Raises Exception for unknown attribute names or out-of-range values.
        """
        for key, value in args.items():
            try:
                _type = attributes_dict[key]
            except KeyError as ex:
                # chain the original KeyError so the traceback keeps context
                raise Exception(
                    """no such attribute %s
possible attributes are %s"""
                    % (key, attributes_dict.keys())
                ) from ex

            if not _type:
                # string attribute: value is quoted
                self._stream.write('%s%s:"%s"\n' % (self._indent, key, value))
            elif _type == 1:
                # integer attribute
                self._stream.write("%s%s:%s\n" % (self._indent, key, int(value)))
            elif value in _type:
                # enumerated attribute: value must belong to the allowed tuple
                self._stream.write("%s%s:%s\n" % (self._indent, key, value))
            else:
                raise Exception(
                    """value %s isn\'t correct for attribute %s
correct values are %s"""
                    % (value, key, _type)
                )

    def _inc_indent(self):
        """increment indentation"""
        self._indent = "  %s" % self._indent

    def _dec_indent(self):
        """decrement indentation"""
        self._indent = self._indent[:-2]
diff --git a/venv/Lib/site-packages/pylint/pyreverse/writer.py b/venv/Lib/site-packages/pylint/pyreverse/writer.py
new file mode 100644
index 0000000..609b1ef
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/pyreverse/writer.py
@@ -0,0 +1,213 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2008-2010, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Utilities for creating VCG and Dot diagrams"""
+
+from pylint.graph import DotBackend
+from pylint.pyreverse.utils import is_exception
+from pylint.pyreverse.vcgutils import VCGPrinter
+
+
class DiagramWriter:
    """base class for writing project diagrams"""

    def __init__(self, config, styles):
        self.config = config
        # unpack the four edge styles: package, inheritance, implementation
        # and association edges, in that order
        self.pkg_edges, self.inh_edges, self.imp_edges, self.association_edges = styles
        self.printer = None  # defined in set_printer

    def write(self, diadefs):
        """write files for <project> according to <diadefs>"""
        for diagram in diadefs:
            basename = diagram.title.strip().replace(" ", "_")
            file_name = "%s.%s" % (basename, self.config.output_format)
            self.set_printer(file_name, basename)
            writer_method = (
                self.write_classes if diagram.TYPE == "class" else self.write_packages
            )
            writer_method(diagram)
            self.close_graph()

    def write_packages(self, diagram):
        """write a package diagram"""
        # sorted to get predictable (hence testable) results
        for index, module in enumerate(
            sorted(diagram.modules(), key=lambda mod: mod.title)
        ):
            self.printer.emit_node(index, label=self.get_title(module), shape="box")
            module.fig_id = index
        # package dependencies
        for dep in diagram.get_relationships("depends"):
            self.printer.emit_edge(
                dep.from_object.fig_id, dep.to_object.fig_id, **self.pkg_edges
            )

    def write_classes(self, diagram):
        """write a class diagram"""
        # sorted to get predictable (hence testable) results
        for index, klass_obj in enumerate(
            sorted(diagram.objects, key=lambda klass: klass.title)
        ):
            self.printer.emit_node(index, **self.get_values(klass_obj))
            klass_obj.fig_id = index
        # inheritance links
        for link in diagram.get_relationships("specialization"):
            self.printer.emit_edge(
                link.from_object.fig_id, link.to_object.fig_id, **self.inh_edges
            )
        # implementation links
        for link in diagram.get_relationships("implements"):
            self.printer.emit_edge(
                link.from_object.fig_id, link.to_object.fig_id, **self.imp_edges
            )
        # generate associations
        for link in diagram.get_relationships("association"):
            self.printer.emit_edge(
                link.from_object.fig_id,
                link.to_object.fig_id,
                label=link.name,
                **self.association_edges
            )

    def set_printer(self, file_name, basename):
        """set printer"""
        raise NotImplementedError

    def get_title(self, obj):
        """get project title"""
        raise NotImplementedError

    def get_values(self, obj):
        """get label and shape for classes."""
        raise NotImplementedError

    def close_graph(self):
        """finalize the graph"""
        raise NotImplementedError
+
+
class DotWriter(DiagramWriter):
    """write dot graphs from a diagram definition and a project"""

    def __init__(self, config):
        # edge styles in DiagramWriter order:
        # package, inheritance, implementation, association
        # NOTE(review): arrowtail="node" below looks like a typo for "none" —
        # confirm against the Graphviz arrowType attribute documentation
        styles = [
            dict(arrowtail="none", arrowhead="open"),
            dict(arrowtail="none", arrowhead="empty"),
            dict(arrowtail="node", arrowhead="empty", style="dashed"),
            dict(
                fontcolor="green", arrowtail="none", arrowhead="diamond", style="solid"
            ),
        ]
        DiagramWriter.__init__(self, config, styles)

    def set_printer(self, file_name, basename):
        """initialize DotWriter and add options for layout."""
        layout = dict(rankdir="BT")
        self.printer = DotBackend(basename, additional_param=layout)
        self.file_name = file_name

    def get_title(self, obj):
        """get project title"""
        return obj.title

    def get_values(self, obj):
        """get label and shape for classes.

        The label contains all attributes and methods
        """
        label = obj.title
        if obj.shape == "interface":
            label = "«interface»\\n%s" % label
        if not self.config.only_classnames:
            # record-shaped node: {title|attributes\l|methods()\l}
            label = r"%s|%s\l|" % (label, r"\l".join(obj.attrs))
            for func in obj.methods:
                # drop the implicit self argument from method signatures
                args = [arg.name for arg in func.args.args if arg.name != "self"]
                label = r"%s%s(%s)\l" % (label, func.name, ", ".join(args))
            label = "{%s}" % label
        if is_exception(obj.node):
            # exception classes are highlighted in red
            return dict(fontcolor="red", label=label, shape="record")
        return dict(label=label, shape="record")

    def close_graph(self):
        """print the dot graph into <file_name>"""
        self.printer.generate(self.file_name)
+
+
class VCGWriter(DiagramWriter):
    """write vcg graphs from a diagram definition and a project"""

    def __init__(self, config):
        # edge styles in DiagramWriter order:
        # package, inheritance, implementation, association
        styles = [
            dict(arrowstyle="solid", backarrowstyle="none", backarrowsize=0),
            dict(arrowstyle="solid", backarrowstyle="none", backarrowsize=10),
            dict(
                arrowstyle="solid",
                backarrowstyle="none",
                linestyle="dotted",
                backarrowsize=10,
            ),
            dict(arrowstyle="solid", backarrowstyle="none", textcolor="green"),
        ]
        DiagramWriter.__init__(self, config, styles)

    def set_printer(self, file_name, basename):
        """initialize VCGWriter for a UML graph"""
        self.graph_file = open(file_name, "w+")
        self.printer = VCGPrinter(self.graph_file)
        self.printer.open_graph(
            title=basename,
            layoutalgorithm="dfs",
            late_edge_labels="yes",
            port_sharing="no",
            manhattan_edges="yes",
        )
        # alias the printer's node/edge methods to the generic emit_* names
        # used by DiagramWriter
        self.printer.emit_node = self.printer.node
        self.printer.emit_edge = self.printer.edge

    def get_title(self, obj):
        """get project title in vcg format"""
        return r"\fb%s\fn" % obj.title

    def get_values(self, obj):
        """get label and shape for classes.

        The label contains all attributes and methods
        """
        # \fb..\fn is vcg bold markup; \f09 colors exception titles
        if is_exception(obj.node):
            label = r"\fb\f09%s\fn" % obj.title
        else:
            label = r"\fb%s\fn" % obj.title
        if obj.shape == "interface":
            shape = "ellipse"
        else:
            shape = "box"
        if not self.config.only_classnames:
            attrs = obj.attrs
            methods = [func.name for func in obj.methods]
            # box width for UML like diagram
            maxlen = max(len(name) for name in [obj.title] + methods + attrs)
            line = "_" * (maxlen + 2)
            label = r"%s\n\f%s" % (label, line)
            for attr in attrs:
                label = r"%s\n\f08%s" % (label, attr)
            if attrs:
                # separator line between attributes and methods
                label = r"%s\n\f%s" % (label, line)
            for func in methods:
                label = r"%s\n\f10%s()" % (label, func)
        return dict(label=label, shape=shape)

    def close_graph(self):
        """close graph and file"""
        self.printer.close_graph()
        self.graph_file.close()
diff --git a/venv/Lib/site-packages/pylint/reporters/__init__.py b/venv/Lib/site-packages/pylint/reporters/__init__.py
new file mode 100644
index 0000000..f01629b
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/__init__.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006, 2010, 2012-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2017 Kári Tristan Helgason <kthelgason@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""utilities methods and classes for reporters"""
+
+
+from pylint import utils
+from pylint.reporters.base_reporter import BaseReporter
+from pylint.reporters.collecting_reporter import CollectingReporter
+from pylint.reporters.json_reporter import JSONReporter
+from pylint.reporters.reports_handler_mix_in import ReportsHandlerMixIn
+
+
def initialize(linter):
    """initialize linter with reporters in this package"""
    # register every reporter module living next to this package's __init__
    utils.register_plugins(linter, __path__[0])


__all__ = ["BaseReporter", "ReportsHandlerMixIn", "JSONReporter", "CollectingReporter"]
diff --git a/venv/Lib/site-packages/pylint/reporters/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..a1b55a7
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/__pycache__/base_reporter.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/__pycache__/base_reporter.cpython-37.pyc
new file mode 100644
index 0000000..5f35295
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/__pycache__/base_reporter.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/__pycache__/collecting_reporter.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/__pycache__/collecting_reporter.cpython-37.pyc
new file mode 100644
index 0000000..066140c
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/__pycache__/collecting_reporter.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/__pycache__/json_reporter.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/__pycache__/json_reporter.cpython-37.pyc
new file mode 100644
index 0000000..ca871c3
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/__pycache__/json_reporter.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/__pycache__/reports_handler_mix_in.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/__pycache__/reports_handler_mix_in.cpython-37.pyc
new file mode 100644
index 0000000..8269f35
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/__pycache__/reports_handler_mix_in.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/__pycache__/text.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/__pycache__/text.cpython-37.pyc
new file mode 100644
index 0000000..f40cc5e
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/__pycache__/text.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/base_reporter.py b/venv/Lib/site-packages/pylint/reporters/base_reporter.py
new file mode 100644
index 0000000..1003eeb
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/base_reporter.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import os
+import sys
+
+
class BaseReporter:
    """base class for reporters

    symbols: show short symbolic names for messages.
    """

    extension = ""

    def __init__(self, output=None):
        self.linter = None
        self.section = 0
        self.out = None
        self.out_encoding = None
        self.set_output(output)
        # Build the path prefix to strip to get relative paths
        self.path_strip_prefix = os.getcwd() + os.sep

    def handle_message(self, msg):
        """Handle a new message triggered on the current file."""

    def set_output(self, output=None):
        """set output stream"""
        # fall back to stdout when no (or a falsy) stream is given
        self.out = output if output else sys.stdout

    def writeln(self, string=""):
        """write a line in the output buffer"""
        print(string, file=self.out)

    def display_reports(self, layout):
        """display results encapsulated in the layout tree"""
        self.section = 0
        if hasattr(layout, "report_id"):
            # tag the first data cell of the layout with the report id
            layout.children[0].children[0].data += " (%s)" % layout.report_id
        self._display(layout)

    def _display(self, layout):
        """display the layout"""
        raise NotImplementedError()

    def display_messages(self, layout):
        """Hook for displaying the messages of the reporter

        This will be called whenever the underlying messages
        needs to be displayed. For some reporters, it probably
        doesn't make sense to display messages as soon as they
        are available, so some mechanism of storing them could be used.
        This method can be implemented to display them after they've
        been aggregated.
        """

    # Event callbacks

    def on_set_current_module(self, module, filepath):
        """Hook called when a module starts to be analysed."""

    def on_close(self, stats, previous_stats):
        """Hook called when a module finished analyzing."""
diff --git a/venv/Lib/site-packages/pylint/reporters/collecting_reporter.py b/venv/Lib/site-packages/pylint/reporters/collecting_reporter.py
new file mode 100644
index 0000000..7798d83
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/collecting_reporter.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+from pylint.reporters.base_reporter import BaseReporter
+
+
class CollectingReporter(BaseReporter):
    """collects messages"""

    name = "collector"
    # reports are never rendered by this reporter
    _display = None

    def __init__(self):
        BaseReporter.__init__(self)
        # every message handed to this reporter, in arrival order
        self.messages = []

    def handle_message(self, msg):
        """store the message for later inspection"""
        self.messages.append(msg)
diff --git a/venv/Lib/site-packages/pylint/reporters/json_reporter.py b/venv/Lib/site-packages/pylint/reporters/json_reporter.py
new file mode 100644
index 0000000..fa6a0f8
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/json_reporter.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
+# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2017 guillaume2 <guillaume.peillex@gmail.col>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""JSON reporter"""
+import html
+import json
+import sys
+
+from pylint.interfaces import IReporter
+from pylint.reporters.base_reporter import BaseReporter
+
+
class JSONReporter(BaseReporter):
    """Report messages and layouts in JSON."""

    __implements__ = IReporter
    name = "json"
    extension = "json"

    def __init__(self, output=sys.stdout):
        BaseReporter.__init__(self, output)
        # accumulated message dicts, dumped at once by display_messages
        self.messages = []

    def handle_message(self, msg):
        """Manage message of different type and in the context of path."""
        self.messages.append(
            {
                "type": msg.category,
                "module": msg.module,
                "obj": msg.obj,
                "line": msg.line,
                "column": msg.column,
                "path": msg.path,
                "symbol": msg.symbol,
                # escape <, > and & so the text is safe to embed in HTML
                "message": html.escape(msg.msg or "", quote=False),
                "message-id": msg.msg_id,
            }
        )

    def display_messages(self, layout):
        """Launch layouts display"""
        print(json.dumps(self.messages, indent=4), file=self.out)

    def display_reports(self, layout):
        """Do nothing in this reporter: reports are not part of the JSON output."""

    def _display(self, layout):
        """Do nothing."""
+
+
def register(linter):
    """Register the reporter classes with the linter."""
    linter.register_reporter(JSONReporter)
diff --git a/venv/Lib/site-packages/pylint/reporters/reports_handler_mix_in.py b/venv/Lib/site-packages/pylint/reporters/reports_handler_mix_in.py
new file mode 100644
index 0000000..6f91a97
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/reports_handler_mix_in.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import collections
+
+from pylint.exceptions import EmptyReportError
+from pylint.reporters.ureports.nodes import Section
+
+
class ReportsHandlerMixIn:
    """Mix-in holding the report registration, enabling and rendering
    logic used by the main lint class.
    """

    def __init__(self):
        # checker -> list of (report id, title, callback) tuples
        self._reports = collections.defaultdict(list)
        # report id -> enabled flag; an absent id counts as enabled
        self._reports_state = {}

    def report_order(self):
        """Return the checkers in the order their reports must be run."""
        return list(self._reports)

    def register_report(self, reportid, r_title, r_cb, checker):
        """Record a report for later rendering.

        reportid is the unique identifier for the report
        r_title the report's title
        r_cb the method to call to make the report
        checker is the checker defining the report
        """
        self._reports[checker].append((reportid.upper(), r_title, r_cb))

    def enable_report(self, reportid):
        """Enable the report with the given identifier."""
        self._reports_state[reportid.upper()] = True

    def disable_report(self, reportid):
        """Disable the report with the given identifier."""
        self._reports_state[reportid.upper()] = False

    def report_is_enabled(self, reportid):
        """Tell whether the report with the given identifier is enabled."""
        return self._reports_state.get(reportid, True)

    def make_reports(self, stats, old_stats):
        """Render every enabled registered report into a Section tree."""
        top = Section("Report", "%s statements analysed." % (self.stats["statement"]))
        for checker in self.report_order():
            for reportid, r_title, r_cb in self._reports[checker]:
                if not self.report_is_enabled(reportid):
                    continue
                report_sect = Section(r_title)
                try:
                    r_cb(report_sect, stats, old_stats)
                except EmptyReportError:
                    # the callback signalled it has nothing to show
                    continue
                report_sect.report_id = reportid
                top.append(report_sect)
        return top

    def add_stats(self, **kwargs):
        """Merge entries into the statistics dictionary.

        A single trailing underscore is stripped from each key (so
        keyword-safe names like ``pass_`` map to ``pass``). Raise an
        AssertionError on a key conflict.
        """
        for key, value in kwargs.items():
            if key.endswith("_"):
                key = key[:-1]
            assert key not in self.stats
            self.stats[key] = value
        return self.stats
diff --git a/venv/Lib/site-packages/pylint/reporters/text.py b/venv/Lib/site-packages/pylint/reporters/text.py
new file mode 100644
index 0000000..ce74174
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/text.py
@@ -0,0 +1,247 @@
+# Copyright (c) 2006-2007, 2010-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 y2kbugger <y2kbugger@users.noreply.github.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Jace Browning <jacebrowning@gmail.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Plain text reporters:
+
+:text: the default one grouping messages by module
+:colorized: an ANSI colorized text reporter
+"""
+import os
+import sys
+import warnings
+
+from pylint import utils
+from pylint.interfaces import IReporter
+from pylint.reporters import BaseReporter
+from pylint.reporters.ureports.text_writer import TextWriter
+
TITLE_UNDERLINES = ["", "=", "-", "."]

ANSI_PREFIX = "\033["
ANSI_END = "m"
ANSI_RESET = "\033[0m"
# style name -> SGR parameter string
ANSI_STYLES = {
    "reset": "0",
    "bold": "1",
    "italic": "3",
    "underline": "4",
    "blink": "5",
    "inverse": "7",
    "strike": "9",
}
# color name -> SGR foreground color parameter string
ANSI_COLORS = {
    "reset": "0",
    "black": "30",
    "red": "31",
    "green": "32",
    "yellow": "33",
    "blue": "34",
    "magenta": "35",
    "cyan": "36",
    "white": "37",
}


def _get_ansi_code(color=None, style=None):
    """Build the ANSI escape sequence for *color* and *style*.

    :type color: str or None
    :param color: a color name (see `ANSI_COLORS`) or, when 256 colors
        are available, the color number as a digit string
    :type style: str or None
    :param style: comma separated style names (see `ANSI_STYLES`)
    :raise KeyError: if an unknown color or style identifier is given
    :rtype: str
    :return: the escape code, or "" when neither color nor style applies
    """
    attrs = []
    if style:
        for effect in utils._splitstrip(style):
            attrs.append(ANSI_STYLES[effect])
    if color:
        if color.isdigit():
            # 256-color mode: "38;5;<n>" selects the foreground color
            attrs += ["38", "5", color]
        else:
            attrs.append(ANSI_COLORS[color])
    if not attrs:
        return ""
    return ANSI_PREFIX + ";".join(attrs) + ANSI_END


def colorize_ansi(msg, color=None, style=None):
    """Wrap *msg* in ANSI escape codes for the given color and style.

    :type msg: str or unicode
    :param msg: the message string to colorize
    :type color: str or None
    :param color: a color identifier (see `ANSI_COLORS`)
    :type style: str or None
    :param style: comma separated style names (see `ANSI_STYLES`)
    :raise KeyError: if an unknown color or style identifier is given
    :rtype: str or unicode
    :return: *msg* wrapped in escape codes, or unchanged when no code
        could be built
    """
    # no decoration requested at all: return the text untouched
    if color is None and style is None:
        return msg
    escape_code = _get_ansi_code(color, style)
    if not escape_code:
        return msg
    return "%s%s%s" % (escape_code, msg, ANSI_RESET)
+
+
class TextReporter(BaseReporter):
    """Report messages and layouts as plain text, grouped by module."""

    __implements__ = IReporter
    name = "text"
    extension = "txt"
    line_format = "{path}:{line}:{column}: {msg_id}: {msg} ({symbol})"

    def __init__(self, output=None):
        BaseReporter.__init__(self, output)
        self._modules = set()
        self._template = None

    def on_set_current_module(self, module, filepath):
        # honour a user-supplied --msg-template, defaulting to line_format
        template = self.linter.config.msg_template or self.line_format
        self._template = str(template)

    def write_message(self, msg):
        """Write *msg* rendered through the current line template."""
        self.writeln(msg.format(self._template))

    def handle_message(self, msg):
        """Write the message, preceded by a module header on first sight."""
        if msg.module not in self._modules:
            if msg.module:
                self.writeln("************* Module %s" % msg.module)
                self._modules.add(msg.module)
            else:
                self.writeln("************* ")
        self.write_message(msg)

    def _display(self, layout):
        """Render a report layout through the text writer."""
        print(file=self.out)
        TextWriter().format(layout, self.out)
+
+
class ParseableTextReporter(TextReporter):
    """Deprecated reporter displaying messages in a form recognized by
    most text editors:

    <filename>:<linenum>:<msg>
    """

    name = "parseable"
    line_format = "{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"

    def __init__(self, output=None):
        # warn callers away: --msg-template supersedes this format
        message = "%s output format is deprecated. This is equivalent " "to --msg-template=%s" % (
            self.name,
            self.line_format,
        )
        warnings.warn(message, DeprecationWarning)
        TextReporter.__init__(self, output)
+
+
class VSTextReporter(ParseableTextReporter):
    """Visual studio text reporter"""

    name = "msvs"
    # Visual Studio's output window expects "path(line): message" style lines.
    line_format = "{path}({line}): [{msg_id}({symbol}){obj}] {msg}"
+
+
class ColorizedTextReporter(TextReporter):
    """Simple TextReporter that colorizes text output.

    Messages are decorated per category letter (first character of the
    message id) using the ANSI helpers defined above.
    """

    name = "colorized"
    # category letter -> (color name, comma separated style names);
    # either entry may be None.
    COLOR_MAPPING = {
        "I": ("green", None),
        "C": (None, "bold"),
        "R": ("magenta", "bold, italic"),
        "W": ("magenta", None),
        "E": ("red", "bold"),
        "F": ("red", "bold, underline"),
        "S": ("yellow", "inverse"),  # S stands for module Separator
    }

    def __init__(self, output=None, color_mapping=None):
        # color_mapping: optional {category letter: (color, style)} override
        TextReporter.__init__(self, output)
        self.color_mapping = color_mapping or dict(ColorizedTextReporter.COLOR_MAPPING)
        ansi_terms = ["xterm-16color", "xterm-256color"]
        if os.environ.get("TERM") not in ansi_terms:
            if sys.platform == "win32":
                # pylint: disable=import-error,import-outside-toplevel
                import colorama

                # colorama translates ANSI escapes for the Windows console
                self.out = colorama.AnsiToWin32(self.out)

    def _get_decoration(self, msg_id):
        """Returns the tuple color, style associated with msg_id as defined
        in self.color_mapping; (None, None) for an unknown category.
        """
        try:
            return self.color_mapping[msg_id[0]]
        except KeyError:
            return None, None

    def handle_message(self, msg):
        """manage message of different types, and colorize output
        using ansi escape codes
        """
        if msg.module not in self._modules:
            # new module: emit a colorized separator line first
            color, style = self._get_decoration("S")
            if msg.module:
                modsep = colorize_ansi(
                    "************* Module %s" % msg.module, color, style
                )
            else:
                # NOTE(review): msg.module is falsy here, so "%s" renders it
                # as "" or "None" — confirm this matches TextReporter's
                # literal "************* " header.
                modsep = colorize_ansi("************* %s" % msg.module, color, style)
            self.writeln(modsep)
            self._modules.add(msg.module)
        color, style = self._get_decoration(msg.C)

        # messages are namedtuples; _replace builds a copy with the text
        # fields wrapped in escape codes
        msg = msg._replace(
            **{
                attr: colorize_ansi(getattr(msg, attr), color, style)
                for attr in ("msg", "symbol", "category", "C")
            }
        )
        self.write_message(msg)
+
+
def register(linter):
    """Register the reporter classes with the linter."""
    for reporter_class in (
        TextReporter,
        ParseableTextReporter,
        VSTextReporter,
        ColorizedTextReporter,
    ):
        linter.register_reporter(reporter_class)
diff --git a/venv/Lib/site-packages/pylint/reporters/ureports/__init__.py b/venv/Lib/site-packages/pylint/reporters/ureports/__init__.py
new file mode 100644
index 0000000..361552b
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/ureports/__init__.py
@@ -0,0 +1,96 @@
+# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Universal report objects and some formatting drivers.
+
+A way to create simple reports using python objects, primarily designed to be
+formatted as text and html.
+"""
+import os
+import sys
+from io import StringIO
+
+
class BaseWriter:
    """Base class for ureport writers."""

    def format(self, layout, stream=None, encoding=None):
        """Format *layout* and write it to *stream* (stdout by default).

        The stream's own encoding is used when none is given, falling
        back to UTF-8.
        """
        if stream is None:
            stream = sys.stdout
        if not encoding:
            encoding = getattr(stream, "encoding", "UTF-8")
        self.encoding = encoding or "UTF-8"
        self.out = stream
        self.begin_format()
        layout.accept(self)
        self.end_format()

    def format_children(self, layout):
        """Visit every child of *layout* (Visitor pattern)."""
        for child in getattr(layout, "children", ()):
            child.accept(self)

    def writeln(self, string=""):
        """Write *string* plus a line separator to the output buffer."""
        self.write(string + os.linesep)

    def write(self, string):
        """Write *string* to the output buffer."""
        self.out.write(string)

    def begin_format(self):
        """Start formatting a layout: reset the section depth."""
        self.section = 0

    def end_format(self):
        """Hook called once a layout has been fully formatted."""

    def get_table_content(self, table):
        """Return the table cells as a list of rows of cell strings,
        without writing anything to the output.
        """
        rows = [[]]
        remaining = table.cols
        for cell in self.compute_content(table):
            if remaining == 0:
                # current row is full, start the next one
                rows.append([])
                remaining = table.cols
            remaining -= 1
            rows[-1].append(cell)
        # fill missing cells
        # NOTE(review): this compares against the decremented counter, not
        # table.cols, so a short final row may stay short — confirm intent.
        while len(rows[-1]) < remaining:
            rows[-1].append("")
        return rows

    def compute_content(self, layout):
        """Yield the formatted text of each child of *layout*.

        The output stream is temporarily swapped for an in-memory buffer
        so children can be rendered without touching the real output.
        """
        saved = self.out
        try:
            for child in layout.children:
                buffer = StringIO()
                self.out = buffer
                child.accept(self)
                yield buffer.getvalue()
        finally:
            self.out = saved
diff --git a/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..408b51f
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/nodes.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/nodes.cpython-37.pyc
new file mode 100644
index 0000000..2640b32
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/nodes.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/text_writer.cpython-37.pyc b/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/text_writer.cpython-37.pyc
new file mode 100644
index 0000000..7222ed4
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/text_writer.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/reporters/ureports/nodes.py b/venv/Lib/site-packages/pylint/reporters/ureports/nodes.py
new file mode 100644
index 0000000..8fafb20
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/ureports/nodes.py
@@ -0,0 +1,188 @@
+# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Micro reports objects.
+
+A micro report is a tree of layout and content objects.
+"""
+
+
class VNode:
    """Base node of a micro-report tree."""

    def __init__(self, nid=None):
        self.id = nid
        # tree navigation links
        self.parent = None
        self.children = []

    def __iter__(self):
        return iter(self.children)

    def append(self, child):
        """Add *child* at the end of this node's children."""
        self.children.append(child)
        child.parent = self

    def insert(self, index, child):
        """Insert *child* at position *index* among the children."""
        self.children.insert(index, child)
        child.parent = self

    def _get_visit_name(self):
        """Return the visitor-method suffix for this node.

        'accept' calls 'visit_<name>' (and 'leave' calls 'leave_<name>')
        on the visitor, where <name> defaults to the lowercased class name.
        """
        try:
            # pylint: disable=no-member
            return self.TYPE.replace("-", "_")
        # pylint: disable=broad-except
        except Exception:
            return self.__class__.__name__.lower()

    def accept(self, visitor, *args, **kwargs):
        method = getattr(visitor, "visit_%s" % self._get_visit_name())
        return method(self, *args, **kwargs)

    def leave(self, visitor, *args, **kwargs):
        method = getattr(visitor, "leave_%s" % self._get_visit_name())
        return method(self, *args, **kwargs)
+
+
class BaseLayout(VNode):
    """Base container node.

    attributes
    * children : components in this table (i.e. the table's cells)
    """

    def __init__(self, children=(), **kwargs):
        super(BaseLayout, self).__init__(**kwargs)
        for item in children:
            if isinstance(item, VNode):
                self.append(item)
            else:
                self.add_text(item)

    def append(self, child):
        """Add *child*, refusing any node already among our ancestors."""
        assert child not in self.parents()
        VNode.append(self, child)

    def parents(self):
        """Return the chain of ancestor nodes, closest first."""
        parent = self.parent
        assert parent is not self
        if parent is None:
            return []
        return [parent] + parent.parents()

    def add_text(self, text):
        """Shortcut appending *text* wrapped in a Text node."""
        self.children.append(Text(text))
+
+
+# non container nodes #########################################################
+
+
class Text(VNode):
    """A text portion.

    attributes :
    * data : the text value, always a str (enforced by the assert below)
    """

    def __init__(self, data, escaped=True, **kwargs):
        super(Text, self).__init__(**kwargs)
        # removed leftover commented-out Python-2 unicode handling
        assert isinstance(data, str), data.__class__
        # presumably marks the data as already escaped for the output
        # format — verify against the writers consuming this flag
        self.escaped = escaped
        self.data = data
+
+
class VerbatimText(Text):
    """A text portion to be rendered verbatim (raw data, no reflowing).

    attributes :
    * data : the text value as an encoded or unicode string
    """
+
+
+# container nodes #############################################################
+
+
class Section(BaseLayout):
    """A report section.

    attributes :
    * BaseLayout attributes

    a title may also be given to the constructor, it'll be added
    as a first element
    a description may also be given to the constructor, it'll be added
    as a first paragraph
    """

    def __init__(self, title=None, description=None, **kwargs):
        super(Section, self).__init__(**kwargs)
        # insert in reverse order so the title ends up before the description
        if description:
            body = Paragraph([Text(description)])
            self.insert(0, body)
        if title:
            heading = Title(children=(title,))
            self.insert(0, heading)
+
+
class EvaluationSection(Section):
    """A section rendering an evaluation message under a dashed rule."""

    def __init__(self, message, **kwargs):
        super(EvaluationSection, self).__init__(**kwargs)
        # a rule of dashes as long as the message, then the message itself
        header = Paragraph()
        header.append(Text("-" * len(message)))
        self.append(header)

        body = Paragraph()
        body.append(Text(message))
        self.append(body)
+
+
class Title(BaseLayout):
    """a title

    attributes :
    * BaseLayout attributes

    A title must not contain a section nor a paragraph!
    """
+
+
class Paragraph(BaseLayout):
    """a simple text paragraph

    attributes :
    * BaseLayout attributes

    A paragraph must not contain a section!
    """
+
+
class Table(BaseLayout):
    """some tabular data

    attributes :
    * BaseLayout attributes
    * cols : the number of columns of the table (REQUIRED)
    * rheaders : the first row's elements are table's header
    * cheaders : the first col's elements are table's header
    * title : the table's optional title
    """

    def __init__(self, cols, title=None, rheaders=0, cheaders=0, **kwargs):
        super(Table, self).__init__(**kwargs)
        # cols drives how the flat children list is split into rows
        # (see BaseWriter.get_table_content)
        assert isinstance(cols, int)
        self.cols = cols
        self.title = title
        self.rheaders = rheaders
        self.cheaders = cheaders
diff --git a/venv/Lib/site-packages/pylint/reporters/ureports/text_writer.py b/venv/Lib/site-packages/pylint/reporters/ureports/text_writer.py
new file mode 100644
index 0000000..8f6aea2
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/reporters/ureports/text_writer.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""Text formatting drivers for ureports"""
+
+from pylint.reporters.ureports import BaseWriter
+
# Underline character used per section nesting depth (index 0 = none);
# see TextWriter.visit_title below.
TITLE_UNDERLINES = ["", "=", "-", "`", ".", "~", "^"]
BULLETS = ["*", "-"]
+
+
class TextWriter(BaseWriter):
    """format layouts as text
    (ReStructured inspiration but not totally handled yet)
    """

    def begin_format(self):
        super(TextWriter, self).begin_format()
        # current list nesting depth; not used by the visitors below
        self.list_level = 0

    def visit_section(self, layout):
        """display a section as text

        The section depth selects the title underline character
        (see visit_title).
        """
        self.section += 1
        self.writeln()
        self.format_children(layout)
        self.section -= 1
        self.writeln()

    def visit_evaluationsection(self, layout):
        """Display an evaluation section as a text."""
        self.section += 1
        self.format_children(layout)
        self.section -= 1
        self.writeln()

    def visit_title(self, layout):
        """Write a title line followed by its depth-specific underline."""
        title = "".join(list(self.compute_content(layout)))
        self.writeln(title)
        try:
            self.writeln(TITLE_UNDERLINES[self.section] * len(title))
        except IndexError:
            # nesting deeper than TITLE_UNDERLINES supports: the title has
            # already been written as plain text; this warning goes to
            # stdout, not to self.out
            print("FIXME TITLE TOO DEEP. TURNING TITLE INTO TEXT")

    def visit_paragraph(self, layout):
        """enter a paragraph"""
        self.format_children(layout)
        self.writeln()

    def visit_table(self, layout):
        """display a table as text"""
        table_content = self.get_table_content(layout)
        # get columns width
        cols_width = [0] * len(table_content[0])
        for row in table_content:
            for index, col in enumerate(row):
                cols_width[index] = max(cols_width[index], len(col))
        self.default_table(layout, table_content, cols_width)
        self.writeln()

    def default_table(self, layout, table_content, cols_width):
        """format a table"""
        # one extra space of padding per column
        cols_width = [size + 1 for size in cols_width]
        # build one left-aligned "%-<width>s" format string per column
        format_strings = " ".join(["%%-%ss"] * len(cols_width))
        format_strings = format_strings % tuple(cols_width)
        format_strings = format_strings.split(" ")
        # row separator: +----+----+ ; header separator uses '=' instead
        table_linesep = "\n+" + "+".join(["-" * w for w in cols_width]) + "+\n"
        headsep = "\n+" + "+".join(["=" * w for w in cols_width]) + "+\n"

        self.write(table_linesep)
        for index, line in enumerate(table_content):
            self.write("|")
            for line_index, at_index in enumerate(line):
                self.write(format_strings[line_index] % at_index)
                self.write("|")
            if index == 0 and layout.rheaders:
                self.write(headsep)
            else:
                self.write(table_linesep)

    def visit_verbatimtext(self, layout):
        """display a verbatim layout as text (so difficult ;)

        Rendered as a reST literal block: "::" then indented lines.
        """
        self.writeln("::\n")
        for line in layout.data.splitlines():
            self.writeln("    " + line)
        self.writeln()
diff --git a/venv/Lib/site-packages/pylint/testutils.py b/venv/Lib/site-packages/pylint/testutils.py
new file mode 100644
index 0000000..f214208
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/testutils.py
@@ -0,0 +1,298 @@
+# Copyright (c) 2012-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
+# Copyright (c) 2013-2017 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
+# Copyright (c) 2014 LCD 47 <lcd047@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Ricardo Gemignani <ricardo.gemignani@gmail.com>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
+# Copyright (c) 2016 Roy Williams <roy.williams.iii@gmail.com>
+# Copyright (c) 2016 xmo-odoo <xmo-odoo@users.noreply.github.com>
+# Copyright (c) 2017 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""functional/non regression tests for pylint"""
+import collections
+import contextlib
+import functools
+import sys
+import tempfile
+import tokenize
+from glob import glob
+from io import StringIO
+from os import close, getcwd, linesep, remove, sep, write
+from os.path import abspath, basename, dirname, join, splitext
+
+import astroid
+
+from pylint import checkers
+from pylint.interfaces import IReporter
+from pylint.lint import PyLinter
+from pylint.reporters import BaseReporter
+from pylint.utils import ASTWalker
+
# Utils

# e.g. "374" for Python 3.7.4; compared as a *string* against version
# fragments parsed from test file names in _get_tests_info below.
SYS_VERS_STR = "%d%d%d" % sys.version_info[:3]
TITLE_UNDERLINES = ["", "=", "-", "."]
# directory containing this module
PREFIX = abspath(dirname(__file__))
+
+
def _get_tests_info(input_dir, msg_dir, prefix, suffix):
    """get python input examples and output messages

    Returns a list of (input file name, expected messages file path) pairs.

    We use following conventions for input files and messages:
    for different inputs:
        test for python >= x.y -> input = <name>_pyxy.py
        test for python < x.y -> input = <name>_py_xy.py
    for one input and different messages:
        message for python >= x.y -> message = <name>_pyxy.txt
        lower versions -> message with highest num
    """
    result = []
    for fname in glob(join(input_dir, prefix + "*" + suffix)):
        infile = basename(fname)
        fbase = splitext(infile)[0]
        # filter input files :
        pyrestr = fbase.rsplit("_py", 1)[-1]  # like _26 or 26
        if pyrestr.isdigit():  # '24', '25'...
            # NOTE(review): lexicographic string comparison of version
            # numbers; breaks once a component reaches two digits — confirm.
            if SYS_VERS_STR < pyrestr:
                continue
        if pyrestr.startswith("_") and pyrestr[1:].isdigit():
            # skip test for higher python versions
            if SYS_VERS_STR >= pyrestr[1:]:
                continue
        messages = glob(join(msg_dir, fbase + "*.txt"))
        # the last one will be without ext, i.e. for all or upper versions:
        if messages:
            for outfile in sorted(messages, reverse=True):
                py_rest = outfile.rsplit("_py", 1)[-1][:-4]
                if py_rest.isdigit() and SYS_VERS_STR >= py_rest:
                    break
        else:
            # This will provide an error message indicating the missing filename.
            outfile = join(msg_dir, fbase + ".txt")
        result.append((infile, outfile))
    return result
+
+
class TestReporter(BaseReporter):
    """Reporter storing plain text messages for test assertions."""

    __implements__ = IReporter

    def __init__(self):  # pylint: disable=super-init-not-called

        self.message_ids = {}
        self.reset()
        self.path_strip_prefix = getcwd() + sep

    def reset(self):
        """Drop buffered output and stored messages."""
        self.out = StringIO()
        self.messages = []

    def handle_message(self, msg):
        """Store a rendered one-line version of *msg*."""
        text = msg.msg
        self.message_ids[msg.msg_id] = 1
        location = ":%s" % msg.obj if msg.obj else msg.obj
        if linesep != "\n":
            # 2to3 writes os.linesep instead of using
            # the previously used line separators
            text = text.replace("\r\n", "\n")
        category_letter = msg.msg_id[0]
        self.messages.append("%s:%3s%s: %s" % (category_letter, msg.line, location, text))

    def finalize(self):
        """Return all stored messages, sorted, as one string; then reset."""
        self.messages.sort()
        for message in self.messages:
            print(message, file=self.out)
        result = self.out.getvalue()
        self.reset()
        return result

    # pylint: disable=unused-argument
    def on_set_current_module(self, module, filepath):
        pass

    # pylint: enable=unused-argument

    def display_reports(self, layout):
        """ignore layouts"""

    _display = None
+
+
class MinimalTestReporter(BaseReporter):
    """Reporter that simply accumulates raw message objects per module."""

    def handle_message(self, msg):
        self.messages.append(msg)

    def on_set_current_module(self, module, filepath):
        # a new module starts: drop messages from the previous one
        self.messages = []

    _display = None
+
+
class Message(
    collections.namedtuple("Message", ["msg_id", "line", "node", "args", "confidence"])
):
    """Expected-message tuple for checker tests.

    Equality ignores the confidence field unless both sides set it.
    """

    def __new__(cls, msg_id, line=None, node=None, args=None, confidence=None):
        return tuple.__new__(cls, (msg_id, line, node, args, confidence))

    def __eq__(self, other):
        if not isinstance(other, Message):
            return NotImplemented  # pragma: no cover
        if self.confidence and other.confidence:
            return super(Message, self).__eq__(other)
        # compare everything but the trailing confidence field
        return self[:-1] == other[:-1]

    __hash__ = None
+
+
class UnittestLinter:
    """A fake linter class to capture checker messages."""

    # pylint: disable=unused-argument, no-self-use

    def __init__(self):
        self._messages = []
        self.stats = {}

    def release_messages(self):
        """Return the captured messages and clear the buffer."""
        captured = self._messages
        self._messages = []
        return captured

    def add_message(
        self, msg_id, line=None, node=None, args=None, confidence=None, col_offset=None
    ):
        # Do not test col_offset for now since changing Message breaks everything
        self._messages.append(Message(msg_id, line, node, args, confidence))

    def is_message_enabled(self, *unused_args, **unused_kwargs):
        """Every message is considered enabled by this fake linter."""
        return True

    def add_stats(self, **kwargs):
        """Merge the given entries into the stats dict and return it."""
        self.stats.update(kwargs)
        return self.stats

    @property
    def options_providers(self):
        # delegate to the shared module-level linter
        return linter.options_providers
+
+
def set_config(**kwargs):
    """Decorator for setting config values on a checker."""

    def decorator(fun):
        @functools.wraps(fun)
        def wrapper(self):
            for option, value in kwargs.items():
                setattr(self.checker.config, option, value)
            if isinstance(self, CheckerTestCase):
                # reopen the checker: it may react to the configuration change
                self.checker.open()
            fun(self)

        return wrapper

    return decorator
+
+
class CheckerTestCase:
    """A base testcase class for unit testing individual checker classes."""

    # subclasses set the checker class under test and its config overrides
    CHECKER_CLASS = None
    CONFIG = {}

    def setup_method(self):
        self.linter = UnittestLinter()
        self.checker = self.CHECKER_CLASS(self.linter)  # pylint: disable=not-callable
        for key, value in self.CONFIG.items():
            setattr(self.checker.config, key, value)
        self.checker.open()

    @contextlib.contextmanager
    def assertNoMessages(self):
        """Assert that no messages are added by the given method."""
        with self.assertAddsMessages():
            yield

    @contextlib.contextmanager
    def assertAddsMessages(self, *messages):
        """Assert that exactly the given method adds the given messages.

        The list of messages must exactly match *all* the messages added by the
        method. Additionally, we check to see whether the args in each message can
        actually be substituted into the message string.
        """
        yield
        # messages captured while the with-body ran
        got = self.linter.release_messages()
        msg = "Expected messages did not match actual.\n" "Expected:\n%s\nGot:\n%s" % (
            "\n".join(repr(m) for m in messages),
            "\n".join(repr(m) for m in got),
        )
        assert list(messages) == got, msg

    def walk(self, node):
        """recursive walk on the given node"""
        # uses the module-level linter, not self.linter
        walker = ASTWalker(linter)
        walker.add_checker(self.checker)
        walker.walk(node)
+
+
# Init
# Shared module-level linter wired to a TestReporter; used by the helpers
# above (e.g. CheckerTestCase.walk and UnittestLinter.options_providers).
test_reporter = TestReporter()
linter = PyLinter()
linter.set_reporter(test_reporter)
# presumably disables result caching between runs — confirm against
# PyLinter's handling of the "persistent" option
linter.config.persistent = 0
checkers.initialize(linter)
+
+
def _tokenize_str(code):
    """Tokenize *code* (a source string) and return the list of tokens."""
    return list(tokenize.generate_tokens(StringIO(code).readline))
+
+
@contextlib.contextmanager
def _create_tempfile(content=None):
    """Create a new temporary file.

    If *content* parameter is given, then it will be written
    in the temporary file, before passing it back.
    This is a context manager and should be used with a *with* statement.
    """
    # Can't use tempfile.NamedTemporaryFile here
    # because on Windows the file must be closed before writing to it,
    # see http://bugs.python.org/issue14243
    file_handle, tmp = tempfile.mkstemp()
    if content:
        # NOTE: *content* must be ASCII-encodable
        write(file_handle, bytes(content, "ascii"))
    try:
        yield tmp
    finally:
        # always close the descriptor and remove the file from disk
        close(file_handle)
        remove(tmp)
+
+
@contextlib.contextmanager
def _create_file_backed_module(code):
    """Create an astroid module for the given code, backed by a real file."""
    with _create_tempfile() as temp:
        module = astroid.parse(code)
        # point the parsed module at the temporary file on disk
        module.file = temp
        yield module
diff --git a/venv/Lib/site-packages/pylint/utils/__init__.py b/venv/Lib/site-packages/pylint/utils/__init__.py
new file mode 100644
index 0000000..8ee9e07
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/__init__.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
+# Copyright (c) 2009 Vincent
+# Copyright (c) 2009 Mads Kiilerich <mads@kiilerich.com>
+# Copyright (c) 2012-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
+# Copyright (c) 2014-2015 Michal Nowikowski <godfryd@gmail.com>
+# Copyright (c) 2014 LCD 47 <lcd047@gmail.com>
+# Copyright (c) 2014 Brett Cannon <brett@python.org>
+# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
+# Copyright (c) 2014 Damien Nozay <damien.nozay@gmail.com>
+# Copyright (c) 2015 Aru Sahni <arusahni@gmail.com>
+# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
+# Copyright (c) 2015 Simu Toni <simutoni@gmail.com>
+# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
+# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
+# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
+# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
+# Copyright (c) 2016 Glenn Matthews <glmatthe@cisco.com>
+# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
+# Copyright (c) 2016 xmo-odoo <xmo-odoo@users.noreply.github.com>
+# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
+# Copyright (c) 2017 Pierre Sassoulas <pierre.sassoulas@cea.fr>
+# Copyright (c) 2017 Bryce Guinta <bryce.paul.guinta@gmail.com>
+# Copyright (c) 2017 Chris Lamb <chris@chris-lamb.co.uk>
+# Copyright (c) 2017 Anthony Sottile <asottile@umich.edu>
+# Copyright (c) 2017 Thomas Hisch <t.hisch@gmail.com>
+# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
+# Copyright (c) 2017 Craig Citro <craigcitro@gmail.com>
+# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
+# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
+# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
+# Copyright (c) 2018 Pierre Sassoulas <pierre.sassoulas@wisebim.fr>
+# Copyright (c) 2018 Reverb C <reverbc@users.noreply.github.com>
+# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+"""some various utilities and helper classes, most of them used in the
+main pylint class
+"""
+
+from pylint.utils.ast_walker import ASTWalker
+from pylint.utils.file_state import FileState
+from pylint.utils.utils import (
+ _basename_in_blacklist_re,
+ _check_csv,
+ _format_option_value,
+ _splitstrip,
+ _unquote,
+ decoding_stream,
+ deprecated_option,
+ expand_modules,
+ format_section,
+ get_global_option,
+ get_module_and_frameid,
+ get_rst_section,
+ get_rst_title,
+ normalize_text,
+ register_plugins,
+ safe_decode,
+ tokenize_module,
+)
diff --git a/venv/Lib/site-packages/pylint/utils/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pylint/utils/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..6f3569d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/__pycache__/__init__.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/utils/__pycache__/ast_walker.cpython-37.pyc b/venv/Lib/site-packages/pylint/utils/__pycache__/ast_walker.cpython-37.pyc
new file mode 100644
index 0000000..af27609
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/__pycache__/ast_walker.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/utils/__pycache__/file_state.cpython-37.pyc b/venv/Lib/site-packages/pylint/utils/__pycache__/file_state.cpython-37.pyc
new file mode 100644
index 0000000..4a43508
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/__pycache__/file_state.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/utils/__pycache__/utils.cpython-37.pyc b/venv/Lib/site-packages/pylint/utils/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 0000000..9049995
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/__pycache__/utils.cpython-37.pyc
Binary files differ
diff --git a/venv/Lib/site-packages/pylint/utils/ast_walker.py b/venv/Lib/site-packages/pylint/utils/ast_walker.py
new file mode 100644
index 0000000..2e7a6da
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/ast_walker.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import collections
+
+from astroid import nodes
+
+
class ASTWalker:
    """Dispatch checkers' visit_*/leave_* callbacks over an astroid tree."""

    def __init__(self, linter):
        # callbacks per lowercased node-class name
        self.nbstatements = 0
        self.visit_events = collections.defaultdict(list)
        self.leave_events = collections.defaultdict(list)
        self.linter = linter

    def _is_method_enabled(self, method):
        """Return True unless every message the method checks is disabled."""
        if not hasattr(method, "checks_msgs"):
            return True
        return any(
            self.linter.is_message_enabled(msg_desc)
            for msg_desc in method.checks_msgs
        )

    def add_checker(self, checker):
        """Collect the checker's visit_* and leave_* methods for dispatch."""
        seen_visit = set()
        visits = self.visit_events
        leaves = self.leave_events
        for attr_name in dir(checker):
            # both prefixes are six characters long
            node_name = attr_name[6:]
            if node_name == "default":
                continue
            if attr_name.startswith("visit_"):
                method = getattr(checker, attr_name)
                # skip visit methods whose messages are all disabled
                if self._is_method_enabled(method):
                    visits[node_name].append(method)
                    seen_visit.add(node_name)
            elif attr_name.startswith("leave_"):
                method = getattr(checker, attr_name)
                # skip leave methods whose messages are all disabled
                if self._is_method_enabled(method):
                    leaves[node_name].append(method)
        fallback = getattr(checker, "visit_default", None)
        if fallback:
            # register the fallback visitor for node types lacking one
            for cls in nodes.ALL_NODE_CLASSES:
                node_name = cls.__name__.lower()
                if node_name not in seen_visit:
                    visits[node_name].append(fallback)
        # for now we have no "leave_default" method in Pylint

    def walk(self, astroid):
        """Fire visit events for *astroid*, recurse on its children,
        then fire leave events.
        """
        cid = astroid.__class__.__name__.lower()

        # Detect if the node is a new name for a deprecated alias.
        # In this case, favour the methods for the deprecated
        # alias if any, in order to maintain backwards compatibility.
        visit_callbacks = self.visit_events.get(cid, ())
        leave_callbacks = self.leave_events.get(cid, ())

        if astroid.is_statement:
            self.nbstatements += 1
        # generate events for this node on each checker
        for callback in visit_callbacks:
            callback(astroid)
        # recurse on children
        for child in astroid.get_children():
            self.walk(child)
        for callback in leave_callbacks:
            callback(astroid)
diff --git a/venv/Lib/site-packages/pylint/utils/file_state.py b/venv/Lib/site-packages/pylint/utils/file_state.py
new file mode 100644
index 0000000..1a8dd4d
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/file_state.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import collections
+
+from astroid import nodes
+
+from pylint.constants import MSG_STATE_SCOPE_MODULE, WarningScope
+
+
class FileState:
    """Hold internal state specific to the currently analyzed file"""

    def __init__(self, modname=None):
        # dotted name of the module under analysis
        self.base_name = modname
        # msgid -> {line: enabled} after block-level expansion
        self._module_msgs_state = {}
        # snapshot of the pre-expansion state; used to spot useless
        # suppressions in iter_spurious_suppression_messages
        self._raw_module_msgs_state = {}
        # (msgid, pragma line) -> set of lines actually suppressed by it
        self._ignored_msgs = collections.defaultdict(set)
        # (msgid, line) -> line of the pragma that disabled it there
        self._suppression_mapping = {}
        # last line number of the module's AST, or None before collection
        self._effective_max_line_number = None

    def collect_block_lines(self, msgs_store, module_node):
        """Walk the AST to collect block level options line numbers."""
        # keep a raw copy of the per-line state before expanding it to blocks
        for msg, lines in self._module_msgs_state.items():
            self._raw_module_msgs_state[msg] = lines.copy()
        orig_state = self._module_msgs_state.copy()
        self._module_msgs_state = {}
        self._suppression_mapping = {}
        self._effective_max_line_number = module_node.tolineno
        self._collect_block_lines(msgs_store, module_node, orig_state)

    def _collect_block_lines(self, msgs_store, node, msg_state):
        """Recursively walk (depth first) AST to collect block level options
        line numbers.
        """
        # children first, so inner scopes claim their lines before this node
        for child in node.get_children():
            self._collect_block_lines(msgs_store, child, msg_state)
        first = node.fromlineno
        last = node.tolineno
        # first child line number used to distinguish between disable
        # which are the first child of scoped node with those defined later.
        # For instance in the code below:
        #
        # 1.   def meth8(self):
        # 2.        """test late disabling"""
        # 3.        pylint: disable=not-callable
        # 4.        print(self.blip)
        # 5.        pylint: disable=no-member
        # 6.        print(self.bla)
        #
        # E1102 should be disabled from line 1 to 6 while E1101 from line 5 to 6
        #
        # this is necessary to disable locally messages applying to class /
        # function using their fromlineno
        if (
            isinstance(node, (nodes.Module, nodes.ClassDef, nodes.FunctionDef))
            and node.body
        ):
            firstchildlineno = node.body[0].fromlineno
        else:
            firstchildlineno = last
        for msgid, lines in msg_state.items():
            # iterate over a copy: the loop body deletes from ``lines``
            for lineno, state in list(lines.items()):
                original_lineno = lineno
                # skip pragmas that fall outside this node's line span
                if first > lineno or last < lineno:
                    continue
                # Set state for all lines for this block, if the
                # warning is applied to nodes.
                message_definitions = msgs_store.get_message_definitions(msgid)
                for message_definition in message_definitions:
                    if message_definition.scope == WarningScope.NODE:
                        # a disable after the first child applies only to the
                        # enclosing block, not to the whole scoped node
                        if lineno > firstchildlineno:
                            state = True
                        first_, last_ = node.block_range(lineno)
                    else:
                        first_ = lineno
                        last_ = last
                for line in range(first_, last_ + 1):
                    # do not override existing entries
                    if line in self._module_msgs_state.get(msgid, ()):
                        continue
                    if line in lines:  # state change in the same block
                        state = lines[line]
                        original_lineno = line
                    # remember which pragma suppressed this line
                    if not state:
                        self._suppression_mapping[(msgid, line)] = original_lineno
                    try:
                        self._module_msgs_state[msgid][line] = state
                    except KeyError:
                        self._module_msgs_state[msgid] = {line: state}
                del lines[lineno]

    def set_msg_status(self, msg, line, status):
        """Set status (enabled/disable) for a given message at a given line"""
        assert line > 0
        try:
            self._module_msgs_state[msg.msgid][line] = status
        except KeyError:
            self._module_msgs_state[msg.msgid] = {line: status}

    def handle_ignored_message(
        self, state_scope, msgid, line, node, args, confidence
    ):  # pylint: disable=unused-argument
        """Report an ignored message.

        state_scope is either MSG_STATE_SCOPE_MODULE or MSG_STATE_SCOPE_CONFIG,
        depending on whether the message was disabled locally in the module,
        or globally. The other arguments are the same as for add_message.
        """
        if state_scope == MSG_STATE_SCOPE_MODULE:
            try:
                orig_line = self._suppression_mapping[(msgid, line)]
                self._ignored_msgs[(msgid, orig_line)].add(line)
            except KeyError:
                # no recorded pragma for this (msgid, line): nothing to track
                pass

    def iter_spurious_suppression_messages(self, msgs_store):
        """Yield (symbol, line, args) triples for suppression diagnostics:
        'useless-suppression' for disables that silenced nothing and
        'suppressed-message' for each message actually silenced.
        """
        for warning, lines in self._raw_module_msgs_state.items():
            for line, enable in lines.items():
                if not enable and (warning, line) not in self._ignored_msgs:
                    # ignore cyclic-import check which can show false positives
                    # here due to incomplete context
                    if warning != "R0401":
                        yield "useless-suppression", line, (
                            msgs_store.get_msg_display_string(warning),
                        )
        # don't use iteritems here, _ignored_msgs may be modified by add_message
        for (warning, from_), lines in list(self._ignored_msgs.items()):
            for line in lines:
                yield "suppressed-message", line, (
                    msgs_store.get_msg_display_string(warning),
                    from_,
                )

    def get_effective_max_line_number(self):
        """Return the module's last line number (None before collection)."""
        return self._effective_max_line_number
diff --git a/venv/Lib/site-packages/pylint/utils/utils.py b/venv/Lib/site-packages/pylint/utils/utils.py
new file mode 100644
index 0000000..5605ecd
--- /dev/null
+++ b/venv/Lib/site-packages/pylint/utils/utils.py
@@ -0,0 +1,371 @@
+# -*- coding: utf-8 -*-
+
+# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
+
+import codecs
+import re
+import sys
+import textwrap
+import tokenize
+from os import linesep, listdir
+from os.path import basename, dirname, exists, isdir, join, normpath, splitext
+
+from astroid import Module, modutils
+
+from pylint.constants import PY_EXTS
+
+
def normalize_text(text, line_len=80, indent=""):
    """Word-wrap *text* to *line_len* columns, prefixing every line with *indent*."""
    wrapped = textwrap.wrap(
        text, width=line_len, initial_indent=indent, subsequent_indent=indent
    )
    return "\n".join(wrapped)
+
+
def get_module_and_frameid(node):
    """Return (module name, dotted frame id) for the frame enclosing *node*."""
    frame = node.frame()
    module = ""
    parts = []
    while frame is not None:
        if isinstance(frame, Module):
            module = frame.name
        else:
            # lambdas have no ``name`` attribute
            parts.append(getattr(frame, "name", "<lambda>"))
        try:
            frame = frame.parent.frame()
        except AttributeError:
            # reached the top of the tree
            frame = None
    return module, ".".join(reversed(parts))
+
+
def get_rst_title(title, character):
    """Return *title* underlined with *character*, ReStructuredText style."""
    underline = character * len(title)
    return title + "\n" + underline + "\n"
+
+
def get_rst_section(section, options, doc=None):
    """Format an options *section* as a ReStructuredText fragment.

    *options* is an iterable of (name, optdict, value) triples; each option
    is rendered as a field with its help text and default value.
    """
    chunks = []
    if section:
        chunks.append(get_rst_title(section, "'"))
    if doc:
        chunks.append("%s\n\n" % normalize_text(doc, line_len=79, indent=""))
    for optname, optdict, value in options:
        chunks.append(":%s:\n" % optname)
        help_opt = optdict.get("help")
        if help_opt:
            chunks.append(
                "%s\n" % normalize_text(help_opt, line_len=79, indent="  ")
            )
        if value:
            value = str(_format_option_value(optdict, value))
            # escape literal backtick runs so the default renders correctly
            chunks.append(
                "\n  Default: ``%s``\n" % value.replace("`` ", "```` ``")
            )
    return "".join(chunks)
+
+
def safe_decode(line, encoding, *args, **kwargs):
    """Decode *line* with *encoding*, falling back to the default encoding."""
    codec = encoding or sys.getdefaultencoding()
    try:
        return line.decode(codec, *args, **kwargs)
    except LookupError:
        # unknown codec name: retry with the interpreter's default
        return line.decode(sys.getdefaultencoding(), *args, **kwargs)
+
+
def decoding_stream(stream, encoding, errors="strict"):
    """Wrap binary *stream* in a reader decoding with *encoding* (or default)."""
    codec = encoding or sys.getdefaultencoding()
    try:
        reader_cls = codecs.getreader(codec)
    except LookupError:
        # unknown codec name: fall back to the interpreter's default
        reader_cls = codecs.getreader(sys.getdefaultencoding())
    return reader_cls(stream, errors)
+
+
def tokenize_module(module):
    """Return the full token stream of *module*'s source (binary stream)."""
    with module.stream() as stream:
        return list(tokenize.tokenize(stream.readline))
+
+
+def _basename_in_blacklist_re(base_name, black_list_re):
+ """Determines if the basename is matched in a regex blacklist
+
+ :param str base_name: The basename of the file
+ :param list black_list_re: A collection of regex patterns to match against.
+ Successful matches are blacklisted.
+
+ :returns: `True` if the basename is blacklisted, `False` otherwise.
+ :rtype: bool
+ """
+ for file_pattern in black_list_re:
+ if file_pattern.match(base_name):
+ return True
+ return False
+
+
def _modpath_from_file(filename, is_namespace):
    """Compute the module path of *filename*; namespace packages count as
    packages even without an ``__init__`` file."""

    def _accepts_package(path, parts):
        # a regular package needs an __init__; a namespace package does not
        return modutils.check_modpath_has_init(path, parts) or is_namespace

    return modutils.modpath_from_file_with_callback(
        filename, is_package_cb=_accepts_package
    )
+
+
def expand_modules(files_or_modules, black_list, black_list_re):
    """Resolve a list of files/modules/packages into the modules to check.

    :param files_or_modules: paths or dotted module names from the command line
    :param black_list: basenames to skip entirely
    :param black_list_re: compiled regex patterns; matching basenames are skipped
    :returns: a 2-tuple ``(result, errors)`` where ``result`` is a list of
        dicts with keys path/name/isarg/basepath/basename, and ``errors`` is
        a list of ``{"key", "mod", "ex"}`` dicts for unresolvable modules.
    """
    result = []
    errors = []
    for something in files_or_modules:
        # skip blacklisted basenames (exact match, then regex match)
        if basename(something) in black_list:
            continue
        if _basename_in_blacklist_re(basename(something), black_list_re):
            continue
        if exists(something):
            # this is a file or a directory
            try:
                modname = ".".join(modutils.modpath_from_file(something))
            except ImportError:
                # not importable as a module: fall back to the bare filename
                modname = splitext(basename(something))[0]
            if isdir(something):
                filepath = join(something, "__init__.py")
            else:
                filepath = something
        else:
            # suppose it's a module or package
            modname = something
            try:
                filepath = modutils.file_from_modpath(modname.split("."))
                if filepath is None:
                    continue
            except (ImportError, SyntaxError) as ex:
                # The SyntaxError is a Python bug and should be
                # removed once we move away from imp.find_module: http://bugs.python.org/issue10588
                errors.append({"key": "fatal", "mod": modname, "ex": ex})
                continue

        filepath = normpath(filepath)
        modparts = (modname or something).split(".")

        try:
            spec = modutils.file_info_from_modpath(modparts, path=sys.path)
        except ImportError:
            # Might not be acceptable, don't crash.
            is_namespace = False
            is_directory = isdir(something)
        else:
            is_namespace = modutils.is_namespace(spec)
            is_directory = modutils.is_directory(spec)

        # namespace packages have no file of their own; only their
        # submodules (collected below) are added to the result
        if not is_namespace:
            result.append(
                {
                    "path": filepath,
                    "name": modname,
                    "isarg": True,
                    "basepath": filepath,
                    "basename": modname,
                }
            )

        has_init = (
            not (modname.endswith(".__init__") or modname == "__init__")
            and basename(filepath) == "__init__.py"
        )

        # expand packages, namespaces and directories into their submodules
        if has_init or is_namespace or is_directory:
            for subfilepath in modutils.get_module_files(
                dirname(filepath), black_list, list_all=is_namespace
            ):
                if filepath == subfilepath:
                    continue
                if _basename_in_blacklist_re(basename(subfilepath), black_list_re):
                    continue

                modpath = _modpath_from_file(subfilepath, is_namespace)
                submodname = ".".join(modpath)
                result.append(
                    {
                        "path": subfilepath,
                        "name": submodname,
                        "isarg": False,
                        "basepath": filepath,
                        "basename": modname,
                    }
                )
    return result, errors
+
+
def register_plugins(linter, directory):
    """load all module and package in the given directory, looking for a
    'register' function in each one, used to register pylint checkers
    """
    imported = {}  # basenames already imported, to avoid double-loading
    for filename in listdir(directory):
        base, extension = splitext(filename)
        if base in imported or base == "__pycache__":
            continue
        # precedence note: this reads as
        # (extension in PY_EXTS and base != "__init__")
        # or (not extension and isdir(...)),
        # i.e. python source files (except __init__) or package directories
        if (
            extension in PY_EXTS
            and base != "__init__"
            or (not extension and isdir(join(directory, base)))
        ):
            try:
                module = modutils.load_module_from_file(join(directory, filename))
            except ValueError:
                # empty module name (usually emacs auto-save files)
                continue
            except ImportError as exc:
                print(
                    "Problem importing module %s: %s" % (filename, exc), file=sys.stderr
                )
            else:
                if hasattr(module, "register"):
                    module.register(linter)
                imported[base] = 1
+
+
def get_global_option(checker, option, default=None):
    """Retrieve *option* defined by *checker* or by any options provider.

    The checker's own config is consulted first, then every registered
    options provider; *default* is returned when the option is nowhere
    defined.
    """
    attr = option.replace("-", "_")
    # first, the checker's own config
    try:
        return getattr(checker.config, attr)
    except AttributeError:
        pass
    # then every known options provider
    for provider in checker.linter.options_providers:
        for opt in provider.options:
            if opt[0] == option:
                return getattr(provider.config, attr)
    return default
+
+
def deprecated_option(
    shortname=None, opt_type=None, help_msg=None, deprecation_msg=None
):
    """Build an optparse-style option dict for a deprecated option.

    The option is hidden and its callback writes *deprecation_msg* (if any)
    to stderr whenever the option is actually used.
    """

    def _warn_deprecated(option, optname, *args):  # pylint: disable=unused-argument
        if deprecation_msg:
            sys.stderr.write(deprecation_msg % (optname,))

    option = dict(
        help=help_msg,
        hide=True,
        type=opt_type,
        action="callback",
        callback=_warn_deprecated,
        deprecated=True,
    )
    if shortname:
        option["shortname"] = shortname
    return option
+
+
+def _splitstrip(string, sep=","):
+ """return a list of stripped string by splitting the string given as
+ argument on `sep` (',' by default). Empty string are discarded.
+
+ >>> _splitstrip('a, b, c , 4,,')
+ ['a', 'b', 'c', '4']
+ >>> _splitstrip('a')
+ ['a']
+ >>> _splitstrip('a,\nb,\nc,')
+ ['a', 'b', 'c']
+
+ :type string: str or unicode
+ :param string: a csv line
+
+ :type sep: str or unicode
+ :param sep: field separator, default to the comma (',')
+
+ :rtype: str or unicode
+ :return: the unquoted string (or the input string if it wasn't quoted)
+ """
+ return [word.strip() for word in string.split(sep) if word.strip()]
+
+
+def _unquote(string):
+ """remove optional quotes (simple or double) from the string
+
+ :type string: str or unicode
+ :param string: an optionally quoted string
+
+ :rtype: str or unicode
+ :return: the unquoted string (or the input string if it wasn't quoted)
+ """
+ if not string:
+ return string
+ if string[0] in "\"'":
+ string = string[1:]
+ if string[-1] in "\"'":
+ string = string[:-1]
+ return string
+
+
+def _check_csv(value):
+ if isinstance(value, (list, tuple)):
+ return value
+ return _splitstrip(value)
+
+
+def _comment(string):
+ """return string as a comment"""
+ lines = [line.strip() for line in string.splitlines()]
+ return "# " + ("%s# " % linesep).join(lines)
+
+
+def _format_option_value(optdict, value):
+ """return the user input's value from a 'compiled' value"""
+ if isinstance(value, (list, tuple)):
+ value = ",".join(_format_option_value(optdict, item) for item in value)
+ elif isinstance(value, dict):
+ value = ",".join("%s:%s" % (k, v) for k, v in value.items())
+ elif hasattr(value, "match"): # optdict.get('type') == 'regexp'
+ # compiled regexp
+ value = value.pattern
+ elif optdict.get("type") == "yn":
+ value = "yes" if value else "no"
+ elif isinstance(value, str) and value.isspace():
+ value = "'%s'" % value
+ return value
+
+
def format_section(stream, section, options, doc=None):
    """Write *options* as an INI ``[section]`` block to *stream*.

    :param stream: writable text stream
    :param section: section name, emitted as ``[section]``
    :param options: iterable of (name, optdict, value) triples
    :param doc: optional documentation, emitted as leading '#' comments
    """
    if doc:
        print(_comment(doc), file=stream)
    print("[%s]" % section, file=stream)
    _ini_format(stream, options)
+
+
def _ini_format(stream, options):
    """Write each option as an INI ``name=value`` line to *stream*.

    Help text is emitted as a leading '# ' comment block; a value of None
    produces a commented-out ``#name=`` placeholder.
    """
    for optname, optdict, value in options:
        value = _format_option_value(optdict, value)
        help_opt = optdict.get("help")
        if help_opt:
            help_opt = normalize_text(help_opt, line_len=79, indent="# ")
            print(file=stream)
            print(help_opt, file=stream)
        else:
            print(file=stream)
        if value is None:
            print("#%s=" % optname, file=stream)
        else:
            value = str(value).strip()
            # wrap comma-separated multi-value options one item per line,
            # aligned under the first value
            if re.match(r"^([\w-]+,)+[\w-]+$", str(value)):
                separator = "\n " + " " * len(optname)
                value = separator.join(x + "," for x in str(value).split(","))
                # remove trailing ',' from last element of the list
                value = value[:-1]
            print("%s=%s" % (optname, value), file=stream)