Diffstat (limited to 'venv/Lib/site-packages/astroid')
-rw-r--r--  venv/Lib/site-packages/astroid/__init__.py  166
-rw-r--r--  venv/Lib/site-packages/astroid/__pkginfo__.py  51
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc  bin 4434 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc  bin 1357 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc  bin 1444 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc  bin 7017 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc  bin 26379 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc  bin 15625 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc  bin 12292 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc  bin 4277 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc  bin 3518 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc  bin 9369 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc  bin 7264 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc  bin 21439 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc  bin 9301 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc  bin 5780 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc  bin 17081 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc  bin 120480 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc  bin 2054 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc  bin 8501 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc  bin 16680 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc  bin 11290 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc  bin 39150 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc  bin 70867 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc  bin 2502 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc  bin 3436 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc  bin 5704 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/_ast.py  49
-rw-r--r--  venv/Lib/site-packages/astroid/arguments.py  285
-rw-r--r--  venv/Lib/site-packages/astroid/as_string.py  633
-rw-r--r--  venv/Lib/site-packages/astroid/bases.py  542
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc  bin 1063 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc  bin 1575 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc  bin 19796 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc  bin 2492 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc  bin 850 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc  bin 3368 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc  bin 1278 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc  bin 682 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc  bin 1565 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc  bin 4591 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc  bin 4007 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc  bin 1924 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc  bin 10289 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc  bin 1305 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc  bin 718 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc  bin 2520 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc  bin 11452 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc  bin 2047 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc  bin 623 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc  bin 1029 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc  bin 1711 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc  bin 1261 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc  bin 7661 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc  bin 5199 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc  bin 8323 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc  bin 3232 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc  bin 1736 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc  bin 2189 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc  bin 2159 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc  bin 2092 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc  bin 2172 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc  bin 1066 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc  bin 5521 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc  bin 3600 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc  bin 3401 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc  bin 737 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc  bin 2337 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc  bin 629 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_argparse.py  33
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_attrs.py  65
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py  829
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_collections.py  74
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_crypt.py  26
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_curses.py  179
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_dataclasses.py  50
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_dateutil.py  28
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_fstrings.py  51
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_functools.py  158
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_gi.py  220
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_hashlib.py  67
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_http.py  201
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_io.py  45
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_mechanize.py  29
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py  106
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py  449
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_nose.py  77
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py  23
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py  29
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py  55
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py  43
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py  250
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py  105
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py  153
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py  70
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py  56
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py  75
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_pytest.py  88
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_qt.py  82
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_random.py  75
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_re.py  36
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_six.py  200
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_ssl.py  74
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_subprocess.py  111
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_threading.py  31
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_typing.py  96
-rw-r--r--  venv/Lib/site-packages/astroid/brain/brain_uuid.py  20
-rw-r--r--  venv/Lib/site-packages/astroid/builder.py  435
-rw-r--r--  venv/Lib/site-packages/astroid/context.py  179
-rw-r--r--  venv/Lib/site-packages/astroid/decorators.py  141
-rw-r--r--  venv/Lib/site-packages/astroid/exceptions.py  230
-rw-r--r--  venv/Lib/site-packages/astroid/helpers.py  273
-rw-r--r--  venv/Lib/site-packages/astroid/inference.py  943
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/__init__.py  0
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc  bin 184 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc  bin 2137 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc  bin 25143 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/_import/__init__.py  0
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc  bin 192 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc  bin 9399 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc  bin 408 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/_import/spec.py  344
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/_import/util.py  10
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py  66
-rw-r--r--  venv/Lib/site-packages/astroid/interpreter/objectmodel.py  738
-rw-r--r--  venv/Lib/site-packages/astroid/manager.py  337
-rw-r--r--  venv/Lib/site-packages/astroid/mixins.py  160
-rw-r--r--  venv/Lib/site-packages/astroid/modutils.py  698
-rw-r--r--  venv/Lib/site-packages/astroid/node_classes.py  4775
-rw-r--r--  venv/Lib/site-packages/astroid/nodes.py  175
-rw-r--r--  venv/Lib/site-packages/astroid/objects.py  282
-rw-r--r--  venv/Lib/site-packages/astroid/protocols.py  766
-rw-r--r--  venv/Lib/site-packages/astroid/raw_building.py  468
-rw-r--r--  venv/Lib/site-packages/astroid/rebuilder.py  1090
-rw-r--r--  venv/Lib/site-packages/astroid/scoped_nodes.py  2836
-rw-r--r--  venv/Lib/site-packages/astroid/test_utils.py  73
-rw-r--r--  venv/Lib/site-packages/astroid/transforms.py  90
-rw-r--r--  venv/Lib/site-packages/astroid/util.py  164
138 files changed, 0 insertions, 21358 deletions
diff --git a/venv/Lib/site-packages/astroid/__init__.py b/venv/Lib/site-packages/astroid/__init__.py
deleted file mode 100644
index d36a5b4..0000000
--- a/venv/Lib/site-packages/astroid/__init__.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Python Abstract Syntax Tree New Generation
-
-The aim of this module is to provide a common base representation of
-python source code for projects such as pychecker, pyreverse,
-pylint... Well, actually the development of this library is essentially
-governed by pylint's needs.
-
-It extends the classes defined in Python's _ast module with some
-additional methods and attributes. Instance attributes are added by a
-builder object, which can generate extended ast nodes (let's call
-them astroid ;) either by visiting an existing ast tree or by inspecting living
-objects. Methods are added by monkey patching ast classes.
-
-Main modules are:
-
-* nodes and scoped_nodes for more information about methods and
- attributes added to different node classes
-
-* the manager contains a high level object to get astroid trees from
- source files and living objects. It maintains a cache of previously
- constructed trees for quick access
-
-* builder contains the class responsible for building astroid trees
-"""
-
-import enum
-import itertools
-import os
-import sys
-
-import wrapt
-
-
-_Context = enum.Enum("Context", "Load Store Del")
-Load = _Context.Load
-Store = _Context.Store
-Del = _Context.Del
-del _Context
-
-
-from .__pkginfo__ import version as __version__
-
-# WARNING: internal imports order matters !
-
-# pylint: disable=redefined-builtin
-
-# make all exception classes accessible from astroid package
-from astroid.exceptions import *
-
-# make all node classes accessible from astroid package
-from astroid.nodes import *
-
-# trigger extra monkey-patching
-from astroid import inference
-
-# more stuff available
-from astroid import raw_building
-from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod
-from astroid.node_classes import are_exclusive, unpack_infer
-from astroid.scoped_nodes import builtin_lookup
-from astroid.builder import parse, extract_node
-from astroid.util import Uninferable
-
-# make a manager instance (borg) accessible from astroid package
-from astroid.manager import AstroidManager
-
-MANAGER = AstroidManager()
-del AstroidManager
-
-# transform utilities (filters and decorator)
-
-
-# pylint: disable=dangerous-default-value
-@wrapt.decorator
-def _inference_tip_cached(func, instance, args, kwargs, _cache={}):
- """Cache decorator used for inference tips"""
- node = args[0]
- try:
- return iter(_cache[func, node])
- except KeyError:
- result = func(*args, **kwargs)
- # Need to keep an iterator around
- original, copy = itertools.tee(result)
- _cache[func, node] = list(copy)
- return original
-
-
-# pylint: enable=dangerous-default-value
-
-
-def inference_tip(infer_function, raise_on_overwrite=False):
- """Given an instance specific inference function, return a function to be
- given to MANAGER.register_transform to set this inference function.
-
- :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
- if the inference tip will overwrite another. Used for debugging
-
- Typical usage
-
- .. sourcecode:: python
-
- MANAGER.register_transform(Call, inference_tip(infer_named_tuple),
- predicate)
-
- .. Note::
-
- Using an inference tip will override
- any previously set inference tip for the given
- node. Use a predicate in the transform to prevent
- excess overwrites.
- """
-
- def transform(node, infer_function=infer_function):
- if (
- raise_on_overwrite
- and node._explicit_inference is not None
- and node._explicit_inference is not infer_function
- ):
- raise InferenceOverwriteError(
- "Inference already set to {existing_inference}. "
- "Trying to overwrite with {new_inference} for {node}".format(
- existing_inference=infer_function,
- new_inference=node._explicit_inference,
- node=node,
- )
- )
- # pylint: disable=no-value-for-parameter
- node._explicit_inference = _inference_tip_cached(infer_function)
- return node
-
- return transform
-
-
-def register_module_extender(manager, module_name, get_extension_mod):
- def transform(node):
- extension_module = get_extension_mod()
- for name, objs in extension_module.locals.items():
- node.locals[name] = objs
- for obj in objs:
- if obj.parent is extension_module:
- obj.parent = node
-
- manager.register_transform(Module, transform, lambda n: n.name == module_name)
-
-
-# load brain plugins
-BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), "brain")
-if BRAIN_MODULES_DIR not in sys.path:
- # add it to the end of the list so user paths take precedence
- sys.path.append(BRAIN_MODULES_DIR)
-# load modules in this directory
-for module in os.listdir(BRAIN_MODULES_DIR):
- if module.endswith(".py"):
- __import__(module[:-3])
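The deleted __init__.py above describes the package's entry points (parse/extract_node, the borg MANAGER, and the brain plugin loader). As a minimal illustrative sketch only, assuming astroid 2.3.x is importable and using made-up sample source, typical use of those entry points looks like:

    import astroid

    # Build a tree from source text and inspect a node.
    module = astroid.parse("def add(a, b):\n    return a + b\n")
    print(module.body[0].name)          # -> "add"

    # extract_node picks out the statement marked with  #@ ; inference then
    # follows the call back to the function's return value.
    call = astroid.extract_node(
        "def add(a, b):\n"
        "    return a + b\n"
        "add(1, 2)  #@\n"
    )
    print(next(call.infer()))           # a Const node (value 3), if inference succeeds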
diff --git a/venv/Lib/site-packages/astroid/__pkginfo__.py b/venv/Lib/site-packages/astroid/__pkginfo__.py
deleted file mode 100644
index 4a17b5d..0000000
--- a/venv/Lib/site-packages/astroid/__pkginfo__.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2015-2017 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
-# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
-# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 Calen Pennington <cale@edx.org>
-# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""astroid packaging information"""
-
-version = "2.3.3"
-numversion = tuple(int(elem) for elem in version.split(".") if elem.isdigit())
-
-extras_require = {}
-install_requires = [
- "lazy_object_proxy==1.4.*",
- "six~=1.12",
- "wrapt==1.11.*",
- 'typed-ast>=1.4.0,<1.5;implementation_name== "cpython" and python_version<"3.8"',
-]
-
-# pylint: disable=redefined-builtin; why license is a builtin anyway?
-license = "LGPL"
-
-author = "Python Code Quality Authority"
-author_email = "code-quality@python.org"
-mailinglist = "mailto://%s" % author_email
-web = "https://github.com/PyCQA/astroid"
-
-description = "An abstract syntax tree for Python with inference support."
-
-classifiers = [
- "Topic :: Software Development :: Libraries :: Python Modules",
- "Topic :: Software Development :: Quality Assurance",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.5",
- "Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: Implementation :: CPython",
- "Programming Language :: Python :: Implementation :: PyPy",
-]
diff --git a/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index eb28207..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc
deleted file mode 100644
index ed3f17b..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc
deleted file mode 100644
index c6f8a74..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc
deleted file mode 100644
index 64896f7..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc
deleted file mode 100644
index 372e534..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc
deleted file mode 100644
index 366b834..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc
deleted file mode 100644
index 6ff12eb..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc
deleted file mode 100644
index 777eede..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc
deleted file mode 100644
index 1bc12f8..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc
deleted file mode 100644
index 211001b..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc
deleted file mode 100644
index bae7ec3..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc
deleted file mode 100644
index c9328c1..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc
deleted file mode 100644
index 31b45d7..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc
deleted file mode 100644
index 7b5b9e4..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc
deleted file mode 100644
index a0f3b48..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc
deleted file mode 100644
index 7abdd4b..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc
deleted file mode 100644
index 18c04f8..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc
deleted file mode 100644
index 460886a..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc
deleted file mode 100644
index d628662..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc
deleted file mode 100644
index 0b414cf..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc
deleted file mode 100644
index 13516ca..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc
deleted file mode 100644
index d767b50..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc
deleted file mode 100644
index 4b6fba6..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc
deleted file mode 100644
index b2f4230..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc b/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc
deleted file mode 100644
index b5e4fe7..0000000
--- a/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/_ast.py b/venv/Lib/site-packages/astroid/_ast.py
deleted file mode 100644
index 2e44c1f..0000000
--- a/venv/Lib/site-packages/astroid/_ast.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import ast
-from collections import namedtuple
-from functools import partial
-from typing import Optional
-import sys
-
-_ast_py2 = _ast_py3 = None
-try:
- import typed_ast.ast3 as _ast_py3
- import typed_ast.ast27 as _ast_py2
-except ImportError:
- pass
-
-
-PY38 = sys.version_info[:2] >= (3, 8)
-if PY38:
- # On Python 3.8, typed_ast was merged back into `ast`
- _ast_py3 = ast
-
-
-FunctionType = namedtuple("FunctionType", ["argtypes", "returns"])
-
-
-def _get_parser_module(parse_python_two: bool = False):
- if parse_python_two:
- parser_module = _ast_py2
- else:
- parser_module = _ast_py3
- return parser_module or ast
-
-
-def _parse(string: str, parse_python_two: bool = False):
- parse_module = _get_parser_module(parse_python_two=parse_python_two)
- parse_func = parse_module.parse
- if _ast_py3:
- if PY38:
- parse_func = partial(parse_func, type_comments=True)
- if not parse_python_two:
- parse_func = partial(parse_func, feature_version=sys.version_info.minor)
- return parse_func(string)
-
-
-def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
- """Given a correct type comment, obtain a FunctionType object"""
- if _ast_py3 is None:
- return None
-
- func_type = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
- return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
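parse_function_type_comment above wraps the "func_type" parse mode provided by typed_ast (and by the stdlib ast module on 3.8+). A rough standalone sketch of that mode, using only the standard library and an invented example comment rather than astroid's internal API:

    import ast

    # "(int, str) -> bool" is the body of a function `# type:` comment.
    func_type = ast.parse("(int, str) -> bool", mode="func_type")
    print([ast.dump(arg) for arg in func_type.argtypes])   # argument annotations
    print(ast.dump(func_type.returns))                     # return annotation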
diff --git a/venv/Lib/site-packages/astroid/arguments.py b/venv/Lib/site-packages/astroid/arguments.py
deleted file mode 100644
index c4bdc6d..0000000
--- a/venv/Lib/site-packages/astroid/arguments.py
+++ /dev/null
@@ -1,285 +0,0 @@
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-from astroid import bases
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import nodes
-from astroid import util
-
-
-class CallSite:
- """Class for understanding arguments passed into a call site
-
- It needs a call context, which contains the arguments and the
- keyword arguments that were passed into a given call site.
- In order to infer what an argument represents, call
- :meth:`infer_argument` with the corresponding function node
- and the argument name.
- """
-
- def __init__(self, callcontext, argument_context_map=None):
- if argument_context_map is None:
- argument_context_map = {}
- self.argument_context_map = argument_context_map
- args = callcontext.args
- keywords = callcontext.keywords
- self.duplicated_keywords = set()
- self._unpacked_args = self._unpack_args(args)
- self._unpacked_kwargs = self._unpack_keywords(keywords)
-
- self.positional_arguments = [
- arg for arg in self._unpacked_args if arg is not util.Uninferable
- ]
- self.keyword_arguments = {
- key: value
- for key, value in self._unpacked_kwargs.items()
- if value is not util.Uninferable
- }
-
- @classmethod
- def from_call(cls, call_node):
- """Get a CallSite object from the given Call node."""
- callcontext = contextmod.CallContext(call_node.args, call_node.keywords)
- return cls(callcontext)
-
- def has_invalid_arguments(self):
- """Check if in the current CallSite were passed *invalid* arguments
-
- This can mean multiple things. For instance, if an unpacking
- of an invalid object was passed, then this method will return True.
- Other cases can be when the arguments can't be inferred by astroid,
- for example, by passing objects which aren't known statically.
- """
- return len(self.positional_arguments) != len(self._unpacked_args)
-
- def has_invalid_keywords(self):
- """Check if in the current CallSite were passed *invalid* keyword arguments
-
- For instance, unpacking a dictionary with integer keys is invalid
- (**{1:2}), because the keys must be strings, which will make this
- method to return True. Other cases where this might return True if
- objects which can't be inferred were passed.
- """
- return len(self.keyword_arguments) != len(self._unpacked_kwargs)
-
- def _unpack_keywords(self, keywords):
- values = {}
- context = contextmod.InferenceContext()
- context.extra_context = self.argument_context_map
- for name, value in keywords:
- if name is None:
- # Then it's an unpacking operation (**)
- try:
- inferred = next(value.infer(context=context))
- except exceptions.InferenceError:
- values[name] = util.Uninferable
- continue
-
- if not isinstance(inferred, nodes.Dict):
- # Not something we can work with.
- values[name] = util.Uninferable
- continue
-
- for dict_key, dict_value in inferred.items:
- try:
- dict_key = next(dict_key.infer(context=context))
- except exceptions.InferenceError:
- values[name] = util.Uninferable
- continue
- if not isinstance(dict_key, nodes.Const):
- values[name] = util.Uninferable
- continue
- if not isinstance(dict_key.value, str):
- values[name] = util.Uninferable
- continue
- if dict_key.value in values:
- # The name is already in the dictionary
- values[dict_key.value] = util.Uninferable
- self.duplicated_keywords.add(dict_key.value)
- continue
- values[dict_key.value] = dict_value
- else:
- values[name] = value
- return values
-
- def _unpack_args(self, args):
- values = []
- context = contextmod.InferenceContext()
- context.extra_context = self.argument_context_map
- for arg in args:
- if isinstance(arg, nodes.Starred):
- try:
- inferred = next(arg.value.infer(context=context))
- except exceptions.InferenceError:
- values.append(util.Uninferable)
- continue
-
- if inferred is util.Uninferable:
- values.append(util.Uninferable)
- continue
- if not hasattr(inferred, "elts"):
- values.append(util.Uninferable)
- continue
- values.extend(inferred.elts)
- else:
- values.append(arg)
- return values
-
- def infer_argument(self, funcnode, name, context):
- """infer a function argument value according to the call context
-
- Arguments:
- funcnode: The function being called.
- name: The name of the argument whose value is being inferred.
- context: Inference context object
- """
- if name in self.duplicated_keywords:
- raise exceptions.InferenceError(
- "The arguments passed to {func!r} " " have duplicate keywords.",
- call_site=self,
- func=funcnode,
- arg=name,
- context=context,
- )
-
- # Look into the keywords first, maybe it's already there.
- try:
- return self.keyword_arguments[name].infer(context)
- except KeyError:
- pass
-
- # Too many arguments given and no variable arguments.
- if len(self.positional_arguments) > len(funcnode.args.args):
- if not funcnode.args.vararg:
- raise exceptions.InferenceError(
- "Too many positional arguments "
- "passed to {func!r} that does "
- "not have *args.",
- call_site=self,
- func=funcnode,
- arg=name,
- context=context,
- )
-
- positional = self.positional_arguments[: len(funcnode.args.args)]
- vararg = self.positional_arguments[len(funcnode.args.args) :]
- argindex = funcnode.args.find_argname(name)[0]
- kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
- kwargs = {
- key: value
- for key, value in self.keyword_arguments.items()
- if key not in kwonlyargs
- }
- # If there are too few positionals compared to
- # what the function expects to receive, check to see
- # if the missing positional arguments were passed
- # as keyword arguments and if so, place them into the
- # positional args list.
- if len(positional) < len(funcnode.args.args):
- for func_arg in funcnode.args.args:
- if func_arg.name in kwargs:
- arg = kwargs.pop(func_arg.name)
- positional.append(arg)
-
- if argindex is not None:
- # 2. first argument of instance/class method
- if argindex == 0 and funcnode.type in ("method", "classmethod"):
- if context.boundnode is not None:
- boundnode = context.boundnode
- else:
- # XXX can do better ?
- boundnode = funcnode.parent.frame()
-
- if isinstance(boundnode, nodes.ClassDef):
- # Verify that we're accessing a method
- # of the metaclass through a class, as in
- # `cls.metaclass_method`. In this case, the
- # first argument is always the class.
- method_scope = funcnode.parent.scope()
- if method_scope is boundnode.metaclass():
- return iter((boundnode,))
-
- if funcnode.type == "method":
- if not isinstance(boundnode, bases.Instance):
- boundnode = bases.Instance(boundnode)
- return iter((boundnode,))
- if funcnode.type == "classmethod":
- return iter((boundnode,))
- # if we have a method, extract one position
- # from the index, so we'll take into account
- # the extra parameter represented by `self` or `cls`
- if funcnode.type in ("method", "classmethod"):
- argindex -= 1
- # 2. search arg index
- try:
- return self.positional_arguments[argindex].infer(context)
- except IndexError:
- pass
-
- if funcnode.args.kwarg == name:
- # It wants all the keywords that were passed into
- # the call site.
- if self.has_invalid_keywords():
- raise exceptions.InferenceError(
- "Inference failed to find values for all keyword arguments "
- "to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
- "{keyword_arguments!r}.",
- keyword_arguments=self.keyword_arguments,
- unpacked_kwargs=self._unpacked_kwargs,
- call_site=self,
- func=funcnode,
- arg=name,
- context=context,
- )
- kwarg = nodes.Dict(
- lineno=funcnode.args.lineno,
- col_offset=funcnode.args.col_offset,
- parent=funcnode.args,
- )
- kwarg.postinit(
- [(nodes.const_factory(key), value) for key, value in kwargs.items()]
- )
- return iter((kwarg,))
- if funcnode.args.vararg == name:
- # It wants all the args that were passed into
- # the call site.
- if self.has_invalid_arguments():
- raise exceptions.InferenceError(
- "Inference failed to find values for all positional "
- "arguments to {func!r}: {unpacked_args!r} doesn't "
- "correspond to {positional_arguments!r}.",
- positional_arguments=self.positional_arguments,
- unpacked_args=self._unpacked_args,
- call_site=self,
- func=funcnode,
- arg=name,
- context=context,
- )
- args = nodes.Tuple(
- lineno=funcnode.args.lineno,
- col_offset=funcnode.args.col_offset,
- parent=funcnode.args,
- )
- args.postinit(vararg)
- return iter((args,))
-
- # Check if it's a default parameter.
- try:
- return funcnode.args.default_value(name).infer(context)
- except exceptions.NoDefault:
- pass
- raise exceptions.InferenceError(
- "No value found for argument {name} to " "{func!r}",
- call_site=self,
- func=funcnode,
- arg=name,
- context=context,
- )
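The CallSite docstring above says to call infer_argument with the function node and an argument name. A hedged usage sketch, with an invented snippet and assuming astroid 2.3.x:

    import astroid
    from astroid import arguments, context as contextmod

    call = astroid.extract_node(
        "def greet(name, punctuation='!'):\n"
        "    return name + punctuation\n"
        "greet('hi')  #@\n"
    )
    funcnode = next(call.func.infer())       # the FunctionDef for greet
    site = arguments.CallSite.from_call(call)
    inferred = next(site.infer_argument(funcnode, "name", contextmod.InferenceContext()))
    print(inferred)                          # Const node holding 'hi'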
diff --git a/venv/Lib/site-packages/astroid/as_string.py b/venv/Lib/site-packages/astroid/as_string.py
deleted file mode 100644
index 3cd6e0d..0000000
--- a/venv/Lib/site-packages/astroid/as_string.py
+++ /dev/null
@@ -1,633 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
-# Copyright (c) 2013-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 rr- <rr-@sakuya.pl>
-# Copyright (c) 2018 brendanator <brendan.maginnis@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""This module renders Astroid nodes as string:
-
-* :func:`to_code` function return equivalent (hopefully valid) python string
-
-* :func:`dump` function return an internal representation of nodes found
- in the tree, useful for debugging or understanding the tree structure
-"""
-
-# pylint: disable=unused-argument
-
-DOC_NEWLINE = "\0"
-
-
-class AsStringVisitor:
- """Visitor to render an Astroid node as a valid python code string"""
-
- def __init__(self, indent):
- self.indent = indent
-
- def __call__(self, node):
- """Makes this visitor behave as a simple function"""
- return node.accept(self).replace(DOC_NEWLINE, "\n")
-
- def _docs_dedent(self, doc):
- """Stop newlines in docs being indented by self._stmt_list"""
- return '\n%s"""%s"""' % (self.indent, doc.replace("\n", DOC_NEWLINE))
-
- def _stmt_list(self, stmts, indent=True):
- """return a list of nodes to string"""
- stmts = "\n".join(nstr for nstr in [n.accept(self) for n in stmts] if nstr)
- if indent:
- return self.indent + stmts.replace("\n", "\n" + self.indent)
-
- return stmts
-
- def _precedence_parens(self, node, child, is_left=True):
- """Wrap child in parens only if required to keep same semantics"""
- if self._should_wrap(node, child, is_left):
- return "(%s)" % child.accept(self)
-
- return child.accept(self)
-
- def _should_wrap(self, node, child, is_left):
- """Wrap child if:
- - it has lower precedence
- - same precedence with position opposite to associativity direction
- """
- node_precedence = node.op_precedence()
- child_precedence = child.op_precedence()
-
- if node_precedence > child_precedence:
- # 3 * (4 + 5)
- return True
-
- if (
- node_precedence == child_precedence
- and is_left != node.op_left_associative()
- ):
- # 3 - (4 - 5)
- # (2**3)**4
- return True
-
- return False
-
- ## visit_<node> methods ###########################################
-
- def visit_arguments(self, node):
- """return an astroid.Function node as string"""
- return node.format_args()
-
- def visit_assignattr(self, node):
- """return an astroid.AssAttr node as string"""
- return self.visit_attribute(node)
-
- def visit_assert(self, node):
- """return an astroid.Assert node as string"""
- if node.fail:
- return "assert %s, %s" % (node.test.accept(self), node.fail.accept(self))
- return "assert %s" % node.test.accept(self)
-
- def visit_assignname(self, node):
- """return an astroid.AssName node as string"""
- return node.name
-
- def visit_assign(self, node):
- """return an astroid.Assign node as string"""
- lhs = " = ".join(n.accept(self) for n in node.targets)
- return "%s = %s" % (lhs, node.value.accept(self))
-
- def visit_augassign(self, node):
- """return an astroid.AugAssign node as string"""
- return "%s %s %s" % (node.target.accept(self), node.op, node.value.accept(self))
-
- def visit_annassign(self, node):
- """Return an astroid.AugAssign node as string"""
-
- target = node.target.accept(self)
- annotation = node.annotation.accept(self)
- if node.value is None:
- return "%s: %s" % (target, annotation)
- return "%s: %s = %s" % (target, annotation, node.value.accept(self))
-
- def visit_repr(self, node):
- """return an astroid.Repr node as string"""
- return "`%s`" % node.value.accept(self)
-
- def visit_binop(self, node):
- """return an astroid.BinOp node as string"""
- left = self._precedence_parens(node, node.left)
- right = self._precedence_parens(node, node.right, is_left=False)
- if node.op == "**":
- return "%s%s%s" % (left, node.op, right)
-
- return "%s %s %s" % (left, node.op, right)
-
- def visit_boolop(self, node):
- """return an astroid.BoolOp node as string"""
- values = ["%s" % self._precedence_parens(node, n) for n in node.values]
- return (" %s " % node.op).join(values)
-
- def visit_break(self, node):
- """return an astroid.Break node as string"""
- return "break"
-
- def visit_call(self, node):
- """return an astroid.Call node as string"""
- expr_str = self._precedence_parens(node, node.func)
- args = [arg.accept(self) for arg in node.args]
- if node.keywords:
- keywords = [kwarg.accept(self) for kwarg in node.keywords]
- else:
- keywords = []
-
- args.extend(keywords)
- return "%s(%s)" % (expr_str, ", ".join(args))
-
- def visit_classdef(self, node):
- """return an astroid.ClassDef node as string"""
- decorate = node.decorators.accept(self) if node.decorators else ""
- bases = ", ".join(n.accept(self) for n in node.bases)
- metaclass = node.metaclass()
- if metaclass and not node.has_metaclass_hack():
- if bases:
- bases = "(%s, metaclass=%s)" % (bases, metaclass.name)
- else:
- bases = "(metaclass=%s)" % metaclass.name
- else:
- bases = "(%s)" % bases if bases else ""
- docs = self._docs_dedent(node.doc) if node.doc else ""
- return "\n\n%sclass %s%s:%s\n%s\n" % (
- decorate,
- node.name,
- bases,
- docs,
- self._stmt_list(node.body),
- )
-
- def visit_compare(self, node):
- """return an astroid.Compare node as string"""
- rhs_str = " ".join(
- [
- "%s %s" % (op, self._precedence_parens(node, expr, is_left=False))
- for op, expr in node.ops
- ]
- )
- return "%s %s" % (self._precedence_parens(node, node.left), rhs_str)
-
- def visit_comprehension(self, node):
- """return an astroid.Comprehension node as string"""
- ifs = "".join(" if %s" % n.accept(self) for n in node.ifs)
- return "for %s in %s%s" % (
- node.target.accept(self),
- node.iter.accept(self),
- ifs,
- )
-
- def visit_const(self, node):
- """return an astroid.Const node as string"""
- if node.value is Ellipsis:
- return "..."
- return repr(node.value)
-
- def visit_continue(self, node):
- """return an astroid.Continue node as string"""
- return "continue"
-
- def visit_delete(self, node): # XXX check if correct
- """return an astroid.Delete node as string"""
- return "del %s" % ", ".join(child.accept(self) for child in node.targets)
-
- def visit_delattr(self, node):
- """return an astroid.DelAttr node as string"""
- return self.visit_attribute(node)
-
- def visit_delname(self, node):
- """return an astroid.DelName node as string"""
- return node.name
-
- def visit_decorators(self, node):
- """return an astroid.Decorators node as string"""
- return "@%s\n" % "\n@".join(item.accept(self) for item in node.nodes)
-
- def visit_dict(self, node):
- """return an astroid.Dict node as string"""
- return "{%s}" % ", ".join(self._visit_dict(node))
-
- def _visit_dict(self, node):
- for key, value in node.items:
- key = key.accept(self)
- value = value.accept(self)
- if key == "**":
- # It can only be a DictUnpack node.
- yield key + value
- else:
- yield "%s: %s" % (key, value)
-
- def visit_dictunpack(self, node):
- return "**"
-
- def visit_dictcomp(self, node):
- """return an astroid.DictComp node as string"""
- return "{%s: %s %s}" % (
- node.key.accept(self),
- node.value.accept(self),
- " ".join(n.accept(self) for n in node.generators),
- )
-
- def visit_expr(self, node):
- """return an astroid.Discard node as string"""
- return node.value.accept(self)
-
- def visit_emptynode(self, node):
- """dummy method for visiting an Empty node"""
- return ""
-
- def visit_excepthandler(self, node):
- if node.type:
- if node.name:
- excs = "except %s, %s" % (
- node.type.accept(self),
- node.name.accept(self),
- )
- else:
- excs = "except %s" % node.type.accept(self)
- else:
- excs = "except"
- return "%s:\n%s" % (excs, self._stmt_list(node.body))
-
- def visit_ellipsis(self, node):
- """return an astroid.Ellipsis node as string"""
- return "..."
-
- def visit_empty(self, node):
- """return an Empty node as string"""
- return ""
-
- def visit_exec(self, node):
- """return an astroid.Exec node as string"""
- if node.locals:
- return "exec %s in %s, %s" % (
- node.expr.accept(self),
- node.locals.accept(self),
- node.globals.accept(self),
- )
- if node.globals:
- return "exec %s in %s" % (node.expr.accept(self), node.globals.accept(self))
- return "exec %s" % node.expr.accept(self)
-
- def visit_extslice(self, node):
- """return an astroid.ExtSlice node as string"""
- return ", ".join(dim.accept(self) for dim in node.dims)
-
- def visit_for(self, node):
- """return an astroid.For node as string"""
- fors = "for %s in %s:\n%s" % (
- node.target.accept(self),
- node.iter.accept(self),
- self._stmt_list(node.body),
- )
- if node.orelse:
- fors = "%s\nelse:\n%s" % (fors, self._stmt_list(node.orelse))
- return fors
-
- def visit_importfrom(self, node):
- """return an astroid.ImportFrom node as string"""
- return "from %s import %s" % (
- "." * (node.level or 0) + node.modname,
- _import_string(node.names),
- )
-
- def visit_functiondef(self, node):
- """return an astroid.Function node as string"""
- decorate = node.decorators.accept(self) if node.decorators else ""
- docs = self._docs_dedent(node.doc) if node.doc else ""
- trailer = ":"
- if node.returns:
- return_annotation = " -> " + node.returns.as_string()
- trailer = return_annotation + ":"
- def_format = "\n%sdef %s(%s)%s%s\n%s"
- return def_format % (
- decorate,
- node.name,
- node.args.accept(self),
- trailer,
- docs,
- self._stmt_list(node.body),
- )
-
- def visit_generatorexp(self, node):
- """return an astroid.GeneratorExp node as string"""
- return "(%s %s)" % (
- node.elt.accept(self),
- " ".join(n.accept(self) for n in node.generators),
- )
-
- def visit_attribute(self, node):
- """return an astroid.Getattr node as string"""
- return "%s.%s" % (self._precedence_parens(node, node.expr), node.attrname)
-
- def visit_global(self, node):
- """return an astroid.Global node as string"""
- return "global %s" % ", ".join(node.names)
-
- def visit_if(self, node):
- """return an astroid.If node as string"""
- ifs = ["if %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))]
- if node.has_elif_block():
- ifs.append("el%s" % self._stmt_list(node.orelse, indent=False))
- elif node.orelse:
- ifs.append("else:\n%s" % self._stmt_list(node.orelse))
- return "\n".join(ifs)
-
- def visit_ifexp(self, node):
- """return an astroid.IfExp node as string"""
- return "%s if %s else %s" % (
- self._precedence_parens(node, node.body, is_left=True),
- self._precedence_parens(node, node.test, is_left=True),
- self._precedence_parens(node, node.orelse, is_left=False),
- )
-
- def visit_import(self, node):
- """return an astroid.Import node as string"""
- return "import %s" % _import_string(node.names)
-
- def visit_keyword(self, node):
- """return an astroid.Keyword node as string"""
- if node.arg is None:
- return "**%s" % node.value.accept(self)
- return "%s=%s" % (node.arg, node.value.accept(self))
-
- def visit_lambda(self, node):
- """return an astroid.Lambda node as string"""
- args = node.args.accept(self)
- body = node.body.accept(self)
- if args:
- return "lambda %s: %s" % (args, body)
-
- return "lambda: %s" % body
-
- def visit_list(self, node):
- """return an astroid.List node as string"""
- return "[%s]" % ", ".join(child.accept(self) for child in node.elts)
-
- def visit_listcomp(self, node):
- """return an astroid.ListComp node as string"""
- return "[%s %s]" % (
- node.elt.accept(self),
- " ".join(n.accept(self) for n in node.generators),
- )
-
- def visit_module(self, node):
- """return an astroid.Module node as string"""
- docs = '"""%s"""\n\n' % node.doc if node.doc else ""
- return docs + "\n".join(n.accept(self) for n in node.body) + "\n\n"
-
- def visit_name(self, node):
- """return an astroid.Name node as string"""
- return node.name
-
- def visit_pass(self, node):
- """return an astroid.Pass node as string"""
- return "pass"
-
- def visit_print(self, node):
- """return an astroid.Print node as string"""
- nodes = ", ".join(n.accept(self) for n in node.values)
- if not node.nl:
- nodes = "%s," % nodes
- if node.dest:
- return "print >> %s, %s" % (node.dest.accept(self), nodes)
- return "print %s" % nodes
-
- def visit_raise(self, node):
- """return an astroid.Raise node as string"""
- if node.exc:
- if node.inst:
- if node.tback:
- return "raise %s, %s, %s" % (
- node.exc.accept(self),
- node.inst.accept(self),
- node.tback.accept(self),
- )
- return "raise %s, %s" % (node.exc.accept(self), node.inst.accept(self))
- return "raise %s" % node.exc.accept(self)
- return "raise"
-
- def visit_return(self, node):
- """return an astroid.Return node as string"""
- if node.is_tuple_return() and len(node.value.elts) > 1:
- elts = [child.accept(self) for child in node.value.elts]
- return "return %s" % ", ".join(elts)
-
- if node.value:
- return "return %s" % node.value.accept(self)
-
- return "return"
-
- def visit_index(self, node):
- """return an astroid.Index node as string"""
- return node.value.accept(self)
-
- def visit_set(self, node):
- """return an astroid.Set node as string"""
- return "{%s}" % ", ".join(child.accept(self) for child in node.elts)
-
- def visit_setcomp(self, node):
- """return an astroid.SetComp node as string"""
- return "{%s %s}" % (
- node.elt.accept(self),
- " ".join(n.accept(self) for n in node.generators),
- )
-
- def visit_slice(self, node):
- """return an astroid.Slice node as string"""
- lower = node.lower.accept(self) if node.lower else ""
- upper = node.upper.accept(self) if node.upper else ""
- step = node.step.accept(self) if node.step else ""
- if step:
- return "%s:%s:%s" % (lower, upper, step)
- return "%s:%s" % (lower, upper)
-
- def visit_subscript(self, node):
- """return an astroid.Subscript node as string"""
- idx = node.slice
- if idx.__class__.__name__.lower() == "index":
- idx = idx.value
- idxstr = idx.accept(self)
- if idx.__class__.__name__.lower() == "tuple" and idx.elts:
- # Remove parenthesis in tuple and extended slice.
- # a[(::1, 1:)] is not valid syntax.
- idxstr = idxstr[1:-1]
- return "%s[%s]" % (self._precedence_parens(node, node.value), idxstr)
-
- def visit_tryexcept(self, node):
- """return an astroid.TryExcept node as string"""
- trys = ["try:\n%s" % self._stmt_list(node.body)]
- for handler in node.handlers:
- trys.append(handler.accept(self))
- if node.orelse:
- trys.append("else:\n%s" % self._stmt_list(node.orelse))
- return "\n".join(trys)
-
- def visit_tryfinally(self, node):
- """return an astroid.TryFinally node as string"""
- return "try:\n%s\nfinally:\n%s" % (
- self._stmt_list(node.body),
- self._stmt_list(node.finalbody),
- )
-
- def visit_tuple(self, node):
- """return an astroid.Tuple node as string"""
- if len(node.elts) == 1:
- return "(%s, )" % node.elts[0].accept(self)
- return "(%s)" % ", ".join(child.accept(self) for child in node.elts)
-
- def visit_unaryop(self, node):
- """return an astroid.UnaryOp node as string"""
- if node.op == "not":
- operator = "not "
- else:
- operator = node.op
- return "%s%s" % (operator, self._precedence_parens(node, node.operand))
-
- def visit_while(self, node):
- """return an astroid.While node as string"""
- whiles = "while %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))
- if node.orelse:
- whiles = "%s\nelse:\n%s" % (whiles, self._stmt_list(node.orelse))
- return whiles
-
- def visit_with(self, node): # 'with' without 'as' is possible
- """return an astroid.With node as string"""
- items = ", ".join(
- ("%s" % expr.accept(self)) + (vars and " as %s" % (vars.accept(self)) or "")
- for expr, vars in node.items
- )
- return "with %s:\n%s" % (items, self._stmt_list(node.body))
-
- def visit_yield(self, node):
- """yield an ast.Yield node as string"""
- yi_val = (" " + node.value.accept(self)) if node.value else ""
- expr = "yield" + yi_val
- if node.parent.is_statement:
- return expr
-
- return "(%s)" % (expr,)
-
- def visit_starred(self, node):
- """return Starred node as string"""
- return "*" + node.value.accept(self)
-
- # These aren't for real AST nodes, but for inference objects.
-
- def visit_frozenset(self, node):
- return node.parent.accept(self)
-
- def visit_super(self, node):
- return node.parent.accept(self)
-
- def visit_uninferable(self, node):
- return str(node)
-
-
-class AsStringVisitor3(AsStringVisitor):
- """AsStringVisitor3 overwrites some AsStringVisitor methods"""
-
- def visit_excepthandler(self, node):
- if node.type:
- if node.name:
- excs = "except %s as %s" % (
- node.type.accept(self),
- node.name.accept(self),
- )
- else:
- excs = "except %s" % node.type.accept(self)
- else:
- excs = "except"
- return "%s:\n%s" % (excs, self._stmt_list(node.body))
-
- def visit_nonlocal(self, node):
- """return an astroid.Nonlocal node as string"""
- return "nonlocal %s" % ", ".join(node.names)
-
- def visit_raise(self, node):
- """return an astroid.Raise node as string"""
- if node.exc:
- if node.cause:
- return "raise %s from %s" % (
- node.exc.accept(self),
- node.cause.accept(self),
- )
- return "raise %s" % node.exc.accept(self)
- return "raise"
-
- def visit_yieldfrom(self, node):
- """ Return an astroid.YieldFrom node as string. """
- yi_val = (" " + node.value.accept(self)) if node.value else ""
- expr = "yield from" + yi_val
- if node.parent.is_statement:
- return expr
-
- return "(%s)" % (expr,)
-
- def visit_asyncfunctiondef(self, node):
- function = super(AsStringVisitor3, self).visit_functiondef(node)
- return "async " + function.strip()
-
- def visit_await(self, node):
- return "await %s" % node.value.accept(self)
-
- def visit_asyncwith(self, node):
- return "async %s" % self.visit_with(node)
-
- def visit_asyncfor(self, node):
- return "async %s" % self.visit_for(node)
-
- def visit_joinedstr(self, node):
- # Special treatment for constants,
- # as we want to join literals not reprs
- string = "".join(
- value.value if type(value).__name__ == "Const" else value.accept(self)
- for value in node.values
- )
- return "f'%s'" % string
-
- def visit_formattedvalue(self, node):
- return "{%s}" % node.value.accept(self)
-
- def visit_comprehension(self, node):
- """return an astroid.Comprehension node as string"""
- return "%s%s" % (
- "async " if node.is_async else "",
- super(AsStringVisitor3, self).visit_comprehension(node),
- )
-
- def visit_namedexpr(self, node):
- """Return an assignment expression node as string"""
- target = node.target.accept(self)
- value = node.value.accept(self)
- return "%s := %s" % (target, value)
-
-
-def _import_string(names):
- """return a list of (name, asname) formatted as a string"""
- _names = []
- for name, asname in names:
- if asname is not None:
- _names.append("%s as %s" % (name, asname))
- else:
- _names.append(name)
- return ", ".join(_names)
-
-
-AsStringVisitor = AsStringVisitor3
-
-# This sets the default indent to 4 spaces.
-to_code = AsStringVisitor(" ")
diff --git a/venv/Lib/site-packages/astroid/bases.py b/venv/Lib/site-packages/astroid/bases.py
deleted file mode 100644
index d5b042a..0000000
--- a/venv/Lib/site-packages/astroid/bases.py
+++ /dev/null
@@ -1,542 +0,0 @@
-# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2017 Calen Pennington <calen.pennington@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 Daniel Colascione <dancol@dancol.org>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""This module contains base classes and functions for the nodes and some
-inference utils.
-"""
-
-import builtins
-import collections
-
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import util
-
-objectmodel = util.lazy_import("interpreter.objectmodel")
-helpers = util.lazy_import("helpers")
-BUILTINS = builtins.__name__
-manager = util.lazy_import("manager")
-MANAGER = manager.AstroidManager()
-
-# TODO: check if needs special treatment
-BUILTINS = "builtins"
-BOOL_SPECIAL_METHOD = "__bool__"
-
-PROPERTIES = {BUILTINS + ".property", "abc.abstractproperty"}
-# List of possible property names. We use this list in order
-# to see if a method is a property or not. This should be
-# pretty reliable and fast, the alternative being to check each
-# decorator to see if it's a real property-like descriptor, which
-# can be too complicated.
-# Also, these aren't qualified, because each project can
-# define them, we shouldn't expect to know every possible
-# property-like decorator!
-POSSIBLE_PROPERTIES = {
- "cached_property",
- "cachedproperty",
- "lazyproperty",
- "lazy_property",
- "reify",
- "lazyattribute",
- "lazy_attribute",
- "LazyProperty",
- "lazy",
- "cache_readonly",
-}
-
-
-def _is_property(meth):
- if PROPERTIES.intersection(meth.decoratornames()):
- return True
- stripped = {
- name.split(".")[-1]
- for name in meth.decoratornames()
- if name is not util.Uninferable
- }
- if any(name in stripped for name in POSSIBLE_PROPERTIES):
- return True
-
- # Lookup for subclasses of *property*
- if not meth.decorators:
- return False
- for decorator in meth.decorators.nodes or ():
- inferred = helpers.safe_infer(decorator)
- if inferred is None or inferred is util.Uninferable:
- continue
- if inferred.__class__.__name__ == "ClassDef":
- for base_class in inferred.bases:
- if base_class.__class__.__name__ != "Name":
- continue
- module, _ = base_class.lookup(base_class.name)
- if module.name == BUILTINS and base_class.name == "property":
- return True
-
- return False
-
-
-class Proxy:
- """a simple proxy object
-
- Note:
-
- Subclasses of this object will need a custom __getattr__
- if new instance attributes are created. See the Const class
- """
-
- _proxied = None # proxied object may be set by class or by instance
-
- def __init__(self, proxied=None):
- if proxied is not None:
- self._proxied = proxied
-
- def __getattr__(self, name):
- if name == "_proxied":
- return getattr(self.__class__, "_proxied")
- if name in self.__dict__:
- return self.__dict__[name]
- return getattr(self._proxied, name)
-
- def infer(self, context=None):
- yield self
-
-
-def _infer_stmts(stmts, context, frame=None):
- """Return an iterator on statements inferred by each statement in *stmts*."""
- inferred = False
- if context is not None:
- name = context.lookupname
- context = context.clone()
- else:
- name = None
- context = contextmod.InferenceContext()
-
- for stmt in stmts:
- if stmt is util.Uninferable:
- yield stmt
- inferred = True
- continue
- context.lookupname = stmt._infer_name(frame, name)
- try:
- for inferred in stmt.infer(context=context):
- yield inferred
- inferred = True
- except exceptions.NameInferenceError:
- continue
- except exceptions.InferenceError:
- yield util.Uninferable
- inferred = True
- if not inferred:
- raise exceptions.InferenceError(
- "Inference failed for all members of {stmts!r}.",
- stmts=stmts,
- frame=frame,
- context=context,
- )
-
-
-def _infer_method_result_truth(instance, method_name, context):
- # Get the method from the instance and try to infer
- # its return's truth value.
- meth = next(instance.igetattr(method_name, context=context), None)
- if meth and hasattr(meth, "infer_call_result"):
- if not meth.callable():
- return util.Uninferable
- try:
- for value in meth.infer_call_result(instance, context=context):
- if value is util.Uninferable:
- return value
-
- inferred = next(value.infer(context=context))
- return inferred.bool_value()
- except exceptions.InferenceError:
- pass
- return util.Uninferable
-
-
-class BaseInstance(Proxy):
- """An instance base class, which provides lookup methods for potential instances."""
-
- special_attributes = None
-
- def display_type(self):
- return "Instance of"
-
- def getattr(self, name, context=None, lookupclass=True):
- try:
- values = self._proxied.instance_attr(name, context)
- except exceptions.AttributeInferenceError as exc:
- if self.special_attributes and name in self.special_attributes:
- return [self.special_attributes.lookup(name)]
-
- if lookupclass:
- # Class attributes not available through the instance
- # unless they are explicitly defined.
- return self._proxied.getattr(name, context, class_context=False)
-
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- ) from exc
- # since we've no context information, return matching class members as
- # well
- if lookupclass:
- try:
- return values + self._proxied.getattr(
- name, context, class_context=False
- )
- except exceptions.AttributeInferenceError:
- pass
- return values
-
- def igetattr(self, name, context=None):
- """inferred getattr"""
- if not context:
- context = contextmod.InferenceContext()
- try:
- # avoid recursively inferring the same attr on the same class
- if context.push((self._proxied, name)):
- raise exceptions.InferenceError(
- message="Cannot infer the same attribute again",
- node=self,
- context=context,
- )
-
- # XXX frame should be self._proxied, or not ?
- get_attr = self.getattr(name, context, lookupclass=False)
- yield from _infer_stmts(
- self._wrap_attr(get_attr, context), context, frame=self
- )
- except exceptions.AttributeInferenceError as error:
- try:
- # fall back to class.igetattr since it has some logic to handle
- # descriptors, but only if the _proxied is the Class.
- if self._proxied.__class__.__name__ != "ClassDef":
- raise
- attrs = self._proxied.igetattr(name, context, class_context=False)
- yield from self._wrap_attr(attrs, context)
- except exceptions.AttributeInferenceError as error:
- raise exceptions.InferenceError(**vars(error)) from error
-
- def _wrap_attr(self, attrs, context=None):
- """wrap bound methods of attrs in a InstanceMethod proxies"""
- for attr in attrs:
- if isinstance(attr, UnboundMethod):
- if _is_property(attr):
- yield from attr.infer_call_result(self, context)
- else:
- yield BoundMethod(attr, self)
- elif hasattr(attr, "name") and attr.name == "<lambda>":
- if attr.args.args and attr.args.args[0].name == "self":
- yield BoundMethod(attr, self)
- continue
- yield attr
- else:
- yield attr
-
- def infer_call_result(self, caller, context=None):
- """infer what a class instance is returning when called"""
- context = contextmod.bind_context_to_node(context, self)
- inferred = False
- for node in self._proxied.igetattr("__call__", context):
- if node is util.Uninferable or not node.callable():
- continue
- for res in node.infer_call_result(caller, context):
- inferred = True
- yield res
- if not inferred:
- raise exceptions.InferenceError(node=self, caller=caller, context=context)
-
-
-class Instance(BaseInstance):
- """A special node representing a class instance."""
-
- # pylint: disable=unnecessary-lambda
- special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel())
-
- def __repr__(self):
- return "<Instance of %s.%s at 0x%s>" % (
- self._proxied.root().name,
- self._proxied.name,
- id(self),
- )
-
- def __str__(self):
- return "Instance of %s.%s" % (self._proxied.root().name, self._proxied.name)
-
- def callable(self):
- try:
- self._proxied.getattr("__call__", class_context=False)
- return True
- except exceptions.AttributeInferenceError:
- return False
-
- def pytype(self):
- return self._proxied.qname()
-
- def display_type(self):
- return "Instance of"
-
- def bool_value(self):
- """Infer the truth value for an Instance
-
- The truth value of an instance is determined by these conditions:
-
- * if it implements __bool__ on Python 3 or __nonzero__
- on Python 2, then its bool value will be determined by
- calling this special method and checking its result.
- * when this method is not defined, __len__() is called, if it
- is defined, and the object is considered true if its result is
- nonzero. If a class defines neither __len__() nor __bool__(),
- all its instances are considered true.
- """
- context = contextmod.InferenceContext()
- context.callcontext = contextmod.CallContext(args=[])
- context.boundnode = self
-
- try:
- result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
- except (exceptions.InferenceError, exceptions.AttributeInferenceError):
- # Fallback to __len__.
- try:
- result = _infer_method_result_truth(self, "__len__", context)
- except (exceptions.AttributeInferenceError, exceptions.InferenceError):
- return True
- return result
-
- # This is set in inference.py.
- def getitem(self, index, context=None):
- pass
-
-
-class UnboundMethod(Proxy):
- """a special node representing a method not bound to an instance"""
-
- # pylint: disable=unnecessary-lambda
- special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel())
-
- def __repr__(self):
- frame = self._proxied.parent.frame()
- return "<%s %s of %s at 0x%s" % (
- self.__class__.__name__,
- self._proxied.name,
- frame.qname(),
- id(self),
- )
-
- def implicit_parameters(self):
- return 0
-
- def is_bound(self):
- return False
-
- def getattr(self, name, context=None):
- if name in self.special_attributes:
- return [self.special_attributes.lookup(name)]
- return self._proxied.getattr(name, context)
-
- def igetattr(self, name, context=None):
- if name in self.special_attributes:
- return iter((self.special_attributes.lookup(name),))
- return self._proxied.igetattr(name, context)
-
- def infer_call_result(self, caller, context):
- """
- The boundnode of the regular context with a function called
- on ``object.__new__`` will be of type ``object``,
- which is incorrect for the argument in general.
- If no context is given, the ``object.__new__`` call argument will be
- correctly inferred, except when inside a call that requires
- the additional context (such as a classmethod) of the boundnode
- to determine which class the method was called from.
- """
-
- # If we're unbound method __new__ of builtin object, the result is an
- # instance of the class given as first argument.
- if (
- self._proxied.name == "__new__"
- and self._proxied.parent.frame().qname() == "%s.object" % BUILTINS
- ):
- if caller.args:
- node_context = context.extra_context.get(caller.args[0])
- infer = caller.args[0].infer(context=node_context)
- else:
- infer = []
- return (Instance(x) if x is not util.Uninferable else x for x in infer)
- return self._proxied.infer_call_result(caller, context)
-
- def bool_value(self):
- return True
-
-
-class BoundMethod(UnboundMethod):
- """a special node representing a method bound to an instance"""
-
- # pylint: disable=unnecessary-lambda
- special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel())
-
- def __init__(self, proxy, bound):
- UnboundMethod.__init__(self, proxy)
- self.bound = bound
-
- def implicit_parameters(self):
- return 1
-
- def is_bound(self):
- return True
-
- def _infer_type_new_call(self, caller, context):
- """Try to infer what type.__new__(mcs, name, bases, attrs) returns.
-
- In order for such a call to be valid, the metaclass needs to be
- a subtype of ``type``, the name needs to be a string, and the bases
- need to be a tuple of classes.
- """
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import node_classes
-
- # Verify the metaclass
- mcs = next(caller.args[0].infer(context=context))
- if mcs.__class__.__name__ != "ClassDef":
- # Not a valid first argument.
- return None
- if not mcs.is_subtype_of("%s.type" % BUILTINS):
- # Not a valid metaclass.
- return None
-
- # Verify the name
- name = next(caller.args[1].infer(context=context))
- if name.__class__.__name__ != "Const":
- # Not a valid name, needs to be a const.
- return None
- if not isinstance(name.value, str):
- # Needs to be a string.
- return None
-
- # Verify the bases
- bases = next(caller.args[2].infer(context=context))
- if bases.__class__.__name__ != "Tuple":
- # Needs to be a tuple.
- return None
- inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
- if any(base.__class__.__name__ != "ClassDef" for base in inferred_bases):
- # All the bases need to be classes.
- return None
-
- # Verify the attributes.
- attrs = next(caller.args[3].infer(context=context))
- if attrs.__class__.__name__ != "Dict":
- # Needs to be a dictionary.
- return None
- cls_locals = collections.defaultdict(list)
- for key, value in attrs.items:
- key = next(key.infer(context=context))
- value = next(value.infer(context=context))
- # Ignore non string keys
- if key.__class__.__name__ == "Const" and isinstance(key.value, str):
- cls_locals[key.value].append(value)
-
- # Build the class from now.
- cls = mcs.__class__(
- name=name.value,
- lineno=caller.lineno,
- col_offset=caller.col_offset,
- parent=caller,
- )
- empty = node_classes.Pass()
- cls.postinit(
- bases=bases.elts,
- body=[empty],
- decorators=[],
- newstyle=True,
- metaclass=mcs,
- keywords=[],
- )
- cls.locals = cls_locals
- return cls
-
- def infer_call_result(self, caller, context=None):
- context = contextmod.bind_context_to_node(context, self.bound)
- if (
- self.bound.__class__.__name__ == "ClassDef"
- and self.bound.name == "type"
- and self.name == "__new__"
- and len(caller.args) == 4
- ):
- # Check if we have a ``type.__new__(mcs, name, bases, attrs)`` call.
- new_cls = self._infer_type_new_call(caller, context)
- if new_cls:
- return iter((new_cls,))
-
- return super(BoundMethod, self).infer_call_result(caller, context)
-
- def bool_value(self):
- return True
-
-
-class Generator(BaseInstance):
- """a special node representing a generator.
-
- Proxied class is set once and for all in raw_building.
- """
-
- # pylint: disable=unnecessary-lambda
- special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel())
-
- # pylint: disable=super-init-not-called
- def __init__(self, parent=None):
- self.parent = parent
-
- def callable(self):
- return False
-
- def pytype(self):
- return "%s.generator" % BUILTINS
-
- def display_type(self):
- return "Generator"
-
- def bool_value(self):
- return True
-
- def __repr__(self):
- return "<Generator(%s) l.%s at 0x%s>" % (
- self._proxied.name,
- self.lineno,
- id(self),
- )
-
- def __str__(self):
- return "Generator(%s)" % (self._proxied.name)
-
-
-class AsyncGenerator(Generator):
- """Special node representing an async generator"""
-
- def pytype(self):
- return "%s.async_generator" % BUILTINS
-
- def display_type(self):
- return "AsyncGenerator"
-
- def __repr__(self):
- return "<AsyncGenerator(%s) l.%s at 0x%s>" % (
- self._proxied.name,
- self.lineno,
- id(self),
- )
-
- def __str__(self):
- return "AsyncGenerator(%s)" % (self._proxied.name)
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc
deleted file mode 100644
index 02f8cf7..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc
deleted file mode 100644
index 7cf4841..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc
deleted file mode 100644
index c2a6f46..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc
deleted file mode 100644
index af5833f..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc
deleted file mode 100644
index a895bb5..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc
deleted file mode 100644
index e33a68c..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc
deleted file mode 100644
index ead95a8..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc
deleted file mode 100644
index 94c253f..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc
deleted file mode 100644
index 807c54d..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc
deleted file mode 100644
index 1d0fbe5..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc
deleted file mode 100644
index 115a75b..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc
deleted file mode 100644
index 8cd6565..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc
deleted file mode 100644
index ca12de5..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc
deleted file mode 100644
index 5befdcd..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc
deleted file mode 100644
index e02f078..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc
deleted file mode 100644
index 4c20ea7..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc
deleted file mode 100644
index 4f6155a..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc
deleted file mode 100644
index 872060b..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc
deleted file mode 100644
index 275e716..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc
deleted file mode 100644
index 1b3da4c..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc
deleted file mode 100644
index 4e9eb31..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc
deleted file mode 100644
index 6f6e302..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc
deleted file mode 100644
index 0c77435..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc
deleted file mode 100644
index bb8593b..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc
deleted file mode 100644
index f663c18..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc
deleted file mode 100644
index 32a3b7b..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc
deleted file mode 100644
index 0e950e7..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc
deleted file mode 100644
index bca107d..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc
deleted file mode 100644
index c6647f8..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc
deleted file mode 100644
index 01d5160..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc
deleted file mode 100644
index b5d2c69..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc
deleted file mode 100644
index e317433..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc
deleted file mode 100644
index b5deac2..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc
deleted file mode 100644
index 90e94c9..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc
deleted file mode 100644
index ac6c87d..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc
deleted file mode 100644
index a9214ba..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc
deleted file mode 100644
index 9cb0782..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc b/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc
deleted file mode 100644
index f6850ba..0000000
--- a/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/brain/brain_argparse.py b/venv/Lib/site-packages/astroid/brain/brain_argparse.py
deleted file mode 100644
index d489911..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_argparse.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from astroid import MANAGER, arguments, nodes, inference_tip, UseInferenceDefault
-
-
-def infer_namespace(node, context=None):
- callsite = arguments.CallSite.from_call(node)
- if not callsite.keyword_arguments:
- # Cannot make sense of it.
- raise UseInferenceDefault()
-
- class_node = nodes.ClassDef("Namespace", "docstring")
- class_node.parent = node.parent
- for attr in set(callsite.keyword_arguments):
- fake_node = nodes.EmptyNode()
- fake_node.parent = class_node
- fake_node.attrname = attr
- class_node.instance_attrs[attr] = [fake_node]
- return iter((class_node.instantiate_class(),))
-
-
-def _looks_like_namespace(node):
- func = node.func
- if isinstance(func, nodes.Attribute):
- return (
- func.attrname == "Namespace"
- and isinstance(func.expr, nodes.Name)
- and func.expr.name == "argparse"
- )
- return False
-
-
-MANAGER.register_transform(
- nodes.Call, inference_tip(infer_namespace), _looks_like_namespace
-)
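
brain_argparse shows the canonical shape of an astroid brain plugin: a cheap syntactic predicate selects the calls of interest, and an inference tip synthesizes the node that inference should return. A hedged sketch of the same pattern for a hypothetical factory function (make_config, Config and the attribute handling are invented for illustration, mirroring infer_namespace above):

from astroid import MANAGER, UseInferenceDefault, inference_tip, nodes


def _looks_like_make_config(node):
    return isinstance(node.func, nodes.Name) and node.func.name == "make_config"


def _infer_make_config(node, context=None):
    if not node.keywords:
        raise UseInferenceDefault()
    # Build a fake class whose instance attributes mirror the keyword
    # arguments, exactly like infer_namespace does for argparse.Namespace.
    class_node = nodes.ClassDef("Config", "synthetic class for make_config()")
    class_node.parent = node.parent
    for keyword in node.keywords:
        if keyword.arg is None:  # skip **kwargs
            continue
        fake_node = nodes.EmptyNode()
        fake_node.parent = class_node
        fake_node.attrname = keyword.arg
        class_node.instance_attrs[keyword.arg] = [fake_node]
    return iter((class_node.instantiate_class(),))


MANAGER.register_transform(
    nodes.Call, inference_tip(_infer_make_config), _looks_like_make_config
)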
diff --git a/venv/Lib/site-packages/astroid/brain/brain_attrs.py b/venv/Lib/site-packages/astroid/brain/brain_attrs.py
deleted file mode 100644
index 670736f..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_attrs.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-"""
-Astroid hook for the attrs library
-
-Without this hook pylint reports unsupported-assignment-operation
-for attrs classes
-"""
-
-import astroid
-from astroid import MANAGER
-
-
-ATTRIB_NAMES = frozenset(("attr.ib", "attrib", "attr.attrib"))
-ATTRS_NAMES = frozenset(("attr.s", "attrs", "attr.attrs", "attr.attributes"))
-
-
-def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES):
- """Return True if a decorated node has
- an attr decorator applied."""
- if not node.decorators:
- return False
- for decorator_attribute in node.decorators.nodes:
- if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
- decorator_attribute = decorator_attribute.func
- if decorator_attribute.as_string() in decorator_names:
- return True
- return False
-
-
-def attr_attributes_transform(node):
- """Given that the ClassNode has an attr decorator,
- rewrite class attributes as instance attributes
- """
- # Astroid can't infer this attribute properly
- # Prevents https://github.com/PyCQA/pylint/issues/1884
- node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]
-
- for cdefbodynode in node.body:
- if not isinstance(cdefbodynode, (astroid.Assign, astroid.AnnAssign)):
- continue
- if isinstance(cdefbodynode.value, astroid.Call):
- if cdefbodynode.value.func.as_string() not in ATTRIB_NAMES:
- continue
- else:
- continue
- targets = (
- cdefbodynode.targets
- if hasattr(cdefbodynode, "targets")
- else [cdefbodynode.target]
- )
- for target in targets:
-
- rhs_node = astroid.Unknown(
- lineno=cdefbodynode.lineno,
- col_offset=cdefbodynode.col_offset,
- parent=cdefbodynode,
- )
- node.locals[target.name] = [rhs_node]
- node.instance_attrs[target.name] = [rhs_node]
-
-
-MANAGER.register_transform(
- astroid.ClassDef, attr_attributes_transform, is_decorated_with_attrs
-)
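
The effect of the hook above is easy to observe: once attr_attributes_transform has run, the attr.ib() assignments appear as instance attributes on the ClassDef, which is what silences pylint's unsupported-assignment-operation warning. A small sketch, assuming a normal astroid install where this brain module is loaded automatically (the Point class is invented for illustration):

import astroid

module = astroid.parse('''
import attr

@attr.s
class Point:
    x = attr.ib(default=0)
    y = attr.ib(default=0)
''')
cls = module.body[-1]  # the Point ClassDef
print(sorted(cls.instance_attrs))       # expected to include 'x' and 'y'
print("__attrs_attrs__" in cls.locals)  # True, added by the transform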
diff --git a/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py b/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py
deleted file mode 100644
index 2dd7cc5..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py
+++ /dev/null
@@ -1,829 +0,0 @@
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for various builtins."""
-
-from functools import partial
-from textwrap import dedent
-
-import six
-from astroid import (
- MANAGER,
- UseInferenceDefault,
- AttributeInferenceError,
- inference_tip,
- InferenceError,
- NameInferenceError,
- AstroidTypeError,
- MroError,
-)
-from astroid import arguments
-from astroid.builder import AstroidBuilder
-from astroid import helpers
-from astroid import nodes
-from astroid import objects
-from astroid import scoped_nodes
-from astroid import util
-
-
-OBJECT_DUNDER_NEW = "object.__new__"
-
-
-def _extend_str(class_node, rvalue):
- """function to extend builtin str/unicode class"""
- code = dedent(
- """
- class whatever(object):
- def join(self, iterable):
- return {rvalue}
- def replace(self, old, new, count=None):
- return {rvalue}
- def format(self, *args, **kwargs):
- return {rvalue}
- def encode(self, encoding='ascii', errors=None):
- return ''
- def decode(self, encoding='ascii', errors=None):
- return u''
- def capitalize(self):
- return {rvalue}
- def title(self):
- return {rvalue}
- def lower(self):
- return {rvalue}
- def upper(self):
- return {rvalue}
- def swapcase(self):
- return {rvalue}
- def index(self, sub, start=None, end=None):
- return 0
- def find(self, sub, start=None, end=None):
- return 0
- def count(self, sub, start=None, end=None):
- return 0
- def strip(self, chars=None):
- return {rvalue}
- def lstrip(self, chars=None):
- return {rvalue}
- def rstrip(self, chars=None):
- return {rvalue}
- def rjust(self, width, fillchar=None):
- return {rvalue}
- def center(self, width, fillchar=None):
- return {rvalue}
- def ljust(self, width, fillchar=None):
- return {rvalue}
- """
- )
- code = code.format(rvalue=rvalue)
- fake = AstroidBuilder(MANAGER).string_build(code)["whatever"]
- for method in fake.mymethods():
- method.parent = class_node
- method.lineno = None
- method.col_offset = None
- if "__class__" in method.locals:
- method.locals["__class__"] = [class_node]
- class_node.locals[method.name] = [method]
- method.parent = class_node
-
-
-def _extend_builtins(class_transforms):
- builtin_ast = MANAGER.builtins_module
- for class_name, transform in class_transforms.items():
- transform(builtin_ast[class_name])
-
-
-_extend_builtins(
- {
- "bytes": partial(_extend_str, rvalue="b''"),
- "str": partial(_extend_str, rvalue="''"),
- }
-)
-
-
-def _builtin_filter_predicate(node, builtin_name):
- if isinstance(node.func, nodes.Name) and node.func.name == builtin_name:
- return True
- if isinstance(node.func, nodes.Attribute):
- return (
- node.func.attrname == "fromkeys"
- and isinstance(node.func.expr, nodes.Name)
- and node.func.expr.name == "dict"
- )
- return False
-
-
-def register_builtin_transform(transform, builtin_name):
- """Register a new transform function for the given *builtin_name*.
-
- The transform function must accept two parameters, a node and
- an optional context.
- """
-
- def _transform_wrapper(node, context=None):
- result = transform(node, context=context)
- if result:
- if not result.parent:
- # Let the transformation function determine
- # the parent for its result. Otherwise,
- # we set it to be the node we transformed from.
- result.parent = node
-
- if result.lineno is None:
- result.lineno = node.lineno
- if result.col_offset is None:
- result.col_offset = node.col_offset
- return iter([result])
-
- MANAGER.register_transform(
- nodes.Call,
- inference_tip(_transform_wrapper),
- partial(_builtin_filter_predicate, builtin_name=builtin_name),
- )
-
-
-def _container_generic_inference(node, context, node_type, transform):
- args = node.args
- if not args:
- return node_type()
- if len(node.args) > 1:
- raise UseInferenceDefault()
-
- arg, = args
- transformed = transform(arg)
- if not transformed:
- try:
- inferred = next(arg.infer(context=context))
- except (InferenceError, StopIteration):
- raise UseInferenceDefault()
- if inferred is util.Uninferable:
- raise UseInferenceDefault()
- transformed = transform(inferred)
- if not transformed or transformed is util.Uninferable:
- raise UseInferenceDefault()
- return transformed
-
-
-def _container_generic_transform(arg, klass, iterables, build_elts):
- if isinstance(arg, klass):
- return arg
- elif isinstance(arg, iterables):
- if all(isinstance(elt, nodes.Const) for elt in arg.elts):
- elts = [elt.value for elt in arg.elts]
- else:
- # TODO: Does not handle deduplication for sets.
- elts = filter(None, map(helpers.safe_infer, arg.elts))
- elif isinstance(arg, nodes.Dict):
- # Dicts need to have consts as strings already.
- if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
- raise UseInferenceDefault()
- elts = [item[0].value for item in arg.items]
- elif isinstance(arg, nodes.Const) and isinstance(
- arg.value, (six.string_types, six.binary_type)
- ):
- elts = arg.value
- else:
- return
- return klass.from_elements(elts=build_elts(elts))
-
-
-def _infer_builtin_container(
- node, context, klass=None, iterables=None, build_elts=None
-):
- transform_func = partial(
- _container_generic_transform,
- klass=klass,
- iterables=iterables,
- build_elts=build_elts,
- )
-
- return _container_generic_inference(node, context, klass, transform_func)
-
-
-# pylint: disable=invalid-name
-infer_tuple = partial(
- _infer_builtin_container,
- klass=nodes.Tuple,
- iterables=(
- nodes.List,
- nodes.Set,
- objects.FrozenSet,
- objects.DictItems,
- objects.DictKeys,
- objects.DictValues,
- ),
- build_elts=tuple,
-)
-
-infer_list = partial(
- _infer_builtin_container,
- klass=nodes.List,
- iterables=(
- nodes.Tuple,
- nodes.Set,
- objects.FrozenSet,
- objects.DictItems,
- objects.DictKeys,
- objects.DictValues,
- ),
- build_elts=list,
-)
-
-infer_set = partial(
- _infer_builtin_container,
- klass=nodes.Set,
- iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys),
- build_elts=set,
-)
-
-infer_frozenset = partial(
- _infer_builtin_container,
- klass=objects.FrozenSet,
- iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys),
- build_elts=frozenset,
-)
-
-
-def _get_elts(arg, context):
- is_iterable = lambda n: isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))
- try:
- inferred = next(arg.infer(context))
- except (InferenceError, NameInferenceError):
- raise UseInferenceDefault()
- if isinstance(inferred, nodes.Dict):
- items = inferred.items
- elif is_iterable(inferred):
- items = []
- for elt in inferred.elts:
- # If an item is not a two-element pair,
- # then fall back to the default inference.
- # Also, take into consideration only hashable items:
- # tuples and consts. Names are accepted as well.
- if not is_iterable(elt):
- raise UseInferenceDefault()
- if len(elt.elts) != 2:
- raise UseInferenceDefault()
- if not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)):
- raise UseInferenceDefault()
- items.append(tuple(elt.elts))
- else:
- raise UseInferenceDefault()
- return items
-
-
-def infer_dict(node, context=None):
- """Try to infer a dict call to a Dict node.
-
- The function treats the following cases:
-
- * dict()
- * dict(mapping)
- * dict(iterable)
- * dict(iterable, **kwargs)
- * dict(mapping, **kwargs)
- * dict(**kwargs)
-
- If a case can't be inferred, we'll fall back to default inference.
- """
- call = arguments.CallSite.from_call(node)
- if call.has_invalid_arguments() or call.has_invalid_keywords():
- raise UseInferenceDefault
-
- args = call.positional_arguments
- kwargs = list(call.keyword_arguments.items())
-
- if not args and not kwargs:
- # dict()
- return nodes.Dict()
- elif kwargs and not args:
- # dict(a=1, b=2, c=4)
- items = [(nodes.Const(key), value) for key, value in kwargs]
- elif len(args) == 1 and kwargs:
- # dict(some_iterable, b=2, c=4)
- elts = _get_elts(args[0], context)
- keys = [(nodes.Const(key), value) for key, value in kwargs]
- items = elts + keys
- elif len(args) == 1:
- items = _get_elts(args[0], context)
- else:
- raise UseInferenceDefault()
-
- value = nodes.Dict(
- col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
- )
- value.postinit(items)
- return value
-
-
-def infer_super(node, context=None):
- """Understand super calls.
-
- There are some restrictions for what can be understood:
-
- * unbound super (the one-argument form) is not understood.
-
- * if the super call is not inside a function (classmethod or method),
- then the default inference will be used.
-
- * if the super arguments can't be inferred, the default inference
- will be used.
- """
- if len(node.args) == 1:
- # Ignore unbound super.
- raise UseInferenceDefault
-
- scope = node.scope()
- if not isinstance(scope, nodes.FunctionDef):
- # Ignore non-method uses of super.
- raise UseInferenceDefault
- if scope.type not in ("classmethod", "method"):
- # Not interested in staticmethods.
- raise UseInferenceDefault
-
- cls = scoped_nodes.get_wrapping_class(scope)
- if not len(node.args):
- mro_pointer = cls
- # If we are in a classmethod, the interpreter will automatically
- # fill in the class as the second argument, not an instance.
- if scope.type == "classmethod":
- mro_type = cls
- else:
- mro_type = cls.instantiate_class()
- else:
- try:
- mro_pointer = next(node.args[0].infer(context=context))
- except InferenceError:
- raise UseInferenceDefault
- try:
- mro_type = next(node.args[1].infer(context=context))
- except InferenceError:
- raise UseInferenceDefault
-
- if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
- # No way we could understand this.
- raise UseInferenceDefault
-
- super_obj = objects.Super(
- mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
- )
- super_obj.parent = node
- return super_obj
-
-
-def _infer_getattr_args(node, context):
- if len(node.args) not in (2, 3):
- # Not a valid getattr call.
- raise UseInferenceDefault
-
- try:
- obj = next(node.args[0].infer(context=context))
- attr = next(node.args[1].infer(context=context))
- except InferenceError:
- raise UseInferenceDefault
-
- if obj is util.Uninferable or attr is util.Uninferable:
- # If one of the arguments is something we can't infer,
- # then also make the result of the getattr call something
- # which is unknown.
- return util.Uninferable, util.Uninferable
-
- is_string = isinstance(attr, nodes.Const) and isinstance(
- attr.value, six.string_types
- )
- if not is_string:
- raise UseInferenceDefault
-
- return obj, attr.value
-
-
-def infer_getattr(node, context=None):
- """Understand getattr calls
-
- If one of the arguments is an Uninferable object, then the
- result will be an Uninferable object. Otherwise, the normal attribute
- lookup will be done.
- """
- obj, attr = _infer_getattr_args(node, context)
- if (
- obj is util.Uninferable
- or attr is util.Uninferable
- or not hasattr(obj, "igetattr")
- ):
- return util.Uninferable
-
- try:
- return next(obj.igetattr(attr, context=context))
- except (StopIteration, InferenceError, AttributeInferenceError):
- if len(node.args) == 3:
- # Try to infer the default and return it instead.
- try:
- return next(node.args[2].infer(context=context))
- except InferenceError:
- raise UseInferenceDefault
-
- raise UseInferenceDefault
-
-
-def infer_hasattr(node, context=None):
- """Understand hasattr calls
-
- This always guarantees three possible outcomes for calling
- hasattr: Const(False) when we are sure that the object
- doesn't have the intended attribute, Const(True) when
- we know that the object has the attribute and Uninferable
- when we are unsure of the outcome of the function call.
- """
- try:
- obj, attr = _infer_getattr_args(node, context)
- if (
- obj is util.Uninferable
- or attr is util.Uninferable
- or not hasattr(obj, "getattr")
- ):
- return util.Uninferable
- obj.getattr(attr, context=context)
- except UseInferenceDefault:
- # Can't infer something from this function call.
- return util.Uninferable
- except AttributeInferenceError:
- # Doesn't have it.
- return nodes.Const(False)
- return nodes.Const(True)
-
-
-def infer_callable(node, context=None):
- """Understand callable calls
-
- This follows Python's semantics, where an object
- is callable if it provides an attribute __call__,
- even though that attribute is something which can't be
- called.
- """
- if len(node.args) != 1:
- # Invalid callable call.
- raise UseInferenceDefault
-
- argument = node.args[0]
- try:
- inferred = next(argument.infer(context=context))
- except InferenceError:
- return util.Uninferable
- if inferred is util.Uninferable:
- return util.Uninferable
- return nodes.Const(inferred.callable())
-
-
-def infer_bool(node, context=None):
- """Understand bool calls."""
- if len(node.args) > 1:
- # Invalid bool call.
- raise UseInferenceDefault
-
- if not node.args:
- return nodes.Const(False)
-
- argument = node.args[0]
- try:
- inferred = next(argument.infer(context=context))
- except InferenceError:
- return util.Uninferable
- if inferred is util.Uninferable:
- return util.Uninferable
-
- bool_value = inferred.bool_value()
- if bool_value is util.Uninferable:
- return util.Uninferable
- return nodes.Const(bool_value)
-
-
-def infer_type(node, context=None):
- """Understand the one-argument form of *type*."""
- if len(node.args) != 1:
- raise UseInferenceDefault
-
- return helpers.object_type(node.args[0], context)
-
-
-def infer_slice(node, context=None):
- """Understand `slice` calls."""
- args = node.args
- if not 0 < len(args) <= 3:
- raise UseInferenceDefault
-
- infer_func = partial(helpers.safe_infer, context=context)
- args = [infer_func(arg) for arg in args]
- for arg in args:
- if not arg or arg is util.Uninferable:
- raise UseInferenceDefault
- if not isinstance(arg, nodes.Const):
- raise UseInferenceDefault
- if not isinstance(arg.value, (type(None), int)):
- raise UseInferenceDefault
-
- if len(args) < 3:
- # Make sure we have 3 arguments.
- args.extend([None] * (3 - len(args)))
-
- slice_node = nodes.Slice(
- lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
- )
- slice_node.postinit(*args)
- return slice_node
-
-
-def _infer_object__new__decorator(node, context=None):
- # Instantiate class immediately
- # since that's what @object.__new__ does
- return iter((node.instantiate_class(),))
-
-
-def _infer_object__new__decorator_check(node):
- """Predicate before inference_tip
-
- Check if the given ClassDef has an @object.__new__ decorator
- """
- if not node.decorators:
- return False
-
- for decorator in node.decorators.nodes:
- if isinstance(decorator, nodes.Attribute):
- if decorator.as_string() == OBJECT_DUNDER_NEW:
- return True
- return False
-
-
-def infer_issubclass(callnode, context=None):
- """Infer issubclass() calls
-
- :param nodes.Call callnode: an `issubclass` call
- :param InferenceContext: the context for the inference
- :rtype nodes.Const: Boolean Const value of the `issubclass` call
- :raises UseInferenceDefault: If the node cannot be inferred
- """
- call = arguments.CallSite.from_call(callnode)
- if call.keyword_arguments:
- # issubclass doesn't support keyword arguments
- raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
- if len(call.positional_arguments) != 2:
- raise UseInferenceDefault(
- "Expected two arguments, got {count}".format(
- count=len(call.positional_arguments)
- )
- )
- # The left hand argument is the obj to be checked
- obj_node, class_or_tuple_node = call.positional_arguments
-
- try:
- obj_type = next(obj_node.infer(context=context))
- except InferenceError as exc:
- raise UseInferenceDefault from exc
- if not isinstance(obj_type, nodes.ClassDef):
- raise UseInferenceDefault("TypeError: arg 1 must be class")
-
- # The right hand argument is the class(es) that the given
- # object is to be checked against.
- try:
- class_container = _class_or_tuple_to_container(
- class_or_tuple_node, context=context
- )
- except InferenceError as exc:
- raise UseInferenceDefault from exc
- try:
- issubclass_bool = helpers.object_issubclass(obj_type, class_container, context)
- except AstroidTypeError as exc:
- raise UseInferenceDefault("TypeError: " + str(exc)) from exc
- except MroError as exc:
- raise UseInferenceDefault from exc
- return nodes.Const(issubclass_bool)
-
-
-def infer_isinstance(callnode, context=None):
- """Infer isinstance calls
-
- :param nodes.Call callnode: an isinstance call
- :param InferenceContext: context for call
- (currently unused but is a common interface for inference)
- :rtype nodes.Const: Boolean Const value of isinstance call
-
- :raises UseInferenceDefault: If the node cannot be inferred
- """
- call = arguments.CallSite.from_call(callnode)
- if call.keyword_arguments:
- # isinstance doesn't support keyword arguments
- raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
- if len(call.positional_arguments) != 2:
- raise UseInferenceDefault(
- "Expected two arguments, got {count}".format(
- count=len(call.positional_arguments)
- )
- )
- # The left hand argument is the obj to be checked
- obj_node, class_or_tuple_node = call.positional_arguments
- # The right hand argument is the class(es) that the given
- # obj is to be checked against.
- try:
- class_container = _class_or_tuple_to_container(
- class_or_tuple_node, context=context
- )
- except InferenceError:
- raise UseInferenceDefault
- try:
- isinstance_bool = helpers.object_isinstance(obj_node, class_container, context)
- except AstroidTypeError as exc:
- raise UseInferenceDefault("TypeError: " + str(exc))
- except MroError as exc:
- raise UseInferenceDefault from exc
- if isinstance_bool is util.Uninferable:
- raise UseInferenceDefault
- return nodes.Const(isinstance_bool)
-
-
-def _class_or_tuple_to_container(node, context=None):
- # Move inference results into a container to simplify later logic.
- # Raises InferenceError if any of the inferences fall through.
- node_infer = next(node.infer(context=context))
- # arg2 MUST be a type or a TUPLE of types
- # for isinstance
- if isinstance(node_infer, nodes.Tuple):
- class_container = [
- next(node.infer(context=context)) for node in node_infer.elts
- ]
- class_container = [
- klass_node for klass_node in class_container if klass_node is not None
- ]
- else:
- class_container = [node_infer]
- return class_container
-
-
-def infer_len(node, context=None):
- """Infer length calls
-
- :param nodes.Call node: len call to infer
- :param context.InferenceContext: node context
- :rtype nodes.Const: a Const node with the inferred length, if possible
- """
- call = arguments.CallSite.from_call(node)
- if call.keyword_arguments:
- raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
- if len(call.positional_arguments) != 1:
- raise UseInferenceDefault(
- "TypeError: len() must take exactly one argument "
- "({len}) given".format(len=len(call.positional_arguments))
- )
- [argument_node] = call.positional_arguments
- try:
- return nodes.Const(helpers.object_len(argument_node, context=context))
- except (AstroidTypeError, InferenceError) as exc:
- raise UseInferenceDefault(str(exc)) from exc
-
-
-def infer_str(node, context=None):
- """Infer str() calls
-
- :param nodes.Call node: str() call to infer
- :param context.InferenceContext: node context
- :rtype nodes.Const: a Const containing an empty string
- """
- call = arguments.CallSite.from_call(node)
- if call.keyword_arguments:
- raise UseInferenceDefault("TypeError: str() must take no keyword arguments")
- try:
- return nodes.Const("")
- except (AstroidTypeError, InferenceError) as exc:
- raise UseInferenceDefault(str(exc)) from exc
-
-
-def infer_int(node, context=None):
- """Infer int() calls
-
- :param nodes.Call node: int() call to infer
- :param context.InferenceContext: node context
- :rtype nodes.Const: a Const containing the integer value of the int() call
- """
- call = arguments.CallSite.from_call(node)
- if call.keyword_arguments:
- raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
-
- if call.positional_arguments:
- try:
- first_value = next(call.positional_arguments[0].infer(context=context))
- except InferenceError as exc:
- raise UseInferenceDefault(str(exc)) from exc
-
- if first_value is util.Uninferable:
- raise UseInferenceDefault
-
- if isinstance(first_value, nodes.Const) and isinstance(
- first_value.value, (int, str)
- ):
- try:
- actual_value = int(first_value.value)
- except ValueError:
- return nodes.Const(0)
- return nodes.Const(actual_value)
-
- return nodes.Const(0)
-
-
-def infer_dict_fromkeys(node, context=None):
- """Infer dict.fromkeys
-
- :param nodes.Call node: dict.fromkeys() call to infer
- :param context.InferenceContext: node context
- :rtype nodes.Dict:
- a Dictionary containing the values that astroid was able to infer.
- In case the inference failed for any reason, an empty dictionary
- will be inferred instead.
- """
-
- def _build_dict_with_elements(elements):
- new_node = nodes.Dict(
- col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
- )
- new_node.postinit(elements)
- return new_node
-
- call = arguments.CallSite.from_call(node)
- if call.keyword_arguments:
- raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
- if len(call.positional_arguments) not in {1, 2}:
- raise UseInferenceDefault(
- "TypeError: Needs between 1 and 2 positional arguments"
- )
-
- default = nodes.Const(None)
- values = call.positional_arguments[0]
- try:
- inferred_values = next(values.infer(context=context))
- except InferenceError:
- return _build_dict_with_elements([])
- if inferred_values is util.Uninferable:
- return _build_dict_with_elements([])
-
- # Limit to a couple of potential values, as this can become pretty complicated
- accepted_iterable_elements = (nodes.Const,)
- if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
- elements = inferred_values.elts
- for element in elements:
- if not isinstance(element, accepted_iterable_elements):
- # Fallback to an empty dict
- return _build_dict_with_elements([])
-
- elements_with_value = [(element, default) for element in elements]
- return _build_dict_with_elements(elements_with_value)
-
- elif isinstance(inferred_values, nodes.Const) and isinstance(
- inferred_values.value, (str, bytes)
- ):
- elements = [
- (nodes.Const(element), default) for element in inferred_values.value
- ]
- return _build_dict_with_elements(elements)
- elif isinstance(inferred_values, nodes.Dict):
- keys = inferred_values.itered()
- for key in keys:
- if not isinstance(key, accepted_iterable_elements):
- # Fallback to an empty dict
- return _build_dict_with_elements([])
-
- elements_with_value = [(element, default) for element in keys]
- return _build_dict_with_elements(elements_with_value)
-
- # Fallback to an empty dictionary
- return _build_dict_with_elements([])
-
-
-# Builtins inference
-register_builtin_transform(infer_bool, "bool")
-register_builtin_transform(infer_super, "super")
-register_builtin_transform(infer_callable, "callable")
-register_builtin_transform(infer_getattr, "getattr")
-register_builtin_transform(infer_hasattr, "hasattr")
-register_builtin_transform(infer_tuple, "tuple")
-register_builtin_transform(infer_set, "set")
-register_builtin_transform(infer_list, "list")
-register_builtin_transform(infer_dict, "dict")
-register_builtin_transform(infer_frozenset, "frozenset")
-register_builtin_transform(infer_type, "type")
-register_builtin_transform(infer_slice, "slice")
-register_builtin_transform(infer_isinstance, "isinstance")
-register_builtin_transform(infer_issubclass, "issubclass")
-register_builtin_transform(infer_len, "len")
-register_builtin_transform(infer_str, "str")
-register_builtin_transform(infer_int, "int")
-register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys")
-
-
-# Infer object.__new__ calls
-MANAGER.register_transform(
- nodes.ClassDef,
- inference_tip(_infer_object__new__decorator),
- _infer_object__new__decorator_check,
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_collections.py b/venv/Lib/site-packages/astroid/brain/brain_collections.py
deleted file mode 100644
index e5b09ec..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_collections.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import sys
-
-import astroid
-
-
-def _collections_transform():
- return astroid.parse(
- """
- class defaultdict(dict):
- default_factory = None
- def __missing__(self, key): pass
- def __getitem__(self, key): return default_factory
-
- """
- + _deque_mock()
- + _ordered_dict_mock()
- )
-
-
-def _deque_mock():
- base_deque_class = """
- class deque(object):
- maxlen = 0
- def __init__(self, iterable=None, maxlen=None):
- self.iterable = iterable or []
- def append(self, x): pass
- def appendleft(self, x): pass
- def clear(self): pass
- def count(self, x): return 0
- def extend(self, iterable): pass
- def extendleft(self, iterable): pass
- def pop(self): return self.iterable[0]
- def popleft(self): return self.iterable[0]
- def remove(self, value): pass
- def reverse(self): return reversed(self.iterable)
- def rotate(self, n=1): return self
- def __iter__(self): return self
- def __reversed__(self): return self.iterable[::-1]
- def __getitem__(self, index): return self.iterable[index]
- def __setitem__(self, index, value): pass
- def __delitem__(self, index): pass
- def __bool__(self): return bool(self.iterable)
- def __nonzero__(self): return bool(self.iterable)
- def __contains__(self, o): return o in self.iterable
- def __len__(self): return len(self.iterable)
- def __copy__(self): return deque(self.iterable)
- def copy(self): return deque(self.iterable)
- def index(self, x, start=0, end=0): return 0
- def insert(self, x, i): pass
- def __add__(self, other): pass
- def __iadd__(self, other): pass
- def __mul__(self, other): pass
- def __imul__(self, other): pass
- def __rmul__(self, other): pass"""
- return base_deque_class
-
-
-def _ordered_dict_mock():
- base_ordered_dict_class = """
- class OrderedDict(dict):
- def __reversed__(self): return self[::-1]
- def move_to_end(self, key, last=False): pass"""
- return base_ordered_dict_class
-
-
-astroid.register_module_extender(astroid.MANAGER, "collections", _collections_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_crypt.py b/venv/Lib/site-packages/astroid/brain/brain_crypt.py
deleted file mode 100644
index 491ee23..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_crypt.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import sys
-import astroid
-
-PY37 = sys.version_info >= (3, 7)
-
-if PY37:
- # Since Python 3.7, hashing methods are added
- # dynamically to globals().
-
- def _re_transform():
- return astroid.parse(
- """
- from collections import namedtuple
- _Method = namedtuple('_Method', 'name ident salt_chars total_size')
-
- METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
- METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
- METHOD_BLOWFISH = _Method('BLOWFISH', 2, 'b', 22)
- METHOD_MD5 = _Method('MD5', '1', 8, 34)
- METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
- """
- )
-
- astroid.register_module_extender(astroid.MANAGER, "crypt", _re_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_curses.py b/venv/Lib/site-packages/astroid/brain/brain_curses.py
deleted file mode 100644
index 68e88b9..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_curses.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import astroid
-
-
-def _curses_transform():
- return astroid.parse(
- """
- A_ALTCHARSET = 1
- A_BLINK = 1
- A_BOLD = 1
- A_DIM = 1
- A_INVIS = 1
- A_ITALIC = 1
- A_NORMAL = 1
- A_PROTECT = 1
- A_REVERSE = 1
- A_STANDOUT = 1
- A_UNDERLINE = 1
- A_HORIZONTAL = 1
- A_LEFT = 1
- A_LOW = 1
- A_RIGHT = 1
- A_TOP = 1
- A_VERTICAL = 1
- A_CHARTEXT = 1
- A_ATTRIBUTES = 1
- A_CHARTEXT = 1
- A_COLOR = 1
- KEY_MIN = 1
- KEY_BREAK = 1
- KEY_DOWN = 1
- KEY_UP = 1
- KEY_LEFT = 1
- KEY_RIGHT = 1
- KEY_HOME = 1
- KEY_BACKSPACE = 1
- KEY_F0 = 1
- KEY_Fn = 1
- KEY_DL = 1
- KEY_IL = 1
- KEY_DC = 1
- KEY_IC = 1
- KEY_EIC = 1
- KEY_CLEAR = 1
- KEY_EOS = 1
- KEY_EOL = 1
- KEY_SF = 1
- KEY_SR = 1
- KEY_NPAGE = 1
- KEY_PPAGE = 1
- KEY_STAB = 1
- KEY_CTAB = 1
- KEY_CATAB = 1
- KEY_ENTER = 1
- KEY_SRESET = 1
- KEY_RESET = 1
- KEY_PRINT = 1
- KEY_LL = 1
- KEY_A1 = 1
- KEY_A3 = 1
- KEY_B2 = 1
- KEY_C1 = 1
- KEY_C3 = 1
- KEY_BTAB = 1
- KEY_BEG = 1
- KEY_CANCEL = 1
- KEY_CLOSE = 1
- KEY_COMMAND = 1
- KEY_COPY = 1
- KEY_CREATE = 1
- KEY_END = 1
- KEY_EXIT = 1
- KEY_FIND = 1
- KEY_HELP = 1
- KEY_MARK = 1
- KEY_MESSAGE = 1
- KEY_MOVE = 1
- KEY_NEXT = 1
- KEY_OPEN = 1
- KEY_OPTIONS = 1
- KEY_PREVIOUS = 1
- KEY_REDO = 1
- KEY_REFERENCE = 1
- KEY_REFRESH = 1
- KEY_REPLACE = 1
- KEY_RESTART = 1
- KEY_RESUME = 1
- KEY_SAVE = 1
- KEY_SBEG = 1
- KEY_SCANCEL = 1
- KEY_SCOMMAND = 1
- KEY_SCOPY = 1
- KEY_SCREATE = 1
- KEY_SDC = 1
- KEY_SDL = 1
- KEY_SELECT = 1
- KEY_SEND = 1
- KEY_SEOL = 1
- KEY_SEXIT = 1
- KEY_SFIND = 1
- KEY_SHELP = 1
- KEY_SHOME = 1
- KEY_SIC = 1
- KEY_SLEFT = 1
- KEY_SMESSAGE = 1
- KEY_SMOVE = 1
- KEY_SNEXT = 1
- KEY_SOPTIONS = 1
- KEY_SPREVIOUS = 1
- KEY_SPRINT = 1
- KEY_SREDO = 1
- KEY_SREPLACE = 1
- KEY_SRIGHT = 1
- KEY_SRSUME = 1
- KEY_SSAVE = 1
- KEY_SSUSPEND = 1
- KEY_SUNDO = 1
- KEY_SUSPEND = 1
- KEY_UNDO = 1
- KEY_MOUSE = 1
- KEY_RESIZE = 1
- KEY_MAX = 1
- ACS_BBSS = 1
- ACS_BLOCK = 1
- ACS_BOARD = 1
- ACS_BSBS = 1
- ACS_BSSB = 1
- ACS_BSSS = 1
- ACS_BTEE = 1
- ACS_BULLET = 1
- ACS_CKBOARD = 1
- ACS_DARROW = 1
- ACS_DEGREE = 1
- ACS_DIAMOND = 1
- ACS_GEQUAL = 1
- ACS_HLINE = 1
- ACS_LANTERN = 1
- ACS_LARROW = 1
- ACS_LEQUAL = 1
- ACS_LLCORNER = 1
- ACS_LRCORNER = 1
- ACS_LTEE = 1
- ACS_NEQUAL = 1
- ACS_PI = 1
- ACS_PLMINUS = 1
- ACS_PLUS = 1
- ACS_RARROW = 1
- ACS_RTEE = 1
- ACS_S1 = 1
- ACS_S3 = 1
- ACS_S7 = 1
- ACS_S9 = 1
- ACS_SBBS = 1
- ACS_SBSB = 1
- ACS_SBSS = 1
- ACS_SSBB = 1
- ACS_SSBS = 1
- ACS_SSSB = 1
- ACS_SSSS = 1
- ACS_STERLING = 1
- ACS_TTEE = 1
- ACS_UARROW = 1
- ACS_ULCORNER = 1
- ACS_URCORNER = 1
- ACS_VLINE = 1
- COLOR_BLACK = 1
- COLOR_BLUE = 1
- COLOR_CYAN = 1
- COLOR_GREEN = 1
- COLOR_MAGENTA = 1
- COLOR_RED = 1
- COLOR_WHITE = 1
- COLOR_YELLOW = 1
- """
- )
-
-
-astroid.register_module_extender(astroid.MANAGER, "curses", _curses_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_dataclasses.py b/venv/Lib/site-packages/astroid/brain/brain_dataclasses.py
deleted file mode 100644
index 7a25e0c..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_dataclasses.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-"""
-Astroid hook for the dataclasses library
-"""
-
-import astroid
-from astroid import MANAGER
-
-
-DATACLASSES_DECORATORS = frozenset(("dataclasses.dataclass", "dataclass"))
-
-
-def is_decorated_with_dataclass(node, decorator_names=DATACLASSES_DECORATORS):
- """Return True if a decorated node has a `dataclass` decorator applied."""
- if not node.decorators:
- return False
- for decorator_attribute in node.decorators.nodes:
- if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
- decorator_attribute = decorator_attribute.func
- if decorator_attribute.as_string() in decorator_names:
- return True
- return False
-
-
-def dataclass_transform(node):
- """Rewrite a dataclass to be easily understood by pylint"""
-
- for assign_node in node.body:
- if not isinstance(assign_node, (astroid.AnnAssign, astroid.Assign)):
- continue
-
- targets = (
- assign_node.targets
- if hasattr(assign_node, "targets")
- else [assign_node.target]
- )
- for target in targets:
- rhs_node = astroid.Unknown(
- lineno=assign_node.lineno,
- col_offset=assign_node.col_offset,
- parent=assign_node,
- )
- node.instance_attrs[target.name] = [rhs_node]
- node.locals[target.name] = [rhs_node]
-
-
-MANAGER.register_transform(
- astroid.ClassDef, dataclass_transform, is_decorated_with_dataclass
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_dateutil.py b/venv/Lib/site-packages/astroid/brain/brain_dateutil.py
deleted file mode 100644
index a1c270f..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_dateutil.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015 raylu <lurayl@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for dateutil"""
-
-import textwrap
-
-from astroid import MANAGER, register_module_extender
-from astroid.builder import AstroidBuilder
-
-
-def dateutil_transform():
- return AstroidBuilder(MANAGER).string_build(
- textwrap.dedent(
- """
- import datetime
- def parse(timestr, parserinfo=None, **kwargs):
- return datetime.datetime()
- """
- )
- )
-
-
-register_module_extender(MANAGER, "dateutil.parser", dateutil_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_fstrings.py b/venv/Lib/site-packages/astroid/brain/brain_fstrings.py
deleted file mode 100644
index 7d8c7b6..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_fstrings.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import collections
-import sys
-
-import astroid
-
-
-def _clone_node_with_lineno(node, parent, lineno):
- cls = node.__class__
- other_fields = node._other_fields
- _astroid_fields = node._astroid_fields
- init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
- postinit_params = {param: getattr(node, param) for param in _astroid_fields}
- if other_fields:
- init_params.update({param: getattr(node, param) for param in other_fields})
- new_node = cls(**init_params)
- if hasattr(node, "postinit") and _astroid_fields:
- for param, child in postinit_params.items():
- if child and not isinstance(child, collections.Sequence):
- cloned_child = _clone_node_with_lineno(
- node=child, lineno=new_node.lineno, parent=new_node
- )
- postinit_params[param] = cloned_child
- new_node.postinit(**postinit_params)
- return new_node
-
-
-def _transform_formatted_value(node):
- if node.value and node.value.lineno == 1:
- if node.lineno != node.value.lineno:
- new_node = astroid.FormattedValue(
- lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
- )
- new_value = _clone_node_with_lineno(
- node=node.value, lineno=node.lineno, parent=new_node
- )
- new_node.postinit(value=new_value, format_spec=node.format_spec)
- return new_node
-
-
-if sys.version_info[:2] >= (3, 6):
- # TODO: this fix tries to *patch* http://bugs.python.org/issue29051
- # The problem is that FormattedValue.value, which is a Name node,
- # has wrong line numbers, usually 1. This creates problems for pylint,
- # which expects correct line numbers for things such as message control.
- astroid.MANAGER.register_transform(
- astroid.FormattedValue, _transform_formatted_value
- )
diff --git a/venv/Lib/site-packages/astroid/brain/brain_functools.py b/venv/Lib/site-packages/astroid/brain/brain_functools.py
deleted file mode 100644
index 8b594ef..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_functools.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-"""Astroid hooks for understanding functools library module."""
-from functools import partial
-from itertools import chain
-
-import astroid
-from astroid import arguments
-from astroid import BoundMethod
-from astroid import extract_node
-from astroid import helpers
-from astroid.interpreter import objectmodel
-from astroid import MANAGER
-from astroid import objects
-
-
-LRU_CACHE = "functools.lru_cache"
-
-
-class LruWrappedModel(objectmodel.FunctionModel):
- """Special attribute model for functions decorated with functools.lru_cache.
-
- The said decorator patches, at decoration time, some functions onto
- the decorated function.
- """
-
- @property
- def attr___wrapped__(self):
- return self._instance
-
- @property
- def attr_cache_info(self):
- cache_info = extract_node(
- """
- from functools import _CacheInfo
- _CacheInfo(0, 0, 0, 0)
- """
- )
-
- class CacheInfoBoundMethod(BoundMethod):
- def infer_call_result(self, caller, context=None):
- yield helpers.safe_infer(cache_info)
-
- return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
-
- @property
- def attr_cache_clear(self):
- node = extract_node("""def cache_clear(self): pass""")
- return BoundMethod(proxy=node, bound=self._instance.parent.scope())
-
-
-def _transform_lru_cache(node, context=None):
- # TODO: this is not ideal, since the node should be immutable,
- # but due to https://github.com/PyCQA/astroid/issues/354,
- # there's not much we can do now.
- # Replacing the node would work partially, because,
- # in pylint, the old node would still be available, leading
- # to spurious false positives.
- node.special_attributes = LruWrappedModel()(node)
- return
-
-
-def _functools_partial_inference(node, context=None):
- call = arguments.CallSite.from_call(node)
- number_of_positional = len(call.positional_arguments)
- if number_of_positional < 1:
- raise astroid.UseInferenceDefault(
- "functools.partial takes at least one argument"
- )
- if number_of_positional == 1 and not call.keyword_arguments:
- raise astroid.UseInferenceDefault(
- "functools.partial needs at least to have some filled arguments"
- )
-
- partial_function = call.positional_arguments[0]
- try:
- inferred_wrapped_function = next(partial_function.infer(context=context))
- except astroid.InferenceError as exc:
- raise astroid.UseInferenceDefault from exc
- if inferred_wrapped_function is astroid.Uninferable:
- raise astroid.UseInferenceDefault("Cannot infer the wrapped function")
- if not isinstance(inferred_wrapped_function, astroid.FunctionDef):
- raise astroid.UseInferenceDefault("The wrapped function is not a function")
-
- # Determine if the keywords passed into the call site are supported
- # by the wrapped function.
- function_parameters = chain(
- inferred_wrapped_function.args.args or (),
- inferred_wrapped_function.args.posonlyargs or (),
- inferred_wrapped_function.args.kwonlyargs or (),
- )
- parameter_names = set(
- param.name
- for param in function_parameters
- if isinstance(param, astroid.AssignName)
- )
- if set(call.keyword_arguments) - parameter_names:
- raise astroid.UseInferenceDefault(
- "wrapped function received unknown parameters"
- )
-
- partial_function = objects.PartialFunction(
- call,
- name=inferred_wrapped_function.name,
- doc=inferred_wrapped_function.doc,
- lineno=inferred_wrapped_function.lineno,
- col_offset=inferred_wrapped_function.col_offset,
- parent=inferred_wrapped_function.parent,
- )
- partial_function.postinit(
- args=inferred_wrapped_function.args,
- body=inferred_wrapped_function.body,
- decorators=inferred_wrapped_function.decorators,
- returns=inferred_wrapped_function.returns,
- type_comment_returns=inferred_wrapped_function.type_comment_returns,
- type_comment_args=inferred_wrapped_function.type_comment_args,
- )
- return iter((partial_function,))
-
-
-def _looks_like_lru_cache(node):
- """Check if the given function node is decorated with lru_cache."""
- if not node.decorators:
- return False
- for decorator in node.decorators.nodes:
- if not isinstance(decorator, astroid.Call):
- continue
- if _looks_like_functools_member(decorator, "lru_cache"):
- return True
- return False
-
-
-def _looks_like_functools_member(node, member):
- """Check if the given Call node is a functools.partial call"""
- if isinstance(node.func, astroid.Name):
- return node.func.name == member
- elif isinstance(node.func, astroid.Attribute):
- return (
- node.func.attrname == member
- and isinstance(node.func.expr, astroid.Name)
- and node.func.expr.name == "functools"
- )
-
-
-_looks_like_partial = partial(_looks_like_functools_member, member="partial")
-
-
-MANAGER.register_transform(
- astroid.FunctionDef, _transform_lru_cache, _looks_like_lru_cache
-)
-
-
-MANAGER.register_transform(
- astroid.Call,
- astroid.inference_tip(_functools_partial_inference),
- _looks_like_partial,
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_gi.py b/venv/Lib/site-packages/astroid/brain/brain_gi.py
deleted file mode 100644
index 0970610..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_gi.py
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2014 Cole Robinson <crobinso@redhat.com>
-# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 David Shea <dshea@redhat.com>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2016 Giuseppe Scrivano <gscrivan@redhat.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for the Python 2 GObject introspection bindings.
-
-Helps with understanding everything imported from 'gi.repository'
-"""
-
-import inspect
-import itertools
-import sys
-import re
-import warnings
-
-from astroid import MANAGER, AstroidBuildingError, nodes
-from astroid.builder import AstroidBuilder
-
-
-_inspected_modules = {}
-
-_identifier_re = r"^[A-Za-z_]\w*$"
-
-
-def _gi_build_stub(parent):
- """
- Inspect the passed module recursively and build stubs for functions,
- classes, etc.
- """
- classes = {}
- functions = {}
- constants = {}
- methods = {}
- for name in dir(parent):
- if name.startswith("__"):
- continue
-
- # Check if this is a valid name in python
- if not re.match(_identifier_re, name):
- continue
-
- try:
- obj = getattr(parent, name)
- except Exception:
- continue
-
- if inspect.isclass(obj):
- classes[name] = obj
- elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
- functions[name] = obj
- elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
- methods[name] = obj
- elif (
- str(obj).startswith("<flags")
- or str(obj).startswith("<enum ")
- or str(obj).startswith("<GType ")
- or inspect.isdatadescriptor(obj)
- ):
- constants[name] = 0
- elif isinstance(obj, (int, str)):
- constants[name] = obj
- elif callable(obj):
- # Fall back to a function for anything callable
- functions[name] = obj
- else:
- # Assume everything else is some manner of constant
- constants[name] = 0
-
- ret = ""
-
- if constants:
- ret += "# %s constants\n\n" % parent.__name__
- for name in sorted(constants):
- if name[0].isdigit():
- # GDK has some busted constant names like
- # Gdk.EventType.2BUTTON_PRESS
- continue
-
- val = constants[name]
-
- strval = str(val)
- if isinstance(val, str):
- strval = '"%s"' % str(val).replace("\\", "\\\\")
- ret += "%s = %s\n" % (name, strval)
-
- if ret:
- ret += "\n\n"
- if functions:
- ret += "# %s functions\n\n" % parent.__name__
- for name in sorted(functions):
- ret += "def %s(*args, **kwargs):\n" % name
- ret += " pass\n"
-
- if ret:
- ret += "\n\n"
- if methods:
- ret += "# %s methods\n\n" % parent.__name__
- for name in sorted(methods):
- ret += "def %s(self, *args, **kwargs):\n" % name
- ret += " pass\n"
-
- if ret:
- ret += "\n\n"
- if classes:
- ret += "# %s classes\n\n" % parent.__name__
- for name, obj in sorted(classes.items()):
- base = "object"
- if issubclass(obj, Exception):
- base = "Exception"
- ret += "class %s(%s):\n" % (name, base)
-
- classret = _gi_build_stub(obj)
- if not classret:
- classret = "pass\n"
-
- for line in classret.splitlines():
- ret += " " + line + "\n"
- ret += "\n"
-
- return ret
-
-
-def _import_gi_module(modname):
- # we only consider gi.repository submodules
- if not modname.startswith("gi.repository."):
- raise AstroidBuildingError(modname=modname)
- # build astroid representation unless we already tried so
- if modname not in _inspected_modules:
- modnames = [modname]
- optional_modnames = []
-
- # GLib and GObject may have some special case handling
- # in pygobject that we need to cope with. However at
- # least as of pygobject3-3.13.91 the _glib module doesn't
- # exist anymore, so we treat these modules as optional.
- if modname == "gi.repository.GLib":
- optional_modnames.append("gi._glib")
- elif modname == "gi.repository.GObject":
- optional_modnames.append("gi._gobject")
-
- try:
- modcode = ""
- for m in itertools.chain(modnames, optional_modnames):
- try:
- with warnings.catch_warnings():
- # Just inspecting the code can raise gi deprecation
- # warnings, so ignore them.
- try:
- from gi import PyGIDeprecationWarning, PyGIWarning
-
- warnings.simplefilter("ignore", PyGIDeprecationWarning)
- warnings.simplefilter("ignore", PyGIWarning)
- except Exception:
- pass
-
- __import__(m)
- modcode += _gi_build_stub(sys.modules[m])
- except ImportError:
- if m not in optional_modnames:
- raise
- except ImportError:
- astng = _inspected_modules[modname] = None
- else:
- astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
- _inspected_modules[modname] = astng
- else:
- astng = _inspected_modules[modname]
- if astng is None:
- raise AstroidBuildingError(modname=modname)
- return astng
-
-
-def _looks_like_require_version(node):
- # Return whether this looks like a call to gi.require_version(<name>, <version>)
- # Only accept function calls with two constant arguments
- if len(node.args) != 2:
- return False
-
- if not all(isinstance(arg, nodes.Const) for arg in node.args):
- return False
-
- func = node.func
- if isinstance(func, nodes.Attribute):
- if func.attrname != "require_version":
- return False
- if isinstance(func.expr, nodes.Name) and func.expr.name == "gi":
- return True
-
- return False
-
- if isinstance(func, nodes.Name):
- return func.name == "require_version"
-
- return False
-
-
-def _register_require_version(node):
- # Load the gi.require_version locally
- try:
- import gi
-
- gi.require_version(node.args[0].value, node.args[1].value)
- except Exception:
- pass
-
- return node
-
-
-MANAGER.register_failed_import_hook(_import_gi_module)
-MANAGER.register_transform(
- nodes.Call, _register_require_version, _looks_like_require_version
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_hashlib.py b/venv/Lib/site-packages/astroid/brain/brain_hashlib.py
deleted file mode 100644
index 98ae774..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_hashlib.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import sys
-
-import six
-
-import astroid
-
-PY36 = sys.version_info >= (3, 6)
-
-
-def _hashlib_transform():
- signature = "value=''"
- template = """
- class %(name)s(object):
- def __init__(self, %(signature)s): pass
- def digest(self):
- return %(digest)s
- def copy(self):
- return self
- def update(self, value): pass
- def hexdigest(self):
- return ''
- @property
- def name(self):
- return %(name)r
- @property
- def block_size(self):
- return 1
- @property
- def digest_size(self):
- return 1
- """
- algorithms_with_signature = dict.fromkeys(
- ["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
- )
- if PY36:
- blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
- person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
- node_depth=0, inner_size=0, last_node=False"
- blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
- person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
- node_depth=0, inner_size=0, last_node=False"
- new_algorithms = dict.fromkeys(
- ["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
- signature,
- )
- algorithms_with_signature.update(new_algorithms)
- algorithms_with_signature.update(
- {"blake2b": blake2b_signature, "blake2s": blake2s_signature}
- )
- classes = "".join(
- template
- % {
- "name": hashfunc,
- "digest": 'b""' if six.PY3 else '""',
- "signature": signature,
- }
- for hashfunc, signature in algorithms_with_signature.items()
- )
- return astroid.parse(classes)
-
-
-astroid.register_module_extender(astroid.MANAGER, "hashlib", _hashlib_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_http.py b/venv/Lib/site-packages/astroid/brain/brain_http.py
deleted file mode 100644
index a3aa814..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_http.py
+++ /dev/null
@@ -1,201 +0,0 @@
-# Copyright (c) 2018 Claudiu Popa <pcmanticore@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid brain hints for some of the `http` module."""
-import textwrap
-
-import astroid
-from astroid.builder import AstroidBuilder
-
-
-def _http_transform():
- code = textwrap.dedent(
- """
- from collections import namedtuple
- _HTTPStatus = namedtuple('_HTTPStatus', 'value phrase description')
-
- class HTTPStatus:
-
- # informational
- CONTINUE = _HTTPStatus(100, 'Continue', 'Request received, please continue')
- SWITCHING_PROTOCOLS = _HTTPStatus(101, 'Switching Protocols',
- 'Switching to new protocol; obey Upgrade header')
- PROCESSING = _HTTPStatus(102, 'Processing', '')
- OK = _HTTPStatus(200, 'OK', 'Request fulfilled, document follows')
- CREATED = _HTTPStatus(201, 'Created', 'Document created, URL follows')
- ACCEPTED = _HTTPStatus(202, 'Accepted',
- 'Request accepted, processing continues off-line')
- NON_AUTHORITATIVE_INFORMATION = _HTTPStatus(203,
- 'Non-Authoritative Information', 'Request fulfilled from cache')
- NO_CONTENT = _HTTPStatus(204, 'No Content', 'Request fulfilled, nothing follows')
- RESET_CONTENT =_HTTPStatus(205, 'Reset Content', 'Clear input form for further input')
- PARTIAL_CONTENT = _HTTPStatus(206, 'Partial Content', 'Partial content follows')
- MULTI_STATUS = _HTTPStatus(207, 'Multi-Status', '')
- ALREADY_REPORTED = _HTTPStatus(208, 'Already Reported', '')
- IM_USED = _HTTPStatus(226, 'IM Used', '')
- MULTIPLE_CHOICES = _HTTPStatus(300, 'Multiple Choices',
- 'Object has several resources -- see URI list')
- MOVED_PERMANENTLY = _HTTPStatus(301, 'Moved Permanently',
- 'Object moved permanently -- see URI list')
- FOUND = _HTTPStatus(302, 'Found', 'Object moved temporarily -- see URI list')
- SEE_OTHER = _HTTPStatus(303, 'See Other', 'Object moved -- see Method and URL list')
- NOT_MODIFIED = _HTTPStatus(304, 'Not Modified',
- 'Document has not changed since given time')
- USE_PROXY = _HTTPStatus(305, 'Use Proxy',
- 'You must use proxy specified in Location to access this resource')
- TEMPORARY_REDIRECT = _HTTPStatus(307, 'Temporary Redirect',
- 'Object moved temporarily -- see URI list')
- PERMANENT_REDIRECT = _HTTPStatus(308, 'Permanent Redirect',
- 'Object moved permanently -- see URI list')
- BAD_REQUEST = _HTTPStatus(400, 'Bad Request',
- 'Bad request syntax or unsupported method')
- UNAUTHORIZED = _HTTPStatus(401, 'Unauthorized',
- 'No permission -- see authorization schemes')
- PAYMENT_REQUIRED = _HTTPStatus(402, 'Payment Required',
- 'No payment -- see charging schemes')
- FORBIDDEN = _HTTPStatus(403, 'Forbidden',
- 'Request forbidden -- authorization will not help')
- NOT_FOUND = _HTTPStatus(404, 'Not Found',
- 'Nothing matches the given URI')
- METHOD_NOT_ALLOWED = _HTTPStatus(405, 'Method Not Allowed',
- 'Specified method is invalid for this resource')
- NOT_ACCEPTABLE = _HTTPStatus(406, 'Not Acceptable',
- 'URI not available in preferred format')
- PROXY_AUTHENTICATION_REQUIRED = _HTTPStatus(407,
- 'Proxy Authentication Required',
- 'You must authenticate with this proxy before proceeding')
- REQUEST_TIMEOUT = _HTTPStatus(408, 'Request Timeout',
- 'Request timed out; try again later')
- CONFLICT = _HTTPStatus(409, 'Conflict', 'Request conflict')
- GONE = _HTTPStatus(410, 'Gone',
- 'URI no longer exists and has been permanently removed')
- LENGTH_REQUIRED = _HTTPStatus(411, 'Length Required',
- 'Client must specify Content-Length')
- PRECONDITION_FAILED = _HTTPStatus(412, 'Precondition Failed',
- 'Precondition in headers is false')
- REQUEST_ENTITY_TOO_LARGE = _HTTPStatus(413, 'Request Entity Too Large',
- 'Entity is too large')
- REQUEST_URI_TOO_LONG = _HTTPStatus(414, 'Request-URI Too Long',
- 'URI is too long')
- UNSUPPORTED_MEDIA_TYPE = _HTTPStatus(415, 'Unsupported Media Type',
- 'Entity body in unsupported format')
- REQUESTED_RANGE_NOT_SATISFIABLE = _HTTPStatus(416,
- 'Requested Range Not Satisfiable',
- 'Cannot satisfy request range')
- EXPECTATION_FAILED = _HTTPStatus(417, 'Expectation Failed',
- 'Expect condition could not be satisfied')
- MISDIRECTED_REQUEST = _HTTPStatus(421, 'Misdirected Request',
- 'Server is not able to produce a response')
- UNPROCESSABLE_ENTITY = _HTTPStatus(422, 'Unprocessable Entity')
- LOCKED = _HTTPStatus(423, 'Locked')
- FAILED_DEPENDENCY = _HTTPStatus(424, 'Failed Dependency')
- UPGRADE_REQUIRED = _HTTPStatus(426, 'Upgrade Required')
- PRECONDITION_REQUIRED = _HTTPStatus(428, 'Precondition Required',
- 'The origin server requires the request to be conditional')
- TOO_MANY_REQUESTS = _HTTPStatus(429, 'Too Many Requests',
- 'The user has sent too many requests in '
- 'a given amount of time ("rate limiting")')
- REQUEST_HEADER_FIELDS_TOO_LARGE = _HTTPStatus(431,
- 'Request Header Fields Too Large',
- 'The server is unwilling to process the request because its header '
- 'fields are too large')
- UNAVAILABLE_FOR_LEGAL_REASONS = _HTTPStatus(451,
- 'Unavailable For Legal Reasons',
- 'The server is denying access to the '
- 'resource as a consequence of a legal demand')
- INTERNAL_SERVER_ERROR = _HTTPStatus(500, 'Internal Server Error',
- 'Server got itself in trouble')
- NOT_IMPLEMENTED = _HTTPStatus(501, 'Not Implemented',
- 'Server does not support this operation')
- BAD_GATEWAY = _HTTPStatus(502, 'Bad Gateway',
- 'Invalid responses from another server/proxy')
- SERVICE_UNAVAILABLE = _HTTPStatus(503, 'Service Unavailable',
- 'The server cannot process the request due to a high load')
- GATEWAY_TIMEOUT = _HTTPStatus(504, 'Gateway Timeout',
- 'The gateway server did not receive a timely response')
- HTTP_VERSION_NOT_SUPPORTED = _HTTPStatus(505, 'HTTP Version Not Supported',
- 'Cannot fulfill request')
- VARIANT_ALSO_NEGOTIATES = _HTTPStatus(506, 'Variant Also Negotiates')
- INSUFFICIENT_STORAGE = _HTTPStatus(507, 'Insufficient Storage')
- LOOP_DETECTED = _HTTPStatus(508, 'Loop Detected')
- NOT_EXTENDED = _HTTPStatus(510, 'Not Extended')
- NETWORK_AUTHENTICATION_REQUIRED = _HTTPStatus(511,
- 'Network Authentication Required',
- 'The client needs to authenticate to gain network access')
- """
- )
- return AstroidBuilder(astroid.MANAGER).string_build(code)
-
-
-def _http_client_transform():
- return AstroidBuilder(astroid.MANAGER).string_build(
- textwrap.dedent(
- """
- from http import HTTPStatus
-
- CONTINUE = HTTPStatus.CONTINUE
- SWITCHING_PROTOCOLS = HTTPStatus.SWITCHING_PROTOCOLS
- PROCESSING = HTTPStatus.PROCESSING
- OK = HTTPStatus.OK
- CREATED = HTTPStatus.CREATED
- ACCEPTED = HTTPStatus.ACCEPTED
- NON_AUTHORITATIVE_INFORMATION = HTTPStatus.NON_AUTHORITATIVE_INFORMATION
- NO_CONTENT = HTTPStatus.NO_CONTENT
- RESET_CONTENT = HTTPStatus.RESET_CONTENT
- PARTIAL_CONTENT = HTTPStatus.PARTIAL_CONTENT
- MULTI_STATUS = HTTPStatus.MULTI_STATUS
- ALREADY_REPORTED = HTTPStatus.ALREADY_REPORTED
- IM_USED = HTTPStatus.IM_USED
- MULTIPLE_CHOICES = HTTPStatus.MULTIPLE_CHOICES
- MOVED_PERMANENTLY = HTTPStatus.MOVED_PERMANENTLY
- FOUND = HTTPStatus.FOUND
- SEE_OTHER = HTTPStatus.SEE_OTHER
- NOT_MODIFIED = HTTPStatus.NOT_MODIFIED
- USE_PROXY = HTTPStatus.USE_PROXY
- TEMPORARY_REDIRECT = HTTPStatus.TEMPORARY_REDIRECT
- PERMANENT_REDIRECT = HTTPStatus.PERMANENT_REDIRECT
- BAD_REQUEST = HTTPStatus.BAD_REQUEST
- UNAUTHORIZED = HTTPStatus.UNAUTHORIZED
- PAYMENT_REQUIRED = HTTPStatus.PAYMENT_REQUIRED
- FORBIDDEN = HTTPStatus.FORBIDDEN
- NOT_FOUND = HTTPStatus.NOT_FOUND
- METHOD_NOT_ALLOWED = HTTPStatus.METHOD_NOT_ALLOWED
- NOT_ACCEPTABLE = HTTPStatus.NOT_ACCEPTABLE
- PROXY_AUTHENTICATION_REQUIRED = HTTPStatus.PROXY_AUTHENTICATION_REQUIRED
- REQUEST_TIMEOUT = HTTPStatus.REQUEST_TIMEOUT
- CONFLICT = HTTPStatus.CONFLICT
- GONE = HTTPStatus.GONE
- LENGTH_REQUIRED = HTTPStatus.LENGTH_REQUIRED
- PRECONDITION_FAILED = HTTPStatus.PRECONDITION_FAILED
- REQUEST_ENTITY_TOO_LARGE = HTTPStatus.REQUEST_ENTITY_TOO_LARGE
- REQUEST_URI_TOO_LONG = HTTPStatus.REQUEST_URI_TOO_LONG
- UNSUPPORTED_MEDIA_TYPE = HTTPStatus.UNSUPPORTED_MEDIA_TYPE
- REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE
- EXPECTATION_FAILED = HTTPStatus.EXPECTATION_FAILED
- UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_ENTITY
- LOCKED = HTTPStatus.LOCKED
- FAILED_DEPENDENCY = HTTPStatus.FAILED_DEPENDENCY
- UPGRADE_REQUIRED = HTTPStatus.UPGRADE_REQUIRED
- PRECONDITION_REQUIRED = HTTPStatus.PRECONDITION_REQUIRED
- TOO_MANY_REQUESTS = HTTPStatus.TOO_MANY_REQUESTS
- REQUEST_HEADER_FIELDS_TOO_LARGE = HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE
- INTERNAL_SERVER_ERROR = HTTPStatus.INTERNAL_SERVER_ERROR
- NOT_IMPLEMENTED = HTTPStatus.NOT_IMPLEMENTED
- BAD_GATEWAY = HTTPStatus.BAD_GATEWAY
- SERVICE_UNAVAILABLE = HTTPStatus.SERVICE_UNAVAILABLE
- GATEWAY_TIMEOUT = HTTPStatus.GATEWAY_TIMEOUT
- HTTP_VERSION_NOT_SUPPORTED = HTTPStatus.HTTP_VERSION_NOT_SUPPORTED
- VARIANT_ALSO_NEGOTIATES = HTTPStatus.VARIANT_ALSO_NEGOTIATES
- INSUFFICIENT_STORAGE = HTTPStatus.INSUFFICIENT_STORAGE
- LOOP_DETECTED = HTTPStatus.LOOP_DETECTED
- NOT_EXTENDED = HTTPStatus.NOT_EXTENDED
- NETWORK_AUTHENTICATION_REQUIRED = HTTPStatus.NETWORK_AUTHENTICATION_REQUIRED
- """
- )
- )
-
-
-astroid.register_module_extender(astroid.MANAGER, "http", _http_transform)
-astroid.register_module_extender(astroid.MANAGER, "http.client", _http_client_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_io.py b/venv/Lib/site-packages/astroid/brain/brain_io.py
deleted file mode 100644
index 4c68922..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_io.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid brain hints for some of the _io C objects."""
-
-import astroid
-
-
-BUFFERED = {"BufferedWriter", "BufferedReader"}
-TextIOWrapper = "TextIOWrapper"
-FileIO = "FileIO"
-BufferedWriter = "BufferedWriter"
-
-
-def _generic_io_transform(node, name, cls):
- """Transform the given name, by adding the given *class* as a member of the node."""
-
- io_module = astroid.MANAGER.ast_from_module_name("_io")
- attribute_object = io_module[cls]
- instance = attribute_object.instantiate_class()
- node.locals[name] = [instance]
-
-
-def _transform_text_io_wrapper(node):
- # This is not always correct, since the type can vary depending on whether the
- # descriptor is stdout, stderr or stdin. But we cannot get access to the name
- # of the stream, which is why we use the BufferedWriter class as a default
- # value.
- return _generic_io_transform(node, name="buffer", cls=BufferedWriter)
-
-
-def _transform_buffered(node):
- return _generic_io_transform(node, name="raw", cls=FileIO)
-
-
-astroid.MANAGER.register_transform(
- astroid.ClassDef, _transform_buffered, lambda node: node.name in BUFFERED
-)
-astroid.MANAGER.register_transform(
- astroid.ClassDef,
- _transform_text_io_wrapper,
- lambda node: node.name == TextIOWrapper,
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_mechanize.py b/venv/Lib/site-packages/astroid/brain/brain_mechanize.py
deleted file mode 100644
index 93f282e..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_mechanize.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-from astroid import MANAGER, register_module_extender
-from astroid.builder import AstroidBuilder
-
-
-def mechanize_transform():
- return AstroidBuilder(MANAGER).string_build(
- """
-
-class Browser(object):
- def open(self, url, data=None, timeout=None):
- return None
- def open_novisit(self, url, data=None, timeout=None):
- return None
- def open_local_file(self, filename):
- return None
-
-"""
- )
-
-
-register_module_extender(MANAGER, "mechanize", mechanize_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py b/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py
deleted file mode 100644
index 71256ee..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-import sys
-
-import astroid
-from astroid import exceptions
-
-
-def _multiprocessing_transform():
- module = astroid.parse(
- """
- from multiprocessing.managers import SyncManager
- def Manager():
- return SyncManager()
- """
- )
- # Multiprocessing uses a getattr lookup inside contexts,
- # in order to get the attributes they need. Since it's extremely
- # dynamic, we use this approach to fake it.
- node = astroid.parse(
- """
- from multiprocessing.context import DefaultContext, BaseContext
- default = DefaultContext()
- base = BaseContext()
- """
- )
- try:
- context = next(node["default"].infer())
- base = next(node["base"].infer())
- except exceptions.InferenceError:
- return module
-
- for node in (context, base):
- for key, value in node.locals.items():
- if key.startswith("_"):
- continue
-
- value = value[0]
- if isinstance(value, astroid.FunctionDef):
- # We need to rebind this, since otherwise
- # it will have an extra argument (self).
- value = astroid.BoundMethod(value, node)
- module[key] = value
- return module
-
-
-def _multiprocessing_managers_transform():
- return astroid.parse(
- """
- import array
- import threading
- import multiprocessing.pool as pool
-
- import six
-
- class Namespace(object):
- pass
-
- class Value(object):
- def __init__(self, typecode, value, lock=True):
- self._typecode = typecode
- self._value = value
- def get(self):
- return self._value
- def set(self, value):
- self._value = value
- def __repr__(self):
- return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
- value = property(get, set)
-
- def Array(typecode, sequence, lock=True):
- return array.array(typecode, sequence)
-
- class SyncManager(object):
- Queue = JoinableQueue = six.moves.queue.Queue
- Event = threading.Event
- RLock = threading.RLock
- BoundedSemaphore = threading.BoundedSemaphore
- Condition = threading.Condition
- Barrier = threading.Barrier
- Pool = pool.Pool
- list = list
- dict = dict
- Value = Value
- Array = Array
- Namespace = Namespace
- __enter__ = lambda self: self
- __exit__ = lambda *args: args
-
- def start(self, initializer=None, initargs=None):
- pass
- def shutdown(self):
- pass
- """
- )
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "multiprocessing.managers", _multiprocessing_managers_transform
-)
-astroid.register_module_extender(
- astroid.MANAGER, "multiprocessing", _multiprocessing_transform
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py b/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py
deleted file mode 100644
index de24067..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py
+++ /dev/null
@@ -1,449 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
-# Copyright (c) 2015 David Shea <dshea@redhat.com>
-# Copyright (c) 2015 Philip Lorenz <philip@bithub.de>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2016 Mateusz Bysiek <mb@mbdev.pl>
-# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for the Python standard library."""
-
-import functools
-import keyword
-from textwrap import dedent
-
-from astroid import MANAGER, UseInferenceDefault, inference_tip, InferenceError
-from astroid import arguments
-from astroid import exceptions
-from astroid import nodes
-from astroid.builder import AstroidBuilder, extract_node
-from astroid import util
-
-
-TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
-ENUM_BASE_NAMES = {
- "Enum",
- "IntEnum",
- "enum.Enum",
- "enum.IntEnum",
- "IntFlag",
- "enum.IntFlag",
-}
-
-
-def _infer_first(node, context):
- if node is util.Uninferable:
- raise UseInferenceDefault
- try:
- value = next(node.infer(context=context))
- if value is util.Uninferable:
- raise UseInferenceDefault()
- else:
- return value
- except StopIteration:
- raise InferenceError()
-
-
-def _find_func_form_arguments(node, context):
- def _extract_namedtuple_arg_or_keyword(position, key_name=None):
-
- if len(args) > position:
- return _infer_first(args[position], context)
- if key_name and key_name in found_keywords:
- return _infer_first(found_keywords[key_name], context)
-
- args = node.args
- keywords = node.keywords
- found_keywords = (
- {keyword.arg: keyword.value for keyword in keywords} if keywords else {}
- )
-
- name = _extract_namedtuple_arg_or_keyword(position=0, key_name="typename")
- names = _extract_namedtuple_arg_or_keyword(position=1, key_name="field_names")
- if name and names:
- return name.value, names
-
- raise UseInferenceDefault()
-
-
-def infer_func_form(node, base_type, context=None, enum=False):
- """Specific inference function for namedtuple or Python 3 enum. """
- # node is a Call node, class name as first argument and generated class
- # attributes as second argument
-
- # A namedtuple's or enum's list of attributes can be a list of strings or a
- # whitespace-separated string
- try:
- name, names = _find_func_form_arguments(node, context)
- try:
- attributes = names.value.replace(",", " ").split()
- except AttributeError:
- if not enum:
- attributes = [
- _infer_first(const, context).value for const in names.elts
- ]
- else:
- # Enums support either an iterator of (name, value) pairs
- # or a mapping.
- if hasattr(names, "items") and isinstance(names.items, list):
- attributes = [
- _infer_first(const[0], context).value
- for const in names.items
- if isinstance(const[0], nodes.Const)
- ]
- elif hasattr(names, "elts"):
- # Enums can support either ["a", "b", "c"]
- # or [("a", 1), ("b", 2), ...], but they can't
- # be mixed.
- if all(isinstance(const, nodes.Tuple) for const in names.elts):
- attributes = [
- _infer_first(const.elts[0], context).value
- for const in names.elts
- if isinstance(const, nodes.Tuple)
- ]
- else:
- attributes = [
- _infer_first(const, context).value for const in names.elts
- ]
- else:
- raise AttributeError
- if not attributes:
- raise AttributeError
- except (AttributeError, exceptions.InferenceError):
- raise UseInferenceDefault()
-
- # If we can't infer the name of the class, don't crash; up to this point
- # we know it is a namedtuple anyway.
- name = name or "Uninferable"
- # we want to return a Class node instance with proper attributes set
- class_node = nodes.ClassDef(name, "docstring")
- class_node.parent = node.parent
- # set base class=tuple
- class_node.bases.append(base_type)
- # XXX add __init__(*attributes) method
- for attr in attributes:
- fake_node = nodes.EmptyNode()
- fake_node.parent = class_node
- fake_node.attrname = attr
- class_node.instance_attrs[attr] = [fake_node]
- return class_node, name, attributes
-
-
-def _has_namedtuple_base(node):
- """Predicate for class inference tip
-
- :type node: ClassDef
- :rtype: bool
- """
- return set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES
-
-
-def _looks_like(node, name):
- func = node.func
- if isinstance(func, nodes.Attribute):
- return func.attrname == name
- if isinstance(func, nodes.Name):
- return func.name == name
- return False
-
-
-_looks_like_namedtuple = functools.partial(_looks_like, name="namedtuple")
-_looks_like_enum = functools.partial(_looks_like, name="Enum")
-_looks_like_typing_namedtuple = functools.partial(_looks_like, name="NamedTuple")
-
-
-def infer_named_tuple(node, context=None):
- """Specific inference function for namedtuple Call node"""
- tuple_base_name = nodes.Name(name="tuple", parent=node.root())
- class_node, name, attributes = infer_func_form(
- node, tuple_base_name, context=context
- )
- call_site = arguments.CallSite.from_call(node)
- func = next(extract_node("import collections; collections.namedtuple").infer())
- try:
- rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
- except InferenceError:
- rename = False
-
- if rename:
- attributes = _get_renamed_namedtuple_attributes(attributes)
-
- replace_args = ", ".join("{arg}=None".format(arg=arg) for arg in attributes)
- field_def = (
- " {name} = property(lambda self: self[{index:d}], "
- "doc='Alias for field number {index:d}')"
- )
- field_defs = "\n".join(
- field_def.format(name=name, index=index)
- for index, name in enumerate(attributes)
- )
- fake = AstroidBuilder(MANAGER).string_build(
- """
-class %(name)s(tuple):
- __slots__ = ()
- _fields = %(fields)r
- def _asdict(self):
- return self.__dict__
- @classmethod
- def _make(cls, iterable, new=tuple.__new__, len=len):
- return new(cls, iterable)
- def _replace(self, %(replace_args)s):
- return self
- def __getnewargs__(self):
- return tuple(self)
-%(field_defs)s
- """
- % {
- "name": name,
- "fields": attributes,
- "field_defs": field_defs,
- "replace_args": replace_args,
- }
- )
- class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
- class_node.locals["_make"] = fake.body[0].locals["_make"]
- class_node.locals["_replace"] = fake.body[0].locals["_replace"]
- class_node.locals["_fields"] = fake.body[0].locals["_fields"]
- for attr in attributes:
- class_node.locals[attr] = fake.body[0].locals[attr]
- # because we use UseInferenceDefault, we can't be a generator, so return an iterator
- return iter([class_node])
-
-
-def _get_renamed_namedtuple_attributes(field_names):
- names = list(field_names)
- seen = set()
- for i, name in enumerate(field_names):
- if (
- not all(c.isalnum() or c == "_" for c in name)
- or keyword.iskeyword(name)
- or not name
- or name[0].isdigit()
- or name.startswith("_")
- or name in seen
- ):
- names[i] = "_%d" % i
- seen.add(name)
- return tuple(names)
-
-
-def infer_enum(node, context=None):
- """ Specific inference function for enum Call node. """
- enum_meta = extract_node(
- """
- class EnumMeta(object):
- 'docstring'
- def __call__(self, node):
- class EnumAttribute(object):
- name = ''
- value = 0
- return EnumAttribute()
- def __iter__(self):
- class EnumAttribute(object):
- name = ''
- value = 0
- return [EnumAttribute()]
- def __reversed__(self):
- class EnumAttribute(object):
- name = ''
- value = 0
- return (EnumAttribute, )
- def __next__(self):
- return next(iter(self))
- def __getitem__(self, attr):
- class Value(object):
- @property
- def name(self):
- return ''
- @property
- def value(self):
- return attr
-
- return Value()
- __members__ = ['']
- """
- )
- class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0]
- return iter([class_node.instantiate_class()])
-
-
-INT_FLAG_ADDITION_METHODS = """
- def __or__(self, other):
- return {name}(self.value | other.value)
- def __and__(self, other):
- return {name}(self.value & other.value)
- def __xor__(self, other):
- return {name}(self.value ^ other.value)
- def __add__(self, other):
- return {name}(self.value + other.value)
- def __div__(self, other):
- return {name}(self.value / other.value)
- def __invert__(self):
- return {name}(~self.value)
- def __mul__(self, other):
- return {name}(self.value * other.value)
-"""
-
-
-def infer_enum_class(node):
- """ Specific inference for enums. """
- for basename in node.basenames:
- # TODO: doesn't handle subclasses yet. This implementation
- # is a hack to support enums.
- if basename not in ENUM_BASE_NAMES:
- continue
- if node.root().name == "enum":
- # Skip if the class is directly from enum module.
- break
- for local, values in node.locals.items():
- if any(not isinstance(value, nodes.AssignName) for value in values):
- continue
-
- targets = []
- stmt = values[0].statement()
- if isinstance(stmt, nodes.Assign):
- if isinstance(stmt.targets[0], nodes.Tuple):
- targets = stmt.targets[0].itered()
- else:
- targets = stmt.targets
- elif isinstance(stmt, nodes.AnnAssign):
- targets = [stmt.target]
-
- inferred_return_value = None
- if isinstance(stmt, nodes.Assign):
- if isinstance(stmt.value, nodes.Const):
- if isinstance(stmt.value.value, str):
- inferred_return_value = repr(stmt.value.value)
- else:
- inferred_return_value = stmt.value.value
- else:
- inferred_return_value = stmt.value.as_string()
-
- new_targets = []
- for target in targets:
- # Replace all the assignments with our mocked class.
- classdef = dedent(
- """
- class {name}({types}):
- @property
- def value(self):
- return {return_value}
- @property
- def name(self):
- return "{name}"
- """.format(
- name=target.name,
- types=", ".join(node.basenames),
- return_value=inferred_return_value,
- )
- )
- if "IntFlag" in basename:
- # Alright, we need to add some additional methods.
- # Unfortunately we still can't infer the resulting objects as
- # Enum members, but once we are able to do that, the following
- # should result in some nice symbolic execution
- classdef += INT_FLAG_ADDITION_METHODS.format(name=target.name)
-
- fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
- fake.parent = target.parent
- for method in node.mymethods():
- fake.locals[method.name] = [method]
- new_targets.append(fake.instantiate_class())
- node.locals[local] = new_targets
- break
- return node
-
-
-def infer_typing_namedtuple_class(class_node, context=None):
- """Infer a subclass of typing.NamedTuple"""
- # Check if it has the corresponding bases
- annassigns_fields = [
- annassign.target.name
- for annassign in class_node.body
- if isinstance(annassign, nodes.AnnAssign)
- ]
- code = dedent(
- """
- from collections import namedtuple
- namedtuple({typename!r}, {fields!r})
- """
- ).format(typename=class_node.name, fields=",".join(annassigns_fields))
- node = extract_node(code)
- generated_class_node = next(infer_named_tuple(node, context))
- for method in class_node.mymethods():
- generated_class_node.locals[method.name] = [method]
-
- for assign in class_node.body:
- if not isinstance(assign, nodes.Assign):
- continue
-
- for target in assign.targets:
- attr = target.name
- generated_class_node.locals[attr] = class_node.locals[attr]
-
- return iter((generated_class_node,))
-
-
-def infer_typing_namedtuple(node, context=None):
- """Infer a typing.NamedTuple(...) call."""
- # This is essentially a namedtuple with different arguments
- # so we extract the args and infer a named tuple.
- try:
- func = next(node.func.infer())
- except InferenceError:
- raise UseInferenceDefault
-
- if func.qname() != "typing.NamedTuple":
- raise UseInferenceDefault
-
- if len(node.args) != 2:
- raise UseInferenceDefault
-
- if not isinstance(node.args[1], (nodes.List, nodes.Tuple)):
- raise UseInferenceDefault
-
- names = []
- for elt in node.args[1].elts:
- if not isinstance(elt, (nodes.List, nodes.Tuple)):
- raise UseInferenceDefault
- if len(elt.elts) != 2:
- raise UseInferenceDefault
- names.append(elt.elts[0].as_string())
-
- typename = node.args[0].as_string()
- if names:
- field_names = "({},)".format(",".join(names))
- else:
- field_names = "''"
- node = extract_node(
- "namedtuple({typename}, {fields})".format(typename=typename, fields=field_names)
- )
- return infer_named_tuple(node, context)
-
-
-MANAGER.register_transform(
- nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple
-)
-MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), _looks_like_enum)
-MANAGER.register_transform(
- nodes.ClassDef,
- infer_enum_class,
- predicate=lambda cls: any(
- basename for basename in cls.basenames if basename in ENUM_BASE_NAMES
- ),
-)
-MANAGER.register_transform(
- nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base
-)
-MANAGER.register_transform(
- nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_nose.py b/venv/Lib/site-packages/astroid/brain/brain_nose.py
deleted file mode 100644
index 7b12d76..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_nose.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Hooks for nose library."""
-
-import re
-import textwrap
-
-import astroid
-import astroid.builder
-
-_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
-
-
-def _pep8(name, caps=re.compile("([A-Z])")):
- return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)
-
-
-def _nose_tools_functions():
- """Get an iterator of names and bound methods."""
- module = _BUILDER.string_build(
- textwrap.dedent(
- """
- import unittest
-
- class Test(unittest.TestCase):
- pass
- a = Test()
- """
- )
- )
- try:
- case = next(module["a"].infer())
- except astroid.InferenceError:
- return
- for method in case.methods():
- if method.name.startswith("assert") and "_" not in method.name:
- pep8_name = _pep8(method.name)
- yield pep8_name, astroid.BoundMethod(method, case)
- if method.name == "assertEqual":
- # nose also exports assert_equals.
- yield "assert_equals", astroid.BoundMethod(method, case)
-
-
-def _nose_tools_transform(node):
- for method_name, method in _nose_tools_functions():
- node.locals[method_name] = [method]
-
-
-def _nose_tools_trivial_transform():
- """Custom transform for the nose.tools module."""
- stub = _BUILDER.string_build("""__all__ = []""")
- all_entries = ["ok_", "eq_"]
-
- for pep8_name, method in _nose_tools_functions():
- all_entries.append(pep8_name)
- stub[pep8_name] = method
-
- # Update the __all__ variable, since nose.tools
- # does this manually with .append.
- all_assign = stub["__all__"].parent
- all_object = astroid.List(all_entries)
- all_object.parent = all_assign
- all_assign.value = all_object
- return stub
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "nose.tools.trivial", _nose_tools_trivial_transform
-)
-astroid.MANAGER.register_transform(
- astroid.Module, _nose_tools_transform, lambda n: n.name == "nose.tools"
-)
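A quick illustration of the _pep8 helper above, which derives the snake_case aliases that nose.tools exposes for unittest's camelCase assertions (plain regex, runnable without nose or astroid):

    import re

    def _pep8(name, caps=re.compile("([A-Z])")):
        # Insert "_" before each capital and lowercase it.
        return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)

    print(_pep8("assertEqual"))         # assert_equal
    print(_pep8("assertRaisesRegex"))   # assert_raises_regex
    # The transform then registers each alias as a BoundMethod on nose.tools,
    # plus the legacy assert_equals spelling for assertEqual.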
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py
deleted file mode 100644
index 43b30e4..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for numpy.core.fromnumeric module."""
-
-import astroid
-
-
-def numpy_core_fromnumeric_transform():
- return astroid.parse(
- """
- def sum(a, axis=None, dtype=None, out=None, keepdims=None, initial=None):
- return numpy.ndarray([0, 0])
- """
- )
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "numpy.core.fromnumeric", numpy_core_fromnumeric_transform
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py
deleted file mode 100644
index 05a73d9..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for numpy.core.function_base module."""
-
-import functools
-import astroid
-from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
-
-
-METHODS_TO_BE_INFERRED = {
- "linspace": """def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, axis=0):
- return numpy.ndarray([0, 0])""",
- "logspace": """def logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None, axis=0):
- return numpy.ndarray([0, 0])""",
- "geomspace": """def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
- return numpy.ndarray([0, 0])""",
-}
-
-for func_name, func_src in METHODS_TO_BE_INFERRED.items():
- inference_function = functools.partial(infer_numpy_member, func_src)
- astroid.MANAGER.register_transform(
- astroid.Attribute,
- astroid.inference_tip(inference_function),
- functools.partial(looks_like_numpy_member, func_name),
- )
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py
deleted file mode 100644
index 3032acc..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for numpy.core.multiarray module."""
-
-import functools
-import astroid
-from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
-
-
-def numpy_core_multiarray_transform():
- return astroid.parse(
- """
- # different functions defined in multiarray.py
- def inner(a, b):
- return numpy.ndarray([0, 0])
-
- def vdot(a, b):
- return numpy.ndarray([0, 0])
- """
- )
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "numpy.core.multiarray", numpy_core_multiarray_transform
-)
-
-
-METHODS_TO_BE_INFERRED = {
- "array": """def array(object, dtype=None, copy=True, order='K', subok=False, ndmin=0):
- return numpy.ndarray([0, 0])""",
- "dot": """def dot(a, b, out=None):
- return numpy.ndarray([0, 0])""",
- "empty_like": """def empty_like(a, dtype=None, order='K', subok=True):
- return numpy.ndarray((0, 0))""",
- "concatenate": """def concatenate(arrays, axis=None, out=None):
- return numpy.ndarray((0, 0))""",
- "where": """def where(condition, x=None, y=None):
- return numpy.ndarray([0, 0])""",
- "empty": """def empty(shape, dtype=float, order='C'):
- return numpy.ndarray([0, 0])""",
- "zeros": """def zeros(shape, dtype=float, order='C'):
- return numpy.ndarray([0, 0])""",
-}
-
-for method_name, function_src in METHODS_TO_BE_INFERRED.items():
- inference_function = functools.partial(infer_numpy_member, function_src)
- astroid.MANAGER.register_transform(
- astroid.Attribute,
- astroid.inference_tip(inference_function),
- functools.partial(looks_like_numpy_member, method_name),
- )
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py
deleted file mode 100644
index ba43c94..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for numpy.core.numeric module."""
-
-import functools
-import astroid
-from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
-
-
-def numpy_core_numeric_transform():
- return astroid.parse(
- """
- # different functions defined in numeric.py
- import numpy
- def zeros_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
- def ones_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
- def full_like(a, fill_value, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
- """
- )
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "numpy.core.numeric", numpy_core_numeric_transform
-)
-
-
-METHODS_TO_BE_INFERRED = {
- "ones": """def ones(shape, dtype=None, order='C'):
- return numpy.ndarray([0, 0])"""
-}
-
-
-for method_name, function_src in METHODS_TO_BE_INFERRED.items():
- inference_function = functools.partial(infer_numpy_member, function_src)
- astroid.MANAGER.register_transform(
- astroid.Attribute,
- astroid.inference_tip(inference_function),
- functools.partial(looks_like_numpy_member, method_name),
- )
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py
deleted file mode 100644
index 42021fa..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-# TODO(hippo91) : correct the methods signature.
-
-"""Astroid hooks for numpy.core.numerictypes module."""
-
-import astroid
-
-
-def numpy_core_numerictypes_transform():
- return astroid.parse(
- """
- # different types defined in numerictypes.py
- class generic(object):
- def __init__(self, value):
- self.T = None
- self.base = None
- self.data = None
- self.dtype = None
- self.flags = None
- self.flat = None
- self.imag = None
- self.itemsize = None
- self.nbytes = None
- self.ndim = None
- self.real = None
- self.size = None
- self.strides = None
-
- def all(self): return uninferable
- def any(self): return uninferable
- def argmax(self): return uninferable
- def argmin(self): return uninferable
- def argsort(self): return uninferable
- def astype(self): return uninferable
- def base(self): return uninferable
- def byteswap(self): return uninferable
- def choose(self): return uninferable
- def clip(self): return uninferable
- def compress(self): return uninferable
- def conj(self): return uninferable
- def conjugate(self): return uninferable
- def copy(self): return uninferable
- def cumprod(self): return uninferable
- def cumsum(self): return uninferable
- def data(self): return uninferable
- def diagonal(self): return uninferable
- def dtype(self): return uninferable
- def dump(self): return uninferable
- def dumps(self): return uninferable
- def fill(self): return uninferable
- def flags(self): return uninferable
- def flat(self): return uninferable
- def flatten(self): return uninferable
- def getfield(self): return uninferable
- def imag(self): return uninferable
- def item(self): return uninferable
- def itemset(self): return uninferable
- def itemsize(self): return uninferable
- def max(self): return uninferable
- def mean(self): return uninferable
- def min(self): return uninferable
- def nbytes(self): return uninferable
- def ndim(self): return uninferable
- def newbyteorder(self): return uninferable
- def nonzero(self): return uninferable
- def prod(self): return uninferable
- def ptp(self): return uninferable
- def put(self): return uninferable
- def ravel(self): return uninferable
- def real(self): return uninferable
- def repeat(self): return uninferable
- def reshape(self): return uninferable
- def resize(self): return uninferable
- def round(self): return uninferable
- def searchsorted(self): return uninferable
- def setfield(self): return uninferable
- def setflags(self): return uninferable
- def shape(self): return uninferable
- def size(self): return uninferable
- def sort(self): return uninferable
- def squeeze(self): return uninferable
- def std(self): return uninferable
- def strides(self): return uninferable
- def sum(self): return uninferable
- def swapaxes(self): return uninferable
- def take(self): return uninferable
- def tobytes(self): return uninferable
- def tofile(self): return uninferable
- def tolist(self): return uninferable
- def tostring(self): return uninferable
- def trace(self): return uninferable
- def transpose(self): return uninferable
- def var(self): return uninferable
- def view(self): return uninferable
-
-
- class dtype(object):
- def __init__(self, obj, align=False, copy=False):
- self.alignment = None
- self.base = None
- self.byteorder = None
- self.char = None
- self.descr = None
- self.fields = None
- self.flags = None
- self.hasobject = None
- self.isalignedstruct = None
- self.isbuiltin = None
- self.isnative = None
- self.itemsize = None
- self.kind = None
- self.metadata = None
- self.name = None
- self.names = None
- self.num = None
- self.shape = None
- self.str = None
- self.subdtype = None
- self.type = None
-
- def newbyteorder(self, new_order='S'): return uninferable
- def __neg__(self): return uninferable
-
- class busdaycalendar(object):
- def __init__(self, weekmask='1111100', holidays=None):
- self.holidays = None
- self.weekmask = None
-
- class flexible(generic): pass
- class bool_(generic): pass
- class number(generic):
- def __neg__(self): return uninferable
- class datetime64(generic):
- def __init__(self, nb, unit=None): pass
-
-
- class void(flexible):
- def __init__(self, *args, **kwargs):
- self.base = None
- self.dtype = None
- self.flags = None
- def getfield(self): return uninferable
- def setfield(self): return uninferable
-
-
- class character(flexible): pass
-
-
- class integer(number):
- def __init__(self, value):
- self.denominator = None
- self.numerator = None
-
-
- class inexact(number): pass
-
-
- class str_(str, character):
- def maketrans(self, x, y=None, z=None): return uninferable
-
-
- class bytes_(bytes, character):
- def fromhex(self, string): return uninferable
- def maketrans(self, frm, to): return uninferable
-
-
- class signedinteger(integer): pass
-
-
- class unsignedinteger(integer): pass
-
-
- class complexfloating(inexact): pass
-
-
- class floating(inexact): pass
-
-
- class float64(floating, float):
- def fromhex(self, string): return uninferable
-
-
- class uint64(unsignedinteger): pass
- class complex64(complexfloating): pass
- class int16(signedinteger): pass
- class float96(floating): pass
- class int8(signedinteger): pass
- class uint32(unsignedinteger): pass
- class uint8(unsignedinteger): pass
- class _typedict(dict): pass
- class complex192(complexfloating): pass
- class timedelta64(signedinteger):
- def __init__(self, nb, unit=None): pass
- class int32(signedinteger): pass
- class uint16(unsignedinteger): pass
- class float32(floating): pass
- class complex128(complexfloating, complex): pass
- class float16(floating): pass
- class int64(signedinteger): pass
-
- buffer_type = memoryview
- bool8 = bool_
- byte = int8
- bytes0 = bytes_
- cdouble = complex128
- cfloat = complex128
- clongdouble = complex192
- clongfloat = complex192
- complex_ = complex128
- csingle = complex64
- double = float64
- float_ = float64
- half = float16
- int0 = int32
- int_ = int32
- intc = int32
- intp = int32
- long = int32
- longcomplex = complex192
- longdouble = float96
- longfloat = float96
- longlong = int64
- object0 = object_
- object_ = object_
- short = int16
- single = float32
- singlecomplex = complex64
- str0 = str_
- string_ = bytes_
- ubyte = uint8
- uint = uint32
- uint0 = uint32
- uintc = uint32
- uintp = uint32
- ulonglong = uint64
- unicode = str_
- unicode_ = str_
- ushort = uint16
- void0 = void
- """
- )
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "numpy.core.numerictypes", numpy_core_numerictypes_transform
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py
deleted file mode 100644
index 459d38c..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for numpy.core.umath module."""
-
-import astroid
-
-
-def numpy_core_umath_transform():
- ufunc_optional_keyword_arguments = (
- """out=None, where=True, casting='same_kind', order='K', """
- """dtype=None, subok=True"""
- )
- return astroid.parse(
- """
- # Constants
- e = 2.718281828459045
- euler_gamma = 0.5772156649015329
-
- # No arg functions
- def geterrobj(): return []
-
- # One arg functions
- def seterrobj(errobj): return None
-
- # One arg functions with optional kwargs
- def arccos(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def arccosh(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def arcsin(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def arcsinh(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def arctan(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def arctanh(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def cbrt(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def conj(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def conjugate(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def cosh(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def deg2rad(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def degrees(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def exp2(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def expm1(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def fabs(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def frexp(x, {opt_args:s}): return (numpy.ndarray((0, 0)), numpy.ndarray((0, 0)))
- def isfinite(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def isinf(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def log(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def log1p(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def log2(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def logical_not(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def modf(x, {opt_args:s}): return (numpy.ndarray((0, 0)), numpy.ndarray((0, 0)))
- def negative(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def rad2deg(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def radians(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def reciprocal(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def rint(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def sign(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def signbit(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def sinh(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def spacing(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def square(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def tan(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def tanh(x, {opt_args:s}): return numpy.ndarray((0, 0))
- def trunc(x, {opt_args:s}): return numpy.ndarray((0, 0))
-
- # Two args functions with optional kwargs
- def bitwise_and(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def bitwise_or(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def bitwise_xor(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def copysign(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def divide(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def equal(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def floor_divide(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def fmax(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def fmin(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def fmod(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def greater(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def hypot(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def ldexp(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def left_shift(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def less(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def logaddexp(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def logaddexp2(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def logical_and(x1, x2, {opt_args:s}): return numpy.ndarray([0, 0])
- def logical_or(x1, x2, {opt_args:s}): return numpy.ndarray([0, 0])
- def logical_xor(x1, x2, {opt_args:s}): return numpy.ndarray([0, 0])
- def maximum(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def minimum(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def nextafter(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def not_equal(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def power(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def remainder(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def right_shift(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def subtract(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- def true_divide(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
- """.format(
- opt_args=ufunc_optional_keyword_arguments
- )
- )
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "numpy.core.umath", numpy_core_umath_transform
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
deleted file mode 100644
index 8c231a3..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for numpy ndarray class."""
-
-import functools
-import astroid
-
-
-def infer_numpy_ndarray(node, context=None):
- ndarray = """
- class ndarray(object):
- def __init__(self, shape, dtype=float, buffer=None, offset=0,
- strides=None, order=None):
- self.T = None
- self.base = None
- self.ctypes = None
- self.data = None
- self.dtype = None
- self.flags = None
- self.flat = None
- self.imag = None
- self.itemsize = None
- self.nbytes = None
- self.ndim = None
- self.real = None
- self.shape = None
- self.size = None
- self.strides = None
-
- def __abs__(self): return numpy.ndarray([0, 0])
- def __add__(self, value): return numpy.ndarray([0, 0])
- def __and__(self, value): return numpy.ndarray([0, 0])
- def __array__(self, dtype=None): return numpy.ndarray([0, 0])
- def __array_wrap__(self, obj): return numpy.ndarray([0, 0])
- def __contains__(self, key): return True
- def __copy__(self): return numpy.ndarray([0, 0])
- def __deepcopy__(self, memo): return numpy.ndarray([0, 0])
- def __divmod__(self, value): return (numpy.ndarray([0, 0]), numpy.ndarray([0, 0]))
- def __eq__(self, value): return numpy.ndarray([0, 0])
- def __float__(self): return 0.
- def __floordiv__(self): return numpy.ndarray([0, 0])
- def __ge__(self, value): return numpy.ndarray([0, 0])
- def __getitem__(self, key): return uninferable
- def __gt__(self, value): return numpy.ndarray([0, 0])
- def __iadd__(self, value): return numpy.ndarray([0, 0])
- def __iand__(self, value): return numpy.ndarray([0, 0])
- def __ifloordiv__(self, value): return numpy.ndarray([0, 0])
- def __ilshift__(self, value): return numpy.ndarray([0, 0])
- def __imod__(self, value): return numpy.ndarray([0, 0])
- def __imul__(self, value): return numpy.ndarray([0, 0])
- def __int__(self): return 0
- def __invert__(self): return numpy.ndarray([0, 0])
- def __ior__(self, value): return numpy.ndarray([0, 0])
- def __ipow__(self, value): return numpy.ndarray([0, 0])
- def __irshift__(self, value): return numpy.ndarray([0, 0])
- def __isub__(self, value): return numpy.ndarray([0, 0])
- def __itruediv__(self, value): return numpy.ndarray([0, 0])
- def __ixor__(self, value): return numpy.ndarray([0, 0])
- def __le__(self, value): return numpy.ndarray([0, 0])
- def __len__(self): return 1
- def __lshift__(self, value): return numpy.ndarray([0, 0])
- def __lt__(self, value): return numpy.ndarray([0, 0])
- def __matmul__(self, value): return numpy.ndarray([0, 0])
- def __mod__(self, value): return numpy.ndarray([0, 0])
- def __mul__(self, value): return numpy.ndarray([0, 0])
- def __ne__(self, value): return numpy.ndarray([0, 0])
- def __neg__(self): return numpy.ndarray([0, 0])
- def __or__(self): return numpy.ndarray([0, 0])
- def __pos__(self): return numpy.ndarray([0, 0])
- def __pow__(self): return numpy.ndarray([0, 0])
- def __repr__(self): return str()
- def __rshift__(self): return numpy.ndarray([0, 0])
- def __setitem__(self, key, value): return uninferable
- def __str__(self): return str()
- def __sub__(self, value): return numpy.ndarray([0, 0])
- def __truediv__(self, value): return numpy.ndarray([0, 0])
- def __xor__(self, value): return numpy.ndarray([0, 0])
- def all(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
- def any(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
- def argmax(self, axis=None, out=None): return np.ndarray([0, 0])
- def argmin(self, axis=None, out=None): return np.ndarray([0, 0])
- def argpartition(self, kth, axis=-1, kind='introselect', order=None): return np.ndarray([0, 0])
- def argsort(self, axis=-1, kind='quicksort', order=None): return np.ndarray([0, 0])
- def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
- def byteswap(self, inplace=False): return np.ndarray([0, 0])
- def choose(self, choices, out=None, mode='raise'): return np.ndarray([0, 0])
- def clip(self, min=None, max=None, out=None): return np.ndarray([0, 0])
- def compress(self, condition, axis=None, out=None): return np.ndarray([0, 0])
- def conj(self): return np.ndarray([0, 0])
- def conjugate(self): return np.ndarray([0, 0])
- def copy(self, order='C'): return np.ndarray([0, 0])
- def cumprod(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
- def cumsum(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
- def diagonal(self, offset=0, axis1=0, axis2=1): return np.ndarray([0, 0])
- def dot(self, b, out=None): return np.ndarray([0, 0])
- def dump(self, file): return None
- def dumps(self): return str()
- def fill(self, value): return None
- def flatten(self, order='C'): return np.ndarray([0, 0])
- def getfield(self, dtype, offset=0): return np.ndarray([0, 0])
- def item(self, *args): return uninferable
- def itemset(self, *args): return None
- def max(self, axis=None, out=None): return np.ndarray([0, 0])
- def mean(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
- def min(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
- def newbyteorder(self, new_order='S'): return np.ndarray([0, 0])
- def nonzero(self): return (1,)
- def partition(self, kth, axis=-1, kind='introselect', order=None): return None
- def prod(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
- def ptp(self, axis=None, out=None): return np.ndarray([0, 0])
- def put(self, indices, values, mode='raise'): return None
- def ravel(self, order='C'): return np.ndarray([0, 0])
- def repeat(self, repeats, axis=None): return np.ndarray([0, 0])
- def reshape(self, shape, order='C'): return np.ndarray([0, 0])
- def resize(self, new_shape, refcheck=True): return None
- def round(self, decimals=0, out=None): return np.ndarray([0, 0])
- def searchsorted(self, v, side='left', sorter=None): return np.ndarray([0, 0])
- def setfield(self, val, dtype, offset=0): return None
- def setflags(self, write=None, align=None, uic=None): return None
- def sort(self, axis=-1, kind='quicksort', order=None): return None
- def squeeze(self, axis=None): return np.ndarray([0, 0])
- def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
- def sum(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
- def swapaxes(self, axis1, axis2): return np.ndarray([0, 0])
- def take(self, indices, axis=None, out=None, mode='raise'): return np.ndarray([0, 0])
- def tobytes(self, order='C'): return b''
- def tofile(self, fid, sep="", format="%s"): return None
- def tolist(self, ): return []
- def tostring(self, order='C'): return b''
- def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None): return np.ndarray([0, 0])
- def transpose(self, *axes): return np.ndarray([0, 0])
- def var(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
- def view(self, dtype=None, type=None): return np.ndarray([0, 0])
- """
- node = astroid.extract_node(ndarray)
- return node.infer(context=context)
-
-
-def _looks_like_numpy_ndarray(node):
- return isinstance(node, astroid.Attribute) and node.attrname == "ndarray"
-
-
-astroid.MANAGER.register_transform(
- astroid.Attribute,
- astroid.inference_tip(infer_numpy_ndarray),
- _looks_like_numpy_ndarray,
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py
deleted file mode 100644
index 772bfc4..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-# TODO(hippo91) : correct the functions return types
-"""Astroid hooks for numpy.random.mtrand module."""
-
-import astroid
-
-
-def numpy_random_mtrand_transform():
- return astroid.parse(
- """
- def beta(a, b, size=None): return uninferable
- def binomial(n, p, size=None): return uninferable
- def bytes(length): return uninferable
- def chisquare(df, size=None): return uninferable
- def choice(a, size=None, replace=True, p=None): return uninferable
- def dirichlet(alpha, size=None): return uninferable
- def exponential(scale=1.0, size=None): return uninferable
- def f(dfnum, dfden, size=None): return uninferable
- def gamma(shape, scale=1.0, size=None): return uninferable
- def geometric(p, size=None): return uninferable
- def get_state(): return uninferable
- def gumbel(loc=0.0, scale=1.0, size=None): return uninferable
- def hypergeometric(ngood, nbad, nsample, size=None): return uninferable
- def laplace(loc=0.0, scale=1.0, size=None): return uninferable
- def logistic(loc=0.0, scale=1.0, size=None): return uninferable
- def lognormal(mean=0.0, sigma=1.0, size=None): return uninferable
- def logseries(p, size=None): return uninferable
- def multinomial(n, pvals, size=None): return uninferable
- def multivariate_normal(mean, cov, size=None): return uninferable
- def negative_binomial(n, p, size=None): return uninferable
- def noncentral_chisquare(df, nonc, size=None): return uninferable
- def noncentral_f(dfnum, dfden, nonc, size=None): return uninferable
- def normal(loc=0.0, scale=1.0, size=None): return uninferable
- def pareto(a, size=None): return uninferable
- def permutation(x): return uninferable
- def poisson(lam=1.0, size=None): return uninferable
- def power(a, size=None): return uninferable
- def rand(*args): return uninferable
- def randint(low, high=None, size=None, dtype='l'):
- import numpy
- return numpy.ndarray((1,1))
- def randn(*args): return uninferable
- def random_integers(low, high=None, size=None): return uninferable
- def random_sample(size=None): return uninferable
- def rayleigh(scale=1.0, size=None): return uninferable
- def seed(seed=None): return uninferable
- def set_state(state): return uninferable
- def shuffle(x): return uninferable
- def standard_cauchy(size=None): return uninferable
- def standard_exponential(size=None): return uninferable
- def standard_gamma(shape, size=None): return uninferable
- def standard_normal(size=None): return uninferable
- def standard_t(df, size=None): return uninferable
- def triangular(left, mode, right, size=None): return uninferable
- def uniform(low=0.0, high=1.0, size=None): return uninferable
- def vonmises(mu, kappa, size=None): return uninferable
- def wald(mean, scale, size=None): return uninferable
- def weibull(a, size=None): return uninferable
- def zipf(a, size=None): return uninferable
- """
- )
-
-
-astroid.register_module_extender(
- astroid.MANAGER, "numpy.random.mtrand", numpy_random_mtrand_transform
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py b/venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py
deleted file mode 100644
index 2bad01e..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Different utilities for the numpy brains"""
-
-
-import astroid
-
-
-def infer_numpy_member(src, node, context=None):
- node = astroid.extract_node(src)
- return node.infer(context=context)
-
-
-def _is_a_numpy_module(node: astroid.node_classes.Name) -> bool:
- """
- Returns True if the node is a representation of a numpy module.
-
- For example in :
- import numpy as np
- x = np.linspace(1, 2)
- The node <Name.np> is a representation of the numpy module.
-
- :param node: node to test
- :return: True if the node is a representation of the numpy module.
- """
- module_nickname = node.name
- potential_import_target = [
- x for x in node.lookup(module_nickname)[1] if isinstance(x, astroid.Import)
- ]
- for target in potential_import_target:
- if ("numpy", module_nickname) in target.names:
- return True
- return False
-
-
-def looks_like_numpy_member(
- member_name: str, node: astroid.node_classes.NodeNG
-) -> bool:
- """
- Returns True if the node is a member of numpy whose
- name is member_name.
-
- :param member_name: name of the member
- :param node: node to test
- :return: True if the node is a member of numpy
- """
- return (
- isinstance(node, astroid.Attribute)
- and node.attrname == member_name
- and isinstance(node.expr, astroid.Name)
- and _is_a_numpy_module(node.expr)
- )
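The two helpers above work purely on the syntax tree: an attribute such as np.linspace is walked back to an "import numpy as np" statement, so no numpy installation is required for the check. A small sketch of the node shapes they inspect; the alias and member name are chosen only for illustration.

    import astroid

    call = astroid.extract_node("""
    import numpy as np
    np.linspace(1, 2)  #@
    """)
    attr = call.func         # Attribute node for <np.linspace>
    print(attr.attrname)     # linspace -> compared against member_name
    print(attr.expr.name)    # np       -> looked up against the import above
    # With the numpy brain plugins active, inference_tip(infer_numpy_member)
    # then re-infers this attribute from the matching stub source.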
diff --git a/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py b/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py
deleted file mode 100644
index 25e7649..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-import astroid
-from astroid import parse
-from astroid import inference_tip
-from astroid import register_module_extender
-from astroid import MANAGER
-
-
-def pkg_resources_transform():
- return parse(
- """
-def require(*requirements):
- return pkg_resources.working_set.require(*requirements)
-
-def run_script(requires, script_name):
- return pkg_resources.working_set.run_script(requires, script_name)
-
-def iter_entry_points(group, name=None):
- return pkg_resources.working_set.iter_entry_points(group, name)
-
-def resource_exists(package_or_requirement, resource_name):
- return get_provider(package_or_requirement).has_resource(resource_name)
-
-def resource_isdir(package_or_requirement, resource_name):
- return get_provider(package_or_requirement).resource_isdir(
- resource_name)
-
-def resource_filename(package_or_requirement, resource_name):
- return get_provider(package_or_requirement).get_resource_filename(
- self, resource_name)
-
-def resource_stream(package_or_requirement, resource_name):
- return get_provider(package_or_requirement).get_resource_stream(
- self, resource_name)
-
-def resource_string(package_or_requirement, resource_name):
- return get_provider(package_or_requirement).get_resource_string(
- self, resource_name)
-
-def resource_listdir(package_or_requirement, resource_name):
- return get_provider(package_or_requirement).resource_listdir(
- resource_name)
-
-def extraction_error():
- pass
-
-def get_cache_path(archive_name, names=()):
- extract_path = self.extraction_path or get_default_cache()
- target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
- return target_path
-
-def postprocess(tempname, filename):
- pass
-
-def set_extraction_path(path):
- pass
-
-def cleanup_resources(force=False):
- pass
-
-def get_distribution(dist):
- return Distribution(dist)
-
-_namespace_packages = {}
-"""
- )
-
-
-register_module_extender(MANAGER, "pkg_resources", pkg_resources_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_pytest.py b/venv/Lib/site-packages/astroid/brain/brain_pytest.py
deleted file mode 100644
index d7e3ac8..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_pytest.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Jeff Quast <contact@jeffquast.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2016 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for pytest."""
-from __future__ import absolute_import
-from astroid import MANAGER, register_module_extender
-from astroid.builder import AstroidBuilder
-
-
-def pytest_transform():
- return AstroidBuilder(MANAGER).string_build(
- """
-
-try:
- import _pytest.mark
- import _pytest.recwarn
- import _pytest.runner
- import _pytest.python
- import _pytest.skipping
- import _pytest.assertion
-except ImportError:
- pass
-else:
- deprecated_call = _pytest.recwarn.deprecated_call
- warns = _pytest.recwarn.warns
-
- exit = _pytest.runner.exit
- fail = _pytest.runner.fail
- skip = _pytest.runner.skip
- importorskip = _pytest.runner.importorskip
-
- xfail = _pytest.skipping.xfail
- mark = _pytest.mark.MarkGenerator()
- raises = _pytest.python.raises
-
- # New in pytest 3.0
- try:
- approx = _pytest.python.approx
- register_assert_rewrite = _pytest.assertion.register_assert_rewrite
- except AttributeError:
- pass
-
-
-# Moved in pytest 3.0
-
-try:
- import _pytest.freeze_support
- freeze_includes = _pytest.freeze_support.freeze_includes
-except ImportError:
- try:
- import _pytest.genscript
- freeze_includes = _pytest.genscript.freeze_includes
- except ImportError:
- pass
-
-try:
- import _pytest.debugging
- set_trace = _pytest.debugging.pytestPDB().set_trace
-except ImportError:
- try:
- import _pytest.pdb
- set_trace = _pytest.pdb.pytestPDB().set_trace
- except ImportError:
- pass
-
-try:
- import _pytest.fixtures
- fixture = _pytest.fixtures.fixture
- yield_fixture = _pytest.fixtures.yield_fixture
-except ImportError:
- try:
- import _pytest.python
- fixture = _pytest.python.fixture
- yield_fixture = _pytest.python.yield_fixture
- except ImportError:
- pass
-"""
- )
-
-
-register_module_extender(MANAGER, "pytest", pytest_transform)
-register_module_extender(MANAGER, "py.test", pytest_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_qt.py b/venv/Lib/site-packages/astroid/brain/brain_qt.py
deleted file mode 100644
index 8679d14..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_qt.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2017 Roy Wright <roy@wright.org>
-# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for the PyQT library."""
-
-from astroid import MANAGER, register_module_extender
-from astroid.builder import AstroidBuilder
-from astroid import nodes
-from astroid import parse
-
-
-def _looks_like_signal(node, signal_name="pyqtSignal"):
- if "__class__" in node.instance_attrs:
- try:
- cls = node.instance_attrs["__class__"][0]
- return cls.name == signal_name
- except AttributeError:
- # return False if the cls does not have a name attribute
- pass
- return False
-
-
-def transform_pyqt_signal(node):
- module = parse(
- """
- class pyqtSignal(object):
- def connect(self, slot, type=None, no_receiver_check=False):
- pass
- def disconnect(self, slot):
- pass
- def emit(self, *args):
- pass
- """
- )
- signal_cls = module["pyqtSignal"]
- node.instance_attrs["emit"] = signal_cls["emit"]
- node.instance_attrs["disconnect"] = signal_cls["disconnect"]
- node.instance_attrs["connect"] = signal_cls["connect"]
-
-
-def transform_pyside_signal(node):
- module = parse(
- """
- class NotPySideSignal(object):
- def connect(self, receiver, type=None):
- pass
- def disconnect(self, receiver):
- pass
- def emit(self, *args):
- pass
- """
- )
- signal_cls = module["NotPySideSignal"]
- node.instance_attrs["connect"] = signal_cls["connect"]
- node.instance_attrs["disconnect"] = signal_cls["disconnect"]
- node.instance_attrs["emit"] = signal_cls["emit"]
-
-
-def pyqt4_qtcore_transform():
- return AstroidBuilder(MANAGER).string_build(
- """
-
-def SIGNAL(signal_name): pass
-
-class QObject(object):
- def emit(self, signal): pass
-"""
- )
-
-
-register_module_extender(MANAGER, "PyQt4.QtCore", pyqt4_qtcore_transform)
-MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal)
-MANAGER.register_transform(
- nodes.ClassDef,
- transform_pyside_signal,
- lambda node: node.qname() in ("PySide.QtCore.Signal", "PySide2.QtCore.Signal"),
-)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_random.py b/venv/Lib/site-packages/astroid/brain/brain_random.py
deleted file mode 100644
index 5ec858a..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_random.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import random
-
-import astroid
-from astroid import helpers
-from astroid import MANAGER
-
-
-ACCEPTED_ITERABLES_FOR_SAMPLE = (astroid.List, astroid.Set, astroid.Tuple)
-
-
-def _clone_node_with_lineno(node, parent, lineno):
- cls = node.__class__
- other_fields = node._other_fields
- _astroid_fields = node._astroid_fields
- init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
- postinit_params = {param: getattr(node, param) for param in _astroid_fields}
- if other_fields:
- init_params.update({param: getattr(node, param) for param in other_fields})
- new_node = cls(**init_params)
- if hasattr(node, "postinit") and _astroid_fields:
- new_node.postinit(**postinit_params)
- return new_node
-
-
-def infer_random_sample(node, context=None):
- if len(node.args) != 2:
- raise astroid.UseInferenceDefault
-
- length = node.args[1]
- if not isinstance(length, astroid.Const):
- raise astroid.UseInferenceDefault
- if not isinstance(length.value, int):
- raise astroid.UseInferenceDefault
-
- inferred_sequence = helpers.safe_infer(node.args[0], context=context)
- if not inferred_sequence:
- raise astroid.UseInferenceDefault
-
- if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
- raise astroid.UseInferenceDefault
-
- if length.value > len(inferred_sequence.elts):
- # In this case, this will raise a ValueError
- raise astroid.UseInferenceDefault
-
- try:
- elts = random.sample(inferred_sequence.elts, length.value)
- except ValueError:
- raise astroid.UseInferenceDefault
-
- new_node = astroid.List(
- lineno=node.lineno, col_offset=node.col_offset, parent=node.scope()
- )
- new_elts = [
- _clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
- for elt in elts
- ]
- new_node.postinit(new_elts)
- return iter((new_node,))
-
-
-def _looks_like_random_sample(node):
- func = node.func
- if isinstance(func, astroid.Attribute):
- return func.attrname == "sample"
- if isinstance(func, astroid.Name):
- return func.name == "sample"
- return False
-
-
-MANAGER.register_transform(
- astroid.Call, astroid.inference_tip(infer_random_sample), _looks_like_random_sample
-)
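A minimal sketch of the behaviour the hook above provides: for a literal sequence and a literal integer length it returns a new List node holding a sample of the original element nodes, so the result length is known statically. Assumes the default brain plugins are loaded.

    import astroid

    call = astroid.extract_node("""
    import random
    random.sample([10, 20, 30], 2)  #@
    """)
    inferred = next(call.infer())  # an astroid.List built by infer_random_sample
    print(type(inferred).__name__, len(inferred.elts))  # expected: List 2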
diff --git a/venv/Lib/site-packages/astroid/brain/brain_re.py b/venv/Lib/site-packages/astroid/brain/brain_re.py
deleted file mode 100644
index c7ee51a..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_re.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import sys
-import astroid
-
-PY36 = sys.version_info >= (3, 6)
-
-if PY36:
- # Since Python 3.6 there is the RegexFlag enum
- # where every entry will be exposed via updating globals()
-
- def _re_transform():
- return astroid.parse(
- """
- import sre_compile
- ASCII = sre_compile.SRE_FLAG_ASCII
- IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
- LOCALE = sre_compile.SRE_FLAG_LOCALE
- UNICODE = sre_compile.SRE_FLAG_UNICODE
- MULTILINE = sre_compile.SRE_FLAG_MULTILINE
- DOTALL = sre_compile.SRE_FLAG_DOTALL
- VERBOSE = sre_compile.SRE_FLAG_VERBOSE
- A = ASCII
- I = IGNORECASE
- L = LOCALE
- U = UNICODE
- M = MULTILINE
- S = DOTALL
- X = VERBOSE
- TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
- T = TEMPLATE
- DEBUG = sre_compile.SRE_FLAG_DEBUG
- """
- )
-
- astroid.register_module_extender(astroid.MANAGER, "re", _re_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_six.py b/venv/Lib/site-packages/astroid/brain/brain_six.py
deleted file mode 100644
index b342fbf..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_six.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# Copyright (c) 2014-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""Astroid hooks for six module."""
-
-from textwrap import dedent
-
-from astroid import MANAGER, register_module_extender
-from astroid.builder import AstroidBuilder
-from astroid.exceptions import (
- AstroidBuildingError,
- InferenceError,
- AttributeInferenceError,
-)
-from astroid import nodes
-
-
-SIX_ADD_METACLASS = "six.add_metaclass"
-
-
-def _indent(text, prefix, predicate=None):
- """Adds 'prefix' to the beginning of selected lines in 'text'.
-
- If 'predicate' is provided, 'prefix' will only be added to the lines
- where 'predicate(line)' is True. If 'predicate' is not provided,
- it will default to adding 'prefix' to all non-empty lines that do not
- consist solely of whitespace characters.
- """
- if predicate is None:
- predicate = lambda line: line.strip()
-
- def prefixed_lines():
- for line in text.splitlines(True):
- yield prefix + line if predicate(line) else line
-
- return "".join(prefixed_lines())
-
-
-_IMPORTS = """
-import _io
-cStringIO = _io.StringIO
-filter = filter
-from itertools import filterfalse
-input = input
-from sys import intern
-map = map
-range = range
-from imp import reload as reload_module
-from functools import reduce
-from shlex import quote as shlex_quote
-from io import StringIO
-from collections import UserDict, UserList, UserString
-xrange = range
-zip = zip
-from itertools import zip_longest
-import builtins
-import configparser
-import copyreg
-import _dummy_thread
-import http.cookiejar as http_cookiejar
-import http.cookies as http_cookies
-import html.entities as html_entities
-import html.parser as html_parser
-import http.client as http_client
-import http.server as http_server
-BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
-import pickle as cPickle
-import queue
-import reprlib
-import socketserver
-import _thread
-import winreg
-import xmlrpc.server as xmlrpc_server
-import xmlrpc.client as xmlrpc_client
-import urllib.robotparser as urllib_robotparser
-import email.mime.multipart as email_mime_multipart
-import email.mime.nonmultipart as email_mime_nonmultipart
-import email.mime.text as email_mime_text
-import email.mime.base as email_mime_base
-import urllib.parse as urllib_parse
-import urllib.error as urllib_error
-import tkinter
-import tkinter.dialog as tkinter_dialog
-import tkinter.filedialog as tkinter_filedialog
-import tkinter.scrolledtext as tkinter_scrolledtext
-import tkinter.simpledialog as tkinder_simpledialog
-import tkinter.tix as tkinter_tix
-import tkinter.ttk as tkinter_ttk
-import tkinter.constants as tkinter_constants
-import tkinter.dnd as tkinter_dnd
-import tkinter.colorchooser as tkinter_colorchooser
-import tkinter.commondialog as tkinter_commondialog
-import tkinter.filedialog as tkinter_tkfiledialog
-import tkinter.font as tkinter_font
-import tkinter.messagebox as tkinter_messagebox
-import urllib
-import urllib.request as urllib_request
-import urllib.robotparser as urllib_robotparser
-import urllib.parse as urllib_parse
-import urllib.error as urllib_error
-"""
-
-
-def six_moves_transform():
- code = dedent(
- """
- class Moves(object):
- {}
- moves = Moves()
- """
- ).format(_indent(_IMPORTS, " "))
- module = AstroidBuilder(MANAGER).string_build(code)
- module.name = "six.moves"
- return module
-
-
-def _six_fail_hook(modname):
- """Fix six.moves imports due to the dynamic nature of this
- class.
-
- Construct a pseudo-module which contains all the necessary imports
- for six
-
- :param modname: Name of failed module
- :type modname: str
-
- :return: An astroid module
- :rtype: nodes.Module
- """
-
- attribute_of = modname != "six.moves" and modname.startswith("six.moves")
- if modname != "six.moves" and not attribute_of:
- raise AstroidBuildingError(modname=modname)
- module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
- module.name = "six.moves"
- if attribute_of:
- # Facilitate import of submodules in Moves
- start_index = len(module.name)
- attribute = modname[start_index:].lstrip(".").replace(".", "_")
- try:
- import_attr = module.getattr(attribute)[0]
- except AttributeInferenceError:
- raise AstroidBuildingError(modname=modname)
- if isinstance(import_attr, nodes.Import):
- submodule = MANAGER.ast_from_module_name(import_attr.names[0][0])
- return submodule
- # Let dummy submodule imports pass through
- # This will cause an Uninferable result, which is okay
- return module
-
-
-def _looks_like_decorated_with_six_add_metaclass(node):
- if not node.decorators:
- return False
-
- for decorator in node.decorators.nodes:
- if not isinstance(decorator, nodes.Call):
- continue
- if decorator.func.as_string() == SIX_ADD_METACLASS:
- return True
- return False
-
-
-def transform_six_add_metaclass(node):
- """Check if the given class node is decorated with *six.add_metaclass*
-
- If so, inject its argument as the metaclass of the underlying class.
- """
- if not node.decorators:
- return
-
- for decorator in node.decorators.nodes:
- if not isinstance(decorator, nodes.Call):
- continue
-
- try:
- func = next(decorator.func.infer())
- except InferenceError:
- continue
- if func.qname() == SIX_ADD_METACLASS and decorator.args:
- metaclass = decorator.args[0]
- node._metaclass = metaclass
- return node
-
-
-register_module_extender(MANAGER, "six", six_moves_transform)
-register_module_extender(
- MANAGER, "requests.packages.urllib3.packages.six", six_moves_transform
-)
-MANAGER.register_failed_import_hook(_six_fail_hook)
-MANAGER.register_transform(
- nodes.ClassDef,
- transform_six_add_metaclass,
- _looks_like_decorated_with_six_add_metaclass,
-)
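A minimal sketch of the six.add_metaclass transform above, which injects the decorator argument as the class's metaclass. This assumes six is installed, since the transform has to infer six.add_metaclass to confirm its qualified name.

    import astroid

    cls = astroid.extract_node("""
    import six

    class Meta(type):
        pass

    @six.add_metaclass(Meta)
    class Something(object):  #@
        pass
    """)
    meta = cls.metaclass()               # resolved via the injected _metaclass
    print(meta.name if meta else None)   # expected: Meta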
diff --git a/venv/Lib/site-packages/astroid/brain/brain_ssl.py b/venv/Lib/site-packages/astroid/brain/brain_ssl.py
deleted file mode 100644
index 893d8a2..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_ssl.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for the ssl library."""
-
-from astroid import MANAGER, register_module_extender
-from astroid.builder import AstroidBuilder
-from astroid import nodes
-from astroid import parse
-
-
-def ssl_transform():
- return parse(
- """
- from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
- from _ssl import _SSLContext, MemoryBIO
- from _ssl import (
- SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
- SSLSyscallError, SSLEOFError,
- )
- from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
- from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
- from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
- try:
- from _ssl import RAND_egd
- except ImportError:
- # LibreSSL does not provide RAND_egd
- pass
- from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
- OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
- OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
- OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)
-
- from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
- ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
- ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
- ALERT_DESCRIPTION_BAD_RECORD_MAC,
- ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
- ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
- ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
- ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
- ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
- ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
- ALERT_DESCRIPTION_DECRYPT_ERROR,
- ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
- ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
- ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
- ALERT_DESCRIPTION_INTERNAL_ERROR,
- ALERT_DESCRIPTION_NO_RENEGOTIATION,
- ALERT_DESCRIPTION_PROTOCOL_VERSION,
- ALERT_DESCRIPTION_RECORD_OVERFLOW,
- ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
- ALERT_DESCRIPTION_UNKNOWN_CA,
- ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
- ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
- ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
- ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
- ALERT_DESCRIPTION_USER_CANCELLED)
- from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
- SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
- SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
- from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
- from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
- from _ssl import _OPENSSL_API_VERSION
- from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
- from _ssl import PROTOCOL_TLS, PROTOCOL_TLS_CLIENT, PROTOCOL_TLS_SERVER
- """
- )
-
-
-register_module_extender(MANAGER, "ssl", ssl_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_subprocess.py b/venv/Lib/site-packages/astroid/brain/brain_subprocess.py
deleted file mode 100644
index c14dc55..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_subprocess.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-import sys
-import textwrap
-
-import astroid
-
-
-PY37 = sys.version_info >= (3, 7)
-PY36 = sys.version_info >= (3, 6)
-
-
-def _subprocess_transform():
- communicate = (bytes("string", "ascii"), bytes("string", "ascii"))
- communicate_signature = "def communicate(self, input=None, timeout=None)"
- if PY37:
- init = """
- def __init__(self, args, bufsize=0, executable=None,
- stdin=None, stdout=None, stderr=None,
- preexec_fn=None, close_fds=False, shell=False,
- cwd=None, env=None, universal_newlines=False,
- startupinfo=None, creationflags=0, restore_signals=True,
- start_new_session=False, pass_fds=(), *,
- encoding=None, errors=None, text=None):
- pass
- """
- elif PY36:
- init = """
- def __init__(self, args, bufsize=0, executable=None,
- stdin=None, stdout=None, stderr=None,
- preexec_fn=None, close_fds=False, shell=False,
- cwd=None, env=None, universal_newlines=False,
- startupinfo=None, creationflags=0, restore_signals=True,
- start_new_session=False, pass_fds=(), *,
- encoding=None, errors=None):
- pass
- """
- else:
- init = """
- def __init__(self, args, bufsize=0, executable=None,
- stdin=None, stdout=None, stderr=None,
- preexec_fn=None, close_fds=False, shell=False,
- cwd=None, env=None, universal_newlines=False,
- startupinfo=None, creationflags=0, restore_signals=True,
- start_new_session=False, pass_fds=()):
- pass
- """
- wait_signature = "def wait(self, timeout=None)"
- ctx_manager = """
- def __enter__(self): return self
- def __exit__(self, *args): pass
- """
- py3_args = "args = []"
- code = textwrap.dedent(
- """
- def check_output(
- args, *,
- stdin=None,
- stderr=None,
- shell=False,
- cwd=None,
- encoding=None,
- errors=None,
- universal_newlines=False,
- timeout=None,
- env=None
- ):
-
- if universal_newlines:
- return ""
- return b""
- class Popen(object):
- returncode = pid = 0
- stdin = stdout = stderr = file()
- %(py3_args)s
-
- %(communicate_signature)s:
- return %(communicate)r
- %(wait_signature)s:
- return self.returncode
- def poll(self):
- return self.returncode
- def send_signal(self, signal):
- pass
- def terminate(self):
- pass
- def kill(self):
- pass
- %(ctx_manager)s
- """
- % {
- "communicate": communicate,
- "communicate_signature": communicate_signature,
- "wait_signature": wait_signature,
- "ctx_manager": ctx_manager,
- "py3_args": py3_args,
- }
- )
-
- init_lines = textwrap.dedent(init).splitlines()
- indented_init = "\n".join(" " * 4 + line for line in init_lines)
- code += indented_init
- return astroid.parse(code)
-
-
-astroid.register_module_extender(astroid.MANAGER, "subprocess", _subprocess_transform)
diff --git a/venv/Lib/site-packages/astroid/brain/brain_threading.py b/venv/Lib/site-packages/astroid/brain/brain_threading.py
deleted file mode 100644
index dffa55a..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_threading.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-import astroid
-
-
-def _thread_transform():
- return astroid.parse(
- """
- class lock(object):
- def acquire(self, blocking=True, timeout=-1):
- pass
- def release(self):
- pass
- def __enter__(self):
- return True
- def __exit__(self, *args):
- pass
- def locked(self):
- return False
-
- def Lock():
- return lock()
- """
- )
-
-
-astroid.register_module_extender(astroid.MANAGER, "threading", _thread_transform)
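
Because the stub above is what inference sees for threading, lock usage in analysed code resolves to these fake methods rather than the C implementation. A small, hedged sketch of the effect, assuming only astroid's public extract_node/infer helpers (defined in builder.py below):

import astroid

node = astroid.extract_node("""
import threading
lock = threading.Lock()
lock.acquire  #@
""")
inferred = next(node.infer())
# Expected: a bound method backed by the stubbed lock.acquire above.
print(inferred)
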
diff --git a/venv/Lib/site-packages/astroid/brain/brain_typing.py b/venv/Lib/site-packages/astroid/brain/brain_typing.py
deleted file mode 100644
index 9ff7227..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_typing.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 David Euresti <github@euresti.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-"""Astroid hooks for typing.py support."""
-import typing
-
-from astroid import (
- MANAGER,
- UseInferenceDefault,
- extract_node,
- inference_tip,
- nodes,
- InferenceError,
-)
-
-
-TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
-TYPING_TYPEVARS = {"TypeVar", "NewType"}
-TYPING_TYPEVARS_QUALIFIED = {"typing.TypeVar", "typing.NewType"}
-TYPING_TYPE_TEMPLATE = """
-class Meta(type):
- def __getitem__(self, item):
- return self
-
- @property
- def __args__(self):
- return ()
-
-class {0}(metaclass=Meta):
- pass
-"""
-TYPING_MEMBERS = set(typing.__all__)
-
-
-def looks_like_typing_typevar_or_newtype(node):
- func = node.func
- if isinstance(func, nodes.Attribute):
- return func.attrname in TYPING_TYPEVARS
- if isinstance(func, nodes.Name):
- return func.name in TYPING_TYPEVARS
- return False
-
-
-def infer_typing_typevar_or_newtype(node, context=None):
- """Infer a typing.TypeVar(...) or typing.NewType(...) call"""
- try:
- func = next(node.func.infer(context=context))
- except InferenceError as exc:
- raise UseInferenceDefault from exc
-
- if func.qname() not in TYPING_TYPEVARS_QUALIFIED:
- raise UseInferenceDefault
- if not node.args:
- raise UseInferenceDefault
-
- typename = node.args[0].as_string().strip("'")
- node = extract_node(TYPING_TYPE_TEMPLATE.format(typename))
- return node.infer(context=context)
-
-
-def _looks_like_typing_subscript(node):
- """Try to figure out if a Subscript node *might* be a typing-related subscript"""
- if isinstance(node, nodes.Name):
- return node.name in TYPING_MEMBERS
- elif isinstance(node, nodes.Attribute):
- return node.attrname in TYPING_MEMBERS
- elif isinstance(node, nodes.Subscript):
- return _looks_like_typing_subscript(node.value)
- return False
-
-
-def infer_typing_attr(node, context=None):
- """Infer a typing.X[...] subscript"""
- try:
- value = next(node.value.infer())
- except InferenceError as exc:
- raise UseInferenceDefault from exc
-
- if not value.qname().startswith("typing."):
- raise UseInferenceDefault
-
- node = extract_node(TYPING_TYPE_TEMPLATE.format(value.qname().split(".")[-1]))
- return node.infer(context=context)
-
-
-MANAGER.register_transform(
- nodes.Call,
- inference_tip(infer_typing_typevar_or_newtype),
- looks_like_typing_typevar_or_newtype,
-)
-MANAGER.register_transform(
- nodes.Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript
-)
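
The typing hooks use the other extension point: MANAGER.register_transform combined with inference_tip, where a cheap predicate selects candidate nodes and the tip supplies their inference result. A minimal sketch of a custom tip under the same API (make_answer and the constant 42 are invented for illustration):

import astroid
from astroid import MANAGER, inference_tip, nodes

def _looks_like_make_answer(node):
    # Cheap syntactic filter, mirroring looks_like_typing_typevar_or_newtype.
    return isinstance(node.func, nodes.Name) and node.func.name == "make_answer"

def _infer_make_answer(node, context=None):
    # Pretend every make_answer() call evaluates to the constant 42.
    return iter([nodes.Const(42)])

MANAGER.register_transform(
    nodes.Call, inference_tip(_infer_make_answer), _looks_like_make_answer
)
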
diff --git a/venv/Lib/site-packages/astroid/brain/brain_uuid.py b/venv/Lib/site-packages/astroid/brain/brain_uuid.py
deleted file mode 100644
index 8bda631..0000000
--- a/venv/Lib/site-packages/astroid/brain/brain_uuid.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Astroid hooks for the UUID module."""
-
-
-from astroid import MANAGER
-from astroid import nodes
-
-
-def _patch_uuid_class(node):
- # The .int member is patched using __dict__
- node.locals["int"] = [nodes.Const(0, parent=node)]
-
-
-MANAGER.register_transform(
- nodes.ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID"
-)
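
The uuid hook shows the simplest transform style: mutate a matching ClassDef in place, selected by a predicate. The same pattern sketched with an invented class name and attribute ("Config" and VERSION are not real astroid targets):

import astroid
from astroid import MANAGER, nodes

def _add_version_attribute(node):
    # Inject a synthetic VERSION class attribute, like the .int patch above.
    node.locals["VERSION"] = [nodes.Const(1, parent=node)]

MANAGER.register_transform(
    nodes.ClassDef, _add_version_attribute, lambda node: node.name == "Config"
)
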
diff --git a/venv/Lib/site-packages/astroid/builder.py b/venv/Lib/site-packages/astroid/builder.py
deleted file mode 100644
index ac71093..0000000
--- a/venv/Lib/site-packages/astroid/builder.py
+++ /dev/null
@@ -1,435 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2013 Phil Schaf <flying-sheep@web.de>
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014-2015 Google, Inc.
-# Copyright (c) 2014 Alexander Presnyakov <flagist0@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""The AstroidBuilder makes astroid from living object and / or from _ast
-
-The builder is not thread safe and can't be used to parse different sources
-at the same time.
-"""
-
-import os
-import textwrap
-from tokenize import detect_encoding
-
-from astroid._ast import _parse
-from astroid import bases
-from astroid import exceptions
-from astroid import manager
-from astroid import modutils
-from astroid import raw_building
-from astroid import rebuilder
-from astroid import nodes
-from astroid import util
-
-# The name of the transient function that is used to
-# wrap expressions to be extracted when calling
-# extract_node.
-_TRANSIENT_FUNCTION = "__"
-
-# The comment used to select a statement to be extracted
-# when calling extract_node.
-_STATEMENT_SELECTOR = "#@"
-
-MANAGER = manager.AstroidManager()
-
-
-def open_source_file(filename):
- with open(filename, "rb") as byte_stream:
- encoding = detect_encoding(byte_stream.readline)[0]
- stream = open(filename, "r", newline=None, encoding=encoding)
- data = stream.read()
- return stream, encoding, data
-
-
-def _can_assign_attr(node, attrname):
- try:
- slots = node.slots()
- except NotImplementedError:
- pass
- else:
- if slots and attrname not in {slot.value for slot in slots}:
- return False
- return True
-
-
-class AstroidBuilder(raw_building.InspectBuilder):
- """Class for building an astroid tree from source code or from a live module.
-
- The param *manager* specifies the manager class which should be used.
- If no manager is given, then the default one will be used. The
- param *apply_transforms* determines if the transforms should be
- applied after the tree was built from source or from a live object,
- by default being True.
- """
-
- # pylint: disable=redefined-outer-name
- def __init__(self, manager=None, apply_transforms=True):
- super(AstroidBuilder, self).__init__()
- self._manager = manager or MANAGER
- self._apply_transforms = apply_transforms
-
- def module_build(self, module, modname=None):
- """Build an astroid from a living module instance."""
- node = None
- path = getattr(module, "__file__", None)
- if path is not None:
- path_, ext = os.path.splitext(modutils._path_from_filename(path))
- if ext in (".py", ".pyc", ".pyo") and os.path.exists(path_ + ".py"):
- node = self.file_build(path_ + ".py", modname)
- if node is None:
- # this is a built-in module
- # get a partial representation by introspection
- node = self.inspect_build(module, modname=modname, path=path)
- if self._apply_transforms:
- # We have to handle transformation by ourselves since the
- # rebuilder isn't called for builtin nodes
- node = self._manager.visit_transforms(node)
- return node
-
- def file_build(self, path, modname=None):
- """Build astroid from a source code file (i.e. from an ast)
-
- *path* is expected to be a python source file
- """
- try:
- stream, encoding, data = open_source_file(path)
- except IOError as exc:
- raise exceptions.AstroidBuildingError(
- "Unable to load file {path}:\n{error}",
- modname=modname,
- path=path,
- error=exc,
- ) from exc
- except (SyntaxError, LookupError) as exc:
- raise exceptions.AstroidSyntaxError(
- "Python 3 encoding specification error or unknown encoding:\n"
- "{error}",
- modname=modname,
- path=path,
- error=exc,
- ) from exc
- except UnicodeError as exc: # wrong encoding
- # detect_encoding returns utf-8 if no encoding specified
- raise exceptions.AstroidBuildingError(
- "Wrong or no encoding specified for {filename}.", filename=path
- ) from exc
- with stream:
- # get module name if necessary
- if modname is None:
- try:
- modname = ".".join(modutils.modpath_from_file(path))
- except ImportError:
- modname = os.path.splitext(os.path.basename(path))[0]
- # build astroid representation
- module = self._data_build(data, modname, path)
- return self._post_build(module, encoding)
-
- def string_build(self, data, modname="", path=None):
- """Build astroid from source code string."""
- module = self._data_build(data, modname, path)
- module.file_bytes = data.encode("utf-8")
- return self._post_build(module, "utf-8")
-
- def _post_build(self, module, encoding):
- """Handles encoding and delayed nodes after a module has been built"""
- module.file_encoding = encoding
- self._manager.cache_module(module)
- # post tree building steps after we stored the module in the cache:
- for from_node in module._import_from_nodes:
- if from_node.modname == "__future__":
- for symbol, _ in from_node.names:
- module.future_imports.add(symbol)
- self.add_from_names_to_locals(from_node)
- # handle delayed assattr nodes
- for delayed in module._delayed_assattr:
- self.delayed_assattr(delayed)
-
- # Visit the transforms
- if self._apply_transforms:
- module = self._manager.visit_transforms(module)
- return module
-
- def _data_build(self, data, modname, path):
- """Build tree node from data and add some informations"""
- try:
- node = _parse(data + "\n")
- except (TypeError, ValueError, SyntaxError) as exc:
- raise exceptions.AstroidSyntaxError(
- "Parsing Python code failed:\n{error}",
- source=data,
- modname=modname,
- path=path,
- error=exc,
- ) from exc
- if path is not None:
- node_file = os.path.abspath(path)
- else:
- node_file = "<?>"
- if modname.endswith(".__init__"):
- modname = modname[:-9]
- package = True
- else:
- package = (
- path is not None
- and os.path.splitext(os.path.basename(path))[0] == "__init__"
- )
- builder = rebuilder.TreeRebuilder(self._manager)
- module = builder.visit_module(node, modname, node_file, package)
- module._import_from_nodes = builder._import_from_nodes
- module._delayed_assattr = builder._delayed_assattr
- return module
-
- def add_from_names_to_locals(self, node):
- """Store imported names to the locals
-
- Resort the locals if coming from a delayed node
- """
- _key_func = lambda node: node.fromlineno
-
- def sort_locals(my_list):
- my_list.sort(key=_key_func)
-
- for (name, asname) in node.names:
- if name == "*":
- try:
- imported = node.do_import_module()
- except exceptions.AstroidBuildingError:
- continue
- for name in imported.public_names():
- node.parent.set_local(name, node)
- sort_locals(node.parent.scope().locals[name])
- else:
- node.parent.set_local(asname or name, node)
- sort_locals(node.parent.scope().locals[asname or name])
-
- def delayed_assattr(self, node):
- """Visit a AssAttr node
-
- This adds name to locals and handle members definition.
- """
- try:
- frame = node.frame()
- for inferred in node.expr.infer():
- if inferred is util.Uninferable:
- continue
- try:
- if inferred.__class__ is bases.Instance:
- inferred = inferred._proxied
- iattrs = inferred.instance_attrs
- if not _can_assign_attr(inferred, node.attrname):
- continue
- elif isinstance(inferred, bases.Instance):
- # Const, Tuple, ... we may be wrong, may be not, but
- # anyway we don't want to pollute builtin's namespace
- continue
- elif inferred.is_function:
- iattrs = inferred.instance_attrs
- else:
- iattrs = inferred.locals
- except AttributeError:
- # XXX log error
- continue
- values = iattrs.setdefault(node.attrname, [])
- if node in values:
- continue
- # get assign in __init__ first XXX useful ?
- if (
- frame.name == "__init__"
- and values
- and values[0].frame().name != "__init__"
- ):
- values.insert(0, node)
- else:
- values.append(node)
- except exceptions.InferenceError:
- pass
-
-
-def build_namespace_package_module(name, path):
- return nodes.Module(name, doc="", path=path, package=True)
-
-
-def parse(code, module_name="", path=None, apply_transforms=True):
- """Parses a source string in order to obtain an astroid AST from it
-
- :param str code: The code for the module.
- :param str module_name: The name for the module, if any
- :param str path: The path for the module
- :param bool apply_transforms:
-        Apply the transforms for the given code. Set it to False if you
-        don't want the default transforms to be applied.
- """
- code = textwrap.dedent(code)
- builder = AstroidBuilder(manager=MANAGER, apply_transforms=apply_transforms)
- return builder.string_build(code, modname=module_name, path=path)
-
-
-def _extract_expressions(node):
- """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
-
- The function walks the AST recursively to search for expressions that
- are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
- expression, it completely removes the function call node from the tree,
- replacing it by the wrapped expression inside the parent.
-
- :param node: An astroid node.
- :type node: astroid.bases.NodeNG
-    :yields: The sequence of wrapped expressions found on the modified tree.
- """
- if (
- isinstance(node, nodes.Call)
- and isinstance(node.func, nodes.Name)
- and node.func.name == _TRANSIENT_FUNCTION
- ):
- real_expr = node.args[0]
- real_expr.parent = node.parent
- # Search for node in all _astng_fields (the fields checked when
- # get_children is called) of its parent. Some of those fields may
- # be lists or tuples, in which case the elements need to be checked.
- # When we find it, replace it by real_expr, so that the AST looks
- # like no call to _TRANSIENT_FUNCTION ever took place.
- for name in node.parent._astroid_fields:
- child = getattr(node.parent, name)
- if isinstance(child, (list, tuple)):
- for idx, compound_child in enumerate(child):
- if compound_child is node:
- child[idx] = real_expr
- elif child is node:
- setattr(node.parent, name, real_expr)
- yield real_expr
- else:
- for child in node.get_children():
- yield from _extract_expressions(child)
-
-
-def _find_statement_by_line(node, line):
- """Extracts the statement on a specific line from an AST.
-
- If the line number of node matches line, it will be returned;
- otherwise its children are iterated and the function is called
- recursively.
-
- :param node: An astroid node.
- :type node: astroid.bases.NodeNG
- :param line: The line number of the statement to extract.
- :type line: int
- :returns: The statement on the line, or None if no statement for the line
- can be found.
- :rtype: astroid.bases.NodeNG or None
- """
- if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)):
-        # This is an inaccuracy in the AST: the nodes that can be
-        # decorated do not carry explicit information on which line
-        # the actual definition (class/def) starts, but .fromlineno
-        # seems to be close enough.
- node_line = node.fromlineno
- else:
- node_line = node.lineno
-
- if node_line == line:
- return node
-
- for child in node.get_children():
- result = _find_statement_by_line(child, line)
- if result:
- return result
-
- return None
-
-
-def extract_node(code, module_name=""):
- """Parses some Python code as a module and extracts a designated AST node.
-
- Statements:
- To extract one or more statement nodes, append #@ to the end of the line
-
- Examples:
- >>> def x():
- >>> def y():
- >>> return 1 #@
-
- The return statement will be extracted.
-
- >>> class X(object):
- >>> def meth(self): #@
- >>> pass
-
- The function object 'meth' will be extracted.
-
- Expressions:
- To extract arbitrary expressions, surround them with the fake
- function call __(...). After parsing, the surrounded expression
- will be returned and the whole AST (accessible via the returned
- node's parent attribute) will look like the function call was
- never there in the first place.
-
- Examples:
- >>> a = __(1)
-
- The const node will be extracted.
-
- >>> def x(d=__(foo.bar)): pass
-
- The node containing the default argument will be extracted.
-
- >>> def foo(a, b):
- >>> return 0 < __(len(a)) < b
-
- The node containing the function call 'len' will be extracted.
-
- If no statements or expressions are selected, the last toplevel
- statement will be returned.
-
- If the selected statement is a discard statement, (i.e. an expression
- turned into a statement), the wrapped expression is returned instead.
-
- For convenience, singleton lists are unpacked.
-
- :param str code: A piece of Python code that is parsed as
- a module. Will be passed through textwrap.dedent first.
- :param str module_name: The name of the module.
- :returns: The designated node from the parse tree, or a list of nodes.
- :rtype: astroid.bases.NodeNG, or a list of nodes.
- """
-
- def _extract(node):
- if isinstance(node, nodes.Expr):
- return node.value
-
- return node
-
- requested_lines = []
- for idx, line in enumerate(code.splitlines()):
- if line.strip().endswith(_STATEMENT_SELECTOR):
- requested_lines.append(idx + 1)
-
- tree = parse(code, module_name=module_name)
- if not tree.body:
- raise ValueError("Empty tree, cannot extract from it")
-
- extracted = []
- if requested_lines:
- extracted = [_find_statement_by_line(tree, line) for line in requested_lines]
-
- # Modifies the tree.
- extracted.extend(_extract_expressions(tree))
-
- if not extracted:
- extracted.append(tree.body[-1])
-
- extracted = [_extract(node) for node in extracted]
- if len(extracted) == 1:
- return extracted[0]
- return extracted
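
The extract_node docstring above describes the two selection mechanisms; a short usage sketch of both, relying only on astroid.extract_node as defined in this file:

import astroid

# Statement selection: the '#@' marker picks the return statement.
ret = astroid.extract_node("""
def f():
    return 1 + 1  #@
""")
print(ret)    # a Return node

# Expression selection: the transient __() call is stripped from the tree
# and the wrapped expression (the Const for 1) is returned instead.
const = astroid.extract_node("__(1) + 2")
print(const)  # a Const node
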
diff --git a/venv/Lib/site-packages/astroid/context.py b/venv/Lib/site-packages/astroid/context.py
deleted file mode 100644
index 70a9208..0000000
--- a/venv/Lib/site-packages/astroid/context.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Various context related utilities, including inference and call contexts."""
-import contextlib
-import pprint
-from typing import Optional
-
-
-class InferenceContext:
- """Provide context for inference
-
- Store already inferred nodes to save time
-    Account for already visited nodes to stop infinite recursion
- """
-
- __slots__ = (
- "path",
- "lookupname",
- "callcontext",
- "boundnode",
- "inferred",
- "extra_context",
- )
-
- def __init__(self, path=None, inferred=None):
- self.path = path or set()
- """
- :type: set(tuple(NodeNG, optional(str)))
-
- Path of visited nodes and their lookupname
-
- Currently this key is ``(node, context.lookupname)``
- """
- self.lookupname = None
- """
- :type: optional[str]
-
- The original name of the node
-
- e.g.
- foo = 1
- The inference of 'foo' is nodes.Const(1) but the lookup name is 'foo'
- """
- self.callcontext = None
- """
- :type: optional[CallContext]
-
- The call arguments and keywords for the given context
- """
- self.boundnode = None
- """
- :type: optional[NodeNG]
-
- The bound node of the given context
-
- e.g. the bound node of object.__new__(cls) is the object node
- """
- self.inferred = inferred or {}
- """
- :type: dict(seq, seq)
-
- Inferred node contexts to their mapped results
- Currently the key is ``(node, lookupname, callcontext, boundnode)``
- and the value is tuple of the inferred results
- """
- self.extra_context = {}
- """
- :type: dict(NodeNG, Context)
-
- Context that needs to be passed down through call stacks
- for call arguments
- """
-
- def push(self, node):
- """Push node into inference path
-
- :return: True if node is already in context path else False
- :rtype: bool
-
- Allows one to see if the given node has already
- been looked at for this inference context"""
- name = self.lookupname
- if (node, name) in self.path:
- return True
-
- self.path.add((node, name))
- return False
-
- def clone(self):
- """Clone inference path
-
- For example, each side of a binary operation (BinOp)
-        starts with the same context but diverges as each side is inferred,
-        so the InferenceContext will need to be cloned"""
- # XXX copy lookupname/callcontext ?
- clone = InferenceContext(self.path, inferred=self.inferred)
- clone.callcontext = self.callcontext
- clone.boundnode = self.boundnode
- clone.extra_context = self.extra_context
- return clone
-
- def cache_generator(self, key, generator):
- """Cache result of generator into dictionary
-
- Used to cache inference results"""
- results = []
- for result in generator:
- results.append(result)
- yield result
-
- self.inferred[key] = tuple(results)
-
- @contextlib.contextmanager
- def restore_path(self):
- path = set(self.path)
- yield
- self.path = path
-
- def __str__(self):
- state = (
- "%s=%s"
- % (field, pprint.pformat(getattr(self, field), width=80 - len(field)))
- for field in self.__slots__
- )
- return "%s(%s)" % (type(self).__name__, ",\n ".join(state))
-
-
-class CallContext:
- """Holds information for a call site."""
-
- __slots__ = ("args", "keywords")
-
- def __init__(self, args, keywords=None):
- """
- :param List[NodeNG] args: Call positional arguments
- :param Union[List[nodes.Keyword], None] keywords: Call keywords
- """
- self.args = args
- if keywords:
- keywords = [(arg.arg, arg.value) for arg in keywords]
- else:
- keywords = []
- self.keywords = keywords
-
-
-def copy_context(context: Optional[InferenceContext]) -> InferenceContext:
- """Clone a context if given, or return a fresh contexxt"""
- if context is not None:
- return context.clone()
-
- return InferenceContext()
-
-
-def bind_context_to_node(context, node):
- """Give a context a boundnode
- to retrieve the correct function name or attribute value
-    with further inference.
-
- Do not use an existing context since the boundnode could then
- be incorrectly propagated higher up in the call stack.
-
- :param context: Context to use
- :type context: Optional(context)
-
- :param node: Node to do name lookups from
- :type node NodeNG:
-
- :returns: A new context
- :rtype: InferenceContext
- """
- context = copy_context(context)
- context.boundnode = node
- return context
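
The push/clone machinery above is what keeps inference from recursing forever. A small sketch of how a caller typically drives it (the "x = 1" snippet is arbitrary):

import astroid
from astroid import context as contextmod

node = astroid.extract_node("x = 1")
ctx = contextmod.InferenceContext()

print(ctx.push(node))   # False: first visit, (node, lookupname) is recorded
print(ctx.push(node))   # True: already on the path, the caller should stop

clone = ctx.clone()     # new context object sharing the 'inferred' cache,
                        # as done when the two sides of a BinOp diverge
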
diff --git a/venv/Lib/site-packages/astroid/decorators.py b/venv/Lib/site-packages/astroid/decorators.py
deleted file mode 100644
index 1448757..0000000
--- a/venv/Lib/site-packages/astroid/decorators.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
-# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-""" A few useful function/method decorators."""
-
-import functools
-
-import wrapt
-
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import util
-
-
-@wrapt.decorator
-def cached(func, instance, args, kwargs):
- """Simple decorator to cache result of method calls without args."""
- cache = getattr(instance, "__cache", None)
- if cache is None:
- instance.__cache = cache = {}
- try:
- return cache[func]
- except KeyError:
- cache[func] = result = func(*args, **kwargs)
- return result
-
-
-class cachedproperty:
- """ Provides a cached property equivalent to the stacking of
- @cached and @property, but more efficient.
-
- After first usage, the <property_name> becomes part of the object's
- __dict__. Doing:
-
- del obj.<property_name> empties the cache.
-
- Idea taken from the pyramid_ framework and the mercurial_ project.
-
- .. _pyramid: http://pypi.python.org/pypi/pyramid
- .. _mercurial: http://pypi.python.org/pypi/Mercurial
- """
-
- __slots__ = ("wrapped",)
-
- def __init__(self, wrapped):
- try:
- wrapped.__name__
- except AttributeError as exc:
- raise TypeError("%s must have a __name__ attribute" % wrapped) from exc
- self.wrapped = wrapped
-
- @property
- def __doc__(self):
- doc = getattr(self.wrapped, "__doc__", None)
- return "<wrapped by the cachedproperty decorator>%s" % (
- "\n%s" % doc if doc else ""
- )
-
- def __get__(self, inst, objtype=None):
- if inst is None:
- return self
- val = self.wrapped(inst)
- setattr(inst, self.wrapped.__name__, val)
- return val
-
-
-def path_wrapper(func):
- """return the given infer function wrapped to handle the path
-
- Used to stop inference if the node has already been looked
- at for a given `InferenceContext` to prevent infinite recursion
- """
-
- @functools.wraps(func)
- def wrapped(node, context=None, _func=func, **kwargs):
- """wrapper function handling context"""
- if context is None:
- context = contextmod.InferenceContext()
- if context.push(node):
- return None
-
- yielded = set()
- generator = _func(node, context, **kwargs)
- try:
- while True:
- res = next(generator)
- # unproxy only true instance, not const, tuple, dict...
- if res.__class__.__name__ == "Instance":
- ares = res._proxied
- else:
- ares = res
- if ares not in yielded:
- yield res
- yielded.add(ares)
- except StopIteration as error:
- if error.args:
- return error.args[0]
- return None
-
- return wrapped
-
-
-@wrapt.decorator
-def yes_if_nothing_inferred(func, instance, args, kwargs):
- generator = func(*args, **kwargs)
-
- try:
- yield next(generator)
- except StopIteration:
- # generator is empty
- yield util.Uninferable
- return
-
- yield from generator
-
-
-@wrapt.decorator
-def raise_if_nothing_inferred(func, instance, args, kwargs):
- generator = func(*args, **kwargs)
-
- try:
- yield next(generator)
- except StopIteration as error:
- # generator is empty
- if error.args:
- # pylint: disable=not-a-mapping
- raise exceptions.InferenceError(**error.args[0])
- raise exceptions.InferenceError(
- "StopIteration raised without any error information."
- )
-
- yield from generator
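
cachedproperty above is a non-data descriptor: the first access computes the value and stores it on the instance, so later lookups bypass the descriptor entirely. A tiny sketch of that behaviour (Example is an invented class):

from astroid.decorators import cachedproperty

class Example:
    @cachedproperty
    def answer(self):
        print("computed once")
        return 42

e = Example()
print(e.answer)   # prints 'computed once' then 42; the value lands in e.__dict__
print(e.answer)   # served straight from the instance dict, no recomputation
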
diff --git a/venv/Lib/site-packages/astroid/exceptions.py b/venv/Lib/site-packages/astroid/exceptions.py
deleted file mode 100644
index 7e9d655..0000000
--- a/venv/Lib/site-packages/astroid/exceptions.py
+++ /dev/null
@@ -1,230 +0,0 @@
-# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""this module contains exceptions used in the astroid library
-"""
-from astroid import util
-
-
-class AstroidError(Exception):
- """base exception class for all astroid related exceptions
-
- AstroidError and its subclasses are structured, intended to hold
- objects representing state when the exception is thrown. Field
- values are passed to the constructor as keyword-only arguments.
- Each subclass has its own set of standard fields, but use your
- best judgment to decide whether a specific exception instance
- needs more or fewer fields for debugging. Field values may be
- used to lazily generate the error message: self.message.format()
- will be called with the field names and values supplied as keyword
- arguments.
- """
-
- def __init__(self, message="", **kws):
- super(AstroidError, self).__init__(message)
- self.message = message
- for key, value in kws.items():
- setattr(self, key, value)
-
- def __str__(self):
- return self.message.format(**vars(self))
-
-
-class AstroidBuildingError(AstroidError):
- """exception class when we are unable to build an astroid representation
-
- Standard attributes:
- modname: Name of the module that AST construction failed for.
- error: Exception raised during construction.
- """
-
- def __init__(self, message="Failed to import module {modname}.", **kws):
- super(AstroidBuildingError, self).__init__(message, **kws)
-
-
-class AstroidImportError(AstroidBuildingError):
- """Exception class used when a module can't be imported by astroid."""
-
-
-class TooManyLevelsError(AstroidImportError):
- """Exception class which is raised when a relative import was beyond the top-level.
-
- Standard attributes:
- level: The level which was attempted.
- name: the name of the module on which the relative import was attempted.
- """
-
- level = None
- name = None
-
- def __init__(
- self,
- message="Relative import with too many levels " "({level}) for module {name!r}",
- **kws
- ):
- super(TooManyLevelsError, self).__init__(message, **kws)
-
-
-class AstroidSyntaxError(AstroidBuildingError):
- """Exception class used when a module can't be parsed."""
-
-
-class NoDefault(AstroidError):
- """raised by function's `default_value` method when an argument has
- no default value
-
- Standard attributes:
- func: Function node.
- name: Name of argument without a default.
- """
-
- func = None
- name = None
-
- def __init__(self, message="{func!r} has no default for {name!r}.", **kws):
- super(NoDefault, self).__init__(message, **kws)
-
-
-class ResolveError(AstroidError):
- """Base class of astroid resolution/inference error.
-
-    ResolveError is not intended to be raised directly.
-
- Standard attributes:
- context: InferenceContext object.
- """
-
- context = None
-
-
-class MroError(ResolveError):
- """Error raised when there is a problem with method resolution of a class.
-
- Standard attributes:
- mros: A sequence of sequences containing ClassDef nodes.
- cls: ClassDef node whose MRO resolution failed.
- context: InferenceContext object.
- """
-
- mros = ()
- cls = None
-
- def __str__(self):
- mro_names = ", ".join(
- "({})".format(", ".join(b.name for b in m)) for m in self.mros
- )
- return self.message.format(mros=mro_names, cls=self.cls)
-
-
-class DuplicateBasesError(MroError):
- """Error raised when there are duplicate bases in the same class bases."""
-
-
-class InconsistentMroError(MroError):
- """Error raised when a class's MRO is inconsistent."""
-
-
-class SuperError(ResolveError):
- """Error raised when there is a problem with a *super* call.
-
- Standard attributes:
- *super_*: The Super instance that raised the exception.
- context: InferenceContext object.
- """
-
- super_ = None
-
- def __str__(self):
- return self.message.format(**vars(self.super_))
-
-
-class InferenceError(ResolveError):
- """raised when we are unable to infer a node
-
- Standard attributes:
- node: The node inference was called on.
- context: InferenceContext object.
- """
-
- node = None
- context = None
-
- def __init__(self, message="Inference failed for {node!r}.", **kws):
- super(InferenceError, self).__init__(message, **kws)
-
-
-# Why does this inherit from InferenceError rather than ResolveError?
-# Changing it causes some inference tests to fail.
-class NameInferenceError(InferenceError):
- """Raised when a name lookup fails, corresponds to NameError.
-
- Standard attributes:
- name: The name for which lookup failed, as a string.
- scope: The node representing the scope in which the lookup occurred.
- context: InferenceContext object.
- """
-
- name = None
- scope = None
-
- def __init__(self, message="{name!r} not found in {scope!r}.", **kws):
- super(NameInferenceError, self).__init__(message, **kws)
-
-
-class AttributeInferenceError(ResolveError):
- """Raised when an attribute lookup fails, corresponds to AttributeError.
-
- Standard attributes:
- target: The node for which lookup failed.
- attribute: The attribute for which lookup failed, as a string.
- context: InferenceContext object.
- """
-
- target = None
- attribute = None
-
- def __init__(self, message="{attribute!r} not found on {target!r}.", **kws):
- super(AttributeInferenceError, self).__init__(message, **kws)
-
-
-class UseInferenceDefault(Exception):
- """exception to be raised in custom inference function to indicate that it
- should go back to the default behaviour
- """
-
-
-class _NonDeducibleTypeHierarchy(Exception):
- """Raised when is_subtype / is_supertype can't deduce the relation between two types."""
-
-
-class AstroidIndexError(AstroidError):
- """Raised when an Indexable / Mapping does not have an index / key."""
-
-
-class AstroidTypeError(AstroidError):
- """Raised when a TypeError would be expected in Python code."""
-
-
-class InferenceOverwriteError(AstroidError):
- """Raised when an inference tip is overwritten
-
- Currently only used for debugging.
- """
-
-
-# Backwards-compatibility aliases
-OperationError = util.BadOperationMessage
-UnaryOperationError = util.BadUnaryOperationMessage
-BinaryOperationError = util.BadBinaryOperationMessage
-
-SuperArgumentTypeError = SuperError
-UnresolvableName = NameInferenceError
-NotFoundError = AttributeInferenceError
-AstroidBuildingException = AstroidBuildingError
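
As the AstroidError docstring explains, keyword arguments become attributes and the message is formatted lazily from them in __str__. A quick sketch (the module name is made up):

from astroid.exceptions import AstroidBuildingError

try:
    raise AstroidBuildingError(modname="made_up_module")
except AstroidBuildingError as exc:
    # __str__ formats the default message with the stored attributes.
    print(exc)   # Failed to import module made_up_module.
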
diff --git a/venv/Lib/site-packages/astroid/helpers.py b/venv/Lib/site-packages/astroid/helpers.py
deleted file mode 100644
index be133b3..0000000
--- a/venv/Lib/site-packages/astroid/helpers.py
+++ /dev/null
@@ -1,273 +0,0 @@
-# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""
-Various helper utilities.
-"""
-
-import builtins as builtins_mod
-
-from astroid import bases
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import manager
-from astroid import nodes
-from astroid import raw_building
-from astroid import scoped_nodes
-from astroid import util
-
-
-BUILTINS = builtins_mod.__name__
-
-
-def _build_proxy_class(cls_name, builtins):
- proxy = raw_building.build_class(cls_name)
- proxy.parent = builtins
- return proxy
-
-
-def _function_type(function, builtins):
- if isinstance(function, scoped_nodes.Lambda):
- if function.root().name == BUILTINS:
- cls_name = "builtin_function_or_method"
- else:
- cls_name = "function"
- elif isinstance(function, bases.BoundMethod):
- cls_name = "method"
- elif isinstance(function, bases.UnboundMethod):
- cls_name = "function"
- return _build_proxy_class(cls_name, builtins)
-
-
-def _object_type(node, context=None):
- astroid_manager = manager.AstroidManager()
- builtins = astroid_manager.builtins_module
- context = context or contextmod.InferenceContext()
-
- for inferred in node.infer(context=context):
- if isinstance(inferred, scoped_nodes.ClassDef):
- if inferred.newstyle:
- metaclass = inferred.metaclass(context=context)
- if metaclass:
- yield metaclass
- continue
- yield builtins.getattr("type")[0]
- elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
- yield _function_type(inferred, builtins)
- elif isinstance(inferred, scoped_nodes.Module):
- yield _build_proxy_class("module", builtins)
- else:
- yield inferred._proxied
-
-
-def object_type(node, context=None):
- """Obtain the type of the given node
-
- This is used to implement the ``type`` builtin, which means that it's
-    used for inferring type calls, as well as in a couple of other places
- in the inference.
- The node will be inferred first, so this function can support all
- sorts of objects, as long as they support inference.
- """
-
- try:
- types = set(_object_type(node, context))
- except exceptions.InferenceError:
- return util.Uninferable
- if len(types) > 1 or not types:
- return util.Uninferable
- return list(types)[0]
-
-
-def _object_type_is_subclass(obj_type, class_or_seq, context=None):
- if not isinstance(class_or_seq, (tuple, list)):
- class_seq = (class_or_seq,)
- else:
- class_seq = class_or_seq
-
- if obj_type is util.Uninferable:
- return util.Uninferable
-
- # Instances are not types
- class_seq = [
- item if not isinstance(item, bases.Instance) else util.Uninferable
- for item in class_seq
- ]
- # strict compatibility with issubclass
- # issubclass(type, (object, 1)) evaluates to true
- # issubclass(object, (1, type)) raises TypeError
- for klass in class_seq:
- if klass is util.Uninferable:
- raise exceptions.AstroidTypeError("arg 2 must be a type or tuple of types")
-
- for obj_subclass in obj_type.mro():
- if obj_subclass == klass:
- return True
- return False
-
-
-def object_isinstance(node, class_or_seq, context=None):
- """Check if a node 'isinstance' any node in class_or_seq
-
- :param node: A given node
- :param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]]
- :rtype: bool
-
-    :raises AstroidTypeError: if the given ``class_or_seq`` are not types
- """
- obj_type = object_type(node, context)
- if obj_type is util.Uninferable:
- return util.Uninferable
- return _object_type_is_subclass(obj_type, class_or_seq, context=context)
-
-
-def object_issubclass(node, class_or_seq, context=None):
- """Check if a type is a subclass of any node in class_or_seq
-
- :param node: A given node
-    :param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]]
-    :rtype: bool
-
-    :raises AstroidTypeError: if the given ``class_or_seq`` are not types
- :raises AstroidError: if the type of the given node cannot be inferred
- or its type's mro doesn't work
- """
- if not isinstance(node, nodes.ClassDef):
- raise TypeError("{node} needs to be a ClassDef node".format(node=node))
- return _object_type_is_subclass(node, class_or_seq, context=context)
-
-
-def safe_infer(node, context=None):
- """Return the inferred value for the given node.
-
- Return None if inference failed or if there is some ambiguity (more than
- one node has been inferred).
- """
- try:
- inferit = node.infer(context=context)
- value = next(inferit)
- except exceptions.InferenceError:
- return None
- try:
- next(inferit)
- return None # None if there is ambiguity on the inferred node
- except exceptions.InferenceError:
- return None # there is some kind of ambiguity
- except StopIteration:
- return value
-
-
-def has_known_bases(klass, context=None):
- """Return true if all base classes of a class could be inferred."""
- try:
- return klass._all_bases_known
- except AttributeError:
- pass
- for base in klass.bases:
- result = safe_infer(base, context=context)
- # TODO: check for A->B->A->B pattern in class structure too?
- if (
- not isinstance(result, scoped_nodes.ClassDef)
- or result is klass
- or not has_known_bases(result, context=context)
- ):
- klass._all_bases_known = False
- return False
- klass._all_bases_known = True
- return True
-
-
-def _type_check(type1, type2):
- if not all(map(has_known_bases, (type1, type2))):
- raise exceptions._NonDeducibleTypeHierarchy
-
- if not all([type1.newstyle, type2.newstyle]):
- return False
- try:
- return type1 in type2.mro()[:-1]
- except exceptions.MroError:
- # The MRO is invalid.
- raise exceptions._NonDeducibleTypeHierarchy
-
-
-def is_subtype(type1, type2):
- """Check if *type1* is a subtype of *type2*."""
- return _type_check(type1=type2, type2=type1)
-
-
-def is_supertype(type1, type2):
- """Check if *type2* is a supertype of *type1*."""
- return _type_check(type1, type2)
-
-
-def class_instance_as_index(node):
- """Get the value as an index for the given instance.
-
- If an instance provides an __index__ method, then it can
- be used in some scenarios where an integer is expected,
- for instance when multiplying or subscripting a list.
- """
- context = contextmod.InferenceContext()
- context.callcontext = contextmod.CallContext(args=[node])
-
- try:
- for inferred in node.igetattr("__index__", context=context):
- if not isinstance(inferred, bases.BoundMethod):
- continue
-
- for result in inferred.infer_call_result(node, context=context):
- if isinstance(result, nodes.Const) and isinstance(result.value, int):
- return result
- except exceptions.InferenceError:
- pass
- return None
-
-
-def object_len(node, context=None):
- """Infer length of given node object
-
-    :param Union[nodes.ClassDef, nodes.Instance] node: Node to infer length of
-
- :raises AstroidTypeError: If an invalid node is returned
- from __len__ method or no __len__ method exists
- :raises InferenceError: If the given node cannot be inferred
- or if multiple nodes are inferred
- :rtype int: Integer length of node
- """
- # pylint: disable=import-outside-toplevel; circular import
- from astroid.objects import FrozenSet
-
- inferred_node = safe_infer(node, context=context)
- if inferred_node is None or inferred_node is util.Uninferable:
- raise exceptions.InferenceError(node=node)
- if isinstance(inferred_node, nodes.Const) and isinstance(
- inferred_node.value, (bytes, str)
- ):
- return len(inferred_node.value)
- if isinstance(inferred_node, (nodes.List, nodes.Set, nodes.Tuple, FrozenSet)):
- return len(inferred_node.elts)
- if isinstance(inferred_node, nodes.Dict):
- return len(inferred_node.items)
- try:
- node_type = object_type(inferred_node, context=context)
- len_call = next(node_type.igetattr("__len__", context=context))
- except exceptions.AttributeInferenceError:
- raise exceptions.AstroidTypeError(
- "object of type '{}' has no len()".format(len_call.pytype())
- )
-
- result_of_len = next(len_call.infer_call_result(node, context))
- if (
- isinstance(result_of_len, nodes.Const)
- and result_of_len.pytype() == "builtins.int"
- ):
- return result_of_len.value
- raise exceptions.AstroidTypeError(
- "'{}' object cannot be interpreted as an integer".format(result_of_len)
- )
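
safe_infer, object_type and object_len above are the helpers callers combine most often. A brief sketch of what they return for simple literals, assuming astroid.extract_node from builder.py:

import astroid
from astroid import helpers

lst = astroid.extract_node("[1, 2, 3]")
num = astroid.extract_node("42")

print(helpers.safe_infer(lst))        # the List node itself (unambiguous)
print(helpers.object_len(lst))        # 3, via the List.elts shortcut above
print(helpers.object_type(num).name)  # 'int', resolved through builtins
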
diff --git a/venv/Lib/site-packages/astroid/inference.py b/venv/Lib/site-packages/astroid/inference.py
deleted file mode 100644
index 77c6b1d..0000000
--- a/venv/Lib/site-packages/astroid/inference.py
+++ /dev/null
@@ -1,943 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2017 Michał Masłowski <m.maslowski@clearcode.cc>
-# Copyright (c) 2017 Calen Pennington <cale@edx.org>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
-# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""this module contains a set of functions to handle inference on astroid trees
-"""
-
-import functools
-import itertools
-import operator
-
-from astroid import bases
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import decorators
-from astroid import helpers
-from astroid import manager
-from astroid import nodes
-from astroid.interpreter import dunder_lookup
-from astroid import protocols
-from astroid import util
-
-
-MANAGER = manager.AstroidManager()
-
-
-# .infer method ###############################################################
-
-
-def infer_end(self, context=None):
- """inference's end for node such as Module, ClassDef, FunctionDef,
- Const...
-
- """
- yield self
-
-
-nodes.Module._infer = infer_end
-nodes.ClassDef._infer = infer_end
-nodes.FunctionDef._infer = infer_end
-nodes.Lambda._infer = infer_end
-nodes.Const._infer = infer_end
-nodes.Slice._infer = infer_end
-
-
-def _infer_sequence_helper(node, context=None):
- """Infer all values based on _BaseContainer.elts"""
- values = []
-
- for elt in node.elts:
- if isinstance(elt, nodes.Starred):
- starred = helpers.safe_infer(elt.value, context)
- if not starred:
- raise exceptions.InferenceError(node=node, context=context)
- if not hasattr(starred, "elts"):
- raise exceptions.InferenceError(node=node, context=context)
- values.extend(_infer_sequence_helper(starred))
- elif isinstance(elt, nodes.NamedExpr):
- value = helpers.safe_infer(elt.value, context)
- if not value:
- raise exceptions.InferenceError(node=node, context=context)
- values.append(value)
- else:
- values.append(elt)
- return values
-
-
-@decorators.raise_if_nothing_inferred
-def infer_sequence(self, context=None):
- has_starred_named_expr = any(
- isinstance(e, (nodes.Starred, nodes.NamedExpr)) for e in self.elts
- )
- if has_starred_named_expr:
- values = _infer_sequence_helper(self, context)
- new_seq = type(self)(
- lineno=self.lineno, col_offset=self.col_offset, parent=self.parent
- )
- new_seq.postinit(values)
-
- yield new_seq
- else:
- yield self
-
-
-nodes.List._infer = infer_sequence
-nodes.Tuple._infer = infer_sequence
-nodes.Set._infer = infer_sequence
-
-
-def infer_map(self, context=None):
- if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items):
- yield self
- else:
- items = _infer_map(self, context)
- new_seq = type(self)(self.lineno, self.col_offset, self.parent)
- new_seq.postinit(list(items.items()))
- yield new_seq
-
-
-def _update_with_replacement(lhs_dict, rhs_dict):
- """Delete nodes that equate to duplicate keys
-
- Since an astroid node doesn't 'equal' another node with the same value,
- this function uses the as_string method to make sure duplicate keys
- don't get through
-
- Note that both the key and the value are astroid nodes
-
-    Fixes an issue with DictUnpack causing duplicate keys
- in inferred Dict items
-
- :param dict(nodes.NodeNG, nodes.NodeNG) lhs_dict: Dictionary to 'merge' nodes into
- :param dict(nodes.NodeNG, nodes.NodeNG) rhs_dict: Dictionary with nodes to pull from
- :return dict(nodes.NodeNG, nodes.NodeNG): merged dictionary of nodes
- """
- combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items())
- # Overwrite keys which have the same string values
- string_map = {key.as_string(): (key, value) for key, value in combined_dict}
- # Return to dictionary
- return dict(string_map.values())
-
-
-def _infer_map(node, context):
- """Infer all values based on Dict.items"""
- values = {}
- for name, value in node.items:
- if isinstance(name, nodes.DictUnpack):
- double_starred = helpers.safe_infer(value, context)
- if not double_starred:
- raise exceptions.InferenceError
- if not isinstance(double_starred, nodes.Dict):
- raise exceptions.InferenceError(node=node, context=context)
- unpack_items = _infer_map(double_starred, context)
- values = _update_with_replacement(values, unpack_items)
- else:
- key = helpers.safe_infer(name, context=context)
- value = helpers.safe_infer(value, context=context)
- if any(not elem for elem in (key, value)):
- raise exceptions.InferenceError(node=node, context=context)
- values = _update_with_replacement(values, {key: value})
- return values
-
-
-nodes.Dict._infer = infer_map
-
-
-def _higher_function_scope(node):
- """ Search for the first function which encloses the given
- scope. This can be used for looking up in that function's
- scope, in case looking up in a lower scope for a particular
- name fails.
-
- :param node: A scope node.
- :returns:
- ``None``, if no parent function scope was found,
- otherwise an instance of :class:`astroid.scoped_nodes.Function`,
- which encloses the given node.
- """
- current = node
- while current.parent and not isinstance(current.parent, nodes.FunctionDef):
- current = current.parent
- if current and current.parent:
- return current.parent
- return None
-
-
-def infer_name(self, context=None):
- """infer a Name: use name lookup rules"""
- frame, stmts = self.lookup(self.name)
- if not stmts:
- # Try to see if the name is enclosed in a nested function
- # and use the higher (first function) scope for searching.
- parent_function = _higher_function_scope(self.scope())
- if parent_function:
- _, stmts = parent_function.lookup(self.name)
-
- if not stmts:
- raise exceptions.NameInferenceError(
- name=self.name, scope=self.scope(), context=context
- )
- context = contextmod.copy_context(context)
- context.lookupname = self.name
- return bases._infer_stmts(stmts, context, frame)
-
-
-# pylint: disable=no-value-for-parameter
-nodes.Name._infer = decorators.raise_if_nothing_inferred(
- decorators.path_wrapper(infer_name)
-)
-nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_call(self, context=None):
- """infer a Call node by trying to guess what the function returns"""
- callcontext = contextmod.copy_context(context)
- callcontext.callcontext = contextmod.CallContext(
- args=self.args, keywords=self.keywords
- )
- callcontext.boundnode = None
- if context is not None:
- callcontext.extra_context = _populate_context_lookup(self, context.clone())
-
- for callee in self.func.infer(context):
- if callee is util.Uninferable:
- yield callee
- continue
- try:
- if hasattr(callee, "infer_call_result"):
- yield from callee.infer_call_result(caller=self, context=callcontext)
- except exceptions.InferenceError:
- continue
- return dict(node=self, context=context)
-
-
-nodes.Call._infer = infer_call
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_import(self, context=None, asname=True):
- """infer an Import node: return the imported module/object"""
- name = context.lookupname
- if name is None:
- raise exceptions.InferenceError(node=self, context=context)
-
- try:
- if asname:
- yield self.do_import_module(self.real_name(name))
- else:
- yield self.do_import_module(name)
- except exceptions.AstroidBuildingError as exc:
- raise exceptions.InferenceError(node=self, context=context) from exc
-
-
-nodes.Import._infer = infer_import
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_import_from(self, context=None, asname=True):
- """infer a ImportFrom node: return the imported module/object"""
- name = context.lookupname
- if name is None:
- raise exceptions.InferenceError(node=self, context=context)
- if asname:
- name = self.real_name(name)
-
- try:
- module = self.do_import_module()
- except exceptions.AstroidBuildingError as exc:
- raise exceptions.InferenceError(node=self, context=context) from exc
-
- try:
- context = contextmod.copy_context(context)
- context.lookupname = name
- stmts = module.getattr(name, ignore_locals=module is self.root())
- return bases._infer_stmts(stmts, context)
- except exceptions.AttributeInferenceError as error:
- raise exceptions.InferenceError(
- error.message, target=self, attribute=name, context=context
- ) from error
-
-
-nodes.ImportFrom._infer = infer_import_from
-
-
-def infer_attribute(self, context=None):
- """infer an Attribute node by using getattr on the associated object"""
- for owner in self.expr.infer(context):
- if owner is util.Uninferable:
- yield owner
- continue
-
- if context and context.boundnode:
- # This handles the situation where the attribute is accessed through a subclass
- # of a base class and the attribute is defined at the base class's level,
-            # by taking into consideration a redefinition in the subclass.
- if isinstance(owner, bases.Instance) and isinstance(
- context.boundnode, bases.Instance
- ):
- try:
- if helpers.is_subtype(
- helpers.object_type(context.boundnode),
- helpers.object_type(owner),
- ):
- owner = context.boundnode
- except exceptions._NonDeducibleTypeHierarchy:
- # Can't determine anything useful.
- pass
-
- try:
- context.boundnode = owner
- yield from owner.igetattr(self.attrname, context)
- context.boundnode = None
- except (exceptions.AttributeInferenceError, exceptions.InferenceError):
- context.boundnode = None
- except AttributeError:
- # XXX method / function
- context.boundnode = None
- return dict(node=self, context=context)
-
-
-nodes.Attribute._infer = decorators.raise_if_nothing_inferred(
- decorators.path_wrapper(infer_attribute)
-)
-# won't work with a path wrapper
-nodes.AssignAttr.infer_lhs = decorators.raise_if_nothing_inferred(infer_attribute)
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_global(self, context=None):
- if context.lookupname is None:
- raise exceptions.InferenceError(node=self, context=context)
- try:
- return bases._infer_stmts(self.root().getattr(context.lookupname), context)
- except exceptions.AttributeInferenceError as error:
- raise exceptions.InferenceError(
- error.message, target=self, attribute=context.lookupname, context=context
- ) from error
-
-
-nodes.Global._infer = infer_global
-
-
-_SUBSCRIPT_SENTINEL = object()
-
-
-@decorators.raise_if_nothing_inferred
-def infer_subscript(self, context=None):
- """Inference for subscripts
-
-    We determine whether the index is a Const
-    or a slice, and pass the result of inference
- to the value's `getitem` method, which should
- handle each supported index type accordingly.
- """
-
- found_one = False
- for value in self.value.infer(context):
- if value is util.Uninferable:
- yield util.Uninferable
- return None
- for index in self.slice.infer(context):
- if index is util.Uninferable:
- yield util.Uninferable
- return None
-
- # Try to deduce the index value.
- index_value = _SUBSCRIPT_SENTINEL
- if value.__class__ == bases.Instance:
- index_value = index
- else:
- if index.__class__ == bases.Instance:
- instance_as_index = helpers.class_instance_as_index(index)
- if instance_as_index:
- index_value = instance_as_index
- else:
- index_value = index
- if index_value is _SUBSCRIPT_SENTINEL:
- raise exceptions.InferenceError(node=self, context=context)
-
- try:
- assigned = value.getitem(index_value, context)
- except (
- exceptions.AstroidTypeError,
- exceptions.AstroidIndexError,
- exceptions.AttributeInferenceError,
- AttributeError,
- ) as exc:
- raise exceptions.InferenceError(node=self, context=context) from exc
-
- # Prevent inferring if the inferred subscript
- # is the same as the original subscripted object.
- if self is assigned or assigned is util.Uninferable:
- yield util.Uninferable
- return None
- yield from assigned.infer(context)
- found_one = True
-
- if found_one:
- return dict(node=self, context=context)
- return None
-
-
-nodes.Subscript._infer = decorators.path_wrapper(infer_subscript)
-nodes.Subscript.infer_lhs = infer_subscript
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def _infer_boolop(self, context=None):
- """Infer a boolean operation (and / or / not).
-
- The function will calculate the boolean operation
- for all pairs generated through inference for each component
- node.
- """
- values = self.values
- if self.op == "or":
- predicate = operator.truth
- else:
- predicate = operator.not_
-
- try:
- values = [value.infer(context=context) for value in values]
- except exceptions.InferenceError:
- yield util.Uninferable
- return None
-
- for pair in itertools.product(*values):
- if any(item is util.Uninferable for item in pair):
- # Can't infer the final result, just yield Uninferable.
- yield util.Uninferable
- continue
-
- bool_values = [item.bool_value() for item in pair]
- if any(item is util.Uninferable for item in bool_values):
- # Can't infer the final result, just yield Uninferable.
- yield util.Uninferable
- continue
-
- # Since the boolean operations are short-circuiting operations,
- # this code yields the first value for which the predicate is True
- # and, if no value satisfied the predicate, the last value is
- # returned (or Uninferable if there was no last value).
- # This conforms to the semantics of `and` and `or`:
- # 1 and 0 -> 0
- # 0 and 1 -> 0
- # 1 or 0 -> 1
- # 0 or 1 -> 1
- value = util.Uninferable
- for value, bool_value in zip(pair, bool_values):
- if predicate(bool_value):
- yield value
- break
- else:
- yield value
-
- return dict(node=self, context=context)
-
-
-nodes.BoolOp._infer = _infer_boolop
-
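- # A minimal sketch of the `and`/`or` semantics implemented above,
- # exercised through the public extract_node helper:
- #
- #     import astroid
- #
- #     assert next(astroid.extract_node("1 and 0").infer()).value == 0
- #     assert next(astroid.extract_node("0 or 1").infer()).value == 1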
-
-# UnaryOp, BinOp and AugAssign inferences
-
-
-def _filter_operation_errors(self, infer_callable, context, error):
- for result in infer_callable(self, context):
- if isinstance(result, error):
- # For the sake of .infer(), we don't care about operation
- # errors, which are pylint's job to report. So yield something
- # which shows that we can't infer the result.
- yield util.Uninferable
- else:
- yield result
-
-
-def _infer_unaryop(self, context=None):
- """Infer what an UnaryOp should return when evaluated."""
- for operand in self.operand.infer(context):
- try:
- yield operand.infer_unary_op(self.op)
- except TypeError as exc:
- # The operand doesn't support this operation.
- yield util.BadUnaryOperationMessage(operand, self.op, exc)
- except AttributeError as exc:
- meth = protocols.UNARY_OP_METHOD[self.op]
- if meth is None:
- # `not node`. Determine node's boolean
- # value and negate its result, unless it is
- # Uninferable, which will be returned as is.
- bool_value = operand.bool_value()
- if bool_value is not util.Uninferable:
- yield nodes.const_factory(not bool_value)
- else:
- yield util.Uninferable
- else:
- if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
- # The operation was used on something which
- # doesn't support it.
- yield util.BadUnaryOperationMessage(operand, self.op, exc)
- continue
-
- try:
- try:
- methods = dunder_lookup.lookup(operand, meth)
- except exceptions.AttributeInferenceError:
- yield util.BadUnaryOperationMessage(operand, self.op, exc)
- continue
-
- meth = methods[0]
- inferred = next(meth.infer(context=context))
- if inferred is util.Uninferable or not inferred.callable():
- continue
-
- context = contextmod.copy_context(context)
- context.callcontext = contextmod.CallContext(args=[operand])
- call_results = inferred.infer_call_result(self, context=context)
- result = next(call_results, None)
- if result is None:
- # Failed to infer, return the same type.
- yield operand
- else:
- yield result
- except exceptions.AttributeInferenceError as exc:
- # The unary operation special method was not found.
- yield util.BadUnaryOperationMessage(operand, self.op, exc)
- except exceptions.InferenceError:
- yield util.Uninferable
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_unaryop(self, context=None):
- """Infer what an UnaryOp should return when evaluated."""
- yield from _filter_operation_errors(
- self, _infer_unaryop, context, util.BadUnaryOperationMessage
- )
- return dict(node=self, context=context)
-
-
-nodes.UnaryOp._infer_unaryop = _infer_unaryop
-nodes.UnaryOp._infer = infer_unaryop
-
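- # Usage sketch for the unary hooks above; `not` goes through bool_value()
- # while `-` uses the operand's infer_unary_op protocol:
- #
- #     import astroid
- #
- #     assert next(astroid.extract_node("not []").infer()).value is True
- #     assert next(astroid.extract_node("-5").infer()).value == -5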
-
-def _is_not_implemented(const):
- """Check if the given const node is NotImplemented."""
- return isinstance(const, nodes.Const) and const.value is NotImplemented
-
-
-def _invoke_binop_inference(instance, opnode, op, other, context, method_name):
- """Invoke binary operation inference on the given instance."""
- methods = dunder_lookup.lookup(instance, method_name)
- context = contextmod.bind_context_to_node(context, instance)
- method = methods[0]
- inferred = next(method.infer(context=context))
- if inferred is util.Uninferable:
- raise exceptions.InferenceError
- return instance.infer_binary_op(opnode, op, other, context, inferred)
-
-
-def _aug_op(instance, opnode, op, other, context, reverse=False):
- """Get an inference callable for an augmented binary operation."""
- method_name = protocols.AUGMENTED_OP_METHOD[op]
- return functools.partial(
- _invoke_binop_inference,
- instance=instance,
- op=op,
- opnode=opnode,
- other=other,
- context=context,
- method_name=method_name,
- )
-
-
-def _bin_op(instance, opnode, op, other, context, reverse=False):
- """Get an inference callable for a normal binary operation.
-
- If *reverse* is True, then the reflected method will be used instead.
- """
- if reverse:
- method_name = protocols.REFLECTED_BIN_OP_METHOD[op]
- else:
- method_name = protocols.BIN_OP_METHOD[op]
- return functools.partial(
- _invoke_binop_inference,
- instance=instance,
- op=op,
- opnode=opnode,
- other=other,
- context=context,
- method_name=method_name,
- )
-
-
-def _get_binop_contexts(context, left, right):
- """Get contexts for binary operations.
-
- This will return two inference contexts, the first one
- for x.__op__(y), the other one for y.__rop__(x), where
- only the arguments are swapped.
- """
- # The order is important, since the first one should be
- # left.__op__(right).
- for arg in (right, left):
- new_context = context.clone()
- new_context.callcontext = contextmod.CallContext(args=[arg])
- new_context.boundnode = None
- yield new_context
-
-
-def _same_type(type1, type2):
- """Check if type1 is the same as type2."""
- return type1.qname() == type2.qname()
-
-
-def _get_binop_flow(
- left, left_type, binary_opnode, right, right_type, context, reverse_context
-):
- """Get the flow for binary operations.
-
- The rules are a bit messy:
-
- * if left and right have the same type, then only one
- method will be called, left.__op__(right)
- * if left and right are unrelated typewise, then first
- left.__op__(right) is tried and if this does not exist
- or returns NotImplemented, then right.__rop__(left) is tried.
- * if left is a subtype of right, then only left.__op__(right)
- is tried.
- * if left is a supertype of right, then right.__rop__(left)
- is first tried and then left.__op__(right)
- """
- op = binary_opnode.op
- if _same_type(left_type, right_type):
- methods = [_bin_op(left, binary_opnode, op, right, context)]
- elif helpers.is_subtype(left_type, right_type):
- methods = [_bin_op(left, binary_opnode, op, right, context)]
- elif helpers.is_supertype(left_type, right_type):
- methods = [
- _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
- _bin_op(left, binary_opnode, op, right, context),
- ]
- else:
- methods = [
- _bin_op(left, binary_opnode, op, right, context),
- _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
- ]
- return methods
-
-
-def _get_aug_flow(
- left, left_type, aug_opnode, right, right_type, context, reverse_context
-):
- """Get the flow for augmented binary operations.
-
- The rules are a bit messy:
-
- * if left and right have the same type, then left.__augop__(right)
- is first tried and then left.__op__(right).
- * if left and right are unrelated typewise, then
- left.__augop__(right) is tried, then left.__op__(right)
- is tried and then right.__rop__(left) is tried.
- * if left is a subtype of right, then left.__augop__(right)
- is tried and then left.__op__(right).
- * if left is a supertype of right, then left.__augop__(right)
- is tried, then right.__rop__(left) and then
- left.__op__(right)
- """
- bin_op = aug_opnode.op.strip("=")
- aug_op = aug_opnode.op
- if _same_type(left_type, right_type):
- methods = [
- _aug_op(left, aug_opnode, aug_op, right, context),
- _bin_op(left, aug_opnode, bin_op, right, context),
- ]
- elif helpers.is_subtype(left_type, right_type):
- methods = [
- _aug_op(left, aug_opnode, aug_op, right, context),
- _bin_op(left, aug_opnode, bin_op, right, context),
- ]
- elif helpers.is_supertype(left_type, right_type):
- methods = [
- _aug_op(left, aug_opnode, aug_op, right, context),
- _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
- _bin_op(left, aug_opnode, bin_op, right, context),
- ]
- else:
- methods = [
- _aug_op(left, aug_opnode, aug_op, right, context),
- _bin_op(left, aug_opnode, bin_op, right, context),
- _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
- ]
- return methods
-
-
-def _infer_binary_operation(left, right, binary_opnode, context, flow_factory):
- """Infer a binary operation between a left operand and a right operand
-
- This is used by both normal binary operations and augmented binary
- operations; the only difference is the flow factory used.
- """
-
- context, reverse_context = _get_binop_contexts(context, left, right)
- left_type = helpers.object_type(left)
- right_type = helpers.object_type(right)
- methods = flow_factory(
- left, left_type, binary_opnode, right, right_type, context, reverse_context
- )
- for method in methods:
- try:
- results = list(method())
- except AttributeError:
- continue
- except exceptions.AttributeInferenceError:
- continue
- except exceptions.InferenceError:
- yield util.Uninferable
- return
- else:
- if any(result is util.Uninferable for result in results):
- yield util.Uninferable
- return
-
- if all(map(_is_not_implemented, results)):
- continue
- not_implemented = sum(
- 1 for result in results if _is_not_implemented(result)
- )
- if not_implemented and not_implemented != len(results):
- # Can't infer yet what this is.
- yield util.Uninferable
- return
-
- yield from results
- return
- # The operation doesn't seem to be supported so let the caller know about it
- yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type)
-
-
-def _infer_binop(self, context):
- """Binary operation inference logic."""
- left = self.left
- right = self.right
-
- # We use two separate contexts for evaluating lhs and rhs because
- # evaluating lhs may leave some undesired entries in context.path
- # which could prevent us from inferring the correct value for rhs.
- context = context or contextmod.InferenceContext()
- lhs_context = contextmod.copy_context(context)
- rhs_context = contextmod.copy_context(context)
- lhs_iter = left.infer(context=lhs_context)
- rhs_iter = right.infer(context=rhs_context)
- for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
- if any(value is util.Uninferable for value in (rhs, lhs)):
- # Don't know how to process this.
- yield util.Uninferable
- return
-
- try:
- yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow)
- except exceptions._NonDeducibleTypeHierarchy:
- yield util.Uninferable
-
-
-@decorators.yes_if_nothing_inferred
-@decorators.path_wrapper
-def infer_binop(self, context=None):
- return _filter_operation_errors(
- self, _infer_binop, context, util.BadBinaryOperationMessage
- )
-
-
-nodes.BinOp._infer_binop = _infer_binop
-nodes.BinOp._infer = infer_binop
-
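- # Usage sketch for binary operation inference; constant folding like this
- # relies on the dunder lookup and flow rules above:
- #
- #     import astroid
- #
- #     assert next(astroid.extract_node("2 + 3").infer()).value == 5
- #     assert next(astroid.extract_node("'ab' * 2").infer()).value == "abab"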
-
-def _infer_augassign(self, context=None):
- """Inference logic for augmented binary operations."""
- if context is None:
- context = contextmod.InferenceContext()
-
- rhs_context = context.clone()
-
- lhs_iter = self.target.infer_lhs(context=context)
- rhs_iter = self.value.infer(context=rhs_context)
- for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
- if any(value is util.Uninferable for value in (rhs, lhs)):
- # Don't know how to process this.
- yield util.Uninferable
- return
-
- try:
- yield from _infer_binary_operation(
- left=lhs,
- right=rhs,
- binary_opnode=self,
- context=context,
- flow_factory=_get_aug_flow,
- )
- except exceptions._NonDeducibleTypeHierarchy:
- yield util.Uninferable
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_augassign(self, context=None):
- return _filter_operation_errors(
- self, _infer_augassign, context, util.BadBinaryOperationMessage
- )
-
-
-nodes.AugAssign._infer_augassign = _infer_augassign
-nodes.AugAssign._infer = infer_augassign
-
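- # Usage sketch for augmented assignment inference (extract_node returns
- # the last expression when no node is explicitly marked):
- #
- #     import astroid
- #
- #     node = astroid.extract_node("x = 1\nx += 2\nx")
- #     assert next(node.infer()).value == 3
-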
-# End of binary operation inference.
-
-
-@decorators.raise_if_nothing_inferred
-def infer_arguments(self, context=None):
- name = context.lookupname
- if name is None:
- raise exceptions.InferenceError(node=self, context=context)
- return protocols._arguments_infer_argname(self, name, context)
-
-
-nodes.Arguments._infer = infer_arguments
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_assign(self, context=None):
- """Infer an AssignName/AssignAttr: we need to inspect the RHS part of the
- assign node.
- """
- stmt = self.statement()
- if isinstance(stmt, nodes.AugAssign):
- return stmt.infer(context)
-
- stmts = list(self.assigned_stmts(context=context))
- return bases._infer_stmts(stmts, context)
-
-
-nodes.AssignName._infer = infer_assign
-nodes.AssignAttr._infer = infer_assign
-
-
-@decorators.raise_if_nothing_inferred
-@decorators.path_wrapper
-def infer_empty_node(self, context=None):
- if not self.has_underlying_object():
- yield util.Uninferable
- else:
- try:
- yield from MANAGER.infer_ast_from_something(self.object, context=context)
- except exceptions.AstroidError:
- yield util.Uninferable
-
-
-nodes.EmptyNode._infer = infer_empty_node
-
-
-@decorators.raise_if_nothing_inferred
-def infer_index(self, context=None):
- return self.value.infer(context)
-
-
-nodes.Index._infer = infer_index
-
-# TODO: move directly into bases.Instance when the dependency hell
-# will be solved.
-def instance_getitem(self, index, context=None):
- # Rewrap index to Const for this case
- new_context = contextmod.bind_context_to_node(context, self)
- if not context:
- context = new_context
-
- # Create a new callcontext for providing index as an argument.
- new_context.callcontext = contextmod.CallContext(args=[index])
-
- method = next(self.igetattr("__getitem__", context=context), None)
- if not isinstance(method, bases.BoundMethod):
- raise exceptions.InferenceError(
- "Could not find __getitem__ for {node!r}.", node=self, context=context
- )
-
- return next(method.infer_call_result(self, new_context))
-
-
-bases.Instance.getitem = instance_getitem
-
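- # Usage sketch: subscripting an instance falls back to the class's
- # __getitem__, inferred through the bound-method machinery above:
- #
- #     import astroid
- #
- #     node = astroid.extract_node('''
- #     class C:
- #         def __getitem__(self, index):
- #             return index * 2
- #     C()[21]
- #     ''')
- #     assert next(node.infer()).value == 42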
-
-def _populate_context_lookup(call, context):
- # Allows the context to be saved for later
- # use, for inference inside a function.
- context_lookup = {}
- if context is None:
- return context_lookup
- for arg in call.args:
- if isinstance(arg, nodes.Starred):
- context_lookup[arg.value] = context
- else:
- context_lookup[arg] = context
- keywords = call.keywords if call.keywords is not None else []
- for keyword in keywords:
- context_lookup[keyword.value] = context
- return context_lookup
-
-
-@decorators.raise_if_nothing_inferred
-def infer_ifexp(self, context=None):
- """Support IfExp inference
-
- If we can't infer the truthiness of the condition, we default
- to inferring both branches. Otherwise, we infer either branch
- depending on the condition.
- """
- both_branches = False
- # We use two separate contexts for evaluating lhs and rhs because
- # evaluating lhs may leave some undesired entries in context.path
- # which could prevent us from inferring the correct value for rhs.
-
- context = context or contextmod.InferenceContext()
- lhs_context = contextmod.copy_context(context)
- rhs_context = contextmod.copy_context(context)
- try:
- test = next(self.test.infer(context=context.clone()))
- except exceptions.InferenceError:
- both_branches = True
- else:
- if test is not util.Uninferable:
- if test.bool_value():
- yield from self.body.infer(context=lhs_context)
- else:
- yield from self.orelse.infer(context=rhs_context)
- else:
- both_branches = True
- if both_branches:
- yield from self.body.infer(context=lhs_context)
- yield from self.orelse.infer(context=rhs_context)
-
-
-nodes.IfExp._infer = infer_ifexp
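-
- # Usage sketch for the IfExp behaviour described above:
- #
- #     import astroid
- #
- #     # Inferable condition: only the matching branch is yielded.
- #     assert next(astroid.extract_node("1 if True else 2").infer()).value == 1
- #
- #     # Uninferable condition: both branches are yielded.
- #     node = astroid.extract_node("1 if unknown() else 2")
- #     assert [const.value for const in node.infer()] == [1, 2]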
diff --git a/venv/Lib/site-packages/astroid/interpreter/__init__.py b/venv/Lib/site-packages/astroid/interpreter/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/__init__.py
+++ /dev/null
diff --git a/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 1bd9d33..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc b/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc
deleted file mode 100644
index 4001903..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc b/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc
deleted file mode 100644
index dc15f91..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py b/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py
+++ /dev/null
diff --git a/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 6cdce33..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc b/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc
deleted file mode 100644
index 1f091df..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc b/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc
deleted file mode 100644
index 2a7fdbb..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/astroid/interpreter/_import/spec.py b/venv/Lib/site-packages/astroid/interpreter/_import/spec.py
deleted file mode 100644
index 84e093b..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/_import/spec.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2017 Chris Philip <chrisp533@gmail.com>
-# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
-# Copyright (c) 2017 ioanatia <ioanatia@users.noreply.github.com>
-# Copyright (c) 2017 Calen Pennington <cale@edx.org>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-
-import abc
-import collections
-import distutils
-import enum
-import imp
-import os
-import sys
-import zipimport
-
-try:
- import importlib.machinery
-
- _HAS_MACHINERY = True
-except ImportError:
- _HAS_MACHINERY = False
-
-try:
- from functools import lru_cache
-except ImportError:
- from backports.functools_lru_cache import lru_cache
-
-from . import util
-
-ModuleType = enum.Enum(
- "ModuleType",
- "C_BUILTIN C_EXTENSION PKG_DIRECTORY "
- "PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE "
- "PY_SOURCE PY_ZIPMODULE PY_NAMESPACE",
-)
-_ImpTypes = {
- imp.C_BUILTIN: ModuleType.C_BUILTIN,
- imp.C_EXTENSION: ModuleType.C_EXTENSION,
- imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY,
- imp.PY_COMPILED: ModuleType.PY_COMPILED,
- imp.PY_FROZEN: ModuleType.PY_FROZEN,
- imp.PY_SOURCE: ModuleType.PY_SOURCE,
-}
-if hasattr(imp, "PY_RESOURCE"):
- _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE
-if hasattr(imp, "PY_CODERESOURCE"):
- _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE
-
-
-def _imp_type_to_module_type(imp_type):
- return _ImpTypes[imp_type]
-
-
-_ModuleSpec = collections.namedtuple(
- "_ModuleSpec", "name type location " "origin submodule_search_locations"
-)
-
-
-class ModuleSpec(_ModuleSpec):
- """Defines a class similar to PEP 451's ModuleSpec
-
- A module spec defines a module's name, its type, its location
- and where submodules can be found, if the module is a package.
- """
-
- def __new__(
- cls,
- name,
- module_type,
- location=None,
- origin=None,
- submodule_search_locations=None,
- ):
- return _ModuleSpec.__new__(
- cls,
- name=name,
- type=module_type,
- location=location,
- origin=origin,
- submodule_search_locations=submodule_search_locations,
- )
-
-
-class Finder:
- """A finder is a class which knows how to find a particular module."""
-
- def __init__(self, path=None):
- self._path = path or sys.path
-
- @abc.abstractmethod
- def find_module(self, modname, module_parts, processed, submodule_path):
- """Find the given module
-
- Each finder is responsible for its own way of finding modules, as long
- as it returns a ModuleSpec.
-
- :param str modname: The module which needs to be searched.
- :param list module_parts: It should be a list of strings,
- where each part contributes to the module's
- namespace.
- :param list processed: What parts from the module parts were processed
- so far.
- :param list submodule_path: A list of paths where the module
- can be looked into.
- :returns: A ModuleSpec, describing how and where the module was found,
- or None otherwise.
- """
-
- def contribute_to_path(self, spec, processed):
- """Get a list of extra paths where this finder can search."""
-
-
-class ImpFinder(Finder):
- """A finder based on the imp module."""
-
- def find_module(self, modname, module_parts, processed, submodule_path):
- if submodule_path is not None:
- submodule_path = list(submodule_path)
- try:
- stream, mp_filename, mp_desc = imp.find_module(modname, submodule_path)
- except ImportError:
- return None
-
- # Close resources.
- if stream:
- stream.close()
-
- return ModuleSpec(
- name=modname,
- location=mp_filename,
- module_type=_imp_type_to_module_type(mp_desc[2]),
- )
-
- def contribute_to_path(self, spec, processed):
- if spec.location is None:
- # Builtin.
- return None
-
- if _is_setuptools_namespace(spec.location):
- # extend_path is called, so search sys.path for modules/packages
- # of this name; see the pkgutil.extend_path documentation.
- path = [
- os.path.join(p, *processed)
- for p in sys.path
- if os.path.isdir(os.path.join(p, *processed))
- ]
- # We already import distutils elsewhere in astroid,
- # so if it is the same module, we can use it directly.
- elif spec.name == "distutils" and spec.location in distutils.__path__:
- # distutils is patched inside virtualenvs to pick up submodules
- # from the original Python, not from the virtualenv itself.
- path = list(distutils.__path__)
- else:
- path = [spec.location]
- return path
-
-
-class ExplicitNamespacePackageFinder(ImpFinder):
- """A finder for the explicit namespace packages, generated through pkg_resources."""
-
- def find_module(self, modname, module_parts, processed, submodule_path):
- if processed:
- modname = ".".join(processed + [modname])
- if util.is_namespace(modname) and modname in sys.modules:
- submodule_path = sys.modules[modname].__path__
- return ModuleSpec(
- name=modname,
- location="",
- origin="namespace",
- module_type=ModuleType.PY_NAMESPACE,
- submodule_search_locations=submodule_path,
- )
- return None
-
- def contribute_to_path(self, spec, processed):
- return spec.submodule_search_locations
-
-
-class ZipFinder(Finder):
- """Finder that knows how to find a module inside zip files."""
-
- def __init__(self, path):
- super(ZipFinder, self).__init__(path)
- self._zipimporters = _precache_zipimporters(path)
-
- def find_module(self, modname, module_parts, processed, submodule_path):
- try:
- file_type, filename, path = _search_zip(module_parts, self._zipimporters)
- except ImportError:
- return None
-
- return ModuleSpec(
- name=modname,
- location=filename,
- origin="egg",
- module_type=file_type,
- submodule_search_locations=path,
- )
-
-
-class PathSpecFinder(Finder):
- """Finder based on importlib.machinery.PathFinder."""
-
- def find_module(self, modname, module_parts, processed, submodule_path):
- spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path)
- if spec:
- # origin can be either a string on older Python versions
- # or None in case it is a namespace package:
- # https://github.com/python/cpython/pull/5481
- is_namespace_pkg = spec.origin in ("namespace", None)
- location = spec.origin if not is_namespace_pkg else None
- module_type = ModuleType.PY_NAMESPACE if is_namespace_pkg else None
- spec = ModuleSpec(
- name=spec.name,
- location=location,
- origin=spec.origin,
- module_type=module_type,
- submodule_search_locations=list(spec.submodule_search_locations or []),
- )
- return spec
-
- def contribute_to_path(self, spec, processed):
- if spec.type == ModuleType.PY_NAMESPACE:
- return spec.submodule_search_locations
- return None
-
-
-_SPEC_FINDERS = (ImpFinder, ZipFinder)
-if _HAS_MACHINERY:
- _SPEC_FINDERS += (PathSpecFinder,)
-_SPEC_FINDERS += (ExplicitNamespacePackageFinder,)
-
-
-def _is_setuptools_namespace(location):
- try:
- with open(os.path.join(location, "__init__.py"), "rb") as stream:
- data = stream.read(4096)
- except IOError:
- pass
- else:
- extend_path = b"pkgutil" in data and b"extend_path" in data
- declare_namespace = (
- b"pkg_resources" in data and b"declare_namespace(__name__)" in data
- )
- return extend_path or declare_namespace
-
-
-@lru_cache()
-def _cached_set_diff(left, right):
- result = set(left)
- result.difference_update(right)
- return result
-
-
-def _precache_zipimporters(path=None):
- pic = sys.path_importer_cache
-
- # When measured, despite having the same complexity (O(n)),
- # converting to tuples and then caching the conversion to sets
- # and the set difference is faster than converting to sets
- # and then only caching the set difference.
-
- req_paths = tuple(path or sys.path)
- cached_paths = tuple(pic)
- new_paths = _cached_set_diff(req_paths, cached_paths)
- for entry_path in new_paths:
- try:
- pic[entry_path] = zipimport.zipimporter(entry_path)
- except zipimport.ZipImportError:
- continue
- return pic
-
-
-def _search_zip(modpath, pic):
- for filepath, importer in list(pic.items()):
- if importer is not None:
- found = importer.find_module(modpath[0])
- if found:
- if not importer.find_module(os.path.sep.join(modpath)):
- raise ImportError(
- "No module named %s in %s/%s"
- % (".".join(modpath[1:]), filepath, modpath)
- )
- # import code; code.interact(local=locals())
- return (
- ModuleType.PY_ZIPMODULE,
- os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
- filepath,
- )
- raise ImportError("No module named %s" % ".".join(modpath))
-
-
-def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path):
- finders = [finder(search_path) for finder in _SPEC_FINDERS]
- for finder in finders:
- spec = finder.find_module(modname, module_parts, processed, submodule_path)
- if spec is None:
- continue
- return finder, spec
-
- raise ImportError("No module named %s" % ".".join(module_parts))
-
-
-def find_spec(modpath, path=None):
- """Find a spec for the given module.
-
- :type modpath: list or tuple
- :param modpath:
- the split module name (i.e. the name of a module or package split
- on '.'), with leading empty strings for explicit relative imports
-
- :type path: list or None
- :param path:
- optional list of path where the module or package should be
- searched (use sys.path if nothing or None is given)
-
- :rtype: ModuleSpec
- :return: A module spec, which describes how the module was
- found and where.
- """
- _path = path or sys.path
-
- # Need a copy for not mutating the argument.
- modpath = modpath[:]
-
- submodule_path = None
- module_parts = modpath[:]
- processed = []
-
- while modpath:
- modname = modpath.pop(0)
- finder, spec = _find_spec_with_path(
- _path, modname, module_parts, processed, submodule_path or path
- )
- processed.append(modname)
- if modpath:
- submodule_path = finder.contribute_to_path(spec, processed)
-
- if spec.type == ModuleType.PKG_DIRECTORY:
- spec = spec._replace(submodule_search_locations=submodule_path)
-
- return spec
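-
- # Usage sketch for the helper above; the module name is given pre-split
- # and the result describes where (and as what) the module was found:
- #
- #     from astroid.interpreter._import import spec
- #
- #     found = spec.find_spec(["logging", "handlers"])
- #     found.type        # ModuleType.PY_SOURCE
- #     found.location    # path to logging/handlers.py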
diff --git a/venv/Lib/site-packages/astroid/interpreter/_import/util.py b/venv/Lib/site-packages/astroid/interpreter/_import/util.py
deleted file mode 100644
index a917bd3..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/_import/util.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-
-try:
- import pkg_resources
-except ImportError:
- pkg_resources = None
-
-
-def is_namespace(modname):
- return pkg_resources is not None and modname in pkg_resources._namespace_packages
diff --git a/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py b/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py
deleted file mode 100644
index 0ae9bc9..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Contains logic for retrieving special methods.
-
-This implementation does not rely on the dot attribute access
-logic, found in ``.getattr()``. The difference between these two
-is that the dunder methods are looked up using the type slots
-(you can find more about these here:
-http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/).
-As such, the lookup for the special methods is actually simpler than
-the dot attribute access.
-"""
-import itertools
-
-import astroid
-from astroid import exceptions
-
-
-def _lookup_in_mro(node, name):
- attrs = node.locals.get(name, [])
-
- nodes = itertools.chain.from_iterable(
- ancestor.locals.get(name, []) for ancestor in node.ancestors(recurs=True)
- )
- values = list(itertools.chain(attrs, nodes))
- if not values:
- raise exceptions.AttributeInferenceError(attribute=name, target=node)
-
- return values
-
-
-def lookup(node, name):
- """Look up the given special method name in the given *node*.
-
- If the special method is found, a list of attributes
- is returned. Otherwise, `astroid.AttributeInferenceError`
- is raised.
- """
- if isinstance(
- node, (astroid.List, astroid.Tuple, astroid.Const, astroid.Dict, astroid.Set)
- ):
- return _builtin_lookup(node, name)
- if isinstance(node, astroid.Instance):
- return _lookup_in_mro(node, name)
- if isinstance(node, astroid.ClassDef):
- return _class_lookup(node, name)
-
- raise exceptions.AttributeInferenceError(attribute=name, target=node)
-
-
-def _class_lookup(node, name):
- metaclass = node.metaclass()
- if metaclass is None:
- raise exceptions.AttributeInferenceError(attribute=name, target=node)
-
- return _lookup_in_mro(metaclass, name)
-
-
-def _builtin_lookup(node, name):
- values = node.locals.get(name, [])
- if not values:
- raise exceptions.AttributeInferenceError(attribute=name, target=node)
-
- return values
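-
- # Usage sketch: resolving a dunder method on an inferred instance goes
- # through _lookup_in_mro above:
- #
- #     import astroid
- #     from astroid.interpreter import dunder_lookup
- #
- #     node = astroid.extract_node('''
- #     class A:
- #         def __len__(self):
- #             return 3
- #     A()
- #     ''')
- #     instance = next(node.infer())
- #     dunder_lookup.lookup(instance, "__len__")   # -> [FunctionDef for __len__]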
diff --git a/venv/Lib/site-packages/astroid/interpreter/objectmodel.py b/venv/Lib/site-packages/astroid/interpreter/objectmodel.py
deleted file mode 100644
index 5e488d9..0000000
--- a/venv/Lib/site-packages/astroid/interpreter/objectmodel.py
+++ /dev/null
@@ -1,738 +0,0 @@
-# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2017 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2017 Calen Pennington <cale@edx.org>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-"""
-Data object model, as per https://docs.python.org/3/reference/datamodel.html.
-
-This module describes, at least partially, a data object model for some
-of astroid's nodes. The model contains special attributes that nodes such
-as functions, classes, modules etc have, such as __doc__, __class__,
-__module__ etc, being used when doing attribute lookups over nodes.
-
-For instance, inferring `obj.__class__` will first trigger an inference
-of the `obj` variable. If it was successfully inferred, then an attribute
-`__class__` will be looked for in the inferred object. This is the part
-where the data model comes into play. The model is attached to those nodes
-and the lookup mechanism will try to see if attributes such as
-`__class__` are defined by the model or not. If they are defined,
-the model will be requested to return the corresponding value of that
-attribute. Thus the model can be viewed as a special part of the lookup
-mechanism.
-"""
-
-import itertools
-import pprint
-import os
-import types
-from functools import lru_cache
-
-import astroid
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import node_classes
-
-
-IMPL_PREFIX = "attr_"
-
-
-def _dunder_dict(instance, attributes):
- obj = node_classes.Dict(parent=instance)
-
- # Convert the keys to node strings
- keys = [
- node_classes.Const(value=value, parent=obj) for value in list(attributes.keys())
- ]
-
- # The original attribute has a list of elements for each key,
- # but that is not useful for retrieving the special attribute's value.
- # In this case, we're picking the last value from each list.
- values = [elem[-1] for elem in attributes.values()]
-
- obj.postinit(list(zip(keys, values)))
- return obj
-
-
-class ObjectModel:
- def __init__(self):
- self._instance = None
-
- def __repr__(self):
- result = []
- cname = type(self).__name__
- string = "%(cname)s(%(fields)s)"
- alignment = len(cname) + 1
- for field in sorted(self.attributes()):
- width = 80 - len(field) - alignment
- lines = pprint.pformat(field, indent=2, width=width).splitlines(True)
-
- inner = [lines[0]]
- for line in lines[1:]:
- inner.append(" " * alignment + line)
- result.append(field)
-
- return string % {
- "cname": cname,
- "fields": (",\n" + " " * alignment).join(result),
- }
-
- def __call__(self, instance):
- self._instance = instance
- return self
-
- def __get__(self, instance, cls=None):
- # ObjectModel needs to be a descriptor so that just doing
- # `special_attributes = SomeObjectModel` should be enough in the body of a node.
- # But at the same time, node.special_attributes should return an object
- # which can be used for manipulating the special attributes. That's the reason
- # we pass the instance through which it got accessed to ObjectModel.__call__,
- # returning itself afterwards, so we can still have access to the
- # underlying data model and to the instance for which it got accessed.
- return self(instance)
-
- def __contains__(self, name):
- return name in self.attributes()
-
- @lru_cache(maxsize=None)
- def attributes(self):
- """Get the attributes which are exported by this object model."""
- return [
- obj[len(IMPL_PREFIX) :] for obj in dir(self) if obj.startswith(IMPL_PREFIX)
- ]
-
- def lookup(self, name):
- """Look up the given *name* in the current model
-
- It should return an AST or an interpreter object,
- but if the name is not found, then an AttributeInferenceError will be raised.
- """
-
- if name in self.attributes():
- return getattr(self, IMPL_PREFIX + name)
- raise exceptions.AttributeInferenceError(target=self._instance, attribute=name)
-
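- # Usage sketch: the models defined below are what answer special attribute
- # lookups, e.g. __name__ on a function definition:
- #
- #     import astroid
- #
- #     func = astroid.extract_node("def test(): pass")
- #     assert next(func.igetattr("__name__")).value == "test"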
-
-class ModuleModel(ObjectModel):
- def _builtins(self):
- builtins_ast_module = astroid.MANAGER.builtins_module
- return builtins_ast_module.special_attributes.lookup("__dict__")
-
- @property
- def attr_builtins(self):
- return self._builtins()
-
- @property
- def attr___path__(self):
- if not self._instance.package:
- raise exceptions.AttributeInferenceError(
- target=self._instance, attribute="__path__"
- )
-
- path_objs = [
- node_classes.Const(
- value=path
- if not path.endswith("__init__.py")
- else os.path.dirname(path),
- parent=self._instance,
- )
- for path in self._instance.path
- ]
-
- container = node_classes.List(parent=self._instance)
- container.postinit(path_objs)
-
- return container
-
- @property
- def attr___name__(self):
- return node_classes.Const(value=self._instance.name, parent=self._instance)
-
- @property
- def attr___doc__(self):
- return node_classes.Const(value=self._instance.doc, parent=self._instance)
-
- @property
- def attr___file__(self):
- return node_classes.Const(value=self._instance.file, parent=self._instance)
-
- @property
- def attr___dict__(self):
- return _dunder_dict(self._instance, self._instance.globals)
-
- @property
- def attr___package__(self):
- if not self._instance.package:
- value = ""
- else:
- value = self._instance.name
-
- return node_classes.Const(value=value, parent=self._instance)
-
- # These are related to the Python 3 implementation of the
- # import system,
- # https://docs.python.org/3/reference/import.html#import-related-module-attributes
-
- @property
- def attr___spec__(self):
- # No handling for now.
- return node_classes.Unknown()
-
- @property
- def attr___loader__(self):
- # No handling for now.
- return node_classes.Unknown()
-
- @property
- def attr___cached__(self):
- # No handling for now.
- return node_classes.Unknown()
-
-
-class FunctionModel(ObjectModel):
- @property
- def attr___name__(self):
- return node_classes.Const(value=self._instance.name, parent=self._instance)
-
- @property
- def attr___doc__(self):
- return node_classes.Const(value=self._instance.doc, parent=self._instance)
-
- @property
- def attr___qualname__(self):
- return node_classes.Const(value=self._instance.qname(), parent=self._instance)
-
- @property
- def attr___defaults__(self):
- func = self._instance
- if not func.args.defaults:
- return node_classes.Const(value=None, parent=func)
-
- defaults_obj = node_classes.Tuple(parent=func)
- defaults_obj.postinit(func.args.defaults)
- return defaults_obj
-
- @property
- def attr___annotations__(self):
- obj = node_classes.Dict(parent=self._instance)
-
- if not self._instance.returns:
- returns = None
- else:
- returns = self._instance.returns
-
- args = self._instance.args
- pair_annotations = itertools.chain(
- zip(args.args or [], args.annotations),
- zip(args.kwonlyargs, args.kwonlyargs_annotations),
- zip(args.posonlyargs or [], args.posonlyargs_annotations),
- )
-
- annotations = {
- arg.name: annotation for (arg, annotation) in pair_annotations if annotation
- }
- if args.varargannotation:
- annotations[args.vararg] = args.varargannotation
- if args.kwargannotation:
- annotations[args.kwarg] = args.kwargannotation
- if returns:
- annotations["return"] = returns
-
- items = [
- (node_classes.Const(key, parent=obj), value)
- for (key, value) in annotations.items()
- ]
-
- obj.postinit(items)
- return obj
-
- @property
- def attr___dict__(self):
- return node_classes.Dict(parent=self._instance)
-
- attr___globals__ = attr___dict__
-
- @property
- def attr___kwdefaults__(self):
- def _default_args(args, parent):
- for arg in args.kwonlyargs:
- try:
- default = args.default_value(arg.name)
- except exceptions.NoDefault:
- continue
-
- name = node_classes.Const(arg.name, parent=parent)
- yield name, default
-
- args = self._instance.args
- obj = node_classes.Dict(parent=self._instance)
- defaults = dict(_default_args(args, obj))
-
- obj.postinit(list(defaults.items()))
- return obj
-
- @property
- def attr___module__(self):
- return node_classes.Const(self._instance.root().qname())
-
- @property
- def attr___get__(self):
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import bases
-
- func = self._instance
-
- class DescriptorBoundMethod(bases.BoundMethod):
- """Bound method which knows how to understand calling descriptor binding."""
-
- def implicit_parameters(self):
- # Different than BoundMethod since the signature
- # is different.
- return 0
-
- def infer_call_result(self, caller, context=None):
- if len(caller.args) > 2 or len(caller.args) < 1:
- raise exceptions.InferenceError(
- "Invalid arguments for descriptor binding",
- target=self,
- context=context,
- )
-
- context = contextmod.copy_context(context)
- cls = next(caller.args[0].infer(context=context))
-
- if cls is astroid.Uninferable:
- raise exceptions.InferenceError(
- "Invalid class inferred", target=self, context=context
- )
-
- # For some reason func is a Node that the below
- # code is not expecting
- if isinstance(func, bases.BoundMethod):
- yield func
- return
-
- # Rebuild the original value, but with the parent set as the
- # class where it will be bound.
- new_func = func.__class__(
- name=func.name,
- doc=func.doc,
- lineno=func.lineno,
- col_offset=func.col_offset,
- parent=cls,
- )
- # pylint: disable=no-member
- new_func.postinit(func.args, func.body, func.decorators, func.returns)
-
- # Build a proper bound method that points to our newly built function.
- proxy = bases.UnboundMethod(new_func)
- yield bases.BoundMethod(proxy=proxy, bound=cls)
-
- @property
- def args(self):
- """Overwrite the underlying args to match those of the underlying func
-
- Usually the underlying *func* is a function/method, as in:
-
- def test(self):
- pass
-
- This has only the *self* parameter but when we access test.__get__
- we get a new object which has two parameters, *self* and *type*.
- """
- nonlocal func
- positional_or_keyword_params = func.args.args.copy()
- positional_or_keyword_params.append(astroid.AssignName(name="type"))
-
- positional_only_params = func.args.posonlyargs.copy()
-
- arguments = astroid.Arguments(parent=func.args.parent)
- arguments.postinit(
- args=positional_or_keyword_params,
- posonlyargs=positional_only_params,
- defaults=[],
- kwonlyargs=[],
- kw_defaults=[],
- annotations=[],
- )
- return arguments
-
- return DescriptorBoundMethod(proxy=self._instance, bound=self._instance)
-
- # These are here just for completion.
- @property
- def attr___ne__(self):
- return node_classes.Unknown()
-
- attr___subclasshook__ = attr___ne__
- attr___str__ = attr___ne__
- attr___sizeof__ = attr___ne__
- attr___setattr___ = attr___ne__
- attr___repr__ = attr___ne__
- attr___reduce__ = attr___ne__
- attr___reduce_ex__ = attr___ne__
- attr___new__ = attr___ne__
- attr___lt__ = attr___ne__
- attr___eq__ = attr___ne__
- attr___gt__ = attr___ne__
- attr___format__ = attr___ne__
- attr___delattr___ = attr___ne__
- attr___getattribute__ = attr___ne__
- attr___hash__ = attr___ne__
- attr___init__ = attr___ne__
- attr___dir__ = attr___ne__
- attr___call__ = attr___ne__
- attr___class__ = attr___ne__
- attr___closure__ = attr___ne__
- attr___code__ = attr___ne__
-
-
-class ClassModel(ObjectModel):
- @property
- def attr___module__(self):
- return node_classes.Const(self._instance.root().qname())
-
- @property
- def attr___name__(self):
- return node_classes.Const(self._instance.name)
-
- @property
- def attr___qualname__(self):
- return node_classes.Const(self._instance.qname())
-
- @property
- def attr___doc__(self):
- return node_classes.Const(self._instance.doc)
-
- @property
- def attr___mro__(self):
- if not self._instance.newstyle:
- raise exceptions.AttributeInferenceError(
- target=self._instance, attribute="__mro__"
- )
-
- mro = self._instance.mro()
- obj = node_classes.Tuple(parent=self._instance)
- obj.postinit(mro)
- return obj
-
- @property
- def attr_mro(self):
- if not self._instance.newstyle:
- raise exceptions.AttributeInferenceError(
- target=self._instance, attribute="mro"
- )
-
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import bases
-
- other_self = self
-
- # Cls.mro is a method and we need to return one in order to have a proper inference.
- # The method we're returning is capable of inferring the underlying MRO though.
- class MroBoundMethod(bases.BoundMethod):
- def infer_call_result(self, caller, context=None):
- yield other_self.attr___mro__
-
- implicit_metaclass = self._instance.implicit_metaclass()
- mro_method = implicit_metaclass.locals["mro"][0]
- return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass)
-
- @property
- def attr___bases__(self):
- obj = node_classes.Tuple()
- context = contextmod.InferenceContext()
- elts = list(self._instance._inferred_bases(context))
- obj.postinit(elts=elts)
- return obj
-
- @property
- def attr___class__(self):
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import helpers
-
- return helpers.object_type(self._instance)
-
- @property
- def attr___subclasses__(self):
- """Get the subclasses of the underlying class
-
- This looks only in the current module for retrieving the subclasses,
- so it might miss some of them.
- """
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import bases
- from astroid import scoped_nodes
-
- if not self._instance.newstyle:
- raise exceptions.AttributeInferenceError(
- target=self._instance, attribute="__subclasses__"
- )
-
- qname = self._instance.qname()
- root = self._instance.root()
- classes = [
- cls
- for cls in root.nodes_of_class(scoped_nodes.ClassDef)
- if cls != self._instance and cls.is_subtype_of(qname)
- ]
-
- obj = node_classes.List(parent=self._instance)
- obj.postinit(classes)
-
- class SubclassesBoundMethod(bases.BoundMethod):
- def infer_call_result(self, caller, context=None):
- yield obj
-
- implicit_metaclass = self._instance.implicit_metaclass()
- subclasses_method = implicit_metaclass.locals["__subclasses__"][0]
- return SubclassesBoundMethod(proxy=subclasses_method, bound=implicit_metaclass)
-
- @property
- def attr___dict__(self):
- return node_classes.Dict(parent=self._instance)
-
-
-class SuperModel(ObjectModel):
- @property
- def attr___thisclass__(self):
- return self._instance.mro_pointer
-
- @property
- def attr___self_class__(self):
- return self._instance._self_class
-
- @property
- def attr___self__(self):
- return self._instance.type
-
- @property
- def attr___class__(self):
- return self._instance._proxied
-
-
-class UnboundMethodModel(ObjectModel):
- @property
- def attr___class__(self):
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import helpers
-
- return helpers.object_type(self._instance)
-
- @property
- def attr___func__(self):
- return self._instance._proxied
-
- @property
- def attr___self__(self):
- return node_classes.Const(value=None, parent=self._instance)
-
- attr_im_func = attr___func__
- attr_im_class = attr___class__
- attr_im_self = attr___self__
-
-
-class BoundMethodModel(FunctionModel):
- @property
- def attr___func__(self):
- return self._instance._proxied._proxied
-
- @property
- def attr___self__(self):
- return self._instance.bound
-
-
-class GeneratorModel(FunctionModel):
- def __new__(cls, *args, **kwargs):
- # Append the values from the GeneratorType onto this object.
- ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs)
- generator = astroid.MANAGER.builtins_module["generator"]
- for name, values in generator.locals.items():
- method = values[0]
- patched = lambda cls, meth=method: meth
-
- setattr(type(ret), IMPL_PREFIX + name, property(patched))
-
- return ret
-
- @property
- def attr___name__(self):
- return node_classes.Const(
- value=self._instance.parent.name, parent=self._instance
- )
-
- @property
- def attr___doc__(self):
- return node_classes.Const(
- value=self._instance.parent.doc, parent=self._instance
- )
-
-
-class AsyncGeneratorModel(GeneratorModel):
- def __new__(cls, *args, **kwargs):
- # Append the values from the AGeneratorType onto this object.
- ret = super().__new__(cls, *args, **kwargs)
- astroid_builtins = astroid.MANAGER.builtins_module
- generator = astroid_builtins.get("async_generator")
- if generator is None:
- # Make it backward compatible.
- generator = astroid_builtins.get("generator")
-
- for name, values in generator.locals.items():
- method = values[0]
- patched = lambda cls, meth=method: meth
-
- setattr(type(ret), IMPL_PREFIX + name, property(patched))
-
- return ret
-
-
-class InstanceModel(ObjectModel):
- @property
- def attr___class__(self):
- return self._instance._proxied
-
- @property
- def attr___module__(self):
- return node_classes.Const(self._instance.root().qname())
-
- @property
- def attr___doc__(self):
- return node_classes.Const(self._instance.doc)
-
- @property
- def attr___dict__(self):
- return _dunder_dict(self._instance, self._instance.instance_attrs)
-
-
-# Exception instances
-
-
-class ExceptionInstanceModel(InstanceModel):
- @property
- def attr_args(self):
- message = node_classes.Const("")
- args = node_classes.Tuple(parent=self._instance)
- args.postinit((message,))
- return args
-
- @property
- def attr___traceback__(self):
- builtins_ast_module = astroid.MANAGER.builtins_module
- traceback_type = builtins_ast_module[types.TracebackType.__name__]
- return traceback_type.instantiate_class()
-
-
-class SyntaxErrorInstanceModel(ExceptionInstanceModel):
- @property
- def attr_text(self):
- return node_classes.Const("")
-
-
-class OSErrorInstanceModel(ExceptionInstanceModel):
- @property
- def attr_filename(self):
- return node_classes.Const("")
-
- @property
- def attr_errno(self):
- return node_classes.Const(0)
-
- @property
- def attr_strerror(self):
- return node_classes.Const("")
-
- attr_filename2 = attr_filename
-
-
-class ImportErrorInstanceModel(ExceptionInstanceModel):
- @property
- def attr_name(self):
- return node_classes.Const("")
-
- @property
- def attr_path(self):
- return node_classes.Const("")
-
-
-BUILTIN_EXCEPTIONS = {
- "builtins.SyntaxError": SyntaxErrorInstanceModel,
- "builtins.ImportError": ImportErrorInstanceModel,
- # These are all similar to OSError in terms of attributes
- "builtins.OSError": OSErrorInstanceModel,
- "builtins.BlockingIOError": OSErrorInstanceModel,
- "builtins.BrokenPipeError": OSErrorInstanceModel,
- "builtins.ChildProcessError": OSErrorInstanceModel,
- "builtins.ConnectionAbortedError": OSErrorInstanceModel,
- "builtins.ConnectionError": OSErrorInstanceModel,
- "builtins.ConnectionRefusedError": OSErrorInstanceModel,
- "builtins.ConnectionResetError": OSErrorInstanceModel,
- "builtins.FileExistsError": OSErrorInstanceModel,
- "builtins.FileNotFoundError": OSErrorInstanceModel,
- "builtins.InterruptedError": OSErrorInstanceModel,
- "builtins.IsADirectoryError": OSErrorInstanceModel,
- "builtins.NotADirectoryError": OSErrorInstanceModel,
- "builtins.PermissionError": OSErrorInstanceModel,
- "builtins.ProcessLookupError": OSErrorInstanceModel,
- "builtins.TimeoutError": OSErrorInstanceModel,
-}
-
-
-class DictModel(ObjectModel):
- @property
- def attr___class__(self):
- return self._instance._proxied
-
- def _generic_dict_attribute(self, obj, name):
- """Generate a bound method that can infer the given *obj*."""
-
- class DictMethodBoundMethod(astroid.BoundMethod):
- def infer_call_result(self, caller, context=None):
- yield obj
-
- meth = next(self._instance._proxied.igetattr(name))
- return DictMethodBoundMethod(proxy=meth, bound=self._instance)
-
- @property
- def attr_items(self):
- elems = []
- obj = node_classes.List(parent=self._instance)
- for key, value in self._instance.items:
- elem = node_classes.Tuple(parent=obj)
- elem.postinit((key, value))
- elems.append(elem)
- obj.postinit(elts=elems)
-
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import objects
-
- obj = objects.DictItems(obj)
- return self._generic_dict_attribute(obj, "items")
-
- @property
- def attr_keys(self):
- keys = [key for (key, _) in self._instance.items]
- obj = node_classes.List(parent=self._instance)
- obj.postinit(elts=keys)
-
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import objects
-
- obj = objects.DictKeys(obj)
- return self._generic_dict_attribute(obj, "keys")
-
- @property
- def attr_values(self):
-
- values = [value for (_, value) in self._instance.items]
- obj = node_classes.List(parent=self._instance)
- obj.postinit(values)
-
- # pylint: disable=import-outside-toplevel; circular import
- from astroid import objects
-
- obj = objects.DictValues(obj)
- return self._generic_dict_attribute(obj, "values")
diff --git a/venv/Lib/site-packages/astroid/manager.py b/venv/Lib/site-packages/astroid/manager.py
deleted file mode 100644
index e5fd0d6..0000000
--- a/venv/Lib/site-packages/astroid/manager.py
+++ /dev/null
@@ -1,337 +0,0 @@
-# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 BioGeek <jeroen.vangoey@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2017 Iva Miholic <ivamiho@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""astroid manager: avoids building multiple astroid representations of the
-same module when possible, by providing a class responsible for getting the
-astroid representation from various sources, using a cache of built modules.
-"""
-
-import os
-import zipimport
-
-from astroid import exceptions
-from astroid.interpreter._import import spec
-from astroid import modutils
-from astroid import transforms
-
-
-ZIP_IMPORT_EXTS = (".zip", ".egg", ".whl")
-
-
-def safe_repr(obj):
- try:
- return repr(obj)
- except Exception: # pylint: disable=broad-except
- return "???"
-
-
-class AstroidManager:
- """The astroid manager, responsible for building astroid from files
- or modules.
-
- Use the Borg pattern.
- """
-
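- # Usage sketch: because of the Borg pattern every manager shares the same
- # cache, so a given module is only built once:
- #
- #     import astroid
- #
- #     manager = astroid.MANAGER
- #     module = manager.ast_from_module_name("json")
- #     assert manager.ast_from_module_name("json") is module
-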
- name = "astroid loader"
- brain = {}
-
- def __init__(self):
- self.__dict__ = AstroidManager.brain
- if not self.__dict__:
- # NOTE: cache entries are added by the [re]builder
- self.astroid_cache = {}
- self._mod_file_cache = {}
- self._failed_import_hooks = []
- self.always_load_extensions = False
- self.optimize_ast = False
- self.extension_package_whitelist = set()
- self._transform = transforms.TransformVisitor()
-
- # Export these APIs for convenience
- self.register_transform = self._transform.register_transform
- self.unregister_transform = self._transform.unregister_transform
- self.max_inferable_values = 100
-
- @property
- def builtins_module(self):
- return self.astroid_cache["builtins"]
-
- def visit_transforms(self, node):
- """Visit the transforms and apply them to the given *node*."""
- return self._transform.visit(node)
-
- def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
- """given a file path, return the astroid object"""
- try:
- filepath = modutils.get_source_file(filepath, include_no_ext=True)
- source = True
- except modutils.NoSourceFile:
- pass
- if modname is None:
- try:
- modname = ".".join(modutils.modpath_from_file(filepath))
- except ImportError:
- modname = filepath
- if (
- modname in self.astroid_cache
- and self.astroid_cache[modname].file == filepath
- ):
- return self.astroid_cache[modname]
- if source:
- # pylint: disable=import-outside-toplevel; circular import
- from astroid.builder import AstroidBuilder
-
- return AstroidBuilder(self).file_build(filepath, modname)
- if fallback and modname:
- return self.ast_from_module_name(modname)
- raise exceptions.AstroidBuildingError(
- "Unable to build an AST for {path}.", path=filepath
- )
-
- def _build_stub_module(self, modname):
- # pylint: disable=import-outside-toplevel; circular import
- from astroid.builder import AstroidBuilder
-
- return AstroidBuilder(self).string_build("", modname)
-
- def _build_namespace_module(self, modname, path):
- # pylint: disable=import-outside-toplevel; circular import
- from astroid.builder import build_namespace_package_module
-
- return build_namespace_package_module(modname, path)
-
- def _can_load_extension(self, modname):
- if self.always_load_extensions:
- return True
- if modutils.is_standard_module(modname):
- return True
- parts = modname.split(".")
- return any(
- ".".join(parts[:x]) in self.extension_package_whitelist
- for x in range(1, len(parts) + 1)
- )
-
- def ast_from_module_name(self, modname, context_file=None):
- """given a module name, return the astroid object"""
- if modname in self.astroid_cache:
- return self.astroid_cache[modname]
- if modname == "__main__":
- return self._build_stub_module(modname)
- old_cwd = os.getcwd()
- if context_file:
- os.chdir(os.path.dirname(context_file))
- try:
- found_spec = self.file_from_module_name(modname, context_file)
- if found_spec.type == spec.ModuleType.PY_ZIPMODULE:
- module = self.zip_import_data(found_spec.location)
- if module is not None:
- return module
-
- elif found_spec.type in (
- spec.ModuleType.C_BUILTIN,
- spec.ModuleType.C_EXTENSION,
- ):
- if (
- found_spec.type == spec.ModuleType.C_EXTENSION
- and not self._can_load_extension(modname)
- ):
- return self._build_stub_module(modname)
- try:
- module = modutils.load_module_from_name(modname)
- except Exception as ex:
- raise exceptions.AstroidImportError(
- "Loading {modname} failed with:\n{error}",
- modname=modname,
- path=found_spec.location,
- ) from ex
- return self.ast_from_module(module, modname)
-
- elif found_spec.type == spec.ModuleType.PY_COMPILED:
- raise exceptions.AstroidImportError(
- "Unable to load compiled module {modname}.",
- modname=modname,
- path=found_spec.location,
- )
-
- elif found_spec.type == spec.ModuleType.PY_NAMESPACE:
- return self._build_namespace_module(
- modname, found_spec.submodule_search_locations
- )
-
- if found_spec.location is None:
- raise exceptions.AstroidImportError(
- "Can't find a file for module {modname}.", modname=modname
- )
-
- return self.ast_from_file(found_spec.location, modname, fallback=False)
- except exceptions.AstroidBuildingError as e:
- for hook in self._failed_import_hooks:
- try:
- return hook(modname)
- except exceptions.AstroidBuildingError:
- pass
- raise e
- finally:
- os.chdir(old_cwd)
-
- def zip_import_data(self, filepath):
- if zipimport is None:
- return None
-
- # pylint: disable=import-outside-toplevel; circular import
- from astroid.builder import AstroidBuilder
-
- builder = AstroidBuilder(self)
- for ext in ZIP_IMPORT_EXTS:
- try:
- eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
- except ValueError:
- continue
- try:
- importer = zipimport.zipimporter(eggpath + ext)
- zmodname = resource.replace(os.path.sep, ".")
- if importer.is_package(resource):
- zmodname = zmodname + ".__init__"
- module = builder.string_build(
- importer.get_source(resource), zmodname, filepath
- )
- return module
- except Exception: # pylint: disable=broad-except
- continue
- return None
-
- def file_from_module_name(self, modname, contextfile):
- try:
- value = self._mod_file_cache[(modname, contextfile)]
- except KeyError:
- try:
- value = modutils.file_info_from_modpath(
- modname.split("."), context_file=contextfile
- )
- except ImportError as ex:
- value = exceptions.AstroidImportError(
- "Failed to import module {modname} with error:\n{error}.",
- modname=modname,
- error=ex,
- )
- self._mod_file_cache[(modname, contextfile)] = value
- if isinstance(value, exceptions.AstroidBuildingError):
- raise value
- return value
-
- def ast_from_module(self, module, modname=None):
- """given an imported module, return the astroid object"""
- modname = modname or module.__name__
- if modname in self.astroid_cache:
- return self.astroid_cache[modname]
- try:
- # some builtin modules don't have __file__ attribute
- filepath = module.__file__
- if modutils.is_python_source(filepath):
- return self.ast_from_file(filepath, modname)
- except AttributeError:
- pass
-
- # pylint: disable=import-outside-toplevel; circular import
- from astroid.builder import AstroidBuilder
-
- return AstroidBuilder(self).module_build(module, modname)
-
- def ast_from_class(self, klass, modname=None):
- """get astroid for the given class"""
- if modname is None:
- try:
- modname = klass.__module__
- except AttributeError as exc:
- raise exceptions.AstroidBuildingError(
-                "Unable to get module for class {class_repr}.",
- cls=klass,
- class_repr=safe_repr(klass),
- modname=modname,
- ) from exc
- modastroid = self.ast_from_module_name(modname)
- return modastroid.getattr(klass.__name__)[0] # XXX
-
- def infer_ast_from_something(self, obj, context=None):
-        """infer astroid for the given object (class or instance)"""
- if hasattr(obj, "__class__") and not isinstance(obj, type):
- klass = obj.__class__
- else:
- klass = obj
- try:
- modname = klass.__module__
- except AttributeError as exc:
- raise exceptions.AstroidBuildingError(
- "Unable to get module for {class_repr}.",
- cls=klass,
- class_repr=safe_repr(klass),
- ) from exc
- except Exception as exc:
- raise exceptions.AstroidImportError(
- "Unexpected error while retrieving module for {class_repr}:\n"
- "{error}",
- cls=klass,
- class_repr=safe_repr(klass),
- ) from exc
- try:
- name = klass.__name__
- except AttributeError as exc:
- raise exceptions.AstroidBuildingError(
- "Unable to get name for {class_repr}:\n",
- cls=klass,
- class_repr=safe_repr(klass),
- ) from exc
- except Exception as exc:
- raise exceptions.AstroidImportError(
- "Unexpected error while retrieving name for {class_repr}:\n" "{error}",
- cls=klass,
- class_repr=safe_repr(klass),
- ) from exc
- # take care, on living object __module__ is regularly wrong :(
- modastroid = self.ast_from_module_name(modname)
- if klass is obj:
- for inferred in modastroid.igetattr(name, context):
- yield inferred
- else:
- for inferred in modastroid.igetattr(name, context):
- yield inferred.instantiate_class()
-
- def register_failed_import_hook(self, hook):
- """Registers a hook to resolve imports that cannot be found otherwise.
-
- `hook` must be a function that accepts a single argument `modname` which
- contains the name of the module or package that could not be imported.
-        If `hook` can resolve the import, it must return a node of type
-        `astroid.Module`; otherwise, it must raise `AstroidBuildingError`.
- """
- self._failed_import_hooks.append(hook)
-
- def cache_module(self, module):
- """Cache a module if no module with the same name is known yet."""
- self.astroid_cache.setdefault(module.name, module)
-
- def bootstrap(self):
- """Bootstrap the required AST modules needed for the manager to work
-
- The bootstrap usually involves building the AST for the builtins
- module, which is required by the rest of astroid to work correctly.
- """
- from astroid import raw_building # pylint: disable=import-outside-toplevel
-
- raw_building._astroid_bootstrapping()
-
- def clear_cache(self):
- """Clear the underlying cache. Also bootstraps the builtins module."""
- self.astroid_cache.clear()
- self.bootstrap()
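The deleted manager.py above defines the Borg-style AstroidManager and the failed-import-hook contract. As a rough, illustrative sketch (not part of the deleted file; it assumes an astroid 2.x install where AstroidManager, AstroidBuilder and the exceptions module are importable, and the module name used is hypothetical), the API could be exercised like this:

    from astroid import exceptions
    from astroid.builder import AstroidBuilder
    from astroid.manager import AstroidManager

    manager = AstroidManager()                       # Borg pattern: every instance shares state
    assert AstroidManager().astroid_cache is manager.astroid_cache

    def stub_hook(modname):
        # Hypothetical hook: resolve one unimportable module with an empty stub module,
        # otherwise raise AstroidBuildingError as the contract above requires.
        if modname == "definitely_not_installed":
            return AstroidBuilder(manager).string_build("", modname)
        raise exceptions.AstroidBuildingError("unknown module {modname}", modname=modname)

    manager.register_failed_import_hook(stub_hook)
    print(manager.ast_from_module_name("definitely_not_installed").name)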
diff --git a/venv/Lib/site-packages/astroid/mixins.py b/venv/Lib/site-packages/astroid/mixins.py
deleted file mode 100644
index 497a840..0000000
--- a/venv/Lib/site-packages/astroid/mixins.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""This module contains some mixins for the different nodes.
-"""
-import itertools
-
-from astroid import decorators
-from astroid import exceptions
-
-
-class BlockRangeMixIn:
-    """Mixin overriding block range computation."""
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- return self.lineno
-
- def _elsed_block_range(self, lineno, orelse, last=None):
-        """Handle the block line number range for try/finally, for, if and
-        while statements.
-        """
- if lineno == self.fromlineno:
- return lineno, lineno
- if orelse:
- if lineno >= orelse[0].fromlineno:
- return lineno, orelse[-1].tolineno
- return lineno, orelse[0].fromlineno - 1
- return lineno, last or self.tolineno
-
-
-class FilterStmtsMixin:
- """Mixin for statement filtering and assignment type"""
-
- def _get_filtered_stmts(self, _, node, _stmts, mystmt):
- """method used in _filter_stmts to get statements and trigger break"""
- if self.statement() is mystmt:
- # original node's statement is the assignment, only keep
- # current node (gen exp, list comp)
- return [node], True
- return _stmts, False
-
- def assign_type(self):
- return self
-
-
-class AssignTypeMixin:
- def assign_type(self):
- return self
-
- def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
- """method used in filter_stmts"""
- if self is mystmt:
- return _stmts, True
- if self.statement() is mystmt:
- # original node's statement is the assignment, only keep
- # current node (gen exp, list comp)
- return [node], True
- return _stmts, False
-
-
-class ParentAssignTypeMixin(AssignTypeMixin):
- def assign_type(self):
- return self.parent.assign_type()
-
-
-class ImportFromMixin(FilterStmtsMixin):
- """MixIn for From and Import Nodes"""
-
- def _infer_name(self, frame, name):
- return name
-
- def do_import_module(self, modname=None):
- """return the ast for a module whose name is <modname> imported by <self>
- """
- # handle special case where we are on a package node importing a module
- # using the same name as the package, which may end in an infinite loop
- # on relative imports
- # XXX: no more needed ?
- mymodule = self.root()
-        level = getattr(self, "level", None)  # Import nodes have no level
- if modname is None:
- modname = self.modname
-        # XXX we should investigate deeper whether we really want to check
-        # importing itself: modname and mymodule.name may be relative or absolute
- if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
- # FIXME: we used to raise InferenceError here, but why ?
- return mymodule
-
- return mymodule.import_module(
- modname, level=level, relative_only=level and level >= 1
- )
-
- def real_name(self, asname):
- """get name from 'as' name"""
- for name, _asname in self.names:
- if name == "*":
- return asname
- if not _asname:
- name = name.split(".", 1)[0]
- _asname = name
- if asname == _asname:
- return name
- raise exceptions.AttributeInferenceError(
- "Could not find original name for {attribute} in {target!r}",
- target=self,
- attribute=asname,
- )
-
-
-class MultiLineBlockMixin:
- """Mixin for nodes with multi-line blocks, e.g. For and FunctionDef.
- Note that this does not apply to every node with a `body` field.
- For instance, an If node has a multi-line body, but the body of an
- IfExpr is not multi-line, and hence cannot contain Return nodes,
- Assign nodes, etc.
- """
-
- @decorators.cachedproperty
- def _multi_line_blocks(self):
- return tuple(getattr(self, field) for field in self._multi_line_block_fields)
-
- def _get_return_nodes_skip_functions(self):
- for block in self._multi_line_blocks:
- for child_node in block:
- if child_node.is_function:
- continue
- yield from child_node._get_return_nodes_skip_functions()
-
- def _get_yield_nodes_skip_lambdas(self):
- for block in self._multi_line_blocks:
- for child_node in block:
- if child_node.is_lambda:
- continue
- yield from child_node._get_yield_nodes_skip_lambdas()
-
- @decorators.cached
- def _get_assign_nodes(self):
- children_assign_nodes = (
- child_node._get_assign_nodes()
- for block in self._multi_line_blocks
- for child_node in block
- )
- return list(itertools.chain.from_iterable(children_assign_nodes))
-
-
-class NoChildrenMixin:
- """Mixin for nodes with no children, e.g. Pass."""
-
- def get_children(self):
- yield from ()
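These mixins are folded into the concrete node classes; ImportFromMixin in particular backs the Import and ImportFrom nodes. A small illustrative sketch (not part of the deleted file; it assumes astroid.extract_node is available) of the real_name/do_import_module behaviour documented above:

    import astroid

    imp = astroid.extract_node("import os.path as osp")
    print(imp.real_name("osp"))           # "os.path": maps the 'as' alias back to the real name

    frm = astroid.extract_node("from collections import OrderedDict as OD")
    print(frm.real_name("OD"))            # "OrderedDict"
    print(frm.do_import_module().name)    # "collections": AST of the module being imported from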
diff --git a/venv/Lib/site-packages/astroid/modutils.py b/venv/Lib/site-packages/astroid/modutils.py
deleted file mode 100644
index 0c009b1..0000000
--- a/venv/Lib/site-packages/astroid/modutils.py
+++ /dev/null
@@ -1,698 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2014 Denis Laxalde <denis.laxalde@logilab.fr>
-# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2018 Mario Corchero <mcorcherojim@bloomberg.net>
-# Copyright (c) 2018 Mario Corchero <mariocj89@gmail.com>
-# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Python modules manipulation utility functions.
-
-:type PY_SOURCE_EXTS: tuple(str)
-:var PY_SOURCE_EXTS: list of possible python source file extensions
-
-:type STD_LIB_DIRS: set of str
-:var STD_LIB_DIRS: directories where standard modules are located
-
-:type BUILTIN_MODULES: dict
-:var BUILTIN_MODULES: dictionary with builtin module names as keys
-"""
-import imp
-import os
-import platform
-import sys
-import itertools
-from distutils.sysconfig import get_python_lib # pylint: disable=import-error
-
-# pylint: disable=import-error, no-name-in-module
-from distutils.errors import DistutilsPlatformError
-
-# distutils is replaced by virtualenv with a module that does
-# weird path manipulations in order to get to the
-# real distutils module.
-
-from .interpreter._import import spec
-from .interpreter._import import util
-
-if sys.platform.startswith("win"):
- PY_SOURCE_EXTS = ("py", "pyw")
- PY_COMPILED_EXTS = ("dll", "pyd")
-else:
- PY_SOURCE_EXTS = ("py",)
- PY_COMPILED_EXTS = ("so",)
-
-
-try:
- # The explicit sys.prefix is to work around a patch in virtualenv that
- # replaces the 'real' sys.prefix (i.e. the location of the binary)
- # with the prefix from which the virtualenv was created. This throws
- # off the detection logic for standard library modules, thus the
- # workaround.
- STD_LIB_DIRS = {
- get_python_lib(standard_lib=True, prefix=sys.prefix),
- # Take care of installations where exec_prefix != prefix.
- get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
- get_python_lib(standard_lib=True),
- }
-# get_python_lib(standard_lib=1) is not available on pypy, so fall back to an
-# empty STD_LIB_DIRS; see https://bugs.pypy.org/issue1164
-except DistutilsPlatformError:
- STD_LIB_DIRS = set()
-
-if os.name == "nt":
- STD_LIB_DIRS.add(os.path.join(sys.prefix, "dlls"))
- try:
- # real_prefix is defined when running inside virtual environments,
- # created with the **virtualenv** library.
- STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "dlls"))
- except AttributeError:
- # sys.base_exec_prefix is always defined, but in a virtual environment
- # created with the stdlib **venv** module, it points to the original
- # installation, if the virtual env is activated.
- try:
- STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, "dlls"))
- except AttributeError:
- pass
-
-if platform.python_implementation() == "PyPy":
- _root = os.path.join(sys.prefix, "lib_pypy")
- STD_LIB_DIRS.add(_root)
- try:
- # real_prefix is defined when running inside virtualenv.
- STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "lib_pypy"))
- except AttributeError:
- pass
- del _root
-if os.name == "posix":
-    # Need the real prefix if we're under a virtualenv, otherwise
- # the usual one will do.
- try:
- prefix = sys.real_prefix
- except AttributeError:
- prefix = sys.prefix
-
- def _posix_path(path):
- base_python = "python%d.%d" % sys.version_info[:2]
- return os.path.join(prefix, path, base_python)
-
- STD_LIB_DIRS.add(_posix_path("lib"))
- if sys.maxsize > 2 ** 32:
- # This tries to fix a problem with /usr/lib64 builds,
- # where systems are running both 32-bit and 64-bit code
- # on the same machine, which reflects into the places where
- # standard library could be found. More details can be found
- # here http://bugs.python.org/issue1294959.
- # An easy reproducing case would be
- # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753
- STD_LIB_DIRS.add(_posix_path("lib64"))
-
-EXT_LIB_DIRS = {get_python_lib(), get_python_lib(True)}
-IS_JYTHON = platform.python_implementation() == "Jython"
-BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True)
-
-
-class NoSourceFile(Exception):
- """exception raised when we are not able to get a python
- source file for a precompiled file
- """
-
-
-def _normalize_path(path):
- return os.path.normcase(os.path.abspath(path))
-
-
-def _canonicalize_path(path):
- return os.path.realpath(os.path.expanduser(path))
-
-
-def _path_from_filename(filename, is_jython=IS_JYTHON):
- if not is_jython:
- return filename
- head, has_pyclass, _ = filename.partition("$py.class")
- if has_pyclass:
- return head + ".py"
- return filename
-
-
-def _handle_blacklist(blacklist, dirnames, filenames):
- """remove files/directories in the black list
-
- dirnames/filenames are usually from os.walk
- """
- for norecurs in blacklist:
- if norecurs in dirnames:
- dirnames.remove(norecurs)
- elif norecurs in filenames:
- filenames.remove(norecurs)
-
-
-_NORM_PATH_CACHE = {}
-
-
-def _cache_normalize_path(path):
- """abspath with caching"""
- # _module_file calls abspath on every path in sys.path every time it's
- # called; on a larger codebase this easily adds up to half a second just
- # assembling path components. This cache alleviates that.
- try:
- return _NORM_PATH_CACHE[path]
- except KeyError:
- if not path: # don't cache result for ''
- return _normalize_path(path)
- result = _NORM_PATH_CACHE[path] = _normalize_path(path)
- return result
-
-
-def load_module_from_name(dotted_name, path=None, use_sys=True):
- """Load a Python module from its name.
-
- :type dotted_name: str
- :param dotted_name: python name of a module or package
-
- :type path: list or None
- :param path:
-    optional list of paths where the module or package should be
- searched (use sys.path if nothing or None is given)
-
- :type use_sys: bool
- :param use_sys:
- boolean indicating whether the sys.modules dictionary should be
- used or not
-
-
- :raise ImportError: if the module or package is not found
-
- :rtype: module
- :return: the loaded module
- """
- return load_module_from_modpath(dotted_name.split("."), path, use_sys)
-
-
-def load_module_from_modpath(parts, path=None, use_sys=1):
- """Load a python module from its split name.
-
- :type parts: list(str) or tuple(str)
- :param parts:
- python name of a module or package split on '.'
-
- :type path: list or None
- :param path:
-    optional list of paths where the module or package should be
- searched (use sys.path if nothing or None is given)
-
- :type use_sys: bool
- :param use_sys:
- boolean indicating whether the sys.modules dictionary should be used or not
-
- :raise ImportError: if the module or package is not found
-
- :rtype: module
- :return: the loaded module
- """
- if use_sys:
- try:
- return sys.modules[".".join(parts)]
- except KeyError:
- pass
- modpath = []
- prevmodule = None
- for part in parts:
- modpath.append(part)
- curname = ".".join(modpath)
- module = None
- if len(modpath) != len(parts):
- # even with use_sys=False, should try to get outer packages from sys.modules
- module = sys.modules.get(curname)
- elif use_sys:
- # because it may have been indirectly loaded through a parent
- module = sys.modules.get(curname)
- if module is None:
- mp_file, mp_filename, mp_desc = imp.find_module(part, path)
- module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
- # mp_file still needs to be closed.
- if mp_file:
- mp_file.close()
- if prevmodule:
- setattr(prevmodule, part, module)
- _file = getattr(module, "__file__", "")
- prevmodule = module
- if not _file and util.is_namespace(curname):
- continue
- if not _file and len(modpath) != len(parts):
- raise ImportError("no module in %s" % ".".join(parts[len(modpath) :]))
- path = [os.path.dirname(_file)]
- return module
-
-
-def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None):
-    """Load a Python module from its path.
-
- :type filepath: str
- :param filepath: path to the python module or package
-
- :type path: list or None
- :param path:
-    optional list of paths where the module or package should be
- searched (use sys.path if nothing or None is given)
-
- :type use_sys: bool
- :param use_sys:
- boolean indicating whether the sys.modules dictionary should be
- used or not
-
-
- :raise ImportError: if the module or package is not found
-
- :rtype: module
- :return: the loaded module
- """
- modpath = modpath_from_file(filepath, extrapath)
- return load_module_from_modpath(modpath, path, use_sys)
-
-
-def check_modpath_has_init(path, mod_path):
-    """check that there is an __init__.py file along each part of the module path"""
- modpath = []
- for part in mod_path:
- modpath.append(part)
- path = os.path.join(path, part)
- if not _has_init(path):
- old_namespace = util.is_namespace(".".join(modpath))
- if not old_namespace:
- return False
- return True
-
-
-def _get_relative_base_path(filename, path_to_check):
- """Extracts the relative mod path of the file to import from
-
- Check if a file is within the passed in path and if so, returns the
- relative mod path from the one passed in.
-
-    If the filename is not in path_to_check, returns None
-
-    Note that this function checks both the absolute path and the real path
-    of the file; this allows finding the relative base path even if the file
-    is a symlink to a file in the passed-in path
-
- Examples:
- _get_relative_base_path("/a/b/c/d.py", "/a/b") -> ["c","d"]
- _get_relative_base_path("/a/b/c/d.py", "/dev") -> None
- """
- importable_path = None
- path_to_check = os.path.normcase(path_to_check)
- abs_filename = os.path.abspath(filename)
- if os.path.normcase(abs_filename).startswith(path_to_check):
- importable_path = abs_filename
-
- real_filename = os.path.realpath(filename)
- if os.path.normcase(real_filename).startswith(path_to_check):
- importable_path = real_filename
-
- if importable_path:
- base_path = os.path.splitext(importable_path)[0]
- relative_base_path = base_path[len(path_to_check) :]
- return [pkg for pkg in relative_base_path.split(os.sep) if pkg]
-
- return None
-
-
-def modpath_from_file_with_callback(filename, extrapath=None, is_package_cb=None):
- filename = os.path.expanduser(_path_from_filename(filename))
-
- if extrapath is not None:
- for path_ in itertools.chain(map(_canonicalize_path, extrapath), extrapath):
- path = os.path.abspath(path_)
- if not path:
- continue
- submodpath = _get_relative_base_path(filename, path)
- if not submodpath:
- continue
- if is_package_cb(path, submodpath[:-1]):
- return extrapath[path_].split(".") + submodpath
-
- for path in itertools.chain(map(_canonicalize_path, sys.path), sys.path):
- path = _cache_normalize_path(path)
- if not path:
- continue
- modpath = _get_relative_base_path(filename, path)
- if not modpath:
- continue
- if is_package_cb(path, modpath[:-1]):
- return modpath
-
- raise ImportError(
- "Unable to find module for %s in %s" % (filename, ", \n".join(sys.path))
- )
-
-
-def modpath_from_file(filename, extrapath=None):
- """given a file path return the corresponding split module's name
- (i.e name of a module or package split on '.')
-
- :type filename: str
- :param filename: file's path for which we want the module's name
-
- :type extrapath: dict
- :param extrapath:
- optional extra search path, with path as key and package name for the path
-        as value. This is usually useful to handle packages split across multiple
-        directories using the __path__ trick.
-
-
- :raise ImportError:
- if the corresponding module's name has not been found
-
- :rtype: list(str)
- :return: the corresponding split module's name
- """
- return modpath_from_file_with_callback(filename, extrapath, check_modpath_has_init)
-
-
-def file_from_modpath(modpath, path=None, context_file=None):
- return file_info_from_modpath(modpath, path, context_file).location
-
-
-def file_info_from_modpath(modpath, path=None, context_file=None):
- """given a mod path (i.e. split module / package name), return the
- corresponding file, giving priority to source file over precompiled
- file if it exists
-
- :type modpath: list or tuple
- :param modpath:
- split module's name (i.e name of a module or package split
- on '.')
- (this means explicit relative imports that start with dots have
- empty strings in this list!)
-
- :type path: list or None
- :param path:
-    optional list of paths where the module or package should be
- searched (use sys.path if nothing or None is given)
-
- :type context_file: str or None
- :param context_file:
- context file to consider, necessary if the identifier has been
- introduced using a relative import unresolvable in the actual
- context (i.e. modutils)
-
- :raise ImportError: if there is no such module in the directory
-
- :rtype: (str or None, import type)
- :return:
- the path to the module's file or None if it's an integrated
- builtin module such as 'sys'
- """
- if context_file is not None:
- context = os.path.dirname(context_file)
- else:
- context = context_file
- if modpath[0] == "xml":
- # handle _xmlplus
- try:
- return _spec_from_modpath(["_xmlplus"] + modpath[1:], path, context)
- except ImportError:
- return _spec_from_modpath(modpath, path, context)
- elif modpath == ["os", "path"]:
- # FIXME: currently ignoring search_path...
- return spec.ModuleSpec(
- name="os.path", location=os.path.__file__, module_type=imp.PY_SOURCE
- )
- return _spec_from_modpath(modpath, path, context)
-
-
-def get_module_part(dotted_name, context_file=None):
- """given a dotted name return the module part of the name :
-
- >>> get_module_part('astroid.as_string.dump')
- 'astroid.as_string'
-
- :type dotted_name: str
- :param dotted_name: full name of the identifier we are interested in
-
- :type context_file: str or None
- :param context_file:
- context file to consider, necessary if the identifier has been
- introduced using a relative import unresolvable in the actual
- context (i.e. modutils)
-
-
- :raise ImportError: if there is no such module in the directory
-
- :rtype: str or None
- :return:
-        the module part of the name, or None if we have not been able to
-        import the given name at all
-
- XXX: deprecated, since it doesn't handle package precedence over module
- (see #10066)
- """
- # os.path trick
- if dotted_name.startswith("os.path"):
- return "os.path"
- parts = dotted_name.split(".")
- if context_file is not None:
-        # first check for builtin module which won't be considered later
- # in that case (path != None)
- if parts[0] in BUILTIN_MODULES:
- if len(parts) > 2:
- raise ImportError(dotted_name)
- return parts[0]
- # don't use += or insert, we want a new list to be created !
- path = None
- starti = 0
- if parts[0] == "":
- assert (
- context_file is not None
- ), "explicit relative import, but no context_file?"
- path = [] # prevent resolving the import non-relatively
- starti = 1
- while parts[starti] == "": # for all further dots: change context
- starti += 1
- context_file = os.path.dirname(context_file)
- for i in range(starti, len(parts)):
- try:
- file_from_modpath(
- parts[starti : i + 1], path=path, context_file=context_file
- )
- except ImportError:
- if i < max(1, len(parts) - 2):
- raise
- return ".".join(parts[:i])
- return dotted_name
-
-
-def get_module_files(src_directory, blacklist, list_all=False):
- """given a package directory return a list of all available python
-    module files in the package and its subpackages
-
- :type src_directory: str
- :param src_directory:
- path of the directory corresponding to the package
-
- :type blacklist: list or tuple
-    :param blacklist:
-        iterable of file or directory names to ignore.
-
- :type list_all: bool
- :param list_all:
- get files from all paths, including ones without __init__.py
-
- :rtype: list
- :return:
-        the list of all available python module files in the package and
- its subpackages
- """
- files = []
- for directory, dirnames, filenames in os.walk(src_directory):
- if directory in blacklist:
- continue
- _handle_blacklist(blacklist, dirnames, filenames)
- # check for __init__.py
- if not list_all and "__init__.py" not in filenames:
- dirnames[:] = ()
- continue
- for filename in filenames:
- if _is_python_file(filename):
- src = os.path.join(directory, filename)
- files.append(src)
- return files
-
-
-def get_source_file(filename, include_no_ext=False):
- """given a python module's file name return the matching source file
- name (the filename will be returned identically if it's already an
- absolute path to a python source file...)
-
- :type filename: str
- :param filename: python module's file name
-
-
- :raise NoSourceFile: if no source file exists on the file system
-
- :rtype: str
- :return: the absolute path of the source file if it exists
- """
- filename = os.path.abspath(_path_from_filename(filename))
- base, orig_ext = os.path.splitext(filename)
- for ext in PY_SOURCE_EXTS:
- source_path = "%s.%s" % (base, ext)
- if os.path.exists(source_path):
- return source_path
- if include_no_ext and not orig_ext and os.path.exists(base):
- return base
- raise NoSourceFile(filename)
-
-
-def is_python_source(filename):
- """
-    :rtype: bool
-    :return: True if the filename is a python source file
- """
- return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS
-
-
-def is_standard_module(modname, std_path=None):
- """try to guess if a module is a standard python module (by default,
- see `std_path` parameter's description)
-
- :type modname: str
- :param modname: name of the module we are interested in
-
- :type std_path: list(str) or tuple(str)
-    :param std_path: list of paths considered as standard
-
-
- :rtype: bool
- :return:
- true if the module:
-    - is located in one of the directories listed in `std_path`
- - is a built-in module
- """
- modname = modname.split(".")[0]
- try:
- filename = file_from_modpath([modname])
- except ImportError:
- # import failed, i'm probably not so wrong by supposing it's
- # not standard...
- return False
- # modules which are not living in a file are considered standard
- # (sys and __builtin__ for instance)
- if filename is None:
- # we assume there are no namespaces in stdlib
- return not util.is_namespace(modname)
- filename = _normalize_path(filename)
- for path in EXT_LIB_DIRS:
- if filename.startswith(_cache_normalize_path(path)):
- return False
- if std_path is None:
- std_path = STD_LIB_DIRS
- for path in std_path:
- if filename.startswith(_cache_normalize_path(path)):
- return True
- return False
-
-
-def is_relative(modname, from_file):
- """return true if the given module name is relative to the given
- file name
-
- :type modname: str
- :param modname: name of the module we are interested in
-
- :type from_file: str
- :param from_file:
- path of the module from which modname has been imported
-
- :rtype: bool
- :return:
- true if the module has been imported relatively to `from_file`
- """
- if not os.path.isdir(from_file):
- from_file = os.path.dirname(from_file)
- if from_file in sys.path:
- return False
- try:
- stream, _, _ = imp.find_module(modname.split(".")[0], [from_file])
-
- # Close the stream to avoid ResourceWarnings.
- if stream:
- stream.close()
- return True
- except ImportError:
- return False
-
-
-# internal only functions #####################################################
-
-
-def _spec_from_modpath(modpath, path=None, context=None):
- """given a mod path (i.e. split module / package name), return the
- corresponding spec
-
- this function is used internally, see `file_from_modpath`'s
- documentation for more information
- """
- assert modpath
- location = None
- if context is not None:
- try:
- found_spec = spec.find_spec(modpath, [context])
- location = found_spec.location
- except ImportError:
- found_spec = spec.find_spec(modpath, path)
- location = found_spec.location
- else:
- found_spec = spec.find_spec(modpath, path)
- if found_spec.type == spec.ModuleType.PY_COMPILED:
- try:
- location = get_source_file(found_spec.location)
- return found_spec._replace(
- location=location, type=spec.ModuleType.PY_SOURCE
- )
- except NoSourceFile:
- return found_spec._replace(location=location)
- elif found_spec.type == spec.ModuleType.C_BUILTIN:
- # integrated builtin module
- return found_spec._replace(location=None)
- elif found_spec.type == spec.ModuleType.PKG_DIRECTORY:
- location = _has_init(found_spec.location)
- return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE)
- return found_spec
-
-
-def _is_python_file(filename):
- """return true if the given filename should be considered as a python file
-
- .pyc and .pyo are ignored
- """
- return filename.endswith((".py", ".so", ".pyd", ".pyw"))
-
-
-def _has_init(directory):
- """if the given directory has a valid __init__ file, return its path,
- else return None
- """
- mod_or_pack = os.path.join(directory, "__init__")
- for ext in PY_SOURCE_EXTS + ("pyc", "pyo"):
- if os.path.exists(mod_or_pack + "." + ext):
- return mod_or_pack + "." + ext
- return None
-
-
-def is_namespace(specobj):
- return specobj.type == spec.ModuleType.PY_NAMESPACE
-
-
-def is_directory(specobj):
- return specobj.type == spec.ModuleType.PKG_DIRECTORY
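For orientation, a brief illustrative sketch (not part of the deleted file) of the modutils helpers documented above; the exact results depend on the interpreter and install layout:

    from astroid import modutils

    print(modutils.get_module_part("astroid.as_string.dump"))  # "astroid.as_string"
    print(modutils.is_standard_module("os.path"))              # True: lives in the stdlib dirs
    print(modutils.is_standard_module("astroid"))              # False: installed under site-packages
    print(modutils.modpath_from_file(modutils.__file__))       # ["astroid", "modutils"]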
diff --git a/venv/Lib/site-packages/astroid/node_classes.py b/venv/Lib/site-packages/astroid/node_classes.py
deleted file mode 100644
index 994c96b..0000000
--- a/venv/Lib/site-packages/astroid/node_classes.py
+++ /dev/null
@@ -1,4775 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
-# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2016 Dave Baum <dbaum@google.com>
-# Copyright (c) 2017-2018 Ashley Whetter <ashley@awhetter.co.uk>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 rr- <rr-@sakuya.pl>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 brendanator <brendan.maginnis@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-# pylint: disable=too-many-lines; https://github.com/PyCQA/astroid/issues/465
-
-"""Module for some node classes. More nodes in scoped_nodes.py
-"""
-
-import abc
-import builtins as builtins_mod
-import itertools
-import pprint
-import sys
-from functools import lru_cache, singledispatch as _singledispatch
-
-from astroid import as_string
-from astroid import bases
-from astroid import context as contextmod
-from astroid import decorators
-from astroid import exceptions
-from astroid import manager
-from astroid import mixins
-from astroid import util
-
-
-BUILTINS = builtins_mod.__name__
-MANAGER = manager.AstroidManager()
-PY38 = sys.version_info[:2] >= (3, 8)
-
-
-def _is_const(value):
- return isinstance(value, tuple(CONST_CLS))
-
-
-@decorators.raise_if_nothing_inferred
-def unpack_infer(stmt, context=None):
- """recursively generate nodes inferred by the given statement.
- If the inferred value is a list or a tuple, recurse on the elements
- """
- if isinstance(stmt, (List, Tuple)):
- for elt in stmt.elts:
- if elt is util.Uninferable:
- yield elt
- continue
- yield from unpack_infer(elt, context)
- return dict(node=stmt, context=context)
- # if inferred is a final node, return it and stop
- inferred = next(stmt.infer(context))
- if inferred is stmt:
- yield inferred
- return dict(node=stmt, context=context)
- # else, infer recursively, except Uninferable object that should be returned as is
- for inferred in stmt.infer(context):
- if inferred is util.Uninferable:
- yield inferred
- else:
- yield from unpack_infer(inferred, context)
-
- return dict(node=stmt, context=context)
-
-
-def are_exclusive(
- stmt1, stmt2, exceptions=None
-): # pylint: disable=redefined-outer-name
- """return true if the two given statements are mutually exclusive
-
-    branches and check whether one of the statements is in an exception handler
-    catching one of the given exceptions.
- one of the given exceptions.
-
- algorithm :
- 1) index stmt1's parents
- 2) climb among stmt2's parents until we find a common parent
-    3) if the common parent is an If or TryExcept statement, check whether the
-       nodes are in exclusive branches
- """
- # index stmt1's parents
- stmt1_parents = {}
- children = {}
- node = stmt1.parent
- previous = stmt1
- while node:
- stmt1_parents[node] = 1
- children[node] = previous
- previous = node
- node = node.parent
- # climb among stmt2's parents until we find a common parent
- node = stmt2.parent
- previous = stmt2
- while node:
- if node in stmt1_parents:
- # if the common parent is a If or TryExcept statement, look if
- # nodes are in exclusive branches
- if isinstance(node, If) and exceptions is None:
- if (
- node.locate_child(previous)[1]
- is not node.locate_child(children[node])[1]
- ):
- return True
- elif isinstance(node, TryExcept):
- c2attr, c2node = node.locate_child(previous)
- c1attr, c1node = node.locate_child(children[node])
- if c1node is not c2node:
- first_in_body_caught_by_handlers = (
- c2attr == "handlers"
- and c1attr == "body"
- and previous.catch(exceptions)
- )
- second_in_body_caught_by_handlers = (
- c2attr == "body"
- and c1attr == "handlers"
- and children[node].catch(exceptions)
- )
- first_in_else_other_in_handlers = (
- c2attr == "handlers" and c1attr == "orelse"
- )
- second_in_else_other_in_handlers = (
- c2attr == "orelse" and c1attr == "handlers"
- )
- if any(
- (
- first_in_body_caught_by_handlers,
- second_in_body_caught_by_handlers,
- first_in_else_other_in_handlers,
- second_in_else_other_in_handlers,
- )
- ):
- return True
- elif c2attr == "handlers" and c1attr == "handlers":
- return previous is not children[node]
- return False
- previous = node
- node = node.parent
- return False
-
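A small illustrative sketch (not part of the deleted file; built with astroid.parse) of the branch-exclusivity check above:

    import astroid
    from astroid.node_classes import are_exclusive

    module = astroid.parse("""
    if flag:
        a = 1
    else:
        a = 2
    """)
    if_node = module.body[0]
    then_stmt, else_stmt = if_node.body[0], if_node.orelse[0]
    print(are_exclusive(then_stmt, else_stmt))  # True: opposite If branches never both run
    print(are_exclusive(then_stmt, if_node))    # False: the If node encloses the statement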
-
-# getitem() helpers.
-
-_SLICE_SENTINEL = object()
-
-
-def _slice_value(index, context=None):
- """Get the value of the given slice index."""
-
- if isinstance(index, Const):
- if isinstance(index.value, (int, type(None))):
- return index.value
- elif index is None:
- return None
- else:
- # Try to infer what the index actually is.
- # Since we can't return all the possible values,
- # we'll stop at the first possible value.
- try:
- inferred = next(index.infer(context=context))
- except exceptions.InferenceError:
- pass
- else:
- if isinstance(inferred, Const):
- if isinstance(inferred.value, (int, type(None))):
- return inferred.value
-
- # Use a sentinel, because None can be a valid
- # value that this function can return,
- # as it is the case for unspecified bounds.
- return _SLICE_SENTINEL
-
-
-def _infer_slice(node, context=None):
- lower = _slice_value(node.lower, context)
- upper = _slice_value(node.upper, context)
- step = _slice_value(node.step, context)
- if all(elem is not _SLICE_SENTINEL for elem in (lower, upper, step)):
- return slice(lower, upper, step)
-
- raise exceptions.AstroidTypeError(
- message="Could not infer slice used in subscript",
- node=node,
- index=node.parent,
- context=context,
- )
-
-
-def _container_getitem(instance, elts, index, context=None):
- """Get a slice or an item, using the given *index*, for the given sequence."""
- try:
- if isinstance(index, Slice):
- index_slice = _infer_slice(index, context=context)
- new_cls = instance.__class__()
- new_cls.elts = elts[index_slice]
- new_cls.parent = instance.parent
- return new_cls
- if isinstance(index, Const):
- return elts[index.value]
- except IndexError as exc:
- raise exceptions.AstroidIndexError(
- message="Index {index!s} out of range",
- node=instance,
- index=index,
- context=context,
- ) from exc
- except TypeError as exc:
- raise exceptions.AstroidTypeError(
- message="Type error {error!r}", node=instance, index=index, context=context
- ) from exc
-
- raise exceptions.AstroidTypeError("Could not use %s as subscript index" % index)
-
-
-OP_PRECEDENCE = {
- op: precedence
- for precedence, ops in enumerate(
- [
- ["Lambda"], # lambda x: x + 1
- ["IfExp"], # 1 if True else 2
- ["or"],
- ["and"],
- ["not"],
- ["Compare"], # in, not in, is, is not, <, <=, >, >=, !=, ==
- ["|"],
- ["^"],
- ["&"],
- ["<<", ">>"],
- ["+", "-"],
- ["*", "@", "/", "//", "%"],
- ["UnaryOp"], # +, -, ~
- ["**"],
- ["Await"],
- ]
- )
- for op in ops
-}
-
-
-class NodeNG:
- """ A node of the new Abstract Syntax Tree (AST).
-
- This is the base class for all Astroid node classes.
- """
-
- is_statement = False
- """Whether this node indicates a statement.
-
- :type: bool
- """
- optional_assign = False # True for For (and for Comprehension if py <3.0)
- """Whether this node optionally assigns a variable.
-
-    This is for loop assignments because a loop won't necessarily perform an
-    assignment if it has no iterations.
-    This is also the case for comprehensions in Python 2.
-
- :type: bool
- """
- is_function = False # True for FunctionDef nodes
- """Whether this node indicates a function.
-
- :type: bool
- """
- is_lambda = False
- # Attributes below are set by the builder module or by raw factories
- lineno = None
- """The line that this node appears on in the source code.
-
- :type: int or None
- """
- col_offset = None
- """The column that this node appears on in the source code.
-
- :type: int or None
- """
- parent = None
- """The parent node in the syntax tree.
-
- :type: NodeNG or None
- """
- _astroid_fields = ()
- """Node attributes that contain child nodes.
-
- This is redefined in most concrete classes.
-
- :type: tuple(str)
- """
- _other_fields = ()
- """Node attributes that do not contain child nodes.
-
- :type: tuple(str)
- """
- _other_other_fields = ()
- """Attributes that contain AST-dependent fields.
-
- :type: tuple(str)
- """
- # instance specific inference function infer(node, context)
- _explicit_inference = None
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- """
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.lineno = lineno
- self.col_offset = col_offset
- self.parent = parent
-
- def infer(self, context=None, **kwargs):
- """Get a generator of the inferred values.
-
- This is the main entry point to the inference system.
-
- .. seealso:: :ref:`inference`
-
- If the instance has some explicit inference function set, it will be
- called instead of the default interface.
-
- :returns: The inferred values.
- :rtype: iterable
- """
- if context is not None:
- context = context.extra_context.get(self, context)
- if self._explicit_inference is not None:
- # explicit_inference is not bound, give it self explicitly
- try:
- # pylint: disable=not-callable
- return self._explicit_inference(self, context, **kwargs)
- except exceptions.UseInferenceDefault:
- pass
-
- if not context:
- return self._infer(context, **kwargs)
-
- key = (self, context.lookupname, context.callcontext, context.boundnode)
- if key in context.inferred:
- return iter(context.inferred[key])
-
- gen = context.cache_generator(key, self._infer(context, **kwargs))
- return util.limit_inference(gen, MANAGER.max_inferable_values)
-
- def _repr_name(self):
- """Get a name for nice representation.
-
- This is either :attr:`name`, :attr:`attrname`, or the empty string.
-
- :returns: The nice name.
- :rtype: str
- """
- names = {"name", "attrname"}
- if all(name not in self._astroid_fields for name in names):
- return getattr(self, "name", getattr(self, "attrname", ""))
- return ""
-
- def __str__(self):
- rname = self._repr_name()
- cname = type(self).__name__
- if rname:
- string = "%(cname)s.%(rname)s(%(fields)s)"
- alignment = len(cname) + len(rname) + 2
- else:
- string = "%(cname)s(%(fields)s)"
- alignment = len(cname) + 1
- result = []
- for field in self._other_fields + self._astroid_fields:
- value = getattr(self, field)
- width = 80 - len(field) - alignment
- lines = pprint.pformat(value, indent=2, width=width).splitlines(True)
-
- inner = [lines[0]]
- for line in lines[1:]:
- inner.append(" " * alignment + line)
- result.append("%s=%s" % (field, "".join(inner)))
-
- return string % {
- "cname": cname,
- "rname": rname,
- "fields": (",\n" + " " * alignment).join(result),
- }
-
- def __repr__(self):
- rname = self._repr_name()
- if rname:
- string = "<%(cname)s.%(rname)s l.%(lineno)s at 0x%(id)x>"
- else:
- string = "<%(cname)s l.%(lineno)s at 0x%(id)x>"
- return string % {
- "cname": type(self).__name__,
- "rname": rname,
- "lineno": self.fromlineno,
- "id": id(self),
- }
-
- def accept(self, visitor):
- """Visit this node using the given visitor."""
- func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
- return func(self)
-
- def get_children(self):
- """Get the child nodes below this node.
-
- :returns: The children.
- :rtype: iterable(NodeNG)
- """
- for field in self._astroid_fields:
- attr = getattr(self, field)
- if attr is None:
- continue
- if isinstance(attr, (list, tuple)):
- yield from attr
- else:
- yield attr
-
- def last_child(self):
- """An optimized version of list(get_children())[-1]
-
- :returns: The last child, or None if no children exist.
- :rtype: NodeNG or None
- """
- for field in self._astroid_fields[::-1]:
- attr = getattr(self, field)
-            if not attr:  # None or empty list / tuple
- continue
- if isinstance(attr, (list, tuple)):
- return attr[-1]
-
- return attr
- return None
-
- def parent_of(self, node):
- """Check if this node is the parent of the given node.
-
- :param node: The node to check if it is the child.
- :type node: NodeNG
-
- :returns: True if this node is the parent of the given node,
- False otherwise.
- :rtype: bool
- """
- parent = node.parent
- while parent is not None:
- if self is parent:
- return True
- parent = parent.parent
- return False
-
- def statement(self):
-        """The first parent node, including self, marked as a statement node.
-
- :returns: The first parent statement.
- :rtype: NodeNG
- """
- if self.is_statement:
- return self
- return self.parent.statement()
-
- def frame(self):
- """The first parent frame node.
-
- A frame node is a :class:`Module`, :class:`FunctionDef`,
- or :class:`ClassDef`.
-
- :returns: The first parent frame node.
- :rtype: Module or FunctionDef or ClassDef
- """
- return self.parent.frame()
-
- def scope(self):
- """The first parent node defining a new scope.
-
- :returns: The first parent scope node.
- :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr
- """
- if self.parent:
- return self.parent.scope()
- return None
-
- def root(self):
- """Return the root node of the syntax tree.
-
- :returns: The root node.
- :rtype: Module
- """
- if self.parent:
- return self.parent.root()
- return self
-
- def child_sequence(self, child):
- """Search for the sequence that contains this child.
-
- :param child: The child node to search sequences for.
- :type child: NodeNG
-
- :returns: The sequence containing the given child node.
- :rtype: iterable(NodeNG)
-
- :raises AstroidError: If no sequence could be found that contains
- the given child.
- """
- for field in self._astroid_fields:
- node_or_sequence = getattr(self, field)
- if node_or_sequence is child:
- return [node_or_sequence]
- # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
- if (
- isinstance(node_or_sequence, (tuple, list))
- and child in node_or_sequence
- ):
- return node_or_sequence
-
- msg = "Could not find %s in %s's children"
- raise exceptions.AstroidError(msg % (repr(child), repr(self)))
-
- def locate_child(self, child):
- """Find the field of this node that contains the given child.
-
- :param child: The child node to search fields for.
- :type child: NodeNG
-
- :returns: A tuple of the name of the field that contains the child,
- and the sequence or node that contains the child node.
- :rtype: tuple(str, iterable(NodeNG) or NodeNG)
-
- :raises AstroidError: If no field could be found that contains
- the given child.
- """
- for field in self._astroid_fields:
- node_or_sequence = getattr(self, field)
- # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
- if child is node_or_sequence:
- return field, child
- if (
- isinstance(node_or_sequence, (tuple, list))
- and child in node_or_sequence
- ):
- return field, node_or_sequence
- msg = "Could not find %s in %s's children"
- raise exceptions.AstroidError(msg % (repr(child), repr(self)))
-
- # FIXME : should we merge child_sequence and locate_child ? locate_child
- # is only used in are_exclusive, child_sequence one time in pylint.
-
- def next_sibling(self):
- """The next sibling statement node.
-
- :returns: The next sibling statement node.
- :rtype: NodeNG or None
- """
- return self.parent.next_sibling()
-
- def previous_sibling(self):
- """The previous sibling statement.
-
- :returns: The previous sibling statement node.
- :rtype: NodeNG or None
- """
- return self.parent.previous_sibling()
-
- # these are lazy because they're relatively expensive to compute for every
- # single node, and they rarely get looked at
-
- @decorators.cachedproperty
- def fromlineno(self):
- """The first line that this node appears on in the source code.
-
- :type: int or None
- """
- if self.lineno is None:
- return self._fixed_source_line()
-
- return self.lineno
-
- @decorators.cachedproperty
- def tolineno(self):
- """The last line that this node appears on in the source code.
-
- :type: int or None
- """
- if not self._astroid_fields:
- # can't have children
- lastchild = None
- else:
- lastchild = self.last_child()
- if lastchild is None:
- return self.fromlineno
-
- return lastchild.tolineno
-
- def _fixed_source_line(self):
- """Attempt to find the line that this node appears on.
-
- We need this method since not all nodes have :attr:`lineno` set.
-
- :returns: The line number of this node,
- or None if this could not be determined.
- :rtype: int or None
- """
- line = self.lineno
- _node = self
- try:
- while line is None:
- _node = next(_node.get_children())
- line = _node.lineno
- except StopIteration:
- _node = self.parent
- while _node and line is None:
- line = _node.lineno
- _node = _node.parent
- return line
-
- def block_range(self, lineno):
- """Get a range from the given line number to where this node ends.
-
- :param lineno: The line number to start the range at.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to,
- starting at the given line number.
- :rtype: tuple(int, int or None)
- """
- return lineno, self.tolineno
-
- def set_local(self, name, stmt):
- """Define that the given name is declared in the given statement node.
-
- This definition is stored on the parent scope node.
-
- .. seealso:: :meth:`scope`
-
- :param name: The name that is being defined.
- :type name: str
-
- :param stmt: The statement that defines the given name.
- :type stmt: NodeNG
- """
- self.parent.set_local(name, stmt)
-
- def nodes_of_class(self, klass, skip_klass=None):
- """Get the nodes (including this one or below) of the given types.
-
- :param klass: The types of node to search for.
- :type klass: builtins.type or tuple(builtins.type)
-
- :param skip_klass: The types of node to ignore. This is useful to ignore
- subclasses of :attr:`klass`.
- :type skip_klass: builtins.type or tuple(builtins.type)
-
- :returns: The node of the given types.
- :rtype: iterable(NodeNG)
- """
- if isinstance(self, klass):
- yield self
-
- if skip_klass is None:
- for child_node in self.get_children():
- yield from child_node.nodes_of_class(klass, skip_klass)
-
- return
-
- for child_node in self.get_children():
- if isinstance(child_node, skip_klass):
- continue
- yield from child_node.nodes_of_class(klass, skip_klass)
-
- @decorators.cached
- def _get_assign_nodes(self):
- return []
-
- def _get_name_nodes(self):
- for child_node in self.get_children():
- yield from child_node._get_name_nodes()
-
- def _get_return_nodes_skip_functions(self):
- yield from ()
-
- def _get_yield_nodes_skip_lambdas(self):
- yield from ()
-
- def _infer_name(self, frame, name):
- # overridden for ImportFrom, Import, Global, TryExcept and Arguments
- pass
-
- def _infer(self, context=None):
- """we don't know how to resolve a statement by default"""
- # this method is overridden by most concrete classes
- raise exceptions.InferenceError(
- "No inference function for {node!r}.", node=self, context=context
- )
-
- def inferred(self):
- """Get a list of the inferred values.
-
- .. seealso:: :ref:`inference`
-
- :returns: The inferred values.
- :rtype: list
- """
- return list(self.infer())
-
- def instantiate_class(self):
- """Instantiate an instance of the defined class.
-
- .. note::
-
- On anything other than a :class:`ClassDef` this will return self.
-
- :returns: An instance of the defined class.
- :rtype: object
- """
- return self
-
- def has_base(self, node):
- """Check if this node inherits from the given type.
-
- :param node: The node defining the base to look for.
- Usually this is a :class:`Name` node.
- :type node: NodeNG
- """
- return False
-
- def callable(self):
- """Whether this node defines something that is callable.
-
- :returns: True if this defines something that is callable,
- False otherwise.
- :rtype: bool
- """
- return False
-
- def eq(self, value):
- return False
-
- def as_string(self):
- """Get the source code that this node represents.
-
- :returns: The source code.
- :rtype: str
- """
- return as_string.to_code(self)
-
- def repr_tree(
- self,
- ids=False,
- include_linenos=False,
- ast_state=False,
- indent=" ",
- max_depth=0,
- max_width=80,
- ):
- """Get a string representation of the AST from this node.
-
- :param ids: If true, includes the ids with the node type names.
- :type ids: bool
-
- :param include_linenos: If true, includes the line numbers and
- column offsets.
- :type include_linenos: bool
-
- :param ast_state: If true, includes information derived from
- the whole AST like local and global variables.
- :type ast_state: bool
-
- :param indent: A string to use to indent the output string.
- :type indent: str
-
- :param max_depth: If set to a positive integer, won't return
- nodes deeper than max_depth in the string.
- :type max_depth: int
-
- :param max_width: Attempt to format the output string to stay
- within this number of characters, but can exceed it under some
- circumstances. Only positive integer values are valid, the default is 80.
- :type max_width: int
-
- :returns: The string representation of the AST.
- :rtype: str
- """
- # pylint: disable=too-many-statements
- @_singledispatch
- def _repr_tree(node, result, done, cur_indent="", depth=1):
- """Outputs a representation of a non-tuple/list, non-node that's
- contained within an AST, including strings.
- """
- lines = pprint.pformat(
- node, width=max(max_width - len(cur_indent), 1)
- ).splitlines(True)
- result.append(lines[0])
- result.extend([cur_indent + line for line in lines[1:]])
- return len(lines) != 1
-
- # pylint: disable=unused-variable; doesn't understand singledispatch
- @_repr_tree.register(tuple)
- @_repr_tree.register(list)
- def _repr_seq(node, result, done, cur_indent="", depth=1):
- """Outputs a representation of a sequence that's contained within an AST."""
- cur_indent += indent
- result.append("[")
- if not node:
- broken = False
- elif len(node) == 1:
- broken = _repr_tree(node[0], result, done, cur_indent, depth)
- elif len(node) == 2:
- broken = _repr_tree(node[0], result, done, cur_indent, depth)
- if not broken:
- result.append(", ")
- else:
- result.append(",\n")
- result.append(cur_indent)
- broken = _repr_tree(node[1], result, done, cur_indent, depth) or broken
- else:
- result.append("\n")
- result.append(cur_indent)
- for child in node[:-1]:
- _repr_tree(child, result, done, cur_indent, depth)
- result.append(",\n")
- result.append(cur_indent)
- _repr_tree(node[-1], result, done, cur_indent, depth)
- broken = True
- result.append("]")
- return broken
-
- # pylint: disable=unused-variable; doesn't understand singledispatch
- @_repr_tree.register(NodeNG)
- def _repr_node(node, result, done, cur_indent="", depth=1):
-            """Outputs a string representation of an astroid node."""
- if node in done:
- result.append(
- indent
- + "<Recursion on %s with id=%s" % (type(node).__name__, id(node))
- )
- return False
- done.add(node)
-
- if max_depth and depth > max_depth:
- result.append("...")
- return False
- depth += 1
- cur_indent += indent
- if ids:
- result.append("%s<0x%x>(\n" % (type(node).__name__, id(node)))
- else:
- result.append("%s(" % type(node).__name__)
- fields = []
- if include_linenos:
- fields.extend(("lineno", "col_offset"))
- fields.extend(node._other_fields)
- fields.extend(node._astroid_fields)
- if ast_state:
- fields.extend(node._other_other_fields)
- if not fields:
- broken = False
- elif len(fields) == 1:
- result.append("%s=" % fields[0])
- broken = _repr_tree(
- getattr(node, fields[0]), result, done, cur_indent, depth
- )
- else:
- result.append("\n")
- result.append(cur_indent)
- for field in fields[:-1]:
- result.append("%s=" % field)
- _repr_tree(getattr(node, field), result, done, cur_indent, depth)
- result.append(",\n")
- result.append(cur_indent)
- result.append("%s=" % fields[-1])
- _repr_tree(getattr(node, fields[-1]), result, done, cur_indent, depth)
- broken = True
- result.append(")")
- return broken
-
- result = []
- _repr_tree(self, result, set())
- return "".join(result)
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- The boolean value of a node can have three
- possible values:
-
- * False: For instance, empty data structures,
- False, empty strings, instances which return
- explicitly False from the __nonzero__ / __bool__
- method.
- * True: Most constructs are True by default:
- classes, functions, modules, etc.
- * Uninferable: The inference engine is uncertain of the
- node's value.
-
- :returns: The boolean value of this node.
- :rtype: bool or Uninferable
- """
- return util.Uninferable
-
- def op_precedence(self):
- # Look up by class name or default to highest precedence
- return OP_PRECEDENCE.get(self.__class__.__name__, len(OP_PRECEDENCE))
-
- def op_left_associative(self):
- # Everything is left associative except `**` and IfExp
- return True
-
-
-class Statement(NodeNG):
- """Statement node adding a few attributes"""
-
- is_statement = True
- """Whether this node indicates a statement.
-
- :type: bool
- """
-
- def next_sibling(self):
- """The next sibling statement node.
-
- :returns: The next sibling statement node.
- :rtype: NodeNG or None
- """
- stmts = self.parent.child_sequence(self)
- index = stmts.index(self)
- try:
- return stmts[index + 1]
- except IndexError:
- pass
-
- def previous_sibling(self):
- """The previous sibling statement.
-
- :returns: The previous sibling statement node.
- :rtype: NodeNG or None
- """
- stmts = self.parent.child_sequence(self)
- index = stmts.index(self)
- if index >= 1:
- return stmts[index - 1]
- return None
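# Editor's illustrative sketch (not part of the original astroid source):
# sibling navigation walks statements within the same parent body.
import astroid

module = astroid.parse("a = 1\nb = 2\nc = 3")
first, second, third = module.body
assert second.previous_sibling() is first
assert second.next_sibling() is third
assert third.next_sibling() is None  # no statement follows the last one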
-
-
-class _BaseContainer(
- mixins.ParentAssignTypeMixin, NodeNG, bases.Instance, metaclass=abc.ABCMeta
-):
- """Base class for Set, FrozenSet, Tuple and List."""
-
- _astroid_fields = ("elts",)
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- """
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.elts = []
- """The elements in the node.
-
- :type: list(NodeNG)
- """
-
- super(_BaseContainer, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, elts):
- """Do some setup after initialisation.
-
- :param elts: The list of elements that the node contains.
- :type elts: list(NodeNG)
- """
- self.elts = elts
-
- @classmethod
- def from_elements(cls, elts=None):
- """Create a node of this type from the given list of elements.
-
- :param elts: The list of elements that the node should contain.
- :type elts: list(NodeNG)
-
- :returns: A new node containing the given elements.
- :rtype: NodeNG
- """
- node = cls()
- if elts is None:
- node.elts = []
- else:
- node.elts = [const_factory(e) if _is_const(e) else e for e in elts]
- return node
-
- def itered(self):
- """An iterator over the elements this node contains.
-
- :returns: The contents of this node.
- :rtype: iterable(NodeNG)
- """
- return self.elts
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- :rtype: bool or Uninferable
- """
- return bool(self.elts)
-
- @abc.abstractmethod
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
-
- def get_children(self):
- yield from self.elts
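# Editor's illustrative sketch (not part of the original astroid source):
# from_elements() wraps plain Python values in Const nodes so a container
# node can be built directly from literals.
from astroid import nodes

lst = nodes.List.from_elements([1, 2, 3])
print([elt.value for elt in lst.elts])              # [1, 2, 3]
print(lst.bool_value())                             # True (non-empty)
print(nodes.Tuple.from_elements([]).bool_value())   # False (empty)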
-
-
-class LookupMixIn:
- """Mixin to look up a name in the right scope."""
-
- @lru_cache(maxsize=None)
- def lookup(self, name):
- """Lookup where the given variable is assigned.
-
- The lookup starts from self's scope. If self is not a frame itself
- and the name is found in the inner frame locals, statements will be
- filtered to remove ignorable statements according to self's location.
-
- :param name: The name of the variable to find assignments for.
- :type name: str
-
- :returns: The scope node and the list of assignments associated to the
- given name according to the scope where it has been found (locals,
- globals or builtin).
- :rtype: tuple(NodeNG, list(NodeNG))
- """
- return self.scope().scope_lookup(self, name)
-
- def ilookup(self, name):
- """Lookup the inferred values of the given variable.
-
- :param name: The variable name to find values for.
- :type name: str
-
- :returns: The inferred values of the statements returned from
- :meth:`lookup`.
- :rtype: iterable
- """
- frame, stmts = self.lookup(name)
- context = contextmod.InferenceContext()
- return bases._infer_stmts(stmts, context, frame)
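# Editor's illustrative sketch (not part of the original astroid source):
# lookup() returns the scope and the assignment nodes for a name, while
# ilookup() goes one step further and infers their values.
import astroid

name_node = astroid.extract_node("value = 42\nvalue  #@")
scope, assignments = name_node.lookup("value")
print(scope)                                   # the Module node
print(assignments)                             # [<AssignName.value l.1 ...>]
print(next(name_node.ilookup("value")).value)  # 42, from the inferred Const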
-
- def _get_filtered_node_statements(self, nodes):
- statements = [(node, node.statement()) for node in nodes]
- # Next we check if we have ExceptHandlers that are parents
- # of the underlying variable, in which case the last one survives
- if len(statements) > 1 and all(
- isinstance(stmt, ExceptHandler) for _, stmt in statements
- ):
- statements = [
- (node, stmt) for node, stmt in statements if stmt.parent_of(self)
- ]
- return statements
-
- def _filter_stmts(self, stmts, frame, offset):
- """Filter the given list of statements to remove ignorable statements.
-
- If self is not a frame itself and the name is found in the inner
- frame locals, statements will be filtered to remove ignorable
- statements according to self's location.
-
- :param stmts: The statements to filter.
- :type stmts: list(NodeNG)
-
- :param frame: The frame that all of the given statements belong to.
- :type frame: NodeNG
-
- :param offset: The line offset to filter statements up to.
- :type offset: int
-
- :returns: The filtered statements.
- :rtype: list(NodeNG)
- """
- # if offset == -1, my actual frame is not the inner frame but its parent
- #
- # class A(B): pass
- #
- # we need this to resolve B correctly
- if offset == -1:
- myframe = self.frame().parent.frame()
- else:
- myframe = self.frame()
- # If the frame of this node is the same as the statement
- # of this node, then the node is part of a class or
- # a function definition and the frame of this node should be
- # the upper frame, not the frame of the definition.
- # For more information why this is important,
- # see Pylint issue #295.
- # For example, for 'b', the statement is the same
- # as the frame / scope:
- #
- # def test(b=1):
- # ...
-
- if self.statement() is myframe and myframe.parent:
- myframe = myframe.parent.frame()
- mystmt = self.statement()
- # line filtering if we are in the same frame
- #
- # take care node may be missing lineno information (this is the case for
- # nodes inserted for living objects)
- if myframe is frame and mystmt.fromlineno is not None:
- assert mystmt.fromlineno is not None, mystmt
- mylineno = mystmt.fromlineno + offset
- else:
- # disabling lineno filtering
- mylineno = 0
-
- _stmts = []
- _stmt_parents = []
- statements = self._get_filtered_node_statements(stmts)
-
- for node, stmt in statements:
- # line filtering is on and we have reached our location, break
- if stmt.fromlineno > mylineno > 0:
- break
- # Ignore decorators with the same name as the
- # decorated function
- # Fixes issue #375
- if mystmt is stmt and is_from_decorator(self):
- continue
- assert hasattr(node, "assign_type"), (
- node,
- node.scope(),
- node.scope().locals,
- )
- assign_type = node.assign_type()
- if node.has_base(self):
- break
-
- _stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt)
- if done:
- break
-
- optional_assign = assign_type.optional_assign
- if optional_assign and assign_type.parent_of(self):
- # we are inside a loop, loop var assignment is hiding previous
- # assignment
- _stmts = [node]
- _stmt_parents = [stmt.parent]
- continue
-
- if isinstance(assign_type, NamedExpr):
- _stmts = [node]
- continue
-
- # XXX comment various branches below!!!
- try:
- pindex = _stmt_parents.index(stmt.parent)
- except ValueError:
- pass
- else:
- # we got a parent index, this means the currently visited node
- # is at the same block level as a previously visited node
- if _stmts[pindex].assign_type().parent_of(assign_type):
- # both statements are not at the same block level
- continue
- # if currently visited node is following previously considered
- # assignment and both are not exclusive, we can drop the
- # previous one. For instance in the following code ::
- #
- # if a:
- # x = 1
- # else:
- # x = 2
- # print x
- #
- # we can't remove either x = 1 or x = 2 when looking for 'x'
- # of 'print x'; while in the following ::
- #
- # x = 1
- # x = 2
- # print x
- #
- # we can remove x = 1 when we see x = 2
- #
- # moreover, on loop assignment types, assignment won't
- # necessarily be done if the loop has no iteration, so we don't
- # want to clear previous assignments if any (hence the test on
- # optional_assign)
- if not (optional_assign or are_exclusive(_stmts[pindex], node)):
- if (
- # In case of partial function node, if the statement is different
- # from the origin function then it can be deleted otherwise it should
- # remain to be able to correctly infer the call to origin function.
- not node.is_function
- or node.qname() != "PartialFunction"
- or node.name != _stmts[pindex].name
- ):
- del _stmt_parents[pindex]
- del _stmts[pindex]
- if isinstance(node, AssignName):
- if not optional_assign and stmt.parent is mystmt.parent:
- _stmts = []
- _stmt_parents = []
- elif isinstance(node, DelName):
- _stmts = []
- _stmt_parents = []
- continue
- if not are_exclusive(self, node):
- _stmts.append(node)
- _stmt_parents.append(stmt.parent)
- return _stmts
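# Editor's illustrative sketch (not part of the original astroid source) of
# the filtering described above: sequential re-assignments shadow earlier
# ones, while mutually exclusive branches are both kept.
import astroid

name_node = astroid.extract_node("x = 1\nx = 2\nx  #@")
_, stmts = name_node.lookup("x")
print([s.parent.value.value for s in stmts])   # [2] - 'x = 1' was dropped

name_node = astroid.extract_node(
    "if cond:\n    y = 1\nelse:\n    y = 2\ny  #@"
)
_, stmts = name_node.lookup("y")
print(len(stmts))                              # 2 - both branches survive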
-
-
-# Name classes
-
-
-class AssignName(
- mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG
-):
- """Variation of :class:`ast.Assign` representing assignment to a name.
-
- An :class:`AssignName` is the name of something that is assigned to.
- This includes variables defined in a function signature or in a loop.
-
- >>> node = astroid.extract_node('variable = range(10)')
- >>> node
- <Assign l.1 at 0x7effe1db8550>
- >>> list(node.get_children())
- [<AssignName.variable l.1 at 0x7effe1db8748>, <Call l.1 at 0x7effe1db8630>]
- >>> list(node.get_children())[0].as_string()
- 'variable'
- """
-
- _other_fields = ("name",)
-
- def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
- """
- :param name: The name that is assigned to.
- :type name: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.name = name
- """The name that is assigned to.
-
- :type: str or None
- """
-
- super(AssignName, self).__init__(lineno, col_offset, parent)
-
-
-class DelName(
- mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG
-):
- """Variation of :class:`ast.Delete` representing deletion of a name.
-
- A :class:`DelName` is the name of something that is deleted.
-
- >>> node = astroid.extract_node("del variable #@")
- >>> list(node.get_children())
- [<DelName.variable l.1 at 0x7effe1da4d30>]
- >>> list(node.get_children())[0].as_string()
- 'variable'
- """
-
- _other_fields = ("name",)
-
- def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
- """
- :param name: The name that is being deleted.
- :type name: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.name = name
- """The name that is being deleted.
-
- :type: str or None
- """
-
- super(DelName, self).__init__(lineno, col_offset, parent)
-
-
-class Name(mixins.NoChildrenMixin, LookupMixIn, NodeNG):
- """Class representing an :class:`ast.Name` node.
-
- A :class:`Name` node is something that is named, but not covered by
- :class:`AssignName` or :class:`DelName`.
-
- >>> node = astroid.extract_node('range(10)')
- >>> node
- <Call l.1 at 0x7effe1db8710>
- >>> list(node.get_children())
- [<Name.range l.1 at 0x7effe1db86a0>, <Const.int l.1 at 0x7effe1db8518>]
- >>> list(node.get_children())[0].as_string()
- 'range'
- """
-
- _other_fields = ("name",)
-
- def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
- """
- :param name: The name that this node refers to.
- :type name: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.name = name
- """The name that this node refers to.
-
- :type: str or None
- """
-
- super(Name, self).__init__(lineno, col_offset, parent)
-
- def _get_name_nodes(self):
- yield self
-
- for child_node in self.get_children():
- yield from child_node._get_name_nodes()
-
-
-class Arguments(mixins.AssignTypeMixin, NodeNG):
- """Class representing an :class:`ast.arguments` node.
-
- An :class:`Arguments` node represents the arguments in a
- function definition.
-
- >>> node = astroid.extract_node('def foo(bar): pass')
- >>> node
- <FunctionDef.foo l.1 at 0x7effe1db8198>
- >>> node.args
- <Arguments l.1 at 0x7effe1db82e8>
- """
-
- # Python 3.4+ uses a different approach regarding annotations,
- # each argument is a new class, _ast.arg, which exposes an
- # 'annotation' attribute. In astroid though, arguments are exposed
- # as is in the Arguments node and the only way to expose annotations
- # is by using something similar to what Python 3.3 did:
- # - we expose 'varargannotation' and 'kwargannotation' of annotations
- # of varargs and kwargs.
- # - we expose 'annotation', a list with annotations for
- # each normal argument. If an argument doesn't have an
- # annotation, its value will be None.
-
- _astroid_fields = (
- "args",
- "defaults",
- "kwonlyargs",
- "posonlyargs",
- "kw_defaults",
- "annotations",
- "varargannotation",
- "kwargannotation",
- "kwonlyargs_annotations",
- "type_comment_args",
- )
- varargannotation = None
- """The type annotation for the variable length arguments.
-
- :type: NodeNG
- """
- kwargannotation = None
- """The type annotation for the variable length keyword arguments.
-
- :type: NodeNG
- """
-
- _other_fields = ("vararg", "kwarg")
-
- def __init__(self, vararg=None, kwarg=None, parent=None):
- """
- :param vararg: The name of the variable length arguments.
- :type vararg: str or None
-
- :param kwarg: The name of the variable length keyword arguments.
- :type kwarg: str or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- super(Arguments, self).__init__(parent=parent)
- self.vararg = vararg
- """The name of the variable length arguments.
-
- :type: str or None
- """
-
- self.kwarg = kwarg
- """The name of the variable length keyword arguments.
-
- :type: str or None
- """
-
- self.args = []
- """The names of the required arguments.
-
- :type: list(AssignName)
- """
-
- self.defaults = []
- """The default values for arguments that can be passed positionally.
-
- :type: list(NodeNG)
- """
-
- self.kwonlyargs = []
- """The keyword arguments that cannot be passed positionally.
-
- :type: list(AssignName)
- """
-
- self.posonlyargs = []
- """The arguments that can only be passed positionally.
-
- :type: list(AssignName)
- """
-
- self.kw_defaults = []
- """The default values for keyword arguments that cannot be passed positionally.
-
- :type: list(NodeNG)
- """
-
- self.annotations = []
- """The type annotations of arguments that can be passed positionally.
-
- :type: list(NodeNG)
- """
-
- self.posonlyargs_annotations = []
- """The type annotations of arguments that can only be passed positionally.
-
- :type: list(NodeNG)
- """
-
- self.kwonlyargs_annotations = []
- """The type annotations of arguments that cannot be passed positionally.
-
- :type: list(NodeNG)
- """
-
- self.type_comment_args = []
- """The type annotation, passed by a type comment, of each argument.
-
- If an argument does not have a type comment,
- the value for that argument will be None.
-
- :type: list(NodeNG or None)
- """
-
- # pylint: disable=too-many-arguments
- def postinit(
- self,
- args,
- defaults,
- kwonlyargs,
- kw_defaults,
- annotations,
- posonlyargs=None,
- kwonlyargs_annotations=None,
- posonlyargs_annotations=None,
- varargannotation=None,
- kwargannotation=None,
- type_comment_args=None,
- ):
- """Do some setup after initialisation.
-
- :param args: The names of the required arguments.
- :type args: list(AssignName)
-
- :param defaults: The default values for arguments that can be passed
- positionally.
- :type defaults: list(NodeNG)
-
- :param kwonlyargs: The keyword arguments that cannot be passed
- positionally.
- :type kwonlyargs: list(AssignName)
-
- :param posonlyargs: The arguments that can only be passed
- positionally.
- :type posonlyargs: list(AssignName)
-
- :param kw_defaults: The default values for keyword arguments that
- cannot be passed positionally.
- :type kw_defaults: list(NodeNG)
-
- :param annotations: The type annotations of arguments that can be
- passed positionally.
- :type annotations: list(NodeNG)
-
- :param kwonlyargs_annotations: The type annotations of arguments that
- cannot be passed positionally. This should always be passed in
- Python 3.
- :type kwonlyargs_annotations: list(NodeNG)
-
- :param posonlyargs_annotations: The type annotations of arguments that
- can only be passed positionally. This should always be passed in
- Python 3.
- :type posonlyargs_annotations: list(NodeNG)
-
- :param varargannotation: The type annotation for the variable length
- arguments.
- :type varargannotation: NodeNG
-
- :param kwargannotation: The type annotation for the variable length
- keyword arguments.
- :type kwargannotation: NodeNG
-
- :param type_comment_args: The type annotation,
- passed by a type comment, of each argument.
- :type type_comment_args: list(NodeNG or None)
- """
- self.args = args
- self.defaults = defaults
- self.kwonlyargs = kwonlyargs
- self.posonlyargs = posonlyargs
- self.kw_defaults = kw_defaults
- self.annotations = annotations
- self.kwonlyargs_annotations = kwonlyargs_annotations
- self.posonlyargs_annotations = posonlyargs_annotations
- self.varargannotation = varargannotation
- self.kwargannotation = kwargannotation
- self.type_comment_args = type_comment_args
-
- # pylint: disable=too-many-arguments
-
- def _infer_name(self, frame, name):
- if self.parent is frame:
- return name
- return None
-
- @decorators.cachedproperty
- def fromlineno(self):
- """The first line that this node appears on in the source code.
-
- :type: int or None
- """
- lineno = super(Arguments, self).fromlineno
- return max(lineno, self.parent.fromlineno or 0)
-
- def format_args(self):
- """Get the arguments formatted as string.
-
- :returns: The formatted arguments.
- :rtype: str
- """
- result = []
- positional_only_defaults = []
- positional_or_keyword_defaults = self.defaults
- if self.defaults:
- args = self.args or []
- positional_or_keyword_defaults = self.defaults[-len(args) :]
- positional_only_defaults = self.defaults[: len(self.defaults) - len(args)]
-
- if self.posonlyargs:
- result.append(_format_args(self.posonlyargs, positional_only_defaults))
- result.append("/")
- if self.args:
- result.append(
- _format_args(
- self.args,
- positional_or_keyword_defaults,
- getattr(self, "annotations", None),
- )
- )
- if self.vararg:
- result.append("*%s" % self.vararg)
- if self.kwonlyargs:
- if not self.vararg:
- result.append("*")
- result.append(
- _format_args(
- self.kwonlyargs, self.kw_defaults, self.kwonlyargs_annotations
- )
- )
- if self.kwarg:
- result.append("**%s" % self.kwarg)
- return ", ".join(result)
-
- def default_value(self, argname):
- """Get the default value for an argument.
-
- :param argname: The name of the argument to get the default value for.
- :type argname: str
-
- :raises NoDefault: If there is no default value defined for the
- given argument.
- """
- args = list(itertools.chain((self.posonlyargs or ()), self.args or ()))
- index = _find_arg(argname, args)[0]
- if index is not None:
- idx = index - (len(args) - len(self.defaults))
- if idx >= 0:
- return self.defaults[idx]
- index = _find_arg(argname, self.kwonlyargs)[0]
- if index is not None and self.kw_defaults[index] is not None:
- return self.kw_defaults[index]
- raise exceptions.NoDefault(func=self.parent, name=argname)
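# Editor's illustrative sketch (not part of the original astroid source):
# default_value() maps an argument name to its default node and raises
# NoDefault when the argument has none.
import astroid
from astroid import exceptions

func = astroid.extract_node("def f(a, b=2, *, c=3): pass")
print(func.args.default_value("b").value)  # 2
print(func.args.default_value("c").value)  # 3 (keyword-only default)
try:
    func.args.default_value("a")
except exceptions.NoDefault:
    print("a has no default")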
-
- def is_argument(self, name):
- """Check if the given name is defined in the arguments.
-
- :param name: The name to check for.
- :type name: str
-
- :returns: True if the given name is defined in the arguments,
- False otherwise.
- :rtype: bool
- """
- if name == self.vararg:
- return True
- if name == self.kwarg:
- return True
- return (
- self.find_argname(name, rec=True)[1] is not None
- or self.kwonlyargs
- and _find_arg(name, self.kwonlyargs, rec=True)[1] is not None
- )
-
- def find_argname(self, argname, rec=False):
- """Get the index and :class:`AssignName` node for given name.
-
- :param argname: The name of the argument to search for.
- :type argname: str
-
- :param rec: Whether or not to include arguments in unpacked tuples
- in the search.
- :type rec: bool
-
- :returns: The index and node for the argument.
- :rtype: tuple(int or None, AssignName or None)
- """
- if (
- self.args or self.posonlyargs
- ): # self.args may be None in some cases (builtin function)
- arguments = itertools.chain(self.posonlyargs or (), self.args or ())
- return _find_arg(argname, arguments, rec)
- return None, None
-
- def get_children(self):
- yield from self.posonlyargs or ()
- yield from self.args or ()
-
- yield from self.defaults
- yield from self.kwonlyargs
-
- for elt in self.kw_defaults:
- if elt is not None:
- yield elt
-
- for elt in self.annotations:
- if elt is not None:
- yield elt
-
- if self.varargannotation is not None:
- yield self.varargannotation
-
- if self.kwargannotation is not None:
- yield self.kwargannotation
-
- for elt in self.kwonlyargs_annotations:
- if elt is not None:
- yield elt
-
-
-def _find_arg(argname, args, rec=False):
- for i, arg in enumerate(args):
- if isinstance(arg, Tuple):
- if rec:
- found = _find_arg(argname, arg.elts)
- if found[0] is not None:
- return found
- elif arg.name == argname:
- return i, arg
- return None, None
-
-
-def _format_args(args, defaults=None, annotations=None):
- values = []
- if args is None:
- return ""
- if annotations is None:
- annotations = []
- if defaults is not None:
- default_offset = len(args) - len(defaults)
- packed = itertools.zip_longest(args, annotations)
- for i, (arg, annotation) in enumerate(packed):
- if isinstance(arg, Tuple):
- values.append("(%s)" % _format_args(arg.elts))
- else:
- argname = arg.name
- default_sep = "="
- if annotation is not None:
- argname += ": " + annotation.as_string()
- default_sep = " = "
- values.append(argname)
-
- if defaults is not None and i >= default_offset:
- if defaults[i - default_offset] is not None:
- values[-1] += default_sep + defaults[i - default_offset].as_string()
- return ", ".join(values)
-
-
-class AssignAttr(mixins.ParentAssignTypeMixin, NodeNG):
- """Variation of :class:`ast.Assign` representing assignment to an attribute.
-
- >>> node = astroid.extract_node('self.attribute = range(10)')
- >>> node
- <Assign l.1 at 0x7effe1d521d0>
- >>> list(node.get_children())
- [<AssignAttr.attribute l.1 at 0x7effe1d52320>, <Call l.1 at 0x7effe1d522e8>]
- >>> list(node.get_children())[0].as_string()
- 'self.attribute'
- """
-
- _astroid_fields = ("expr",)
- _other_fields = ("attrname",)
- expr = None
- """What has the attribute that is being assigned to.
-
- :type: NodeNG or None
- """
-
- def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None):
- """
- :param attrname: The name of the attribute being assigned to.
- :type attrname: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.attrname = attrname
- """The name of the attribute being assigned to.
-
- :type: str or None
- """
-
- super(AssignAttr, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, expr=None):
- """Do some setup after initialisation.
-
- :param expr: What has the attribute that is being assigned to.
- :type expr: NodeNG or None
- """
- self.expr = expr
-
- def get_children(self):
- yield self.expr
-
-
-class Assert(Statement):
- """Class representing an :class:`ast.Assert` node.
-
- An :class:`Assert` node represents an assert statement.
-
- >>> node = astroid.extract_node('assert len(things) == 10, "Not enough things"')
- >>> node
- <Assert l.1 at 0x7effe1d527b8>
- """
-
- _astroid_fields = ("test", "fail")
- test = None
- """The test that passes or fails the assertion.
-
- :type: NodeNG or None
- """
- fail = None
- """The message shown when the assertion fails.
-
- :type: NodeNG or None
- """
-
- def postinit(self, test=None, fail=None):
- """Do some setup after initialisation.
-
- :param test: The test that passes or fails the assertion.
- :type test: NodeNG or None
-
- :param fail: The message shown when the assertion fails.
- :type fail: NodeNG or None
- """
- self.fail = fail
- self.test = test
-
- def get_children(self):
- yield self.test
-
- if self.fail is not None:
- yield self.fail
-
-
-class Assign(mixins.AssignTypeMixin, Statement):
- """Class representing an :class:`ast.Assign` node.
-
- An :class:`Assign` is a statement where something is explicitly
- assigned to.
-
- >>> node = astroid.extract_node('variable = range(10)')
- >>> node
- <Assign l.1 at 0x7effe1db8550>
- """
-
- _astroid_fields = ("targets", "value")
- _other_other_fields = ("type_annotation",)
- targets = None
- """What is being assigned to.
-
- :type: list(NodeNG) or None
- """
- value = None
- """The value being assigned to the variables.
-
- :type: NodeNG or None
- """
- type_annotation = None
- """If present, this will contain the type annotation passed by a type comment
-
- :type: NodeNG or None
- """
-
- def postinit(self, targets=None, value=None, type_annotation=None):
- """Do some setup after initialisation.
-
- :param targets: What is being assigned to.
- :type targets: list(NodeNG) or None
-
- :param value: The value being assigned to the variables.
- :type value: NodeNG or None
- """
- self.targets = targets
- self.value = value
- self.type_annotation = type_annotation
-
- def get_children(self):
- yield from self.targets
-
- yield self.value
-
- @decorators.cached
- def _get_assign_nodes(self):
- return [self] + list(self.value._get_assign_nodes())
-
- def _get_yield_nodes_skip_lambdas(self):
- yield from self.value._get_yield_nodes_skip_lambdas()
-
-
-class AnnAssign(mixins.AssignTypeMixin, Statement):
- """Class representing an :class:`ast.AnnAssign` node.
-
- An :class:`AnnAssign` is an assignment with a type annotation.
-
- >>> node = astroid.extract_node('variable: List[int] = range(10)')
- >>> node
- <AnnAssign l.1 at 0x7effe1d4c630>
- """
-
- _astroid_fields = ("target", "annotation", "value")
- _other_fields = ("simple",)
- target = None
- """What is being assigned to.
-
- :type: NodeNG or None
- """
- annotation = None
- """The type annotation of what is being assigned to.
-
- :type: NodeNG
- """
- value = None
- """The value being assigned to the variables.
-
- :type: NodeNG or None
- """
- simple = None
- """Whether :attr:`target` is a pure name or a complex statement.
-
- :type: int
- """
-
- def postinit(self, target, annotation, simple, value=None):
- """Do some setup after initialisation.
-
- :param target: What is being assigned to.
- :type target: NodeNG
-
- :param annotation: The type annotation of what is being assigned to.
- :type annotation: NodeNG
-
- :param simple: Whether :attr:`target` is a pure name
- or a complex statement.
- :type simple: int
-
- :param value: The value being assigned to the variables.
- :type value: NodeNG or None
- """
- self.target = target
- self.annotation = annotation
- self.value = value
- self.simple = simple
-
- def get_children(self):
- yield self.target
- yield self.annotation
-
- if self.value is not None:
- yield self.value
-
-
-class AugAssign(mixins.AssignTypeMixin, Statement):
- """Class representing an :class:`ast.AugAssign` node.
-
- An :class:`AugAssign` is an assignment paired with an operator.
-
- >>> node = astroid.extract_node('variable += 1')
- >>> node
- <AugAssign l.1 at 0x7effe1db4d68>
- """
-
- _astroid_fields = ("target", "value")
- _other_fields = ("op",)
- target = None
- """What is being assigned to.
-
- :type: NodeNG or None
- """
- value = None
- """The value being assigned to the variable.
-
- :type: NodeNG or None
- """
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- """
- :param op: The operator that is being combined with the assignment.
- This includes the equals sign.
- :type op: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.op = op
- """The operator that is being combined with the assignment.
-
- This includes the equals sign.
-
- :type: str or None
- """
-
- super(AugAssign, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, target=None, value=None):
- """Do some setup after initialisation.
-
- :param target: What is being assigned to.
- :type target: NodeNG or None
-
- :param value: The value being assigned to the variable.
- :type value: NodeNG or None
- """
- self.target = target
- self.value = value
-
- # This is set by inference.py
- def _infer_augassign(self, context=None):
- raise NotImplementedError
-
- def type_errors(self, context=None):
- """Get a list of type errors which can occur during inference.
-
- Each TypeError is represented by a :class:`BadBinaryOperationMessage` ,
- which holds the original exception.
-
- :returns: The list of possible type errors.
- :rtype: list(BadBinaryOperationMessage)
- """
- try:
- results = self._infer_augassign(context=context)
- return [
- result
- for result in results
- if isinstance(result, util.BadBinaryOperationMessage)
- ]
- except exceptions.InferenceError:
- return []
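# Editor's illustrative sketch (not part of the original astroid source):
# type_errors() collects operations that inference can prove unsupported.
import astroid

aug = astroid.extract_node("x = 1\nx += 'a'  #@")
print([str(error) for error in aug.type_errors()])
# e.g. ["unsupported operand type(s) for +=: 'int' and 'str'"]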
-
- def get_children(self):
- yield self.target
- yield self.value
-
-
-class Repr(NodeNG):
- """Class representing an :class:`ast.Repr` node.
-
- A :class:`Repr` node represents the backtick syntax,
- which is a deprecated alias for :func:`repr` removed in Python 3.
-
- >>> node = astroid.extract_node('`variable`')
- >>> node
- <Repr l.1 at 0x7fa0951d75d0>
- """
-
- _astroid_fields = ("value",)
- value = None
- """What is having :func:`repr` called on it.
-
- :type: NodeNG or None
- """
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: What is having :func:`repr` called on it.
- :type value: NodeNG or None
- """
- self.value = value
-
-
-class BinOp(NodeNG):
- """Class representing an :class:`ast.BinOp` node.
-
- A :class:`BinOp` node is an application of a binary operator.
-
- >>> node = astroid.extract_node('a + b')
- >>> node
- <BinOp l.1 at 0x7f23b2e8cfd0>
- """
-
- _astroid_fields = ("left", "right")
- _other_fields = ("op",)
- left = None
- """What is being applied to the operator on the left side.
-
- :type: NodeNG or None
- """
- right = None
- """What is being applied to the operator on the right side.
-
- :type: NodeNG or None
- """
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- """
- :param op: The operator.
- :type op: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.op = op
- """The operator.
-
- :type: str or None
- """
-
- super(BinOp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, left=None, right=None):
- """Do some setup after initialisation.
-
- :param left: What is being applied to the operator on the left side.
- :type left: NodeNG or None
-
- :param right: What is being applied to the operator on the right side.
- :type right: NodeNG or None
- """
- self.left = left
- self.right = right
-
- # This is set by inference.py
- def _infer_binop(self, context=None):
- raise NotImplementedError
-
- def type_errors(self, context=None):
- """Get a list of type errors which can occur during inference.
-
- Each TypeError is represented by a :class:`BadBinaryOperationMessage`,
- which holds the original exception.
-
- :returns: The list of possible type errors.
- :rtype: list(BadBinaryOperationMessage)
- """
- try:
- results = self._infer_binop(context=context)
- return [
- result
- for result in results
- if isinstance(result, util.BadBinaryOperationMessage)
- ]
- except exceptions.InferenceError:
- return []
-
- def get_children(self):
- yield self.left
- yield self.right
-
- def op_precedence(self):
- return OP_PRECEDENCE[self.op]
-
- def op_left_associative(self):
- # 2**3**4 == 2**(3**4)
- return self.op != "**"
-
-
-class BoolOp(NodeNG):
- """Class representing an :class:`ast.BoolOp` node.
-
- A :class:`BoolOp` is an application of a boolean operator.
-
- >>> node = astroid.extract_node('a and b')
- >>> node
- <BoolOp l.1 at 0x7f23b2e71c50>
- """
-
- _astroid_fields = ("values",)
- _other_fields = ("op",)
- values = None
- """The values being applied to the operator.
-
- :type: list(NodeNG) or None
- """
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- """
- :param op: The operator.
- :type op: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.op = op
- """The operator.
-
- :type: str or None
- """
-
- super(BoolOp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, values=None):
- """Do some setup after initialisation.
-
- :param values: The values being applied to the operator.
- :type values: list(NodeNG) or None
- """
- self.values = values
-
- def get_children(self):
- yield from self.values
-
- def op_precedence(self):
- return OP_PRECEDENCE[self.op]
-
-
-class Break(mixins.NoChildrenMixin, Statement):
- """Class representing an :class:`ast.Break` node.
-
- >>> node = astroid.extract_node('break')
- >>> node
- <Break l.1 at 0x7f23b2e9e5c0>
- """
-
-
-class Call(NodeNG):
- """Class representing an :class:`ast.Call` node.
-
- A :class:`Call` node is a call to a function, method, etc.
-
- >>> node = astroid.extract_node('function()')
- >>> node
- <Call l.1 at 0x7f23b2e71eb8>
- """
-
- _astroid_fields = ("func", "args", "keywords")
- func = None
- """What is being called.
-
- :type: NodeNG or None
- """
- args = None
- """The positional arguments being given to the call.
-
- :type: list(NodeNG) or None
- """
- keywords = None
- """The keyword arguments being given to the call.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, func=None, args=None, keywords=None):
- """Do some setup after initialisation.
-
- :param func: What is being called.
- :type func: NodeNG or None
-
- :param args: The positional arguments being given to the call.
- :type args: list(NodeNG) or None
-
- :param keywords: The keyword arguments being given to the call.
- :type keywords: list(NodeNG) or None
- """
- self.func = func
- self.args = args
- self.keywords = keywords
-
- @property
- def starargs(self):
- """The positional arguments that unpack something.
-
- :type: list(Starred)
- """
- args = self.args or []
- return [arg for arg in args if isinstance(arg, Starred)]
-
- @property
- def kwargs(self):
- """The keyword arguments that unpack something.
-
- :type: list(Keyword)
- """
- keywords = self.keywords or []
- return [keyword for keyword in keywords if keyword.arg is None]
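# Editor's illustrative sketch (not part of the original astroid source):
# starargs/kwargs single out the unpacking arguments of a call.
import astroid

call = astroid.extract_node("func(1, *rest, key=2, **extra)")
print([arg.as_string() for arg in call.starargs])    # ['*rest']
print([kw.value.as_string() for kw in call.kwargs])  # ['extra']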
-
- def get_children(self):
- yield self.func
-
- yield from self.args
-
- yield from self.keywords or ()
-
-
-class Compare(NodeNG):
- """Class representing an :class:`ast.Compare` node.
-
- A :class:`Compare` node indicates a comparison.
-
- >>> node = astroid.extract_node('a <= b <= c')
- >>> node
- <Compare l.1 at 0x7f23b2e9e6d8>
- >>> node.ops
- [('<=', <Name.b l.1 at 0x7f23b2e9e2b0>), ('<=', <Name.c l.1 at 0x7f23b2e9e390>)]
- """
-
- _astroid_fields = ("left", "ops")
- left = None
- """The value at the left being applied to a comparison operator.
-
- :type: NodeNG or None
- """
- ops = None
- """The remainder of the operators and their relevant right hand value.
-
- :type: list(tuple(str, NodeNG)) or None
- """
-
- def postinit(self, left=None, ops=None):
- """Do some setup after initialisation.
-
- :param left: The value at the left being applied to a comparison
- operator.
- :type left: NodeNG or None
-
- :param ops: The remainder of the operators
- and their relevant right hand value.
- :type ops: list(tuple(str, NodeNG)) or None
- """
- self.left = left
- self.ops = ops
-
- def get_children(self):
- """Get the child nodes below this node.
-
- Overridden to handle the tuple fields and skip returning the operator
- strings.
-
- :returns: The children.
- :rtype: iterable(NodeNG)
- """
- yield self.left
- for _, comparator in self.ops:
- yield comparator # we don't want the 'op'
-
- def last_child(self):
- """An optimized version of list(get_children())[-1]
-
- :returns: The last child.
- :rtype: NodeNG
- """
- # XXX maybe if self.ops:
- return self.ops[-1][1]
- # return self.left
-
-
-class Comprehension(NodeNG):
- """Class representing an :class:`ast.comprehension` node.
-
- A :class:`Comprehension` indicates the loop inside any type of
- comprehension including generator expressions.
-
- >>> node = astroid.extract_node('[x for x in some_values]')
- >>> list(node.get_children())
- [<Name.x l.1 at 0x7f23b2e352b0>, <Comprehension l.1 at 0x7f23b2e35320>]
- >>> list(node.get_children())[1].as_string()
- 'for x in some_values'
- """
-
- _astroid_fields = ("target", "iter", "ifs")
- _other_fields = ("is_async",)
- target = None
- """What is assigned to by the comprehension.
-
- :type: NodeNG or None
- """
- iter = None
- """What is iterated over by the comprehension.
-
- :type: NodeNG or None
- """
- ifs = None
- """The contents of any if statements that filter the comprehension.
-
- :type: list(NodeNG) or None
- """
- is_async = None
- """Whether this is an asynchronous comprehension or not.
-
- :type: bool or None
- """
-
- def __init__(self, parent=None):
- """
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- super(Comprehension, self).__init__()
- self.parent = parent
-
- # pylint: disable=redefined-builtin; same name as builtin ast module.
- def postinit(self, target=None, iter=None, ifs=None, is_async=None):
- """Do some setup after initialisation.
-
- :param target: What is assigned to by the comprehension.
- :type target: NodeNG or None
-
- :param iter: What is iterated over by the comprehension.
- :type iter: NodeNG or None
-
- :param ifs: The contents of any if statements that filter
- the comprehension.
- :type ifs: list(NodeNG) or None
-
- :param is_async: Whether this is an asynchronous comprehension or not.
- :type is_async: bool or None
- """
- self.target = target
- self.iter = iter
- self.ifs = ifs
- self.is_async = is_async
-
- optional_assign = True
- """Whether this node optionally assigns a variable.
-
- :type: bool
- """
-
- def assign_type(self):
- """The type of assignment that this node performs.
-
- :returns: The assignment type.
- :rtype: NodeNG
- """
- return self
-
- def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt):
- """method used in filter_stmts"""
- if self is mystmt:
- if isinstance(lookup_node, (Const, Name)):
- return [lookup_node], True
-
- elif self.statement() is mystmt:
- # original node's statement is the assignment, only keeps
- # current node (gen exp, list comp)
-
- return [node], True
-
- return stmts, False
-
- def get_children(self):
- yield self.target
- yield self.iter
-
- yield from self.ifs
-
-
-class Const(mixins.NoChildrenMixin, NodeNG, bases.Instance):
- """Class representing any constant including num, str, bool, None, bytes.
-
- >>> node = astroid.extract_node('(5, "This is a string.", True, None, b"bytes")')
- >>> node
- <Tuple.tuple l.1 at 0x7f23b2e358d0>
- >>> list(node.get_children())
- [<Const.int l.1 at 0x7f23b2e35940>,
- <Const.str l.1 at 0x7f23b2e35978>,
- <Const.bool l.1 at 0x7f23b2e359b0>,
- <Const.NoneType l.1 at 0x7f23b2e359e8>,
- <Const.bytes l.1 at 0x7f23b2e35a20>]
- """
-
- _other_fields = ("value",)
-
- def __init__(self, value, lineno=None, col_offset=None, parent=None):
- """
- :param value: The value that the constant represents.
- :type value: object
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.value = value
- """The value that the constant represents.
-
- :type: object
- """
-
- super(Const, self).__init__(lineno, col_offset, parent)
-
- def __getattr__(self, name):
- # This is needed because of Proxy's __getattr__ method.
- # Calling object.__new__ on this class without calling
- # __init__ would otherwise result in an infinite loop,
- # since __getattr__ is called when an attribute doesn't
- # exist, self._proxied indirectly calls self.value,
- # and Proxy's __getattr__ calls self.value again.
- if name == "value":
- raise AttributeError
- return super().__getattr__(name)
-
- def getitem(self, index, context=None):
- """Get an item from this node if subscriptable.
-
- :param index: The node to use as a subscript index.
- :type index: Const or Slice
-
- :raises AstroidTypeError: When the given index cannot be used as a
- subscript index, or if this node is not subscriptable.
- """
- if isinstance(index, Const):
- index_value = index.value
- elif isinstance(index, Slice):
- index_value = _infer_slice(index, context=context)
-
- else:
- raise exceptions.AstroidTypeError(
- "Could not use type {} as subscript index".format(type(index))
- )
-
- try:
- if isinstance(self.value, (str, bytes)):
- return Const(self.value[index_value])
- except IndexError as exc:
- raise exceptions.AstroidIndexError(
- message="Index {index!r} out of range",
- node=self,
- index=index,
- context=context,
- ) from exc
- except TypeError as exc:
- raise exceptions.AstroidTypeError(
- message="Type error {error!r}", node=self, index=index, context=context
- ) from exc
-
- raise exceptions.AstroidTypeError("%r (value=%s)" % (self, self.value))
-
- def has_dynamic_getattr(self):
- """Check if the node has a custom __getattr__ or __getattribute__.
-
- :returns: True if the class has a custom
- __getattr__ or __getattribute__, False otherwise.
- For a :class:`Const` this is always ``False``.
- :rtype: bool
- """
- return False
-
- def itered(self):
- """An iterator over the elements this node contains.
-
- :returns: The contents of this node.
- :rtype: iterable(str)
-
- :raises TypeError: If this node does not represent something that is iterable.
- """
- if isinstance(self.value, str):
- return self.value
- raise TypeError()
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- return self._proxied.qname()
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- :rtype: bool
- """
- return bool(self.value)
-
-
-class Continue(mixins.NoChildrenMixin, Statement):
- """Class representing an :class:`ast.Continue` node.
-
- >>> node = astroid.extract_node('continue')
- >>> node
- <Continue l.1 at 0x7f23b2e35588>
- """
-
-
-class Decorators(NodeNG):
- """A node representing a list of decorators.
-
- A :class:`Decorators` is the decorators that are applied to
- a method or function.
-
- >>> node = astroid.extract_node('''
- @property
- def my_property(self):
- return 3
- ''')
- >>> node
- <FunctionDef.my_property l.2 at 0x7f23b2e35d30>
- >>> list(node.get_children())[0]
- <Decorators l.1 at 0x7f23b2e35d68>
- """
-
- _astroid_fields = ("nodes",)
- nodes = None
- """The decorators that this node contains.
-
- :type: list(Name or Call) or None
- """
-
- def postinit(self, nodes):
- """Do some setup after initialisation.
-
- :param nodes: The decorators that this node contains.
- :type nodes: list(Name or Call)
- """
- self.nodes = nodes
-
- def scope(self):
- """The first parent node defining a new scope.
-
- :returns: The first parent scope node.
- :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr
- """
- # skip the function node to go directly to the upper level scope
- return self.parent.parent.scope()
-
- def get_children(self):
- yield from self.nodes
-
-
-class DelAttr(mixins.ParentAssignTypeMixin, NodeNG):
- """Variation of :class:`ast.Delete` representing deletion of an attribute.
-
- >>> node = astroid.extract_node('del self.attr')
- >>> node
- <Delete l.1 at 0x7f23b2e35f60>
- >>> list(node.get_children())[0]
- <DelAttr.attr l.1 at 0x7f23b2e411d0>
- """
-
- _astroid_fields = ("expr",)
- _other_fields = ("attrname",)
- expr = None
- """The name that this node represents.
-
- :type: Name or None
- """
-
- def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None):
- """
- :param attrname: The name of the attribute that is being deleted.
- :type attrname: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.attrname = attrname
- """The name of the attribute that is being deleted.
-
- :type: str or None
- """
-
- super(DelAttr, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, expr=None):
- """Do some setup after initialisation.
-
- :param expr: The name that this node represents.
- :type expr: Name or None
- """
- self.expr = expr
-
- def get_children(self):
- yield self.expr
-
-
-class Delete(mixins.AssignTypeMixin, Statement):
- """Class representing an :class:`ast.Delete` node.
-
- A :class:`Delete` is a ``del`` statement that is deleting something.
-
- >>> node = astroid.extract_node('del self.attr')
- >>> node
- <Delete l.1 at 0x7f23b2e35f60>
- """
-
- _astroid_fields = ("targets",)
- targets = None
- """What is being deleted.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, targets=None):
- """Do some setup after initialisation.
-
- :param targets: What is being deleted.
- :type targets: list(NodeNG) or None
- """
- self.targets = targets
-
- def get_children(self):
- yield from self.targets
-
-
-class Dict(NodeNG, bases.Instance):
- """Class representing an :class:`ast.Dict` node.
-
- A :class:`Dict` is a dictionary that is created with ``{}`` syntax.
-
- >>> node = astroid.extract_node('{1: "1"}')
- >>> node
- <Dict.dict l.1 at 0x7f23b2e35cc0>
- """
-
- _astroid_fields = ("items",)
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- """
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.items = []
- """The key-value pairs contained in the dictionary.
-
- :type: list(tuple(NodeNG, NodeNG))
- """
-
- super(Dict, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, items):
- """Do some setup after initialisation.
-
- :param items: The key-value pairs contained in the dictionary.
- :type items: list(tuple(NodeNG, NodeNG))
- """
- self.items = items
-
- @classmethod
- def from_elements(cls, items=None):
- """Create a :class:`Dict` of constants from a live dictionary.
-
- :param items: The items to store in the node.
- :type items: dict
-
- :returns: The created dictionary node.
- :rtype: Dict
- """
- node = cls()
- if items is None:
- node.items = []
- else:
- node.items = [
- (const_factory(k), const_factory(v) if _is_const(v) else v)
- for k, v in items.items()
- # The keys need to be constants
- if _is_const(k)
- ]
- return node
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- return "%s.dict" % BUILTINS
-
- def get_children(self):
- """Get the key and value nodes below this node.
-
- Children are returned in the order that they are defined in the source
- code, key first then the value.
-
- :returns: The children.
- :rtype: iterable(NodeNG)
- """
- for key, value in self.items:
- yield key
- yield value
-
- def last_child(self):
- """An optimized version of list(get_children())[-1]
-
- :returns: The last child, or None if no children exist.
- :rtype: NodeNG or None
- """
- if self.items:
- return self.items[-1][1]
- return None
-
- def itered(self):
- """An iterator over the keys this node contains.
-
- :returns: The keys of this node.
- :rtype: iterable(NodeNG)
- """
- return [key for (key, _) in self.items]
-
- def getitem(self, index, context=None):
- """Get an item from this node.
-
- :param index: The node to use as a subscript index.
- :type index: Const or Slice
-
- :raises AstroidTypeError: When the given index cannot be used as a
- subscript index, or if this node is not subscriptable.
- :raises AstroidIndexError: If the given index does not exist in the
- dictionary.
- """
- for key, value in self.items:
- # TODO(cpopa): no support for overriding yet, {1:2, **{1: 3}}.
- if isinstance(key, DictUnpack):
- try:
- return value.getitem(index, context)
- except (exceptions.AstroidTypeError, exceptions.AstroidIndexError):
- continue
- for inferredkey in key.infer(context):
- if inferredkey is util.Uninferable:
- continue
- if isinstance(inferredkey, Const) and isinstance(index, Const):
- if inferredkey.value == index.value:
- return value
-
- raise exceptions.AstroidIndexError(index)
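# Editor's illustrative sketch (not part of the original astroid source):
# Dict.from_elements() builds a node from a live dict and getitem() resolves
# a constant key to its value node.
from astroid import nodes

mapping = nodes.Dict.from_elements({1: "one", 2: "two"})
print(mapping.getitem(nodes.Const(2)).value)    # 'two'
print([key.value for key in mapping.itered()])  # [1, 2]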
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- :rtype: bool
- """
- return bool(self.items)
-
-
-class Expr(Statement):
- """Class representing an :class:`ast.Expr` node.
-
- An :class:`Expr` is any expression that does not have its value used or
- stored.
-
- >>> node = astroid.extract_node('method()')
- >>> node
- <Call l.1 at 0x7f23b2e352b0>
- >>> node.parent
- <Expr l.1 at 0x7f23b2e35278>
- """
-
- _astroid_fields = ("value",)
- value = None
- """What the expression does.
-
- :type: NodeNG or None
- """
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: What the expression does.
- :type value: NodeNG or None
- """
- self.value = value
-
- def get_children(self):
- yield self.value
-
- def _get_yield_nodes_skip_lambdas(self):
- if not self.value.is_lambda:
- yield from self.value._get_yield_nodes_skip_lambdas()
-
-
-class Ellipsis(mixins.NoChildrenMixin, NodeNG): # pylint: disable=redefined-builtin
- """Class representing an :class:`ast.Ellipsis` node.
-
- An :class:`Ellipsis` is the ``...`` syntax.
-
- >>> node = astroid.extract_node('...')
- >>> node
- <Ellipsis l.1 at 0x7f23b2e35160>
- """
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For an :class:`Ellipsis` this is always ``True``.
- :rtype: bool
- """
- return True
-
-
-class EmptyNode(mixins.NoChildrenMixin, NodeNG):
- """Holds an arbitrary object in the :attr:`LocalsDictNodeNG.locals`."""
-
- object = None
-
-
-class ExceptHandler(mixins.MultiLineBlockMixin, mixins.AssignTypeMixin, Statement):
- """Class representing an :class:`ast.ExceptHandler`. node.
-
- An :class:`ExceptHandler` is an ``except`` block on a try-except.
-
- >>> node = astroid.extract_node('''
- try:
- do_something()
- except Exception as error:
- print("Error!")
- ''')
- >>> node
- <TryExcept l.2 at 0x7f23b2e9d908>
- >>> node.handlers
- [<ExceptHandler l.4 at 0x7f23b2e9e860>]
- """
-
- _astroid_fields = ("type", "name", "body")
- _multi_line_block_fields = ("body",)
- type = None
- """The types that the block handles.
-
- :type: Tuple or NodeNG or None
- """
- name = None
- """The name that the caught exception is assigned to.
-
- :type: AssignName or None
- """
- body = None
- """The contents of the block.
-
- :type: list(NodeNG) or None
- """
-
- def get_children(self):
- if self.type is not None:
- yield self.type
-
- if self.name is not None:
- yield self.name
-
- yield from self.body
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(self, type=None, name=None, body=None):
- """Do some setup after initialisation.
-
- :param type: The types that the block handles.
- :type type: Tuple or NodeNG or None
-
- :param name: The name that the caught exception is assigned to.
- :type name: AssignName or None
-
- :param body: The contents of the block.
- :type body: list(NodeNG) or None
- """
- self.type = type
- self.name = name
- self.body = body
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- """The line on which the beginning of this block ends.
-
- :type: int
- """
- if self.name:
- return self.name.tolineno
- if self.type:
- return self.type.tolineno
- return self.lineno
-
- def catch(self, exceptions): # pylint: disable=redefined-outer-name
- """Check if this node handles any of the given exceptions.
-
- If ``exceptions`` is empty, this will default to ``True``.
-
- :param exceptions: The name of the exceptions to check for.
- :type exceptions: list(str)
- """
- if self.type is None or exceptions is None:
- return True
- for node in self.type._get_name_nodes():
- if node.name in exceptions:
- return True
- return False
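# Editor's illustrative sketch (not part of the original astroid source):
# catch() does a purely name-based check of the handled exception types.
import astroid

tree = astroid.parse("""
try:
    do_something()
except (ValueError, KeyError) as error:
    pass
""")
handler = tree.body[0].handlers[0]
print(handler.catch(["KeyError"]))   # True
print(handler.catch(["TypeError"]))  # False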
-
-
-class Exec(Statement):
- """Class representing the ``exec`` statement.
-
- >>> node = astroid.extract_node('exec "True"')
- >>> node
- <Exec l.1 at 0x7f0e8106c6d0>
- """
-
- _astroid_fields = ("expr", "globals", "locals")
- expr = None
- """The expression to be executed.
-
- :type: NodeNG or None
- """
- globals = None
- """The globals dictionary to execute with.
-
- :type: NodeNG or None
- """
- locals = None
- """The locals dictionary to execute with.
-
- :type: NodeNG or None
- """
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(self, expr=None, globals=None, locals=None):
- """Do some setup after initialisation.
-
- :param expr: The expression to be executed.
- :type expr: NodeNG or None
-
- :param globals: The globals dictionary to execute with.
- :type globals: NodeNG or None
-
- :param locals: The locals dictionary to execute with.
- :type locals: NodeNG or None
- """
- self.expr = expr
- self.globals = globals
- self.locals = locals
-
-
-class ExtSlice(NodeNG):
- """Class representing an :class:`ast.ExtSlice` node.
-
- An :class:`ExtSlice` is a complex slice expression.
-
- >>> node = astroid.extract_node('l[1:3, 5]')
- >>> node
- <Subscript l.1 at 0x7f23b2e9e550>
- >>> node.slice
- <ExtSlice l.1 at 0x7f23b7b05ef0>
- """
-
- _astroid_fields = ("dims",)
- dims = None
- """The simple dimensions that form the complete slice.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, dims=None):
- """Do some setup after initialisation.
-
- :param dims: The simple dimensions that form the complete slice.
- :type dims: list(NodeNG) or None
- """
- self.dims = dims
-
-
-class For(
- mixins.MultiLineBlockMixin,
- mixins.BlockRangeMixIn,
- mixins.AssignTypeMixin,
- Statement,
-):
- """Class representing an :class:`ast.For` node.
-
- >>> node = astroid.extract_node('for thing in things: print(thing)')
- >>> node
- <For l.1 at 0x7f23b2e8cf28>
- """
-
- _astroid_fields = ("target", "iter", "body", "orelse")
- _other_other_fields = ("type_annotation",)
- _multi_line_block_fields = ("body", "orelse")
- target = None
- """What the loop assigns to.
-
- :type: NodeNG or None
- """
- iter = None
- """What the loop iterates over.
-
- :type: NodeNG or None
- """
- body = None
- """The contents of the body of the loop.
-
- :type: list(NodeNG) or None
- """
- orelse = None
- """The contents of the ``else`` block of the loop.
-
- :type: list(NodeNG) or None
- """
- type_annotation = None
- """If present, this will contain the type annotation passed by a type comment
-
- :type: NodeNG or None
- """
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(
- self, target=None, iter=None, body=None, orelse=None, type_annotation=None
- ):
- """Do some setup after initialisation.
-
- :param target: What the loop assigns to.
- :type target: NodeNG or None
-
- :param iter: What the loop iterates over.
- :type iter: NodeNG or None
-
- :param body: The contents of the body of the loop.
- :type body: list(NodeNG) or None
-
- :param orelse: The contents of the ``else`` block of the loop.
- :type orelse: list(NodeNG) or None
-
- :param type_annotation: If present, the type annotation passed by a type comment.
- :type type_annotation: NodeNG or None
- """
- self.target = target
- self.iter = iter
- self.body = body
- self.orelse = orelse
- self.type_annotation = type_annotation
-
- optional_assign = True
- """Whether this node optionally assigns a variable.
-
- This is always ``True`` for :class:`For` nodes.
-
- :type: bool
- """
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- """The line on which the beginning of this block ends.
-
- :type: int
- """
- return self.iter.tolineno
-
- def get_children(self):
- yield self.target
- yield self.iter
-
- yield from self.body
- yield from self.orelse
-
-
-class AsyncFor(For):
- """Class representing an :class:`ast.AsyncFor` node.
-
- An :class:`AsyncFor` is an asynchronous :class:`For` built with
- the ``async`` keyword.
-
- >>> node = astroid.extract_node('''
- async def func(things):
- async for thing in things:
- print(thing)
- ''')
- >>> node
- <AsyncFunctionDef.func l.2 at 0x7f23b2e416d8>
- >>> node.body[0]
- <AsyncFor l.3 at 0x7f23b2e417b8>
- """
-
-
-class Await(NodeNG):
- """Class representing an :class:`ast.Await` node.
-
- An :class:`Await` is the ``await`` keyword.
-
- >>> node = astroid.extract_node('''
- async def func(things):
- await other_func()
- ''')
- >>> node
- <AsyncFunctionDef.func l.2 at 0x7f23b2e41748>
- >>> node.body[0]
- <Expr l.3 at 0x7f23b2e419e8>
- >>> list(node.body[0].get_children())[0]
- <Await l.3 at 0x7f23b2e41a20>
- """
-
- _astroid_fields = ("value",)
- value = None
- """What to wait for.
-
- :type: NodeNG or None
- """
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: What to wait for.
- :type value: NodeNG or None
- """
- self.value = value
-
- def get_children(self):
- yield self.value
-
-
-class ImportFrom(mixins.NoChildrenMixin, mixins.ImportFromMixin, Statement):
- """Class representing an :class:`ast.ImportFrom` node.
-
- >>> node = astroid.extract_node('from my_package import my_module')
- >>> node
- <ImportFrom l.1 at 0x7f23b2e415c0>
- """
-
- _other_fields = ("modname", "names", "level")
-
- def __init__(
- self, fromname, names, level=0, lineno=None, col_offset=None, parent=None
- ):
- """
- :param fromname: The module that is being imported from.
- :type fromname: str or None
-
- :param names: What is being imported from the module.
- :type names: list(tuple(str, str or None))
-
- :param level: The level of relative import.
- :type level: int
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.modname = fromname
- """The module that is being imported from.
-
- This is ``None`` for relative imports.
-
- :type: str or None
- """
-
- self.names = names
- """What is being imported from the module.
-
- Each entry is a :class:`tuple` of the name being imported,
- and the alias that the name is assigned to (if any).
-
- :type: list(tuple(str, str or None))
- """
-
- self.level = level
- """The level of relative import.
-
- Essentially this is the number of dots in the import.
- This is always 0 for absolute imports.
-
- :type: int
- """
-
- super(ImportFrom, self).__init__(lineno, col_offset, parent)
-
-
-class Attribute(NodeNG):
- """Class representing an :class:`ast.Attribute` node."""
-
- _astroid_fields = ("expr",)
- _other_fields = ("attrname",)
- expr = None
- """The expression that the attribute is accessed on (e.g. ``x`` in ``x.attr``).
-
- :type: NodeNG or None
- """
-
- def __init__(self, attrname=None, lineno=None, col_offset=None, parent=None):
- """
- :param attrname: The name of the attribute.
- :type attrname: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.attrname = attrname
- """The name of the attribute.
-
- :type: str or None
- """
-
- super(Attribute, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, expr=None):
- """Do some setup after initialisation.
-
- :param expr: The expression that the attribute is accessed on.
- :type expr: NodeNG or None
- """
- self.expr = expr
-
- def get_children(self):
- yield self.expr
-
-
-class Global(mixins.NoChildrenMixin, Statement):
- """Class representing an :class:`ast.Global` node.
-
- >>> node = astroid.extract_node('global a_global')
- >>> node
- <Global l.1 at 0x7f23b2e9de10>
- """
-
- _other_fields = ("names",)
-
- def __init__(self, names, lineno=None, col_offset=None, parent=None):
- """
- :param names: The names being declared as global.
- :type names: list(str)
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.names = names
- """The names being declared as global.
-
- :type: list(str)
- """
-
- super(Global, self).__init__(lineno, col_offset, parent)
-
- def _infer_name(self, frame, name):
- return name
-
-
-class If(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement):
- """Class representing an :class:`ast.If` node.
-
- >>> node = astroid.extract_node('if condition: print(True)')
- >>> node
- <If l.1 at 0x7f23b2e9dd30>
- """
-
- _astroid_fields = ("test", "body", "orelse")
- _multi_line_block_fields = ("body", "orelse")
- test = None
- """The condition that the statement tests.
-
- :type: NodeNG or None
- """
- body = None
- """The contents of the block.
-
- :type: list(NodeNG) or None
- """
- orelse = None
- """The contents of the ``else`` block.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, test=None, body=None, orelse=None):
- """Do some setup after initialisation.
-
- :param test: The condition that the statement tests.
- :type test: NodeNG or None
-
- :param body: The contents of the block.
- :type body: list(NodeNG) or None
-
- :param orelse: The contents of the ``else`` block.
- :type orelse: list(NodeNG) or None
- """
- self.test = test
- self.body = body
- self.orelse = orelse
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- """The line on which the beginning of this block ends.
-
- :type: int
- """
- return self.test.tolineno
-
- def block_range(self, lineno):
- """Get a range from the given line number to where this node ends.
-
- :param lineno: The line number to start the range at.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to,
- starting at the given line number.
- :rtype: tuple(int, int)
- """
- if lineno == self.body[0].fromlineno:
- return lineno, lineno
- if lineno <= self.body[-1].tolineno:
- return lineno, self.body[-1].tolineno
- return self._elsed_block_range(lineno, self.orelse, self.body[0].fromlineno - 1)
-
- def get_children(self):
- yield self.test
-
- yield from self.body
- yield from self.orelse
-
- def has_elif_block(self):
- return len(self.orelse) == 1 and isinstance(self.orelse[0], If)
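-
- # A rough sketch of what ``has_elif_block`` detects: an ``elif`` branch is
- # parsed as a single nested If inside ``orelse``.
- #
- #     node = astroid.extract_node('''
- #     if first:
- #         pass
- #     elif second:
- #         pass
- #     ''')
- #     node.has_elif_block()  # True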
-
-
-class IfExp(NodeNG):
- """Class representing an :class:`ast.IfExp` node.
-
- >>> node = astroid.extract_node('value if condition else other')
- >>> node
- <IfExp l.1 at 0x7f23b2e9dbe0>
- """
-
- _astroid_fields = ("test", "body", "orelse")
- test = None
- """The condition that the statement tests.
-
- :type: NodeNG or None
- """
- body = None
- """The contents of the block.
-
- :type: list(NodeNG) or None
- """
- orelse = None
- """The contents of the ``else`` block.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, test=None, body=None, orelse=None):
- """Do some setup after initialisation.
-
- :param test: The condition that the statement tests.
- :type test: NodeNG or None
-
- :param body: The value to use if the condition evaluates to true.
- :type body: NodeNG or None
-
- :param orelse: The value to use if the condition evaluates to false.
- :type orelse: NodeNG or None
- """
- self.test = test
- self.body = body
- self.orelse = orelse
-
- def get_children(self):
- yield self.test
- yield self.body
- yield self.orelse
-
- def op_left_associative(self):
- # `1 if True else 2 if False else 3` is parsed as
- # `1 if True else (2 if False else 3)`
- return False
-
-
-class Import(mixins.NoChildrenMixin, mixins.ImportFromMixin, Statement):
- """Class representing an :class:`ast.Import` node.
-
- >>> node = astroid.extract_node('import astroid')
- >>> node
- <Import l.1 at 0x7f23b2e4e5c0>
- """
-
- _other_fields = ("names",)
-
- def __init__(self, names=None, lineno=None, col_offset=None, parent=None):
- """
- :param names: The names being imported.
- :type names: list(tuple(str, str or None)) or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.names = names
- """The names being imported.
-
- Each entry is a :class:`tuple` of the name being imported,
- and the alias that the name is assigned to (if any).
-
- :type: list(tuple(str, str or None)) or None
- """
-
- super(Import, self).__init__(lineno, col_offset, parent)
-
-
-class Index(NodeNG):
- """Class representing an :class:`ast.Index` node.
-
- An :class:`Index` is a simple subscript.
-
- >>> node = astroid.extract_node('things[1]')
- >>> node
- <Subscript l.1 at 0x7f23b2e9e2b0>
- >>> node.slice
- <Index l.1 at 0x7f23b2e9e6a0>
- """
-
- _astroid_fields = ("value",)
- value = None
- """The value to subscript with.
-
- :type: NodeNG or None
- """
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: The value to subscript with.
- :type value: NodeNG or None
- """
- self.value = value
-
- def get_children(self):
- yield self.value
-
-
-class Keyword(NodeNG):
- """Class representing an :class:`ast.keyword` node.
-
- >>> node = astroid.extract_node('function(a_kwarg=True)')
- >>> node
- <Call l.1 at 0x7f23b2e9e320>
- >>> node.keywords
- [<Keyword l.1 at 0x7f23b2e9e9b0>]
- """
-
- _astroid_fields = ("value",)
- _other_fields = ("arg",)
- value = None
- """The value being assigned to the keyword argument.
-
- :type: NodeNG or None
- """
-
- def __init__(self, arg=None, lineno=None, col_offset=None, parent=None):
- """
- :param arg: The argument being assigned to.
- :type arg: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.arg = arg
- """The argument being assigned to.
-
- :type: str or None
- """
-
- super(Keyword, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: The value being assigned to the keyword argument.
- :type value: NodeNG or None
- """
- self.value = value
-
- def get_children(self):
- yield self.value
-
-
-class List(_BaseContainer):
- """Class representing an :class:`ast.List` node.
-
- >>> node = astroid.extract_node('[1, 2, 3]')
- >>> node
- <List.list l.1 at 0x7f23b2e9e128>
- """
-
- _other_fields = ("ctx",)
-
- def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None):
- """
- :param ctx: Whether the list is assigned to or loaded from.
- :type ctx: Context or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.ctx = ctx
- """Whether the list is assigned to or loaded from.
-
- :type: Context or None
- """
-
- super(List, self).__init__(lineno, col_offset, parent)
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- return "%s.list" % BUILTINS
-
- def getitem(self, index, context=None):
- """Get an item from this node.
-
- :param index: The node to use as a subscript index.
- :type index: Const or Slice
- """
- return _container_getitem(self, self.elts, index, context=context)
-
-
-class Nonlocal(mixins.NoChildrenMixin, Statement):
- """Class representing an :class:`ast.Nonlocal` node.
-
- >>> node = astroid.extract_node('''
- def function():
- nonlocal var
- ''')
- >>> node
- <FunctionDef.function l.2 at 0x7f23b2e9e208>
- >>> node.body[0]
- <Nonlocal l.3 at 0x7f23b2e9e908>
- """
-
- _other_fields = ("names",)
-
- def __init__(self, names, lineno=None, col_offset=None, parent=None):
- """
- :param names: The names being declared as not local.
- :type names: list(str)
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.names = names
- """The names being declared as not local.
-
- :type: list(str)
- """
-
- super(Nonlocal, self).__init__(lineno, col_offset, parent)
-
- def _infer_name(self, frame, name):
- return name
-
-
-class Pass(mixins.NoChildrenMixin, Statement):
- """Class representing an :class:`ast.Pass` node.
-
- >>> node = astroid.extract_node('pass')
- >>> node
- <Pass l.1 at 0x7f23b2e9e748>
- """
-
-
-class Print(Statement):
- """Class representing an :class:`ast.Print` node.
-
- >>> node = astroid.extract_node('print "A message"')
- >>> node
- <Print l.1 at 0x7f0e8101d290>
- """
-
- _astroid_fields = ("dest", "values")
- dest = None
- """Where to print to.
-
- :type: NodeNG or None
- """
- values = None
- """What to print.
-
- :type: list(NodeNG) or None
- """
-
- def __init__(self, nl=None, lineno=None, col_offset=None, parent=None):
- """
- :param nl: Whether to print a new line.
- :type nl: bool or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.nl = nl
- """Whether to print a new line.
-
- :type: bool or None
- """
-
- super(Print, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, dest=None, values=None):
- """Do some setup after initialisation.
-
- :param dest: Where to print to.
- :type dest: NodeNG or None
-
- :param values: What to print.
- :type values: list(NodeNG) or None
- """
- self.dest = dest
- self.values = values
-
-
-class Raise(Statement):
- """Class representing an :class:`ast.Raise` node.
-
- >>> node = astroid.extract_node('raise RuntimeError("Something bad happened!")')
- >>> node
- <Raise l.1 at 0x7f23b2e9e828>
- """
-
- _astroid_fields = ("exc", "cause")
- exc = None
- """What is being raised.
-
- :type: NodeNG or None
- """
- cause = None
- """The exception being used to raise this one.
-
- :type: NodeNG or None
- """
-
- def postinit(self, exc=None, cause=None):
- """Do some setup after initialisation.
-
- :param exc: What is being raised.
- :type exc: NodeNG or None
-
- :param cause: The exception being used to raise this one.
- :type cause: NodeNG or None
- """
- self.exc = exc
- self.cause = cause
-
- def raises_not_implemented(self):
- """Check if this node raises a :class:`NotImplementedError`.
-
- :returns: True if this node raises a :class:`NotImplementedError`,
- False otherwise.
- :rtype: bool
- """
- if not self.exc:
- return False
- for name in self.exc._get_name_nodes():
- if name.name == "NotImplementedError":
- return True
- return False
-
- def get_children(self):
- if self.exc is not None:
- yield self.exc
-
- if self.cause is not None:
- yield self.cause
-
-
-class Return(Statement):
- """Class representing an :class:`ast.Return` node.
-
- >>> node = astroid.extract_node('return True')
- >>> node
- <Return l.1 at 0x7f23b8211908>
- """
-
- _astroid_fields = ("value",)
- value = None
- """The value being returned.
-
- :type: NodeNG or None
- """
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: The value being returned.
- :type value: NodeNG or None
- """
- self.value = value
-
- def get_children(self):
- if self.value is not None:
- yield self.value
-
- def is_tuple_return(self):
- return isinstance(self.value, Tuple)
-
- def _get_return_nodes_skip_functions(self):
- yield self
-
-
-class Set(_BaseContainer):
- """Class representing an :class:`ast.Set` node.
-
- >>> node = astroid.extract_node('{1, 2, 3}')
- >>> node
- <Set.set l.1 at 0x7f23b2e71d68>
- """
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- return "%s.set" % BUILTINS
-
-
-class Slice(NodeNG):
- """Class representing an :class:`ast.Slice` node.
-
- >>> node = astroid.extract_node('things[1:3]')
- >>> node
- <Subscript l.1 at 0x7f23b2e71f60>
- >>> node.slice
- <Slice l.1 at 0x7f23b2e71e80>
- """
-
- _astroid_fields = ("lower", "upper", "step")
- lower = None
- """The lower index in the slice.
-
- :type: NodeNG or None
- """
- upper = None
- """The upper index in the slice.
-
- :type: NodeNG or None
- """
- step = None
- """The step to take between indexes.
-
- :type: NodeNG or None
- """
-
- def postinit(self, lower=None, upper=None, step=None):
- """Do some setup after initialisation.
-
- :param lower: The lower index in the slice.
- :type lower: NodeNG or None
-
- :param upper: The upper index in the slice.
- :type upper: NodeNG or None
-
- :param step: The step to take between indexes.
- :type step: NodeNG or None
- """
- self.lower = lower
- self.upper = upper
- self.step = step
-
- def _wrap_attribute(self, attr):
- """Wrap the empty attributes of the Slice in a Const node."""
- if not attr:
- const = const_factory(attr)
- const.parent = self
- return const
- return attr
-
- @decorators.cachedproperty
- def _proxied(self):
- builtins = MANAGER.builtins_module
- return builtins.getattr("slice")[0]
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- return "%s.slice" % BUILTINS
-
- def igetattr(self, attrname, context=None):
- """Infer the possible values of the given attribute on the slice.
-
- :param attrname: The name of the attribute to infer.
- :type attrname: str
-
- :returns: The inferred possible values.
- :rtype: iterable(NodeNG)
- """
- if attrname == "start":
- yield self._wrap_attribute(self.lower)
- elif attrname == "stop":
- yield self._wrap_attribute(self.upper)
- elif attrname == "step":
- yield self._wrap_attribute(self.step)
- else:
- yield from self.getattr(attrname, context=context)
-
- def getattr(self, attrname, context=None):
- return self._proxied.getattr(attrname, context)
-
- def get_children(self):
- if self.lower is not None:
- yield self.lower
-
- if self.upper is not None:
- yield self.upper
-
- if self.step is not None:
- yield self.step
-
-
-class Starred(mixins.ParentAssignTypeMixin, NodeNG):
- """Class representing an :class:`ast.Starred` node.
-
- >>> node = astroid.extract_node('*args')
- >>> node
- <Starred l.1 at 0x7f23b2e41978>
- """
-
- _astroid_fields = ("value",)
- _other_fields = ("ctx",)
- value = None
- """What is being unpacked.
-
- :type: NodeNG or None
- """
-
- def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None):
- """
- :param ctx: Whether the starred item is assigned to or loaded from.
- :type ctx: Context or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.ctx = ctx
- """Whether the starred item is assigned to or loaded from.
-
- :type: Context or None
- """
-
- super(Starred, self).__init__(
- lineno=lineno, col_offset=col_offset, parent=parent
- )
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: What is being unpacked.
- :type value: NodeNG or None
- """
- self.value = value
-
- def get_children(self):
- yield self.value
-
-
-class Subscript(NodeNG):
- """Class representing an :class:`ast.Subscript` node.
-
- >>> node = astroid.extract_node('things[1:3]')
- >>> node
- <Subscript l.1 at 0x7f23b2e71f60>
- """
-
- _astroid_fields = ("value", "slice")
- _other_fields = ("ctx",)
- value = None
- """What is being indexed.
-
- :type: NodeNG or None
- """
- slice = None
- """The slice being used to lookup.
-
- :type: NodeNG or None
- """
-
- def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None):
- """
- :param ctx: Whether the subscripted item is assigned to or loaded from.
- :type ctx: Context or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.ctx = ctx
- """Whether the subscripted item is assigned to or loaded from.
-
- :type: Context or None
- """
-
- super(Subscript, self).__init__(
- lineno=lineno, col_offset=col_offset, parent=parent
- )
-
- # pylint: disable=redefined-builtin; had to use the same name as builtin ast module.
- def postinit(self, value=None, slice=None):
- """Do some setup after initialisation.
-
- :param value: What is being indexed.
- :type value: NodeNG or None
-
- :param slice: The slice being used to lookup.
- :type slice: NodeNG or None
- """
- self.value = value
- self.slice = slice
-
- def get_children(self):
- yield self.value
- yield self.slice
-
-
-class TryExcept(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement):
- """Class representing an :class:`ast.TryExcept` node.
-
- >>> node = astroid.extract_node('''
- try:
- do_something()
- except Exception as error:
- print("Error!")
- ''')
- >>> node
- <TryExcept l.2 at 0x7f23b2e9d908>
- """
-
- _astroid_fields = ("body", "handlers", "orelse")
- _multi_line_block_fields = ("body", "handlers", "orelse")
- body = None
- """The contents of the block to catch exceptions from.
-
- :type: list(NodeNG) or None
- """
- handlers = None
- """The exception handlers.
-
- :type: list(ExceptHandler) or None
- """
- orelse = None
- """The contents of the ``else`` block.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, body=None, handlers=None, orelse=None):
- """Do some setup after initialisation.
-
- :param body: The contents of the block to catch exceptions from.
- :type body: list(NodeNG) or None
-
- :param handlers: The exception handlers.
- :type handlers: list(ExceptHandler) or None
-
- :param orelse: The contents of the ``else`` block.
- :type orelse: list(NodeNG) or None
- """
- self.body = body
- self.handlers = handlers
- self.orelse = orelse
-
- def _infer_name(self, frame, name):
- return name
-
- def block_range(self, lineno):
- """Get a range from the given line number to where this node ends.
-
- :param lineno: The line number to start the range at.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to,
- starting at the given line number.
- :rtype: tuple(int, int)
- """
- last = None
- for exhandler in self.handlers:
- if exhandler.type and lineno == exhandler.type.fromlineno:
- return lineno, lineno
- if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno:
- return lineno, exhandler.body[-1].tolineno
- if last is None:
- last = exhandler.body[0].fromlineno - 1
- return self._elsed_block_range(lineno, self.orelse, last)
-
- def get_children(self):
- yield from self.body
-
- yield from self.handlers or ()
- yield from self.orelse or ()
-
-
-class TryFinally(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement):
- """Class representing an :class:`ast.TryFinally` node.
-
- >>> node = astroid.extract_node('''
- try:
- do_something()
- except Exception as error:
- print("Error!")
- finally:
- print("Cleanup!")
- ''')
- >>> node
- <TryFinally l.2 at 0x7f23b2e41d68>
- """
-
- _astroid_fields = ("body", "finalbody")
- _multi_line_block_fields = ("body", "finalbody")
- body = None
- """The try-except that the finally is attached to.
-
- :type: list(TryExcept) or None
- """
- finalbody = None
- """The contents of the ``finally`` block.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, body=None, finalbody=None):
- """Do some setup after initialisation.
-
- :param body: The try-except that the finally is attached to.
- :type body: list(TryExcept) or None
-
- :param finalbody: The contents of the ``finally`` block.
- :type finalbody: list(NodeNG) or None
- """
- self.body = body
- self.finalbody = finalbody
-
- def block_range(self, lineno):
- """Get a range from the given line number to where this node ends.
-
- :param lineno: The line number to start the range at.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to,
- starting at the given line number.
- :rtype: tuple(int, int)
- """
- child = self.body[0]
- # py2.5 try: except: finally:
- if (
- isinstance(child, TryExcept)
- and child.fromlineno == self.fromlineno
- and child.tolineno >= lineno > self.fromlineno
- ):
- return child.block_range(lineno)
- return self._elsed_block_range(lineno, self.finalbody)
-
- def get_children(self):
- yield from self.body
- yield from self.finalbody
-
-
-class Tuple(_BaseContainer):
- """Class representing an :class:`ast.Tuple` node.
-
- >>> node = astroid.extract_node('(1, 2, 3)')
- >>> node
- <Tuple.tuple l.1 at 0x7f23b2e41780>
- """
-
- _other_fields = ("ctx",)
-
- def __init__(self, ctx=None, lineno=None, col_offset=None, parent=None):
- """
- :param ctx: Whether the tuple is assigned to or loaded from.
- :type ctx: Context or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.ctx = ctx
- """Whether the tuple is assigned to or loaded from.
-
- :type: Context or None
- """
-
- super(Tuple, self).__init__(lineno, col_offset, parent)
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- return "%s.tuple" % BUILTINS
-
- def getitem(self, index, context=None):
- """Get an item from this node.
-
- :param index: The node to use as a subscript index.
- :type index: Const or Slice
- """
- return _container_getitem(self, self.elts, index, context=context)
-
-
-class UnaryOp(NodeNG):
- """Class representing an :class:`ast.UnaryOp` node.
-
- >>> node = astroid.extract_node('-5')
- >>> node
- <UnaryOp l.1 at 0x7f23b2e4e198>
- """
-
- _astroid_fields = ("operand",)
- _other_fields = ("op",)
- operand = None
- """What the unary operator is applied to.
-
- :type: NodeNG or None
- """
-
- def __init__(self, op=None, lineno=None, col_offset=None, parent=None):
- """
- :param op: The operator.
- :type op: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.op = op
- """The operator.
-
- :type: str or None
- """
-
- super(UnaryOp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, operand=None):
- """Do some setup after initialisation.
-
- :param operand: What the unary operator is applied to.
- :type operand: NodeNG or None
- """
- self.operand = operand
-
- # This is set by inference.py
- def _infer_unaryop(self, context=None):
- raise NotImplementedError
-
- def type_errors(self, context=None):
- """Get a list of type errors which can occur during inference.
-
- Each TypeError is represented by a :class:`BadUnaryOperationMessage`,
- which holds the original exception.
-
- :returns: The list of possible type errors.
- :rtype: list(BadUnaryOperationMessage)
- """
- try:
- results = self._infer_unaryop(context=context)
- return [
- result
- for result in results
- if isinstance(result, util.BadUnaryOperationMessage)
- ]
- except exceptions.InferenceError:
- return []
-
- def get_children(self):
- yield self.operand
-
- def op_precedence(self):
- if self.op == "not":
- return OP_PRECEDENCE[self.op]
-
- return super().op_precedence()
-
-
-class While(mixins.MultiLineBlockMixin, mixins.BlockRangeMixIn, Statement):
- """Class representing an :class:`ast.While` node.
-
- >>> node = astroid.extract_node('''
- while condition():
- print("True")
- ''')
- >>> node
- <While l.2 at 0x7f23b2e4e390>
- """
-
- _astroid_fields = ("test", "body", "orelse")
- _multi_line_block_fields = ("body", "orelse")
- test = None
- """The condition that the loop tests.
-
- :type: NodeNG or None
- """
- body = None
- """The contents of the loop.
-
- :type: list(NodeNG) or None
- """
- orelse = None
- """The contents of the ``else`` block.
-
- :type: list(NodeNG) or None
- """
-
- def postinit(self, test=None, body=None, orelse=None):
- """Do some setup after initialisation.
-
- :param test: The condition that the loop tests.
- :type test: NodeNG or None
-
- :param body: The contents of the loop.
- :type body: list(NodeNG) or None
-
- :param orelse: The contents of the ``else`` block.
- :type orelse: list(NodeNG) or None
- """
- self.test = test
- self.body = body
- self.orelse = orelse
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- """The line on which the beginning of this block ends.
-
- :type: int
- """
- return self.test.tolineno
-
- def block_range(self, lineno):
- """Get a range from the given line number to where this node ends.
-
- :param lineno: The line number to start the range at.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to,
- starting at the given line number.
- :rtype: tuple(int, int)
- """
- return self._elsed_block_range(lineno, self.orelse)
-
- def get_children(self):
- yield self.test
-
- yield from self.body
- yield from self.orelse
-
-
-class With(
- mixins.MultiLineBlockMixin,
- mixins.BlockRangeMixIn,
- mixins.AssignTypeMixin,
- Statement,
-):
- """Class representing an :class:`ast.With` node.
-
- >>> node = astroid.extract_node('''
- with open(file_path) as file_:
- print(file_.read())
- ''')
- >>> node
- <With l.2 at 0x7f23b2e4e710>
- """
-
- _astroid_fields = ("items", "body")
- _other_other_fields = ("type_annotation",)
- _multi_line_block_fields = ("body",)
- items = None
- """The pairs of context managers and the names they are assigned to.
-
- :type: list(tuple(NodeNG, AssignName or None)) or None
- """
- body = None
- """The contents of the ``with`` block.
-
- :type: list(NodeNG) or None
- """
- type_annotation = None
- """If present, this will contain the type annotation passed by a type comment
-
- :type: NodeNG or None
- """
-
- def postinit(self, items=None, body=None, type_annotation=None):
- """Do some setup after initialisation.
-
- :param items: The pairs of context managers and the names
- they are assigned to.
- :type items: list(tuple(NodeNG, AssignName or None)) or None
-
- :param body: The contents of the ``with`` block.
- :type body: list(NodeNG) or None
-
- :param type_annotation: If present, the type annotation passed by a type comment.
- :type type_annotation: NodeNG or None
- """
- self.items = items
- self.body = body
- self.type_annotation = type_annotation
-
- @decorators.cachedproperty
- def blockstart_tolineno(self):
- """The line on which the beginning of this block ends.
-
- :type: int
- """
- return self.items[-1][0].tolineno
-
- def get_children(self):
- """Get the child nodes below this node.
-
- :returns: The children.
- :rtype: iterable(NodeNG)
- """
- for expr, var in self.items:
- yield expr
- if var:
- yield var
- yield from self.body
-
-
-class AsyncWith(With):
- """Asynchronous ``with`` built with the ``async`` keyword."""
-
-
-class Yield(NodeNG):
- """Class representing an :class:`ast.Yield` node.
-
- >>> node = astroid.extract_node('yield True')
- >>> node
- <Yield l.1 at 0x7f23b2e4e5f8>
- """
-
- _astroid_fields = ("value",)
- value = None
- """The value to yield.
-
- :type: NodeNG or None
- """
-
- def postinit(self, value=None):
- """Do some setup after initialisation.
-
- :param value: The value to yield.
- :type value: NodeNG or None
- """
- self.value = value
-
- def get_children(self):
- if self.value is not None:
- yield self.value
-
- def _get_yield_nodes_skip_lambdas(self):
- yield self
-
-
-class YieldFrom(Yield):
- """Class representing an :class:`ast.YieldFrom` node."""
-
-
-class DictUnpack(mixins.NoChildrenMixin, NodeNG):
- """Represents the unpacking of dicts into dicts using :pep:`448`."""
-
-
-class FormattedValue(NodeNG):
- """Class representing an :class:`ast.FormattedValue` node.
-
- Represents a :pep:`498` format string.
-
- >>> node = astroid.extract_node('f"Format {type_}"')
- >>> node
- <JoinedStr l.1 at 0x7f23b2e4ed30>
- >>> node.values
- [<Const.str l.1 at 0x7f23b2e4eda0>, <FormattedValue l.1 at 0x7f23b2e4edd8>]
- """
-
- _astroid_fields = ("value", "format_spec")
- value = None
- """The value to be formatted into the string.
-
- :type: NodeNG or None
- """
- conversion = None
- """The type of formatting to be applied to the value.
-
- .. seealso::
- :class:`ast.FormattedValue`
-
- :type: int or None
- """
- format_spec = None
- """The formatting to be applied to the value.
-
- .. seealso::
- :class:`ast.FormattedValue`
-
- :type: JoinedStr or None
- """
-
- def postinit(self, value, conversion=None, format_spec=None):
- """Do some setup after initialisation.
-
- :param value: The value to be formatted into the string.
- :type value: NodeNG
-
- :param conversion: The type of formatting to be applied to the value.
- :type conversion: int or None
-
- :param format_spec: The formatting to be applied to the value.
- :type format_spec: JoinedStr or None
- """
- self.value = value
- self.conversion = conversion
- self.format_spec = format_spec
-
- def get_children(self):
- yield self.value
-
- if self.format_spec is not None:
- yield self.format_spec
-
-
-class JoinedStr(NodeNG):
- """Represents a list of string expressions to be joined.
-
- >>> node = astroid.extract_node('f"Format {type_}"')
- >>> node
- <JoinedStr l.1 at 0x7f23b2e4ed30>
- """
-
- _astroid_fields = ("values",)
- values = None
- """The string expressions to be joined.
-
- :type: list(FormattedValue or Const) or None
- """
-
- def postinit(self, values=None):
- """Do some setup after initialisation.
-
- :param values: The string expressions to be joined.
- :type values: list(FormattedValue or Const) or None
- """
- self.values = values
-
- def get_children(self):
- yield from self.values
-
-
-class NamedExpr(mixins.AssignTypeMixin, NodeNG):
- """Represents the assignment from the assignment expression
-
- >>> module = astroid.parse('if a := 1: pass')
- >>> module.body[0].test
- <NamedExpr l.1 at 0x7f23b2e4ed30>
- """
-
- _astroid_fields = ("target", "value")
- target = None
- """The assignment target
-
- :type: Name
- """
- value = None
- """The value that gets assigned in the expression
-
- :type: NodeNG
- """
-
- def postinit(self, target, value):
- self.target = target
- self.value = value
-
-
-class Unknown(mixins.AssignTypeMixin, NodeNG):
- """This node represents a node in a constructed AST where
- introspection is not possible. At the moment, it's only used in
- the args attribute of FunctionDef nodes where function signature
- introspection failed.
- """
-
- name = "Unknown"
-
- def qname(self):
- return "Unknown"
-
- def infer(self, context=None, **kwargs):
- """Inference on an Unknown node immediately terminates."""
- yield util.Uninferable
-
-
-# constants ##############################################################
-
-CONST_CLS = {
- list: List,
- tuple: Tuple,
- dict: Dict,
- set: Set,
- type(None): Const,
- type(NotImplemented): Const,
-}
-if PY38:
- CONST_CLS[type(...)] = Const
-
-
-def _update_const_classes():
- """update constant classes, so the keys of CONST_CLS can be reused"""
- klasses = (bool, int, float, complex, str, bytes)
- for kls in klasses:
- CONST_CLS[kls] = Const
-
-
-_update_const_classes()
-
-
-def _two_step_initialization(cls, value):
- instance = cls()
- instance.postinit(value)
- return instance
-
-
-def _dict_initialization(cls, value):
- if isinstance(value, dict):
- value = tuple(value.items())
- return _two_step_initialization(cls, value)
-
-
-_CONST_CLS_CONSTRUCTORS = {
- List: _two_step_initialization,
- Tuple: _two_step_initialization,
- Dict: _dict_initialization,
- Set: _two_step_initialization,
- Const: lambda cls, value: cls(value),
-}
-
-
-def const_factory(value):
- """return an astroid node for a python value"""
- # XXX we should probably be stricter here and only consider stuff in
- # CONST_CLS or do better treatment: in case where value is not in CONST_CLS,
- # we should rather recall the builder on this value than returning an empty
- # node (another option being that const_factory shouldn't be called with something
- # not in CONST_CLS)
- assert not isinstance(value, NodeNG)
-
- # Hack for ignoring elements of a sequence
- # or a mapping, in order to avoid transforming
- # each element to an AST. This is fixed in 2.0
- # and this approach is a temporary hack.
- if isinstance(value, (list, set, tuple, dict)):
- elts = []
- else:
- elts = value
-
- try:
- initializer_cls = CONST_CLS[value.__class__]
- initializer = _CONST_CLS_CONSTRUCTORS[initializer_cls]
- return initializer(initializer_cls, elts)
- except (KeyError, AttributeError):
- node = EmptyNode()
- node.object = value
- return node
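-
-# A rough usage sketch of const_factory, following the mapping above:
-#
-#     const_factory(42)        # -> Const node with .value == 42
-#     const_factory([1, 2])    # -> List node; the elements are deliberately
-#                              #    dropped by the sequence hack above
-#     const_factory(object())  # -> EmptyNode wrapping the raw value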
-
-
-def is_from_decorator(node):
- """Return True if the given node is the child of a decorator"""
- parent = node.parent
- while parent is not None:
- if isinstance(parent, Decorators):
- return True
- parent = parent.parent
- return False
diff --git a/venv/Lib/site-packages/astroid/nodes.py b/venv/Lib/site-packages/astroid/nodes.py
deleted file mode 100644
index bf6911a..0000000
--- a/venv/Lib/site-packages/astroid/nodes.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# Copyright (c) 2006-2011, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
-# Copyright (c) 2017 Ashley Whetter <ashley@awhetter.co.uk>
-# Copyright (c) 2017 rr- <rr-@sakuya.pl>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Every available node class.
-
-.. seealso::
- :doc:`ast documentation <green_tree_snakes:nodes>`
-
-All nodes inherit from :class:`~astroid.node_classes.NodeNG`.
-"""
-# pylint: disable=unused-import,redefined-builtin
-
-from astroid.node_classes import (
- Arguments,
- AssignAttr,
- Assert,
- Assign,
- AnnAssign,
- AssignName,
- AugAssign,
- Repr,
- BinOp,
- BoolOp,
- Break,
- Call,
- Compare,
- Comprehension,
- Const,
- Continue,
- Decorators,
- DelAttr,
- DelName,
- Delete,
- Dict,
- Expr,
- Ellipsis,
- EmptyNode,
- ExceptHandler,
- Exec,
- ExtSlice,
- For,
- ImportFrom,
- Attribute,
- Global,
- If,
- IfExp,
- Import,
- Index,
- Keyword,
- List,
- Name,
- NamedExpr,
- Nonlocal,
- Pass,
- Print,
- Raise,
- Return,
- Set,
- Slice,
- Starred,
- Subscript,
- TryExcept,
- TryFinally,
- Tuple,
- UnaryOp,
- While,
- With,
- Yield,
- YieldFrom,
- const_factory,
- AsyncFor,
- Await,
- AsyncWith,
- FormattedValue,
- JoinedStr,
- # Node not present in the builtin ast module.
- DictUnpack,
- Unknown,
-)
-from astroid.scoped_nodes import (
- Module,
- GeneratorExp,
- Lambda,
- DictComp,
- ListComp,
- SetComp,
- FunctionDef,
- ClassDef,
- AsyncFunctionDef,
-)
-
-
-ALL_NODE_CLASSES = (
- AsyncFunctionDef,
- AsyncFor,
- AsyncWith,
- Await,
- Arguments,
- AssignAttr,
- Assert,
- Assign,
- AnnAssign,
- AssignName,
- AugAssign,
- Repr,
- BinOp,
- BoolOp,
- Break,
- Call,
- ClassDef,
- Compare,
- Comprehension,
- Const,
- Continue,
- Decorators,
- DelAttr,
- DelName,
- Delete,
- Dict,
- DictComp,
- DictUnpack,
- Expr,
- Ellipsis,
- EmptyNode,
- ExceptHandler,
- Exec,
- ExtSlice,
- For,
- ImportFrom,
- FunctionDef,
- Attribute,
- GeneratorExp,
- Global,
- If,
- IfExp,
- Import,
- Index,
- Keyword,
- Lambda,
- List,
- ListComp,
- Name,
- NamedExpr,
- Nonlocal,
- Module,
- Pass,
- Print,
- Raise,
- Return,
- Set,
- SetComp,
- Slice,
- Starred,
- Subscript,
- TryExcept,
- TryFinally,
- Tuple,
- UnaryOp,
- While,
- With,
- Yield,
- YieldFrom,
- FormattedValue,
- JoinedStr,
-)
diff --git a/venv/Lib/site-packages/astroid/objects.py b/venv/Lib/site-packages/astroid/objects.py
deleted file mode 100644
index 888ca36..0000000
--- a/venv/Lib/site-packages/astroid/objects.py
+++ /dev/null
@@ -1,282 +0,0 @@
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""
-Inference objects are a way to represent composite AST nodes,
-which are used only as inference results, so they can't be found in the
-original AST tree. For instance, inferring the following frozenset use
-leads to an inferred FrozenSet:
-
- Call(func=Name('frozenset'), args=Tuple(...))
-"""
-
-import builtins
-
-from astroid import bases
-from astroid import decorators
-from astroid import exceptions
-from astroid import MANAGER
-from astroid import node_classes
-from astroid import scoped_nodes
-from astroid import util
-
-
-BUILTINS = builtins.__name__
-objectmodel = util.lazy_import("interpreter.objectmodel")
-
-
-class FrozenSet(node_classes._BaseContainer):
- """class representing a FrozenSet composite node"""
-
- def pytype(self):
- return "%s.frozenset" % BUILTINS
-
- def _infer(self, context=None):
- yield self
-
- @decorators.cachedproperty
- def _proxied(self): # pylint: disable=method-hidden
- ast_builtins = MANAGER.builtins_module
- return ast_builtins.getattr("frozenset")[0]
-
-
-class Super(node_classes.NodeNG):
- """Proxy class over a super call.
-
- This class offers almost the same behaviour as Python's super: it
- performs MRO lookups to retrieve attributes from the parent classes.
-
- The *mro_pointer* is the place in the MRO from where we should
- start looking, not counting it. *mro_type* is the object which
- provides the MRO; it can be either a type or an instance.
- *self_class* is the class where the super call is, while
- *scope* is the function where the super call is.
- """
-
- # pylint: disable=unnecessary-lambda
- special_attributes = util.lazy_descriptor(lambda: objectmodel.SuperModel())
-
- # pylint: disable=super-init-not-called
- def __init__(self, mro_pointer, mro_type, self_class, scope):
- self.type = mro_type
- self.mro_pointer = mro_pointer
- self._class_based = False
- self._self_class = self_class
- self._scope = scope
-
- def _infer(self, context=None):
- yield self
-
- def super_mro(self):
- """Get the MRO which will be used to lookup attributes in this super."""
- if not isinstance(self.mro_pointer, scoped_nodes.ClassDef):
- raise exceptions.SuperError(
- "The first argument to super must be a subtype of "
- "type, not {mro_pointer}.",
- super_=self,
- )
-
- if isinstance(self.type, scoped_nodes.ClassDef):
- # `super(type, type)`, most likely in a class method.
- self._class_based = True
- mro_type = self.type
- else:
- mro_type = getattr(self.type, "_proxied", None)
- if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)):
- raise exceptions.SuperError(
- "The second argument to super must be an "
- "instance or subtype of type, not {type}.",
- super_=self,
- )
-
- if not mro_type.newstyle:
- raise exceptions.SuperError(
- "Unable to call super on old-style classes.", super_=self
- )
-
- mro = mro_type.mro()
- if self.mro_pointer not in mro:
- raise exceptions.SuperError(
- "The second argument to super must be an "
- "instance or subtype of type, not {type}.",
- super_=self,
- )
-
- index = mro.index(self.mro_pointer)
- return mro[index + 1 :]
-
- @decorators.cachedproperty
- def _proxied(self):
- ast_builtins = MANAGER.builtins_module
- return ast_builtins.getattr("super")[0]
-
- def pytype(self):
- return "%s.super" % BUILTINS
-
- def display_type(self):
- return "Super of"
-
- @property
- def name(self):
- """Get the name of the MRO pointer."""
- return self.mro_pointer.name
-
- def qname(self):
- return "super"
-
- def igetattr(self, name, context=None):
- """Retrieve the inferred values of the given attribute name."""
-
- if name in self.special_attributes:
- yield self.special_attributes.lookup(name)
- return
-
- try:
- mro = self.super_mro()
- # Don't let invalid MROs or invalid super calls
- # leak out as is from this function.
- except exceptions.SuperError as exc:
- raise exceptions.AttributeInferenceError(
- (
- "Lookup for {name} on {target!r} because super call {super!r} "
- "is invalid."
- ),
- target=self,
- attribute=name,
- context=context,
- super_=exc.super_,
- ) from exc
- except exceptions.MroError as exc:
- raise exceptions.AttributeInferenceError(
- (
- "Lookup for {name} on {target!r} failed because {cls!r} has an "
- "invalid MRO."
- ),
- target=self,
- attribute=name,
- context=context,
- mros=exc.mros,
- cls=exc.cls,
- ) from exc
- found = False
- for cls in mro:
- if name not in cls.locals:
- continue
-
- found = True
- for inferred in bases._infer_stmts([cls[name]], context, frame=self):
- if not isinstance(inferred, scoped_nodes.FunctionDef):
- yield inferred
- continue
-
- # We can obtain different descriptors from a super depending
- # on what we are accessing and where the super call is.
- if inferred.type == "classmethod":
- yield bases.BoundMethod(inferred, cls)
- elif self._scope.type == "classmethod" and inferred.type == "method":
- yield inferred
- elif self._class_based or inferred.type == "staticmethod":
- yield inferred
- elif bases._is_property(inferred):
- # TODO: support other descriptors as well.
- try:
- yield from inferred.infer_call_result(self, context)
- except exceptions.InferenceError:
- yield util.Uninferable
- else:
- yield bases.BoundMethod(inferred, cls)
-
- if not found:
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- )
-
- def getattr(self, name, context=None):
- return list(self.igetattr(name, context=context))
-
-
-class ExceptionInstance(bases.Instance):
- """Class for instances of exceptions
-
- It has special treatment for some of the exception's attributes,
- which are transformed at runtime into certain concrete objects, such as
- the case of .args.
- """
-
- @decorators.cachedproperty
- def special_attributes(self):
- qname = self.qname()
- instance = objectmodel.BUILTIN_EXCEPTIONS.get(
- qname, objectmodel.ExceptionInstanceModel
- )
- return instance()(self)
-
-
-class DictInstance(bases.Instance):
- """Special kind of instances for dictionaries
-
- This instance knows the underlying object model of the dictionaries, which means
- that methods such as .values or .items can be properly inferred.
- """
-
- # pylint: disable=unnecessary-lambda
- special_attributes = util.lazy_descriptor(lambda: objectmodel.DictModel())
-
-
-# Custom objects tailored for dictionaries, which are used to
-# disambiguate between the types of Python 2 dict's method returns
-# and Python 3 (where they return set like objects).
-class DictItems(bases.Proxy):
- __str__ = node_classes.NodeNG.__str__
- __repr__ = node_classes.NodeNG.__repr__
-
-
-class DictKeys(bases.Proxy):
- __str__ = node_classes.NodeNG.__str__
- __repr__ = node_classes.NodeNG.__repr__
-
-
-class DictValues(bases.Proxy):
- __str__ = node_classes.NodeNG.__str__
- __repr__ = node_classes.NodeNG.__repr__
-
-
-class PartialFunction(scoped_nodes.FunctionDef):
- """A class representing partial function obtained via functools.partial"""
-
- def __init__(
- self, call, name=None, doc=None, lineno=None, col_offset=None, parent=None
- ):
- super().__init__(name, doc, lineno, col_offset, parent)
- self.filled_positionals = len(call.positional_arguments[1:])
- self.filled_args = call.positional_arguments[1:]
- self.filled_keywords = call.keyword_arguments
-
- def infer_call_result(self, caller=None, context=None):
- if context:
- current_passed_keywords = {
- keyword for (keyword, _) in context.callcontext.keywords
- }
- for keyword, value in self.filled_keywords.items():
- if keyword not in current_passed_keywords:
- context.callcontext.keywords.append((keyword, value))
-
- call_context_args = context.callcontext.args or []
- context.callcontext.args = self.filled_args + call_context_args
-
- return super().infer_call_result(caller=caller, context=context)
-
- def qname(self):
- return self.__class__.__name__
-
-
-# TODO: Hack to solve the circular import problem between node_classes and objects
-# This is not needed in 2.0, which has a cleaner design overall
-node_classes.Dict.__bases__ = (node_classes.NodeNG, DictInstance)
diff --git a/venv/Lib/site-packages/astroid/protocols.py b/venv/Lib/site-packages/astroid/protocols.py
deleted file mode 100644
index c1825f1..0000000
--- a/venv/Lib/site-packages/astroid/protocols.py
+++ /dev/null
@@ -1,766 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2017-2018 Ashley Whetter <ashley@awhetter.co.uk>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 rr- <rr-@sakuya.pl>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""this module contains a set of functions to handle python protocols for nodes
-where it makes sense.
-"""
-
-import collections
-import operator as operator_mod
-
-import itertools
-
-from astroid import Store
-from astroid import arguments
-from astroid import bases
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import decorators
-from astroid import node_classes
-from astroid import helpers
-from astroid import nodes
-from astroid import util
-
-raw_building = util.lazy_import("raw_building")
-objects = util.lazy_import("objects")
-
-
-def _reflected_name(name):
- return "__r" + name[2:]
-
-
-def _augmented_name(name):
- return "__i" + name[2:]
-
-
-_CONTEXTLIB_MGR = "contextlib.contextmanager"
-BIN_OP_METHOD = {
- "+": "__add__",
- "-": "__sub__",
- "/": "__truediv__",
- "//": "__floordiv__",
- "*": "__mul__",
- "**": "__pow__",
- "%": "__mod__",
- "&": "__and__",
- "|": "__or__",
- "^": "__xor__",
- "<<": "__lshift__",
- ">>": "__rshift__",
- "@": "__matmul__",
-}
-
-REFLECTED_BIN_OP_METHOD = {
- key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items()
-}
-AUGMENTED_OP_METHOD = {
- key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items()
-}
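-
-# For example, following the tables above: BIN_OP_METHOD["+"] is "__add__",
-# REFLECTED_BIN_OP_METHOD["+"] is "__radd__" and AUGMENTED_OP_METHOD["+="]
-# is "__iadd__".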
-
-UNARY_OP_METHOD = {
- "+": "__pos__",
- "-": "__neg__",
- "~": "__invert__",
- "not": None, # XXX not '__nonzero__'
-}
-_UNARY_OPERATORS = {
- "+": operator_mod.pos,
- "-": operator_mod.neg,
- "~": operator_mod.invert,
- "not": operator_mod.not_,
-}
-
-
-def _infer_unary_op(obj, op):
- func = _UNARY_OPERATORS[op]
- value = func(obj)
- return nodes.const_factory(value)
-
-
-nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op)
-nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op)
-nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op)
-nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op)
-nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op)
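-
-# A rough sketch of the effect of the hooks above: nodes.Const(5) with the
-# unary operator "-" goes through _infer_unary_op(5, "-") and returns a new
-# Const node whose value is -5.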
-
-# Binary operations
-
-BIN_OP_IMPL = {
- "+": lambda a, b: a + b,
- "-": lambda a, b: a - b,
- "/": lambda a, b: a / b,
- "//": lambda a, b: a // b,
- "*": lambda a, b: a * b,
- "**": lambda a, b: a ** b,
- "%": lambda a, b: a % b,
- "&": lambda a, b: a & b,
- "|": lambda a, b: a | b,
- "^": lambda a, b: a ^ b,
- "<<": lambda a, b: a << b,
- ">>": lambda a, b: a >> b,
- "@": operator_mod.matmul,
-}
-for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
- BIN_OP_IMPL[_KEY + "="] = _IMPL
-
-
-@decorators.yes_if_nothing_inferred
-def const_infer_binary_op(self, opnode, operator, other, context, _):
- not_implemented = nodes.Const(NotImplemented)
- if isinstance(other, nodes.Const):
- try:
- impl = BIN_OP_IMPL[operator]
- try:
- yield nodes.const_factory(impl(self.value, other.value))
- except TypeError:
- # ArithmeticError is not enough: float >> float is a TypeError
- yield not_implemented
- except Exception: # pylint: disable=broad-except
- yield util.Uninferable
- except TypeError:
- yield not_implemented
- elif isinstance(self.value, str) and operator == "%":
- # TODO(cpopa): implement string interpolation later on.
- yield util.Uninferable
- else:
- yield not_implemented
-
-
-nodes.Const.infer_binary_op = const_infer_binary_op
-
-
-def _multiply_seq_by_int(self, opnode, other, context):
- node = self.__class__(parent=opnode)
- filtered_elts = (
- helpers.safe_infer(elt, context) or util.Uninferable
- for elt in self.elts
- if elt is not util.Uninferable
- )
- node.elts = list(filtered_elts) * other.value
- return node
-
-
-def _filter_uninferable_nodes(elts, context):
- for elt in elts:
- if elt is util.Uninferable:
- yield nodes.Unknown()
- else:
- for inferred in elt.infer(context):
- if inferred is not util.Uninferable:
- yield inferred
- else:
- yield nodes.Unknown()
-
-
-@decorators.yes_if_nothing_inferred
-def tl_infer_binary_op(self, opnode, operator, other, context, method):
- not_implemented = nodes.Const(NotImplemented)
- if isinstance(other, self.__class__) and operator == "+":
- node = self.__class__(parent=opnode)
- node.elts = list(
- itertools.chain(
- _filter_uninferable_nodes(self.elts, context),
- _filter_uninferable_nodes(other.elts, context),
- )
- )
- yield node
- elif isinstance(other, nodes.Const) and operator == "*":
- if not isinstance(other.value, int):
- yield not_implemented
- return
- yield _multiply_seq_by_int(self, opnode, other, context)
- elif isinstance(other, bases.Instance) and operator == "*":
- # Verify if the instance supports __index__.
- as_index = helpers.class_instance_as_index(other)
- if not as_index:
- yield util.Uninferable
- else:
- yield _multiply_seq_by_int(self, opnode, as_index, context)
- else:
- yield not_implemented
-
-
-nodes.Tuple.infer_binary_op = tl_infer_binary_op
-nodes.List.infer_binary_op = tl_infer_binary_op
-
-
-@decorators.yes_if_nothing_inferred
-def instance_class_infer_binary_op(self, opnode, operator, other, context, method):
- return method.infer_call_result(self, context)
-
-
-bases.Instance.infer_binary_op = instance_class_infer_binary_op
-nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op
-
-
-# assignment ##################################################################
-
-"""the assigned_stmts method is responsible to return the assigned statement
-(e.g. not inferred) according to the assignment type.
-
-The `assign_path` argument is used to record the lhs path of the original node.
-For instance if we want assigned statements for 'c' in 'a, (b,c)', assign_path
-will be [1, 1] once arrived to the Assign node.
-
-The `context` argument is the current inference context which should be given
-to any intermediary inference necessary.
-"""
-
-
-def _resolve_looppart(parts, assign_path, context):
- """recursive function to resolve multiple assignments on loops"""
- assign_path = assign_path[:]
- index = assign_path.pop(0)
- for part in parts:
- if part is util.Uninferable:
- continue
- if not hasattr(part, "itered"):
- continue
- try:
- itered = part.itered()
- except TypeError:
- continue
- for stmt in itered:
- index_node = nodes.Const(index)
- try:
- assigned = stmt.getitem(index_node, context)
- except (
- AttributeError,
- exceptions.AstroidTypeError,
- exceptions.AstroidIndexError,
- ):
- continue
- if not assign_path:
-                # we have resolved the assignment path,
-                # don't infer the last part
- yield assigned
- elif assigned is util.Uninferable:
- break
- else:
-                # we are not yet on the last part of the path,
-                # so search on each possibly inferred value
- try:
- yield from _resolve_looppart(
- assigned.infer(context), assign_path, context
- )
- except exceptions.InferenceError:
- break
-
-
-@decorators.raise_if_nothing_inferred
-def for_assigned_stmts(self, node=None, context=None, assign_path=None):
- if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False):
- # Skip inferring of async code for now
- return dict(node=self, unknown=node, assign_path=assign_path, context=context)
- if assign_path is None:
- for lst in self.iter.infer(context):
- if isinstance(lst, (nodes.Tuple, nodes.List)):
- yield from lst.elts
- else:
- yield from _resolve_looppart(self.iter.infer(context), assign_path, context)
- return dict(node=self, unknown=node, assign_path=assign_path, context=context)
-
-
-nodes.For.assigned_stmts = for_assigned_stmts
-nodes.Comprehension.assigned_stmts = for_assigned_stmts
-
-
-def sequence_assigned_stmts(self, node=None, context=None, assign_path=None):
- if assign_path is None:
- assign_path = []
- try:
- index = self.elts.index(node)
- except ValueError as exc:
- raise exceptions.InferenceError(
- "Tried to retrieve a node {node!r} which does not exist",
- node=self,
- assign_path=assign_path,
- context=context,
- ) from exc
-
- assign_path.insert(0, index)
- return self.parent.assigned_stmts(
- node=self, context=context, assign_path=assign_path
- )
-
-
-nodes.Tuple.assigned_stmts = sequence_assigned_stmts
-nodes.List.assigned_stmts = sequence_assigned_stmts
-
-
-def assend_assigned_stmts(self, node=None, context=None, assign_path=None):
- return self.parent.assigned_stmts(node=self, context=context)
-
-
-nodes.AssignName.assigned_stmts = assend_assigned_stmts
-nodes.AssignAttr.assigned_stmts = assend_assigned_stmts
-
-
-def _arguments_infer_argname(self, name, context):
- # arguments information may be missing, in which case we can't do anything
- # more
- if not (self.args or self.vararg or self.kwarg):
- yield util.Uninferable
- return
- # first argument of instance/class method
- if self.args and getattr(self.args[0], "name", None) == name:
- functype = self.parent.type
- cls = self.parent.parent.scope()
- is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass"
- # If this is a metaclass, then the first argument will always
- # be the class, not an instance.
- if is_metaclass or functype == "classmethod":
- yield cls
- return
- if functype == "method":
- yield bases.Instance(cls)
- return
-
- if context and context.callcontext:
- call_site = arguments.CallSite(context.callcontext, context.extra_context)
- yield from call_site.infer_argument(self.parent, name, context)
- return
-
- if name == self.vararg:
- vararg = nodes.const_factory(())
- vararg.parent = self
- yield vararg
- return
- if name == self.kwarg:
- kwarg = nodes.const_factory({})
- kwarg.parent = self
- yield kwarg
- return
-    # if there is a default value, yield it, and then yield Uninferable to
-    # reflect that we can't guess the given argument's value
- try:
- context = contextmod.copy_context(context)
- yield from self.default_value(name).infer(context)
- yield util.Uninferable
- except exceptions.NoDefault:
- yield util.Uninferable
-
-
-def arguments_assigned_stmts(self, node=None, context=None, assign_path=None):
- if context.callcontext:
- # reset call context/name
- callcontext = context.callcontext
- context = contextmod.copy_context(context)
- context.callcontext = None
- args = arguments.CallSite(callcontext)
- return args.infer_argument(self.parent, node.name, context)
- return _arguments_infer_argname(self, node.name, context)
-
-
-nodes.Arguments.assigned_stmts = arguments_assigned_stmts
-
-
-@decorators.raise_if_nothing_inferred
-def assign_assigned_stmts(self, node=None, context=None, assign_path=None):
- if not assign_path:
- yield self.value
- return None
- yield from _resolve_assignment_parts(
- self.value.infer(context), assign_path, context
- )
-
- return dict(node=self, unknown=node, assign_path=assign_path, context=context)
-
-
-def assign_annassigned_stmts(self, node=None, context=None, assign_path=None):
- for inferred in assign_assigned_stmts(self, node, context, assign_path):
- if inferred is None:
- yield util.Uninferable
- else:
- yield inferred
-
-
-nodes.Assign.assigned_stmts = assign_assigned_stmts
-nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts
-nodes.AugAssign.assigned_stmts = assign_assigned_stmts
-
-
-def _resolve_assignment_parts(parts, assign_path, context):
- """recursive function to resolve multiple assignments"""
- assign_path = assign_path[:]
- index = assign_path.pop(0)
- for part in parts:
- assigned = None
- if isinstance(part, nodes.Dict):
- # A dictionary in an iterating context
- try:
- assigned, _ = part.items[index]
- except IndexError:
- return
-
- elif hasattr(part, "getitem"):
- index_node = nodes.Const(index)
- try:
- assigned = part.getitem(index_node, context)
- except (exceptions.AstroidTypeError, exceptions.AstroidIndexError):
- return
-
- if not assigned:
- return
-
- if not assign_path:
-            # we have resolved the assignment path, don't infer the
-            # last part
- yield assigned
- elif assigned is util.Uninferable:
- return
- else:
-            # we are not yet on the last part of the path, so search on each
-            # possibly inferred value
- try:
- yield from _resolve_assignment_parts(
- assigned.infer(context), assign_path, context
- )
- except exceptions.InferenceError:
- return
-
-
-@decorators.raise_if_nothing_inferred
-def excepthandler_assigned_stmts(self, node=None, context=None, assign_path=None):
- for assigned in node_classes.unpack_infer(self.type):
- if isinstance(assigned, nodes.ClassDef):
- assigned = objects.ExceptionInstance(assigned)
-
- yield assigned
- return dict(node=self, unknown=node, assign_path=assign_path, context=context)
-
-
-nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts
-
-
-def _infer_context_manager(self, mgr, context):
- inferred = next(mgr.infer(context=context))
- if isinstance(inferred, bases.Generator):
- # Check if it is decorated with contextlib.contextmanager.
- func = inferred.parent
- if not func.decorators:
- raise exceptions.InferenceError(
- "No decorators found on inferred generator %s", node=func
- )
-
- for decorator_node in func.decorators.nodes:
- decorator = next(decorator_node.infer(context))
- if isinstance(decorator, nodes.FunctionDef):
- if decorator.qname() == _CONTEXTLIB_MGR:
- break
- else:
- # It doesn't interest us.
- raise exceptions.InferenceError(node=func)
-
- # Get the first yield point. If it has multiple yields,
- # then a RuntimeError will be raised.
-
- possible_yield_points = func.nodes_of_class(nodes.Yield)
- # Ignore yields in nested functions
- yield_point = next(
- (node for node in possible_yield_points if node.scope() == func), None
- )
- if yield_point:
- if not yield_point.value:
- const = nodes.Const(None)
- const.parent = yield_point
- const.lineno = yield_point.lineno
- yield const
- else:
- yield from yield_point.value.infer(context=context)
- elif isinstance(inferred, bases.Instance):
- try:
- enter = next(inferred.igetattr("__enter__", context=context))
- except (exceptions.InferenceError, exceptions.AttributeInferenceError):
- raise exceptions.InferenceError(node=inferred)
- if not isinstance(enter, bases.BoundMethod):
- raise exceptions.InferenceError(node=enter)
- yield from enter.infer_call_result(self, context)
- else:
- raise exceptions.InferenceError(node=mgr)
-
-
-@decorators.raise_if_nothing_inferred
-def with_assigned_stmts(self, node=None, context=None, assign_path=None):
- """Infer names and other nodes from a *with* statement.
-
-    This only enables inference for the names bound in a *with* statement.
-    For instance, in the following code, inferring ``ContextManager()`` will
-    return the ``ContextManager`` class, not whatever ``__enter__`` returns.
-    We do this intentionally, because we consider that the context manager
-    result is whatever ``__enter__`` returns, and that is what gets bound
-    using the ``as`` keyword.
-
- class ContextManager(object):
- def __enter__(self):
- return 42
- with ContextManager() as f:
- pass
-
- # ContextManager().infer() will return ContextManager
- # f.infer() will return 42.
-
- Arguments:
- self: nodes.With
- node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
- context: Inference context used for caching already inferred objects
- assign_path:
- A list of indices, where each index specifies what item to fetch from
- the inference results.
- """
- try:
- mgr = next(mgr for (mgr, vars) in self.items if vars == node)
- except StopIteration:
- return None
- if assign_path is None:
- yield from _infer_context_manager(self, mgr, context)
- else:
- for result in _infer_context_manager(self, mgr, context):
- # Walk the assign_path and get the item at the final index.
- obj = result
- for index in assign_path:
- if not hasattr(obj, "elts"):
- raise exceptions.InferenceError(
- "Wrong type ({targets!r}) for {node!r} assignment",
- node=self,
- targets=node,
- assign_path=assign_path,
- context=context,
- )
- try:
- obj = obj.elts[index]
- except IndexError as exc:
- raise exceptions.InferenceError(
- "Tried to infer a nonexistent target with index {index} "
- "in {node!r}.",
- node=self,
- targets=node,
- assign_path=assign_path,
- context=context,
- ) from exc
- except TypeError as exc:
- raise exceptions.InferenceError(
- "Tried to unpack a non-iterable value " "in {node!r}.",
- node=self,
- targets=node,
- assign_path=assign_path,
- context=context,
- ) from exc
- yield obj
- return dict(node=self, unknown=node, assign_path=assign_path, context=context)
-
-
-nodes.With.assigned_stmts = with_assigned_stmts
-
-
-@decorators.raise_if_nothing_inferred
-def named_expr_assigned_stmts(self, node, context=None, assign_path=None):
- """Infer names and other nodes from an assignment expression"""
- if self.target == node:
- yield from self.value.infer(context=context)
- else:
- raise exceptions.InferenceError(
- "Cannot infer NamedExpr node {node!r}",
- node=self,
- assign_path=assign_path,
- context=context,
- )
-
-
-nodes.NamedExpr.assigned_stmts = named_expr_assigned_stmts
-
-
-@decorators.yes_if_nothing_inferred
-def starred_assigned_stmts(self, node=None, context=None, assign_path=None):
- """
- Arguments:
- self: nodes.Starred
- node: a node related to the current underlying Node.
- context: Inference context used for caching already inferred objects
- assign_path:
- A list of indices, where each index specifies what item to fetch from
- the inference results.
- """
- # pylint: disable=too-many-locals,too-many-branches,too-many-statements
- def _determine_starred_iteration_lookups(starred, target, lookups):
- # Determine the lookups for the rhs of the iteration
- itered = target.itered()
- for index, element in enumerate(itered):
- if (
- isinstance(element, nodes.Starred)
- and element.value.name == starred.value.name
- ):
- lookups.append((index, len(itered)))
- break
- if isinstance(element, nodes.Tuple):
- lookups.append((index, len(element.itered())))
- _determine_starred_iteration_lookups(starred, element, lookups)
-
- stmt = self.statement()
- if not isinstance(stmt, (nodes.Assign, nodes.For)):
- raise exceptions.InferenceError(
- "Statement {stmt!r} enclosing {node!r} " "must be an Assign or For node.",
- node=self,
- stmt=stmt,
- unknown=node,
- context=context,
- )
-
- if context is None:
- context = contextmod.InferenceContext()
-
- if isinstance(stmt, nodes.Assign):
- value = stmt.value
- lhs = stmt.targets[0]
-
- if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
- raise exceptions.InferenceError(
- "Too many starred arguments in the " " assignment targets {lhs!r}.",
- node=self,
- targets=lhs,
- unknown=node,
- context=context,
- )
-
- try:
- rhs = next(value.infer(context))
- except exceptions.InferenceError:
- yield util.Uninferable
- return
- if rhs is util.Uninferable or not hasattr(rhs, "itered"):
- yield util.Uninferable
- return
-
- try:
- elts = collections.deque(rhs.itered())
- except TypeError:
- yield util.Uninferable
- return
-
-        # Unpack the values from the rhs of the assignment iteratively,
-        # until we find the starred node. What remains will be the list of
-        # values which the Starred node represents. This is done in two
-        # steps: from left to right to remove anything before the starred
-        # node, and from right to left to remove anything after it
-        # (see the illustrative sketch after this function).
-
- for index, left_node in enumerate(lhs.elts):
- if not isinstance(left_node, nodes.Starred):
- if not elts:
- break
- elts.popleft()
- continue
- lhs_elts = collections.deque(reversed(lhs.elts[index:]))
- for right_node in lhs_elts:
- if not isinstance(right_node, nodes.Starred):
- if not elts:
- break
- elts.pop()
- continue
- # We're done
- packed = nodes.List(
- ctx=Store, parent=self, lineno=lhs.lineno, col_offset=lhs.col_offset
- )
- packed.postinit(elts=elts)
- yield packed
- break
-
- if isinstance(stmt, nodes.For):
- try:
- inferred_iterable = next(stmt.iter.infer(context=context))
- except exceptions.InferenceError:
- yield util.Uninferable
- return
- if inferred_iterable is util.Uninferable or not hasattr(
- inferred_iterable, "itered"
- ):
- yield util.Uninferable
- return
- try:
- itered = inferred_iterable.itered()
- except TypeError:
- yield util.Uninferable
- return
-
- target = stmt.target
-
- if not isinstance(target, nodes.Tuple):
- raise exceptions.InferenceError(
- "Could not make sense of this, the target must be a tuple",
- context=context,
- )
-
- lookups = []
- _determine_starred_iteration_lookups(self, target, lookups)
- if not lookups:
- raise exceptions.InferenceError(
- "Could not make sense of this, needs at least a lookup", context=context
- )
-
-    # Make the last lookup a slice, since that's what we want for a Starred node
- last_element_index, last_element_length = lookups[-1]
- is_starred_last = last_element_index == (last_element_length - 1)
-
- lookup_slice = slice(
- last_element_index,
- None if is_starred_last else (last_element_length - last_element_index),
- )
- lookups[-1] = lookup_slice
-
- for element in itered:
-
- # We probably want to infer the potential values *for each* element in an
- # iterable, but we can't infer a list of all values, when only a list of
- # step values are expected:
- #
- # for a, *b in [...]:
- # b
- #
- # *b* should now point to just the elements at that particular iteration step,
- # which astroid can't know about.
-
- found_element = None
- for lookup in lookups:
- if not hasattr(element, "itered"):
- break
- if not isinstance(lookup, slice):
- # Grab just the index, not the whole length
- lookup = lookup[0]
- try:
- itered_inner_element = element.itered()
- element = itered_inner_element[lookup]
- except IndexError:
- break
- except TypeError:
- # Most likely the itered() call failed, cannot make sense of this
- yield util.Uninferable
- return
- else:
- found_element = element
-
- unpacked = nodes.List(
- ctx=Store, parent=self, lineno=self.lineno, col_offset=self.col_offset
- )
- unpacked.postinit(elts=found_element or [])
- yield unpacked
- return
-
- yield util.Uninferable
-
-
-nodes.Starred.assigned_stmts = starred_assigned_stmts
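-
-
-# An illustrative sketch of the unpacking performed above (not part of the
-# implementation; it assumes the public helper ``astroid.extract_node``):
-#
-#     >>> import astroid
-#     >>> assign = astroid.extract_node("a, *b = [1, 2, 3, 4]")
-#     >>> starred = assign.targets[0].elts[1]  # the Starred node for 'b'
-#     >>> list(starred.assigned_stmts())       # roughly: [<List [2, 3, 4]>]
-#
-# Everything consumed by the names around the starred target is stripped from
-# the ends of the rhs; whatever remains is packed into a fresh List node.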
diff --git a/venv/Lib/site-packages/astroid/raw_building.py b/venv/Lib/site-packages/astroid/raw_building.py
deleted file mode 100644
index d94f924..0000000
--- a/venv/Lib/site-packages/astroid/raw_building.py
+++ /dev/null
@@ -1,468 +0,0 @@
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
-# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2014 Google, Inc.
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2015 Ovidiu Sabou <ovidiu@sabou.org>
-# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""this module contains a set of functions to create astroid trees from scratch
-(build_* functions) or from living object (object_build_* functions)
-"""
-
-import builtins
-import inspect
-import os
-import sys
-import types
-
-from astroid import bases
-from astroid import manager
-from astroid import node_classes
-from astroid import nodes
-
-
-MANAGER = manager.AstroidManager()
-# the keys of CONST_CLS, e.g. Python builtin types
-
-_CONSTANTS = tuple(node_classes.CONST_CLS)
-_BUILTINS = vars(builtins)
-TYPE_NONE = type(None)
-TYPE_NOTIMPLEMENTED = type(NotImplemented)
-TYPE_ELLIPSIS = type(...)
-
-
-def _io_discrepancy(member):
- # _io module names itself `io`: http://bugs.python.org/issue18602
- member_self = getattr(member, "__self__", None)
- return (
- member_self
- and inspect.ismodule(member_self)
- and member_self.__name__ == "_io"
- and member.__module__ == "io"
- )
-
-
-def _attach_local_node(parent, node, name):
- node.name = name # needed by add_local_node
- parent.add_local_node(node)
-
-
-def _add_dunder_class(func, member):
- """Add a __class__ member to the given func node, if we can determine it."""
- python_cls = member.__class__
- cls_name = getattr(python_cls, "__name__", None)
- if not cls_name:
- return
- cls_bases = [ancestor.__name__ for ancestor in python_cls.__bases__]
- ast_klass = build_class(cls_name, cls_bases, python_cls.__doc__)
- func.instance_attrs["__class__"] = [ast_klass]
-
-
-_marker = object()
-
-
-def attach_dummy_node(node, name, runtime_object=_marker):
- """create a dummy node and register it in the locals of the given
- node with the specified name
- """
- enode = nodes.EmptyNode()
- enode.object = runtime_object
- _attach_local_node(node, enode, name)
-
-
-def _has_underlying_object(self):
- return self.object is not None and self.object is not _marker
-
-
-nodes.EmptyNode.has_underlying_object = _has_underlying_object
-
-
-def attach_const_node(node, name, value):
- """create a Const node and register it in the locals of the given
- node with the specified name
- """
- if name not in node.special_attributes:
- _attach_local_node(node, nodes.const_factory(value), name)
-
-
-def attach_import_node(node, modname, membername):
- """create a ImportFrom node and register it in the locals of the given
- node with the specified name
- """
- from_node = nodes.ImportFrom(modname, [(membername, None)])
- _attach_local_node(node, from_node, membername)
-
-
-def build_module(name, doc=None):
- """create and initialize an astroid Module node"""
- node = nodes.Module(name, doc, pure_python=False)
- node.package = False
- node.parent = None
- return node
-
-
-def build_class(name, basenames=(), doc=None):
- """create and initialize an astroid ClassDef node"""
- node = nodes.ClassDef(name, doc)
- for base in basenames:
- basenode = nodes.Name()
- basenode.name = base
- node.bases.append(basenode)
- basenode.parent = node
- return node
-
-
-def build_function(name, args=None, posonlyargs=None, defaults=None, doc=None):
- """create and initialize an astroid FunctionDef node"""
- args, defaults, posonlyargs = args or [], defaults or [], posonlyargs or []
- # first argument is now a list of decorators
- func = nodes.FunctionDef(name, doc)
- func.args = argsnode = nodes.Arguments()
- argsnode.args = []
- argsnode.posonlyargs = []
- for arg in args:
- argsnode.args.append(nodes.Name())
- argsnode.args[-1].name = arg
- argsnode.args[-1].parent = argsnode
- for arg in posonlyargs:
- argsnode.posonlyargs.append(nodes.Name())
- argsnode.posonlyargs[-1].name = arg
- argsnode.posonlyargs[-1].parent = argsnode
- argsnode.defaults = []
- for default in defaults:
- argsnode.defaults.append(nodes.const_factory(default))
- argsnode.defaults[-1].parent = argsnode
- argsnode.kwarg = None
- argsnode.vararg = None
- argsnode.parent = func
- if args:
- register_arguments(func)
- return func
-
-
-def build_from_import(fromname, names):
- """create and initialize an astroid ImportFrom import statement"""
- return nodes.ImportFrom(fromname, [(name, None) for name in names])
-
-
-def register_arguments(func, args=None):
- """add given arguments to local
-
- args is a list that may contains nested lists
- (i.e. def func(a, (b, c, d)): ...)
- """
- if args is None:
- args = func.args.args
- if func.args.vararg:
- func.set_local(func.args.vararg, func.args)
- if func.args.kwarg:
- func.set_local(func.args.kwarg, func.args)
- for arg in args:
- if isinstance(arg, nodes.Name):
- func.set_local(arg.name, arg)
- else:
- register_arguments(func, arg.elts)
-
-
-def object_build_class(node, member, localname):
- """create astroid for a living class object"""
- basenames = [base.__name__ for base in member.__bases__]
- return _base_class_object_build(node, member, basenames, localname=localname)
-
-
-def object_build_function(node, member, localname):
- """create astroid for a living function object"""
- signature = inspect.signature(member)
- args = []
- defaults = []
- posonlyargs = []
- for param_name, param in signature.parameters.items():
- if param.kind == inspect.Parameter.POSITIONAL_ONLY:
- posonlyargs.append(param_name)
- elif param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
- args.append(param_name)
- elif param.kind == inspect.Parameter.VAR_POSITIONAL:
- args.append(param_name)
- elif param.kind == inspect.Parameter.VAR_KEYWORD:
- args.append(param_name)
- if param.default is not inspect._empty:
- defaults.append(param.default)
- func = build_function(
- getattr(member, "__name__", None) or localname,
- args,
- posonlyargs,
- defaults,
- member.__doc__,
- )
- node.add_local_node(func, localname)
-
-
-def object_build_datadescriptor(node, member, name):
- """create astroid for a living data descriptor object"""
- return _base_class_object_build(node, member, [], name)
-
-
-def object_build_methoddescriptor(node, member, localname):
- """create astroid for a living method descriptor object"""
- # FIXME get arguments ?
- func = build_function(
- getattr(member, "__name__", None) or localname, doc=member.__doc__
- )
-    # set the node's arguments to None to signal that we have no information,
-    # not an empty argument list
- func.args.args = None
- node.add_local_node(func, localname)
- _add_dunder_class(func, member)
-
-
-def _base_class_object_build(node, member, basenames, name=None, localname=None):
- """create astroid for a living class object, with a given set of base names
- (e.g. ancestors)
- """
- klass = build_class(
- name or getattr(member, "__name__", None) or localname,
- basenames,
- member.__doc__,
- )
- klass._newstyle = isinstance(member, type)
- node.add_local_node(klass, localname)
- try:
- # limit the instantiation trick since it's too dangerous
- # (such as infinite test execution...)
- # this at least resolves common case such as Exception.args,
- # OSError.errno
- if issubclass(member, Exception):
- instdict = member().__dict__
- else:
- raise TypeError
- except TypeError:
- pass
- else:
- for item_name, obj in instdict.items():
- valnode = nodes.EmptyNode()
- valnode.object = obj
- valnode.parent = klass
- valnode.lineno = 1
- klass.instance_attrs[item_name] = [valnode]
- return klass
-
-
-def _build_from_function(node, name, member, module):
- # verify this is not an imported function
- try:
- code = member.__code__
- except AttributeError:
- # Some implementations don't provide the code object,
- # such as Jython.
- code = None
- filename = getattr(code, "co_filename", None)
- if filename is None:
- assert isinstance(member, object)
- object_build_methoddescriptor(node, member, name)
- elif filename != getattr(module, "__file__", None):
- attach_dummy_node(node, name, member)
- else:
- object_build_function(node, member, name)
-
-
-class InspectBuilder:
- """class for building nodes from living object
-
- this is actually a really minimal representation, including only Module,
- FunctionDef and ClassDef nodes and some others as guessed.
- """
-
- def __init__(self):
- self._done = {}
- self._module = None
-
- def inspect_build(self, module, modname=None, path=None):
- """build astroid from a living module (i.e. using inspect)
- this is used when there is no python source code available (either
- because it's a built-in module or because the .py is not available)
- """
- self._module = module
- if modname is None:
- modname = module.__name__
- try:
- node = build_module(modname, module.__doc__)
- except AttributeError:
- # in jython, java modules have no __doc__ (see #109562)
- node = build_module(modname)
- node.file = node.path = os.path.abspath(path) if path else path
- node.name = modname
- MANAGER.cache_module(node)
- node.package = hasattr(module, "__path__")
- self._done = {}
- self.object_build(node, module)
- return node
-
- def object_build(self, node, obj):
- """recursive method which create a partial ast from real objects
- (only function, class, and method are handled)
- """
- if obj in self._done:
- return self._done[obj]
- self._done[obj] = node
- for name in dir(obj):
- try:
- member = getattr(obj, name)
- except AttributeError:
- # damned ExtensionClass.Base, I know you're there !
- attach_dummy_node(node, name)
- continue
- if inspect.ismethod(member):
- member = member.__func__
- if inspect.isfunction(member):
- _build_from_function(node, name, member, self._module)
- elif inspect.isbuiltin(member):
- if not _io_discrepancy(member) and self.imported_member(
- node, member, name
- ):
- continue
- object_build_methoddescriptor(node, member, name)
- elif inspect.isclass(member):
- if self.imported_member(node, member, name):
- continue
- if member in self._done:
- class_node = self._done[member]
- if class_node not in node.locals.get(name, ()):
- node.add_local_node(class_node, name)
- else:
- class_node = object_build_class(node, member, name)
- # recursion
- self.object_build(class_node, member)
- if name == "__class__" and class_node.parent is None:
- class_node.parent = self._done[self._module]
- elif inspect.ismethoddescriptor(member):
- assert isinstance(member, object)
- object_build_methoddescriptor(node, member, name)
- elif inspect.isdatadescriptor(member):
- assert isinstance(member, object)
- object_build_datadescriptor(node, member, name)
- elif isinstance(member, _CONSTANTS):
- attach_const_node(node, name, member)
- elif inspect.isroutine(member):
- # This should be called for Jython, where some builtin
-                # methods aren't caught by the isbuiltin branch.
- _build_from_function(node, name, member, self._module)
- else:
- # create an empty node so that the name is actually defined
- attach_dummy_node(node, name, member)
- return None
-
- def imported_member(self, node, member, name):
- """verify this is not an imported class or handle it"""
-        # /!\ some classes like ExtensionClass don't have a __module__
-        # attribute! Also, this may trigger an exception on badly built modules
- # (see http://www.logilab.org/ticket/57299 for instance)
- try:
- modname = getattr(member, "__module__", None)
- except TypeError:
- modname = None
- if modname is None:
- if name in ("__new__", "__subclasshook__"):
- # Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14)
- # >>> print object.__new__.__module__
- # None
- modname = builtins.__name__
- else:
- attach_dummy_node(node, name, member)
- return True
-
- real_name = {"gtk": "gtk_gtk", "_io": "io"}.get(modname, modname)
-
- if real_name != self._module.__name__:
- # check if it sounds valid and then add an import node, else use a
- # dummy node
- try:
- getattr(sys.modules[modname], name)
- except (KeyError, AttributeError):
- attach_dummy_node(node, name, member)
- else:
- attach_import_node(node, modname, name)
- return True
- return False
-
-
-### astroid bootstrapping ######################################################
-
-_CONST_PROXY = {}
-
-# TODO : find a nicer way to handle this situation;
-def _set_proxied(const):
- return _CONST_PROXY[const.value.__class__]
-
-
-def _astroid_bootstrapping():
- """astroid bootstrapping the builtins module"""
-    # this bootstrapping is necessary since we need the Const nodes to
-    # inspect_build builtins; only then can we proxy Const
-    # (see the illustrative note after the bootstrapping call below)
- builder = InspectBuilder()
- astroid_builtin = builder.inspect_build(builtins)
-
- # pylint: disable=redefined-outer-name
- for cls, node_cls in node_classes.CONST_CLS.items():
- if cls is TYPE_NONE:
- proxy = build_class("NoneType")
- proxy.parent = astroid_builtin
- elif cls is TYPE_NOTIMPLEMENTED:
- proxy = build_class("NotImplementedType")
- proxy.parent = astroid_builtin
- elif cls is TYPE_ELLIPSIS:
- proxy = build_class("Ellipsis")
- proxy.parent = astroid_builtin
- else:
- proxy = astroid_builtin.getattr(cls.__name__)[0]
- if cls in (dict, list, set, tuple):
- node_cls._proxied = proxy
- else:
- _CONST_PROXY[cls] = proxy
-
- # Set the builtin module as parent for some builtins.
- nodes.Const._proxied = property(_set_proxied)
-
- _GeneratorType = nodes.ClassDef(
- types.GeneratorType.__name__, types.GeneratorType.__doc__
- )
- _GeneratorType.parent = astroid_builtin
- bases.Generator._proxied = _GeneratorType
- builder.object_build(bases.Generator._proxied, types.GeneratorType)
-
- if hasattr(types, "AsyncGeneratorType"):
- # pylint: disable=no-member; AsyncGeneratorType
- _AsyncGeneratorType = nodes.ClassDef(
- types.AsyncGeneratorType.__name__, types.AsyncGeneratorType.__doc__
- )
- _AsyncGeneratorType.parent = astroid_builtin
- bases.AsyncGenerator._proxied = _AsyncGeneratorType
- builder.object_build(bases.AsyncGenerator._proxied, types.AsyncGeneratorType)
- builtin_types = (
- types.GetSetDescriptorType,
- types.GeneratorType,
- types.MemberDescriptorType,
- TYPE_NONE,
- TYPE_NOTIMPLEMENTED,
- types.FunctionType,
- types.MethodType,
- types.BuiltinFunctionType,
- types.ModuleType,
- types.TracebackType,
- )
- for _type in builtin_types:
- if _type.__name__ not in astroid_builtin:
- cls = nodes.ClassDef(_type.__name__, _type.__doc__)
- cls.parent = astroid_builtin
- builder.object_build(cls, _type)
- astroid_builtin[_type.__name__] = cls
-
-
-_astroid_bootstrapping()
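-
-# After bootstrapping, every Const node can be proxied to the ClassDef built
-# above for its Python type (an illustrative sketch, not part of the module):
-#
-#     >>> const = nodes.Const(1)
-#     >>> const._proxied.name
-#     'int'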
diff --git a/venv/Lib/site-packages/astroid/rebuilder.py b/venv/Lib/site-packages/astroid/rebuilder.py
deleted file mode 100644
index fb78f7b..0000000
--- a/venv/Lib/site-packages/astroid/rebuilder.py
+++ /dev/null
@@ -1,1090 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2009-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014 Alexander Presnyakov <flagist0@gmail.com>
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
-# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 rr- <rr-@sakuya.pl>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""this module contains utilities for rebuilding a _ast tree in
-order to get a single Astroid representation
-"""
-
-import sys
-
-import astroid
-from astroid._ast import _parse, _get_parser_module, parse_function_type_comment
-from astroid import nodes
-
-
-CONST_NAME_TRANSFORMS = {"None": None, "True": True, "False": False}
-
-REDIRECT = {
- "arguments": "Arguments",
- "comprehension": "Comprehension",
- "ListCompFor": "Comprehension",
- "GenExprFor": "Comprehension",
- "excepthandler": "ExceptHandler",
- "keyword": "Keyword",
-}
-PY37 = sys.version_info >= (3, 7)
-PY38 = sys.version_info >= (3, 8)
-
-
-def _binary_operators_from_module(module):
- binary_operators = {
- module.Add: "+",
- module.BitAnd: "&",
- module.BitOr: "|",
- module.BitXor: "^",
- module.Div: "/",
- module.FloorDiv: "//",
- module.MatMult: "@",
- module.Mod: "%",
- module.Mult: "*",
- module.Pow: "**",
- module.Sub: "-",
- module.LShift: "<<",
- module.RShift: ">>",
- }
- return binary_operators
-
-
-def _bool_operators_from_module(module):
- return {module.And: "and", module.Or: "or"}
-
-
-def _unary_operators_from_module(module):
- return {module.UAdd: "+", module.USub: "-", module.Not: "not", module.Invert: "~"}
-
-
-def _compare_operators_from_module(module):
- return {
- module.Eq: "==",
- module.Gt: ">",
- module.GtE: ">=",
- module.In: "in",
- module.Is: "is",
- module.IsNot: "is not",
- module.Lt: "<",
- module.LtE: "<=",
- module.NotEq: "!=",
- module.NotIn: "not in",
- }
-
-
-def _contexts_from_module(module):
- return {
- module.Load: astroid.Load,
- module.Store: astroid.Store,
- module.Del: astroid.Del,
- module.Param: astroid.Store,
- }
-
-
-def _visit_or_none(node, attr, visitor, parent, visit="visit", **kws):
- """If the given node has an attribute, visits the attribute, and
- otherwise returns None.
-
- """
- value = getattr(node, attr, None)
- if value:
- return getattr(visitor, visit)(value, parent, **kws)
-
- return None
-
-
-class TreeRebuilder:
- """Rebuilds the _ast tree to become an Astroid tree"""
-
- def __init__(self, manager, parse_python_two: bool = False):
- self._manager = manager
- self._global_names = []
- self._import_from_nodes = []
- self._delayed_assattr = []
- self._visit_meths = {}
-
- # Configure the right classes for the right module
- self._parser_module = _get_parser_module(parse_python_two=parse_python_two)
- self._unary_op_classes = _unary_operators_from_module(self._parser_module)
- self._cmp_op_classes = _compare_operators_from_module(self._parser_module)
- self._bool_op_classes = _bool_operators_from_module(self._parser_module)
- self._bin_op_classes = _binary_operators_from_module(self._parser_module)
- self._context_classes = _contexts_from_module(self._parser_module)
-
- def _get_doc(self, node):
- try:
- if PY37 and hasattr(node, "docstring"):
- doc = node.docstring
- return node, doc
- if node.body and isinstance(node.body[0], self._parser_module.Expr):
-
- first_value = node.body[0].value
- if isinstance(first_value, self._parser_module.Str) or (
- PY38
- and isinstance(first_value, self._parser_module.Constant)
- and isinstance(first_value.value, str)
- ):
- doc = first_value.value if PY38 else first_value.s
- node.body = node.body[1:]
- return node, doc
- except IndexError:
- pass # ast built from scratch
- return node, None
-
- def _get_context(self, node):
- return self._context_classes.get(type(node.ctx), astroid.Load)
-
- def visit_module(self, node, modname, modpath, package):
- """visit a Module node by returning a fresh instance of it"""
- node, doc = self._get_doc(node)
- newnode = nodes.Module(
- name=modname,
- doc=doc,
- file=modpath,
- path=[modpath],
- package=package,
- parent=None,
- )
- newnode.postinit([self.visit(child, newnode) for child in node.body])
- return newnode
-
- def visit(self, node, parent):
- cls = node.__class__
- if cls in self._visit_meths:
- visit_method = self._visit_meths[cls]
- else:
- cls_name = cls.__name__
- visit_name = "visit_" + REDIRECT.get(cls_name, cls_name).lower()
- visit_method = getattr(self, visit_name)
- self._visit_meths[cls] = visit_method
- return visit_method(node, parent)
-
- def _save_assignment(self, node, name=None):
- """save assignement situation since node.parent is not available yet"""
- if self._global_names and node.name in self._global_names[-1]:
- node.root().set_local(node.name, node)
- else:
- node.parent.set_local(node.name, node)
-
- def visit_arguments(self, node, parent):
- """visit an Arguments node by returning a fresh instance of it"""
- vararg, kwarg = node.vararg, node.kwarg
- newnode = nodes.Arguments(
- vararg.arg if vararg else None, kwarg.arg if kwarg else None, parent
- )
- args = [self.visit(child, newnode) for child in node.args]
- defaults = [self.visit(child, newnode) for child in node.defaults]
- varargannotation = None
- kwargannotation = None
- posonlyargs = []
- # change added in 82732 (7c5c678e4164), vararg and kwarg
- # are instances of `_ast.arg`, not strings
- if vararg:
- if node.vararg.annotation:
- varargannotation = self.visit(node.vararg.annotation, newnode)
- vararg = vararg.arg
- if kwarg:
- if node.kwarg.annotation:
- kwargannotation = self.visit(node.kwarg.annotation, newnode)
- kwarg = kwarg.arg
- kwonlyargs = [self.visit(child, newnode) for child in node.kwonlyargs]
- kw_defaults = [
- self.visit(child, newnode) if child else None for child in node.kw_defaults
- ]
- annotations = [
- self.visit(arg.annotation, newnode) if arg.annotation else None
- for arg in node.args
- ]
- kwonlyargs_annotations = [
- self.visit(arg.annotation, newnode) if arg.annotation else None
- for arg in node.kwonlyargs
- ]
-
- posonlyargs_annotations = []
- if PY38:
- posonlyargs = [self.visit(child, newnode) for child in node.posonlyargs]
- posonlyargs_annotations = [
- self.visit(arg.annotation, newnode) if arg.annotation else None
- for arg in node.posonlyargs
- ]
- type_comment_args = [
- self.check_type_comment(child, parent=newnode) for child in node.args
- ]
-
- newnode.postinit(
- args=args,
- defaults=defaults,
- kwonlyargs=kwonlyargs,
- posonlyargs=posonlyargs,
- kw_defaults=kw_defaults,
- annotations=annotations,
- kwonlyargs_annotations=kwonlyargs_annotations,
- posonlyargs_annotations=posonlyargs_annotations,
- varargannotation=varargannotation,
- kwargannotation=kwargannotation,
- type_comment_args=type_comment_args,
- )
- # save argument names in locals:
- if vararg:
- newnode.parent.set_local(vararg, newnode)
- if kwarg:
- newnode.parent.set_local(kwarg, newnode)
- return newnode
-
- def visit_assert(self, node, parent):
- """visit a Assert node by returning a fresh instance of it"""
- newnode = nodes.Assert(node.lineno, node.col_offset, parent)
- if node.msg:
- msg = self.visit(node.msg, newnode)
- else:
- msg = None
- newnode.postinit(self.visit(node.test, newnode), msg)
- return newnode
-
- def check_type_comment(self, node, parent):
- type_comment = getattr(node, "type_comment", None)
- if not type_comment:
- return None
-
- try:
- type_comment_ast = _parse(type_comment)
- except SyntaxError:
- # Invalid type comment, just skip it.
- return None
-
- type_object = self.visit(type_comment_ast.body[0], parent=parent)
- if not isinstance(type_object, nodes.Expr):
- return None
-
- return type_object.value
-
- def check_function_type_comment(self, node):
- type_comment = getattr(node, "type_comment", None)
- if not type_comment:
- return None
-
- try:
- type_comment_ast = parse_function_type_comment(type_comment)
- except SyntaxError:
- # Invalid type comment, just skip it.
- return None
-
- returns = None
- argtypes = [
- self.visit(elem, node) for elem in (type_comment_ast.argtypes or [])
- ]
- if type_comment_ast.returns:
- returns = self.visit(type_comment_ast.returns, node)
-
- return returns, argtypes
-
- def visit_assign(self, node, parent):
- """visit a Assign node by returning a fresh instance of it"""
- newnode = nodes.Assign(node.lineno, node.col_offset, parent)
- type_annotation = self.check_type_comment(node, parent=newnode)
- newnode.postinit(
- targets=[self.visit(child, newnode) for child in node.targets],
- value=self.visit(node.value, newnode),
- type_annotation=type_annotation,
- )
- return newnode
-
- def visit_assignname(self, node, parent, node_name=None):
- """visit a node and return a AssignName node"""
- newnode = nodes.AssignName(
- node_name,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
- self._save_assignment(newnode)
- return newnode
-
- def visit_augassign(self, node, parent):
- """visit a AugAssign node by returning a fresh instance of it"""
- newnode = nodes.AugAssign(
- self._bin_op_classes[type(node.op)] + "=",
- node.lineno,
- node.col_offset,
- parent,
- )
- newnode.postinit(
- self.visit(node.target, newnode), self.visit(node.value, newnode)
- )
- return newnode
-
- def visit_repr(self, node, parent):
- """visit a Backquote node by returning a fresh instance of it"""
- newnode = nodes.Repr(node.lineno, node.col_offset, parent)
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- def visit_binop(self, node, parent):
- """visit a BinOp node by returning a fresh instance of it"""
- newnode = nodes.BinOp(
- self._bin_op_classes[type(node.op)], node.lineno, node.col_offset, parent
- )
- newnode.postinit(
- self.visit(node.left, newnode), self.visit(node.right, newnode)
- )
- return newnode
-
- def visit_boolop(self, node, parent):
- """visit a BoolOp node by returning a fresh instance of it"""
- newnode = nodes.BoolOp(
- self._bool_op_classes[type(node.op)], node.lineno, node.col_offset, parent
- )
- newnode.postinit([self.visit(child, newnode) for child in node.values])
- return newnode
-
- def visit_break(self, node, parent):
- """visit a Break node by returning a fresh instance of it"""
- return nodes.Break(
- getattr(node, "lineno", None), getattr(node, "col_offset", None), parent
- )
-
- def visit_call(self, node, parent):
- """visit a CallFunc node by returning a fresh instance of it"""
- newnode = nodes.Call(node.lineno, node.col_offset, parent)
- starargs = _visit_or_none(node, "starargs", self, newnode)
- kwargs = _visit_or_none(node, "kwargs", self, newnode)
- args = [self.visit(child, newnode) for child in node.args]
-
- if node.keywords:
- keywords = [self.visit(child, newnode) for child in node.keywords]
- else:
- keywords = None
- if starargs:
- new_starargs = nodes.Starred(
- col_offset=starargs.col_offset,
- lineno=starargs.lineno,
- parent=starargs.parent,
- )
- new_starargs.postinit(value=starargs)
- args.append(new_starargs)
- if kwargs:
- new_kwargs = nodes.Keyword(
- arg=None,
- col_offset=kwargs.col_offset,
- lineno=kwargs.lineno,
- parent=kwargs.parent,
- )
- new_kwargs.postinit(value=kwargs)
- if keywords:
- keywords.append(new_kwargs)
- else:
- keywords = [new_kwargs]
-
- newnode.postinit(self.visit(node.func, newnode), args, keywords)
- return newnode
-
- def visit_classdef(self, node, parent, newstyle=None):
- """visit a ClassDef node to become astroid"""
- node, doc = self._get_doc(node)
- newnode = nodes.ClassDef(node.name, doc, node.lineno, node.col_offset, parent)
- metaclass = None
- for keyword in node.keywords:
- if keyword.arg == "metaclass":
- metaclass = self.visit(keyword, newnode).value
- break
- if node.decorator_list:
- decorators = self.visit_decorators(node, newnode)
- else:
- decorators = None
- newnode.postinit(
- [self.visit(child, newnode) for child in node.bases],
- [self.visit(child, newnode) for child in node.body],
- decorators,
- newstyle,
- metaclass,
- [
- self.visit(kwd, newnode)
- for kwd in node.keywords
- if kwd.arg != "metaclass"
- ],
- )
- return newnode
-
- def visit_const(self, node, parent):
- """visit a Const node by returning a fresh instance of it"""
- return nodes.Const(
- node.value,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
-
- def visit_continue(self, node, parent):
- """visit a Continue node by returning a fresh instance of it"""
- return nodes.Continue(
- getattr(node, "lineno", None), getattr(node, "col_offset", None), parent
- )
-
- def visit_compare(self, node, parent):
- """visit a Compare node by returning a fresh instance of it"""
- newnode = nodes.Compare(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.left, newnode),
- [
- (self._cmp_op_classes[op.__class__], self.visit(expr, newnode))
- for (op, expr) in zip(node.ops, node.comparators)
- ],
- )
- return newnode
-
- def visit_comprehension(self, node, parent):
- """visit a Comprehension node by returning a fresh instance of it"""
- newnode = nodes.Comprehension(parent)
- newnode.postinit(
- self.visit(node.target, newnode),
- self.visit(node.iter, newnode),
- [self.visit(child, newnode) for child in node.ifs],
- getattr(node, "is_async", None),
- )
- return newnode
-
- def visit_decorators(self, node, parent):
- """visit a Decorators node by returning a fresh instance of it"""
- # /!\ node is actually a _ast.FunctionDef node while
- # parent is an astroid.nodes.FunctionDef node
- if PY38:
-            # Use the line number of the first decorator for Python 3.8+.
- lineno = node.decorator_list[0].lineno
- else:
- lineno = node.lineno
- newnode = nodes.Decorators(lineno, node.col_offset, parent)
- newnode.postinit([self.visit(child, newnode) for child in node.decorator_list])
- return newnode
-
- def visit_delete(self, node, parent):
- """visit a Delete node by returning a fresh instance of it"""
- newnode = nodes.Delete(node.lineno, node.col_offset, parent)
- newnode.postinit([self.visit(child, newnode) for child in node.targets])
- return newnode
-
- def _visit_dict_items(self, node, parent, newnode):
- for key, value in zip(node.keys, node.values):
- rebuilt_value = self.visit(value, newnode)
- if not key:
- # Python 3.5 and extended unpacking
- rebuilt_key = nodes.DictUnpack(
- rebuilt_value.lineno, rebuilt_value.col_offset, parent
- )
- else:
- rebuilt_key = self.visit(key, newnode)
- yield rebuilt_key, rebuilt_value
-
- def visit_dict(self, node, parent):
- """visit a Dict node by returning a fresh instance of it"""
- newnode = nodes.Dict(node.lineno, node.col_offset, parent)
- items = list(self._visit_dict_items(node, parent, newnode))
- newnode.postinit(items)
- return newnode
-
- def visit_dictcomp(self, node, parent):
- """visit a DictComp node by returning a fresh instance of it"""
- newnode = nodes.DictComp(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.key, newnode),
- self.visit(node.value, newnode),
- [self.visit(child, newnode) for child in node.generators],
- )
- return newnode
-
- def visit_expr(self, node, parent):
- """visit a Expr node by returning a fresh instance of it"""
- newnode = nodes.Expr(node.lineno, node.col_offset, parent)
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- # Not used in Python 3.8+.
- def visit_ellipsis(self, node, parent):
- """visit an Ellipsis node by returning a fresh instance of it"""
- return nodes.Ellipsis(
- getattr(node, "lineno", None), getattr(node, "col_offset", None), parent
- )
-
- def visit_emptynode(self, node, parent):
- """visit an EmptyNode node by returning a fresh instance of it"""
- return nodes.EmptyNode(
- getattr(node, "lineno", None), getattr(node, "col_offset", None), parent
- )
-
- def visit_excepthandler(self, node, parent):
- """visit an ExceptHandler node by returning a fresh instance of it"""
- newnode = nodes.ExceptHandler(node.lineno, node.col_offset, parent)
- # /!\ node.name can be a tuple
- newnode.postinit(
- _visit_or_none(node, "type", self, newnode),
- _visit_or_none(node, "name", self, newnode),
- [self.visit(child, newnode) for child in node.body],
- )
- return newnode
-
- def visit_exec(self, node, parent):
- """visit an Exec node by returning a fresh instance of it"""
- newnode = nodes.Exec(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.body, newnode),
- _visit_or_none(node, "globals", self, newnode),
- _visit_or_none(node, "locals", self, newnode),
- )
- return newnode
-
- # Not used in Python 3.8+.
- def visit_extslice(self, node, parent):
- """visit an ExtSlice node by returning a fresh instance of it"""
- newnode = nodes.ExtSlice(parent=parent)
- newnode.postinit([self.visit(dim, newnode) for dim in node.dims])
- return newnode
-
- def _visit_for(self, cls, node, parent):
- """visit a For node by returning a fresh instance of it"""
- newnode = cls(node.lineno, node.col_offset, parent)
- type_annotation = self.check_type_comment(node, parent=newnode)
- newnode.postinit(
- target=self.visit(node.target, newnode),
- iter=self.visit(node.iter, newnode),
- body=[self.visit(child, newnode) for child in node.body],
- orelse=[self.visit(child, newnode) for child in node.orelse],
- type_annotation=type_annotation,
- )
- return newnode
-
- def visit_for(self, node, parent):
- return self._visit_for(nodes.For, node, parent)
-
- def visit_importfrom(self, node, parent):
- """visit an ImportFrom node by returning a fresh instance of it"""
- names = [(alias.name, alias.asname) for alias in node.names]
- newnode = nodes.ImportFrom(
- node.module or "",
- names,
- node.level or None,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
- # store From names to add them to locals after building
- self._import_from_nodes.append(newnode)
- return newnode
-
- def _visit_functiondef(self, cls, node, parent):
- """visit an FunctionDef node to become astroid"""
- self._global_names.append({})
- node, doc = self._get_doc(node)
-
- lineno = node.lineno
- if PY38 and node.decorator_list:
- # Python 3.8 sets the line number of a decorated function
- # to be the actual line number of the function, but the
- # previous versions expected the decorator's line number instead.
- # We reset the function's line number to that of the
- # first decorator to maintain backward compatibility.
- # It's not ideal but this discrepancy was baked into
- # the framework for *years*.
- lineno = node.decorator_list[0].lineno
-
- newnode = cls(node.name, doc, lineno, node.col_offset, parent)
- if node.decorator_list:
- decorators = self.visit_decorators(node, newnode)
- else:
- decorators = None
- if node.returns:
- returns = self.visit(node.returns, newnode)
- else:
- returns = None
-
- type_comment_args = type_comment_returns = None
- type_comment_annotation = self.check_function_type_comment(node)
- if type_comment_annotation:
- type_comment_returns, type_comment_args = type_comment_annotation
- newnode.postinit(
- args=self.visit(node.args, newnode),
- body=[self.visit(child, newnode) for child in node.body],
- decorators=decorators,
- returns=returns,
- type_comment_returns=type_comment_returns,
- type_comment_args=type_comment_args,
- )
- self._global_names.pop()
- return newnode
-
- def visit_functiondef(self, node, parent):
- return self._visit_functiondef(nodes.FunctionDef, node, parent)
-
- def visit_generatorexp(self, node, parent):
- """visit a GeneratorExp node by returning a fresh instance of it"""
- newnode = nodes.GeneratorExp(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.elt, newnode),
- [self.visit(child, newnode) for child in node.generators],
- )
- return newnode
-
- def visit_attribute(self, node, parent):
- """visit an Attribute node by returning a fresh instance of it"""
- context = self._get_context(node)
- if context == astroid.Del:
-            # FIXME : maybe we should reintroduce a visit_delattr?
- # for instance, deactivating assign_ctx
- newnode = nodes.DelAttr(node.attr, node.lineno, node.col_offset, parent)
- elif context == astroid.Store:
- newnode = nodes.AssignAttr(node.attr, node.lineno, node.col_offset, parent)
- # Prohibit a local save if we are in an ExceptHandler.
- if not isinstance(parent, astroid.ExceptHandler):
- self._delayed_assattr.append(newnode)
- else:
- newnode = nodes.Attribute(node.attr, node.lineno, node.col_offset, parent)
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- def visit_global(self, node, parent):
- """visit a Global node to become astroid"""
- newnode = nodes.Global(
- node.names,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
- if self._global_names: # global at the module level, no effect
- for name in node.names:
- self._global_names[-1].setdefault(name, []).append(newnode)
- return newnode
-
- def visit_if(self, node, parent):
- """visit an If node by returning a fresh instance of it"""
- newnode = nodes.If(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.test, newnode),
- [self.visit(child, newnode) for child in node.body],
- [self.visit(child, newnode) for child in node.orelse],
- )
- return newnode
-
- def visit_ifexp(self, node, parent):
- """visit a IfExp node by returning a fresh instance of it"""
- newnode = nodes.IfExp(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.test, newnode),
- self.visit(node.body, newnode),
- self.visit(node.orelse, newnode),
- )
- return newnode
-
- def visit_import(self, node, parent):
- """visit a Import node by returning a fresh instance of it"""
- names = [(alias.name, alias.asname) for alias in node.names]
- newnode = nodes.Import(
- names,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
- # save import names in parent's locals:
- for (name, asname) in newnode.names:
- name = asname or name
- parent.set_local(name.split(".")[0], newnode)
- return newnode
-
- # Not used in Python 3.8+.
- def visit_index(self, node, parent):
- """visit a Index node by returning a fresh instance of it"""
- newnode = nodes.Index(parent=parent)
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- def visit_keyword(self, node, parent):
- """visit a Keyword node by returning a fresh instance of it"""
- newnode = nodes.Keyword(node.arg, parent=parent)
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- def visit_lambda(self, node, parent):
- """visit a Lambda node by returning a fresh instance of it"""
- newnode = nodes.Lambda(node.lineno, node.col_offset, parent)
- newnode.postinit(self.visit(node.args, newnode), self.visit(node.body, newnode))
- return newnode
-
- def visit_list(self, node, parent):
- """visit a List node by returning a fresh instance of it"""
- context = self._get_context(node)
- newnode = nodes.List(
- ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent
- )
- newnode.postinit([self.visit(child, newnode) for child in node.elts])
- return newnode
-
- def visit_listcomp(self, node, parent):
- """visit a ListComp node by returning a fresh instance of it"""
- newnode = nodes.ListComp(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.elt, newnode),
- [self.visit(child, newnode) for child in node.generators],
- )
- return newnode
-
- def visit_name(self, node, parent):
- """visit a Name node by returning a fresh instance of it"""
- context = self._get_context(node)
- # True and False can be assigned to something in py2x, so we have to
- # check the context first.
- if context == astroid.Del:
- newnode = nodes.DelName(node.id, node.lineno, node.col_offset, parent)
- elif context == astroid.Store:
- newnode = nodes.AssignName(node.id, node.lineno, node.col_offset, parent)
- elif node.id in CONST_NAME_TRANSFORMS:
- newnode = nodes.Const(
- CONST_NAME_TRANSFORMS[node.id],
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
- return newnode
- else:
- newnode = nodes.Name(node.id, node.lineno, node.col_offset, parent)
- # XXX REMOVE me :
- if context in (astroid.Del, astroid.Store): # 'Aug' ??
- self._save_assignment(newnode)
- return newnode
-
- def visit_constant(self, node, parent):
- """visit a Constant node by returning a fresh instance of Const"""
- return nodes.Const(
- node.value,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
-
- # Not used in Python 3.8+.
- def visit_str(self, node, parent):
- """visit a String/Bytes node by returning a fresh instance of Const"""
- return nodes.Const(
- node.s,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
-
- visit_bytes = visit_str
-
- # Not used in Python 3.8+.
- def visit_num(self, node, parent):
- """visit a Num node by returning a fresh instance of Const"""
- return nodes.Const(
- node.n,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
-
- def visit_pass(self, node, parent):
- """visit a Pass node by returning a fresh instance of it"""
- return nodes.Pass(node.lineno, node.col_offset, parent)
-
- def visit_print(self, node, parent):
- """visit a Print node by returning a fresh instance of it"""
- newnode = nodes.Print(node.nl, node.lineno, node.col_offset, parent)
- newnode.postinit(
- _visit_or_none(node, "dest", self, newnode),
- [self.visit(child, newnode) for child in node.values],
- )
- return newnode
-
- def visit_raise(self, node, parent):
- """visit a Raise node by returning a fresh instance of it"""
- newnode = nodes.Raise(node.lineno, node.col_offset, parent)
- # pylint: disable=too-many-function-args
- newnode.postinit(
- _visit_or_none(node, "type", self, newnode),
- _visit_or_none(node, "inst", self, newnode),
- _visit_or_none(node, "tback", self, newnode),
- )
- return newnode
-
- def visit_return(self, node, parent):
- """visit a Return node by returning a fresh instance of it"""
- newnode = nodes.Return(node.lineno, node.col_offset, parent)
- if node.value is not None:
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- def visit_set(self, node, parent):
- """visit a Set node by returning a fresh instance of it"""
- newnode = nodes.Set(node.lineno, node.col_offset, parent)
- newnode.postinit([self.visit(child, newnode) for child in node.elts])
- return newnode
-
- def visit_setcomp(self, node, parent):
- """visit a SetComp node by returning a fresh instance of it"""
- newnode = nodes.SetComp(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.elt, newnode),
- [self.visit(child, newnode) for child in node.generators],
- )
- return newnode
-
- def visit_slice(self, node, parent):
- """visit a Slice node by returning a fresh instance of it"""
- newnode = nodes.Slice(parent=parent)
- newnode.postinit(
- _visit_or_none(node, "lower", self, newnode),
- _visit_or_none(node, "upper", self, newnode),
- _visit_or_none(node, "step", self, newnode),
- )
- return newnode
-
- def visit_subscript(self, node, parent):
- """visit a Subscript node by returning a fresh instance of it"""
- context = self._get_context(node)
- newnode = nodes.Subscript(
- ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent
- )
- newnode.postinit(
- self.visit(node.value, newnode), self.visit(node.slice, newnode)
- )
- return newnode
-
- def visit_tryexcept(self, node, parent):
- """visit a TryExcept node by returning a fresh instance of it"""
- newnode = nodes.TryExcept(node.lineno, node.col_offset, parent)
- newnode.postinit(
- [self.visit(child, newnode) for child in node.body],
- [self.visit(child, newnode) for child in node.handlers],
- [self.visit(child, newnode) for child in node.orelse],
- )
- return newnode
-
- def visit_tryfinally(self, node, parent):
- """visit a TryFinally node by returning a fresh instance of it"""
- newnode = nodes.TryFinally(node.lineno, node.col_offset, parent)
- newnode.postinit(
- [self.visit(child, newnode) for child in node.body],
- [self.visit(n, newnode) for n in node.finalbody],
- )
- return newnode
-
- def visit_tuple(self, node, parent):
- """visit a Tuple node by returning a fresh instance of it"""
- context = self._get_context(node)
- newnode = nodes.Tuple(
- ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent
- )
- newnode.postinit([self.visit(child, newnode) for child in node.elts])
- return newnode
-
- def visit_unaryop(self, node, parent):
- """visit a UnaryOp node by returning a fresh instance of it"""
- newnode = nodes.UnaryOp(
- self._unary_op_classes[node.op.__class__],
- node.lineno,
- node.col_offset,
- parent,
- )
- newnode.postinit(self.visit(node.operand, newnode))
- return newnode
-
- def visit_while(self, node, parent):
- """visit a While node by returning a fresh instance of it"""
- newnode = nodes.While(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.test, newnode),
- [self.visit(child, newnode) for child in node.body],
- [self.visit(child, newnode) for child in node.orelse],
- )
- return newnode
-
- def visit_with(self, node, parent):
- newnode = nodes.With(node.lineno, node.col_offset, parent)
- expr = self.visit(node.context_expr, newnode)
- if node.optional_vars is not None:
- optional_vars = self.visit(node.optional_vars, newnode)
- else:
- optional_vars = None
-
- type_annotation = self.check_type_comment(node, parent=newnode)
- newnode.postinit(
- items=[(expr, optional_vars)],
- body=[self.visit(child, newnode) for child in node.body],
- type_annotation=type_annotation,
- )
- return newnode
-
- def visit_yield(self, node, parent):
- """visit a Yield node by returning a fresh instance of it"""
- newnode = nodes.Yield(node.lineno, node.col_offset, parent)
- if node.value is not None:
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
-
-class TreeRebuilder3(TreeRebuilder):
- """extend and overwrite TreeRebuilder for python3k"""
-
- def visit_arg(self, node, parent):
- """visit an arg node by returning a fresh AssName instance"""
- return self.visit_assignname(node, parent, node.arg)
-
- # Not used in Python 3.8+.
- def visit_nameconstant(self, node, parent):
- # in Python 3.4 we have NameConstant for True / False / None
- return nodes.Const(
- node.value,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
-
- def visit_excepthandler(self, node, parent):
- """visit an ExceptHandler node by returning a fresh instance of it"""
- newnode = nodes.ExceptHandler(node.lineno, node.col_offset, parent)
- if node.name:
- name = self.visit_assignname(node, newnode, node.name)
- else:
- name = None
- newnode.postinit(
- _visit_or_none(node, "type", self, newnode),
- name,
- [self.visit(child, newnode) for child in node.body],
- )
- return newnode
-
- def visit_nonlocal(self, node, parent):
- """visit a Nonlocal node and return a new instance of it"""
- return nodes.Nonlocal(
- node.names,
- getattr(node, "lineno", None),
- getattr(node, "col_offset", None),
- parent,
- )
-
- def visit_raise(self, node, parent):
- """visit a Raise node by returning a fresh instance of it"""
- newnode = nodes.Raise(node.lineno, node.col_offset, parent)
- # no traceback; anyway it is not used in Pylint
- newnode.postinit(
- _visit_or_none(node, "exc", self, newnode),
- _visit_or_none(node, "cause", self, newnode),
- )
- return newnode
-
- def visit_starred(self, node, parent):
- """visit a Starred node and return a new instance of it"""
- context = self._get_context(node)
- newnode = nodes.Starred(
- ctx=context, lineno=node.lineno, col_offset=node.col_offset, parent=parent
- )
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- def visit_try(self, node, parent):
- # Python 3.3 introduced a new Try node replacing the
- # TryFinally/TryExcept nodes
- if node.finalbody:
- newnode = nodes.TryFinally(node.lineno, node.col_offset, parent)
- if node.handlers:
- body = [self.visit_tryexcept(node, newnode)]
- else:
- body = [self.visit(child, newnode) for child in node.body]
- newnode.postinit(body, [self.visit(n, newnode) for n in node.finalbody])
- return newnode
- if node.handlers:
- return self.visit_tryexcept(node, parent)
- return None
-
- def visit_annassign(self, node, parent):
- """visit an AnnAssign node by returning a fresh instance of it"""
- newnode = nodes.AnnAssign(node.lineno, node.col_offset, parent)
- annotation = _visit_or_none(node, "annotation", self, newnode)
- newnode.postinit(
- target=self.visit(node.target, newnode),
- annotation=annotation,
- simple=node.simple,
- value=_visit_or_none(node, "value", self, newnode),
- )
- return newnode
-
- def _visit_with(self, cls, node, parent):
- if "items" not in node._fields:
- # python < 3.3
- return super(TreeRebuilder3, self).visit_with(node, parent)
-
- newnode = cls(node.lineno, node.col_offset, parent)
-
- def visit_child(child):
- expr = self.visit(child.context_expr, newnode)
- var = _visit_or_none(child, "optional_vars", self, newnode)
- return expr, var
-
- type_annotation = self.check_type_comment(node, parent=newnode)
- newnode.postinit(
- items=[visit_child(child) for child in node.items],
- body=[self.visit(child, newnode) for child in node.body],
- type_annotation=type_annotation,
- )
- return newnode
-
- def visit_with(self, node, parent):
- return self._visit_with(nodes.With, node, parent)
-
- def visit_yieldfrom(self, node, parent):
- newnode = nodes.YieldFrom(node.lineno, node.col_offset, parent)
- if node.value is not None:
- newnode.postinit(self.visit(node.value, newnode))
- return newnode
-
- def visit_classdef(self, node, parent, newstyle=True):
- return super(TreeRebuilder3, self).visit_classdef(
- node, parent, newstyle=newstyle
- )
-
- # Async structs added in Python 3.5
- def visit_asyncfunctiondef(self, node, parent):
- return self._visit_functiondef(nodes.AsyncFunctionDef, node, parent)
-
- def visit_asyncfor(self, node, parent):
- return self._visit_for(nodes.AsyncFor, node, parent)
-
- def visit_await(self, node, parent):
- newnode = nodes.Await(node.lineno, node.col_offset, parent)
- newnode.postinit(value=self.visit(node.value, newnode))
- return newnode
-
- def visit_asyncwith(self, node, parent):
- return self._visit_with(nodes.AsyncWith, node, parent)
-
- def visit_joinedstr(self, node, parent):
- newnode = nodes.JoinedStr(node.lineno, node.col_offset, parent)
- newnode.postinit([self.visit(child, newnode) for child in node.values])
- return newnode
-
- def visit_formattedvalue(self, node, parent):
- newnode = nodes.FormattedValue(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.value, newnode),
- node.conversion,
- _visit_or_none(node, "format_spec", self, newnode),
- )
- return newnode
-
- def visit_namedexpr(self, node, parent):
- newnode = nodes.NamedExpr(node.lineno, node.col_offset, parent)
- newnode.postinit(
- self.visit(node.target, newnode), self.visit(node.value, newnode)
- )
- return newnode
-
-
-TreeRebuilder = TreeRebuilder3
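
The TreeRebuilder deleted above is normally driven through astroid's public parsing
helpers rather than instantiated directly. A minimal sketch, assuming only that the
astroid package is importable; the attribute names mirror the visit_* methods above:

    import astroid

    # astroid.parse() runs the rebuilder over the stdlib ast tree and returns
    # an astroid Module whose children are the nodes built by the visit_* methods.
    module = astroid.parse("def add(a, b):\n    return a + b\n")
    func = module.body[0]                         # a FunctionDef node
    print(func.name)                              # -> add
    print([arg.name for arg in func.args.args])   # -> ['a', 'b']
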
diff --git a/venv/Lib/site-packages/astroid/scoped_nodes.py b/venv/Lib/site-packages/astroid/scoped_nodes.py
deleted file mode 100644
index d02b653..0000000
--- a/venv/Lib/site-packages/astroid/scoped_nodes.py
+++ /dev/null
@@ -1,2836 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
-# Copyright (c) 2011, 2013-2015 Google, Inc.
-# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2013 Phil Schaf <flying-sheep@web.de>
-# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
-# Copyright (c) 2015-2016 Florian Bruhin <me@the-compiler.org>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
-# Copyright (c) 2015 Philip Lorenz <philip@bithub.de>
-# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
-# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2017-2018 Ashley Whetter <ashley@awhetter.co.uk>
-# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
-# Copyright (c) 2017 David Euresti <david@dropbox.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
-# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-"""
-This module contains the classes for "scoped" nodes, i.e. those which open a
-new local scope in the language definition: Module, ClassDef, FunctionDef (and
-Lambda, GeneratorExp, DictComp and SetComp to some extent).
-"""
-
-import builtins
-import sys
-import io
-import itertools
-from typing import Optional, List
-
-from astroid import bases
-from astroid import context as contextmod
-from astroid import exceptions
-from astroid import decorators as decorators_mod
-from astroid.interpreter import objectmodel
-from astroid.interpreter import dunder_lookup
-from astroid import manager
-from astroid import mixins
-from astroid import node_classes
-from astroid import util
-
-
-BUILTINS = builtins.__name__
-ITER_METHODS = ("__iter__", "__getitem__")
-EXCEPTION_BASE_CLASSES = frozenset({"Exception", "BaseException"})
-objects = util.lazy_import("objects")
-
-
-def _c3_merge(sequences, cls, context):
- """Merges MROs in *sequences* to a single MRO using the C3 algorithm.
-
- Adapted from http://www.python.org/download/releases/2.3/mro/.
-
- """
- result = []
- while True:
- sequences = [s for s in sequences if s] # purge empty sequences
- if not sequences:
- return result
- for s1 in sequences: # find merge candidates among seq heads
- candidate = s1[0]
- for s2 in sequences:
- if candidate in s2[1:]:
- candidate = None
- break # reject the current head, it appears later
- else:
- break
- if not candidate:
- # Show all the remaining bases, which were considered as
- # candidates for the next mro sequence.
- raise exceptions.InconsistentMroError(
- message="Cannot create a consistent method resolution order "
- "for MROs {mros} of class {cls!r}.",
- mros=sequences,
- cls=cls,
- context=context,
- )
-
- result.append(candidate)
- # remove the chosen candidate
- for seq in sequences:
- if seq[0] == candidate:
- del seq[0]
- return None
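
The merge loop above reproduces the C3 linearization that CPython itself uses for
method resolution order, so its result can be sanity-checked against plain Python.
A small, astroid-independent illustration:

    class A: pass
    class B(A): pass
    class C(A): pass
    class D(B, C): pass

    # CPython's C3 merge yields the same ordering _c3_merge computes:
    print([cls.__name__ for cls in D.__mro__])  # -> ['D', 'B', 'C', 'A', 'object']
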
-
-
-def clean_duplicates_mro(sequences, cls, context):
- for sequence in sequences:
- names = [
- (node.lineno, node.qname()) if node.name else None for node in sequence
- ]
- last_index = dict(map(reversed, enumerate(names)))
- if names and names[0] is not None and last_index[names[0]] != 0:
- raise exceptions.DuplicateBasesError(
- message="Duplicates found in MROs {mros} for {cls!r}.",
- mros=sequences,
- cls=cls,
- context=context,
- )
- yield [
- node
- for i, (node, name) in enumerate(zip(sequence, names))
- if name is None or last_index[name] == i
- ]
-
-
-def function_to_method(n, klass):
- if isinstance(n, FunctionDef):
- if n.type == "classmethod":
- return bases.BoundMethod(n, klass)
- if n.type != "staticmethod":
- return bases.UnboundMethod(n)
- return n
-
-
-MANAGER = manager.AstroidManager()
-
-
-def builtin_lookup(name):
- """lookup a name into the builtin module
- return the list of matching statements and the astroid for the builtin
- module
- """
- builtin_astroid = MANAGER.ast_from_module(builtins)
- if name == "__dict__":
- return builtin_astroid, ()
- try:
- stmts = builtin_astroid.locals[name]
- except KeyError:
- stmts = ()
- return builtin_astroid, stmts
-
-
-# TODO move this Mixin to mixins.py; problem: 'FunctionDef' in _scope_lookup
-class LocalsDictNodeNG(node_classes.LookupMixIn, node_classes.NodeNG):
- """ this class provides locals handling common to Module, FunctionDef
- and ClassDef nodes, including a dict-like interface for direct access
- to locals information
- """
-
- # attributes below are set by the builder module or by raw factories
-
- locals = {}
- """A map of the name of a local variable to the node defining the local.
-
- :type: dict(str, NodeNG)
- """
-
- def qname(self):
- """Get the 'qualified' name of the node.
-
- For example: module.name, module.class.name ...
-
- :returns: The qualified name.
- :rtype: str
- """
- # pylint: disable=no-member; github.com/pycqa/astroid/issues/278
- if self.parent is None:
- return self.name
- return "%s.%s" % (self.parent.frame().qname(), self.name)
-
- def frame(self):
- """The first parent frame node.
-
- A frame node is a :class:`Module`, :class:`FunctionDef`,
- or :class:`ClassDef`.
-
- :returns: The first parent frame node.
- :rtype: Module or FunctionDef or ClassDef
- """
- return self
-
- def scope(self):
- """The first parent node defining a new scope.
-
- :returns: The first parent scope node.
- :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr
- """
- return self
-
- def _scope_lookup(self, node, name, offset=0):
- """XXX method for interfacing the scope lookup"""
- try:
- stmts = node._filter_stmts(self.locals[name], self, offset)
- except KeyError:
- stmts = ()
- if stmts:
- return self, stmts
- if self.parent: # i.e. not Module
- # nested scope: if parent scope is a function, that's fine
- # else jump to the module
- pscope = self.parent.scope()
- if not pscope.is_function:
- pscope = pscope.root()
- return pscope.scope_lookup(node, name)
- return builtin_lookup(name) # Module
-
- def set_local(self, name, stmt):
- """Define that the given name is declared in the given statement node.
-
- .. seealso:: :meth:`scope`
-
- :param name: The name that is being defined.
- :type name: str
-
- :param stmt: The statement that defines the given name.
- :type stmt: NodeNG
- """
- # assert not stmt in self.locals.get(name, ()), (self, stmt)
- self.locals.setdefault(name, []).append(stmt)
-
- __setitem__ = set_local
-
- def _append_node(self, child):
- """append a child, linking it in the tree"""
- # pylint: disable=no-member; depending by the class
- # which uses the current class as a mixin or base class.
- # It's rewritten in 2.0, so it makes no sense for now
- # to spend development time on it.
- self.body.append(child)
- child.parent = self
-
- def add_local_node(self, child_node, name=None):
- """Append a child that should alter the locals of this scope node.
-
- :param child_node: The child node that will alter locals.
- :type child_node: NodeNG
-
- :param name: The name of the local that will be altered by
- the given child node.
- :type name: str or None
- """
- if name != "__class__":
- # adding a __class__ node as a child would cause infinite recursion later!
- self._append_node(child_node)
- self.set_local(name or child_node.name, child_node)
-
- def __getitem__(self, item):
- """The first node the defines the given local.
-
- :param item: The name of the locally defined object.
- :type item: str
-
- :raises KeyError: If the name is not defined.
- """
- return self.locals[item][0]
-
- def __iter__(self):
- """Iterate over the names of locals defined in this scoped node.
-
- :returns: The names of the defined locals.
- :rtype: iterable(str)
- """
- return iter(self.keys())
-
- def keys(self):
- """The names of locals defined in this scoped node.
-
- :returns: The names of the defined locals.
- :rtype: list(str)
- """
- return list(self.locals.keys())
-
- def values(self):
- """The nodes that define the locals in this scoped node.
-
- :returns: The nodes that define locals.
- :rtype: list(NodeNG)
- """
- return [self[key] for key in self.keys()]
-
- def items(self):
- """Get the names of the locals and the node that defines the local.
-
- :returns: The names of locals and their associated node.
- :rtype: list(tuple(str, NodeNG))
- """
- return list(zip(self.keys(), self.values()))
-
- def __contains__(self, name):
- """Check if a local is defined in this scope.
-
- :param name: The name of the local to check for.
- :type name: str
-
- :returns: True if this node has a local of the given name,
- False otherwise.
- :rtype: bool
- """
- return name in self.locals
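
The dict-like interface documented above (keys/values/items, __getitem__,
__contains__) can be exercised on any scoped node. A short sketch, assuming the
astroid package is importable:

    import astroid

    module = astroid.parse("x = 1\ndef f(): pass\n")
    print("x" in module)           # __contains__  -> True
    print(sorted(module.keys()))   # local names   -> ['f', 'x']
    print(module["f"])             # first node defining the local 'f'
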
-
-
-class Module(LocalsDictNodeNG):
- """Class representing an :class:`ast.Module` node.
-
- >>> node = astroid.extract_node('import astroid')
- >>> node
- <Import l.1 at 0x7f23b2e4e5c0>
- >>> node.parent
- <Module l.0 at 0x7f23b2e4eda0>
- """
-
- _astroid_fields = ("body",)
-
- fromlineno = 0
- """The first line that this node appears on in the source code.
-
- :type: int or None
- """
- lineno = 0
- """The line that this node appears on in the source code.
-
- :type: int or None
- """
-
- # attributes below are set by the builder module or by raw factories
-
- file = None
- """The path to the file that this ast has been extracted from.
-
- This will be ``None`` when the representation has been built from a
- built-in module.
-
- :type: str or None
- """
- file_bytes = None
- """The string/bytes that this ast was built from.
-
- :type: str or bytes or None
- """
- file_encoding = None
- """The encoding of the source file.
-
- This is used to get unicode out of a source file.
- Python 2 only.
-
- :type: str or None
- """
- name = None
- """The name of the module.
-
- :type: str or None
- """
- pure_python = None
- """Whether the ast was built from source.
-
- :type: bool or None
- """
- package = None
- """Whether the node represents a package or a module.
-
- :type: bool or None
- """
- globals = None
- """A map of the name of a global variable to the node defining the global.
-
- :type: dict(str, NodeNG)
- """
-
- # Future imports
- future_imports = None
- """The imports from ``__future__``.
-
- :type: set(str) or None
- """
- special_attributes = objectmodel.ModuleModel()
- """The names of special attributes that this module has.
-
- :type: objectmodel.ModuleModel
- """
-
- # names of module attributes available through the global scope
- scope_attrs = {"__name__", "__doc__", "__file__", "__path__", "__package__"}
- """The names of module attributes available through the global scope.
-
- :type: set(str)
- """
-
- _other_fields = (
- "name",
- "doc",
- "file",
- "path",
- "package",
- "pure_python",
- "future_imports",
- )
- _other_other_fields = ("locals", "globals")
-
- def __init__(
- self,
- name,
- doc,
- file=None,
- path: Optional[List[str]] = None,
- package=None,
- parent=None,
- pure_python=True,
- ):
- """
- :param name: The name of the module.
- :type name: str
-
- :param doc: The module docstring.
- :type doc: str
-
- :param file: The path to the file that this ast has been extracted from.
- :type file: str or None
-
- :param path:
- :type path: Optional[List[str]]
-
- :param package: Whether the node represents a package or a module.
- :type package: bool or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
-
- :param pure_python: Whether the ast was built from source.
- :type pure_python: bool or None
- """
- self.name = name
- self.doc = doc
- self.file = file
- self.path = path
- self.package = package
- self.parent = parent
- self.pure_python = pure_python
- self.locals = self.globals = {}
- """A map of the name of a local variable to the node defining the local.
-
- :type: dict(str, NodeNG)
- """
- self.body = []
- """The contents of the module.
-
- :type: list(NodeNG) or None
- """
- self.future_imports = set()
-
- # pylint: enable=redefined-builtin
-
- def postinit(self, body=None):
- """Do some setup after initialisation.
-
- :param body: The contents of the module.
- :type body: list(NodeNG) or None
- """
- self.body = body
-
- def _get_stream(self):
- if self.file_bytes is not None:
- return io.BytesIO(self.file_bytes)
- if self.file is not None:
- stream = open(self.file, "rb")
- return stream
- return None
-
- def stream(self):
- """Get a stream to the underlying file or bytes.
-
- :type: file or io.BytesIO or None
- """
- return self._get_stream()
-
- def block_range(self, lineno):
- """Get a range from where this node starts to where this node ends.
-
- :param lineno: Unused.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to.
- :rtype: tuple(int, int)
- """
- return self.fromlineno, self.tolineno
-
- def scope_lookup(self, node, name, offset=0):
- """Lookup where the given variable is assigned.
-
- :param node: The node to look for assignments up to.
- Any assignments after the given node are ignored.
- :type node: NodeNG
-
- :param name: The name of the variable to find assignments for.
- :type name: str
-
- :param offset: The line offset to filter statements up to.
- :type offset: int
-
- :returns: This scope node and the list of assignments associated to the
- given name according to the scope where it has been found (locals,
- globals or builtin).
- :rtype: tuple(str, list(NodeNG))
- """
- if name in self.scope_attrs and name not in self.locals:
- try:
- return self, self.getattr(name)
- except exceptions.AttributeInferenceError:
- return self, ()
- return self._scope_lookup(node, name, offset)
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- return "%s.module" % BUILTINS
-
- def display_type(self):
- """A human readable type of this node.
-
- :returns: The type of this node.
- :rtype: str
- """
- return "Module"
-
- def getattr(self, name, context=None, ignore_locals=False):
- result = []
- name_in_locals = name in self.locals
-
- if name in self.special_attributes and not ignore_locals and not name_in_locals:
- result = [self.special_attributes.lookup(name)]
- elif not ignore_locals and name_in_locals:
- result = self.locals[name]
- elif self.package:
- try:
- result = [self.import_module(name, relative_only=True)]
- except (exceptions.AstroidBuildingError, SyntaxError) as exc:
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- ) from exc
- result = [n for n in result if not isinstance(n, node_classes.DelName)]
- if result:
- return result
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- )
-
- def igetattr(self, name, context=None):
- """Infer the possible values of the given variable.
-
- :param name: The name of the variable to infer.
- :type name: str
-
- :returns: The inferred possible values.
- :rtype: iterable(NodeNG) or None
- """
- # set lookup name since this is necessary to infer on import nodes for
- # instance
- context = contextmod.copy_context(context)
- context.lookupname = name
- try:
- return bases._infer_stmts(self.getattr(name, context), context, frame=self)
- except exceptions.AttributeInferenceError as error:
- raise exceptions.InferenceError(
- error.message, target=self, attribute=name, context=context
- ) from error
-
- def fully_defined(self):
- """Check if this module has been build from a .py file.
-
- If so, the module contains a complete representation,
- including the code.
-
- :returns: True if the module has been built from a .py file.
- :rtype: bool
- """
- return self.file is not None and self.file.endswith(".py")
-
- def statement(self):
- """The first parent node, including self, marked as statement node.
-
- :returns: The first parent statement.
- :rtype: NodeNG
- """
- return self
-
- def previous_sibling(self):
- """The previous sibling statement.
-
- :returns: The previous sibling statement node.
- :rtype: NodeNG or None
- """
-
- def next_sibling(self):
- """The next sibling statement node.
-
- :returns: The next sibling statement node.
- :rtype: NodeNG or None
- """
-
- _absolute_import_activated = True
-
- def absolute_import_activated(self):
- """Whether :pep:`328` absolute import behaviour has been enabled.
-
- :returns: True if :pep:`328` has been enabled, False otherwise.
- :rtype: bool
- """
- return self._absolute_import_activated
-
- def import_module(self, modname, relative_only=False, level=None):
- """Get the ast for a given module as if imported from this module.
-
- :param modname: The name of the module to "import".
- :type modname: str
-
- :param relative_only: Whether to only consider relative imports.
- :type relative_only: bool
-
- :param level: The level of relative import.
- :type level: int or None
-
- :returns: The imported module ast.
- :rtype: NodeNG
- """
- if relative_only and level is None:
- level = 0
- absmodname = self.relative_to_absolute_name(modname, level)
-
- try:
- return MANAGER.ast_from_module_name(absmodname)
- except exceptions.AstroidBuildingError:
- # we only want to import a sub module or package of this module,
- # skip here
- if relative_only:
- raise
- return MANAGER.ast_from_module_name(modname)
-
- def relative_to_absolute_name(self, modname, level):
- """Get the absolute module name for a relative import.
-
- The relative import can be implicit or explicit.
-
- :param modname: The module name to convert.
- :type modname: str
-
- :param level: The level of relative import.
- :type level: int
-
- :returns: The absolute module name.
- :rtype: str
-
- :raises TooManyLevelsError: When the relative import refers to a
- module too far above this one.
- """
- # XXX this returns nonsense when called on an absolute import
- # like 'pylint.checkers.astroid.utils'
- # XXX doesn't return an absolute name if self.name isn't an absolute name
- if self.absolute_import_activated() and level is None:
- return modname
- if level:
- if self.package:
- level = level - 1
- if level and self.name.count(".") < level:
- raise exceptions.TooManyLevelsError(level=level, name=self.name)
-
- package_name = self.name.rsplit(".", level)[0]
- elif self.package:
- package_name = self.name
- else:
- package_name = self.name.rsplit(".", 1)[0]
-
- if package_name:
- if not modname:
- return package_name
- return "%s.%s" % (package_name, modname)
- return modname
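
As a worked example of the logic above: a module named pkg.sub.mod that is not a
package keeps level untouched, strips `level` trailing components from its own name,
and prepends the result to modname. A sketch, assuming astroid.parse accepts a
module_name argument as in recent 2.x releases:

    import astroid

    mod = astroid.parse("", module_name="pkg.sub.mod")
    # 'from .. import helpers' arrives here as modname='helpers', level=2
    print(mod.relative_to_absolute_name("helpers", level=2))  # -> pkg.helpers
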
-
- def wildcard_import_names(self):
- """The list of imported names when this module is 'wildcard imported'.
-
- It doesn't include the '__builtins__' name which is added by the
- current CPython implementation of wildcard imports.
-
- :returns: The list of imported names.
- :rtype: list(str)
- """
- # We separate the different steps of lookup in try/excepts
- # to avoid catching too many Exceptions
- default = [name for name in self.keys() if not name.startswith("_")]
- try:
- all_values = self["__all__"]
- except KeyError:
- return default
-
- try:
- explicit = next(all_values.assigned_stmts())
- except exceptions.InferenceError:
- return default
- except AttributeError:
- # not an assignment node
- # XXX infer?
- return default
-
- # Try our best to detect the exported name.
- inferred = []
- try:
- explicit = next(explicit.infer())
- except exceptions.InferenceError:
- return default
- if not isinstance(explicit, (node_classes.Tuple, node_classes.List)):
- return default
-
- str_const = lambda node: (
- isinstance(node, node_classes.Const) and isinstance(node.value, str)
- )
- for node in explicit.elts:
- if str_const(node):
- inferred.append(node.value)
- else:
- try:
- inferred_node = next(node.infer())
- except exceptions.InferenceError:
- continue
- if str_const(inferred_node):
- inferred.append(inferred_node.value)
- return inferred
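
In practice the method above prefers an explicit, inferable __all__ and only falls
back to the non-underscore local names otherwise. A small sketch, assuming astroid
is importable:

    import astroid

    module = astroid.parse("__all__ = ['a']\na = 1\nb = 2\n")
    print(module.wildcard_import_names())  # -> ['a'], taken from __all__
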
-
- def public_names(self):
- """The list of the names that are publicly available in this module.
-
- :returns: The list of publc names.
- :rtype: list(str)
- """
- return [name for name in self.keys() if not name.startswith("_")]
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`Module` this is always ``True``.
- :rtype: bool
- """
- return True
-
- def get_children(self):
- yield from self.body
-
-
-class ComprehensionScope(LocalsDictNodeNG):
- """Scoping for different types of comprehensions."""
-
- def frame(self):
- """The first parent frame node.
-
- A frame node is a :class:`Module`, :class:`FunctionDef`,
- or :class:`ClassDef`.
-
- :returns: The first parent frame node.
- :rtype: Module or FunctionDef or ClassDef
- """
- return self.parent.frame()
-
- scope_lookup = LocalsDictNodeNG._scope_lookup
-
-
-class GeneratorExp(ComprehensionScope):
- """Class representing an :class:`ast.GeneratorExp` node.
-
- >>> node = astroid.extract_node('(thing for thing in things if thing)')
- >>> node
- <GeneratorExp l.1 at 0x7f23b2e4e400>
- """
-
- _astroid_fields = ("elt", "generators")
- _other_other_fields = ("locals",)
- elt = None
- """The element that forms the output of the expression.
-
- :type: NodeNG or None
- """
- generators = None
- """The generators that are looped through.
-
- :type: list(Comprehension) or None
- """
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- """
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.locals = {}
- """A map of the name of a local variable to the node defining the local.
-
- :type: dict(str, NodeNG)
- """
-
- super(GeneratorExp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, elt=None, generators=None):
- """Do some setup after initialisation.
-
- :param elt: The element that forms the output of the expression.
- :type elt: NodeNG or None
-
- :param generators: The generators that are looped through.
- :type generators: list(Comprehension) or None
- """
- self.elt = elt
- if generators is None:
- self.generators = []
- else:
- self.generators = generators
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`GeneratorExp` this is always ``True``.
- :rtype: bool
- """
- return True
-
- def get_children(self):
- yield self.elt
-
- yield from self.generators
-
-
-class DictComp(ComprehensionScope):
- """Class representing an :class:`ast.DictComp` node.
-
- >>> node = astroid.extract_node('{k:v for k, v in things if k > v}')
- >>> node
- <DictComp l.1 at 0x7f23b2e41d68>
- """
-
- _astroid_fields = ("key", "value", "generators")
- _other_other_fields = ("locals",)
- key = None
- """What produces the keys.
-
- :type: NodeNG or None
- """
- value = None
- """What produces the values.
-
- :type: NodeNG or None
- """
- generators = None
- """The generators that are looped through.
-
- :type: list(Comprehension) or None
- """
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- """
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.locals = {}
- """A map of the name of a local variable to the node defining the local.
-
- :type: dict(str, NodeNG)
- """
-
- super(DictComp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, key=None, value=None, generators=None):
- """Do some setup after initialisation.
-
- :param key: What produces the keys.
- :type key: NodeNG or None
-
- :param value: What produces the values.
- :type value: NodeNG or None
-
- :param generators: The generators that are looped through.
- :type generators: list(Comprehension) or None
- """
- self.key = key
- self.value = value
- if generators is None:
- self.generators = []
- else:
- self.generators = generators
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`DictComp` this is always :class:`Uninferable`.
- :rtype: Uninferable
- """
- return util.Uninferable
-
- def get_children(self):
- yield self.key
- yield self.value
-
- yield from self.generators
-
-
-class SetComp(ComprehensionScope):
- """Class representing an :class:`ast.SetComp` node.
-
- >>> node = astroid.extract_node('{thing for thing in things if thing}')
- >>> node
- <SetComp l.1 at 0x7f23b2e41898>
- """
-
- _astroid_fields = ("elt", "generators")
- _other_other_fields = ("locals",)
- elt = None
- """The element that forms the output of the expression.
-
- :type: NodeNG or None
- """
- generators = None
- """The generators that are looped through.
-
- :type: list(Comprehension) or None
- """
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- """
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.locals = {}
- """A map of the name of a local variable to the node defining the local.
-
- :type: dict(str, NodeNG)
- """
-
- super(SetComp, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, elt=None, generators=None):
- """Do some setup after initialisation.
-
- :param elt: The element that forms the output of the expression.
- :type elt: NodeNG or None
-
- :param generators: The generators that are looped through.
- :type generators: list(Comprehension) or None
- """
- self.elt = elt
- if generators is None:
- self.generators = []
- else:
- self.generators = generators
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`SetComp` this is always :class:`Uninferable`.
- :rtype: Uninferable
- """
- return util.Uninferable
-
- def get_children(self):
- yield self.elt
-
- yield from self.generators
-
-
-class _ListComp(node_classes.NodeNG):
- """Class representing an :class:`ast.ListComp` node.
-
- >>> node = astroid.extract_node('[thing for thing in things if thing]')
- >>> node
- <ListComp l.1 at 0x7f23b2e418d0>
- """
-
- _astroid_fields = ("elt", "generators")
- elt = None
- """The element that forms the output of the expression.
-
- :type: NodeNG or None
- """
- generators = None
- """The generators that are looped through.
-
- :type: list(Comprehension) or None
- """
-
- def postinit(self, elt=None, generators=None):
- """Do some setup after initialisation.
-
- :param elt: The element that forms the output of the expression.
- :type elt: NodeNG or None
-
- :param generators: The generators that are looped through.
- :type generators: list(Comprehension) or None
- """
- self.elt = elt
- self.generators = generators
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`ListComp` this is always :class:`Uninferable`.
- :rtype: Uninferable
- """
- return util.Uninferable
-
- def get_children(self):
- yield self.elt
-
- yield from self.generators
-
-
-class ListComp(_ListComp, ComprehensionScope):
- """Class representing an :class:`ast.ListComp` node.
-
- >>> node = astroid.extract_node('[thing for thing in things if thing]')
- >>> node
- <ListComp l.1 at 0x7f23b2e418d0>
- """
-
- _other_other_fields = ("locals",)
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- self.locals = {}
- """A map of the name of a local variable to the node defining it.
-
- :type: dict(str, NodeNG)
- """
-
- super(ListComp, self).__init__(lineno, col_offset, parent)
-
-
-def _infer_decorator_callchain(node):
- """Detect decorator call chaining and see if the end result is a
- static or a classmethod.
- """
- if not isinstance(node, FunctionDef):
- return None
- if not node.parent:
- return None
- try:
- result = next(node.infer_call_result(node.parent))
- except exceptions.InferenceError:
- return None
- if isinstance(result, bases.Instance):
- result = result._proxied
- if isinstance(result, ClassDef):
- if result.is_subtype_of("%s.classmethod" % BUILTINS):
- return "classmethod"
- if result.is_subtype_of("%s.staticmethod" % BUILTINS):
- return "staticmethod"
- return None
-
-
-class Lambda(mixins.FilterStmtsMixin, LocalsDictNodeNG):
- """Class representing an :class:`ast.Lambda` node.
-
- >>> node = astroid.extract_node('lambda arg: arg + 1')
- >>> node
- <Lambda.<lambda> l.1 at 0x7f23b2e41518>
- """
-
- _astroid_fields = ("args", "body")
- _other_other_fields = ("locals",)
- name = "<lambda>"
- is_lambda = True
-
- def implicit_parameters(self):
- return 0
-
- # function's type, 'function' | 'method' | 'staticmethod' | 'classmethod'
- @property
- def type(self):
- """Whether this is a method or function.
-
- :returns: 'method' if this is a method, 'function' otherwise.
- :rtype: str
- """
- # pylint: disable=no-member
- if self.args.args and self.args.args[0].name == "self":
- if isinstance(self.parent.scope(), ClassDef):
- return "method"
- return "function"
-
- def __init__(self, lineno=None, col_offset=None, parent=None):
- """
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.locals = {}
- """A map of the name of a local variable to the node defining it.
-
- :type: dict(str, NodeNG)
- """
-
- self.args = []
- """The arguments that the function takes.
-
- :type: Arguments or list
- """
-
- self.body = []
- """The contents of the function body.
-
- :type: list(NodeNG)
- """
-
- super(Lambda, self).__init__(lineno, col_offset, parent)
-
- def postinit(self, args, body):
- """Do some setup after initialisation.
-
- :param args: The arguments that the function takes.
- :type args: Arguments
-
- :param body: The contents of the function body.
- :type body: list(NodeNG)
- """
- self.args = args
- self.body = body
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- if "method" in self.type:
- return "%s.instancemethod" % BUILTINS
- return "%s.function" % BUILTINS
-
- def display_type(self):
- """A human readable type of this node.
-
- :returns: The type of this node.
- :rtype: str
- """
- if "method" in self.type:
- return "Method"
- return "Function"
-
- def callable(self):
- """Whether this node defines something that is callable.
-
- :returns: True if this defines something that is callable,
- False otherwise.
- For a :class:`Lambda` this is always ``True``.
- :rtype: bool
- """
- return True
-
- def argnames(self):
- """Get the names of each of the arguments.
-
- :returns: The names of the arguments.
- :rtype: list(str)
- """
- # pylint: disable=no-member; github.com/pycqa/astroid/issues/291
- # args is in fact redefined later on by postinit. Can't be changed
- # to None due to a strong interaction between Lambda and FunctionDef.
-
- if self.args.args: # maybe None with builtin functions
- names = _rec_get_names(self.args.args)
- else:
- names = []
- if self.args.vararg:
- names.append(self.args.vararg)
- if self.args.kwarg:
- names.append(self.args.kwarg)
- return names
-
- def infer_call_result(self, caller, context=None):
- """Infer what the function returns when called.
-
- :param caller: Unused
- :type caller: object
- """
- # pylint: disable=no-member; github.com/pycqa/astroid/issues/291
- # args is in fact redefined later on by postinit. Can't be changed
- # to None due to a strong interaction between Lambda and FunctionDef.
- return self.body.infer(context)
-
- def scope_lookup(self, node, name, offset=0):
- """Lookup where the given names is assigned.
-
- :param node: The node to look for assignments up to.
- Any assignments after the given node are ignored.
- :type node: NodeNG
-
- :param name: The name to find assignments for.
- :type name: str
-
- :param offset: The line offset to filter statements up to.
- :type offset: int
-
- :returns: This scope node and the list of assignments associated to the
- given name according to the scope where it has been found (locals,
- globals or builtin).
- :rtype: tuple(str, list(NodeNG))
- """
- # pylint: disable=no-member; github.com/pycqa/astroid/issues/291
- # args is in fact redefined later on by postinit. Can't be changed
- # to None due to a strong interaction between Lambda and FunctionDef.
-
- if node in self.args.defaults or node in self.args.kw_defaults:
- frame = self.parent.frame()
- # line offset to prevent def func(f=func) from resolving the default
- # value to the function being defined
- offset = -1
- else:
- # check this is not used in function decorators
- frame = self
- return frame._scope_lookup(node, name, offset)
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`Lambda` this is always ``True``.
- :rtype: bool
- """
- return True
-
- def get_children(self):
- yield self.args
- yield self.body
-
-
-class FunctionDef(mixins.MultiLineBlockMixin, node_classes.Statement, Lambda):
- """Class representing an :class:`ast.FunctionDef`.
-
- >>> node = astroid.extract_node('''
- ... def my_func(arg):
- ... return arg + 1
- ... ''')
- >>> node
- <FunctionDef.my_func l.2 at 0x7f23b2e71e10>
- """
-
- _astroid_fields = ("decorators", "args", "returns", "body")
- _multi_line_block_fields = ("body",)
- returns = None
- decorators = None
- """The decorators that are applied to this method or function.
-
- :type: Decorators or None
- """
- special_attributes = objectmodel.FunctionModel()
- """The names of special attributes that this function has.
-
- :type: objectmodel.FunctionModel
- """
- is_function = True
- """Whether this node indicates a function.
-
- For a :class:`FunctionDef` this is always ``True``.
-
- :type: bool
- """
- type_annotation = None
- """If present, this will contain the type annotation passed by a type comment
-
- :type: NodeNG or None
- """
- type_comment_args = None
- """
- If present, this will contain the type annotation for arguments
- passed by a type comment
- """
- type_comment_returns = None
- """If present, this will contain the return type annotation, passed by a type comment"""
- # attributes below are set by the builder module or by raw factories
- _other_fields = ("name", "doc")
- _other_other_fields = (
- "locals",
- "_type",
- "type_comment_returns",
- "type_comment_args",
- )
- _type = None
-
- def __init__(self, name=None, doc=None, lineno=None, col_offset=None, parent=None):
- """
- :param name: The name of the function.
- :type name: str or None
-
- :param doc: The function's docstring.
- :type doc: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.name = name
- """The name of the function.
-
- :type name: str or None
- """
-
- self.doc = doc
- """The function's docstring.
-
- :type doc: str or None
- """
-
- self.instance_attrs = {}
- super(FunctionDef, self).__init__(lineno, col_offset, parent)
- if parent:
- frame = parent.frame()
- frame.set_local(name, self)
-
- # pylint: disable=arguments-differ; different than Lambdas
- def postinit(
- self,
- args,
- body,
- decorators=None,
- returns=None,
- type_comment_returns=None,
- type_comment_args=None,
- ):
- """Do some setup after initialisation.
-
- :param args: The arguments that the function takes.
- :type args: Arguments or list
-
- :param body: The contents of the function body.
- :type body: list(NodeNG)
-
- :param decorators: The decorators that are applied to this
- method or function.
- :type decorators: Decorators or None
- :params type_comment_returns:
- The return type annotation passed via a type comment.
- :params type_comment_args:
- The args type annotation passed via a type comment.
- """
- self.args = args
- self.body = body
- self.decorators = decorators
- self.returns = returns
- self.type_comment_returns = type_comment_returns
- self.type_comment_args = type_comment_args
-
- @decorators_mod.cachedproperty
- def extra_decorators(self):
- """The extra decorators that this function can have.
-
- Additional decorators are considered when they are used as
- assignments, as in ``method = staticmethod(method)``.
- The property will return all the callables that are used for
- decoration.
-
- :type: list(NodeNG)
- """
- frame = self.parent.frame()
- if not isinstance(frame, ClassDef):
- return []
-
- decorators = []
- for assign in frame._get_assign_nodes():
- if isinstance(assign.value, node_classes.Call) and isinstance(
- assign.value.func, node_classes.Name
- ):
- for assign_node in assign.targets:
- if not isinstance(assign_node, node_classes.AssignName):
- # Support only `name = callable(name)`
- continue
-
- if assign_node.name != self.name:
- # Interested only in the assignment nodes that
- # decorate the current method.
- continue
- try:
- meth = frame[self.name]
- except KeyError:
- continue
- else:
- # Must be a function and in the same frame as the
- # original method.
- if (
- isinstance(meth, FunctionDef)
- and assign_node.frame() == frame
- ):
- decorators.append(assign.value)
- return decorators
-
- @decorators_mod.cachedproperty
- def type(self): # pylint: disable=invalid-overridden-method
- """The function type for this node.
-
- Possible values are: method, function, staticmethod, classmethod.
-
- :type: str
- """
- builtin_descriptors = {"classmethod", "staticmethod"}
-
- for decorator in self.extra_decorators:
- if decorator.func.name in builtin_descriptors:
- return decorator.func.name
-
- frame = self.parent.frame()
- type_name = "function"
- if isinstance(frame, ClassDef):
- if self.name == "__new__":
- return "classmethod"
- if sys.version_info >= (3, 6) and self.name == "__init_subclass__":
- return "classmethod"
-
- type_name = "method"
-
- if not self.decorators:
- return type_name
-
- for node in self.decorators.nodes:
- if isinstance(node, node_classes.Name):
- if node.name in builtin_descriptors:
- return node.name
-
- if isinstance(node, node_classes.Call):
- # Handle the following case:
- # @some_decorator(arg1, arg2)
- # def func(...)
- #
- try:
- current = next(node.func.infer())
- except exceptions.InferenceError:
- continue
- _type = _infer_decorator_callchain(current)
- if _type is not None:
- return _type
-
- try:
- for inferred in node.infer():
- # Check to see if this returns a static or a class method.
- _type = _infer_decorator_callchain(inferred)
- if _type is not None:
- return _type
-
- if not isinstance(inferred, ClassDef):
- continue
- for ancestor in inferred.ancestors():
- if not isinstance(ancestor, ClassDef):
- continue
- if ancestor.is_subtype_of("%s.classmethod" % BUILTINS):
- return "classmethod"
- if ancestor.is_subtype_of("%s.staticmethod" % BUILTINS):
- return "staticmethod"
- except exceptions.InferenceError:
- pass
- return type_name
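
The decorator inspection above is what ultimately drives FunctionDef.type. A brief
sketch of the observable behaviour, assuming astroid is importable; the class source
here is purely illustrative:

    import astroid

    src = "class A:\n    @staticmethod\n    def s(): pass\n    def m(self): pass\n"
    cls = astroid.extract_node(src)   # the ClassDef for A
    print(cls.locals["s"][0].type)    # -> staticmethod
    print(cls.locals["m"][0].type)    # -> method
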
-
- @decorators_mod.cachedproperty
- def fromlineno(self):
- """The first line that this node appears on in the source code.
-
- :type: int or None
- """
- # lineno is the line number of the first decorator; we want the def
- # statement's lineno
- lineno = self.lineno
- if self.decorators is not None:
- lineno += sum(
- node.tolineno - node.lineno + 1 for node in self.decorators.nodes
- )
-
- return lineno
-
- @decorators_mod.cachedproperty
- def blockstart_tolineno(self):
- """The line on which the beginning of this block ends.
-
- :type: int
- """
- return self.args.tolineno
-
- def block_range(self, lineno):
- """Get a range from the given line number to where this node ends.
-
- :param lineno: Unused.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to.
- :rtype: tuple(int, int)
- """
- return self.fromlineno, self.tolineno
-
- def getattr(self, name, context=None):
- """this method doesn't look in the instance_attrs dictionary since it's
- done by an Instance proxy at inference time.
- """
- if name in self.instance_attrs:
- return self.instance_attrs[name]
- if name in self.special_attributes:
- return [self.special_attributes.lookup(name)]
- raise exceptions.AttributeInferenceError(target=self, attribute=name)
-
- def igetattr(self, name, context=None):
- """Inferred getattr, which returns an iterator of inferred statements."""
- try:
- return bases._infer_stmts(self.getattr(name, context), context, frame=self)
- except exceptions.AttributeInferenceError as error:
- raise exceptions.InferenceError(
- error.message, target=self, attribute=name, context=context
- ) from error
-
- def is_method(self):
- """Check if this function node represents a method.
-
- :returns: True if this is a method, False otherwise.
- :rtype: bool
- """
- # check we are defined in a ClassDef, because this is usually expected
- # (e.g. pylint...) when is_method() returns True
- return self.type != "function" and isinstance(self.parent.frame(), ClassDef)
-
- @decorators_mod.cached
- def decoratornames(self):
- """Get the qualified names of each of the decorators on this function.
-
- :returns: The names of the decorators.
- :rtype: set(str)
- """
- result = set()
- decoratornodes = []
- if self.decorators is not None:
- decoratornodes += self.decorators.nodes
- decoratornodes += self.extra_decorators
- for decnode in decoratornodes:
- try:
- for infnode in decnode.infer():
- result.add(infnode.qname())
- except exceptions.InferenceError:
- continue
- return result
-
- def is_bound(self):
- """Check if the function is bound to an instance or class.
-
- :returns: True if the function is bound to an instance or class,
- False otherwise.
- :rtype: bool
- """
- return self.type == "classmethod"
-
- def is_abstract(self, pass_is_abstract=True):
- """Check if the method is abstract.
-
- A method is considered abstract if any of the following is true:
- * The only statement is 'raise NotImplementedError'
- * The only statement is 'pass' and pass_is_abstract is True
- * The method is annotated with abc.abstractproperty/abc.abstractmethod
-
- :returns: True if the method is abstract, False otherwise.
- :rtype: bool
- """
- if self.decorators:
- for node in self.decorators.nodes:
- try:
- inferred = next(node.infer())
- except exceptions.InferenceError:
- continue
- if inferred and inferred.qname() in (
- "abc.abstractproperty",
- "abc.abstractmethod",
- ):
- return True
-
- for child_node in self.body:
- if isinstance(child_node, node_classes.Raise):
- if child_node.raises_not_implemented():
- return True
- return pass_is_abstract and isinstance(child_node, node_classes.Pass)
- # empty function is the same as function with a single "pass" statement
- if pass_is_abstract:
- return True
-
- def is_generator(self):
- """Check if this is a generator function.
-
- :returns: True if this is a generator function, False otherwise.
- :rtype: bool
- """
- return next(self._get_yield_nodes_skip_lambdas(), False)
-
- def infer_call_result(self, caller=None, context=None):
- """Infer what the function returns when called.
-
- :returns: What the function returns.
- :rtype: iterable(NodeNG or Uninferable) or None
- """
- if self.is_generator():
- if isinstance(self, AsyncFunctionDef):
- generator_cls = bases.AsyncGenerator
- else:
- generator_cls = bases.Generator
- result = generator_cls(self)
- yield result
- return
- # This is really a gigantic hack to work around metaclass generators
- # that return transient class-generating functions. Pylint's AST structure
- # cannot handle a base class object that is only used for calling __new__,
- # but does not contribute to the inheritance structure itself. We inject
- # a fake class into the hierarchy here for several well-known metaclass
- # generators, and filter it out later.
- if (
- self.name == "with_metaclass"
- and len(self.args.args) == 1
- and self.args.vararg is not None
- ):
- metaclass = next(caller.args[0].infer(context))
- if isinstance(metaclass, ClassDef):
- class_bases = [next(arg.infer(context)) for arg in caller.args[1:]]
- new_class = ClassDef(name="temporary_class")
- new_class.hide = True
- new_class.parent = self
- new_class.postinit(
- bases=[base for base in class_bases if base != util.Uninferable],
- body=[],
- decorators=[],
- metaclass=metaclass,
- )
- yield new_class
- return
- returns = self._get_return_nodes_skip_functions()
-
- first_return = next(returns, None)
- if not first_return:
- if self.body and isinstance(self.body[-1], node_classes.Assert):
- yield node_classes.Const(None)
- return
-
- raise exceptions.InferenceError(
- "The function does not have any return statements"
- )
-
- for returnnode in itertools.chain((first_return,), returns):
- if returnnode.value is None:
- yield node_classes.Const(None)
- else:
- try:
- yield from returnnode.value.infer(context)
- except exceptions.InferenceError:
- yield util.Uninferable
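For illustration, inferring the call result of a simple function (a sketch assuming the body returns a constant; the ``answer`` function is hypothetical):

import astroid

func = astroid.extract_node('''
def answer():   #@
    return 42
''')
for result in func.infer_call_result(caller=None):
    print(result)   # a Const node holding 42; Uninferable when inference fails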
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`FunctionDef` this is always ``True``.
- :rtype: bool
- """
- return True
-
- def get_children(self):
- if self.decorators is not None:
- yield self.decorators
-
- yield self.args
-
- if self.returns is not None:
- yield self.returns
-
- yield from self.body
-
- def scope_lookup(self, node, name, offset=0):
- """Lookup where the given name is assigned."""
- if name == "__class__":
- # __class__ is an implicit closure reference created by the compiler
- # if any methods in a class body refer to either __class__ or super.
- # In our case, we want to be able to look it up in the current scope
- # when `__class__` is being used.
- frame = self.parent.frame()
- if isinstance(frame, ClassDef):
- return self, [frame]
- return super().scope_lookup(node, name, offset)
-
-
-class AsyncFunctionDef(FunctionDef):
- """Class representing an :class:`ast.AsyncFunctionDef` node.
-
- An :class:`AsyncFunctionDef` is an asynchronous function
- created with the `async` keyword.
-
- >>> node = astroid.extract_node('''
- async def func(things):
- async for thing in things:
- print(thing)
- ''')
- >>> node
- <AsyncFunctionDef.func l.2 at 0x7f23b2e416d8>
- >>> node.body[0]
- <AsyncFor l.3 at 0x7f23b2e417b8>
- """
-
-
-def _rec_get_names(args, names=None):
- """return a list of all argument names"""
- if names is None:
- names = []
- for arg in args:
- if isinstance(arg, node_classes.Tuple):
- _rec_get_names(arg.elts, names)
- else:
- names.append(arg.name)
- return names
-
-
-def _is_metaclass(klass, seen=None):
- """ Return True if the given class can be
- used as a metaclass.
- """
- if klass.name == "type":
- return True
- if seen is None:
- seen = set()
- for base in klass.bases:
- try:
- for baseobj in base.infer():
- baseobj_name = baseobj.qname()
- if baseobj_name in seen:
- continue
-
- seen.add(baseobj_name)
- if isinstance(baseobj, bases.Instance):
- # not abstract
- return False
- if baseobj is util.Uninferable:
- continue
- if baseobj is klass:
- continue
- if not isinstance(baseobj, ClassDef):
- continue
- if baseobj._type == "metaclass":
- return True
- if _is_metaclass(baseobj, seen):
- return True
- except exceptions.InferenceError:
- continue
- return False
-
-
-def _class_type(klass, ancestors=None):
- """return a ClassDef node type to differ metaclass and exception
- from 'regular' classes
- """
- # XXX we have to store ancestors in case we have an ancestor loop
- if klass._type is not None:
- return klass._type
- if _is_metaclass(klass):
- klass._type = "metaclass"
- elif klass.name.endswith("Exception"):
- klass._type = "exception"
- else:
- if ancestors is None:
- ancestors = set()
- klass_name = klass.qname()
- if klass_name in ancestors:
- # XXX we are in loop ancestors, and have found no type
- klass._type = "class"
- return "class"
- ancestors.add(klass_name)
- for base in klass.ancestors(recurs=False):
- name = _class_type(base, ancestors)
- if name != "class":
- if name == "metaclass" and not _is_metaclass(klass):
- # don't propagate it if the current class
- # can't be a metaclass
- continue
- klass._type = base.type
- break
- if klass._type is None:
- klass._type = "class"
- return klass._type
-
-
-def get_wrapping_class(node):
- """Get the class that wraps the given node.
-
- We consider that a class wraps a node if the class
- is a parent for the said node.
-
- :returns: The class that wraps the given node
- :rtype: ClassDef or None
- """
-
- klass = node.frame()
- while klass is not None and not isinstance(klass, ClassDef):
- if klass.parent is None:
- klass = None
- else:
- klass = klass.parent.frame()
- return klass
-
-
-class ClassDef(mixins.FilterStmtsMixin, LocalsDictNodeNG, node_classes.Statement):
- """Class representing an :class:`ast.ClassDef` node.
-
- >>> node = astroid.extract_node('''
- class Thing:
- def my_meth(self, arg):
- return arg + self.offset
- ''')
- >>> node
- <ClassDef.Thing l.2 at 0x7f23b2e9e748>
- """
-
- # some of the attributes below are set by the builder module or
- # by raw factories
-
- # a dictionary of class instance attributes
- _astroid_fields = ("decorators", "bases", "body") # name
-
- decorators = None
- """The decorators that are applied to this class.
-
- :type: Decorators or None
- """
- special_attributes = objectmodel.ClassModel()
- """The names of special attributes that this class has.
-
- :type: objectmodel.ClassModel
- """
-
- _type = None
- _metaclass_hack = False
- hide = False
- type = property(
- _class_type,
- doc=(
- "The class type for this node.\n\n"
- "Possible values are: class, metaclass, exception.\n\n"
- ":type: str"
- ),
- )
- _other_fields = ("name", "doc")
- _other_other_fields = ("locals", "_newstyle")
- _newstyle = None
-
- def __init__(self, name=None, doc=None, lineno=None, col_offset=None, parent=None):
- """
- :param name: The name of the class.
- :type name: str or None
-
- :param doc: The class' docstring.
- :type doc: str or None
-
- :param lineno: The line that this node appears on in the source code.
- :type lineno: int or None
-
- :param col_offset: The column that this node appears on in the
- source code.
- :type col_offset: int or None
-
- :param parent: The parent node in the syntax tree.
- :type parent: NodeNG or None
- """
- self.instance_attrs = {}
- self.locals = {}
- """A map of the name of a local variable to the node defining it.
-
- :type: dict(str, NodeNG)
- """
-
- self.keywords = []
- """The keywords given to the class definition.
-
- This is usually for :pep:`3115` style metaclass declaration.
-
- :type: list(Keyword) or None
- """
-
- self.bases = []
- """What the class inherits from.
-
- :type: list(NodeNG)
- """
-
- self.body = []
- """The contents of the class body.
-
- :type: list(NodeNG)
- """
-
- self.name = name
- """The name of the class.
-
- :type name: str or None
- """
-
- self.doc = doc
- """The class' docstring.
-
- :type doc: str or None
- """
-
- super(ClassDef, self).__init__(lineno, col_offset, parent)
- if parent is not None:
- parent.frame().set_local(name, self)
-
- for local_name, node in self.implicit_locals():
- self.add_local_node(node, local_name)
-
- def implicit_parameters(self):
- return 1
-
- def implicit_locals(self):
- """Get implicitly defined class definition locals.
-
- :returns: the name and Const pair for each local
- :rtype: tuple(tuple(str, node_classes.Const), ...)
- """
- locals_ = (("__module__", self.special_attributes.attr___module__),)
- # __qualname__ is defined in PEP3155
- locals_ += (("__qualname__", self.special_attributes.attr___qualname__),)
- return locals_
-
- # pylint: disable=redefined-outer-name
- def postinit(
- self, bases, body, decorators, newstyle=None, metaclass=None, keywords=None
- ):
- """Do some setup after initialisation.
-
- :param bases: What the class inherits from.
- :type bases: list(NodeNG)
-
- :param body: The contents of the class body.
- :type body: list(NodeNG)
-
- :param decorators: The decorators that are applied to this class.
- :type decorators: Decorators or None
-
- :param newstyle: Whether this is a new style class or not.
- :type newstyle: bool or None
-
- :param metaclass: The metaclass of this class.
- :type metaclass: NodeNG or None
-
- :param keywords: The keywords given to the class definition.
- :type keywords: list(Keyword) or None
- """
- self.keywords = keywords
- self.bases = bases
- self.body = body
- self.decorators = decorators
- if newstyle is not None:
- self._newstyle = newstyle
- if metaclass is not None:
- self._metaclass = metaclass
-
- def _newstyle_impl(self, context=None):
- if context is None:
- context = contextmod.InferenceContext()
- if self._newstyle is not None:
- return self._newstyle
- for base in self.ancestors(recurs=False, context=context):
- if base._newstyle_impl(context):
- self._newstyle = True
- break
- klass = self.declared_metaclass()
- # could be any callable, we'd need to infer the result of klass(name,
- # bases, dict). punt if it's not a class node.
- if klass is not None and isinstance(klass, ClassDef):
- self._newstyle = klass._newstyle_impl(context)
- if self._newstyle is None:
- self._newstyle = False
- return self._newstyle
-
- _newstyle = None
- newstyle = property(
- _newstyle_impl,
- doc=("Whether this is a new style class or not\n\n" ":type: bool or None"),
- )
-
- @decorators_mod.cachedproperty
- def blockstart_tolineno(self):
- """The line on which the beginning of this block ends.
-
- :type: int
- """
- if self.bases:
- return self.bases[-1].tolineno
-
- return self.fromlineno
-
- def block_range(self, lineno):
- """Get a range from the given line number to where this node ends.
-
- :param lineno: Unused.
- :type lineno: int
-
- :returns: The range of line numbers that this node belongs to.
- :rtype: tuple(int, int)
- """
- return self.fromlineno, self.tolineno
-
- def pytype(self):
- """Get the name of the type that this node represents.
-
- :returns: The name of the type.
- :rtype: str
- """
- if self.newstyle:
- return "%s.type" % BUILTINS
- return "%s.classobj" % BUILTINS
-
- def display_type(self):
- """A human readable type of this node.
-
- :returns: The type of this node.
- :rtype: str
- """
- return "Class"
-
- def callable(self):
- """Whether this node defines something that is callable.
-
- :returns: True if this defines something that is callable,
- False otherwise.
- For a :class:`ClassDef` this is always ``True``.
- :rtype: bool
- """
- return True
-
- def is_subtype_of(self, type_name, context=None):
- """Whether this class is a subtype of the given type.
-
- :param type_name: The name of the type to check against.
- :type type_name: str
-
- :returns: True if this class is a subtype of the given type,
- False otherwise.
- :rtype: bool
- """
- if self.qname() == type_name:
- return True
- for anc in self.ancestors(context=context):
- if anc.qname() == type_name:
- return True
- return False
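A small sketch of ``is_subtype_of()``, which compares the given qualified name against the class and its ancestors (the ``MyError`` class is hypothetical):

import astroid

cls = astroid.extract_node('''
class MyError(ValueError):   #@
    pass
''')
print(cls.is_subtype_of('builtins.ValueError'))  # True
print(cls.is_subtype_of('builtins.KeyError'))    # False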
-
- def _infer_type_call(self, caller, context):
- name_node = next(caller.args[0].infer(context))
- if isinstance(name_node, node_classes.Const) and isinstance(
- name_node.value, str
- ):
- name = name_node.value
- else:
- return util.Uninferable
-
- result = ClassDef(name, None)
-
- # Get the bases of the class.
- class_bases = next(caller.args[1].infer(context))
- if isinstance(class_bases, (node_classes.Tuple, node_classes.List)):
- result.bases = class_bases.itered()
- else:
- # There is currently no AST node that can represent an 'unknown'
- # node (Uninferable is not an AST node), therefore we simply return Uninferable here
- # although we know at least the name of the class.
- return util.Uninferable
-
- # Get the members of the class
- try:
- members = next(caller.args[2].infer(context))
- except exceptions.InferenceError:
- members = None
-
- if members and isinstance(members, node_classes.Dict):
- for attr, value in members.items:
- if isinstance(attr, node_classes.Const) and isinstance(attr.value, str):
- result.locals[attr.value] = [value]
-
- result.parent = caller.parent
- return result
-
- def infer_call_result(self, caller, context=None):
- """infer what a class is returning when called"""
- if (
- self.is_subtype_of("%s.type" % (BUILTINS,), context)
- and len(caller.args) == 3
- ):
- result = self._infer_type_call(caller, context)
- yield result
- return
-
- dunder_call = None
- try:
- metaclass = self.metaclass(context=context)
- if metaclass is not None:
- dunder_call = next(metaclass.igetattr("__call__", context))
- except exceptions.AttributeInferenceError:
- pass
-
- if dunder_call and dunder_call.qname() != "builtins.type.__call__":
- # Only call the metaclass's __call__ when it is not the default
- # type.__call__ (since type is the default metaclass)
- context = contextmod.bind_context_to_node(context, self)
- yield from dunder_call.infer_call_result(caller, context)
- else:
- if any(cls.name in EXCEPTION_BASE_CLASSES for cls in self.mro()):
- # Subclasses of exceptions can be exception instances
- yield objects.ExceptionInstance(self)
- else:
- yield bases.Instance(self)
-
- def scope_lookup(self, node, name, offset=0):
- """Lookup where the given name is assigned.
-
- :param node: The node to look for assignments up to.
- Any assignments after the given node are ignored.
- :type node: NodeNG
-
- :param name: The name to find assignments for.
- :type name: str
-
- :param offset: The line offset to filter statements up to.
- :type offset: int
-
- :returns: This scope node and the list of assignments associated to the
- given name according to the scope where it has been found (locals,
- globals or builtin).
- :rtype: tuple(str, list(NodeNG))
- """
- # If the name looks like a builtin name, just try to look
- # into the upper scope of this class. We might have a
- # decorator that is poorly named after a builtin object
- # inside this class.
- lookup_upper_frame = (
- isinstance(node.parent, node_classes.Decorators)
- and name in MANAGER.builtins_module
- )
- if (
- any(node == base or base.parent_of(node) for base in self.bases)
- or lookup_upper_frame
- ):
- # Handle the case where we have either a name
- # in the bases of a class, which exists before
- # the actual definition or the case where we have
- # a Getattr node, with that name.
- #
- # name = ...
- # class A(name):
- # def name(self): ...
- #
- # import name
- # class A(name.Name):
- # def name(self): ...
-
- frame = self.parent.frame()
- # line offset to avoid that class A(A) resolve the ancestor to
- # the defined class
- offset = -1
- else:
- frame = self
- return frame._scope_lookup(node, name, offset)
-
- @property
- def basenames(self):
- """The names of the parent classes
-
- Names are given in the order they appear in the class definition.
-
- :type: list(str)
- """
- return [bnode.as_string() for bnode in self.bases]
-
- def ancestors(self, recurs=True, context=None):
- """Iterate over the base classes in prefixed depth first order.
-
- :param recurs: Whether to recurse or return direct ancestors only.
- :type recurs: bool
-
- :returns: The base classes
- :rtype: iterable(NodeNG)
- """
- # FIXME: should be possible to choose the resolution order
- # FIXME: inference make infinite loops possible here
- yielded = {self}
- if context is None:
- context = contextmod.InferenceContext()
- if not self.bases and self.qname() != "builtins.object":
- yield builtin_lookup("object")[1][0]
- return
-
- for stmt in self.bases:
- with context.restore_path():
- try:
- for baseobj in stmt.infer(context):
- if not isinstance(baseobj, ClassDef):
- if isinstance(baseobj, bases.Instance):
- baseobj = baseobj._proxied
- else:
- continue
- if not baseobj.hide:
- if baseobj in yielded:
- continue
- yielded.add(baseobj)
- yield baseobj
- if not recurs:
- continue
- for grandpa in baseobj.ancestors(recurs=True, context=context):
- if grandpa is self:
- # This class is the ancestor of itself.
- break
- if grandpa in yielded:
- continue
- yielded.add(grandpa)
- yield grandpa
- except exceptions.InferenceError:
- continue
-
- def local_attr_ancestors(self, name, context=None):
- """Iterate over the parents that define the given name.
-
- :param name: The name to find definitions for.
- :type name: str
-
- :returns: The parents that define the given name.
- :rtype: iterable(NodeNG)
- """
- # Look up in the mro if we can. This will result in the
- # attribute being looked up just as Python does it.
- try:
- ancestors = self.mro(context)[1:]
- except exceptions.MroError:
- # Fallback to use ancestors, we can't determine
- # a sane MRO.
- ancestors = self.ancestors(context=context)
- for astroid in ancestors:
- if name in astroid:
- yield astroid
-
- def instance_attr_ancestors(self, name, context=None):
- """Iterate over the parents that define the given name as an attribute.
-
- :param name: The name to find definitions for.
- :type name: str
-
- :returns: The parents that define the given name as
- an instance attribute.
- :rtype: iterable(NodeNG)
- """
- for astroid in self.ancestors(context=context):
- if name in astroid.instance_attrs:
- yield astroid
-
- def has_base(self, node):
- """Whether this class directly inherits from the given node.
-
- :param node: The node to check for.
- :type node: NodeNG
-
- :returns: True if this class directly inherits from the given node.
- :rtype: bool
- """
- return node in self.bases
-
- def local_attr(self, name, context=None):
- """Get the list of assign nodes associated to the given name.
-
- Assignments are looked for in both this class and in parents.
-
- :returns: The list of assignments to the given name.
- :rtype: list(NodeNG)
-
- :raises AttributeInferenceError: If no attribute with this name
- can be found in this class or parent classes.
- """
- result = []
- if name in self.locals:
- result = self.locals[name]
- else:
- class_node = next(self.local_attr_ancestors(name, context), None)
- if class_node:
- result = class_node.locals[name]
- result = [n for n in result if not isinstance(n, node_classes.DelAttr)]
- if result:
- return result
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- )
-
- def instance_attr(self, name, context=None):
- """Get the list of nodes associated to the given attribute name.
-
- Assignments are looked for in both this class and in parents.
-
- :returns: The list of assignments to the given name.
- :rtype: list(NodeNG)
-
- :raises AttributeInferenceError: If no attribute with this name
- can be found in this class or parent classes.
- """
- # Return a copy, so we don't modify self.instance_attrs,
- # which could lead to infinite loop.
- values = list(self.instance_attrs.get(name, []))
- # get all values from parents
- for class_node in self.instance_attr_ancestors(name, context):
- values += class_node.instance_attrs[name]
- values = [n for n in values if not isinstance(n, node_classes.DelAttr)]
- if values:
- return values
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- )
-
- def instantiate_class(self):
- """Get an :class:`Instance` of the :class:`ClassDef` node.
-
- :returns: An :class:`Instance` of the :class:`ClassDef` node,
- or self if this is not possible.
- :rtype: Instance or ClassDef
- """
- return bases.Instance(self)
-
- def getattr(self, name, context=None, class_context=True):
- """Get an attribute from this class, using Python's attribute semantics.
-
- This method doesn't look in the :attr:`instance_attrs` dictionary
- since it is done by an :class:`Instance` proxy at inference time.
- It may return an :class:`Uninferable` object if
- the attribute has not been
- found, but a ``__getattr__`` or ``__getattribute__`` method is defined.
- If ``class_context`` is given, then it is considered that the
- attribute is accessed from a class context,
- e.g. ClassDef.attribute, otherwise it might have been accessed
- from an instance as well. If ``class_context`` is used in that
- case, then a lookup in the implicit metaclass and the explicit
- metaclass will be done.
-
- :param name: The attribute to look for.
- :type name: str
-
- :param class_context: Whether the attribute can be accessed statically.
- :type class_context: bool
-
- :returns: The attribute.
- :rtype: list(NodeNG)
-
- :raises AttributeInferenceError: If the attribute cannot be inferred.
- """
- values = self.locals.get(name, [])
- if name in self.special_attributes and class_context and not values:
- result = [self.special_attributes.lookup(name)]
- if name == "__bases__":
- # Need special treatment, since they are mutable
- # and we need to return all the values.
- result += values
- return result
-
- # don't modify the list in self.locals!
- values = list(values)
- for classnode in self.ancestors(recurs=True, context=context):
- values += classnode.locals.get(name, [])
-
- if class_context:
- values += self._metaclass_lookup_attribute(name, context)
-
- if not values:
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- )
-
- # Look for AnnAssigns, which are not attributes in the purest sense.
- for value in values:
- if isinstance(value, node_classes.AssignName):
- stmt = value.statement()
- if isinstance(stmt, node_classes.AnnAssign) and stmt.value is None:
- raise exceptions.AttributeInferenceError(
- target=self, attribute=name, context=context
- )
- return values
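Illustrative usage of ``ClassDef.getattr()``: it returns the assignment nodes found in the class, its ancestors and, in a class context, its metaclass (the ``Config`` class is a made-up example):

import astroid

cls = astroid.extract_node('''
class Config:   #@
    default_timeout = 30

    def reload(self):
        pass
''')
print(cls.getattr('default_timeout'))  # [<AssignName.default_timeout ...>]
print(cls.getattr('reload'))           # [<FunctionDef.reload ...>]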
-
- def _metaclass_lookup_attribute(self, name, context):
- """Search the given name in the implicit and the explicit metaclass."""
- attrs = set()
- implicit_meta = self.implicit_metaclass()
- metaclass = self.metaclass()
- for cls in {implicit_meta, metaclass}:
- if cls and cls != self and isinstance(cls, ClassDef):
- cls_attributes = self._get_attribute_from_metaclass(cls, name, context)
- attrs.update(set(cls_attributes))
- return attrs
-
- def _get_attribute_from_metaclass(self, cls, name, context):
- try:
- attrs = cls.getattr(name, context=context, class_context=True)
- except exceptions.AttributeInferenceError:
- return
-
- for attr in bases._infer_stmts(attrs, context, frame=cls):
- if not isinstance(attr, FunctionDef):
- yield attr
- continue
-
- if bases._is_property(attr):
- yield from attr.infer_call_result(self, context)
- continue
- if attr.type == "classmethod":
- # If the method is a classmethod, then it will
- # be bound to the metaclass, not to the class
- # from where the attribute is retrieved.
- # get_wrapping_class could return None, so just
- # default to the current class.
- frame = get_wrapping_class(attr) or self
- yield bases.BoundMethod(attr, frame)
- elif attr.type == "staticmethod":
- yield attr
- else:
- yield bases.BoundMethod(attr, self)
-
- def igetattr(self, name, context=None, class_context=True):
- """Infer the possible values of the given variable.
-
- :param name: The name of the variable to infer.
- :type name: str
-
- :returns: The inferred possible values.
- :rtype: iterable(NodeNG or Uninferable)
- """
- # set lookup name since this is necessary to infer on import nodes for
- # instance
- context = contextmod.copy_context(context)
- context.lookupname = name
- try:
- attr = self.getattr(name, context, class_context=class_context)[0]
- for inferred in bases._infer_stmts([attr], context, frame=self):
- # yield Uninferable object instead of descriptors when necessary
- if not isinstance(inferred, node_classes.Const) and isinstance(
- inferred, bases.Instance
- ):
- try:
- inferred._proxied.getattr("__get__", context)
- except exceptions.AttributeInferenceError:
- yield inferred
- else:
- yield util.Uninferable
- else:
- yield function_to_method(inferred, self)
- except exceptions.AttributeInferenceError as error:
- if not name.startswith("__") and self.has_dynamic_getattr(context):
- # the class handles some dynamic attributes, return an Uninferable object
- yield util.Uninferable
- else:
- raise exceptions.InferenceError(
- error.message, target=self, attribute=name, context=context
- )
-
- def has_dynamic_getattr(self, context=None):
- """Check if the class has a custom __getattr__ or __getattribute__.
-
- If any such method is found and it is not from
- builtins, nor from an extension module, then the function
- will return True.
-
- :returns: True if the class has a custom
- __getattr__ or __getattribute__, False otherwise.
- :rtype: bool
- """
-
- def _valid_getattr(node):
- root = node.root()
- return root.name != BUILTINS and getattr(root, "pure_python", None)
-
- try:
- return _valid_getattr(self.getattr("__getattr__", context)[0])
- except exceptions.AttributeInferenceError:
- # if self.newstyle: XXX cause an infinite recursion error
- try:
- getattribute = self.getattr("__getattribute__", context)[0]
- return _valid_getattr(getattribute)
- except exceptions.AttributeInferenceError:
- pass
- return False
-
- def getitem(self, index, context=None):
- """Return the inference of a subscript.
-
- This is basically looking up the method in the metaclass and calling it.
-
- :returns: The inferred value of a subscript to this class.
- :rtype: NodeNG
-
- :raises AstroidTypeError: If this class does not define a
- ``__getitem__`` method.
- """
- try:
- methods = dunder_lookup.lookup(self, "__getitem__")
- except exceptions.AttributeInferenceError as exc:
- raise exceptions.AstroidTypeError(node=self, context=context) from exc
-
- method = methods[0]
-
- # Create a new callcontext for providing index as an argument.
- new_context = contextmod.bind_context_to_node(context, self)
- new_context.callcontext = contextmod.CallContext(args=[index])
-
- try:
- return next(method.infer_call_result(self, new_context))
- except exceptions.InferenceError:
- return util.Uninferable
-
- def methods(self):
- """Iterate over all of the methods defined in this class and its parents.
-
- :returns: The methods defined on the class.
- :rtype: iterable(FunctionDef)
- """
- done = {}
- for astroid in itertools.chain(iter((self,)), self.ancestors()):
- for meth in astroid.mymethods():
- if meth.name in done:
- continue
- done[meth.name] = None
- yield meth
-
- def mymethods(self):
- """Iterate over all of the methods defined in this class only.
-
- :returns: The methods defined on the class.
- :rtype: iterable(FunctionDef)
- """
- for member in self.values():
- if isinstance(member, FunctionDef):
- yield member
-
- def implicit_metaclass(self):
- """Get the implicit metaclass of the current class.
-
- For newstyle classes, this will return an instance of builtins.type.
- For oldstyle classes, it will simply return None, since there's
- no implicit metaclass there.
-
- :returns: The metaclass.
- :rtype: builtins.type or None
- """
- if self.newstyle:
- return builtin_lookup("type")[1][0]
- return None
-
- _metaclass = None
-
- def declared_metaclass(self, context=None):
- """Return the explicit declared metaclass for the current class.
-
- An explicit declared metaclass is defined
- either by passing the ``metaclass`` keyword argument
- in the class definition line (Python 3) or (Python 2) by
- having a ``__metaclass__`` class attribute, or if there are
- no explicit bases but there is a global ``__metaclass__`` variable.
-
- :returns: The metaclass of this class,
- or None if one could not be found.
- :rtype: NodeNG or None
- """
- for base in self.bases:
- try:
- for baseobj in base.infer(context=context):
- if isinstance(baseobj, ClassDef) and baseobj.hide:
- self._metaclass = baseobj._metaclass
- self._metaclass_hack = True
- break
- except exceptions.InferenceError:
- pass
-
- if self._metaclass:
- # Expects this from Py3k TreeRebuilder
- try:
- return next(
- node
- for node in self._metaclass.infer(context=context)
- if node is not util.Uninferable
- )
- except (exceptions.InferenceError, StopIteration):
- return None
-
- return None
-
- def _find_metaclass(self, seen=None, context=None):
- if seen is None:
- seen = set()
- seen.add(self)
-
- klass = self.declared_metaclass(context=context)
- if klass is None:
- for parent in self.ancestors(context=context):
- if parent not in seen:
- klass = parent._find_metaclass(seen)
- if klass is not None:
- break
- return klass
-
- def metaclass(self, context=None):
- """Get the metaclass of this class.
-
- If this class does not define explicitly a metaclass,
- then the first defined metaclass in ancestors will be used
- instead.
-
- :returns: The metaclass of this class.
- :rtype: NodeNG or None
- """
- return self._find_metaclass(context=context)
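A sketch of ``metaclass()`` resolution for a class that declares ``metaclass=abc.ABCMeta`` (the ``Plugin`` class is hypothetical):

import astroid

cls = astroid.extract_node('''
import abc

class Plugin(metaclass=abc.ABCMeta):   #@
    pass
''')
meta = cls.metaclass()
print(meta.qname() if meta else None)  # e.g. 'abc.ABCMeta'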
-
- def has_metaclass_hack(self):
- return self._metaclass_hack
-
- def _islots(self):
- """ Return an iterator with the inferred slots. """
- if "__slots__" not in self.locals:
- return None
- for slots in self.igetattr("__slots__"):
- # check if __slots__ is a valid type
- for meth in ITER_METHODS:
- try:
- slots.getattr(meth)
- break
- except exceptions.AttributeInferenceError:
- continue
- else:
- continue
-
- if isinstance(slots, node_classes.Const):
- # a string. Ignore the following checks,
- # but yield the node, only if it has a value
- if slots.value:
- yield slots
- continue
- if not hasattr(slots, "itered"):
- # we can't obtain the values, maybe a .deque?
- continue
-
- if isinstance(slots, node_classes.Dict):
- values = [item[0] for item in slots.items]
- else:
- values = slots.itered()
- if values is util.Uninferable:
- continue
- if not values:
- # Stop the iteration, because the class
- # has an empty list of slots.
- return values
-
- for elt in values:
- try:
- for inferred in elt.infer():
- if inferred is util.Uninferable:
- continue
- if not isinstance(
- inferred, node_classes.Const
- ) or not isinstance(inferred.value, str):
- continue
- if not inferred.value:
- continue
- yield inferred
- except exceptions.InferenceError:
- continue
-
- return None
-
- def _slots(self):
- if not self.newstyle:
- raise NotImplementedError(
- "The concept of slots is undefined for old-style classes."
- )
-
- slots = self._islots()
- try:
- first = next(slots)
- except StopIteration as exc:
- # The class doesn't have a __slots__ definition or empty slots.
- if exc.args and exc.args[0] not in ("", None):
- return exc.args[0]
- return None
- return [first] + list(slots)
-
- # Cached, because inferring them all the time is expensive
- @decorators_mod.cached
- def slots(self):
- """Get all the slots for this node.
-
- :returns: The names of slots for this class.
- If the class doesn't define any slots through the ``__slots__``
- variable, then this function will return None.
- It will also return None if the slots could not be inferred.
- :rtype: list(str) or None
- """
-
- def grouped_slots():
- # Not interested in object, since it can't have slots.
- for cls in self.mro()[:-1]:
- try:
- cls_slots = cls._slots()
- except NotImplementedError:
- continue
- if cls_slots is not None:
- yield from cls_slots
- else:
- yield None
-
- if not self.newstyle:
- raise NotImplementedError(
- "The concept of slots is undefined for old-style classes."
- )
-
- slots = list(grouped_slots())
- if not all(slot is not None for slot in slots):
- return None
-
- return sorted(slots, key=lambda item: item.value)
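Illustrative usage of ``slots()`` (the ``Point`` class is hypothetical); classes without a ``__slots__`` definition return None instead of a list:

import astroid

cls = astroid.extract_node('''
class Point:   #@
    __slots__ = ('x', 'y')
''')
print([slot.value for slot in cls.slots()])  # ['x', 'y']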
-
- def _inferred_bases(self, context=None):
- # Similar to .ancestors, but when one of the bases is inferred,
- # only the first inferred object is wanted. That's because
- # we aren't interested in superclasses, as in the following
- # example:
- #
- # class SomeSuperClass(object): pass
- # class SomeClass(SomeSuperClass): pass
- # class Test(SomeClass): pass
- #
- # Inferring SomeClass from the Test's bases will give
- # us both SomeClass and SomeSuperClass, but we are interested
- # only in SomeClass.
-
- if context is None:
- context = contextmod.InferenceContext()
- if not self.bases and self.qname() != "builtins.object":
- yield builtin_lookup("object")[1][0]
- return
-
- for stmt in self.bases:
- try:
- baseobj = next(stmt.infer(context=context))
- except exceptions.InferenceError:
- continue
- if isinstance(baseobj, bases.Instance):
- baseobj = baseobj._proxied
- if not isinstance(baseobj, ClassDef):
- continue
- if not baseobj.hide:
- yield baseobj
- else:
- yield from baseobj.bases
-
- def _compute_mro(self, context=None):
- inferred_bases = list(self._inferred_bases(context=context))
- bases_mro = []
- for base in inferred_bases:
- if base is self:
- continue
-
- try:
- mro = base._compute_mro(context=context)
- bases_mro.append(mro)
- except NotImplementedError:
- # Some classes have in their ancestors both newstyle and
- # old style classes. For these we can't retrieve the .mro,
- # although in Python it's possible, since the class we are
- # currently working on is in fact new style.
- # So, we fallback to ancestors here.
- ancestors = list(base.ancestors(context=context))
- bases_mro.append(ancestors)
-
- unmerged_mro = [[self]] + bases_mro + [inferred_bases]
- unmerged_mro = list(clean_duplicates_mro(unmerged_mro, self, context))
- return _c3_merge(unmerged_mro, self, context)
-
- def mro(self, context=None) -> List["ClassDef"]:
- """Get the method resolution order, using C3 linearization.
-
- :returns: The list of ancestors, sorted by the mro.
- :rtype: list(NodeNG)
- :raises DuplicateBasesError: Duplicate bases in the same class base
- :raises InconsistentMroError: A class' MRO is inconsistent
- """
- return self._compute_mro(context=context)
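A minimal sketch of ``mro()`` with a small, made-up hierarchy:

import astroid

cls = astroid.extract_node('''
class A: pass
class B(A): pass
class C(B):   #@
    pass
''')
print([klass.name for klass in cls.mro()])  # ['C', 'B', 'A', 'object']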
-
- def bool_value(self):
- """Determine the boolean value of this node.
-
- :returns: The boolean value of this node.
- For a :class:`ClassDef` this is always ``True``.
- :rtype: bool
- """
- return True
-
- def get_children(self):
- if self.decorators is not None:
- yield self.decorators
-
- yield from self.bases
- yield from self.body
-
- @decorators_mod.cached
- def _get_assign_nodes(self):
- children_assign_nodes = (
- child_node._get_assign_nodes() for child_node in self.body
- )
- return list(itertools.chain.from_iterable(children_assign_nodes))
diff --git a/venv/Lib/site-packages/astroid/test_utils.py b/venv/Lib/site-packages/astroid/test_utils.py
deleted file mode 100644
index 6c965ef..0000000
--- a/venv/Lib/site-packages/astroid/test_utils.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2013-2014 Google, Inc.
-# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
-# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-"""Utility functions for test code that uses astroid ASTs as input."""
-import contextlib
-import functools
-import sys
-import warnings
-
-import pytest
-
-from astroid import nodes
-
-
-def require_version(minver=None, maxver=None):
- """ Compare the version of the Python interpreter to the given bounds.
- Skip the test if the current version falls outside the given range.
- """
-
- def parse(string, default=None):
- string = string or default
- try:
- return tuple(int(v) for v in string.split("."))
- except ValueError as exc:
- raise ValueError(
- "{string} is not a correct version: should be X.Y[.Z].".format(
- string=string
- )
- ) from exc
-
- def check_require_version(f):
- current = sys.version_info[:3]
- if parse(minver, "0") < current <= parse(maxver, "4"):
- return f
-
- str_version = ".".join(str(v) for v in sys.version_info)
-
- @functools.wraps(f)
- def new_f(*args, **kwargs):
- if minver is not None:
- pytest.skip(
- "Needs Python > %s. Current version is %s." % (minver, str_version)
- )
- elif maxver is not None:
- pytest.skip(
- "Needs Python <= %s. Current version is %s." % (maxver, str_version)
- )
-
- return new_f
-
- return check_require_version
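A hypothetical test using ``require_version``: the wrapped test is replaced by a ``pytest.skip`` call when the running interpreter falls outside the requested range (the test name and body are illustrative only):

import astroid
from astroid.test_utils import require_version

@require_version(minver='3.6')
def test_fstring_extraction():
    node = astroid.extract_node('f"{1 + 1}"')
    assert node is not None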
-
-
-def get_name_node(start_from, name, index=0):
- return [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name][index]
-
-
-@contextlib.contextmanager
-def enable_warning(warning):
- warnings.simplefilter("always", warning)
- try:
- yield
- finally:
- # Reset it to default value, so it will take
- # into account the values from the -W flag.
- warnings.simplefilter("default", warning)
diff --git a/venv/Lib/site-packages/astroid/transforms.py b/venv/Lib/site-packages/astroid/transforms.py
deleted file mode 100644
index e5506cc..0000000
--- a/venv/Lib/site-packages/astroid/transforms.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-
-import collections
-from functools import lru_cache
-
-
-class TransformVisitor:
- """A visitor for handling transforms.
-
- The standard approach of using it is to call
- :meth:`~visit` with an *astroid* module and the class
- will take care of the rest, walking the tree and running the
- transforms for each encountered node.
- """
-
- TRANSFORM_MAX_CACHE_SIZE = 10000
-
- def __init__(self):
- self.transforms = collections.defaultdict(list)
-
- @lru_cache(maxsize=TRANSFORM_MAX_CACHE_SIZE)
- def _transform(self, node):
- """Call matching transforms for the given node if any and return the
- transformed node.
- """
- cls = node.__class__
- if cls not in self.transforms:
- # no transform registered for this class of node
- return node
-
- transforms = self.transforms[cls]
- for transform_func, predicate in transforms:
- if predicate is None or predicate(node):
- ret = transform_func(node)
- # if the transformation function returns something, it's
- # expected to be a replacement for the node
- if ret is not None:
- node = ret
- if ret.__class__ != cls:
- # Can no longer apply the rest of the transforms.
- break
- return node
-
- def _visit(self, node):
- if hasattr(node, "_astroid_fields"):
- for name in node._astroid_fields:
- value = getattr(node, name)
- visited = self._visit_generic(value)
- if visited != value:
- setattr(node, name, visited)
- return self._transform(node)
-
- def _visit_generic(self, node):
- if isinstance(node, list):
- return [self._visit_generic(child) for child in node]
- if isinstance(node, tuple):
- return tuple(self._visit_generic(child) for child in node)
- if not node or isinstance(node, str):
- return node
-
- return self._visit(node)
-
- def register_transform(self, node_class, transform, predicate=None):
- """Register a `transform(node)` function to be applied to nodes of the
- given `node_class` if `predicate` is None or returns true
- when called with the node as argument.
-
- The transform function may return a value which is then used to
- substitute the original node in the tree.
- """
- self.transforms[node_class].append((transform, predicate))
-
- def unregister_transform(self, node_class, transform, predicate=None):
- """Unregister the given transform."""
- self.transforms[node_class].remove((transform, predicate))
-
- def visit(self, module):
- """Walk the given astroid *tree* and transform each encountered node
-
- Only the nodes which have transforms registered will actually
- be replaced or changed.
- """
- module.body = [self._visit(child) for child in module.body]
- return self._transform(module)
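An illustrative sketch of registering and running a transform (the ``add_placeholder_doc`` transform is hypothetical); a transform that returns None leaves the node in place, possibly mutated:

import astroid
from astroid.transforms import TransformVisitor

def add_placeholder_doc(node):
    # mutate in place; returning None keeps the original node
    if node.doc is None:
        node.doc = "TODO: document this function"

visitor = TransformVisitor()
visitor.register_transform(astroid.FunctionDef, add_placeholder_doc)

module = visitor.visit(astroid.parse('''
def helper():
    return 1
'''))
print(module.body[0].doc)  # 'TODO: document this function'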
diff --git a/venv/Lib/site-packages/astroid/util.py b/venv/Lib/site-packages/astroid/util.py
deleted file mode 100644
index 3ab7561..0000000
--- a/venv/Lib/site-packages/astroid/util.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
-# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
-# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
-# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
-
-# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
-# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
-
-import warnings
-from itertools import islice
-
-import importlib
-import lazy_object_proxy
-
-
-def lazy_descriptor(obj):
- class DescriptorProxy(lazy_object_proxy.Proxy):
- def __get__(self, instance, owner=None):
- return self.__class__.__get__(self, instance)
-
- return DescriptorProxy(obj)
-
-
-def lazy_import(module_name):
- return lazy_object_proxy.Proxy(
- lambda: importlib.import_module("." + module_name, "astroid")
- )
-
-
-@object.__new__
-class Uninferable:
- """Special inference object, which is returned when inference fails."""
-
- def __repr__(self):
- return "Uninferable"
-
- __str__ = __repr__
-
- def __getattribute__(self, name):
- if name == "next":
- raise AttributeError("next method should not be called")
- if name.startswith("__") and name.endswith("__"):
- return object.__getattribute__(self, name)
- if name == "accept":
- return object.__getattribute__(self, name)
- return self
-
- def __call__(self, *args, **kwargs):
- return self
-
- def __bool__(self):
- return False
-
- __nonzero__ = __bool__
-
- def accept(self, visitor):
- func = getattr(visitor, "visit_uninferable")
- return func(self)
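A short sketch of how the ``Uninferable`` sentinel behaves for consumers of inference results:

from astroid.util import Uninferable

print(bool(Uninferable))            # False, so `if inferred:` filters it out
print(Uninferable.anything_at_all)  # Uninferable: attribute access returns the sentinel itself
print(Uninferable())                # Uninferable: so does calling it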
-
-
-class BadOperationMessage:
- """Object which describes a TypeError that occurred somewhere in the inference chain.
-
- This is not an exception, but a container object which holds the types and
- the error which occurred.
- """
-
-
-class BadUnaryOperationMessage(BadOperationMessage):
- """Object which describes operational failures on UnaryOps."""
-
- def __init__(self, operand, op, error):
- self.operand = operand
- self.op = op
- self.error = error
-
- @property
- def _object_type_helper(self):
- helpers = lazy_import("helpers")
- return helpers.object_type
-
- def _object_type(self, obj):
- # pylint: disable=not-callable; can't infer lazy_import
- objtype = self._object_type_helper(obj)
- if objtype is Uninferable:
- return None
-
- return objtype
-
- def __str__(self):
- if hasattr(self.operand, "name"):
- operand_type = self.operand.name
- else:
- object_type = self._object_type(self.operand)
- if hasattr(object_type, "name"):
- operand_type = object_type.name
- else:
- # Just fallback to as_string
- operand_type = object_type.as_string()
-
- msg = "bad operand type for unary {}: {}"
- return msg.format(self.op, operand_type)
-
-
-class BadBinaryOperationMessage(BadOperationMessage):
- """Object which describes type errors for BinOps."""
-
- def __init__(self, left_type, op, right_type):
- self.left_type = left_type
- self.right_type = right_type
- self.op = op
-
- def __str__(self):
- msg = "unsupported operand type(s) for {}: {!r} and {!r}"
- return msg.format(self.op, self.left_type.name, self.right_type.name)
-
-
-def _instancecheck(cls, other):
- wrapped = cls.__wrapped__
- other_cls = other.__class__
- is_instance_of = wrapped is other_cls or issubclass(other_cls, wrapped)
- warnings.warn(
- "%r is deprecated and slated for removal in astroid "
- "2.0, use %r instead" % (cls.__class__.__name__, wrapped.__name__),
- PendingDeprecationWarning,
- stacklevel=2,
- )
- return is_instance_of
-
-
-def proxy_alias(alias_name, node_type):
- """Get a Proxy from the given name to the given node type."""
- proxy = type(
- alias_name,
- (lazy_object_proxy.Proxy,),
- {
- "__class__": object.__dict__["__class__"],
- "__instancecheck__": _instancecheck,
- },
- )
- return proxy(lambda: node_type)
-
-
-def limit_inference(iterator, size):
- """Limit inference amount.
-
- Limit inference amount to help with performance issues with
- exponentially exploding possible results.
-
- :param iterator: Inference generator to limit
- :type iterator: Iterator(NodeNG)
-
- :param size: Maximum amount of nodes yielded, plus an
- Uninferable at the end if the limit was reached
- :type size: int
-
- :yields: A possibly modified generator
- :rtype: iterable
- """
- yield from islice(iterator, size)
- has_more = next(iterator, False)
- if has_more is not False:
- yield Uninferable
- return
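A sketch of ``limit_inference`` capping an (artificially endless) generator; the ``endless`` generator is a stand-in for a real inference iterator:

import itertools
from astroid.util import limit_inference

def endless():
    yield from itertools.count()

results = list(limit_inference(endless(), size=3))
print(results)  # [0, 1, 2, Uninferable]: capped, with Uninferable marking the cut-off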