From d476d2e053f937c0060f696312f301591e4f43ea Mon Sep 17 00:00:00 2001
From: brenda-br
Date: Thu, 23 Feb 2023 22:14:39 +0530
Subject: Restructure Code - 1
---
.../DistillationColumnStagewiseResults.py | 18 +
src/main/python/DockWidgets/DockWidget.py | 167 +
.../DockWidgets/DockWidgetCompoundSeparator.py | 133 +
.../DockWidgets/DockWidgetCompressorExpander.py | 166 +
.../DockWidgets/DockWidgetDistillationColumn.py | 375 +
src/main/python/DockWidgets/DockWidgetFlash.py | 86 +
.../python/DockWidgets/DockWidgetMaterialStream.py | 373 +
src/main/python/DockWidgets/DockWidgetMixer.py | 66 +
.../python/DockWidgets/DockWidgetShortcutColumn.py | 148 +
src/main/python/DockWidgets/DockWidgetSplitter.py | 85 +
src/main/python/DockWidgets/__init__.py | 0
...stillationColumnStagewiseResults.cpython-36.pyc | Bin 0 -> 767 bytes
...stillationColumnStagewiseResults.cpython-37.pyc | Bin 0 -> 946 bytes
.../__pycache__/DockWidget.cpython-36.pyc | Bin 0 -> 4404 bytes
.../__pycache__/DockWidget.cpython-37.pyc | Bin 0 -> 4934 bytes
.../DockWidgetCompoundSeparator.cpython-36.pyc | Bin 0 -> 3271 bytes
.../DockWidgetCompoundSeparator.cpython-37.pyc | Bin 0 -> 4422 bytes
.../DockWidgetCompressorExpander.cpython-36.pyc | Bin 0 -> 5450 bytes
.../DockWidgetCompressorExpander.cpython-37.pyc | Bin 0 -> 5657 bytes
.../DockWidgetDistillationColumn.cpython-36.pyc | Bin 0 -> 9874 bytes
.../DockWidgetDistillationColumn.cpython-37.pyc | Bin 0 -> 10480 bytes
.../__pycache__/DockWidgetFlash.cpython-36.pyc | Bin 0 -> 2871 bytes
.../__pycache__/DockWidgetFlash.cpython-37.pyc | Bin 0 -> 3521 bytes
.../DockWidgetMaterialStream.cpython-36.pyc | Bin 0 -> 8673 bytes
.../DockWidgetMaterialStream.cpython-37.pyc | Bin 0 -> 9641 bytes
.../__pycache__/DockWidgetMixer.cpython-36.pyc | Bin 0 -> 2057 bytes
.../__pycache__/DockWidgetMixer.cpython-37.pyc | Bin 0 -> 2707 bytes
.../DockWidgetShortcutColumn.cpython-36.pyc | Bin 0 -> 5015 bytes
.../DockWidgetShortcutColumn.cpython-37.pyc | Bin 0 -> 5671 bytes
.../__pycache__/DockWidgetSplitter.cpython-36.pyc | Bin 0 -> 2689 bytes
.../__pycache__/DockWidgetSplitter.cpython-37.pyc | Bin 0 -> 3342 bytes
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 208 bytes
src/main/python/OMChem/CompSep.py | 72 +
src/main/python/OMChem/ConvReactor.py | 44 +
src/main/python/OMChem/Cooler.py | 76 +
src/main/python/OMChem/DistCol.py | 106 +
src/main/python/OMChem/EngStm.py | 18 +
src/main/python/OMChem/Flash.py | 57 +
src/main/python/OMChem/Flowsheet.py | 306 +
src/main/python/OMChem/Heater.py | 79 +
src/main/python/OMChem/Mixer.py | 58 +
src/main/python/OMChem/Pump.py | 66 +
src/main/python/OMChem/ShortcutColumn.py | 88 +
src/main/python/OMChem/Splitter.py | 61 +
src/main/python/OMChem/Valve.py | 61 +
src/main/python/OMChem/__init__.py | 0
.../OMChem/__pycache__/EngStm.cpython-36.pyc | Bin 0 -> 918 bytes
.../OMChem/__pycache__/EngStm.cpython-37.pyc | Bin 0 -> 929 bytes
.../OMChem/__pycache__/Flowsheet.cpython-36.pyc | Bin 0 -> 8274 bytes
.../OMChem/__pycache__/Flowsheet.cpython-37.pyc | Bin 0 -> 8508 bytes
.../OMChem/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 203 bytes
src/main/python/OMChem/adiabatic_comp.py | 70 +
src/main/python/OMChem/adiabatic_exp.py | 70 +
src/main/python/OMChem/setup.py | 54 +
src/main/python/Redo.dat | 0
src/main/python/mainApp.py | 413 +
src/main/python/utils/Bin_Phase_env.py | 240 +
src/main/python/utils/ComponentSelector.py | 185 +
src/main/python/utils/Container.py | 250 +
src/main/python/utils/Graphics.py | 709 ++
src/main/python/utils/Streams.py | 451 ++
src/main/python/utils/UnitOperations.py | 751 ++
src/main/python/utils/__init__.py | 0
.../utils/__pycache__/Bin_Phase_env.cpython-37.pyc | Bin 0 -> 8017 bytes
.../__pycache__/ComponentSelector.cpython-37.pyc | Bin 0 -> 5514 bytes
.../utils/__pycache__/Container.cpython-37.pyc | Bin 0 -> 7735 bytes
.../utils/__pycache__/Graphics.cpython-37.pyc | Bin 0 -> 23649 bytes
.../utils/__pycache__/Streams.cpython-37.pyc | Bin 0 -> 12803 bytes
.../__pycache__/UnitOperations.cpython-37.pyc | Bin 0 -> 26939 bytes
.../utils/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 202 bytes
src/main/python/utils/thermopackage.txt | 6 +
.../__pycache__/mccabe.cpython-37.pyc | Bin 0 -> 11136 bytes
.../site-packages/__pycache__/six.cpython-37.pyc | Bin 0 -> 26864 bytes
.../site-packages/astroid-2.3.3.dist-info/COPYING | 339 +
.../astroid-2.3.3.dist-info/COPYING.LESSER | 510 ++
.../astroid-2.3.3.dist-info/INSTALLER | 1 +
.../site-packages/astroid-2.3.3.dist-info/METADATA | 117 +
.../site-packages/astroid-2.3.3.dist-info/RECORD | 145 +
.../site-packages/astroid-2.3.3.dist-info/WHEEL | 5 +
.../astroid-2.3.3.dist-info/top_level.txt | 1 +
.../venv/Lib/site-packages/astroid/__init__.py | 166 +
.../venv/Lib/site-packages/astroid/__pkginfo__.py | 51 +
.../astroid/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 4434 bytes
.../astroid/__pycache__/__pkginfo__.cpython-37.pyc | Bin 0 -> 1357 bytes
.../astroid/__pycache__/_ast.cpython-37.pyc | Bin 0 -> 1444 bytes
.../astroid/__pycache__/arguments.cpython-37.pyc | Bin 0 -> 7017 bytes
.../astroid/__pycache__/as_string.cpython-37.pyc | Bin 0 -> 26379 bytes
.../astroid/__pycache__/bases.cpython-37.pyc | Bin 0 -> 15625 bytes
.../astroid/__pycache__/builder.cpython-37.pyc | Bin 0 -> 12292 bytes
.../astroid/__pycache__/context.cpython-37.pyc | Bin 0 -> 4277 bytes
.../astroid/__pycache__/decorators.cpython-37.pyc | Bin 0 -> 3518 bytes
.../astroid/__pycache__/exceptions.cpython-37.pyc | Bin 0 -> 9369 bytes
.../astroid/__pycache__/helpers.cpython-37.pyc | Bin 0 -> 7264 bytes
.../astroid/__pycache__/inference.cpython-37.pyc | Bin 0 -> 21439 bytes
.../astroid/__pycache__/manager.cpython-37.pyc | Bin 0 -> 9301 bytes
.../astroid/__pycache__/mixins.cpython-37.pyc | Bin 0 -> 5780 bytes
.../astroid/__pycache__/modutils.cpython-37.pyc | Bin 0 -> 17081 bytes
.../__pycache__/node_classes.cpython-37.pyc | Bin 0 -> 120480 bytes
.../astroid/__pycache__/nodes.cpython-37.pyc | Bin 0 -> 2054 bytes
.../astroid/__pycache__/objects.cpython-37.pyc | Bin 0 -> 8501 bytes
.../astroid/__pycache__/protocols.cpython-37.pyc | Bin 0 -> 16680 bytes
.../__pycache__/raw_building.cpython-37.pyc | Bin 0 -> 11290 bytes
.../astroid/__pycache__/rebuilder.cpython-37.pyc | Bin 0 -> 39150 bytes
.../__pycache__/scoped_nodes.cpython-37.pyc | Bin 0 -> 70867 bytes
.../astroid/__pycache__/test_utils.cpython-37.pyc | Bin 0 -> 2502 bytes
.../astroid/__pycache__/transforms.cpython-37.pyc | Bin 0 -> 3436 bytes
.../astroid/__pycache__/util.cpython-37.pyc | Bin 0 -> 5704 bytes
.../python/venv/Lib/site-packages/astroid/_ast.py | 49 +
.../venv/Lib/site-packages/astroid/arguments.py | 285 +
.../venv/Lib/site-packages/astroid/as_string.py | 633 ++
.../python/venv/Lib/site-packages/astroid/bases.py | 542 ++
.../__pycache__/brain_argparse.cpython-37.pyc | Bin 0 -> 1063 bytes
.../brain/__pycache__/brain_attrs.cpython-37.pyc | Bin 0 -> 1575 bytes
.../brain_builtin_inference.cpython-37.pyc | Bin 0 -> 19796 bytes
.../__pycache__/brain_collections.cpython-37.pyc | Bin 0 -> 2492 bytes
.../brain/__pycache__/brain_crypt.cpython-37.pyc | Bin 0 -> 850 bytes
.../brain/__pycache__/brain_curses.cpython-37.pyc | Bin 0 -> 3368 bytes
.../__pycache__/brain_dataclasses.cpython-37.pyc | Bin 0 -> 1278 bytes
.../__pycache__/brain_dateutil.cpython-37.pyc | Bin 0 -> 682 bytes
.../__pycache__/brain_fstrings.cpython-37.pyc | Bin 0 -> 1565 bytes
.../__pycache__/brain_functools.cpython-37.pyc | Bin 0 -> 4591 bytes
.../brain/__pycache__/brain_gi.cpython-37.pyc | Bin 0 -> 4007 bytes
.../brain/__pycache__/brain_hashlib.cpython-37.pyc | Bin 0 -> 1924 bytes
.../brain/__pycache__/brain_http.cpython-37.pyc | Bin 0 -> 10289 bytes
.../brain/__pycache__/brain_io.cpython-37.pyc | Bin 0 -> 1305 bytes
.../__pycache__/brain_mechanize.cpython-37.pyc | Bin 0 -> 718 bytes
.../brain_multiprocessing.cpython-37.pyc | Bin 0 -> 2520 bytes
.../brain_namedtuple_enum.cpython-37.pyc | Bin 0 -> 11452 bytes
.../brain/__pycache__/brain_nose.cpython-37.pyc | Bin 0 -> 2047 bytes
.../brain_numpy_core_fromnumeric.cpython-37.pyc | Bin 0 -> 623 bytes
.../brain_numpy_core_function_base.cpython-37.pyc | Bin 0 -> 1029 bytes
.../brain_numpy_core_multiarray.cpython-37.pyc | Bin 0 -> 1711 bytes
.../brain_numpy_core_numeric.cpython-37.pyc | Bin 0 -> 1261 bytes
.../brain_numpy_core_numerictypes.cpython-37.pyc | Bin 0 -> 7661 bytes
.../brain_numpy_core_umath.cpython-37.pyc | Bin 0 -> 5199 bytes
.../__pycache__/brain_numpy_ndarray.cpython-37.pyc | Bin 0 -> 8323 bytes
.../brain_numpy_random_mtrand.cpython-37.pyc | Bin 0 -> 3232 bytes
.../__pycache__/brain_numpy_utils.cpython-37.pyc | Bin 0 -> 1736 bytes
.../__pycache__/brain_pkg_resources.cpython-37.pyc | Bin 0 -> 2189 bytes
.../brain/__pycache__/brain_pytest.cpython-37.pyc | Bin 0 -> 2159 bytes
.../brain/__pycache__/brain_qt.cpython-37.pyc | Bin 0 -> 2092 bytes
.../brain/__pycache__/brain_random.cpython-37.pyc | Bin 0 -> 2172 bytes
.../brain/__pycache__/brain_re.cpython-37.pyc | Bin 0 -> 1066 bytes
.../brain/__pycache__/brain_six.cpython-37.pyc | Bin 0 -> 5521 bytes
.../brain/__pycache__/brain_ssl.cpython-37.pyc | Bin 0 -> 3600 bytes
.../__pycache__/brain_subprocess.cpython-37.pyc | Bin 0 -> 3401 bytes
.../__pycache__/brain_threading.cpython-37.pyc | Bin 0 -> 737 bytes
.../brain/__pycache__/brain_typing.cpython-37.pyc | Bin 0 -> 2337 bytes
.../brain/__pycache__/brain_uuid.cpython-37.pyc | Bin 0 -> 629 bytes
.../site-packages/astroid/brain/brain_argparse.py | 33 +
.../Lib/site-packages/astroid/brain/brain_attrs.py | 65 +
.../astroid/brain/brain_builtin_inference.py | 829 ++
.../astroid/brain/brain_collections.py | 74 +
.../Lib/site-packages/astroid/brain/brain_crypt.py | 26 +
.../site-packages/astroid/brain/brain_curses.py | 179 +
.../astroid/brain/brain_dataclasses.py | 50 +
.../site-packages/astroid/brain/brain_dateutil.py | 28 +
.../site-packages/astroid/brain/brain_fstrings.py | 51 +
.../site-packages/astroid/brain/brain_functools.py | 158 +
.../Lib/site-packages/astroid/brain/brain_gi.py | 220 +
.../site-packages/astroid/brain/brain_hashlib.py | 67 +
.../Lib/site-packages/astroid/brain/brain_http.py | 201 +
.../Lib/site-packages/astroid/brain/brain_io.py | 45 +
.../site-packages/astroid/brain/brain_mechanize.py | 29 +
.../astroid/brain/brain_multiprocessing.py | 106 +
.../astroid/brain/brain_namedtuple_enum.py | 449 ++
.../Lib/site-packages/astroid/brain/brain_nose.py | 77 +
.../astroid/brain/brain_numpy_core_fromnumeric.py | 23 +
.../brain/brain_numpy_core_function_base.py | 29 +
.../astroid/brain/brain_numpy_core_multiarray.py | 55 +
.../astroid/brain/brain_numpy_core_numeric.py | 43 +
.../astroid/brain/brain_numpy_core_numerictypes.py | 250 +
.../astroid/brain/brain_numpy_core_umath.py | 105 +
.../astroid/brain/brain_numpy_ndarray.py | 153 +
.../astroid/brain/brain_numpy_random_mtrand.py | 70 +
.../astroid/brain/brain_numpy_utils.py | 56 +
.../astroid/brain/brain_pkg_resources.py | 75 +
.../site-packages/astroid/brain/brain_pytest.py | 88 +
.../Lib/site-packages/astroid/brain/brain_qt.py | 82 +
.../site-packages/astroid/brain/brain_random.py | 75 +
.../Lib/site-packages/astroid/brain/brain_re.py | 36 +
.../Lib/site-packages/astroid/brain/brain_six.py | 200 +
.../Lib/site-packages/astroid/brain/brain_ssl.py | 74 +
.../astroid/brain/brain_subprocess.py | 111 +
.../site-packages/astroid/brain/brain_threading.py | 31 +
.../site-packages/astroid/brain/brain_typing.py | 96 +
.../Lib/site-packages/astroid/brain/brain_uuid.py | 20 +
.../venv/Lib/site-packages/astroid/builder.py | 435 ++
.../venv/Lib/site-packages/astroid/context.py | 179 +
.../venv/Lib/site-packages/astroid/decorators.py | 141 +
.../venv/Lib/site-packages/astroid/exceptions.py | 230 +
.../venv/Lib/site-packages/astroid/helpers.py | 273 +
.../venv/Lib/site-packages/astroid/inference.py | 943 +++
.../site-packages/astroid/interpreter/__init__.py | 0
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 184 bytes
.../__pycache__/dunder_lookup.cpython-37.pyc | Bin 0 -> 2137 bytes
.../__pycache__/objectmodel.cpython-37.pyc | Bin 0 -> 25143 bytes
.../astroid/interpreter/_import/__init__.py | 0
.../_import/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 192 bytes
.../_import/__pycache__/spec.cpython-37.pyc | Bin 0 -> 9399 bytes
.../_import/__pycache__/util.cpython-37.pyc | Bin 0 -> 408 bytes
.../astroid/interpreter/_import/spec.py | 344 +
.../astroid/interpreter/_import/util.py | 10 +
.../astroid/interpreter/dunder_lookup.py | 66 +
.../astroid/interpreter/objectmodel.py | 738 ++
.../venv/Lib/site-packages/astroid/manager.py | 337 +
.../venv/Lib/site-packages/astroid/mixins.py | 160 +
.../venv/Lib/site-packages/astroid/modutils.py | 698 ++
.../venv/Lib/site-packages/astroid/node_classes.py | 4775 ++++++++++++
.../python/venv/Lib/site-packages/astroid/nodes.py | 175 +
.../venv/Lib/site-packages/astroid/objects.py | 282 +
.../venv/Lib/site-packages/astroid/protocols.py | 766 ++
.../venv/Lib/site-packages/astroid/raw_building.py | 468 ++
.../venv/Lib/site-packages/astroid/rebuilder.py | 1090 +++
.../venv/Lib/site-packages/astroid/scoped_nodes.py | 2836 +++++++
.../venv/Lib/site-packages/astroid/test_utils.py | 73 +
.../venv/Lib/site-packages/astroid/transforms.py | 90 +
.../python/venv/Lib/site-packages/astroid/util.py | 164 +
.../colorama-0.4.3.dist-info/INSTALLER | 1 +
.../colorama-0.4.3.dist-info/LICENSE.txt | 27 +
.../colorama-0.4.3.dist-info/METADATA | 411 +
.../site-packages/colorama-0.4.3.dist-info/RECORD | 18 +
.../site-packages/colorama-0.4.3.dist-info/WHEEL | 6 +
.../colorama-0.4.3.dist-info/top_level.txt | 1 +
.../venv/Lib/site-packages/colorama/__init__.py | 6 +
.../colorama/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 425 bytes
.../colorama/__pycache__/ansi.cpython-37.pyc | Bin 0 -> 3323 bytes
.../__pycache__/ansitowin32.cpython-37.pyc | Bin 0 -> 7579 bytes
.../colorama/__pycache__/initialise.cpython-37.pyc | Bin 0 -> 1644 bytes
.../colorama/__pycache__/win32.cpython-37.pyc | Bin 0 -> 3838 bytes
.../colorama/__pycache__/winterm.cpython-37.pyc | Bin 0 -> 4586 bytes
.../python/venv/Lib/site-packages/colorama/ansi.py | 102 +
.../venv/Lib/site-packages/colorama/ansitowin32.py | 257 +
.../venv/Lib/site-packages/colorama/initialise.py | 80 +
.../venv/Lib/site-packages/colorama/win32.py | 152 +
.../venv/Lib/site-packages/colorama/winterm.py | 169 +
.../python/venv/Lib/site-packages/easy-install.pth | 2 +
.../site-packages/isort-4.3.21.dist-info/INSTALLER | 1 +
.../site-packages/isort-4.3.21.dist-info/LICENSE | 21 +
.../site-packages/isort-4.3.21.dist-info/METADATA | 697 ++
.../site-packages/isort-4.3.21.dist-info/RECORD | 30 +
.../Lib/site-packages/isort-4.3.21.dist-info/WHEEL | 6 +
.../isort-4.3.21.dist-info/entry_points.txt | 9 +
.../isort-4.3.21.dist-info/top_level.txt | 1 +
.../venv/Lib/site-packages/isort/__init__.py | 28 +
.../venv/Lib/site-packages/isort/__main__.py | 9 +
.../isort/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1597 bytes
.../isort/__pycache__/__main__.cpython-37.pyc | Bin 0 -> 348 bytes
.../isort/__pycache__/finders.cpython-37.pyc | Bin 0 -> 11987 bytes
.../isort/__pycache__/hooks.cpython-37.pyc | Bin 0 -> 3208 bytes
.../isort/__pycache__/isort.cpython-37.pyc | Bin 0 -> 29949 bytes
.../isort/__pycache__/main.cpython-37.pyc | Bin 0 -> 16724 bytes
.../isort/__pycache__/natural.cpython-37.pyc | Bin 0 -> 2308 bytes
.../isort/__pycache__/pie_slice.cpython-37.pyc | Bin 0 -> 5008 bytes
.../isort/__pycache__/pylama_isort.cpython-37.pyc | Bin 0 -> 1009 bytes
.../isort/__pycache__/settings.cpython-37.pyc | Bin 0 -> 12274 bytes
.../isort/__pycache__/utils.cpython-37.pyc | Bin 0 -> 1600 bytes
.../python/venv/Lib/site-packages/isort/finders.py | 382 +
.../python/venv/Lib/site-packages/isort/hooks.py | 91 +
.../python/venv/Lib/site-packages/isort/isort.py | 1060 +++
.../python/venv/Lib/site-packages/isort/main.py | 401 +
.../python/venv/Lib/site-packages/isort/natural.py | 47 +
.../venv/Lib/site-packages/isort/pie_slice.py | 154 +
.../venv/Lib/site-packages/isort/pylama_isort.py | 29 +
.../venv/Lib/site-packages/isort/settings.py | 356 +
.../python/venv/Lib/site-packages/isort/utils.py | 53 +
.../lazy_object_proxy-1.4.3.dist-info/AUTHORS.rst | 10 +
.../lazy_object_proxy-1.4.3.dist-info/INSTALLER | 1 +
.../lazy_object_proxy-1.4.3.dist-info/LICENSE | 21 +
.../lazy_object_proxy-1.4.3.dist-info/METADATA | 166 +
.../lazy_object_proxy-1.4.3.dist-info/RECORD | 20 +
.../lazy_object_proxy-1.4.3.dist-info/WHEEL | 5 +
.../top_level.txt | 1 +
.../site-packages/lazy_object_proxy/__init__.py | 23 +
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 564 bytes
.../__pycache__/_version.cpython-37.pyc | Bin 0 -> 209 bytes
.../__pycache__/compat.cpython-37.pyc | Bin 0 -> 441 bytes
.../__pycache__/simple.cpython-37.pyc | Bin 0 -> 7764 bytes
.../__pycache__/slots.cpython-37.pyc | Bin 0 -> 15235 bytes
.../__pycache__/utils.cpython-37.pyc | Bin 0 -> 784 bytes
.../site-packages/lazy_object_proxy/_version.py | 4 +
.../lazy_object_proxy/cext.cp37-win_amd64.pyd | Bin 0 -> 31744 bytes
.../Lib/site-packages/lazy_object_proxy/compat.py | 9 +
.../Lib/site-packages/lazy_object_proxy/simple.py | 246 +
.../Lib/site-packages/lazy_object_proxy/slots.py | 414 +
.../Lib/site-packages/lazy_object_proxy/utils.py | 13 +
.../mccabe-0.6.1.dist-info/DESCRIPTION.rst | 152 +
.../site-packages/mccabe-0.6.1.dist-info/INSTALLER | 1 +
.../site-packages/mccabe-0.6.1.dist-info/METADATA | 178 +
.../site-packages/mccabe-0.6.1.dist-info/RECORD | 10 +
.../Lib/site-packages/mccabe-0.6.1.dist-info/WHEEL | 6 +
.../mccabe-0.6.1.dist-info/entry_points.txt | 3 +
.../mccabe-0.6.1.dist-info/metadata.json | 1 +
.../mccabe-0.6.1.dist-info/top_level.txt | 1 +
src/main/python/venv/Lib/site-packages/mccabe.py | 347 +
.../pip-19.0.3-py3.7.egg/EGG-INFO/PKG-INFO | 73 +
.../pip-19.0.3-py3.7.egg/EGG-INFO/SOURCES.txt | 391 +
.../EGG-INFO/dependency_links.txt | 1 +
.../pip-19.0.3-py3.7.egg/EGG-INFO/entry_points.txt | 5 +
.../pip-19.0.3-py3.7.egg/EGG-INFO/not-zip-safe | 1 +
.../pip-19.0.3-py3.7.egg/EGG-INFO/top_level.txt | 1 +
.../pip-19.0.3-py3.7.egg/pip/__init__.py | 1 +
.../pip-19.0.3-py3.7.egg/pip/__main__.py | 19 +
.../pip/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 215 bytes
.../pip/__pycache__/__main__.cpython-37.pyc | Bin 0 -> 467 bytes
.../pip-19.0.3-py3.7.egg/pip/_internal/__init__.py | 78 +
.../_internal/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1859 bytes
.../_internal/__pycache__/build_env.cpython-37.pyc | Bin 0 -> 7472 bytes
.../pip/_internal/__pycache__/cache.cpython-37.pyc | Bin 0 -> 7067 bytes
.../__pycache__/configuration.cpython-37.pyc | Bin 0 -> 9852 bytes
.../_internal/__pycache__/download.cpython-37.pyc | Bin 0 -> 21188 bytes
.../__pycache__/exceptions.cpython-37.pyc | Bin 0 -> 11759 bytes
.../pip/_internal/__pycache__/index.cpython-37.pyc | Bin 0 -> 25269 bytes
.../_internal/__pycache__/locations.cpython-37.pyc | Bin 0 -> 4446 bytes
.../__pycache__/pep425tags.cpython-37.pyc | Bin 0 -> 8178 bytes
.../_internal/__pycache__/pyproject.cpython-37.pyc | Bin 0 -> 3195 bytes
.../_internal/__pycache__/resolve.cpython-37.pyc | Bin 0 -> 9113 bytes
.../pip/_internal/__pycache__/wheel.cpython-37.pyc | Bin 0 -> 25946 bytes
.../pip/_internal/build_env.py | 215 +
.../pip-19.0.3-py3.7.egg/pip/_internal/cache.py | 224 +
.../pip/_internal/cli/__init__.py | 4 +
.../cli/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 290 bytes
.../cli/__pycache__/autocompletion.cpython-37.pyc | Bin 0 -> 5103 bytes
.../cli/__pycache__/base_command.cpython-37.pyc | Bin 0 -> 7831 bytes
.../cli/__pycache__/cmdoptions.cpython-37.pyc | Bin 0 -> 16906 bytes
.../cli/__pycache__/main_parser.cpython-37.pyc | Bin 0 -> 2376 bytes
.../cli/__pycache__/parser.cpython-37.pyc | Bin 0 -> 8954 bytes
.../cli/__pycache__/status_codes.cpython-37.pyc | Bin 0 -> 419 bytes
.../pip/_internal/cli/autocompletion.py | 152 +
.../pip/_internal/cli/base_command.py | 341 +
.../pip/_internal/cli/cmdoptions.py | 809 ++
.../pip/_internal/cli/main_parser.py | 104 +
.../pip/_internal/cli/parser.py | 261 +
.../pip/_internal/cli/status_codes.py | 8 +
.../pip/_internal/commands/__init__.py | 79 +
.../commands/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 2518 bytes
.../commands/__pycache__/check.cpython-37.pyc | Bin 0 -> 1343 bytes
.../commands/__pycache__/completion.cpython-37.pyc | Bin 0 -> 3092 bytes
.../__pycache__/configuration.cpython-37.pyc | Bin 0 -> 6444 bytes
.../commands/__pycache__/download.cpython-37.pyc | Bin 0 -> 4731 bytes
.../commands/__pycache__/freeze.cpython-37.pyc | Bin 0 -> 2888 bytes
.../commands/__pycache__/hash.cpython-37.pyc | Bin 0 -> 2082 bytes
.../commands/__pycache__/help.cpython-37.pyc | Bin 0 -> 1258 bytes
.../commands/__pycache__/install.cpython-37.pyc | Bin 0 -> 12475 bytes
.../commands/__pycache__/list.cpython-37.pyc | Bin 0 -> 8724 bytes
.../commands/__pycache__/search.cpython-37.pyc | Bin 0 -> 4324 bytes
.../commands/__pycache__/show.cpython-37.pyc | Bin 0 -> 5905 bytes
.../commands/__pycache__/uninstall.cpython-37.pyc | Bin 0 -> 2714 bytes
.../commands/__pycache__/wheel.cpython-37.pyc | Bin 0 -> 5017 bytes
.../pip/_internal/commands/check.py | 41 +
.../pip/_internal/commands/completion.py | 94 +
.../pip/_internal/commands/configuration.py | 227 +
.../pip/_internal/commands/download.py | 176 +
.../pip/_internal/commands/freeze.py | 96 +
.../pip/_internal/commands/hash.py | 57 +
.../pip/_internal/commands/help.py | 37 +
.../pip/_internal/commands/install.py | 566 ++
.../pip/_internal/commands/list.py | 301 +
.../pip/_internal/commands/search.py | 135 +
.../pip/_internal/commands/show.py | 168 +
.../pip/_internal/commands/uninstall.py | 78 +
.../pip/_internal/commands/wheel.py | 186 +
.../pip/_internal/configuration.py | 387 +
.../pip-19.0.3-py3.7.egg/pip/_internal/download.py | 971 +++
.../pip/_internal/exceptions.py | 274 +
.../pip-19.0.3-py3.7.egg/pip/_internal/index.py | 990 +++
.../pip/_internal/locations.py | 211 +
.../pip/_internal/models/__init__.py | 2 +
.../models/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 278 bytes
.../models/__pycache__/candidate.cpython-37.pyc | Bin 0 -> 1327 bytes
.../__pycache__/format_control.cpython-37.pyc | Bin 0 -> 2281 bytes
.../models/__pycache__/index.cpython-37.pyc | Bin 0 -> 1182 bytes
.../models/__pycache__/link.cpython-37.pyc | Bin 0 -> 5016 bytes
.../pip/_internal/models/candidate.py | 31 +
.../pip/_internal/models/format_control.py | 73 +
.../pip/_internal/models/index.py | 31 +
.../pip/_internal/models/link.py | 163 +
.../pip/_internal/operations/__init__.py | 0
.../operations/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 214 bytes
.../operations/__pycache__/check.cpython-37.pyc | Bin 0 -> 3644 bytes
.../operations/__pycache__/freeze.cpython-37.pyc | Bin 0 -> 5629 bytes
.../operations/__pycache__/prepare.cpython-37.pyc | Bin 0 -> 10290 bytes
.../pip/_internal/operations/check.py | 155 +
.../pip/_internal/operations/freeze.py | 247 +
.../pip/_internal/operations/prepare.py | 413 +
.../pip/_internal/pep425tags.py | 381 +
.../pip/_internal/pyproject.py | 171 +
.../pip/_internal/req/__init__.py | 77 +
.../req/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1711 bytes
.../req/__pycache__/constructors.cpython-37.pyc | Bin 0 -> 7631 bytes
.../req/__pycache__/req_file.cpython-37.pyc | Bin 0 -> 9209 bytes
.../req/__pycache__/req_install.cpython-37.pyc | Bin 0 -> 25046 bytes
.../req/__pycache__/req_set.cpython-37.pyc | Bin 0 -> 6046 bytes
.../req/__pycache__/req_tracker.cpython-37.pyc | Bin 0 -> 3160 bytes
.../req/__pycache__/req_uninstall.cpython-37.pyc | Bin 0 -> 17003 bytes
.../pip/_internal/req/constructors.py | 339 +
.../pip/_internal/req/req_file.py | 382 +
.../pip/_internal/req/req_install.py | 1021 +++
.../pip/_internal/req/req_set.py | 197 +
.../pip/_internal/req/req_tracker.py | 88 +
.../pip/_internal/req/req_uninstall.py | 596 ++
.../pip-19.0.3-py3.7.egg/pip/_internal/resolve.py | 393 +
.../pip/_internal/utils/__init__.py | 0
.../utils/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 209 bytes
.../utils/__pycache__/appdirs.cpython-37.pyc | Bin 0 -> 8056 bytes
.../utils/__pycache__/compat.cpython-37.pyc | Bin 0 -> 6157 bytes
.../utils/__pycache__/deprecation.cpython-37.pyc | Bin 0 -> 2583 bytes
.../utils/__pycache__/encoding.cpython-37.pyc | Bin 0 -> 1286 bytes
.../utils/__pycache__/filesystem.cpython-37.pyc | Bin 0 -> 678 bytes
.../utils/__pycache__/glibc.cpython-37.pyc | Bin 0 -> 1697 bytes
.../utils/__pycache__/hashes.cpython-37.pyc | Bin 0 -> 3616 bytes
.../utils/__pycache__/logging.cpython-37.pyc | Bin 0 -> 7843 bytes
.../utils/__pycache__/misc.cpython-37.pyc | Bin 0 -> 25748 bytes
.../utils/__pycache__/models.cpython-37.pyc | Bin 0 -> 1958 bytes
.../utils/__pycache__/outdated.cpython-37.pyc | Bin 0 -> 4094 bytes
.../utils/__pycache__/packaging.cpython-37.pyc | Bin 0 -> 2630 bytes
.../__pycache__/setuptools_build.cpython-37.pyc | Bin 0 -> 404 bytes
.../utils/__pycache__/temp_dir.cpython-37.pyc | Bin 0 -> 4931 bytes
.../utils/__pycache__/typing.cpython-37.pyc | Bin 0 -> 1353 bytes
.../_internal/utils/__pycache__/ui.cpython-37.pyc | Bin 0 -> 12315 bytes
.../pip/_internal/utils/appdirs.py | 270 +
.../pip/_internal/utils/compat.py | 264 +
.../pip/_internal/utils/deprecation.py | 90 +
.../pip/_internal/utils/encoding.py | 39 +
.../pip/_internal/utils/filesystem.py | 30 +
.../pip/_internal/utils/glibc.py | 93 +
.../pip/_internal/utils/hashes.py | 115 +
.../pip/_internal/utils/logging.py | 318 +
.../pip/_internal/utils/misc.py | 1040 +++
.../pip/_internal/utils/models.py | 40 +
.../pip/_internal/utils/outdated.py | 164 +
.../pip/_internal/utils/packaging.py | 85 +
.../pip/_internal/utils/setuptools_build.py | 8 +
.../pip/_internal/utils/temp_dir.py | 155 +
.../pip/_internal/utils/typing.py | 29 +
.../pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py | 441 ++
.../pip/_internal/vcs/__init__.py | 534 ++
.../vcs/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 15400 bytes
.../vcs/__pycache__/bazaar.cpython-37.pyc | Bin 0 -> 3817 bytes
.../_internal/vcs/__pycache__/git.cpython-37.pyc | Bin 0 -> 9459 bytes
.../vcs/__pycache__/mercurial.cpython-37.pyc | Bin 0 -> 3790 bytes
.../vcs/__pycache__/subversion.cpython-37.pyc | Bin 0 -> 6002 bytes
.../pip/_internal/vcs/bazaar.py | 114 +
.../pip-19.0.3-py3.7.egg/pip/_internal/vcs/git.py | 369 +
.../pip/_internal/vcs/mercurial.py | 103 +
.../pip/_internal/vcs/subversion.py | 200 +
.../pip-19.0.3-py3.7.egg/pip/_internal/wheel.py | 1095 +++
.../pip-19.0.3-py3.7.egg/pip/_vendor/__init__.py | 111 +
.../_vendor/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 2897 bytes
.../pip/_vendor/__pycache__/appdirs.cpython-37.pyc | Bin 0 -> 20626 bytes
.../_vendor/__pycache__/pyparsing.cpython-37.pyc | Bin 0 -> 220624 bytes
.../_vendor/__pycache__/retrying.cpython-37.pyc | Bin 0 -> 8107 bytes
.../pip/_vendor/__pycache__/six.cpython-37.pyc | Bin 0 -> 26431 bytes
.../pip-19.0.3-py3.7.egg/pip/_vendor/appdirs.py | 604 ++
.../pip/_vendor/cachecontrol/__init__.py | 11 +
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 567 bytes
.../__pycache__/adapter.cpython-37.pyc | Bin 0 -> 3053 bytes
.../cachecontrol/__pycache__/cache.cpython-37.pyc | Bin 0 -> 1783 bytes
.../cachecontrol/__pycache__/compat.cpython-37.pyc | Bin 0 -> 774 bytes
.../__pycache__/controller.cpython-37.pyc | Bin 0 -> 7653 bytes
.../__pycache__/filewrapper.cpython-37.pyc | Bin 0 -> 2171 bytes
.../__pycache__/serialize.cpython-37.pyc | Bin 0 -> 4255 bytes
.../__pycache__/wrapper.cpython-37.pyc | Bin 0 -> 675 bytes
.../pip/_vendor/cachecontrol/_cmd.py | 57 +
.../pip/_vendor/cachecontrol/adapter.py | 133 +
.../pip/_vendor/cachecontrol/cache.py | 39 +
.../pip/_vendor/cachecontrol/caches/__init__.py | 2 +
.../caches/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 311 bytes
.../caches/__pycache__/file_cache.cpython-37.pyc | Bin 0 -> 3245 bytes
.../caches/__pycache__/redis_cache.cpython-37.pyc | Bin 0 -> 1567 bytes
.../pip/_vendor/cachecontrol/caches/file_cache.py | 146 +
.../pip/_vendor/cachecontrol/caches/redis_cache.py | 33 +
.../pip/_vendor/cachecontrol/compat.py | 29 +
.../pip/_vendor/cachecontrol/controller.py | 367 +
.../pip/_vendor/cachecontrol/filewrapper.py | 80 +
.../pip/_vendor/cachecontrol/heuristics.py | 135 +
.../pip/_vendor/cachecontrol/serialize.py | 186 +
.../pip/_vendor/cachecontrol/wrapper.py | 29 +
.../pip/_vendor/certifi/__init__.py | 3 +
.../pip/_vendor/certifi/__main__.py | 2 +
.../certifi/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 274 bytes
.../certifi/__pycache__/core.cpython-37.pyc | Bin 0 -> 535 bytes
.../pip/_vendor/certifi/cacert.pem | 4512 +++++++++++
.../pip/_vendor/certifi/core.py | 20 +
.../pip/_vendor/chardet/__init__.py | 39 +
.../chardet/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 861 bytes
.../chardet/__pycache__/big5freq.cpython-37.pyc | Bin 0 -> 27196 bytes
.../chardet/__pycache__/big5prober.cpython-37.pyc | Bin 0 -> 1137 bytes
.../__pycache__/chardistribution.cpython-37.pyc | Bin 0 -> 6323 bytes
.../__pycache__/charsetgroupprober.cpython-37.pyc | Bin 0 -> 2244 bytes
.../__pycache__/charsetprober.cpython-37.pyc | Bin 0 -> 3454 bytes
.../__pycache__/codingstatemachine.cpython-37.pyc | Bin 0 -> 2901 bytes
.../chardet/__pycache__/compat.cpython-37.pyc | Bin 0 -> 372 bytes
.../chardet/__pycache__/cp949prober.cpython-37.pyc | Bin 0 -> 1144 bytes
.../chardet/__pycache__/enums.cpython-37.pyc | Bin 0 -> 2635 bytes
.../chardet/__pycache__/escprober.cpython-37.pyc | Bin 0 -> 2622 bytes
.../chardet/__pycache__/escsm.cpython-37.pyc | Bin 0 -> 7083 bytes
.../chardet/__pycache__/eucjpprober.cpython-37.pyc | Bin 0 -> 2430 bytes
.../chardet/__pycache__/euckrfreq.cpython-37.pyc | Bin 0 -> 12080 bytes
.../chardet/__pycache__/euckrprober.cpython-37.pyc | Bin 0 -> 1145 bytes
.../chardet/__pycache__/euctwfreq.cpython-37.pyc | Bin 0 -> 27200 bytes
.../chardet/__pycache__/euctwprober.cpython-37.pyc | Bin 0 -> 1145 bytes
.../chardet/__pycache__/gb2312freq.cpython-37.pyc | Bin 0 -> 19124 bytes
.../__pycache__/gb2312prober.cpython-37.pyc | Bin 0 -> 1153 bytes
.../__pycache__/hebrewprober.cpython-37.pyc | Bin 0 -> 2987 bytes
.../chardet/__pycache__/jisfreq.cpython-37.pyc | Bin 0 -> 22152 bytes
.../chardet/__pycache__/jpcntx.cpython-37.pyc | Bin 0 -> 38031 bytes
.../__pycache__/langbulgarianmodel.cpython-37.pyc | Bin 0 -> 23645 bytes
.../__pycache__/langcyrillicmodel.cpython-37.pyc | Bin 0 -> 29101 bytes
.../__pycache__/langgreekmodel.cpython-37.pyc | Bin 0 -> 23603 bytes
.../__pycache__/langhebrewmodel.cpython-37.pyc | Bin 0 -> 22232 bytes
.../__pycache__/langthaimodel.cpython-37.pyc | Bin 0 -> 22211 bytes
.../__pycache__/langturkishmodel.cpython-37.pyc | Bin 0 -> 22234 bytes
.../__pycache__/latin1prober.cpython-37.pyc | Bin 0 -> 2944 bytes
.../__pycache__/mbcharsetprober.cpython-37.pyc | Bin 0 -> 2249 bytes
.../__pycache__/mbcsgroupprober.cpython-37.pyc | Bin 0 -> 1140 bytes
.../chardet/__pycache__/mbcssm.cpython-37.pyc | Bin 0 -> 15695 bytes
.../__pycache__/sbcharsetprober.cpython-37.pyc | Bin 0 -> 3002 bytes
.../__pycache__/sbcsgroupprober.cpython-37.pyc | Bin 0 -> 1630 bytes
.../chardet/__pycache__/sjisprober.cpython-37.pyc | Bin 0 -> 2456 bytes
.../__pycache__/universaldetector.cpython-37.pyc | Bin 0 -> 5846 bytes
.../chardet/__pycache__/utf8prober.cpython-37.pyc | Bin 0 -> 1987 bytes
.../chardet/__pycache__/version.cpython-37.pyc | Bin 0 -> 456 bytes
.../pip/_vendor/chardet/big5freq.py | 386 +
.../pip/_vendor/chardet/big5prober.py | 47 +
.../pip/_vendor/chardet/chardistribution.py | 233 +
.../pip/_vendor/chardet/charsetgroupprober.py | 106 +
.../pip/_vendor/chardet/charsetprober.py | 145 +
.../pip/_vendor/chardet/cli/__init__.py | 1 +
.../pip/_vendor/chardet/cli/chardetect.py | 85 +
.../pip/_vendor/chardet/codingstatemachine.py | 88 +
.../pip/_vendor/chardet/compat.py | 34 +
.../pip/_vendor/chardet/cp949prober.py | 49 +
.../pip/_vendor/chardet/enums.py | 76 +
.../pip/_vendor/chardet/escprober.py | 101 +
.../pip/_vendor/chardet/escsm.py | 246 +
.../pip/_vendor/chardet/eucjpprober.py | 92 +
.../pip/_vendor/chardet/euckrfreq.py | 195 +
.../pip/_vendor/chardet/euckrprober.py | 47 +
.../pip/_vendor/chardet/euctwfreq.py | 387 +
.../pip/_vendor/chardet/euctwprober.py | 46 +
.../pip/_vendor/chardet/gb2312freq.py | 283 +
.../pip/_vendor/chardet/gb2312prober.py | 46 +
.../pip/_vendor/chardet/hebrewprober.py | 292 +
.../pip/_vendor/chardet/jisfreq.py | 325 +
.../pip/_vendor/chardet/jpcntx.py | 233 +
.../pip/_vendor/chardet/langbulgarianmodel.py | 228 +
.../pip/_vendor/chardet/langcyrillicmodel.py | 333 +
.../pip/_vendor/chardet/langgreekmodel.py | 225 +
.../pip/_vendor/chardet/langhebrewmodel.py | 200 +
.../pip/_vendor/chardet/langhungarianmodel.py | 225 +
.../pip/_vendor/chardet/langthaimodel.py | 199 +
.../pip/_vendor/chardet/langturkishmodel.py | 193 +
.../pip/_vendor/chardet/latin1prober.py | 145 +
.../pip/_vendor/chardet/mbcharsetprober.py | 91 +
.../pip/_vendor/chardet/mbcsgroupprober.py | 54 +
.../pip/_vendor/chardet/mbcssm.py | 572 ++
.../pip/_vendor/chardet/sbcharsetprober.py | 132 +
.../pip/_vendor/chardet/sbcsgroupprober.py | 73 +
.../pip/_vendor/chardet/sjisprober.py | 92 +
.../pip/_vendor/chardet/universaldetector.py | 286 +
.../pip/_vendor/chardet/utf8prober.py | 82 +
.../pip/_vendor/chardet/version.py | 9 +
.../pip/_vendor/colorama/__init__.py | 6 +
.../colorama/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 461 bytes
.../colorama/__pycache__/ansi.cpython-37.pyc | Bin 0 -> 3359 bytes
.../__pycache__/ansitowin32.cpython-37.pyc | Bin 0 -> 7615 bytes
.../colorama/__pycache__/initialise.cpython-37.pyc | Bin 0 -> 1680 bytes
.../colorama/__pycache__/win32.cpython-37.pyc | Bin 0 -> 3874 bytes
.../colorama/__pycache__/winterm.cpython-37.pyc | Bin 0 -> 4622 bytes
.../pip/_vendor/colorama/ansi.py | 102 +
.../pip/_vendor/colorama/ansitowin32.py | 257 +
.../pip/_vendor/colorama/initialise.py | 80 +
.../pip/_vendor/colorama/win32.py | 152 +
.../pip/_vendor/colorama/winterm.py | 169 +
.../pip/_vendor/distlib/__init__.py | 23 +
.../distlib/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1059 bytes
.../distlib/__pycache__/compat.cpython-37.pyc | Bin 0 -> 32069 bytes
.../distlib/__pycache__/resources.cpython-37.pyc | Bin 0 -> 10903 bytes
.../distlib/__pycache__/scripts.cpython-37.pyc | Bin 0 -> 11093 bytes
.../distlib/__pycache__/util.cpython-37.pyc | Bin 0 -> 47971 bytes
.../pip/_vendor/distlib/_backport/__init__.py | 6 +
.../pip/_vendor/distlib/_backport/misc.py | 41 +
.../pip/_vendor/distlib/_backport/shutil.py | 761 ++
.../pip/_vendor/distlib/_backport/sysconfig.cfg | 84 +
.../pip/_vendor/distlib/_backport/sysconfig.py | 788 ++
.../pip/_vendor/distlib/_backport/tarfile.py | 2607 +++++++
.../pip/_vendor/distlib/compat.py | 1120 +++
.../pip/_vendor/distlib/database.py | 1339 ++++
.../pip/_vendor/distlib/index.py | 516 ++
.../pip/_vendor/distlib/locators.py | 1295 +++
.../pip/_vendor/distlib/manifest.py | 393 +
.../pip/_vendor/distlib/markers.py | 131 +
.../pip/_vendor/distlib/metadata.py | 1094 +++
.../pip/_vendor/distlib/resources.py | 355 +
.../pip/_vendor/distlib/scripts.py | 417 +
.../pip/_vendor/distlib/t32.exe | Bin 0 -> 92672 bytes
.../pip/_vendor/distlib/t64.exe | Bin 0 -> 102400 bytes
.../pip/_vendor/distlib/util.py | 1756 +++++
.../pip/_vendor/distlib/version.py | 736 ++
.../pip/_vendor/distlib/w32.exe | Bin 0 -> 89088 bytes
.../pip/_vendor/distlib/w64.exe | Bin 0 -> 99328 bytes
.../pip/_vendor/distlib/wheel.py | 988 +++
.../pip-19.0.3-py3.7.egg/pip/_vendor/distro.py | 1197 +++
.../pip/_vendor/html5lib/__init__.py | 35 +
.../html5lib/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1330 bytes
.../html5lib/__pycache__/_ihatexml.cpython-37.pyc | Bin 0 -> 13777 bytes
.../__pycache__/_inputstream.cpython-37.pyc | Bin 0 -> 22668 bytes
.../html5lib/__pycache__/_tokenizer.cpython-37.pyc | Bin 0 -> 41569 bytes
.../html5lib/__pycache__/_utils.cpython-37.pyc | Bin 0 -> 3322 bytes
.../html5lib/__pycache__/constants.cpython-37.pyc | Bin 0 -> 66234 bytes
.../__pycache__/html5parser.cpython-37.pyc | Bin 0 -> 97831 bytes
.../html5lib/__pycache__/serializer.cpython-37.pyc | Bin 0 -> 10847 bytes
.../pip/_vendor/html5lib/_ihatexml.py | 288 +
.../pip/_vendor/html5lib/_inputstream.py | 923 +++
.../pip/_vendor/html5lib/_tokenizer.py | 1721 ++++
.../pip/_vendor/html5lib/_trie/__init__.py | 14 +
.../_trie/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 443 bytes
.../_trie/__pycache__/_base.cpython-37.pyc | Bin 0 -> 1526 bytes
.../_trie/__pycache__/datrie.cpython-37.pyc | Bin 0 -> 2045 bytes
.../html5lib/_trie/__pycache__/py.cpython-37.pyc | Bin 0 -> 2248 bytes
.../pip/_vendor/html5lib/_trie/_base.py | 37 +
.../pip/_vendor/html5lib/_trie/datrie.py | 44 +
.../pip/_vendor/html5lib/_trie/py.py | 67 +
.../pip/_vendor/html5lib/_utils.py | 124 +
.../pip/_vendor/html5lib/constants.py | 2947 +++++++
.../pip/_vendor/html5lib/filters/__init__.py | 0
.../html5lib/filters/alphabeticalattributes.py | 29 +
.../pip/_vendor/html5lib/filters/base.py | 12 +
.../html5lib/filters/inject_meta_charset.py | 73 +
.../pip/_vendor/html5lib/filters/lint.py | 93 +
.../pip/_vendor/html5lib/filters/optionaltags.py | 207 +
.../pip/_vendor/html5lib/filters/sanitizer.py | 896 +++
.../pip/_vendor/html5lib/filters/whitespace.py | 38 +
.../pip/_vendor/html5lib/html5parser.py | 2791 +++++++
.../pip/_vendor/html5lib/serializer.py | 409 +
.../pip/_vendor/html5lib/treeadapters/__init__.py | 30 +
.../pip/_vendor/html5lib/treeadapters/genshi.py | 54 +
.../pip/_vendor/html5lib/treeadapters/sax.py | 50 +
.../pip/_vendor/html5lib/treebuilders/__init__.py | 88 +
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 3338 bytes
.../treebuilders/__pycache__/base.cpython-37.pyc | Bin 0 -> 11261 bytes
.../treebuilders/__pycache__/etree.cpython-37.pyc | Bin 0 -> 11870 bytes
.../pip/_vendor/html5lib/treebuilders/base.py | 417 +
.../pip/_vendor/html5lib/treebuilders/dom.py | 236 +
.../pip/_vendor/html5lib/treebuilders/etree.py | 340 +
.../_vendor/html5lib/treebuilders/etree_lxml.py | 366 +
.../pip/_vendor/html5lib/treewalkers/__init__.py | 154 +
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 4015 bytes
.../pip/_vendor/html5lib/treewalkers/base.py | 252 +
.../pip/_vendor/html5lib/treewalkers/dom.py | 43 +
.../pip/_vendor/html5lib/treewalkers/etree.py | 130 +
.../pip/_vendor/html5lib/treewalkers/etree_lxml.py | 213 +
.../pip/_vendor/html5lib/treewalkers/genshi.py | 69 +
.../pip/_vendor/idna/__init__.py | 2 +
.../idna/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 275 bytes
.../_vendor/idna/__pycache__/core.cpython-37.pyc | Bin 0 -> 9078 bytes
.../idna/__pycache__/idnadata.cpython-37.pyc | Bin 0 -> 21449 bytes
.../idna/__pycache__/intranges.cpython-37.pyc | Bin 0 -> 1815 bytes
.../idna/__pycache__/package_data.cpython-37.pyc | Bin 0 -> 229 bytes
.../pip-19.0.3-py3.7.egg/pip/_vendor/idna/codec.py | 118 +
.../pip/_vendor/idna/compat.py | 12 +
.../pip-19.0.3-py3.7.egg/pip/_vendor/idna/core.py | 396 +
.../pip/_vendor/idna/idnadata.py | 1979 +++++
.../pip/_vendor/idna/intranges.py | 53 +
.../pip/_vendor/idna/package_data.py | 2 +
.../pip/_vendor/idna/uts46data.py | 8205 ++++++++++++++++++++
.../pip-19.0.3-py3.7.egg/pip/_vendor/ipaddress.py | 2419 ++++++
.../pip/_vendor/lockfile/__init__.py | 347 +
.../lockfile/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 9925 bytes
.../__pycache__/linklockfile.cpython-37.pyc | Bin 0 -> 2306 bytes
.../__pycache__/mkdirlockfile.cpython-37.pyc | Bin 0 -> 2668 bytes
.../pip/_vendor/lockfile/linklockfile.py | 73 +
.../pip/_vendor/lockfile/mkdirlockfile.py | 84 +
.../pip/_vendor/lockfile/pidlockfile.py | 190 +
.../pip/_vendor/lockfile/sqlitelockfile.py | 156 +
.../pip/_vendor/lockfile/symlinklockfile.py | 70 +
.../pip/_vendor/msgpack/__init__.py | 66 +
.../msgpack/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 2087 bytes
.../msgpack/__pycache__/_version.cpython-37.pyc | Bin 0 -> 236 bytes
.../msgpack/__pycache__/exceptions.cpython-37.pyc | Bin 0 -> 2192 bytes
.../msgpack/__pycache__/fallback.cpython-37.pyc | Bin 0 -> 24564 bytes
.../pip/_vendor/msgpack/_version.py | 1 +
.../pip/_vendor/msgpack/exceptions.py | 41 +
.../pip/_vendor/msgpack/fallback.py | 977 +++
.../pip/_vendor/packaging/__about__.py | 27 +
.../pip/_vendor/packaging/__init__.py | 26 +
.../packaging/__pycache__/__about__.cpython-37.pyc | Bin 0 -> 749 bytes
.../packaging/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 587 bytes
.../packaging/__pycache__/_compat.cpython-37.pyc | Bin 0 -> 1027 bytes
.../__pycache__/_structures.cpython-37.pyc | Bin 0 -> 2879 bytes
.../packaging/__pycache__/markers.cpython-37.pyc | Bin 0 -> 8882 bytes
.../__pycache__/requirements.cpython-37.pyc | Bin 0 -> 4000 bytes
.../__pycache__/specifiers.cpython-37.pyc | Bin 0 -> 19777 bytes
.../packaging/__pycache__/utils.cpython-37.pyc | Bin 0 -> 1465 bytes
.../packaging/__pycache__/version.cpython-37.pyc | Bin 0 -> 11969 bytes
.../pip/_vendor/packaging/_compat.py | 31 +
.../pip/_vendor/packaging/_structures.py | 68 +
.../pip/_vendor/packaging/markers.py | 296 +
.../pip/_vendor/packaging/requirements.py | 138 +
.../pip/_vendor/packaging/specifiers.py | 749 ++
.../pip/_vendor/packaging/utils.py | 57 +
.../pip/_vendor/packaging/version.py | 420 +
.../pip/_vendor/pep517/__init__.py | 4 +
.../pep517/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 300 bytes
.../pep517/__pycache__/compat.cpython-37.pyc | Bin 0 -> 1036 bytes
.../pep517/__pycache__/wrappers.cpython-37.pyc | Bin 0 -> 5501 bytes
.../pip/_vendor/pep517/_in_process.py | 207 +
.../pip/_vendor/pep517/build.py | 108 +
.../pip/_vendor/pep517/check.py | 202 +
.../pip/_vendor/pep517/colorlog.py | 115 +
.../pip/_vendor/pep517/compat.py | 23 +
.../pip/_vendor/pep517/envbuild.py | 158 +
.../pip/_vendor/pep517/wrappers.py | 163 +
.../pip/_vendor/pkg_resources/__init__.py | 3171 ++++++++
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 96837 bytes
.../__pycache__/py31compat.cpython-37.pyc | Bin 0 -> 660 bytes
.../pip/_vendor/pkg_resources/py31compat.py | 23 +
.../pip/_vendor/progress/__init__.py | 127 +
.../progress/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 3928 bytes
.../progress/__pycache__/bar.cpython-37.pyc | Bin 0 -> 2750 bytes
.../progress/__pycache__/helpers.cpython-37.pyc | Bin 0 -> 3034 bytes
.../progress/__pycache__/spinner.cpython-37.pyc | Bin 0 -> 1509 bytes
.../pip/_vendor/progress/bar.py | 94 +
.../pip/_vendor/progress/counter.py | 48 +
.../pip/_vendor/progress/helpers.py | 91 +
.../pip/_vendor/progress/spinner.py | 44 +
.../pip-19.0.3-py3.7.egg/pip/_vendor/pyparsing.py | 6452 +++++++++++++++
.../pip/_vendor/pytoml/__init__.py | 4 +
.../pytoml/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 394 bytes
.../_vendor/pytoml/__pycache__/core.cpython-37.pyc | Bin 0 -> 957 bytes
.../pytoml/__pycache__/parser.cpython-37.pyc | Bin 0 -> 10074 bytes
.../_vendor/pytoml/__pycache__/test.cpython-37.pyc | Bin 0 -> 1257 bytes
.../pytoml/__pycache__/utils.cpython-37.pyc | Bin 0 -> 2156 bytes
.../pytoml/__pycache__/writer.cpython-37.pyc | Bin 0 -> 3591 bytes
.../pip/_vendor/pytoml/core.py | 13 +
.../pip/_vendor/pytoml/parser.py | 341 +
.../pip/_vendor/pytoml/utils.py | 67 +
.../pip/_vendor/pytoml/writer.py | 106 +
.../pip/_vendor/requests/__init__.py | 133 +
.../requests/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 3505 bytes
.../__pycache__/__version__.cpython-37.pyc | Bin 0 -> 568 bytes
.../__pycache__/_internal_utils.cpython-37.pyc | Bin 0 -> 1326 bytes
.../requests/__pycache__/adapters.cpython-37.pyc | Bin 0 -> 16903 bytes
.../requests/__pycache__/api.cpython-37.pyc | Bin 0 -> 6509 bytes
.../requests/__pycache__/auth.cpython-37.pyc | Bin 0 -> 8360 bytes
.../requests/__pycache__/certs.cpython-37.pyc | Bin 0 -> 651 bytes
.../requests/__pycache__/compat.cpython-37.pyc | Bin 0 -> 1630 bytes
.../requests/__pycache__/cookies.cpython-37.pyc | Bin 0 -> 18805 bytes
.../requests/__pycache__/exceptions.cpython-37.pyc | Bin 0 -> 5523 bytes
.../requests/__pycache__/hooks.cpython-37.pyc | Bin 0 -> 998 bytes
.../requests/__pycache__/models.cpython-37.pyc | Bin 0 -> 24126 bytes
.../requests/__pycache__/packages.cpython-37.pyc | Bin 0 -> 528 bytes
.../requests/__pycache__/sessions.cpython-37.pyc | Bin 0 -> 19446 bytes
.../__pycache__/status_codes.cpython-37.pyc | Bin 0 -> 4184 bytes
.../requests/__pycache__/structures.cpython-37.pyc | Bin 0 -> 4397 bytes
.../requests/__pycache__/utils.cpython-37.pyc | Bin 0 -> 22057 bytes
.../pip/_vendor/requests/__version__.py | 14 +
.../pip/_vendor/requests/_internal_utils.py | 42 +
.../pip/_vendor/requests/adapters.py | 533 ++
.../pip/_vendor/requests/api.py | 158 +
.../pip/_vendor/requests/auth.py | 305 +
.../pip/_vendor/requests/certs.py | 18 +
.../pip/_vendor/requests/compat.py | 74 +
.../pip/_vendor/requests/cookies.py | 549 ++
.../pip/_vendor/requests/exceptions.py | 126 +
.../pip/_vendor/requests/help.py | 119 +
.../pip/_vendor/requests/hooks.py | 34 +
.../pip/_vendor/requests/models.py | 953 +++
.../pip/_vendor/requests/packages.py | 16 +
.../pip/_vendor/requests/sessions.py | 770 ++
.../pip/_vendor/requests/status_codes.py | 120 +
.../pip/_vendor/requests/structures.py | 103 +
.../pip/_vendor/requests/utils.py | 977 +++
.../pip-19.0.3-py3.7.egg/pip/_vendor/retrying.py | 267 +
.../pip-19.0.3-py3.7.egg/pip/_vendor/six.py | 952 +++
.../pip/_vendor/urllib3/__init__.py | 92 +
.../urllib3/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 2133 bytes
.../__pycache__/_collections.cpython-37.pyc | Bin 0 -> 10697 bytes
.../urllib3/__pycache__/connection.cpython-37.pyc | Bin 0 -> 10166 bytes
.../__pycache__/connectionpool.cpython-37.pyc | Bin 0 -> 23671 bytes
.../urllib3/__pycache__/exceptions.cpython-37.pyc | Bin 0 -> 10418 bytes
.../urllib3/__pycache__/fields.cpython-37.pyc | Bin 0 -> 5886 bytes
.../urllib3/__pycache__/filepost.cpython-37.pyc | Bin 0 -> 2778 bytes
.../urllib3/__pycache__/poolmanager.cpython-37.pyc | Bin 0 -> 12738 bytes
.../urllib3/__pycache__/request.cpython-37.pyc | Bin 0 -> 5600 bytes
.../urllib3/__pycache__/response.cpython-37.pyc | Bin 0 -> 18765 bytes
.../pip/_vendor/urllib3/_collections.py | 329 +
.../pip/_vendor/urllib3/connection.py | 391 +
.../pip/_vendor/urllib3/connectionpool.py | 896 +++
.../pip/_vendor/urllib3/contrib/__init__.py | 0
.../contrib/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 217 bytes
.../__pycache__/_appengine_environ.cpython-37.pyc | Bin 0 -> 1113 bytes
.../contrib/__pycache__/socks.cpython-37.pyc | Bin 0 -> 4923 bytes
.../_vendor/urllib3/contrib/_appengine_environ.py | 30 +
.../urllib3/contrib/_securetransport/__init__.py | 0
.../urllib3/contrib/_securetransport/bindings.py | 593 ++
.../urllib3/contrib/_securetransport/low_level.py | 346 +
.../pip/_vendor/urllib3/contrib/appengine.py | 289 +
.../pip/_vendor/urllib3/contrib/ntlmpool.py | 111 +
.../pip/_vendor/urllib3/contrib/pyopenssl.py | 466 ++
.../pip/_vendor/urllib3/contrib/securetransport.py | 804 ++
.../pip/_vendor/urllib3/contrib/socks.py | 192 +
.../pip/_vendor/urllib3/exceptions.py | 246 +
.../pip/_vendor/urllib3/fields.py | 178 +
.../pip/_vendor/urllib3/filepost.py | 98 +
.../pip/_vendor/urllib3/packages/__init__.py | 5 +
.../packages/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 331 bytes
.../packages/__pycache__/six.cpython-37.pyc | Bin 0 -> 24419 bytes
.../_vendor/urllib3/packages/backports/__init__.py | 0
.../_vendor/urllib3/packages/backports/makefile.py | 53 +
.../pip/_vendor/urllib3/packages/six.py | 868 +++
.../packages/ssl_match_hostname/__init__.py | 19 +
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 572 bytes
.../packages/ssl_match_hostname/_implementation.py | 156 +
.../pip/_vendor/urllib3/poolmanager.py | 450 ++
.../pip/_vendor/urllib3/request.py | 150 +
.../pip/_vendor/urllib3/response.py | 705 ++
.../pip/_vendor/urllib3/util/__init__.py | 54 +
.../util/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1009 bytes
.../util/__pycache__/connection.cpython-37.pyc | Bin 0 -> 3184 bytes
.../urllib3/util/__pycache__/queue.cpython-37.pyc | Bin 0 -> 1058 bytes
.../util/__pycache__/request.cpython-37.pyc | Bin 0 -> 3239 bytes
.../util/__pycache__/response.cpython-37.pyc | Bin 0 -> 1987 bytes
.../urllib3/util/__pycache__/retry.cpython-37.pyc | Bin 0 -> 12673 bytes
.../urllib3/util/__pycache__/ssl_.cpython-37.pyc | Bin 0 -> 9565 bytes
.../util/__pycache__/timeout.cpython-37.pyc | Bin 0 -> 8788 bytes
.../urllib3/util/__pycache__/url.cpython-37.pyc | Bin 0 -> 5198 bytes
.../urllib3/util/__pycache__/wait.cpython-37.pyc | Bin 0 -> 3150 bytes
.../pip/_vendor/urllib3/util/connection.py | 134 +
.../pip/_vendor/urllib3/util/queue.py | 21 +
.../pip/_vendor/urllib3/util/request.py | 118 +
.../pip/_vendor/urllib3/util/response.py | 87 +
.../pip/_vendor/urllib3/util/retry.py | 411 +
.../pip/_vendor/urllib3/util/ssl_.py | 381 +
.../pip/_vendor/urllib3/util/timeout.py | 242 +
.../pip/_vendor/urllib3/util/url.py | 230 +
.../pip/_vendor/urllib3/util/wait.py | 150 +
.../pip/_vendor/webencodings/__init__.py | 342 +
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 9693 bytes
.../webencodings/__pycache__/labels.cpython-37.pyc | Bin 0 -> 4107 bytes
.../pip/_vendor/webencodings/labels.py | 231 +
.../pip/_vendor/webencodings/mklabels.py | 59 +
.../pip/_vendor/webencodings/tests.py | 153 +
.../pip/_vendor/webencodings/x_user_defined.py | 325 +
.../site-packages/pylint-2.4.4.dist-info/COPYING | 340 +
.../site-packages/pylint-2.4.4.dist-info/INSTALLER | 1 +
.../site-packages/pylint-2.4.4.dist-info/METADATA | 202 +
.../site-packages/pylint-2.4.4.dist-info/RECORD | 161 +
.../Lib/site-packages/pylint-2.4.4.dist-info/WHEEL | 5 +
.../pylint-2.4.4.dist-info/entry_points.txt | 6 +
.../pylint-2.4.4.dist-info/top_level.txt | 1 +
.../venv/Lib/site-packages/pylint/__init__.py | 43 +
.../venv/Lib/site-packages/pylint/__main__.py | 7 +
.../venv/Lib/site-packages/pylint/__pkginfo__.py | 85 +
.../pylint/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1084 bytes
.../pylint/__pycache__/__main__.cpython-37.pyc | Bin 0 -> 210 bytes
.../pylint/__pycache__/__pkginfo__.cpython-37.pyc | Bin 0 -> 2632 bytes
.../pylint/__pycache__/config.cpython-37.pyc | Bin 0 -> 25815 bytes
.../pylint/__pycache__/constants.cpython-37.pyc | Bin 0 -> 1018 bytes
.../pylint/__pycache__/epylint.cpython-37.pyc | Bin 0 -> 4958 bytes
.../pylint/__pycache__/exceptions.cpython-37.pyc | Bin 0 -> 1361 bytes
.../pylint/__pycache__/graph.cpython-37.pyc | Bin 0 -> 5211 bytes
.../pylint/__pycache__/interfaces.cpython-37.pyc | Bin 0 -> 3665 bytes
.../pylint/__pycache__/lint.cpython-37.pyc | Bin 0 -> 45362 bytes
.../pylint/__pycache__/testutils.cpython-37.pyc | Bin 0 -> 9521 bytes
.../Lib/site-packages/pylint/checkers/__init__.py | 64 +
.../checkers/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 1580 bytes
.../checkers/__pycache__/async.cpython-37.pyc | Bin 0 -> 2722 bytes
.../checkers/__pycache__/base.cpython-37.pyc | Bin 0 -> 61785 bytes
.../__pycache__/base_checker.cpython-37.pyc | Bin 0 -> 6481 bytes
.../checkers/__pycache__/classes.cpython-37.pyc | Bin 0 -> 44537 bytes
.../__pycache__/design_analysis.cpython-37.pyc | Bin 0 -> 11667 bytes
.../checkers/__pycache__/exceptions.cpython-37.pyc | Bin 0 -> 15668 bytes
.../checkers/__pycache__/format.cpython-37.pyc | Bin 0 -> 31580 bytes
.../checkers/__pycache__/imports.cpython-37.pyc | Bin 0 -> 25427 bytes
.../checkers/__pycache__/logging.cpython-37.pyc | Bin 0 -> 10919 bytes
.../checkers/__pycache__/misc.cpython-37.pyc | Bin 0 -> 4597 bytes
.../checkers/__pycache__/newstyle.cpython-37.pyc | Bin 0 -> 2422 bytes
.../checkers/__pycache__/python3.cpython-37.pyc | Bin 0 -> 34941 bytes
.../__pycache__/raw_metrics.cpython-37.pyc | Bin 0 -> 3254 bytes
.../__pycache__/refactoring.cpython-37.pyc | Bin 0 -> 45321 bytes
.../checkers/__pycache__/similar.cpython-37.pyc | Bin 0 -> 12304 bytes
.../checkers/__pycache__/spelling.cpython-37.pyc | Bin 0 -> 9755 bytes
.../checkers/__pycache__/stdlib.cpython-37.pyc | Bin 0 -> 12738 bytes
.../checkers/__pycache__/strings.cpython-37.pyc | Bin 0 -> 17427 bytes
.../checkers/__pycache__/typecheck.cpython-37.pyc | Bin 0 -> 40274 bytes
.../checkers/__pycache__/utils.cpython-37.pyc | Bin 0 -> 31460 bytes
.../checkers/__pycache__/variables.cpython-37.pyc | Bin 0 -> 44587 bytes
.../Lib/site-packages/pylint/checkers/async.py | 89 +
.../venv/Lib/site-packages/pylint/checkers/base.py | 2333 ++++++
.../site-packages/pylint/checkers/base_checker.py | 187 +
.../Lib/site-packages/pylint/checkers/classes.py | 1844 +++++
.../pylint/checkers/design_analysis.py | 496 ++
.../site-packages/pylint/checkers/exceptions.py | 546 ++
.../Lib/site-packages/pylint/checkers/format.py | 1332 ++++
.../Lib/site-packages/pylint/checkers/imports.py | 981 +++
.../Lib/site-packages/pylint/checkers/logging.py | 384 +
.../venv/Lib/site-packages/pylint/checkers/misc.py | 171 +
.../Lib/site-packages/pylint/checkers/newstyle.py | 127 +
.../Lib/site-packages/pylint/checkers/python3.py | 1398 ++++
.../site-packages/pylint/checkers/raw_metrics.py | 119 +
.../site-packages/pylint/checkers/refactoring.py | 1510 ++++
.../Lib/site-packages/pylint/checkers/similar.py | 452 ++
.../Lib/site-packages/pylint/checkers/spelling.py | 411 +
.../Lib/site-packages/pylint/checkers/stdlib.py | 452 ++
.../Lib/site-packages/pylint/checkers/strings.py | 755 ++
.../Lib/site-packages/pylint/checkers/typecheck.py | 1770 +++++
.../Lib/site-packages/pylint/checkers/utils.py | 1253 +++
.../Lib/site-packages/pylint/checkers/variables.py | 1987 +++++
.../python/venv/Lib/site-packages/pylint/config.py | 913 +++
.../venv/Lib/site-packages/pylint/constants.py | 43 +
.../venv/Lib/site-packages/pylint/epylint.py | 197 +
.../venv/Lib/site-packages/pylint/exceptions.py | 29 +
.../site-packages/pylint/extensions/__init__.py | 0
.../extensions/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 181 bytes
.../__pycache__/_check_docs_utils.cpython-37.pyc | Bin 0 -> 18819 bytes
.../__pycache__/bad_builtin.cpython-37.pyc | Bin 0 -> 1967 bytes
.../__pycache__/broad_try_clause.cpython-37.pyc | Bin 0 -> 1702 bytes
.../__pycache__/check_docs.cpython-37.pyc | Bin 0 -> 685 bytes
.../__pycache__/check_elif.cpython-37.pyc | Bin 0 -> 2647 bytes
.../__pycache__/comparetozero.cpython-37.pyc | Bin 0 -> 1959 bytes
.../__pycache__/docparams.cpython-37.pyc | Bin 0 -> 14746 bytes
.../extensions/__pycache__/docstyle.cpython-37.pyc | Bin 0 -> 2503 bytes
.../__pycache__/emptystring.cpython-37.pyc | Bin 0 -> 2035 bytes
.../extensions/__pycache__/mccabe.cpython-37.pyc | Bin 0 -> 5579 bytes
.../overlapping_exceptions.cpython-37.pyc | Bin 0 -> 2604 bytes
.../redefined_variable_type.cpython-37.pyc | Bin 0 -> 3248 bytes
.../pylint/extensions/_check_docs_utils.py | 792 ++
.../site-packages/pylint/extensions/bad_builtin.py | 69 +
.../pylint/extensions/broad_try_clause.py | 59 +
.../site-packages/pylint/extensions/check_docs.py | 23 +
.../site-packages/pylint/extensions/check_elif.py | 77 +
.../pylint/extensions/comparetozero.py | 74 +
.../site-packages/pylint/extensions/docparams.py | 536 ++
.../site-packages/pylint/extensions/docstyle.py | 89 +
.../site-packages/pylint/extensions/emptystring.py | 74 +
.../Lib/site-packages/pylint/extensions/mccabe.py | 196 +
.../pylint/extensions/overlapping_exceptions.py | 88 +
.../pylint/extensions/redefined_variable_type.py | 116 +
.../python/venv/Lib/site-packages/pylint/graph.py | 197 +
.../venv/Lib/site-packages/pylint/interfaces.py | 102 +
.../python/venv/Lib/site-packages/pylint/lint.py | 1817 +++++
.../Lib/site-packages/pylint/message/__init__.py | 54 +
.../message/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 664 bytes
.../message/__pycache__/message.cpython-37.pyc | Bin 0 -> 1225 bytes
.../__pycache__/message_definition.cpython-37.pyc | Bin 0 -> 2982 bytes
.../message_definition_store.cpython-37.pyc | Bin 0 -> 4075 bytes
.../message_handler_mix_in.cpython-37.pyc | Bin 0 -> 11049 bytes
.../__pycache__/message_id_store.cpython-37.pyc | Bin 0 -> 4925 bytes
.../Lib/site-packages/pylint/message/message.py | 53 +
.../pylint/message/message_definition.py | 84 +
.../pylint/message/message_definition_store.py | 90 +
.../pylint/message/message_handler_mix_in.py | 393 +
.../pylint/message/message_id_store.py | 128 +
.../Lib/site-packages/pylint/pyreverse/__init__.py | 8 +
.../pyreverse/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 241 bytes
.../__pycache__/diadefslib.cpython-37.pyc | Bin 0 -> 7621 bytes
.../pyreverse/__pycache__/diagrams.cpython-37.pyc | Bin 0 -> 8716 bytes
.../pyreverse/__pycache__/inspector.cpython-37.pyc | Bin 0 -> 10187 bytes
.../pyreverse/__pycache__/main.cpython-37.pyc | Bin 0 -> 4522 bytes
.../pyreverse/__pycache__/utils.cpython-37.pyc | Bin 0 -> 5787 bytes
.../pyreverse/__pycache__/vcgutils.cpython-37.pyc | Bin 0 -> 4697 bytes
.../pyreverse/__pycache__/writer.cpython-37.pyc | Bin 0 -> 7286 bytes
.../site-packages/pylint/pyreverse/diadefslib.py | 238 +
.../Lib/site-packages/pylint/pyreverse/diagrams.py | 268 +
.../site-packages/pylint/pyreverse/inspector.py | 357 +
.../Lib/site-packages/pylint/pyreverse/main.py | 214 +
.../Lib/site-packages/pylint/pyreverse/utils.py | 220 +
.../Lib/site-packages/pylint/pyreverse/vcgutils.py | 229 +
.../Lib/site-packages/pylint/pyreverse/writer.py | 213 +
.../Lib/site-packages/pylint/reporters/__init__.py | 34 +
.../reporters/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 823 bytes
.../__pycache__/base_reporter.cpython-37.pyc | Bin 0 -> 2767 bytes
.../__pycache__/collecting_reporter.cpython-37.pyc | Bin 0 -> 817 bytes
.../__pycache__/json_reporter.cpython-37.pyc | Bin 0 -> 2003 bytes
.../reports_handler_mix_in.cpython-37.pyc | Bin 0 -> 3028 bytes
.../reporters/__pycache__/text.cpython-37.pyc | Bin 0 -> 7263 bytes
.../pylint/reporters/base_reporter.py | 66 +
.../pylint/reporters/collecting_reporter.py | 21 +
.../pylint/reporters/json_reporter.py | 58 +
.../pylint/reporters/reports_handler_mix_in.py | 79 +
.../Lib/site-packages/pylint/reporters/text.py | 247 +
.../pylint/reporters/ureports/__init__.py | 96 +
.../ureports/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 3065 bytes
.../ureports/__pycache__/nodes.cpython-37.pyc | Bin 0 -> 6062 bytes
.../__pycache__/text_writer.cpython-37.pyc | Bin 0 -> 3673 bytes
.../pylint/reporters/ureports/nodes.py | 188 +
.../pylint/reporters/ureports/text_writer.py | 94 +
.../venv/Lib/site-packages/pylint/testutils.py | 298 +
.../Lib/site-packages/pylint/utils/__init__.py | 64 +
.../utils/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 869 bytes
.../utils/__pycache__/ast_walker.cpython-37.pyc | Bin 0 -> 2078 bytes
.../utils/__pycache__/file_state.cpython-37.pyc | Bin 0 -> 3852 bytes
.../pylint/utils/__pycache__/utils.cpython-37.pyc | Bin 0 -> 10339 bytes
.../Lib/site-packages/pylint/utils/ast_walker.py | 79 +
.../Lib/site-packages/pylint/utils/file_state.py | 138 +
.../venv/Lib/site-packages/pylint/utils/utils.py | 371 +
.../Lib/site-packages/setuptools-40.8.0-py3.7.egg | Bin 0 -> 571911 bytes
.../python/venv/Lib/site-packages/setuptools.pth | 1 +
.../site-packages/six-1.14.0.dist-info/INSTALLER | 1 +
.../Lib/site-packages/six-1.14.0.dist-info/LICENSE | 18 +
.../site-packages/six-1.14.0.dist-info/METADATA | 49 +
.../Lib/site-packages/six-1.14.0.dist-info/RECORD | 8 +
.../Lib/site-packages/six-1.14.0.dist-info/WHEEL | 6 +
.../six-1.14.0.dist-info/top_level.txt | 1 +
src/main/python/venv/Lib/site-packages/six.py | 980 +++
.../typed_ast-1.4.1.dist-info/INSTALLER | 1 +
.../typed_ast-1.4.1.dist-info/LICENSE | 290 +
.../typed_ast-1.4.1.dist-info/METADATA | 28 +
.../site-packages/typed_ast-1.4.1.dist-info/RECORD | 18 +
.../site-packages/typed_ast-1.4.1.dist-info/WHEEL | 5 +
.../typed_ast-1.4.1.dist-info/top_level.txt | 3 +
.../venv/Lib/site-packages/typed_ast/__init__.py | 1 +
.../typed_ast/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 197 bytes
.../typed_ast/__pycache__/ast27.cpython-37.pyc | Bin 0 -> 12419 bytes
.../typed_ast/__pycache__/ast3.cpython-37.pyc | Bin 0 -> 13335 bytes
.../__pycache__/conversions.cpython-37.pyc | Bin 0 -> 7916 bytes
.../typed_ast/_ast27.cp37-win_amd64.pyd | Bin 0 -> 165888 bytes
.../typed_ast/_ast3.cp37-win_amd64.pyd | Bin 0 -> 186368 bytes
.../venv/Lib/site-packages/typed_ast/ast27.py | 324 +
.../venv/Lib/site-packages/typed_ast/ast3.py | 348 +
.../Lib/site-packages/typed_ast/conversions.py | 232 +
.../tests/__pycache__/test_basics.cpython-37.pyc | Bin 0 -> 7451 bytes
.../site-packages/typed_ast/tests/test_basics.py | 326 +
.../wrapt-1.11.2-py3.7.egg-info/PKG-INFO | 166 +
.../wrapt-1.11.2-py3.7.egg-info/SOURCES.txt | 10 +
.../dependency_links.txt | 1 +
.../installed-files.txt | 12 +
.../wrapt-1.11.2-py3.7.egg-info/top_level.txt | 1 +
.../venv/Lib/site-packages/wrapt/__init__.py | 16 +
.../wrapt/__pycache__/__init__.cpython-37.pyc | Bin 0 -> 984 bytes
.../wrapt/__pycache__/decorators.cpython-37.pyc | Bin 0 -> 8918 bytes
.../wrapt/__pycache__/importer.cpython-37.pyc | Bin 0 -> 4242 bytes
.../wrapt/__pycache__/wrappers.cpython-37.pyc | Bin 0 -> 24030 bytes
.../venv/Lib/site-packages/wrapt/decorators.py | 514 ++
.../venv/Lib/site-packages/wrapt/importer.py | 230 +
.../venv/Lib/site-packages/wrapt/wrappers.py | 943 +++
src/main/python/venv/Scripts/Activate.ps1 | 51 +
src/main/python/venv/Scripts/activate | 76 +
src/main/python/venv/Scripts/activate.bat | 45 +
src/main/python/venv/Scripts/deactivate.bat | 21 +
.../python/venv/Scripts/easy_install-3.7-script.py | 12 +
src/main/python/venv/Scripts/easy_install-3.7.exe | Bin 0 -> 74752 bytes
.../python/venv/Scripts/easy_install-script.py | 12 +
src/main/python/venv/Scripts/easy_install.exe | Bin 0 -> 74752 bytes
src/main/python/venv/Scripts/epylint.exe | Bin 0 -> 102800 bytes
src/main/python/venv/Scripts/isort.exe | Bin 0 -> 102790 bytes
src/main/python/venv/Scripts/pip-script.py | 12 +
src/main/python/venv/Scripts/pip.exe | Bin 0 -> 74752 bytes
src/main/python/venv/Scripts/pip3-script.py | 12 +
src/main/python/venv/Scripts/pip3.7-script.py | 12 +
src/main/python/venv/Scripts/pip3.7.exe | Bin 0 -> 74752 bytes
src/main/python/venv/Scripts/pip3.exe | Bin 0 -> 74752 bytes
src/main/python/venv/Scripts/pylint.exe | Bin 0 -> 102798 bytes
src/main/python/venv/Scripts/pyreverse.exe | Bin 0 -> 102804 bytes
src/main/python/venv/Scripts/python.exe | Bin 0 -> 522768 bytes
src/main/python/venv/Scripts/python_d.exe | Bin 0 -> 688128 bytes
src/main/python/venv/Scripts/pythonw.exe | Bin 0 -> 522256 bytes
src/main/python/venv/Scripts/pythonw_d.exe | Bin 0 -> 687104 bytes
src/main/python/venv/Scripts/symilar.exe | Bin 0 -> 102800 bytes
src/main/python/venv/pyvenv.cfg | 3 +
1062 files changed, 174970 insertions(+)
create mode 100644 src/main/python/DockWidgets/DistillationColumnStagewiseResults.py
create mode 100644 src/main/python/DockWidgets/DockWidget.py
create mode 100644 src/main/python/DockWidgets/DockWidgetCompoundSeparator.py
create mode 100644 src/main/python/DockWidgets/DockWidgetCompressorExpander.py
create mode 100644 src/main/python/DockWidgets/DockWidgetDistillationColumn.py
create mode 100644 src/main/python/DockWidgets/DockWidgetFlash.py
create mode 100644 src/main/python/DockWidgets/DockWidgetMaterialStream.py
create mode 100644 src/main/python/DockWidgets/DockWidgetMixer.py
create mode 100644 src/main/python/DockWidgets/DockWidgetShortcutColumn.py
create mode 100644 src/main/python/DockWidgets/DockWidgetSplitter.py
create mode 100644 src/main/python/DockWidgets/__init__.py
create mode 100644 src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidget.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidget.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-36.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-37.pyc
create mode 100644 src/main/python/DockWidgets/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/OMChem/CompSep.py
create mode 100644 src/main/python/OMChem/ConvReactor.py
create mode 100644 src/main/python/OMChem/Cooler.py
create mode 100644 src/main/python/OMChem/DistCol.py
create mode 100644 src/main/python/OMChem/EngStm.py
create mode 100644 src/main/python/OMChem/Flash.py
create mode 100644 src/main/python/OMChem/Flowsheet.py
create mode 100644 src/main/python/OMChem/Heater.py
create mode 100644 src/main/python/OMChem/Mixer.py
create mode 100644 src/main/python/OMChem/Pump.py
create mode 100644 src/main/python/OMChem/ShortcutColumn.py
create mode 100644 src/main/python/OMChem/Splitter.py
create mode 100644 src/main/python/OMChem/Valve.py
create mode 100644 src/main/python/OMChem/__init__.py
create mode 100644 src/main/python/OMChem/__pycache__/EngStm.cpython-36.pyc
create mode 100644 src/main/python/OMChem/__pycache__/EngStm.cpython-37.pyc
create mode 100644 src/main/python/OMChem/__pycache__/Flowsheet.cpython-36.pyc
create mode 100644 src/main/python/OMChem/__pycache__/Flowsheet.cpython-37.pyc
create mode 100644 src/main/python/OMChem/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/OMChem/adiabatic_comp.py
create mode 100644 src/main/python/OMChem/adiabatic_exp.py
create mode 100644 src/main/python/OMChem/setup.py
create mode 100644 src/main/python/Redo.dat
create mode 100644 src/main/python/mainApp.py
create mode 100644 src/main/python/utils/Bin_Phase_env.py
create mode 100644 src/main/python/utils/ComponentSelector.py
create mode 100644 src/main/python/utils/Container.py
create mode 100644 src/main/python/utils/Graphics.py
create mode 100644 src/main/python/utils/Streams.py
create mode 100644 src/main/python/utils/UnitOperations.py
create mode 100644 src/main/python/utils/__init__.py
create mode 100644 src/main/python/utils/__pycache__/Bin_Phase_env.cpython-37.pyc
create mode 100644 src/main/python/utils/__pycache__/ComponentSelector.cpython-37.pyc
create mode 100644 src/main/python/utils/__pycache__/Container.cpython-37.pyc
create mode 100644 src/main/python/utils/__pycache__/Graphics.cpython-37.pyc
create mode 100644 src/main/python/utils/__pycache__/Streams.cpython-37.pyc
create mode 100644 src/main/python/utils/__pycache__/UnitOperations.cpython-37.pyc
create mode 100644 src/main/python/utils/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/utils/thermopackage.txt
create mode 100644 src/main/python/venv/Lib/site-packages/__pycache__/mccabe.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/__pycache__/six.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING
create mode 100644 src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING.LESSER
create mode 100644 src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pkginfo__.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/_ast.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/arguments.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/as_string.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/bases.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_argparse.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_attrs.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_collections.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_crypt.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_curses.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_dataclasses.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_dateutil.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_fstrings.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_functools.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_gi.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_hashlib.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_http.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_io.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_mechanize.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_nose.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_pytest.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_qt.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_random.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_re.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_six.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_ssl.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_subprocess.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_threading.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_typing.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/brain/brain_uuid.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/builder.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/context.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/decorators.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/helpers.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/inference.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/spec.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/util.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/interpreter/objectmodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/manager.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/mixins.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/modutils.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/node_classes.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/nodes.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/objects.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/protocols.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/raw_building.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/rebuilder.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/scoped_nodes.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/test_utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/transforms.py
create mode 100644 src/main/python/venv/Lib/site-packages/astroid/util.py
create mode 100644 src/main/python/venv/Lib/site-packages/colorama-0.4.3.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/colorama-0.4.3.dist-info/LICENSE.txt
create mode 100644 src/main/python/venv/Lib/site-packages/colorama-0.4.3.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/colorama-0.4.3.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/colorama-0.4.3.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/colorama-0.4.3.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/__pycache__/ansi.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/__pycache__/ansitowin32.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/__pycache__/initialise.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/__pycache__/win32.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/__pycache__/winterm.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/ansi.py
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/ansitowin32.py
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/initialise.py
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/win32.py
create mode 100644 src/main/python/venv/Lib/site-packages/colorama/winterm.py
create mode 100644 src/main/python/venv/Lib/site-packages/easy-install.pth
create mode 100644 src/main/python/venv/Lib/site-packages/isort-4.3.21.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/isort-4.3.21.dist-info/LICENSE
create mode 100644 src/main/python/venv/Lib/site-packages/isort-4.3.21.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/isort-4.3.21.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/isort-4.3.21.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/isort-4.3.21.dist-info/entry_points.txt
create mode 100644 src/main/python/venv/Lib/site-packages/isort-4.3.21.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__main__.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/__main__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/finders.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/hooks.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/isort.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/main.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/natural.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/pie_slice.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/pylama_isort.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/settings.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/isort/finders.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/hooks.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/isort.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/main.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/natural.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/pie_slice.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/pylama_isort.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/settings.py
create mode 100644 src/main/python/venv/Lib/site-packages/isort/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy-1.4.3.dist-info/AUTHORS.rst
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy-1.4.3.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy-1.4.3.dist-info/LICENSE
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy-1.4.3.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy-1.4.3.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy-1.4.3.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy-1.4.3.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/__pycache__/_version.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/__pycache__/compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/__pycache__/simple.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/__pycache__/slots.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/_version.py
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/cext.cp37-win_amd64.pyd
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/simple.py
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/slots.py
create mode 100644 src/main/python/venv/Lib/site-packages/lazy_object_proxy/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/DESCRIPTION.rst
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/entry_points.txt
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/metadata.json
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe-0.6.1.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/mccabe.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/PKG-INFO
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/SOURCES.txt
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/dependency_links.txt
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/entry_points.txt
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/not-zip-safe
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__main__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__pycache__/__main__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/build_env.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/cache.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/configuration.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/download.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/index.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/locations.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pep425tags.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pyproject.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/resolve.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/wheel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/build_env.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cache.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/parser.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/autocompletion.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/base_command.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/cmdoptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/main_parser.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/parser.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/status_codes.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/check.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/completion.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/download.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/hash.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/help.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/install.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/list.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/search.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/show.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/check.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/completion.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/configuration.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/download.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/freeze.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/hash.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/help.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/install.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/list.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/search.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/show.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/uninstall.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/wheel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/configuration.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/download.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/index.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/locations.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/candidate.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/format_control.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/index.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/link.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/candidate.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/format_control.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/index.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/link.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/check.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/check.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/freeze.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/prepare.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pep425tags.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pyproject.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/resolve.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/git.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/bazaar.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/git.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/mercurial.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/subversion.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/wheel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/__pycache__/appdirs.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/__pycache__/pyparsing.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/__pycache__/retrying.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/__pycache__/six.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/appdirs.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/adapter.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/cache.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/file_cache.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/controller.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/heuristics.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/serialize.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__main__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__pycache__/core.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/cacert.pem
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/core.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__pycache__/version.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/big5freq.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/big5prober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/chardistribution.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/charsetgroupprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/charsetprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cli/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cli/chardetect.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/codingstatemachine.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cp949prober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/enums.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/escprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/escsm.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/eucjpprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euckrfreq.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euckrprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euctwfreq.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euctwprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/gb2312freq.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/gb2312prober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/hebrewprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/jisfreq.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/jpcntx.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langbulgarianmodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langcyrillicmodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langgreekmodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langhebrewmodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langhungarianmodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langthaimodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/langturkishmodel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/latin1prober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/mbcharsetprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/mbcsgroupprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/mbcssm.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/sbcharsetprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/sbcsgroupprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/sjisprober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/universaldetector.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/utf8prober.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/version.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/ansi.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/ansitowin32.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/initialise.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/win32.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/winterm.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__pycache__/util.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/misc.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/shutil.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/tarfile.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/database.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/index.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/locators.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/manifest.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/markers.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/metadata.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/resources.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/scripts.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t32.exe
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t64.exe
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/util.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/version.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w32.exe
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w64.exe
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/wheel.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distro.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_ihatexml.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_inputstream.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_tokenizer.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/_base.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/datrie.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/py.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/constants.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/base.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/inject_meta_charset.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/lint.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/optionaltags.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/sanitizer.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/whitespace.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/html5parser.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/serializer.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/genshi.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/sax.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/base.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/dom.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/base.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/dom.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/etree_lxml.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/genshi.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__pycache__/core.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/codec.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/core.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/idnadata.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/intranges.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/package_data.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/uts46data.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/ipaddress.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/__pycache__/linklockfile.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/linklockfile.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/mkdirlockfile.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/pidlockfile.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/sqlitelockfile.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/symlinklockfile.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/_version.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/fallback.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__about__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__pycache__/version.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/_compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/_structures.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/markers.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/requirements.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/specifiers.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/version.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/_in_process.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/build.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/check.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/colorlog.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/envbuild.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/wrappers.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pkg_resources/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pkg_resources/py31compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/__pycache__/bar.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/__pycache__/helpers.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/bar.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/counter.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/helpers.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/spinner.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pyparsing.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__pycache__/core.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__pycache__/parser.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__pycache__/test.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__pycache__/writer.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/core.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/parser.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/writer.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/api.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/auth.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/certs.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/compat.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/models.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/packages.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/structures.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__version__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/_internal_utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/adapters.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/api.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/auth.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/certs.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/compat.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/cookies.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/help.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/hooks.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/models.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/packages.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/sessions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/status_codes.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/structures.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/retrying.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/six.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/_collections.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/connection.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/connectionpool.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_appengine_environ.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/appengine.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/ntlmpool.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/pyopenssl.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/securetransport.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/socks.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/fields.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/filepost.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/backports/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/backports/makefile.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/six.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/poolmanager.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/request.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/response.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/connection.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/queue.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/request.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/response.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/retry.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/ssl_.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/timeout.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/url.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/wait.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/labels.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/mklabels.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/tests.py
create mode 100644 src/main/python/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/x_user_defined.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint-2.4.4.dist-info/COPYING
create mode 100644 src/main/python/venv/Lib/site-packages/pylint-2.4.4.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/pylint-2.4.4.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/pylint-2.4.4.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/pylint-2.4.4.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/pylint-2.4.4.dist-info/entry_points.txt
create mode 100644 src/main/python/venv/Lib/site-packages/pylint-2.4.4.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__main__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pkginfo__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/__main__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/__pkginfo__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/config.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/constants.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/epylint.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/graph.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/interfaces.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/lint.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/__pycache__/testutils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/async.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/base.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/base_checker.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/classes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/design_analysis.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/format.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/imports.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/logging.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/misc.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/newstyle.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/python3.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/raw_metrics.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/refactoring.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/similar.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/spelling.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/stdlib.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/strings.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/typecheck.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/__pycache__/variables.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/async.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/base.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/base_checker.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/classes.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/design_analysis.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/format.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/imports.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/logging.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/misc.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/newstyle.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/python3.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/raw_metrics.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/refactoring.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/similar.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/spelling.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/stdlib.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/strings.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/typecheck.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/checkers/variables.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/config.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/constants.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/epylint.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/_check_docs_utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/bad_builtin.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/broad_try_clause.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/check_docs.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/check_elif.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/comparetozero.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/docparams.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/docstyle.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/emptystring.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/mccabe.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/overlapping_exceptions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/__pycache__/redefined_variable_type.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/_check_docs_utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/bad_builtin.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/broad_try_clause.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/check_docs.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/check_elif.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/comparetozero.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/docparams.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/docstyle.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/emptystring.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/mccabe.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/overlapping_exceptions.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/extensions/redefined_variable_type.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/graph.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/interfaces.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/lint.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/__pycache__/message.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/__pycache__/message_definition.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/__pycache__/message_definition_store.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/__pycache__/message_handler_mix_in.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/__pycache__/message_id_store.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/message.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/message_definition.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/message_definition_store.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/message_handler_mix_in.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/message/message_id_store.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diadefslib.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/diagrams.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/inspector.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/main.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/vcgutils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/__pycache__/writer.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/diadefslib.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/diagrams.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/inspector.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/main.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/vcgutils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/pyreverse/writer.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/__pycache__/base_reporter.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/__pycache__/collecting_reporter.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/__pycache__/json_reporter.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/__pycache__/reports_handler_mix_in.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/__pycache__/text.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/base_reporter.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/collecting_reporter.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/json_reporter.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/reports_handler_mix_in.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/text.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/ureports/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/nodes.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/ureports/__pycache__/text_writer.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/ureports/nodes.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/reporters/ureports/text_writer.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/testutils.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/__pycache__/ast_walker.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/__pycache__/file_state.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/__pycache__/utils.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/ast_walker.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/file_state.py
create mode 100644 src/main/python/venv/Lib/site-packages/pylint/utils/utils.py
create mode 100644 src/main/python/venv/Lib/site-packages/setuptools-40.8.0-py3.7.egg
create mode 100644 src/main/python/venv/Lib/site-packages/setuptools.pth
create mode 100644 src/main/python/venv/Lib/site-packages/six-1.14.0.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/six-1.14.0.dist-info/LICENSE
create mode 100644 src/main/python/venv/Lib/site-packages/six-1.14.0.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/six-1.14.0.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/six-1.14.0.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/six-1.14.0.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/six.py
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast-1.4.1.dist-info/INSTALLER
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast-1.4.1.dist-info/LICENSE
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast-1.4.1.dist-info/METADATA
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast-1.4.1.dist-info/RECORD
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast-1.4.1.dist-info/WHEEL
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast-1.4.1.dist-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/__pycache__/ast27.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/__pycache__/ast3.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/__pycache__/conversions.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/_ast27.cp37-win_amd64.pyd
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/_ast3.cp37-win_amd64.pyd
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/ast27.py
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/ast3.py
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/conversions.py
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/tests/__pycache__/test_basics.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/typed_ast/tests/test_basics.py
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt-1.11.2-py3.7.egg-info/PKG-INFO
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt-1.11.2-py3.7.egg-info/SOURCES.txt
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt-1.11.2-py3.7.egg-info/dependency_links.txt
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt-1.11.2-py3.7.egg-info/installed-files.txt
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt-1.11.2-py3.7.egg-info/top_level.txt
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/__init__.py
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/__pycache__/__init__.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/__pycache__/decorators.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/__pycache__/importer.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/__pycache__/wrappers.cpython-37.pyc
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/decorators.py
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/importer.py
create mode 100644 src/main/python/venv/Lib/site-packages/wrapt/wrappers.py
create mode 100644 src/main/python/venv/Scripts/Activate.ps1
create mode 100644 src/main/python/venv/Scripts/activate
create mode 100644 src/main/python/venv/Scripts/activate.bat
create mode 100644 src/main/python/venv/Scripts/deactivate.bat
create mode 100644 src/main/python/venv/Scripts/easy_install-3.7-script.py
create mode 100644 src/main/python/venv/Scripts/easy_install-3.7.exe
create mode 100644 src/main/python/venv/Scripts/easy_install-script.py
create mode 100644 src/main/python/venv/Scripts/easy_install.exe
create mode 100644 src/main/python/venv/Scripts/epylint.exe
create mode 100644 src/main/python/venv/Scripts/isort.exe
create mode 100644 src/main/python/venv/Scripts/pip-script.py
create mode 100644 src/main/python/venv/Scripts/pip.exe
create mode 100644 src/main/python/venv/Scripts/pip3-script.py
create mode 100644 src/main/python/venv/Scripts/pip3.7-script.py
create mode 100644 src/main/python/venv/Scripts/pip3.7.exe
create mode 100644 src/main/python/venv/Scripts/pip3.exe
create mode 100644 src/main/python/venv/Scripts/pylint.exe
create mode 100644 src/main/python/venv/Scripts/pyreverse.exe
create mode 100644 src/main/python/venv/Scripts/python.exe
create mode 100644 src/main/python/venv/Scripts/python_d.exe
create mode 100644 src/main/python/venv/Scripts/pythonw.exe
create mode 100644 src/main/python/venv/Scripts/pythonw_d.exe
create mode 100644 src/main/python/venv/Scripts/symilar.exe
create mode 100644 src/main/python/venv/pyvenv.cfg
(limited to 'src/main/python')
diff --git a/src/main/python/DockWidgets/DistillationColumnStagewiseResults.py b/src/main/python/DockWidgets/DistillationColumnStagewiseResults.py
new file mode 100644
index 0000000..65527dc
--- /dev/null
+++ b/src/main/python/DockWidgets/DistillationColumnStagewiseResults.py
@@ -0,0 +1,18 @@
+import os, sys
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.uic import loadUiType
+from PyQt5.QtWidgets import QWidget
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DistillationColumnStagewiseResults.ui')
+
+
+class DistillationColumnStagewiseResults(QWidget,ui_dialog):
+
+ def __init__(self, parent=None):
+ QWidget.__init__(self, parent)
+ self.setupUi(self)
+ # self.setWindowTitle(self.parent.obj.name)
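
Note: every dock widget in this patch uses the loadUiType pattern seen above: the .ui file is compiled into a form class at import time, and the widget class inherits from both the Qt base class and that generated form class. A minimal, self-contained sketch of the same pattern, where Example.ui is a hypothetical dialog file rather than one shipped in this commit:

    from PyQt5.QtWidgets import QApplication
    from PyQt5.uic import loadUiType

    # loadUiType returns (form_class, base_class) for the given .ui file
    ui_form, ui_base = loadUiType('Example.ui')

    class ExampleWidget(ui_base, ui_form):
        def __init__(self, parent=None):
            super().__init__(parent)
            self.setupUi(self)  # builds the widgets declared in Example.ui

    if __name__ == '__main__':
        app = QApplication([])
        w = ExampleWidget()
        w.show()
        app.exec_()
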
diff --git a/src/main/python/DockWidgets/DockWidget.py b/src/main/python/DockWidgets/DockWidget.py
new file mode 100644
index 0000000..a97212a
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidget.py
@@ -0,0 +1,167 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidget.ui')
+
+class DockWidget(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container, parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = {}
+ self.modes()
+ self.comboBox.currentIndexChanged.connect(self.mode_selection)
+
+ print("constructor ", self.input_dict)
+ self.pushButton_2.clicked.connect(self.param)
+
+ self.dict = {} # a dictionary
+ self.container = container
+
+ # input data tab
+ def modes(self):
+ modes_list = self.obj.modes_list
+ if(modes_list):
+ for j in modes_list:
+ self.comboBox.addItem(str(self.obj.variables[j]['name']))
+ self.comboBox.setCurrentText(self.obj.variables[self.obj.mode]['name'])
+ self.mode_selection()
+ else:
+ self.comboBox.setDisabled(True)
+ self.input_dict= {}
+ self.input_dict = self.obj.param_getter()
+ self.input_params_list()
+
+ def mode_selection(self):
+ self.input_dict= {}
+ for i in reversed(range(self.formLayout.count())):
+ self.formLayout.removeRow(i)
+ print(self.comboBox.currentText())
+ for i in self.obj.variables:
+ if self.obj.variables[i]['name'] == self.comboBox.currentText():
+ currentText = i
+ break
+ self.input_dict = self.obj.param_getter(currentText)
+ print('mode selection ', self.input_dict)
+ self.input_params_list()
+
+ def input_params_list(self):
+ try:
+ print("input_params_list ", self.input_dict)
+ for c,i in enumerate(self.input_dict):
+ #print(i)
+ if i == None:
+ continue
+ l = QLineEdit(str(self.obj.variables[i]['value']))
+ lay = QGridLayout()
+ lay.addWidget(QLabel(self.obj.variables[i]['name']+":"),0,0, alignment=Qt.AlignLeft)
+ lay.addWidget(l,0,1, alignment=Qt.AlignCenter)
+ lay.addWidget(QLabel(self.obj.variables[i]['unit']),0,2, alignment=Qt.AlignCenter)
+ self.formLayout.addRow(lay)
+ self.input_dict[i] = l
+ except Exception as e:
+ print(e)
+
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def param(self):
+ try:
+ self.dict = {}
+ #print("param.input_dict ", self.input_dict)
+ for i in self.input_dict:
+ if (self.input_dict[i] == None):
+ continue
+ else:
+ #print(self.input_dict[i], i, self.obj.type)
+ if (self.input_dict[i].text()):
+ self.dict[i] = self.input_dict[i].text()
+ else:
+ #print(self.input_dict[i].text())
+ self.show_error()
+ break
+
+ #print("param ", self.dict)
+ self.obj.param_setter(self.dict)
+ for i in self.container.graphics.graphicsView.items():
+ try:
+ if(i.name == self.name):
+ i.update_tooltip()
+ except:
+ pass
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+
+ @staticmethod
+ def show_result(lst):
+ for i in lst:
+ try:
+ i.results_category(i.name)
+ except AttributeError:
+ pass
+
+ def clear_results(self):
+ self.tableWidget.setRowCount(0)
+
+ # result data tab
+ def results_category(self,name):
+ flag = True
+ try:
+ #print("Under result category name ", name)
+ result=self.container.result
+ obj = self.container.fetch_object(name)
+ self.tableWidget.setRowCount(0)
+ variKeys = list(obj.variables.keys())
+ #print(variKeys)
+ for i, val in enumerate(variKeys):
+ propertyname = name + '.' + val
+ #print(i,val, propertyname)
+ if propertyname in result[0]:
+ ind = result[0].index(propertyname)
+ resultval = str(result[-1][ind])
+ #print("######Resultsfetch####",val,resultval)
+ rowPosition = self.tableWidget.rowCount()
+ self.tableWidget.insertRow(rowPosition)
+ self.tableWidget.setItem(rowPosition , 0, QTableWidgetItem(obj.variables[val]['name']))
+ self.tableWidget.setItem(rowPosition , 1, QTableWidgetItem(resultval))
+ self.tableWidget.setItem(rowPosition , 2, QTableWidgetItem(obj.variables[val]['unit']))
+ self.tableWidget.resizeColumnsToContents()
+
+ # Updating result in class
+ obj.variables[val]['value'] = resultval
+ # try:
+ # if obj.type == "Heater":
+ # print(obj.variables[val]['name'] + str(obj.variables[val]['value']))
+ # except Exception as e:
+ # print(e)
+
+
+
+ except Exception as e:
+ print(e)
+
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
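
Note: DockWidget is generic; the only contract it relies on is what the code above reads and writes: obj.name, obj.modes_list, obj.mode, obj.variables (a dict of {'name', 'value', 'unit'} records), obj.param_getter(mode), obj.param_setter(dict), and a container exposing graphics.graphicsView, result and fetch_object(name). A rough stub of that contract, for illustration only (StubUnitOp is hypothetical and not one of the real UnitOperations classes):

    class StubUnitOp:
        """Bare-minimum object that DockWidget can display and edit."""
        def __init__(self):
            self.name = 'Heater1'
            self.type = 'Heater'
            self.mode = 'Tout'
            self.modes_list = ['Tout', 'Q']
            self.variables = {
                'Tout': {'name': 'Outlet Temperature', 'value': 300.0, 'unit': 'K'},
                'Q':    {'name': 'Heat Duty',          'value': 0.0,   'unit': 'W'},
            }

        def param_getter(self, mode=None):
            # DockWidget expects a dict keyed by variable ids for the active mode
            key = mode if mode is not None else self.mode
            return {key: self.variables[key]['value']}

        def param_setter(self, params):
            # values arrive as the text typed into the corresponding QLineEdit
            for key, text in params.items():
                self.variables[key]['value'] = text
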
diff --git a/src/main/python/DockWidgets/DockWidgetCompoundSeparator.py b/src/main/python/DockWidgets/DockWidgetCompoundSeparator.py
new file mode 100644
index 0000000..4ae503a
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetCompoundSeparator.py
@@ -0,0 +1,133 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+import pandas as pd
+from functools import partial
+from python.utils.ComponentSelector import *
+from collections import defaultdict
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetCompoundSeparator.ui')
+
+class DockWidgetCompoundSeparator(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container,parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = []
+ self.lst = []
+ self.input_params_list()
+ self.dict = []
+
+ def input_params_list(self):
+ try:
+ if self.type == 'CompoundSeparator':
+ self.lst.clear()
+ self.calculationGroupBox = QGroupBox('Calculation Parameters')
+ self.calculationLayout = QGridLayout()
+
+ r1 = QRadioButton('Stream 1')
+ r1.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
+ r2 = QRadioButton('Stream 2')
+ r2.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
+ if self.obj.variables['SepStrm']['value'] == 1:
+ r1.setChecked(True)
+ r2.setChecked(False)
+ else:
+ r1.setChecked(False)
+ r2.setChecked(True)
+
+
+ self.lst = [r1, r2]
+ self.calculationLayout.addWidget(r1, 0, 1)
+ self.calculationLayout.addWidget(r2, 0, 2)
+
+ for k,val in enumerate(self.obj.compounds):
+ combo = QComboBox()
+ #print("CompoundSeparator combo")
+ for j in self.obj.SepFact_modes:
+ combo.addItem(str(j))
+ #print(self.obj.variables['SepFact_c']['value'][k])
+ combo.setCurrentText(self.obj.variables['SepFact_c']['value'][k])
+ combo.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
+ l = QLineEdit(str(self.obj.variables['SepVal_c']['value'][k]))
+ l.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
+ self.calculationLayout.addWidget(QLabel(val+" :"), k+1,0, alignment=Qt.AlignLeft)
+ self.calculationLayout.addWidget(combo, k+1, 1, alignment=Qt.AlignCenter)
+ self.calculationLayout.addWidget(l,k+1,2, alignment=Qt.AlignCenter)
+ self.lst.append(combo)
+ self.lst.append(l)
+
+ self.calculationLayout.setColumnStretch(3, len(self.obj.compounds)+1)
+ self.calculationGroupBox.setLayout(self.calculationLayout)
+
+ btn = QPushButton('Submit')
+ btn.clicked.connect(self.param)
+
+ self.gridLayout.setVerticalSpacing(5)
+ self.gridLayout.addWidget(self.calculationGroupBox,0,0)
+ self.gridLayout.addWidget(btn,1,0)
+
+ self.input_dict = self.lst
+
+ except Exception as e:
+ print(e)
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def update_compounds(self):
+ try:
+ self.obj.init_variables()
+ t_item = self.calculationGroupBox.layout().itemAt(0)
+ self.calculationGroupBox.layout().removeItem(t_item)
+ while(t_item):
+ t_widget = t_item.widget()
+ if(t_widget):
+ t_widget.setHidden(True)
+ self.calculationGroupBox.layout().removeWidget(t_widget)
+ t_item = self.calculationGroupBox.layout().itemAt(0)
+ self.input_params_list()
+ except Exception as e:
+ print(e)
+
+
+ def param(self):
+ try:
+ self.dict=[]
+
+ self.dict = [self.input_dict[0].isChecked(), self.input_dict[1].isChecked()]
+ j = 2
+ for i in range(len(self.obj.compounds)):
+ self.dict.append(self.input_dict[j+i].currentText())
+ if(self.input_dict[j+i+1].text()):
+ self.dict.append(self.input_dict[j+i+1].text())
+ j += 1
+ else:
+ self.show_error()
+
+
+ self.obj.param_setter(self.dict)
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
diff --git a/src/main/python/DockWidgets/DockWidgetCompressorExpander.py b/src/main/python/DockWidgets/DockWidgetCompressorExpander.py
new file mode 100644
index 0000000..2439801
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetCompressorExpander.py
@@ -0,0 +1,166 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetCompressorExpander.ui')
+
+class DockWidgetCompressorExpander(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container, parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = {}
+ self.x_pclist = []
+ self.modes()
+ self.comboBox.currentIndexChanged.connect(self.mode_selection)
+
+ self.pushButton_2.clicked.connect(self.param)
+ self.dict = {}
+
+ self.name_type = None
+ self.container = container
+
+ # input data tab
+ def modes(self):
+ modes_list = self.obj.modes_list
+ if(modes_list):
+ for j in modes_list:
+ self.comboBox.addItem(str(self.obj.variables[j]['name']))
+ self.mode_selection()
+ else:
+ self.comboBox.setDisabled(True)
+ self.input_dict= {}
+ self.input_dict = self.obj.param_getter()
+ self.input_params_list()
+
+ def mode_selection(self):
+ self.input_dict= {}
+ for i in reversed(range(self.formLayout.count())):
+ self.formLayout.removeRow(i)
+ print(self.comboBox.currentText())
+ for i in self.obj.variables:
+ if self.obj.variables[i]['name'] == self.comboBox.currentText():
+ currentText = i
+ break
+ self.input_dict = self.obj.param_getter(currentText)
+ print('mode selection ', self.input_dict)
+ self.input_params_list()
+
+ def input_params_list(self):
+ try:
+ print("input_params_list ", self.input_dict)
+ for c,i in enumerate(self.input_dict):
+ if i == None:
+ continue
+
+ l = QLineEdit()
+ if self.input_dict[i] != None:
+ l.setText(str(self.input_dict[i]))
+ lay = QGridLayout()
+ lay.addWidget(QLabel(self.obj.variables[i]['name']+":"),0,0, alignment=Qt.AlignLeft)
+ lay.addWidget(l,0,1, alignment=Qt.AlignCenter)
+ lay.addWidget(QLabel(self.obj.variables[i]['unit']),0,2, alignment=Qt.AlignCenter)
+
+ self.formLayout.addRow(lay)
+ self.input_dict[i] = l
+
+ self.lines = [line.rstrip('\n') for line in open('thermopackage.txt')]
+ for j in self.lines:
+ self.cbTP.addItem(str(j))
+ self.input_dict['Thermo Package'] = self.cbTP
+
+ except Exception as e:
+ print(e)
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def param(self):
+ try:
+ self.dict={}
+ for i in self.input_dict:
+ if (self.input_dict[i] == None):
+ continue
+ elif (i == "Thermo Package"):
+ self.dict[i] = self.input_dict[i].currentText()
+ else:
+ print(self.input_dict[i], i, self.obj.type)
+ if (self.input_dict[i].text()):
+ self.dict[i] = self.input_dict[i].text()
+ else:
+ print(self.input_dict[i].text())
+ self.show_error()
+ break
+
+ self.obj.param_setter(self.dict)
+
+ for i in self.container.graphics.graphicsView.items():
+ try:
+ if(i.name == self.name):
+ i.update_tooltip()
+ except:
+ pass
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+
+ @staticmethod
+ def show_result(lst):
+ for i in lst:
+ try:
+ i.results_category(i.name)
+ except AttributeError:
+ pass
+
+ def clear_results(self):
+ self.tableWidget.setRowCount(0)
+
+ # result data tab
+ def results_category(self,name):
+ flag = True
+ try:
+ print("Under result category name ", name)
+ result=self.container.result
+ obj = self.container.fetch_object(name)
+ self.tableWidget.setRowCount(0)
+ variKeys = list(obj.variables.keys())
+ print(variKeys)
+ for i, val in enumerate(variKeys):
+ propertyname = name + '.' + val
+ print(i,val, propertyname)
+ if propertyname in result[0]:
+ ind = result[0].index(propertyname)
+ resultval = str(result[-1][ind])
+ print("######Resultsfetch####",val,resultval)
+ rowPosition = self.tableWidget.rowCount()
+ self.tableWidget.insertRow(rowPosition)
+ self.tableWidget.setItem(rowPosition , 0, QTableWidgetItem(obj.variables[val]['name']))
+ self.tableWidget.setItem(rowPosition , 1, QTableWidgetItem(resultval))
+ self.tableWidget.setItem(rowPosition , 2, QTableWidgetItem(obj.variables[val]['unit']))
+ self.tableWidget.resizeColumnsToContents()
+
+ except Exception as e:
+ print(e)
+
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
diff --git a/src/main/python/DockWidgets/DockWidgetDistillationColumn.py b/src/main/python/DockWidgets/DockWidgetDistillationColumn.py
new file mode 100644
index 0000000..cf71f58
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetDistillationColumn.py
@@ -0,0 +1,375 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+import pandas as pd
+from functools import partial
+from collections import defaultdict
+
+from python.utils.ComponentSelector import *
+from python.DockWidgets.DistillationColumnStagewiseResults import DistillationColumnStagewiseResults
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetDistillationColumn.ui')
+
+
+class DockWidgetDistillationColumn(QDockWidget, ui_dialog):
+
+ def __init__(self,name,comptype,obj,container,parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = []
+ self.pushButton_2.clicked.connect(self.param)
+ self.dict = []
+ self.input_params_list()
+ self.name_type = None
+ self.container = container
+ self.stage_res_table = DistillationColumnStagewiseResults()
+ self.stageResultsButton.clicked.connect(self.showStagewiseResults)
+
+ # input data tab
+
+ def input_params_list(self):
+ try:
+ print("input_params_list ", self.input_dict)
+
+ # tab 1
+
+ l1 = QLineEdit()
+ l1.setText(str(self.obj.variables['Nt']['value']))
+ self.lay1.addWidget(QLabel(self.obj.variables['Nt']['name'] + " :"), 0 ,0, alignment=Qt.AlignLeft)
+ self.lay1.addWidget(l1,0,1, alignment=Qt.AlignLeft)
+ self.input_dict.append(l1)
+
+ for i in range(self.obj.variables['Ni']['value']):
+ print(i)
+ l = QLineEdit()
+                if len(self.obj.variables['InT_s']['value']) != 0:
+ l.setText(str(self.obj.variables['InT_s']['value'][i]))
+ self.lay1.addWidget(QLabel(self.obj.variables['InT_s']['name'] +" " + str(i+1) + " location :"),2*(i+1),0, alignment=Qt.AlignLeft)
+ self.lay1.addWidget(l,2*(i+1),1, alignment=Qt.AlignLeft)
+ self.input_dict.append(l)
+
+ # tab 2
+ self.l4.setText(self.obj.variables['Ctype']['name']+":")
+
+ self.u1.setText(self.obj.variables['Ctype']['unit'])
+ self.l5.setText(self.obj.variables['Pcond']['name']+":")
+ self.le5.setText(str(self.obj.variables['Pcond']['value']))
+ self.u2.setText(self.obj.variables['Pcond']['unit'])
+ self.l6.setText(self.obj.variables['C_Spec']['name']+":")
+ self.le6.setText(str(self.obj.variables['C_Spec']['value']))
+ self.l7.setText("Compounds :")
+
+ self.cb5.addItem("Total")
+ self.cb5.addItem("Partial")
+ self.cb5.setCurrentText(self.obj.variables['Ctype']['value'])
+ for j in self.obj.Cspec_list:
+ self.cb1.addItem(str(j))
+ self.cb1.setCurrentText(self.obj.variables['C_Spec']['type'])
+ for j in self.obj.compounds:
+ self.cb2.addItem(str(j))
+ self.cb2.setCurrentText(self.obj.variables['C_Spec']['comp'])
+
+ self.cb2.setDisabled(True)
+ self.cb1.currentIndexChanged.connect(self.fun2)
+
+ self.input_dict.append(self.cb5)
+ self.input_dict.append(self.le5)
+ self.input_dict.append(self.cb1)
+ self.input_dict.append(self.cb2)
+ self.input_dict.append(self.le6)
+
+ # tab3
+ self.l8.setText(self.obj.variables['Preb']['name']+":")
+ self.le7.setText(str(self.obj.variables['Preb']['value']))
+ self.u3.setText(self.obj.variables['Preb']['unit'])
+ self.l9.setText(self.obj.variables['R_Spec']['name']+":")
+ self.le8.setText(str(self.obj.variables['R_Spec']['value']))
+ self.l10.setText('Compounds')
+
+ for j in self.obj.Rspec_list:
+ self.cb3.addItem(str(j))
+ self.cb3.setCurrentText(self.obj.variables['R_Spec']['type'])
+ for j in self.obj.compounds:
+ self.cb4.addItem(str(j))
+ self.cb4.setCurrentText(self.obj.variables['R_Spec']['comp'])
+ self.cb4.setDisabled(True)
+ self.cb3.currentIndexChanged.connect(self.fun3)
+
+ self.input_dict.append(self.le7)
+ self.input_dict.append(self.cb3)
+ self.input_dict.append(self.cb4)
+ self.input_dict.append(self.le8)
+
+ self.lines = [line.rstrip('\n') for line in open('thermopackage.txt')]
+ for j in self.lines:
+ self.cbTP.addItem(str(j))
+ self.cbTP.setCurrentText(self.obj.variables['thermo_package']['value'])
+
+ self.input_dict.append(self.cbTP)
+
+ # self.input_dict = [self.le1, self.le2, self.le3, self.cb5, self.le5, self.cb1, self.cb2, self.le6, self.le7, self.cb3, self.cb4, self.le8]
+
+ except Exception as e:
+ print(e)
+
+ def update_compounds(self):
+ self.cb2.clear()
+ self.cb4.clear()
+ for j in self.obj.compounds:
+ self.cb2.addItem(str(j))
+ self.cb2.setCurrentText(self.obj.variables['C_Spec']['comp'])
+ for j in self.obj.compounds:
+ self.cb4.addItem(str(j))
+ self.cb4.setCurrentText(self.obj.variables['R_Spec']['comp'])
+
+ def fun2(self):
+ if self.cb1.currentText() == 'Compound Molar Fraction' or self.cb1.currentText() == 'Compound Molar Flow (mol/s)':
+ self.cb2.setDisabled(False)
+ else:
+ self.cb2.setDisabled(True)
+
+ def fun3(self):
+ if self.cb3.currentText() == 'Compound Molar Fraction' or self.cb3.currentText() == 'Compound Molar Flow (mol/s)':
+ self.cb4.setDisabled(False)
+ else:
+ self.cb4.setDisabled(True)
+
+ def Show_Error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def param(self):
+ try:
+ self.dict= []
+ temp = 0
+ print("param.input_dict ", self.input_dict)
+ self.dict.append(int(self.input_dict[0].text()))
+
+ for i in range(self.obj.variables['Ni']['value']):
+ self.dict.append(int(self.input_dict[i+1].text()))
+ temp = i + 1
+ print(temp)
+
+ print(temp)
+ print(self.input_dict[temp+1])
+ self.dict.append(self.input_dict[temp+1].currentText())
+ print(temp+1)
+ self.dict.append(int(self.input_dict[temp+2].text()))
+ print(temp+2)
+ self.dict.append(self.input_dict[temp+3].currentText())
+ print(temp+3)
+ self.dict.append(self.input_dict[temp+4].currentText())
+ print(temp+4)
+ self.dict.append(int(self.input_dict[temp+5].text()))
+ print(temp+5)
+ self.dict.append(int(self.input_dict[temp+6].text()))
+ print(temp+6)
+ self.dict.append(self.input_dict[temp+7].currentText())
+ print(temp+7)
+ self.dict.append(self.input_dict[temp+8].currentText())
+ print(temp+8)
+ self.dict.append(int(self.input_dict[temp+9].text()))
+ print(temp+9)
+ self.dict.append(self.input_dict[temp+10].currentText())
+ print(temp + 10)
+
+ print("param ", self.dict)
+ self.obj.param_setter(self.dict)
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+
+ def showStagewiseResults(self):
+ self.stage_res_table.show()
+
+ @staticmethod
+ def showResult(lst):
+ # DockWidget1.flag = True
+ for i in lst:
+ try:
+ i.results_category(i.name)
+ except AttributeError:
+ pass
+
+ def clear_results(self):
+ self.tableWidget.setRowCount(0)
+ self.stage_res_table.T_table.setRowCount(0)
+ self.stage_res_table.T_table.setColumnCount(0)
+ self.stage_res_table.x_pc_table.setRowCount(0)
+ self.stage_res_table.x_pc_table.setColumnCount(0)
+
+ # result data tab
+ def results_category(self,name):
+ flag = True
+ try:
+ print("Under result category name ", name)
+ result = self.container.result
+ obj = self.container.fetch_object(name)
+ self.tableWidget.setRowCount(0)
+ variKeys = obj.result_parameters
+ print(variKeys)
+ for i, val in enumerate(variKeys):
+ propertyname = name + '.' + val
+ print(i, val, propertyname)
+ if propertyname in result[0]:
+ ind = result[0].index(propertyname)
+ resultval = str(result[-1][ind])
+ obj.variables[val]['value'] = result[-1][ind]
+ print("######Resultsfetch####", val, resultval)
+ rowPosition = self.tableWidget.rowCount()
+ self.tableWidget.insertRow(rowPosition)
+ self.tableWidget.setItem(rowPosition, 0, QTableWidgetItem(obj.variables[val]['name']))
+ self.tableWidget.setItem(rowPosition, 1, QTableWidgetItem(resultval))
+ self.tableWidget.setItem(rowPosition, 2, QTableWidgetItem(obj.variables[val]['unit']))
+ self.tableWidget.resizeColumnsToContents()
+
+ # Stagewise Results
+ Nt = self.obj.variables['Nt']['value']
+ Nc = len(self.obj.compounds)
+ # initializing temporary arrays
+ Stages_T = [None for i in range(Nt)]
+ # Can be uncommented when F_p and F_pc implemented in modelica table
+ # Stages_F_p = [[None for i in range(3)] for j in range(Nt)]
+ # Stages_F_pc = [[[None for i in range(3)] for j in range(Nc)] for k in range(Nt)]
+ Stages_x_pc = [[[None for i in range(3)] for j in range(Nc)] for k in range(Nt)]
+
+ Stages_res_varikeys = ['T']
+ for i in range(Nc):
+ for j in range(3):
+ Stages_res_varikeys.append('x_pc[' + str(j + 1) + ',' + str(i + 1) + ']')
+
+ for v in Stages_res_varikeys:
+ propertyname = name + '.condenser.' + v
+ if propertyname in result[0]:
+ ind = result[0].index(propertyname)
+ if v == 'T':
+ Stages_T[0] = result[-1][ind]
+ # Can be uncommented when F_p is implemented in modelica model
+ # elif v == 'F_p':
+ # if result[0][ind][result[0][ind].index('[') + 1] == '1':
+ # Stages_F_p[0][0] = result[-1][ind]
+ # elif result[0][ind][result[0][ind].index('[') + 1] == '2':
+ # Stages_F_p[0][1] = result[-1][ind]
+ # else:
+ # Stages_F_p[0][2] = result[-1][ind]
+ else:
+ print(ind)
+ phase_no = int(result[0][ind][result[0][ind].index('[') + 1])
+ comp_no = int(result[0][ind][result[0][ind].index(']') - 1])
+ Stages_x_pc[0][comp_no - 1][phase_no - 1] = result[-1][ind]
+ # Can be uncommented and improved when F_pc implemented in modelica model
+ # if v == 'F_pc':
+ # Stages_F_pc[0][comp_no - 1][phase_no - 1] = result[-1][ind]
+ # else:
+ # Stages_x_pc[0][comp_no - 1][phase_no - 1] = result[-1][ind]
+
+ for i in range(1, Nt - 1):
+ propertyname = name + '.tray[' + str(i) + '].' + v
+ if propertyname in result[0]:
+ ind = result[0].index(propertyname)
+ if v == 'T':
+ Stages_T[i] = result[-1][ind]
+ # Can be uncommented when F_p implemented in modelica model
+ # elif v == 'F_p':
+ # if result[0][ind][result[0][ind].index('[') + 1] == '1':
+ # Stages_F_p[i][0] = result[-1][ind]
+ # elif result[0][ind][result[0][ind].index('[') + 1] == '2':
+ # Stages_F_p[i][1] = result[-1][ind]
+ # else:
+ # Stages_F_p[i][2] = result[-1][ind]
+ else:
+ print(ind)
+ print(result[0][ind])
+ phase_no = int(result[0][ind].split('.')[-1][result[0][ind].split('.')[-1].index('[') + 1])
+ comp_no = int(result[0][ind].split('.')[-1][result[0][ind].split('.')[-1].index(']') - 1])
+ Stages_x_pc[i][comp_no - 1][phase_no - 1] = result[-1][ind]
+ # Can be uncommented when F_pc implemented in modelica model
+ # if v == 'F_pc':
+ # Stages_F_pc[i][comp_no - 1][phase_no - 1] = result[-1][ind]
+ # else:
+ # Stages_x_pc[i][comp_no - 1][phase_no - 1] = result[-1][ind]
+
+ propertyname = name + '.reboiler.' + v
+ if propertyname in result[0]:
+ ind = result[0].index(propertyname)
+ if v == 'T':
+ Stages_T[-1] = result[-1][ind]
+ # Can be uncommented when F_p implemented in modelica model
+ # elif v == 'F_p':
+ # if result[0][ind][result[0][ind].index('[') + 1] == '1':
+ # Stages_F_p[-1][0] = result[-1][ind]
+ # elif result[0][ind][result[0][ind].index('[') + 1] == '2':
+ # Stages_F_p[-1][1] = result[-1][ind]
+ # else:
+ # Stages_F_p[-1][2] = result[-1][ind]
+ else:
+ print(ind)
+ phase_no = int(result[0][ind][result[0][ind].index('[') + 1])
+ comp_no = int(result[0][ind][result[0][ind].index(']') - 1])
+ Stages_x_pc[-1][comp_no - 1][phase_no - 1] = result[-1][ind]
+ # Can be uncommented when F_pc implemented in modelica model
+ # if v == 'F_pc':
+ # Stages_F_pc[-1][comp_no - 1][phase_no - 1] = result[-1][ind]
+ # else:
+ # Stages_x_pc[-1][comp_no - 1][phase_no - 1] = result[-1][ind]
+
+            # Assigning temp variables to obj variables
+ self.obj.variables['Stages.T']['value'] = Stages_T
+ # Can be uncommented when F_p and F_pc implemented in modelica model
+ # self.obj.variables['Stages.F_p']['value'] = Stages_F_p
+ # self.obj.variables['Stages.F_pc']['value'] = Stages_F_pc
+ self.obj.variables['Stages.x_pc']['value'] = Stages_x_pc
+
+ # filling stagewise result table
+ tables = [self.stage_res_table.T_table, self.stage_res_table.x_pc_table]
+ # Can be uncommented when F_p and F_pc implemented in modelica model
+ # tables = [self.stage_res_table.T_table, self.stage_res_table.F_p_table, self.stage_res_table.F_pc_table, self.stage_res_table.x_pc_table]
+
+ for t in tables:
+ t.setRowCount(Nt)
+ t.setVerticalHeaderItem(0, QTableWidgetItem('Condenser'))
+ t.setVerticalHeaderItem(Nt -1, QTableWidgetItem('Reboiler'))
+ for i in range(1, Nt - 1):
+ t.setVerticalHeaderItem(i, QTableWidgetItem('Stage ' + str(i)))
+
+ T_table = self.stage_res_table.T_table
+ T_table.setColumnCount(1)
+ for i in range(Nt):
+ T_table.setItem(i, 0, QTableWidgetItem(Stages_T[i]))
+
+ x_pc_table = self.stage_res_table.x_pc_table
+ x_pc_table.setColumnCount(2*Nc)
+ for i in range(Nc):
+ x_pc_table.setHorizontalHeaderItem(2*i, QTableWidgetItem(self.obj.compounds[i] + '(Vapor)'))
+ x_pc_table.setHorizontalHeaderItem(2*i + 1, QTableWidgetItem(self.obj.compounds[i] + '(Liquid)'))
+
+ for i in range(Nt):
+ for j in range(Nc):
+ x_pc_table.setItem(i, 2*j, QTableWidgetItem(Stages_x_pc[i][j][1]))
+ x_pc_table.setItem(i, 2 * j + 1, QTableWidgetItem(Stages_x_pc[i][j][2]))
+
+ for t in tables:
+ t.resizeColumnsToContents()
+ except Exception as e:
+ print(e)
+
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
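
Note: the stagewise block above recovers phase and compound indices by scanning the flattened simulation result names ('x_pc[p,c]') character by character. The same lookup written out as a small helper, assuming, as the surrounding code does, that result[0] holds the variable names and result[-1] the matching row of final values (stage_mole_fractions and its arguments are illustrative only):

    def stage_mole_fractions(result, prefix, n_comp, n_phase=3):
        """Collect x_pc[phase, comp] for one stage, e.g. prefix = 'Column1.condenser'."""
        names, values = result[0], result[-1]
        x_pc = [[None] * n_phase for _ in range(n_comp)]
        for p in range(1, n_phase + 1):
            for c in range(1, n_comp + 1):
                key = '%s.x_pc[%d,%d]' % (prefix, p, c)
                if key in names:
                    x_pc[c - 1][p - 1] = values[names.index(key)]
        return x_pc
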
diff --git a/src/main/python/DockWidgets/DockWidgetFlash.py b/src/main/python/DockWidgets/DockWidgetFlash.py
new file mode 100644
index 0000000..7742e62
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetFlash.py
@@ -0,0 +1,86 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetFlash.ui')
+
+class DockWidgetFlash(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container,parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = []
+ self.input_params_list()
+ self.btn.clicked.connect(self.param)
+ self.dict = [] # a list
+
+ def input_params_list(self):
+ try:
+ self.l1.setText(self.obj.variables['thermo_package']['name']+":")
+ self.lines = [line.rstrip('\n') for line in open('thermopackage.txt')]
+ for j in self.lines:
+ self.cb1.addItem(str(j))
+ self.cb1.setCurrentText(self.obj.variables['thermo_package']['value'])
+
+ self.check1.setText(self.obj.variables['Tdef']['name']+":")
+ self.le2.setText(str(self.obj.variables['Tdef']['value']))
+ self.u2.setText(self.obj.variables['Tdef']['unit'])
+ self.check1.toggled.connect(self.fun)
+ self.check1.setChecked(self.obj.variables['BTdef']['value'])
+ self.check2.setText(self.obj.variables['Pdef']['name']+":")
+ self.le3.setText(str(self.obj.variables['Pdef']['value']))
+ self.u3.setText(self.obj.variables['Pdef']['unit'])
+ self.check2.toggled.connect(self.fun)
+ self.check2.setChecked(self.obj.variables['BPdef']['value'])
+
+ self.input_dict = [self.cb1, self.check1, self.le2, self.check2, self.le3]
+
+ except Exception as e:
+ print(e)
+
+ def fun(self):
+ if self.check1.isChecked():
+ self.le2.setDisabled(False)
+ else:
+ self.le2.setDisabled(True)
+ if self.check2.isChecked():
+ self.le3.setDisabled(False)
+ else:
+ self.le3.setDisabled(True)
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def param(self):
+ try:
+ self.dict = []
+ print("param.input_dict ", self.input_dict)
+ self.dict = [self.input_dict[0].currentText(),self.input_dict[1].isChecked(), float(self.input_dict[2].text()), self.input_dict[3].isChecked(), float(self.input_dict[4].text())]
+ print("param ", self.dict)
+ self.obj.param_setter(self.dict)
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
diff --git a/src/main/python/DockWidgets/DockWidgetMaterialStream.py b/src/main/python/DockWidgets/DockWidgetMaterialStream.py
new file mode 100644
index 0000000..5bc77c5
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetMaterialStream.py
@@ -0,0 +1,373 @@
+import os, sys
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetMaterialStream.ui')
+
+class DockWidgetMaterialStream(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container,parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = {}
+ self.x_pclist = []
+
+ self.comboBox.currentIndexChanged.connect(self.mode_selection)
+
+ self.pushButton_2.clicked.connect(self.param)
+ self.dict = {} # a dictionary
+
+ self.name_type = None
+ self.container = container
+
+ header = QTreeWidgetItem(['Compound','Value','Unit'])
+ self.mTreeWidget.setHeaderItem(header)
+ self.lTreeWidget.setHeaderItem(header)
+ self.vTreeWidget.setHeaderItem(header)
+ lines = [line.rstrip('\n') for line in open(parentPath+'/python/utils/thermopackage.txt')]
+ for j in lines:
+ self.cbTP.addItem(str(j))
+ self.modes()
+
+ # input data tab
+ def modes(self):
+ modes_list = self.obj.modes_list
+ if(modes_list):
+ for j in modes_list:
+ self.comboBox.addItem(str(j))
+ self.comboBox.setCurrentText(self.obj.mode)
+ self.mode_selection()
+ else:
+ self.input_dict= {}
+ self.input_dict = self.obj.param_getter()
+ self.input_params_list()
+
+ def mode_selection(self):
+ self.input_dict= {}
+ try: # removing existing rows while changing modes
+ for i in reversed(range(self.formLayout.count())):
+ self.formLayout.removeRow(i)
+ except Exception as e:
+ print(e)
+ self.input_dict = self.obj.param_getter(self.comboBox.currentText())
+ self.obj.mode = self.comboBox.currentText()
+ self.input_params_list()
+
+ def input_params_list(self):
+ try:
+ for c,i in enumerate(self.input_dict):
+ if(i=="x_pc"):
+ noc = len(compound_selected)
+ #print(noc)
+ self.x_pclist.clear()
+
+ self.comp_gb = QGroupBox("Mole Fractions")
+ lay = QGridLayout()
+ for j in range(noc):
+ try:
+ l = QLineEdit(str(self.obj.variables['x_pc']['value'][j]))
+ except:
+ l = QLineEdit()
+ # if self.input_dict[i] != '':
+ # l.setText(str(self.obj.variables['x_pc']['value'][j]))
+ # print('l = ', str(self.obj.variables['x_pc']['value'][j]))
+
+ self.input_dict[i] = "x_pc"
+ lay.addWidget(QLabel(str(compound_selected[j])+":"),j,0, alignment= Qt.AlignLeft)
+ lay.addWidget(l,j,1, alignment=Qt.AlignCenter)
+ self.x_pclist.append(l)
+ lay.setSizeConstraint(QLayout.SetFixedSize)
+ self.comp_gb.setLayout(lay)
+ self.formLayout.addRow(self.comp_gb)
+ elif i == "Thermo Package":
+ self.cbTP.setCurrentText(self.input_dict[i])
+ else:
+ #print("elseloop")
+ l = QLineEdit()
+ if self.input_dict[i] != None:
+ l.setText(str(self.input_dict[i]))
+
+ lay = QGridLayout()
+ if i !='MolFlow':
+ lay.addWidget(QLabel(self.obj.variables[i]['name']+":"),0,0, alignment=Qt.AlignLeft)
+ else:
+ lay.addWidget(QLabel(i+":"),0,0, alignment=Qt.AlignLeft)
+ lay.addWidget(l,0,1, alignment=Qt.AlignCenter)
+ if(i != 'MolFlow'):
+ lay.addWidget(QLabel(self.obj.variables[i]['unit']),0,2, alignment=Qt.AlignCenter)
+ else:
+ lay.addWidget(QLabel("mol/s"),0,2, alignment=Qt.AlignCenter)
+ self.formLayout.addRow(lay)
+ self.input_dict[i] = l
+
+
+ except Exception as e:
+ print(e)
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def update_compounds(self):
+ try:
+ noc = len(compound_selected)
+ #print(noc)
+ self.x_pclist.clear()
+
+ lay = QGridLayout()
+ for j in range(noc):
+ l = QLineEdit()
+ lay.addWidget(QLabel(str(compound_selected[j]) + ":"), j, 0, alignment=Qt.AlignLeft)
+ lay.addWidget(l, j, 1, alignment=Qt.AlignCenter)
+ self.x_pclist.append(l)
+ lay.setSizeConstraint(QLayout.SetFixedSize)
+ self.comp_gb.setLayout(lay)
+ indexx = self.comboBox.currentIndex()
+ self.comboBox.setCurrentIndex(1)
+ self.comboBox.setCurrentIndex(indexx)
+ self.obj.init_variables()
+ except Exception as e:
+ print(e)
+
+ def param(self):
+ try:
+ self.dict={}
+ #print("param.input_dict ", self.input_dict)
+ for i in self.input_dict:
+ #print(i)
+ if(i =="x_pc"):
+ l=[]
+ mf = []
+ total_moles = 0
+ for mol_frac in self.x_pclist:
+ if (mol_frac.text()):
+ l.append(mol_frac.text())
+ total_moles += float(l[-1])
+ else:
+ self.show_error()
+ break
+ for c in range(len(compound_selected)):
+ mf.append(str(float(l[c])/total_moles))
+ self.obj.variables[compound_selected[c]]['value'] = str(float(l[c])/total_moles)
+ self.x_pclist[c].setText(mf[-1])
+ self.dict[i] = ",".join(mf)
+ elif (i == "Thermo Package"):
+ self.dict[i] = self.cbTP.currentText()
+ else:
+ if (self.input_dict[i].text()):
+ self.dict[i] = self.input_dict[i].text()
+ else:
+ #print(self.input_dict[i])
+ self.show_error()
+ break
+
+ #print("param ", self.dict)
+
+ self.obj.param_setter(self.dict)
+
+ for i in self.container.graphics.graphicsView.items():
+ try:
+ if(i.name == self.name):
+ i.update_tooltip()
+ except:
+ pass
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+
+ def update_input_values(self):
+ self.init()
+
+ @staticmethod
+ def show_result(ms_lst):
+ for i in ms_lst:
+ i.results_category(i.name)
+
+ def clear_results(self):
+ self.mTreeWidget.clear()
+ self.mTableWidget.setRowCount(0)
+ self.lTreeWidget.clear()
+ self.lTableWidget.setRowCount(0)
+ self.vTreeWidget.clear()
+ self.vTableWidget.setRowCount(0)
+
+ # result data tab
+ def results_category(self,name):
+ try:
+ #print("Under result category name ", name)
+ result=self.container.result
+ obj = self.container.fetch_object(name)
+
+
+ d = {"Mole Fraction":"x_pc", "Mass Fraction":"xm_pc", "Mole Flow":"F_pc", "Mass Flow":"Fm_pc"}
+ ms_lst = list(d.keys())
+ klst = list(d.values())
+
+ p = {"Pressure":"P", "Temperature":"T","Vapour Phase Mole Fraction":"xvap", "Phase Molar Enthalpy":"H_p",
+ "Phase Molar Entropy":"S_p", "Molar Flow Rate":"F_p","Mass Flow Rate":"Fm_p"}
+
+ # Amounts Tab
+ if obj.type == 'MaterialStream':
+ ll = [] # list for basis names
+ for basis in d:
+ propertyname = name + '.' + d[basis]
+ #print("basis ", basis, propertyname)
+ for i in result[0]:
+ if (propertyname in i):
+ ll.append(i)
+ #print(ll)
+
+ j = 0
+ namee = 'none'
+ #print("namee ", namee)
+ #initialization for treewidgets
+ lroot = 1
+ mroot = 1
+ vroot = 1
+
+
+ for i,k in enumerate(ll):
+ ind = result[0].index(k)
+ #print("index ", ind)
+ #print("str ", k)
+ resultval = str(result[-1][ind])
+ #print("######Resultsfetch####",resultval)
+ #print(k[k.find(".")+1:k.find("[")])
+ obj.variables[k.split('.')[1]]['value'] = resultval
+
+ if namee not in k:
+ mroot = QTreeWidgetItem(self.mTreeWidget, [ms_lst[j]])
+ lroot = QTreeWidgetItem(self.lTreeWidget, [ms_lst[j]])
+ vroot = QTreeWidgetItem(self.vTreeWidget, [ms_lst[j]])
+ namee = klst[j]
+
+ phase_no = int(k[k.index(',') - 1]) # phase no is from modelica list
+ compound_no = int(k[k.index(',') + 1]) - 1 # compound is from python list
+
+ if phase_no == 1:
+ child = QTreeWidgetItem(mroot, [compound_selected[compound_no], str(round(float(resultval),4)),
+ obj.variables[k.split('.')[1]]['unit']])
+ elif phase_no == 2:
+ child = QTreeWidgetItem(lroot, [compound_selected[compound_no], str(round(float(resultval),4)),
+ obj.variables[k.split('.')[1]]['unit']])
+ elif phase_no == 3:
+ child = QTreeWidgetItem(vroot, [compound_selected[compound_no], str(round(float(resultval),4)),
+ obj.variables[k.split('.')[1]]['unit']])
+ if (compound_no + 1) == len(compound_selected):
+ j += 1
+
+
+
+ # Phase Properties Tab
+ phaseResLst = []
+ for phase in p:
+ propertyname = name + '.' + p[phase]
+ #print("phase ", phase, propertyname)
+ for i in result[0]:
+ if i.find('['):
+ if (propertyname == i[0:i.find('[')]):
+ phaseResLst.append(i)
+ if propertyname == i:
+ phaseResLst.append(i)
+ #print(phaseResLst)
+
+ self.mTableWidget.setRowCount(0)
+ self.lTableWidget.setRowCount(0)
+ self.vTableWidget.setRowCount(0)
+
+ for i,val in enumerate(phaseResLst):
+ ind = result[0].index(val)
+ resultval = str(result[-1][ind])
+ #print(resultval, i, val)
+ obj.variables[val.split('.')[1]]['value'] = resultval
+ if '[' in val:
+ #print(val)
+ temp = val[val.find('.')+1:val.find('[')]
+ #print(temp)
+ if '1' in val.split('.')[1]:
+ #print(obj.variables[val.split('.')[1]]['name'])
+ mrowPosition = self.mTableWidget.rowCount()
+ self.mTableWidget.insertRow(mrowPosition)
+ self.mTableWidget.setItem(mrowPosition , 0, QTableWidgetItem(obj.variables[val.split('.')[1]]['name']))
+ self.mTableWidget.setItem(mrowPosition , 1, QTableWidgetItem(str(round(float(resultval),4))))
+ self.mTableWidget.setItem(mrowPosition , 2, QTableWidgetItem(obj.variables[val.split('.')[1]]['unit']))
+ self.mTableWidget.resizeColumnsToContents()
+
+ if '2' in val.split('.')[1]:
+ lrowPosition = self.lTableWidget.rowCount()
+ self.lTableWidget.insertRow(lrowPosition)
+ self.lTableWidget.setItem(lrowPosition , 0, QTableWidgetItem(obj.variables[val.split('.')[1]]['name']))
+ self.lTableWidget.setItem(lrowPosition , 1, QTableWidgetItem(str(round(float(resultval),4))))
+ self.lTableWidget.setItem(lrowPosition , 2, QTableWidgetItem(obj.variables[val.split('.')[1]]['unit']))
+ self.lTableWidget.resizeColumnsToContents()
+ if '3' in val.split('.')[1]:
+ vrowPosition = self.vTableWidget.rowCount()
+ self.vTableWidget.insertRow(vrowPosition)
+ self.vTableWidget.setItem(vrowPosition , 0, QTableWidgetItem(obj.variables[val.split('.')[1]]['name']))
+ self.vTableWidget.setItem(vrowPosition , 1, QTableWidgetItem(str(round(float(resultval),4))))
+ self.vTableWidget.setItem(vrowPosition , 2, QTableWidgetItem(obj.variables[val.split('.')[1]]['unit']))
+ self.vTableWidget.resizeColumnsToContents()
+ if not '[' in val:
+ #print(obj.variables[val.split('.')[1]]['name'])
+ mrowPosition = self.mTableWidget.rowCount()
+ self.mTableWidget.insertRow(mrowPosition)
+ self.mTableWidget.setItem(mrowPosition , 0, QTableWidgetItem(obj.variables[val.split('.')[1]]['name']))
+ self.mTableWidget.setItem(mrowPosition , 1, QTableWidgetItem(str(round(float(resultval),4))))
+ self.mTableWidget.setItem(mrowPosition , 2, QTableWidgetItem(obj.variables[val.split('.')[1]]['unit']))
+ self.mTableWidget.resizeColumnsToContents()
+
+
+ # updating the input data from fetched results from simulation
+ #print(self.comboBox.currentText())
+
+ self.input_dict = {}
+ self.input_dict = self.obj.param_getter(self.comboBox.currentText())
+ # print("before", self.input_dict)
+ #self.input_dict.pop("x_pc")
+ # temp = self.input_dict.pop('thermo_package')
+ for i in range(len(compound_selected)):
+ #print(i)
+ self.input_dict['x_pc[1,' + str(i+1) + ']'] = self.obj.variables['x_pc[1,' + str(i+1) +']']['value']
+ # self.input_dict['thermo_package'] = temp
+ # print("after", self.input_dict)
+
+ # changing index for updating the input data
+ indexx = self.comboBox.currentIndex()
+ self.comboBox.setCurrentIndex(1)
+ self.comboBox.setCurrentIndex(indexx)
+
+ try:
+
+ for i in self.parent().container.graphics.graphicsView.items():
+ try:
+ if i.obj == self.obj:
+ i.update_tooltip()
+ except Exception as e:
+ pass
+ except Exception as e:
+ print(e)
+
+
+ except Exception as e:
+ print(e)
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+
\ No newline at end of file
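
Note: DockWidgetMaterialStream.param() normalizes whatever the user types into the x_pc fields so the mole fractions sum to 1 before they are written back to the stream object and echoed into the line edits. The arithmetic reduces to:

    def normalize(fractions):
        """[2, 1, 1] -> [0.5, 0.25, 0.25]; mirrors the x_pc loop in param() above."""
        total = sum(float(x) for x in fractions)
        return [float(x) / total for x in fractions]
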
diff --git a/src/main/python/DockWidgets/DockWidgetMixer.py b/src/main/python/DockWidgets/DockWidgetMixer.py
new file mode 100644
index 0000000..5d89589
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetMixer.py
@@ -0,0 +1,66 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetMixer.ui')
+
+class DockWidgetMixer(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container,parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = []
+ self.x_pclist = []
+ self.input_params_list()
+ self.btn.clicked.connect(self.param)
+ self.dict = {}
+
+ # input data tab
+ def input_params_list(self):
+ try:
+ self.l1.setText(self.obj.variables['NI']['name']+":")
+ self.le1.setText(str(self.obj.variables['NI']['value']))
+ self.u1.setText(self.obj.variables['NI']['unit'])
+ for i in self.obj.Pout_modes:
+ self.cb2.addItem(str(i))
+ self.cb2.setCurrentText(self.obj.variables['outPress']['value'])
+
+ self.l2.setText(self.obj.variables['outPress']['name']+":")
+ self.input_dict = [self.le1, self.cb2]
+
+ except Exception as e:
+ print(e)
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def param(self):
+ try:
+ self.dict={}
+ self.dict = [int(self.input_dict[0].text()), self.input_dict[1].currentText()]
+ self.obj.param_setter(self.dict)
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
diff --git a/src/main/python/DockWidgets/DockWidgetShortcutColumn.py b/src/main/python/DockWidgets/DockWidgetShortcutColumn.py
new file mode 100644
index 0000000..43d01e9
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetShortcutColumn.py
@@ -0,0 +1,148 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetShortcutColumn.ui')
+
+class DockWidgetShortcutColumn(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container,parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = []
+ self.input_params_list()
+ self.btn.clicked.connect(self.param)
+ self.dict = []
+
+ self.name_type = None
+ self.container = container
+
+ # input data tab
+ def input_params_list(self):
+ try:
+ self.l1.setText(self.obj.variables['HKey']['name']+":")
+ self.l2.setText(self.obj.variables['LKey']['name']+":")
+
+ print(self.obj.compounds)
+ for i in self.obj.compounds:
+ self.cb1.addItem(str(i))
+ self.cb2.addItem(str(i))
+ self.cb1.setCurrentText(self.obj.compounds[int(self.obj.variables['HKey']['value'])-1])
+ self.cb2.setCurrentText(self.obj.compounds[int(self.obj.variables['LKey']['value'])-1])
+
+ self.l3.setText(self.obj.variables['HKey_x_pc']['name']+":")
+ self.le3.setText(str(self.obj.variables['HKey_x_pc']['value']))
+ self.u3.setText(self.obj.variables['HKey_x_pc']['unit'])
+ self.l4.setText(self.obj.variables['LKey_x_pc']['name']+":")
+ self.u4.setText(self.obj.variables['LKey_x_pc']['unit'])
+ self.le4.setText(str(self.obj.variables['LKey_x_pc']['value']))
+
+ self.l5.setText(self.obj.variables['Ctype']['name']+":")
+ self.cb5.addItem('Total')
+ self.cb5.addItem('Partial')
+ self.cb5.setCurrentText(self.obj.variables['Ctype']['value'])
+
+ self.l6.setText(self.obj.variables['Pcond']['name']+":")
+ self.le6.setText(str(self.obj.variables['Pcond']['value']))
+ self.u6.setText(self.obj.variables['Pcond']['unit'])
+ self.l7.setText(self.obj.variables['Preb']['name']+":")
+ self.u7.setText(self.obj.variables['Preb']['unit'])
+ self.le7.setText(str(self.obj.variables['Preb']['value']))
+
+ self.l8.setText(self.obj.variables['RR']['name']+":")
+ self.le8.setText(str(self.obj.variables['RR']['value']))
+
+ self.l9.setText("Thermo Package :")
+
+ self.lines = [line.rstrip('\n') for line in open('thermopackage.txt')]
+ for j in self.lines:
+ self.cb6.addItem(str(j))
+ self.cb6.setCurrentText(self.obj.variables['thermo_package']['value'])
+
+ self.input_dict = [self.cb1, self.cb2, self.le3, self.le4, self.cb5, self.le6, self.le7, self.le8, self.cb6]
+
+ except Exception as e:
+ print(e)
+
+ def update_compounds(self):
+ self.cb1.clear()
+ self.cb2.clear()
+ for i in self.obj.compounds:
+ self.cb1.addItem(str(i))
+ self.cb2.addItem(str(i))
+ self.cb1.setCurrentText(self.obj.compounds[int(self.obj.variables['HKey']['value']) - 1])
+ self.cb2.setCurrentText(self.obj.compounds[int(self.obj.variables['LKey']['value']) - 1])
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
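+ # submit handler: gather the widget values, pass them to the unit operation via param_setter, then hide the dock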
+ def param(self):
+ try:
+ self.dict=[]
+ self.dict = [self.input_dict[0].currentText(),self.input_dict[1].currentText(),float(self.input_dict[2].text()), float(self.input_dict[3].text()),
+ self.input_dict[4].currentText(), float(self.input_dict[5].text()), float(self.input_dict[6].text()), float(self.input_dict[7].text()),
+ self.input_dict[8].currentText()]
+ self.obj.param_setter(self.dict)
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+
+ except Exception as e:
+ print(e)
+
+ @staticmethod
+ def show_result(lst):
+ for i in lst:
+ try:
+ i.results_category(i.name)
+ except AttributeError:
+ pass
+
+ def clear_results(self):
+ self.tableWidget.setRowCount(0)
+
+ # result data tab
+ def results_category(self,name):
+ flag = True
+ try:
+ print("Under result category name ", name)
+ result=self.container.result
+ obj = self.container.fetch_object(name)
+ self.tableWidget.setRowCount(0)
+ variKeys = obj.result_parameters
+ print(variKeys)
+ for i, val in enumerate(variKeys):
+ propertyname = name + '.' + val
+ print(i,val, propertyname)
+ if propertyname in result[0]:
+ ind = result[0].index(propertyname)
+ resultval = str(result[-1][ind])
+ obj.variables[val]['value']= result[-1][ind]
+ print("######Resultsfetch####",val,resultval)
+ rowPosition = self.tableWidget.rowCount()
+ self.tableWidget.insertRow(rowPosition)
+ self.tableWidget.setItem(rowPosition , 0, QTableWidgetItem(obj.variables[val]['name']))
+ self.tableWidget.setItem(rowPosition , 1, QTableWidgetItem(resultval))
+ self.tableWidget.setItem(rowPosition , 2, QTableWidgetItem(obj.variables[val]['unit']))
+ self.tableWidget.resizeColumnsToContents()
+ except Exception as e:
+ print(e)
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
diff --git a/src/main/python/DockWidgets/DockWidgetSplitter.py b/src/main/python/DockWidgets/DockWidgetSplitter.py
new file mode 100644
index 0000000..296e111
--- /dev/null
+++ b/src/main/python/DockWidgets/DockWidgetSplitter.py
@@ -0,0 +1,85 @@
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import *
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/DockWidgets/DockWidgetSplitter.ui')
+
+class DockWidgetSplitter(QDockWidget,ui_dialog):
+
+ def __init__(self,name,comptype,obj,container,parent=None):
+ QDockWidget.__init__(self,parent)
+ self.setupUi(self)
+ self.setWindowTitle(obj.name)
+ self.name=name
+ self.obj=obj
+ self.type = comptype
+ self.input_dict = []
+ self.input_params_list()
+ self.btn.clicked.connect(self.param)
+ self.dict = {}
+
+ # input data tab
+ def input_params_list(self):
+ try:
+ self.l1.setText(self.obj.variables['No']['name']+":")
+ self.le1.setText(str(self.obj.variables['No']['value']))
+ self.u1.setText(self.obj.variables['No']['unit'])
+
+ self.l2.setText(self.obj.variables['CalcType']['name'] + ":")
+ for i in self.obj.CalcType_modes:
+ self.cb2.addItem(str(i))
+ self.cb2.setCurrentText(self.obj.variables['CalcType']['value'])
+
+ self.l3.setText("Stream 1 :")
+ self.le3.setText(str(self.obj.variables['SpecVal_s']['value'][0]))
+ self.u3.setText(self.obj.variables['SpecVal_s']['unit'])
+ self.l4.setText("Stream 2 :")
+ self.le4.setText(str(self.obj.variables['SpecVal_s']['value'][1]))
+ self.u4.setText(str(self.obj.variables['SpecVal_s']['unit']))
+ self.cb2.currentIndexChanged.connect(self.fun)
+
+ self.input_dict = [self.le1, self.cb2, self.le3, self.le4]
+
+ except Exception as e:
+ print(e)
+
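+ # refresh the unit labels (mol/s, g/s or blank) whenever the calculation type selection changes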
+ def fun(self):
+ if self.cb2.currentText() == 'Molar_Flow':
+ self.u3.setText('mol/s')
+ self.u4.setText('mol/s')
+ elif self.cb2.currentText() == 'Mass_Flow':
+ self.u3.setText('g/s')
+ self.u4.setText('g/s')
+ else:
+ self.u3.setText('')
+ self.u4.setText('')
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Please fill all fields with data")
+
+ def param(self):
+ try:
+ self.dict={}
+ self.dict = [int(self.input_dict[0].text()),self.input_dict[1].currentText(), float(self.input_dict[2].text()), float(self.input_dict[3].text())]
+ self.obj.param_setter(self.dict)
+ if(self.isVisible()):
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
+ self.hide()
+ except Exception as e:
+ print(e)
+
+ def closeEvent(self,event):
+ scrollHVal = self.parent().container.graphics.graphicsView.horizontalScrollBarVal
+ currentVal = self.parent().container.graphics.graphicsView.horizontalScrollBar().value()
+ self.parent().container.graphics.graphicsView.horizontalScrollBar().setValue(currentVal-189)
\ No newline at end of file
diff --git a/src/main/python/DockWidgets/__init__.py b/src/main/python/DockWidgets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-36.pyc
new file mode 100644
index 0000000..0b8a850
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-37.pyc
new file mode 100644
index 0000000..98f972c
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DistillationColumnStagewiseResults.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidget.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidget.cpython-36.pyc
new file mode 100644
index 0000000..362d798
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidget.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidget.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidget.cpython-37.pyc
new file mode 100644
index 0000000..16d524f
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidget.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-36.pyc
new file mode 100644
index 0000000..35f8cc8
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-37.pyc
new file mode 100644
index 0000000..ddb230a
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetCompoundSeparator.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-36.pyc
new file mode 100644
index 0000000..922d625
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-37.pyc
new file mode 100644
index 0000000..b50fc09
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetCompressorExpander.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-36.pyc
new file mode 100644
index 0000000..b2d28ee
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-37.pyc
new file mode 100644
index 0000000..7b8ffd6
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetDistillationColumn.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-36.pyc
new file mode 100644
index 0000000..5d96e15
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-37.pyc
new file mode 100644
index 0000000..9bb461c
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetFlash.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-36.pyc
new file mode 100644
index 0000000..dcdd91b
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-37.pyc
new file mode 100644
index 0000000..426a9a7
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetMaterialStream.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-36.pyc
new file mode 100644
index 0000000..7ce7bc5
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-37.pyc
new file mode 100644
index 0000000..05e5d7f
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetMixer.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-36.pyc
new file mode 100644
index 0000000..a43a296
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-37.pyc
new file mode 100644
index 0000000..e6c7383
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetShortcutColumn.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-36.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-36.pyc
new file mode 100644
index 0000000..ddefbdd
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-36.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-37.pyc
new file mode 100644
index 0000000..9461f0b
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/DockWidgetSplitter.cpython-37.pyc differ
diff --git a/src/main/python/DockWidgets/__pycache__/__init__.cpython-37.pyc b/src/main/python/DockWidgets/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..afad453
Binary files /dev/null and b/src/main/python/DockWidgets/__pycache__/__init__.cpython-37.pyc differ
diff --git a/src/main/python/OMChem/CompSep.py b/src/main/python/OMChem/CompSep.py
new file mode 100644
index 0000000..9579498
--- /dev/null
+++ b/src/main/python/OMChem/CompSep.py
@@ -0,0 +1,72 @@
+from OMChem.EngStm import EngStm
+import json
+class CompSep():
+ counter = 1
+ def __init__(self,CompNames = [],name='CompSep',SepFact=['Molar_Flow','Mass_Flow'],SepStrm=1,SepFactValue=[]):
+ self.SepFact = json.dumps(SepFact).replace('[','{').replace(']','}')
+ self.SepStrm = str(SepStrm)
+ self.SepFactValue = json.dumps(SepFactValue).replace('[','{').replace(']','}')
+ #self.name = name
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = []
+ self.OutputStms = []
+ self.type = 'CompSep'
+ self.EngStms = EngStm(name='EngStm')
+
+ # new
+ self.name = name + str(CompSep.counter)
+ self.no_of_input = 1
+ self.no_of_output = 2
+ CompSep.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def modesList(self):
+ return []
+
+ def paramgetter(self,mode=None):
+ dict = {"SepStrm":None,"SepFactValue":None,"SepFact":None}
+ return dict
+
+ def paramsetter(self,dict):
+ self.SepStrm = dict['SepStrm']
+ self.SepFactValue = dict['SepFactValue']
+ self.SepFact = dict['SepFact']
+
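+ # emit the Modelica declarations: an energy stream plus the Compound_Separator instance with its component list and separation settings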
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStms.name+';\n'
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.Unit_Operations.Compound_Separator " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + ("sepFact = "+self.SepFact+",sepStrm = " + self.SepStrm + ", sepFactVal = " + self.SepFactValue + ");\n")
+
+ return self.OM_data_init
+
+ def connect(self,InputStms = None,OutputStms = []):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
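+ # emit the Modelica connect equations for the feed, the two product streams and the energy stream, and fix the separation factor values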
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ comp_count = len(addedcomp)
+ strcount = 1
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+
+ for strm in self.OutputStms:
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + strm.name + '.inlet,' + self.name + '.outlet'+str(strcount)+');\n')
+ strcount += 1
+
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStms.name + '.outlet,' + self.name + '.energy);\n')
+ sepFac = str(self.SepFactValue).strip('[').strip(']')
+
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.sepFactVal= {'+ sepFac + '};\n')
+
+ return self.OM_data_eqn
+
diff --git a/src/main/python/OMChem/ConvReactor.py b/src/main/python/OMChem/ConvReactor.py
new file mode 100644
index 0000000..ccdf524
--- /dev/null
+++ b/src/main/python/OMChem/ConvReactor.py
@@ -0,0 +1,44 @@
+import json
+from OMChem.EngStm import EngStm
+class ConvReactor():
+ def __init__(self,name='',Nr=None,b=None,X=None,Z=None,a=[],operation=None,Tdef=None):
+ self.Nr = str(Nr)
+ self.b = str(b)
+ self.X = str(X)
+ self.Z = str(Z)
+ self.a = json.dumps(a).replace('[','{').replace(']','}')
+ self.operation = str(operation)
+ self.name = name
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = []
+ self.OutputStms = []
+ self.Tdef = str(Tdef)
+
+ self.type = 'ConvReactor'
+ self.EngStms = EngStm(name="EngStm")
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStms.name+';\n'
+ self.OM_data_init = self.OM_data_init + ( "Simulator.Unit_Operations.ConversionReactor " + self.name + "(Nr = " + self.Nr + ',b = ' + self.b + ',X = ' + self.X + ',Z = ' + self.Z + ',a = ' + self.a + ',operation = ' + self.operation + ',Tdef = ' + self.Tdef + ');\n')
+ return self.OM_data_init
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ comp_count = len(addedcomp)
+ strcount = 1
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.OutputStms[0].name + '.inlet,' + self.name + '.outlet);\n')
+
+
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStms.name + '.inlet,' + self.name + '.EnergyStream);\n')
+
+ return self.OM_data_eqn
diff --git a/src/main/python/OMChem/Cooler.py b/src/main/python/OMChem/Cooler.py
new file mode 100644
index 0000000..9a44033
--- /dev/null
+++ b/src/main/python/OMChem/Cooler.py
@@ -0,0 +1,76 @@
+from OMChem.EngStm import EngStm
+class Cooler():
+ counter = 1
+ def __init__(self,name='Cooler',PressureDrop = None, eff = None):
+ self.PressureDrop = PressureDrop
+ self.eff = eff
+ # self.name = name
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = None
+ #self.heatRem = heatRem
+ self.EngStms = EngStm(name='EngStm')
+ self.type = 'Cooler'
+ self.mode = None
+ self.modeVal = None
+
+ self.Prop = {
+ 'pressDrop':None,
+ 'eff':None,
+ 'outT':None,
+ 'tempDrop':None,
+ 'heatRem':None,
+ }
+ # new
+ self.name = name + str(Cooler.counter)
+ self.no_of_input = 1
+ self.no_of_output = 1
+ Cooler.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def modesList(self):
+ return ["heatRem","outT","outVapPhasMolFrac","tempDrop","enFlo"]
+
+ def paramgetter(self,mode="heatRem"):
+ self.mode = mode
+ dict = {"PressureDrop":None,"eff":None,self.mode:None}
+ return dict
+
+ def paramsetter(self,dict):
+
+ self.PressureDrop = dict['PressureDrop']
+ self.eff = dict['eff']
+ self.modeVal = dict[self.mode]
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStms.name+';\n'
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.Unit_Operations.Cooler " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + 'pressDrop = ' + str(self.PressureDrop) + ','
+ self.OM_data_init = self.OM_data_init + 'eff = ' + str(self.eff) + ');\n'
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ # self.OM_data_eqn = self.name + '.pressDrop = ' + str(self.PressDrop) + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.outlet,' + self.OutputStms[0].name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStms.name + '.outlet,'+ self.name + '.energy);\n')
+ if(self.mode =="enFlo"):
+ self.OM_data_eqn = self.OM_data_eqn + (self.EngStms.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+ else:
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+ return self.OM_data_eqn
\ No newline at end of file
diff --git a/src/main/python/OMChem/DistCol.py b/src/main/python/OMChem/DistCol.py
new file mode 100644
index 0000000..1492bc2
--- /dev/null
+++ b/src/main/python/OMChem/DistCol.py
@@ -0,0 +1,106 @@
+from OMChem.EngStm import EngStm
+class DistCol():
+ counter = 1
+ def __init__(self,name='DistCol', numStage = None,numFeeds = None,feedStages = None):
+ self.numStage = numStage
+ self.numFeeds=numFeeds
+ self.feedStages=feedStages
+ #self.name = name[0]
+ self.name = name + str(DistCol.counter)
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = None
+ self.EngStm1 = EngStm(name='EngStm1'+self.name)
+ self.EngStm2 = EngStm(name='EngStm2'+self.name)
+ #self.count = name[1]
+ self.count = DistCol.counter
+ self.thermoPackage='Raoults_Law'
+ self.type = 'DistCol'
+ self.mode = None
+ self.condType=''
+ self.modeVal = None
+ self.condP=None
+ self.rebP=None
+
+ # new
+ self.no_of_input = 2
+ self.no_of_output = 2
+ DistCol.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def modesList(self):
+ return ["refluxRatio","sideDrawMolFlo","T"]
+
+ def paramgetter(self,mode="refluxRatio"):
+ self.mode=mode
+ dict = { "numStage" : None,"numFeeds" :None,"feedStages" :None,"thermoPackage":None,"condType":None,self.mode:None,"condensor.P":None,"reboiler.P":None}
+ return dict
+
+ def paramsetter(self,dict):
+ self.numStage = dict["numStage"]
+ self.numFeeds = dict["numFeeds"]
+ self.feedStages = dict["feedStages"].split(",")
+ self.modeVal=dict[self.mode]
+ self.condP=dict["condensor.P"]
+ self.rebP=dict["reboiler.P"]
+ self.condType=dict["condType"]
+
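+ # declare Condensor, Tray and Reboiler models extending the selected thermodynamic package, wrap them in a distCol<N> model and instantiate it with the column configuration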
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + 'model Condensor\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Unit_Operations.Distillation_Column.Cond;\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Files.Thermodynamic_Packages.'+self.thermoPackage+';\n'
+ self.OM_data_init = self.OM_data_init + 'end Condensor;\n'
+ self.OM_data_init = self.OM_data_init + 'model Tray\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Unit_Operations.Distillation_Column.DistTray;\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Files.Thermodynamic_Packages.'+self.thermoPackage+';\n'
+ self.OM_data_init = self.OM_data_init + 'end Tray;\n'
+ self.OM_data_init = self.OM_data_init + 'model Reboiler\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Unit_Operations.Distillation_Column.Reb;\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Files.Thermodynamic_Packages.'+self.thermoPackage+';\n'
+ self.OM_data_init = self.OM_data_init + 'end Reboiler;\n'
+ self.OM_data_init = self.OM_data_init + ("model distCol"+str(self.count)+"\n")
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Unit_Operations.Distillation_Column.DistCol;\n" )
+ self.OM_data_init = self.OM_data_init + ("Condensor condensor(Nc = Nc, comp = comp, condType =condType, boolFeed = boolFeed[1], T(start = 300));\n" )
+ self.OM_data_init = self.OM_data_init + ("Reboiler reboiler(Nc = Nc, comp = comp, boolFeed = boolFeed[noOfStages]);\n" )
+ self.OM_data_init = self.OM_data_init + ("Tray tray[noOfStages - 2](each Nc = Nc, each comp = comp, boolFeed = boolFeed[2:noOfStages -1]);\n" )
+ self.OM_data_init = self.OM_data_init + ("end distCol"+str(self.count)+";\n")
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + (
+ "distCol"+str(self.count)+" "+ self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.feedStages=str(self.feedStages).strip('[').strip(']')
+ self.feedStages = self.feedStages.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")+("noOfStages="+self.numStage+","+"noOfFeeds="+self.numFeeds+",feedStages="+"{"+self.feedStages+"}"+",condensor.condType="+"\""+self.condType+"\""+");\n")
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStm1.name+';\n'
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStm2.name+';\n'
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ # self.OM_data_eqn = self.name + '.pressDrop = ' + str(self.PressDrop) + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + ('connect('+self.name+'.'+'condensor_duty'+','+ self.EngStm1.name+'.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect('+self.name+'.reboiler_duty'+', '+self.EngStm2.name+'.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect('+self.name+'.distillate'+", "+self.OutputStms[0].name+'.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect('+self.name+'.bottoms'+", "+self.OutputStms[1].name+'.inlet);\n')
+ for i in range(len(self.InputStms)):
+ self.OM_data_eqn = self.OM_data_eqn + ('connect('+self.InputStms[i].name+'.outlet'+", "+self.name+'.feed['+str(i+1)+']);\n')
+ self.OM_data_eqn = self.OM_data_eqn + (self.OutputStms[1].name+'.'+'totMolFlow[1] = '+str(self.OutputStms[1].Prop['totMolFlo[1]'])+';\n')
+ if self.mode=="refluxRatio":
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.'+str(self.mode)+'='+ str(self.modeVal) + ';\n')
+ else:
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.condensor.'+self.mode+'='+ str(self.modeVal) + ';\n')
+
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.reboiler.P='+self.rebP+';\n'
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.condensor.P='+self.condP+';\n'
+ return self.OM_data_eqn
+
diff --git a/src/main/python/OMChem/EngStm.py b/src/main/python/OMChem/EngStm.py
new file mode 100644
index 0000000..c02a5d5
--- /dev/null
+++ b/src/main/python/OMChem/EngStm.py
@@ -0,0 +1,18 @@
+
+class EngStm():
+
+ def __init__(self,name = 'Engstm'):
+ self.name = name
+ self.type = 'EngStm'
+ self.OM_data_init = ''
+ self.OM_data_eqn = ''
+
+
+ def OM_Flowsheet_Init(self,addedcomp):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.name+';\n'
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self,addedcomp):
+ self.OM_data_eqn=''
+ return self.OM_data_eqn
diff --git a/src/main/python/OMChem/Flash.py b/src/main/python/OMChem/Flash.py
new file mode 100644
index 0000000..a3bf5dc
--- /dev/null
+++ b/src/main/python/OMChem/Flash.py
@@ -0,0 +1,57 @@
+class Flash():
+ counter = 1
+ def __init__(self,name='Flash'):
+ #self.name = name[0]
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = None
+ self.type = 'flash'
+ #self.count = name[1]
+ self.count = Flash.counter
+ self.thermoPackage =None
+
+ # new
+ self.name = name + str(Flash.counter)
+ self.no_of_input = 1
+ self.no_of_output = 2
+ Flash.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def modesList(self):
+ return []
+
+ def paramgetter(self,mode=None):
+ dict = {"thermoPackage":None}
+ return dict
+ def paramsetter(self,dict):
+ self.thermoPackage = dict['thermoPackage']
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + ("model fls"+str(self.count)+"\n")
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Unit_Operations.Flash;\n" )
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Files.Thermodynamic_Packages."+self.thermoPackage+";\n")
+ self.OM_data_init = self.OM_data_init + ("end fls"+str(self.count)+";\n")
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + (
+ "fls"+str(self.count)+" "+ self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("});\n")
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ comp_count = len(addedcomp)
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.feed' + ');\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.vapor,' + self.OutputStms[0].name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.liquid,' + self.OutputStms[1].name + '.inlet);\n')
+ return self.OM_data_eqn
\ No newline at end of file
diff --git a/src/main/python/OMChem/Flowsheet.py b/src/main/python/OMChem/Flowsheet.py
new file mode 100644
index 0000000..855c71e
--- /dev/null
+++ b/src/main/python/OMChem/Flowsheet.py
@@ -0,0 +1,306 @@
+import os
+import csv
+from distutils import spawn
+from subprocess import Popen, PIPE
+import pandas as pd
+
+class Flowsheet():
+ def __init__(self):
+ self.sim_name = '../Simulator'
+ self.sim_method = ''
+ self.unit_operations = []
+ self.data = []
+ self.compounds = []
+ self.interface = ''
+ self.omc_path = None
+ self.root_dir = os.getcwd() # Chemical-Simulator-GUI
+ self.sim_dir_path = os.path.join(self.root_dir, self.sim_name) # Chemical-Simulator-GUI/Simulator
+ self.Flomo_path = os.path.join(self.sim_dir_path,'Flowsheet.mo')
+ self.eqn_mos_path = os.path.join(self.sim_dir_path,'simulateEQN.mos')
+ self.sm_mos_path = os.path.join(self.sim_dir_path,'simulateSM.mos')
+ self.result_data = []
+ self.stdout=None
+ self.stderr=None
+
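+ # locate the OpenModelica compiler (omc) via OPENMODELICAHOME or common install locations; msg, when given, receives error text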
+ def get_omc_path(self,msg=None):
+ try:
+ self.omhome = os.environ.get('OPENMODELICAHOME')
+ if self.omhome is None:
+ self.omhome = os.path.split(os.path.split(os.path.realpath(spawn.find_executable("omc")))[0])[0]
+ elif os.path.exists('/opt/local/bin/omc'):
+ self.omhome = '/opt/local'
+ elif os.path.exists('/usr/bin/omc'):
+ self.omhome = '/usr'
+ return os.path.join(self.omhome, 'bin', 'omc')
+ except BaseException:
+ if msg is not None:
+ msg.parent().parent().parent().container.disableInterfaceforSimulation(False)
+ msg.append("Installation Error: the OpenModelica compiler is missing from the system path. Please install it.")
+ print("The OpenModelica compiler is missing from the system path. Please install it.")
+ raise
+
+ def add_unit_operations(self,unitop):
+ self.unit_operations.append(unitop)
+
+ def remove_unit_operations(self,unitop):
+ self.unit_operations.remove(unitop)
+
+ def add_compound_list(self,C):
+ self.compounds = C
+
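+ # run the equation-oriented .mos script through omc and load the results back from Flowsheet_res.csv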
+ def send_for_simulation_Eqn(self,msg):
+ self.result_data = []
+ self.omc_path = self.get_omc_path(msg)
+ #print(self.omc_path)
+
+ if self.sim_method == 'Eqn':
+ simpath = self.eqn_mos_path
+ os.chdir(self.sim_dir_path)
+
+ self.process = Popen([self.omc_path, '-s',simpath], stdout=PIPE, stderr=PIPE)
+ self.stdout, self.stderr = self.process.communicate()
+
+ os.chdir(self.root_dir)
+ if ('timeSimulation = 0.0,\n' in self.stdout.decode("utf-8")):
+ self.result_data = []
+ else:
+ csvpath = os.path.join(self.sim_dir_path,'Flowsheet_res.csv')
+ with open (csvpath,'r') as resultFile:
+ self.result_data = []
+ csvreader = csv.reader(resultFile,delimiter=',')
+ for row in csvreader:
+ self.result_data.append(row)
+
+ def send_for_simulation_SM(self,unitop):
+ self.result_data = []
+ self.omc_path = self.get_omc_path()
+ os.chdir(self.sim_dir_path)
+ self.process = Popen([self.omc_path, '-s', unitop.name + '.mos'], stdout=PIPE, stderr=PIPE)
+ stdout, stderr = self.process.communicate()
+ # print("############### StdOut ################")
+ # print(stdout)
+ self.result_data = []
+ print('Simulating '+unitop.name+'...')
+ csvpath = os.path.join(self.sim_dir_path,unitop.name+'_res.csv')
+ with open(csvpath,'r') as resultFile:
+ csvreader = csv.reader(resultFile,delimiter=',')
+ for row in csvreader:
+ self.result_data.append(row)
+ self.ext_data()
+
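+ # copy the simulated values from the result table back into each material stream's Prop dictionary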
+ def ext_data(self):
+ for unit in self.unit_operations:
+ if unit[0].type == 'MaterialStream':
+ for key, value in unit[0].Prop.items():
+ property_name = unit[0].name + '.' + key
+ if property_name in self.result_data[0]:
+ ind = self.result_data[0].index(property_name)
+ resultval = str(self.result_data[-1][ind])
+ unit[0].Prop[key] = resultval
+
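+ # equation-oriented mode: write the full flowsheet to Flowsheet.mo, generate the .mos driver script and run it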
+ def simulate_EQN(self,msg):
+ self.data = []
+ self.sim_method = 'Eqn'
+ self.data.append("model Flowsheet\n")
+
+ tempCompounds = self.compounds
+ for c in tempCompounds:
+ CompName = c
+ CompName = CompName.replace(" ", "")
+ CompName = CompName.replace("-", "")
+ CompName = CompName.replace(",", "")
+ CompName = CompName.replace("1", "One")
+ CompName = CompName.replace("2", "Two")
+ CompName = CompName.replace("3", "Three")
+ CompName = CompName.replace("4", "Four")
+ CompName = CompName.replace("5", "Five")
+ self.data.append("parameter database." + CompName +' '+ CompName + "; \n")
+ tempCompounds[tempCompounds.index(c)] = CompName
+
+ self.data.append("parameter Integer Nc = " + str(len(tempCompounds)) + ";\n")
+ self.data.append("parameter Simulator.Files.ChemsepDatabase.GeneralProperties C[Nc] = {" +
+ str(tempCompounds).strip('[').strip(']').replace("'", "") + "};\n")
+
+ for unitop in self.unit_operations:
+ if unitop.type != 'MaterialStream':
+ self.data.append(unitop.OM_Flowsheet_Initialize())
+ else:
+ self.data.append(unitop.OM_Flowsheet_Initialize(self.compounds))
+
+ self.data.append("equation\n")
+
+ self.outlist = []
+ self.stm = ['MaterialStream','EngStm']
+ for unitop in self.unit_operations:
+ if unitop.type not in self.stm:
+ for j in unitop.output_stms.values():
+ self.outlist.append(j)
+
+ for unitop in self.unit_operations:
+ if unitop not in self.outlist:
+ if unitop.type == 'MaterialStream':
+ self.data.append(unitop.OM_Flowsheet_Equation(self.compounds,'Eqn'))
+ else:
+ self.data.append(unitop.OM_Flowsheet_Equation())
+ else:
+ pass
+ with open(self.Flomo_path, 'w') as txtfile:
+ for d in self.data:
+ txtfile.write(str(d))
+ txtfile.write('end Flowsheet;\n')
+ #print(self.eqn_mos_path)
+ with open(self.eqn_mos_path, 'w') as mosFile:
+ mosFile.write('loadModel(Modelica);\n')
+ mosFile.write('loadFile(\"Simulator/package.mo\");\n')
+ mosFile.write('loadFile(\"database.mo\");\n')
+ mosFile.write('loadFile(\"Flowsheet.mo\");\n')
+ mosFile.write('simulate(Flowsheet, outputFormat=\"csv\", stopTime=1.0, numberOfIntervals=1);\n')
+
+ #print('Initiating Simulation in Equation Oriented Mode')
+
+ self.send_for_simulation_Eqn(msg)
+
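+ # sequential-modular mode: order the units from the inlet/outlet stream maps, then build and simulate a separate model for each unit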
+ def simulate_SM(self,ip,op):
+ self.sim_method = 'SM'
+ self.data = []
+ self.result_data = []
+ self.unit = []
+ self.csvlist = []
+
+ for i in ip:
+ common = ip[i]
+
+ for k,v in op.items():
+ if(set(v) & set(common)):
+ if((i in self.unit) and (k in self.unit)):
+ pass
+ elif(i in self.unit):
+ self.unit.insert(self.unit.index(i),k)
+ elif(k in self.unit):
+ self.unit.append(i)
+ else:
+ self.unit.append(k)
+ self.unit.append(i)
+
+ for unitop in self.unit:
+ os.chdir(self.root_dir)
+ self.data = []
+ if unitop.type not in ['MaterialStream','EngStm']:
+ inpstms = unitop.input_stms
+ outstms = unitop.output_stms
+
+ try:
+ engstms = unitop.EngStms
+ except AttributeError:  # unit has no energy stream
+ engstms = None
+
+ self.data.append("model "+unitop.name.lower()+'\n')
+
+ for c in self.compounds:
+ c = c.title()
+ lcase = c.lower()
+ self.data.append("parameter Simulator.Files.Chemsep_Database." + c +' '+ c + "; \n")
+
+ self.data.append(unitop.OM_Flowsheet_Initialize())
+
+ if type(outstms) is list:
+ for stm in outstms:
+ self.data.append(stm.OM_Flowsheet_Initialize())
+ else:
+ self.data.append(outstms.OM_Flowsheet_Initialize())
+
+ if engstms:
+ self.data.append(engstms.OM_Flowsheet_Initialize())
+
+ if type(inpstms) is list:
+ for stm in inpstms:
+ self.data.append(stm.OM_Flowsheet_Initialize())
+ else:
+ self.data.append(inpstms.OM_Flowsheet_Initialize())
+
+ self.data.append('equation\n')
+ self.data.append(unitop.OM_Flowsheet_Equation())
+
+ if type(inpstms) is list:
+ for stm in inpstms:
+ self.data.append(stm.OM_Flowsheet_Equation())
+ else:
+ self.data.append(inpstms.OM_Flowsheet_Equation())
+
+ unitmofile = os.path.join(self.sim_dir_path,unitop.name.lower()+'.mo')
+
+ with open(unitmofile,'w') as unitFile:
+ for d in self.data:
+ unitFile.write(d)
+ unitFile.write('end '+unitop.name.lower()+';\n')
+
+ unitmosfile = os.path.join(self.sim_dir_path,unitop.name.lower()+'.mos')
+ with open(unitmosfile, 'w') as mosFile:
+ mosFile.write('loadModel(Modelica);\n')
+ mosFile.write("loadFile(\"Simulator\package.mo\");\n")
+
+ mosFile.write("loadFile(\""+unitop.name.lower()+".mo\");\n")
+ mosFile.write("simulate("+unitop.name.lower()+", outputFormat=\"csv\", stopTime=1.0, numberOfIntervals=1);\n")
+
+ print("Initiating simulation in Sequential Modular Mode")
+ self.omc_path = self.get_omc_path()
+ os.chdir(self.sim_dir_path)
+ sim = os.path.join(self.sim_dir_path,unitop.name.lower()+'.mos')
+ self.process = Popen([self.omc_path, '-s',sim], stdout=PIPE, stderr=PIPE)
+ self.stdout, self.stderr = self.process.communicate()
+ os.chdir(self.root_dir)
+
+ # print("############### StdOut ################")
+ # print(self.stdout)
+ # print("############### StdErr ################")
+ # print(self.stderr)
+ # print('Simulating '+unitop.name.lower()+'...')
+ csvpath = os.path.join(self.sim_dir_path,unitop.name.lower()+'_res.csv')
+
+ self.csvlist.append(csvpath)
+
+ with open(csvpath,'r') as resultFile:
+ csvreader = csv.reader(resultFile,delimiter=',')
+ for row in csvreader:
+ self.result_data.append(row)
+
+ os.chdir(self.root_dir)
+ if type(inpstms) is list:
+ for stm in inpstms:
+ for key,value in stm.Prop.items():
+ property_name = stm.name + '.' + key
+ if property_name in self.result_data[0]:
+ ind = self.result_data[0].index(property_name)
+ resultval = str(self.result_data[-1][ind])
+ stm.Prop[key] = resultval
+ else:
+ for key, value in inpstms.Prop.items():
+ property_name = inpstms.name + '.' + key
+ if property_name in self.result_data[0]:
+ ind = self.result_data[0].index(property_name)
+ resultval = str(self.result_data[-1][ind])
+ inpstms.Prop[key] = resultval
+
+ if type(outstms) is list:
+ for stm in outstms:
+ for key, value in stm.Prop.items():
+ property_name = stm.name + '.' + key
+ if property_name in self.result_data[0]:
+ ind = self.result_data[0].index(property_name)
+ resultval = str(self.result_data[-1][ind])
+ stm.Prop[key] = resultval
+ else:
+ for key, value in outstms.Prop.items():
+ property_name = outstms.name + '.' + key
+ if property_name in self.result_data[0]:
+ ind = self.result_data[0].index(property_name)
+ resultval = str(self.result_data[-1][ind])
+ outstms.Prop[key] = resultval
+
+ self.dataframes = [pd.read_csv(i) for i in self.csvlist]
+ os.chdir(self.sim_dir_path)
+ dffinal = pd.concat(self.dataframes,axis=1)
+ dffinal.to_csv('FlowsheetSEQ.csv',index=False)
+ self.result_data.clear()
+ with open(os.path.join(self.sim_dir_path,'FlowsheetSEQ.csv'),'r') as resultFile:
+ csvreader = csv.reader(resultFile,delimiter=',')
+ for row in csvreader:
+ self.result_data.append(row)
diff --git a/src/main/python/OMChem/Heater.py b/src/main/python/OMChem/Heater.py
new file mode 100644
index 0000000..cf9b8f1
--- /dev/null
+++ b/src/main/python/OMChem/Heater.py
@@ -0,0 +1,79 @@
+from OMChem.EngStm import EngStm
+class Heater():
+ counter = 1
+ def __init__(self,name='Heater',PressureDrop = None, eff = None):
+ self.PressureDrop = PressureDrop
+ self.eff = eff
+ #self.name = name
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ #self.heatAdd=heatAdd
+ #self.outT = outT
+ self.OutputStms = None
+ self.EngStms = EngStm(name='EngStm'+name)
+ self.type = 'Heater'
+ self.mode = None
+ self.modeVal = None
+
+ self.Prop = {
+ 'pressDrop':None,
+ 'eff':None,
+ 'outT':None,
+ 'tempInc':None,
+ 'heatAdd':None,
+ }
+
+ # new
+ self.name = name + str(Heater.counter)
+ self.no_of_input = 1
+ self.no_of_output = 1
+ Heater.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def modesList(self):
+ return ["heatAdd","outT","outVapPhasMolFrac","tempInc","enFlo"]
+
+ def paramgetter(self,mode="heatAdd"):
+ self.mode = mode
+ dict = {"PressureDrop":None,"eff":None,self.mode:None}
+ return dict
+
+ def paramsetter(self,dict):
+
+ self.PressureDrop = dict['PressureDrop']
+ self.eff = dict['eff']
+ self.modeVal = dict[self.mode]
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStms.name+';\n'
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.Unit_Operations.Heater " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + 'pressDrop = ' + str(self.PressureDrop) +','
+ self.OM_data_init = self.OM_data_init + 'eff = ' + str(self.eff) + ');\n'
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ # self.OM_data_eqn = self.name + '.pressDrop = ' + str(self.PressDrop) + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.outlet,' + self.OutputStms[0].name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStms.name + '.outlet,' + self.name + '.energy);\n')
+
+ if(self.mode =="enFlo"):
+ self.OM_data_eqn = self.OM_data_eqn + (self.EngStms.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+ else:
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+ return self.OM_data_eqn
diff --git a/src/main/python/OMChem/Mixer.py b/src/main/python/OMChem/Mixer.py
new file mode 100644
index 0000000..c726cdc
--- /dev/null
+++ b/src/main/python/OMChem/Mixer.py
@@ -0,0 +1,58 @@
+class Mixer():
+ counter = 1 #
+ def __init__(self,name='Mixer',NOI=5,Pcal = 'Inlet_Average'):
+ self.NOI = NOI
+ self.Pcal = Pcal
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = []
+ self.OutputStms = None
+ self.type = 'Mixer'
+
+ # new
+ self.name = name + str(Mixer.counter)
+ self.no_of_input = 4
+ self.no_of_output = 1
+ Mixer.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def modesList(self):
+ return []
+
+ def paramgetter(self,mode=None):
+ dict = {}
+ return dict
+
+ def paramsetter(self,dict):
+ self.NOI = dict["NOI"]
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.Unit_Operations.Mixer " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + ("outPress = \""+self.Pcal+"\",NI=" + str(self.NOI) + ");\n")
+ return self.OM_data_init
+
+ def connect(self,InputStms = [],OutputStms = None):
+ self.NOI=len(InputStms)
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ comp_count = len(addedcomp)
+ strcount = 1
+ for strm in self.InputStms:
+ self.OM_data_eqn = self.OM_data_eqn + (
+ 'connect(' + strm.name + '.outlet,' + self.name + '.inlet[' + str(strcount) + ']);\n')
+ strcount += 1
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.outlet,' + self.OutputStms[0].name + '.inlet);\n')
+ return self.OM_data_eqn
diff --git a/src/main/python/OMChem/Pump.py b/src/main/python/OMChem/Pump.py
new file mode 100644
index 0000000..3b6ed2c
--- /dev/null
+++ b/src/main/python/OMChem/Pump.py
@@ -0,0 +1,66 @@
+from OMChem.EngStm import EngStm
+class Pump():
+ counter = 1
+ def __init__(self,name='Pump',eff = None):
+ self.eff = eff
+ #self.name = name
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ #self.heatAdd=heatAdd
+ #self.outT = outT
+ self.OutputStms = None
+ self.EngStms = EngStm(name='EngStm'+name)
+ self.type = 'Pump'
+ self.mode = None
+ self.modeVal = None
+ # new
+ self.name = name + str(Pump.counter)
+ self.no_of_input = 1
+ self.no_of_output = 1
+ Pump.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def modesList(self):
+ return ["pressInc","outP","reqPow","enFlo"]
+
+ def paramgetter(self,mode="pressInc"):
+ self.mode = mode
+ dict = {"eff":None,self.mode:None}
+ return dict
+
+ def paramsetter(self,dict):
+ self.eff = dict['eff']
+ self.modeVal = dict[self.mode]
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStms.name+';\n'
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.Unit_Operations.Centrifugal_Pump " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + 'eff = ' + str(self.eff) + ');\n'
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ # self.OM_data_eqn = self.name + '.pressDrop = ' + str(self.PressDrop) + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.outlet,' + self.OutputStms[0].name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStms.name + '.outlet,' + self.name + '.energy);\n')
+
+ if(self.mode =="enFlo"):
+ self.OM_data_eqn = self.OM_data_eqn + (self.EngStms.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+ else:
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+ return self.OM_data_eqn
diff --git a/src/main/python/OMChem/ShortcutColumn.py b/src/main/python/OMChem/ShortcutColumn.py
new file mode 100644
index 0000000..a018873
--- /dev/null
+++ b/src/main/python/OMChem/ShortcutColumn.py
@@ -0,0 +1,88 @@
+from OMChem.EngStm import EngStm
+class ShortcutColumn():
+ counter = 1
+ def __init__(self,name='ShortCol',condP = None, rebP = None, LKey = None, HKey = None):
+ self.condP = condP
+ self.rebP = rebP
+ self.LKey = LKey
+ self.HKey = HKey
+ self.LKeyMolFrac = None
+ self.HKeyMolFrac = None
+ #self.name = name[0]
+ self.name = name + str(ShortcutColumn.counter)
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = None
+
+ self.EngStm1 = EngStm(name='EngStm1'+self.name)
+ self.EngStm2 = EngStm(name='EngStm2'+self.name)
+ #self.count = name[1]
+ self.count = ShortcutColumn.counter
+ self.condType=''
+ self.actR = None
+ self.thermoPackage='Raoults_Law'
+ self.type = 'ShortCol'
+
+ # new
+ self.no_of_input = 1
+ self.no_of_output = 2
+ ShortcutColumn.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def modesList(self):
+ return []
+
+ def paramgetter(self,mode=None):
+ dict = { "HKey" : None,"LKey" :None,"HKeyMolFrac":None,"LKeyMolFrac":None,"condType":None,"thermoPackage":None,"condP":None,"rebP":None,"actR":None}
+ return dict
+
+ def paramsetter(self,dict):
+ self.HKey = dict["HKey"]
+ self.LKey = dict["LKey"]
+ self.HKeyMolFrac = dict["HKeyMolFrac"]
+ self.LKeyMolFrac=dict["LKeyMolFrac"]
+ self.condP=dict["condP"]
+ self.rebP=dict["rebP"]
+ self.actR=dict["actR"]
+ self.condType=dict["condType"]
+ self.thermoPackage=dict["thermoPackage"]
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
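+
+ # declare an sc<N> model extending Shortcut_Column and the chosen thermodynamic package, then instantiate it with the key components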
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + ("model sc"+str(self.count)+"\n")
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Unit_Operations.Shortcut_Column;\n" )
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Files.Thermodynamic_Packages."+self.thermoPackage+";\n")
+ self.OM_data_init = self.OM_data_init + ("end sc"+str(self.count)+";\n")
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + "sc"+str(self.count)+ " " + self.name + "(Nc = " + str(comp_count)
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + 'condType = ' + "\""+self.condType+"\""+','
+ self.OM_data_init = self.OM_data_init + 'HKey = ' + str(self.HKey) +','
+ self.OM_data_init = self.OM_data_init + 'LKey = ' + str(self.LKey) + ');\n'
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStm1.name+';\n'
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStm2.name+';\n'
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.reboiler_duty,' +self.EngStm1.name +'.outlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStm2.name+'.inlet,' + self.name + '.condenser_duty);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.distillate,' + self.OutputStms[0].name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.bottoms,' + self.OutputStms[1].name + '.inlet);\n')
+
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.feed);\n')
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.rebP = ' + self.rebP + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.condP = ' + self.condP + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.mixMolFrac[2,'+self.name+'.LKey]='+self.LKeyMolFrac+ ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.mixMolFrac[3,'+self.name+'.HKey]='+self.HKeyMolFrac+ ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.actR=' + self.actR + ';\n'
+ return self.OM_data_eqn
\ No newline at end of file
diff --git a/src/main/python/OMChem/Splitter.py b/src/main/python/OMChem/Splitter.py
new file mode 100644
index 0000000..f873146
--- /dev/null
+++ b/src/main/python/OMChem/Splitter.py
@@ -0,0 +1,61 @@
+
+import json
+class Splitter():
+ counter = 1
+ def __init__(self,name='Splitter',NOO=5,calcType = 'Molar_Flow',specval = [50,50]):
+ self.NOO = NOO
+ self.calcType = calcType
+ self.name = name
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = []
+ self.type = 'Splitter'
+ self.specval = json.dumps(specval).replace('[','{').replace(']','}')
+ # new
+ self.name = name + str(Splitter.counter)
+ self.no_of_input = 1
+ self.no_of_output = 4
+ Splitter.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def modesList(self):
+ return []
+ def paramgetter(self,mode=None):
+ dict = {"NOO":None}
+ return dict
+ def paramsetter(self,dict):
+ self.NOI = dict["NOO"]
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.Unit_Operations.Splitter " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + ("calcType = \""+self.calcType+"\",NO=" + str(self.NOO) + ");\n")
+ return self.OM_data_init
+
+ def connect(self,InputStms = None,OutputStms = []):
+ self.NOO=len(OutputStms)
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+
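+ # connect each outlet to its product stream and the single inlet to the feed, then fix the split specification values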
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ comp_count = len(addedcomp)
+ strcount = 1
+ print("Output#########",self.OutputStms)
+ for strm in self.OutputStms:
+ self.OM_data_eqn = self.OM_data_eqn + (
+ 'connect(' + strm.name + '.inlet,' + self.name + '.outlet[' + str(strcount) + ']);\n')
+ strcount += 1
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.inlet,' + self.InputStms[0].name + '.outlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + self.name +'.' +'specVal =' + self.specval +';\n'
+ return self.OM_data_eqn
diff --git a/src/main/python/OMChem/Valve.py b/src/main/python/OMChem/Valve.py
new file mode 100644
index 0000000..9e8494b
--- /dev/null
+++ b/src/main/python/OMChem/Valve.py
@@ -0,0 +1,61 @@
+class Valve():
+ counter = 1
+ def __init__(self,name='Valve'):
+ #self.PressDrop = PressureDrop
+ self.mode = None
+ self.modeVal = None
+ #self.name = name
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = None
+ self.type = 'Valve'
+
+ self.Prop = {
+ 'pressDrop':None,
+ 'outP':None
+ }
+ # new
+ self.name = name + str(Valve.counter)
+ self.no_of_input = 1
+ self.no_of_output = 1
+ Valve.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def modesList(self):
+ return ["pressDrop","outP"]
+
+ def paramgetter(self,mode="pressDrop"):
+ self.mode = mode
+ dict = {self.mode:None}
+ return dict
+
+ def paramsetter(self,dict):
+
+ self.modeVal = dict[self.mode]
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.Unit_Operations.Valve " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("});\n")
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.outlet,' + self.OutputStms[0].name + '.inlet);\n')
+ print("asdfvfdasdsqdfdedfdfv12345678987654321234567898765")
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+ return self.OM_data_eqn
\ No newline at end of file
diff --git a/src/main/python/OMChem/__init__.py b/src/main/python/OMChem/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/main/python/OMChem/__pycache__/EngStm.cpython-36.pyc b/src/main/python/OMChem/__pycache__/EngStm.cpython-36.pyc
new file mode 100644
index 0000000..dfeb291
Binary files /dev/null and b/src/main/python/OMChem/__pycache__/EngStm.cpython-36.pyc differ
diff --git a/src/main/python/OMChem/__pycache__/EngStm.cpython-37.pyc b/src/main/python/OMChem/__pycache__/EngStm.cpython-37.pyc
new file mode 100644
index 0000000..fd407a2
Binary files /dev/null and b/src/main/python/OMChem/__pycache__/EngStm.cpython-37.pyc differ
diff --git a/src/main/python/OMChem/__pycache__/Flowsheet.cpython-36.pyc b/src/main/python/OMChem/__pycache__/Flowsheet.cpython-36.pyc
new file mode 100644
index 0000000..528d9e1
Binary files /dev/null and b/src/main/python/OMChem/__pycache__/Flowsheet.cpython-36.pyc differ
diff --git a/src/main/python/OMChem/__pycache__/Flowsheet.cpython-37.pyc b/src/main/python/OMChem/__pycache__/Flowsheet.cpython-37.pyc
new file mode 100644
index 0000000..d602758
Binary files /dev/null and b/src/main/python/OMChem/__pycache__/Flowsheet.cpython-37.pyc differ
diff --git a/src/main/python/OMChem/__pycache__/__init__.cpython-37.pyc b/src/main/python/OMChem/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..d9f9b6b
Binary files /dev/null and b/src/main/python/OMChem/__pycache__/__init__.cpython-37.pyc differ
diff --git a/src/main/python/OMChem/adiabatic_comp.py b/src/main/python/OMChem/adiabatic_comp.py
new file mode 100644
index 0000000..d0a6b72
--- /dev/null
+++ b/src/main/python/OMChem/adiabatic_comp.py
@@ -0,0 +1,70 @@
+from OMChem.EngStm import EngStm
+class AdiabaticCompressor():
+ counter = 1
+ def __init__(self,name='AdiabaticCompressor', eff = None):
+ self.eff = eff
+ #self.name = name[0]
+ self.name = name + str(AdiabaticCompressor.counter)
+
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = None
+ self.EngStms = EngStm(name='EngStm'+self.name)
+ #self.count = name[1]
+ self.count = AdiabaticCompressor.counter
+ self.type = 'AdiabaticCompressor'
+ self.thermoPackage ="RaoultsLaw"
+ self.mode = None
+ self.modeVal = None
+ # new
+ self.no_of_input = 1
+ self.no_of_output = 1
+ AdiabaticCompressor.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def modesList(self):
+ return ["pressInc","outP","reqPow"]
+
+ def paramgetter(self,mode="pressInc"):
+ self.mode = mode
+ dict = {"eff":None,self.mode:None,"thermoPackage":None}
+ return dict
+
+ def paramsetter(self,dict):
+ self.eff = dict['eff']
+ self.modeVal = dict[self.mode]
+
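+ # OM_Flowsheet_Init declares an energy stream and a small wrapper model that
+ # combines the compressor with the chosen thermodynamic package, e.g.
+ # (compounds and efficiency illustrative):
+ #   model adiaComp1
+ #   extends Simulator.Unit_Operations.Adiabatic_Compressor;
+ #   extends Simulator.Files.Thermodynamic_Packages.RaoultsLaw;
+ #   end adiaComp1;
+ #   adiaComp1 AdiabaticCompressor1(Nc = 2,comp = {benzene, toluene},eff=0.75);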
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStms.name+';\n'
+ self.OM_data_init = self.OM_data_init + ("model adiaComp"+str(self.count)+"\n")
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Unit_Operations.Adiabatic_Compressor;\n" )
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Files.Thermodynamic_Packages."+self.thermoPackage+";\n")
+ self.OM_data_init = self.OM_data_init + ("end adiaComp"+str(self.count)+";\n")
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + (
+ "adiaComp"+str(self.count)+" "+ self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")+("eff="+self.eff+");\n")
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ # self.OM_data_eqn = self.name + '.pressDrop = ' + str(self.PressDrop) + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.outlet,' + self.OutputStms[0].name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStms.name + '.outlet,' + self.name + '.energy);\n')
+
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+
+ return self.OM_data_eqn
+
diff --git a/src/main/python/OMChem/adiabatic_exp.py b/src/main/python/OMChem/adiabatic_exp.py
new file mode 100644
index 0000000..2754f0a
--- /dev/null
+++ b/src/main/python/OMChem/adiabatic_exp.py
@@ -0,0 +1,70 @@
+from OMChem.EngStm import EngStm
+class AdiabaticExpander():
+ counter = 1
+ def __init__(self,name='AdiabaticExpander', eff = None):
+ self.eff = eff
+ #self.name = name[0]
+ self.name = name + str(AdiabaticExpander.counter)
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.InputStms = None
+ self.OutputStms = None
+ self.EngStms = EngStm(name='EngStm'+self.name)
+ #self.count = name[1]
+ self.count = AdiabaticExpander.counter
+ self.type = 'AdiabaticExpander'
+ self.thermoPackage ="RaoultsLaw"
+ self.mode = None
+ self.modeVal = None
+
+ # new
+ self.no_of_input = 1
+ self.no_of_output = 1
+ AdiabaticExpander.counter += 1
+
+ def getname(self):
+ return self.name
+
+ def connect(self,InputStms = None,OutputStms = None):
+ self.InputStms = InputStms
+ self.OutputStms = OutputStms
+
+ def modesList(self):
+ return ["pressDrop","outP","genPow"]
+
+ def paramgetter(self,mode="pressDrop"):
+ self.mode = mode
+ dict = {"eff":None,self.mode:None,"thermoPackage":None}
+ return dict
+
+ def paramsetter(self,dict):
+ self.eff = dict['eff']
+ self.modeVal = dict[self.mode]
+
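+ # Mirrors AdiabaticCompressor.OM_Flowsheet_Init: an energy stream plus a local
+ # wrapper model, but extending Simulator.Unit_Operations.Adiabatic_Expander.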
+ def OM_Flowsheet_Init(self, addedcomp):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStms.name+';\n'
+ self.OM_data_init = self.OM_data_init + ("model adiaExp"+str(self.count)+"\n")
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Unit_Operations.Adiabatic_Expander;\n" )
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Files.Thermodynamic_Packages."+self.thermoPackage+";\n")
+ self.OM_data_init = self.OM_data_init + ("end adiaExp"+str(self.count)+";\n")
+ comp_count = len(addedcomp)
+ self.OM_data_init = self.OM_data_init + (
+ "adiaExp"+str(self.count)+" "+ self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (",comp = {")
+ comp = str(addedcomp).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")+("eff="+self.eff+");\n")
+ return self.OM_data_init
+
+ def OM_Flowsheet_Eqn(self, addedcomp):
+ self.OM_data_eqn = ''
+ # self.OM_data_eqn = self.name + '.pressDrop = ' + str(self.PressDrop) + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.InputStms[0].name + '.outlet,' + self.name + '.inlet' + ');\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.name + '.outlet,' + self.OutputStms[0].name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + ('connect(' + self.EngStms.name + '.outlet,' + self.name + '.energy);\n')
+
+ self.OM_data_eqn = self.OM_data_eqn + (self.name+'.'+self.mode+'='+ self.modeVal + ';\n')
+
+ return self.OM_data_eqn
+
diff --git a/src/main/python/OMChem/setup.py b/src/main/python/OMChem/setup.py
new file mode 100644
index 0000000..037da1c
--- /dev/null
+++ b/src/main/python/OMChem/setup.py
@@ -0,0 +1,54 @@
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
+
+from subprocess import call
+import sys
+import os
+# Python 3.3 offers shutil.which()
+from distutils import spawn
+
+def warningOrError(errorOnFailure, msg):
+ if errorOnFailure:
+ raise Exception(msg)
+ else:
+ print(msg)
+
+def generateIDL():
+ errorOnFailure = not os.path.exists(os.path.join(os.path.dirname(__file__), 'OMPythonIDL', '__init__.py'))
+ try:
+ omhome = os.path.split(os.path.split(os.path.realpath(spawn.find_executable("omc")))[0])[0]
+ except:
+ omhome = None
+ omhome = omhome or os.environ.get('OPENMODELICAHOME')
+
+ if omhome is None:
+ warningOrError(errorOnFailure, "Failed to find OPENMODELICAHOME (searched for environment variable as well as the omc executable)")
+ return
+ idl = os.path.join(omhome,"share","omc","omc_communication.idl")
+ if not os.path.exists(idl):
+ warningOrError(errorOnFailure, "Path not found: %s" % idl)
+ return
+
+ if call(["omniidl","-bpython","-Wbglobal=_OMCIDL","-Wbpackage=OMPythonIDL",idl]) != 0:
+ warningOrError(errorOnFailure, "omniidl command failed")
+ return
+ print("Generated OMPythonIDL files")
+generateIDL()
+
+setup(name='OMPython',
+ version='2.0.7',
+ description='OpenModelica-Python API Interface',
+ author='Anand Kalaiarasi Ganeson',
+ author_email='ganan642@student.liu.se',
+ maintainer='Adeel Asghar',
+ maintainer_email='adeel.asghar@liu.se',
+ license="BSD, OSMC-PL 1.2, GPL (user's choice)",
+ url='http://openmodelica.org/',
+ packages=['OMPython', 'OMPython.OMParser', 'OMPythonIDL', 'OMPythonIDL._OMCIDL', 'OMPythonIDL._OMCIDL__POA'],
+ install_requires=[
+ # 'omniORB', # Required, but not part of pypi
+ 'pyparsing'
+ ]
+)
diff --git a/src/main/python/Redo.dat b/src/main/python/Redo.dat
new file mode 100644
index 0000000..e69de29
diff --git a/src/main/python/mainApp.py b/src/main/python/mainApp.py
new file mode 100644
index 0000000..3f6f4be
--- /dev/null
+++ b/src/main/python/mainApp.py
@@ -0,0 +1,413 @@
+import pickle
+import threading
+import os
+import ctypes
+import sys
+import datetime
+from functools import partial
+
+current = os.path.dirname(os.path.realpath(__file__))
+parentPath = os.path.dirname(current)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+import PyQt5.QtGui as QtGui
+import PyQt5.QtCore as QtCore
+import PyQt5.QtWidgets as QtWidgets
+from PyQt5.uic import loadUiType
+
+from python.OMChem.Flowsheet import Flowsheet
+from python.utils.ComponentSelector import *
+from python.utils.Bin_Phase_env import *
+from python.utils.UnitOperations import *
+from python.utils.Streams import *
+from python.utils.Container import *
+from python.utils.Graphics import *
+
+ui,_ = loadUiType(parentPath+'/ui/utils/main.ui')
+
+'''
+ The MainApp class is responsible for all the main application UI operations
+'''
+class MainApp(QMainWindow,ui):
+ '''
+ Initializing the application
+ '''
+ def __init__(self):
+ QMainWindow.__init__(self)
+
+ # Loading and setting up style sheet
+ self.setupUi(self)
+
+ # Initializing attributes
+ self.zoom_count = 0
+ self.thrd = None
+
+ # Creating instances of classes for the main app
+ self.container = Container(self.textBrowser, self.graphicsView)
+ self.comp = ComponentSelector(self)
+ self.comp.accepted.connect(self.update_compounds)
+
+ # Setting up interactive canvas
+ self.scene = self.container.graphics.get_scene()
+ self.graphicsView.setScene(self.scene)
+ self.graphicsView.setMouseTracking(True)
+ self.graphicsView.keyPressEvent=self.delete_call
+
+ self.setDockNestingEnabled(True)
+ self.setCorner(Qt.BottomRightCorner, Qt.RightDockWidgetArea)
+ self.setCorner(Qt.BottomLeftCorner, Qt.LeftDockWidgetArea)
+ self.addDockWidget(Qt.BottomDockWidgetArea,self.dockWidget_2)
+
+ # Calling initialisation
+ self.menu_bar()
+ self.button_handler()
+ self.comp.show()
+
+ '''
+ The menu_bar function handles all the operations of the menu
+ bar, such as new, zoom, compound selector and simulation options.
+ '''
+ def menu_bar(self):
+ self.actionSelectCompounds.triggered.connect(self.select_compounds)
+ self.actionSelectCompounds.setShortcut('Ctrl+C')
+ self.actionZoomIn.triggered.connect(self.zoom_in)
+ self.actionZoomIn.setShortcut('Ctrl++')
+ self.actionNew.triggered.connect(self.new)
+ self.actionNew.setShortcut('Ctrl+N')
+ self.actionZoomOut.triggered.connect(self.zoom_out)
+ self.actionZoomOut.setShortcut('Ctrl+-')
+ self.actionResetZoom.triggered.connect(self.zoom_reset)
+ self.actionResetZoom.setShortcut('Ctrl+R')
+ self.actionHelp.triggered.connect(self.help)
+ self.actionHelp.setShortcut('Ctrl+H')
+ self.actionSequentialMode.triggered.connect(partial(self.simulate,'SM'))
+ self.actionSequentialMode.setShortcut('Ctrl+M')
+ self.actionEquationOriented.triggered.connect(partial(self.simulate,'EQN'))
+ self.actionEquationOriented.setShortcut('Ctrl+E')
+ self.actionUndo.triggered.connect(self.undo)
+ self.actionUndo.setShortcut('Ctrl+Z')
+ self.actionRedo.triggered.connect(self.redo)
+ self.actionRedo.setShortcut('Ctrl+Y')
+ self.actionSave.triggered.connect(self.save)
+ self.actionSave.setShortcut('Ctrl+S')
+ self.actionOpen.triggered.connect(self.open)
+ self.actionOpen.setShortcut('Ctrl+O')
+ self.actionTerminate.triggered.connect(self.terminate)
+ self.actionTerminate.setShortcut('Ctrl+T')
+ self.actionBinaryPhaseEnvelope.triggered.connect(self.bin_phase_env)
+ self.actionViewMessageBrowser.triggered.connect(self.toggle_message_browser_view)
+ self.actionViewComponentSelector.triggered.connect(self.toggle_component_selector_view)
+
+ '''
+ Handles all the buttons of different components.
+ '''
+ def button_handler(self):
+ self.pushButton.clicked.connect(partial(self.component,'MaterialStream'))
+ self.pushButton_7.clicked.connect(partial(self.component,'Mixer'))
+ self.pushButton_14.clicked.connect(partial(self.component,'CentrifugalPump'))
+ self.pushButton_26.clicked.connect(partial(self.component,'DistillationColumn'))
+ self.pushButton_18.clicked.connect(partial(self.component,'ShortcutColumn'))
+ self.pushButton_11.clicked.connect(partial(self.component,'Heater'))
+ self.pushButton_10.clicked.connect(partial(self.component,'Splitter'))
+ self.pushButton_9.clicked.connect(partial(self.component,'Flash'))
+ self.pushButton_25.clicked.connect(partial(self.component,'Valve'))
+ self.pushButton_12.clicked.connect(partial(self.component,'Cooler'))
+ self.pushButton_13.clicked.connect(partial(self.component,'CompoundSeparator'))
+ self.pushButton_15.clicked.connect(partial(self.component,'AdiabaticCompressor'))
+ self.pushButton_16.clicked.connect(partial(self.component,'AdiabaticExpander'))
+
+ '''
+ Displays help box
+ '''
+ def help(self):
+ msgBox = QMessageBox()
+ msgBox.setIcon(QMessageBox.Question)
+ msgBox.setTextFormat(Qt.RichText)
+ msgBox.setText("For any help or suggestions, contact us at\ncontact-om@fossee.in or visit fossee.in")
+ msgBox.setStandardButtons(QMessageBox.Ok)
+ msgBox.exec_()
+
+ '''
+ Creates Binary Phase envelope
+ '''
+ def bin_phase_env(self):
+ if len(self.comp.get_compounds())<2:
+ QMessageBox.about(self, 'Important', "Please select at least 2 Compounds first")
+ self.comp.show()
+ else:
+ self.bin_phase = BinPhaseEnv(self.comp)
+ self.bin_phase.show()
+
+ '''
+ Shows Compounds Selector Dialog
+ '''
+ def select_compounds(self):
+ self.comp.show()
+
+ '''
+ Updates the compounds after the compound selection is modified during simulation creation
+ '''
+ def update_compounds(self):
+ self.container.update_compounds()
+
+
+ '''
+ Returns the current time in the required format
+ '''
+ def current_time(self):
+ now = datetime.datetime.now()
+ time = str(now.hour) + ":" + str(now.minute) + ":" +str(now.second)
+ return time
+
+ '''
+ The simulate function runs the simulation of the designed
+ flowsheet, in a separate thread, in the mode selected by the user.
+ '''
+ def simulate(self,mode):
+ self.thrd = threading.Thread(target=self.container.simulate, args=(mode,))
+ self.thrd.start()
+
+ '''
+ Terminates the currently running simulation by killing the omc
+ process and raising SystemExit in the simulation thread.
+ '''
+ def terminate(self):
+ os.chdir(self.container.flowsheet.root_dir)
+ if self.thrd:
+ thread_id = self.thrd.ident
+ # print('____________________Going to terminate simulation thread with Thread ID:',thread_id,'____________________')
+ # print('____________________Going to terminate the new process created for omc____________________')
+ self.container.flowsheet.process.terminate()
+ print('____________________New process created for omc is terminated.____________________')
+ res = ctypes.pythonapi.PyThreadState_SetAsyncExc(thread_id, ctypes.py_object(SystemExit))
+ self.textBrowser.append("["+str(self.current_time())+"]Simulation Terminated.")
+ self.container.disableInterfaceforSimulation(False)
+ # print('____________________Simulation thread terminated____________________')
+ if res > 1:
+ ctypes.pythonapi.PyThreadState_SetAsyncExc(thread_id, 0)
+ # print('Exception raise (Thread termination) failure')
+
+ '''
+ Resets the zoom level to default scaling
+ '''
+ def zoom_reset(self):
+ if(self.zoom_count>0):
+ for i in range(self.zoom_count):
+ self.zoom_out()
+ elif(self.zoom_count<0):
+ for i in range(abs(self.zoom_count)):
+ self.zoom_in()
+
+ '''
+ Zooms out the canvas
+ '''
+ def zoom_out(self):
+ self.graphicsView.scale(1.0/1.15,1.0/1.15)
+ self.zoom_count -=1
+
+ '''
+ Zooms in the canvas
+ '''
+ def zoom_in(self):
+ self.graphicsView.scale(1.15,1.15)
+ self.zoom_count +=1
+
+ '''
+ Instantiates a NodeItem object for the selected type of
+ component and adds it to the canvas/flowsheeting area.
+ '''
+ def component(self,unit_operation_type):
+ if(self.comp.is_compound_selected()):
+ self.type = unit_operation_type
+ if(self.type=="MaterialStream"):
+ self.obj = MaterialStream(compound_names = compound_selected)
+ else:
+ self.obj = eval(self.type)()
+ self.container.add_unit_operation(self.obj)
+
+ else:
+ QMessageBox.about(self, 'Important', "Please Select Compounds first")
+ self.comp.show()
+
+ '''
+ New clears all the existing work and starts a fresh flowsheet.
+ '''
+ def new(self):
+ self.setWindowTitle('Untitled - Chemical Simulator GUI')
+ self.undo_redo_helper()
+ self.comp = ComponentSelector(self)
+ self.textBrowser.append("[" + str(self.current_time()) + "] New flowsheet is created ... ")
+ dock_widget_lst.clear()
+
+ '''
+ Handles all the operations performed when the delete key is pressed.
+ '''
+ def delete_call(self,event):
+ try:
+ if event.key() == QtCore.Qt.Key_Delete:
+ l=self.scene.selectedItems()
+ self.container.delete(l)
+ except Exception as e:
+ print(e)
+
+ '''
+ Helper that clears the canvas and reloads the objects for the undo/redo methods
+ '''
+ def undo_redo_helper(self):
+ for i in self.container.unit_operations:
+ type(i).counter = 1
+ self.container = None
+ for i in dock_widget_lst:
+ i.hide()
+ del i
+ lst.clear()
+ self.container = Container(self.textBrowser, self.graphicsView)
+
+ compound_selected.clear()
+ self.scene = self.container.graphics.get_scene()
+ self.graphicsView.setScene(self.scene)
+ self.graphicsView.setMouseTracking(True)
+ self.graphicsView.keyPressEvent=self.delete_call
+
+ '''
+ Function for undo: moves the latest state from the Undo stack to the
+ Redo stack and reloads the state now on top of the Undo stack.
+ '''
+ def undo(self):
+ redo_data = pop('Undo')
+ if redo_data is not None:
+ push('Redo', redo_data)
+ undo_data = get_last_list('Undo')
+ messages = self.textBrowser.toPlainText()
+ try:
+ self.undo_redo_helper()
+ self.container.graphics.load_canvas(undo_data, self.container)
+ self.textBrowser.setText(messages)
+ except Exception as e:
+ print(e)
+ self.textBrowser.append(messages)
+ else:
+ messages = self.textBrowser.toPlainText()
+ self.textBrowser.setText(messages)
+ self.textBrowser.append("[" + str(self.current_time()) + "] No more undo can be done!... ")
+
+ '''
+ Function for redo: moves the latest state from the Redo stack back to
+ the Undo stack and reloads it on the canvas.
+ '''
+ def redo(self):
+ redo_data = pop('Redo')
+ if redo_data is not None:
+ push('Undo', redo_data)
+ messages = self.textBrowser.toPlainText()
+ self.undo_redo_helper()
+ self.container.graphics.load_canvas(redo_data, self.container)
+ self.textBrowser.setText(messages)
+ else:
+ messages = self.textBrowser.toPlainText()
+ self.textBrowser.setText(messages)
+ self.textBrowser.append("[" + str(self.current_time()) + "] No more redo can be done!... ")
+
+ '''
+ Function for saving the current canvas items and compound_selected
+ '''
+ def save(self):
+ data = []
+ for i in self.container.unit_operations:
+ data.append(i)
+ i.saved = True
+ data.append(compound_selected)
+ data.append(self.container.result)
+
+ file_format = 'sim'
+ initial_path = QDir.currentPath() + ' untitled.' + file_format
+ file_name, _ = QFileDialog.getSaveFileName(self, "Save As",
+ initial_path, "%s Files (*.%s);; All Files (*)" %
+ (file_format.upper(), file_format))
+ try:
+ with open(file_name, 'wb') as f:
+ pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
+ fileName = file_name.split('/')[-1].split('.')[0]
+ self.setWindowTitle(fileName+' - Chemical Simulator GUI')
+ except Exception as e:
+ pass
+
+ '''
+ Function for loading a previously saved canvas and simulation
+ '''
+ def open(self):
+ try:
+ file_format = 'sim'
+ initial_path = QDir.currentPath() + 'untitled.' + file_format
+
+ file_name, _ = QFileDialog.getOpenFileName(self, "Open As",
+ initial_path, "%s Files (*.%s);; All Files (*)" %
+ (file_format.upper(), file_format))
+ if file_name:
+ fileName = file_name.split('/')[-1].split('.')[0]
+ self.setWindowTitle(fileName+' - Chemical Simulator GUI')
+
+ self.undo_redo_helper()
+
+ with open(file_name, 'rb') as f:
+ obj = pickle.load(f)
+ temp_result = obj[-1]
+ obj.pop()
+ compound_selected = obj[-1]
+ obj.pop()
+ self.comp.set_compounds(compound_selected)
+ for i in compound_selected:
+ self.comp.compound_selection(self.comp, i)
+ self.comp.hide()
+ self.container.graphics.load_canvas(obj, self.container)
+ self.container.result = temp_result
+ DockWidget.show_result(dock_widget_lst)
+
+ for i in dock_widget_lst:
+ #Submitting values
+ i.param()
+
+ #Disabling the input data tab for output streams
+ for i in self.container.graphics.scene.items():
+ if (isinstance(i, NodeItem) and i.type == 'MaterialStream'):
+ i.update_tooltip_selectedVar()
+ no_input_lines = len(i.input[0].in_lines)
+ no_output_lines = len(i.output[0].out_lines)
+ if(no_input_lines>0): #If the material stream has an incoming line it is an output stream, so its input data tab is disabled
+ i.obj.disableInputDataTab(i.dock_widget)
+
+ except Exception as e:
+ print(e)
+
+ '''
+ Function for toggling the display of Component Selector
+ '''
+ def toggle_component_selector_view(self):
+ if(self.actionViewComponentSelector.isChecked()):
+ self.dockWidget.show()
+ else:
+ self.dockWidget.hide()
+
+ '''
+ Function for toggling the display of Message Browser
+ '''
+ def toggle_message_browser_view(self):
+ if(self.actionViewMessageBrowser.isChecked()):
+ self.dockWidget_2.show()
+ else:
+ self.dockWidget_2.hide()
+
+
+def main():
+
+ clean_file('Undo')
+ clean_file('Redo')
+
+ app = QApplication(sys.argv)
+ window = MainApp()
+ window.showMaximized()
+ app.exec()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/src/main/python/utils/Bin_Phase_env.py b/src/main/python/utils/Bin_Phase_env.py
new file mode 100644
index 0000000..ee973f9
--- /dev/null
+++ b/src/main/python/utils/Bin_Phase_env.py
@@ -0,0 +1,240 @@
+import sys
+import pandas as pd
+import numpy as np
+import os
+import csv
+from subprocess import Popen, PIPE
+from distutils import spawn  # used below as a fallback to locate the omc executable
+
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+import PyQt5.QtGui as QtGui
+import PyQt5.QtCore as QtCore
+import PyQt5.QtWidgets as QtWidgets
+from PyQt5.uic import loadUiType
+
+
+import pyqtgraph as pg
+import pyqtgraph.exporters
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+ui_dialog,_ = loadUiType(parentPath+'/ui/utils/Binary_Phase_Env.ui')
+
+class BinPhaseEnv(QWidget,ui_dialog):
+ def __init__(self,comp):
+ QWidget.__init__(self)
+ self.setupUi(self)
+
+ self.comp = comp
+ self.compunds = self.comp.get_compounds()
+
+ for i in self.compunds:
+ self.comboBox.addItem(str(i))
+
+ for i in range(len(self.compunds)):
+ if i!=0:
+ self.comboBox_2.addItem(str(self.compunds[i]))
+
+ self.comboBox_2.addItem(str(self.compunds[0]))
+
+ self.lines = [line.rstrip('\n') for line in open(parentPath+'/python/utils/thermopackage.txt')]
+ for j in self.lines:
+ self.comboBox_3.addItem(str(j))
+
+ self.radioButton.setChecked(False)
+ self.radioButton_2.setChecked(False)
+
+ self.button_handler()
+ self.counter = 1
+
+ def button_handler(self):
+ self.radioButton.clicked.connect(lambda:self.T_xy())
+ self.radioButton_2.clicked.connect(lambda:self.P_xy())
+ self.pushButton_2.clicked.connect(lambda:self.plot())
+
+ def T_xy(self):
+ self.radioButton.setChecked(True)
+ self.radioButton_2.setChecked(False)
+ for i in reversed(range(self.formLayout.count())):
+ self.formLayout.itemAt(i).widget().setParent(None)
+
+ self.first = QLineEdit()
+ self.type = "P"
+ self.other = "T"
+ self.otherunit = "(K)"
+ self.formLayout.addRow(QLabel("P(Pa)"),self.first)
+ self.points = QLineEdit()
+ self.points.setText("40")
+ self.formLayout.addRow(QLabel("Number of data points"),self.points)
+
+ def P_xy(self):
+ self.radioButton_2.setChecked(True)
+ self.radioButton.setChecked(False)
+ for i in reversed(range(self.formLayout.count())):
+ self.formLayout.itemAt(i).widget().setParent(None)
+
+ self.first = QLineEdit()
+ self.type = "T"
+ self.other = "P"
+ self.otherunit = "(Pa)"
+ self.points = QLineEdit()
+ self.points.setText("40")
+ self.formLayout.addRow(QLabel("T(K)"),self.first)
+ self.formLayout.addRow(QLabel("Number of data points"),self.points)
+
+ def get_omc_path(self):
+ try:
+ self.omhome = os.environ.get('OPENMODELICAHOME')
+ if self.omhome is None:
+ self.omhome = os.path.split(os.path.split(os.path.realpath(spawn.find_executable("omc")))[0])[0]
+ elif os.path.exists('/opt/local/bin/omc'):
+ self.omhome = '/opt/local'
+ elif os.path.exists('/usr/bin/omc'):
+ self.omhome = '/usr'
+ return os.path.join(self.omhome, 'bin', 'omc')
+ except BaseException:
+ print("The OpenModelica compiler is missing in the System path please install it" )
+ raise
+
+ def plot(self):
+ try:
+ val = int(self.first.text(),10)
+ except:
+ val = 0
+ try:
+ data_points = int(self.points.text(),10)
+ except:
+ data_points = 0
+
+ self.curr_path = os.getcwd()
+ self.sim_dir_path = os.path.join(self.curr_path,'./../Simulator')
+ self.Graphmo_path = os.path.join(self.sim_dir_path,'Graph.mo')
+ self.plot_mos_path = os.path.join(self.sim_dir_path,'PlotGraph.mos')
+
+ self.data = []
+
+ self.comp1 = self.comboBox.currentText()
+ self.comp2 = self.comboBox_2.currentText()
+ self.comp_1 = self.comboBox.currentText().split('(')[0]
+ self.comp_2 = self.comboBox_2.currentText().split('(')[0]
+
+ self.thermoPack = self.comboBox_3.currentText()
+
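+ # Builds a small throwaway Modelica model (Graph.mo) that instantiates the two
+ # selected compounds and extends the chosen binary-envelope package, e.g.
+ # (compounds, package and values illustrative):
+ #   model Graph
+ #   import data = Simulator.Files.ChemsepDatabase;
+ #   parameter data.Benzene comp1;
+ #   parameter data.Toluene comp2;
+ #   extends BinaryEnvelopes.RaoultsLaw(Nc = 2, data_points = 40, comp = { comp1, comp2 }, T = fill( 300, 40));
+ #   end Graph;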
+ self.data.append("model Graph\n")
+ self.data.append("import data = Simulator.Files.ChemsepDatabase;\n")
+ self.data.append("parameter data."+self.comp_1+" comp1;\n")
+ self.data.append("parameter data."+self.comp_2+" comp2;\n")
+ self.data.append("extends BinaryEnvelopes."+self.thermoPack+"(Nc = 2, data_points = "+str(data_points)+ ", comp = { comp1, comp2 }, "+self.type+" = fill( "+str(val)+", "+str(data_points)+"));\n")
+ self.data.append("end Graph;")
+
+ with open(self.Graphmo_path, 'w') as txtfile:
+ for d in self.data:
+ txtfile.write(str(d))
+
+ with open(self.plot_mos_path, 'w') as mosFile:
+ mosFile.write("loadModel(Modelica);\n")
+ mosFile.write("loadFile(\"Simulator/package.mo\");\n")
+ mosFile.write("loadFile(\"BinaryEnvelopes.mo\");\n")
+ mosFile.write("loadFile(\"Graph.mo\");\n")
+ mosFile.write("simulate(Graph, outputFormat=\"csv\", stopTime=1.0, numberOfIntervals=1);\n")
+
+ self.resdata = []
+ self.omc_path = self.get_omc_path()
+ simpath = self.plot_mos_path
+ os.chdir(self.sim_dir_path)
+
+ process = Popen([self.omc_path, '-s',simpath], stdout=PIPE, stderr=PIPE)
+ self.stdout, self.stderr = process.communicate()
+
+ os.chdir(self.curr_path)
+
+ csvpath = os.path.join(self.sim_dir_path,'Graph_res.csv')
+
+ self.datay = []
+ self.datax1 = []
+ self.datax2 = []
+ self.rows = []
+
+ with open (csvpath,'r') as resultFile:
+ self.resdata = []
+ csvreader = csv.reader(resultFile,delimiter=',')
+ for row in csvreader:
+ self.resdata.append(row)
+ self.rows.append(row)
+
+ if self.type=='T':
+ for k in range(len(self.rows[0])):
+ if self.rows[0][k][0]=='P':
+ self.datay.append(float(self.rows[1][k]))
+ length = len(self.rows[0][k])
+ if self.rows[0][k][0]=='x' and self.rows[0][k][length-2]=='1':
+ self.datax1.append(float(self.rows[1][k]))
+ if self.rows[0][k][0]=='y' and self.rows[0][k][length-2]=='1':
+ self.datax2.append(float(self.rows[1][k]))
+ else:
+ for k in range(len(self.rows[0])):
+ if self.rows[0][k][0]=='T':
+ self.datay.append(float(self.rows[1][k]))
+ length = len(self.rows[0][k])
+ if self.rows[0][k][0]=='x' and self.rows[0][k][length-2]=='1':
+ self.datax1.append(float(self.rows[1][k]))
+
+ if self.rows[0][k][0]=='y' and self.rows[0][k][length-2]=='1':
+ self.datax2.append(float(self.rows[1][k]))
+
+ plt = pg.PlotWidget()
+ plt.showGrid(x=True,y=True)
+ plt.addLegend()
+ plt.setXRange(0,1)
+
+ c1 = plt.plot(self.datax1, self.datay,pen=pg.mkPen('b',width = 1), name='dew points')
+ c2 = plt.plot(self.datax2, self.datay,pen=pg.mkPen('r',width = 1), name='bubble points')
+ view_box = plt.plotItem.vb
+ self.tool_tip = ""
+
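+ # press_event samples the screen pixel colour under the cursor: when it matches
+ # the red/blue pens of the plotted curves, the hovered point's coordinates are
+ # written to the x/y line edits and become the tooltip text.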
+ def press_event(evt):
+ pos = evt
+ mousepoint = view_box.mapSceneToView(pos)
+ roi = pg.ROI(pos)
+ find_color = plt.mapToGlobal(pos.toPoint())
+
+ screen = QGuiApplication.primaryScreen()
+ image = screen.grabWindow(QApplication.desktop().winId()).toImage()
+ colour = QtGui.QColor(image.pixel(find_color.x(),find_color.y()))
+
+ if colour.red()==255 or colour.blue()==255:
+ self.lineEdit_x.setText(str(round(mousepoint.x(),3)))
+ self.lineEdit_y.setText(str(round(mousepoint.y(),3)))
+ self.tool_tip = str(round(mousepoint.x(),3)) + ", " + str(round(mousepoint.y(),3))
+ QApplication.setOverrideCursor(QCursor(QtCore.Qt.CrossCursor))
+ else:
+ self.lineEdit_x.setText("")
+ self.lineEdit_y.setText("")
+ self.tool_tip = ""
+ QApplication.setOverrideCursor(QCursor(QtCore.Qt.ArrowCursor))
+
+ def entered(items):
+ for i in items:
+ if i.__class__.__name__ =="LegendItem":
+ self.lineEdit_x.setText("")
+ self.lineEdit_y.setText("")
+ QApplication.setOverrideCursor(QCursor(QtCore.Qt.ArrowCursor))
+ else:
+ i.setToolTip(self.tool_tip)
+
+ plt.scene().sigMouseMoved.connect(press_event)
+ plt.scene().sigMouseHover.connect(entered)
+
+ plt.setLabel('left',self.other+self.otherunit,units = '')
+ plt.setLabel('bottom',self.comp1+'(mol. frac.)',units = '')
+
+ self.new_tab = plt
+ self.new_tab.setObjectName("Plot "+str(self.counter))
+
+ self.tabWidget.addTab(self.new_tab,"Plot "+str(self.counter))
+ self.counter+=1
diff --git a/src/main/python/utils/ComponentSelector.py b/src/main/python/utils/ComponentSelector.py
new file mode 100644
index 0000000..a522834
--- /dev/null
+++ b/src/main/python/utils/ComponentSelector.py
@@ -0,0 +1,185 @@
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+from PyQt5.uic import loadUiType
+import os, sys
+import pandas as pd
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parent)
+
+from Simulator.Databases.Databases import ChemsepDatabase
+ui_dialog,_ = loadUiType(parentPath+'/ui/utils/ComponentSelector.ui')
+
+
+#df = pd.read_csv("compoundsDatabase.csv")
+
+compound_selected = [] #list storing the components that are selected, initialised as empty
+
+class ComponentSelector(QDialog,ui_dialog):
+ def __init__(self,parent=None):
+ QDialog.__init__(self,parent)
+
+ self.setupUi(self)
+
+ self.dict1=dict()#empty dictionary which will store the obj and its compound
+ #self.DB1=#instance of Database class in Database.py module
+ self.instance=[ChemsepDatabase()] #list of all the instances
+ self.lines=[]
+
+ for i in self.instance:
+ x=i.get_comp_name_list()
+ self.dict1[i]=x
+ self.lines+=x
+ #print(self.lines)
+
+
+ #self.DB1_list=self.DB1.get_comp_name_list()
+ #storingchemsep
+ #database compound list in DB1_list
+ #self.dict1[self.DB1]=self.DB1_list #storing the list as a value and the db as key in dictionary
+ #self.lines=self.DB1_list #combined list of all the edited compounds
+
+ self.model = QStringListModel()
+ self.model.setStringList(self.lines)
+
+ self.completer = QCompleter()
+ self.completer.setCaseSensitivity(Qt.CaseInsensitive)
+ self.completer.setModel(self.model)
+
+ # QCompleter completes the text written in lineedit
+ self.lineEdit.setCompleter(self.completer)
+ self.compoundSelectButton.clicked.connect(self.compound_selection)
+ self.compoundSelectButton.setAutoDefault(False)
+ self.pushButton.clicked.connect(self.accept)
+ self.pushButton_2.clicked.connect(self.cancel)
+ self.pushButton_3.clicked.connect(self.remove_items)
+
+ def final_list(self,*list_name):
+ self.list_final=[]
+ #add multiple lists
+ for i in list_name:
+ self.list_final+=i
+ return (self.list_final)
+
+ def is_compound_selected(self):
+ if not compound_selected:
+ return False
+ else:
+ return True
+
+ #attrib:
+ #CAS for CAS Number
+ #CompoundID for Name
+ #Smiles for Molecular Formula
+ #MolecularWeight for Molecular Weight
+
+#the function below matches the entered compound and returns the database object
+ #of the corresponding database
+
+ def get_object(self,component):
+ for ele in self.dict1:
+ values=self.dict1[ele]
+ for ind in values:
+ if ind ==component:
+ return(ele)
+
+
+#the function below removes the previously added extra string from the compounds
+ def get_original_name(self,component,removing_attrib):
+ self.temp_comp= component.replace(removing_attrib,'')
+ return(self.temp_comp)
+
+ def compound_selection(self, *args):
+ if len(args) == 2:
+ self.comp = args[1] #helpful when loading saved data
+ else:
+ self.comp = self.lineEdit.text() #gets entered text
+ if self.comp in self.lines: #matches with the db
+ if self.comp not in compound_selected:
+ compound_selected.append(self.comp) # appending compound in the list
+ self.obj=self.get_object(self.comp) #obj will store the key of the dictionary
+ #and thus store the instance of the class to which the component belongs
+ self.removing_attrib='(' + self.obj.name + ')' #getting the attribute that is to be removed
+ self.comp=self.get_original_name(self.comp,self.removing_attrib)
+ #getting only air, water etc from air chemsep etc
+
+
+ self.prop_list=self.obj.get_comp_prop(self.comp) #getting prop of the comp
+ #obj is the required class object
+ # self.creating_mo_file()
+ self.final_mo()
+
+ self.lineEdit.clear()
+ #print(compound_selected)
+
+ self.CAS=self.obj.get_value(self.comp,'CAS')
+ self.name=self.comp
+ self.molecular_formula=self.obj.get_value(self.comp, 'StructureFormula')
+ self.molecular_weight=self.obj.get_value(self.comp, 'MolecularWeight')
+
+ dict={'CAS':self.CAS,'Name':self.name,'Molecular Formula':self.molecular_formula,'Molecular Weight':self.molecular_weight}
+ #converted everything to a dictionary which will be passed to the
+ #add_to_table function as a parameter.
+ #print(dict)
+ self.add_to_table(dict)
+ else:
+ self.show_error()
+
+ @staticmethod
+ def set_compounds(compounds):
+ # compound_selected = compounds
+ for i in compounds:
+ compound_selected.append(i)
+
+ def add_to_table(self,a):
+ try:
+ row_position = self.tableWidget.rowCount()
+ self.tableWidget.insertRow(row_position)
+ self.tableWidget.setItem(row_position , 0, QTableWidgetItem(str(a['CAS'])))
+ self.tableWidget.setItem(row_position , 1, QTableWidgetItem(str(a['Name'])))
+ self.tableWidget.setItem(row_position , 2, QTableWidgetItem(str(a['Molecular Formula'])))
+ self.tableWidget.setItem(row_position , 3, QTableWidgetItem(str(a['Molecular Weight'])))
+ except Exception as e:
+ exc_type, exc_obj, exc_tb = sys.exc_info()
+ fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
+ #print(exc_type, fname, exc_tb.tb_lineno)
+
+ def add_compounds_to_list(self,comp): # which list?
+ self.item = QListWidgetItem()
+ self.item.setText(comp)
+ self.listWidget.addItem(self.item)
+
+ def remove_items(self):
+ try:
+ item = self.tableWidget.item(self.tableWidget.currentRow(),1).text() + '(chemsep)'
+ self.tableWidget.removeRow(self.tableWidget.currentRow())
+ compound_selected.remove(item)
+ except Exception as e:
+ print(e)
+
+ def show_error(self):
+ QMessageBox.about(self, 'Important', "Selected Compound is not Available")
+
+ def cancel(self):
+ compound_selected.clear()
+ self.tableWidget.setRowCount(0)
+ self.reject()
+
+ def get_compounds(self):
+ return compound_selected
+
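+ # Writes the property records of all selected compounds into
+ # Simulator/database.mo as a single Modelica package named "database".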
+ def final_mo(self):
+ self.f_mo=open(parentPath+'/Simulator/database.mo','w+')
+ self.f_mo.write('package database\n')
+ for line in self.prop_list:
+ self.f_mo.write(line)
+ self.f_mo.write('\n')
+ self.f_mo.write('\nend database;')
+ self.f_mo.close()
+
+ def accept(self):
+ #self.parent().container.update_compounds()
+ return super().accept()
+
\ No newline at end of file
diff --git a/src/main/python/utils/Container.py b/src/main/python/utils/Container.py
new file mode 100644
index 0000000..486169f
--- /dev/null
+++ b/src/main/python/utils/Container.py
@@ -0,0 +1,250 @@
+from collections import defaultdict
+import datetime
+import pickle
+import os,sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from python.OMChem.Flowsheet import Flowsheet
+from python.utils.ComponentSelector import *
+from python.utils.Graphics import NodeItem, Graphics, dock_widget_lst
+from python.DockWidgets.DockWidget import DockWidget
+
+class Container():
+ def __init__(self,msgbrowser, graphicsView):
+ self.unit_operations = []
+ self.thermo_package = None
+ self.compounds = None
+ self.flowsheet = None
+ self.conn = defaultdict(list)
+ self.op=defaultdict(list)
+ self.ip=defaultdict(list)
+ self.msg = msgbrowser
+ self.graphicsView = graphicsView
+ self.msg.setText("")
+ self.opl=[]
+ self.result=[]
+ self.graphics = Graphics(self.unit_operations, self.graphicsView)
+ self.scene = self.graphics.get_scene()
+
+ def current_time(self):
+ now = datetime.datetime.now()
+ time = str(now.hour) + ":" + str(now.minute) + ":" +str(now.second)
+ return time
+
+ def add_unit_operation(self, obj):
+ box = None
+ self.obj = obj
+ self.scene = self.graphics.get_scene()
+ box = self.graphics.create_node_item(self.obj, self)
+ if box is not None:
+ self.scene.addItem(box)
+ box.setPos(2500-30, 2500-30)
+
+ if(obj in self.unit_operations):
+ pass
+ else:
+ self.unit_operations.append(obj)
+ data = self.unit_operations[:]
+ data.append(compound_selected)
+ push('Undo', data)
+ self.msg.append("["+str(self.current_time())+"] "+obj.name+" is instantiated .""")
+
+ '''
+ Deletes the selected item from the canvas and also the objects created for that type.
+ '''
+ def delete(self,l):
+ for item in l:
+ self.scene.removeItem(item)
+ for i in dock_widget_lst:
+ if i.name == item.name:
+ i.hide()
+ del i
+ break
+
+ if hasattr(item,'input'):
+ for x in item.input:
+ if x.new_line:
+ self.scene.removeItem(x.new_line)
+ del x.new_line
+ if x.other_line:
+ self.scene.removeItem(x.other_line)
+ del x.other_line
+ if hasattr(item,'output'):
+ for x in item.output:
+ if x.new_line:
+ self.scene.removeItem(x.new_line)
+ del x.new_line
+ if x.other_line:
+ self.scene.removeItem(x.other_line)
+ del x.other_line
+ if hasattr(item,'obj'):
+ self.unit_operations.remove(item.obj)
+ for k in list(self.conn):
+ if item.obj==k:
+ del self.conn[k]
+ elif item.obj in self.conn[k]:
+ self.conn[k].remove(item.obj)
+ self.msg.append("["+str(self.current_time())+"] "+item.obj.name+" is deleted .""")
+ del item.obj
+ del item
+
+ clean_file('Redo')
+ data = self.unit_operations[:]
+ data.append(compound_selected)
+ push('Undo', data)
+
+ def fetch_object(self,name):
+ for i in self.unit_operations:
+ if(i.name==name):
+ return i
+
+ def add_compounds(self,comp):
+ self.compounds = comp
+
+ def update_compounds(self):
+ self.graphics.update_compounds()
+
+ def add_thermo_package(self,thermo):
+ self.thermo_package = thermo
+
+ def msg_browser(self):
+ std = self.flowsheet.stdout.decode("utf-8")
+ if(std):
+ stdout = str(std)
+ stdout = stdout.replace("\n", "<br>")
+ self.msg.append(""+stdout+"")
+
+ stde = self.flowsheet.stderr.decode("utf-8")
+ if(stde):
+ stdout = str(stde)
+ stdout = stdout.replace("\n", "<br>")
+ self.msg.append(""+stdout+"")
+
+ def simulate(self,mode):
+
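+ # 'SM' simulates the flowsheet sequentially unit-by-unit, while 'EQN' solves the
+ # whole flowsheet as one equation-oriented OpenModelica model; the results are
+ # stored in self.result and pushed to the unit-operation dock widgets.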
+ self.disableInterfaceforSimulation(True)
+
+ for i in self.graphics.scene.items():
+ if (isinstance(i, NodeItem)):
+ try:
+ i.dock_widget.clear_results()
+ except AttributeError:
+ pass
+
+ #print("SIMULATE")
+ #print(mode)
+ self.compounds = compound_selected
+ self.flowsheet = Flowsheet()
+ self.flowsheet.add_compound_list([c[:c.index('(')] for c in self.compounds])
+ #print("######## connection master#########\n",self.conn)
+ for i in self.unit_operations :
+ self.flowsheet.add_unit_operations(i)
+
+
+ if mode=='SM':
+ self.msg.append("["+str(self.current_time())+"] Simulating in Sequential mode ... ")
+ self.flowsheet.simulate_SM(self.ip,self.op)
+ self.msg_browser()
+ self.result=self.flowsheet.result_data
+
+ elif mode=='EQN':
+ self.msg.append("["+str(self.current_time())+"] Simulating in equation mode ... ")
+ self.flowsheet.simulate_EQN(self.msg)
+ self.result=self.flowsheet.result_data
+
+ if(len(self.result)== 4):
+ #self.msg_browser()
+ self.msg.append("["+str(self.current_time())+"] Simulation Successful.")
+ else:
+ self.msg.append("["+str(self.current_time())+"] Simulation Failed.")
+ #print("under Eqn mode simulation")
+
+ if(len(self.result)== 4):
+ DockWidget.show_result(NodeItem.get_dock_widget())
+
+ for i in self.graphics.scene.items():
+ if (isinstance(i, NodeItem) and i.type == 'MaterialStream'):
+ i.update_tooltip_selectedVar()
+ no_input_lines = len(i.input[0].in_lines)
+ no_output_lines = len(i.output[0].out_lines)
+ if(no_input_lines>0): #If the material stream has an incoming line it is an output stream, so its input data tab is disabled
+ i.obj.disableInputDataTab(i.dock_widget)
+
+ self.disableInterfaceforSimulation(False)
+
+ def enableToolbar(self,status):
+ self.graphicsView.parent().parent().actionNew.setProperty('enabled',status)
+ self.graphicsView.parent().parent().actionZoomIn.setProperty('enabled',status)
+ self.graphicsView.parent().parent().actionZoomOut.setProperty('enabled',status)
+ self.graphicsView.parent().parent().actionResetZoom.setProperty('enabled',status)
+ self.graphicsView.parent().parent().actionEquationOriented.setProperty('enabled',status)
+ self.graphicsView.parent().parent().actionTerminate.setProperty('enabled',not status)
+ self.graphicsView.parent().parent().actionSelectCompounds.setProperty('enabled',status)
+
+ def disableInterfaceforSimulation(self,status):
+ self.graphicsView.parent().parent().menubar.setProperty('enabled',not status)
+ self.enableToolbar(not status)
+ self.graphicsView.parent().parent().dockWidget.setProperty('enabled',not status)
+ self.graphicsView.setInteractive(not status)
+ if status:
+ QApplication.instance().setOverrideCursor(QCursor(Qt.WaitCursor))
+ else:
+ QApplication.instance().restoreOverrideCursor()
+ QApplication.instance().setOverrideCursor(QCursor(Qt.ArrowCursor))
+
+def flat_list(lst):
+ flat_lst=[]
+ for sublist in lst:
+ for item in sublist:
+ flat_lst.append(item)
+ return flat_lst
+
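+# Undo/Redo persistence: canvas states are pickled and appended to Undo.dat /
+# Redo.dat, which therefore behave as on-disk stacks. push() appends a state,
+# pop() removes and returns the newest one, get_last_list() peeks at it and
+# clean_file() truncates the stack.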
+def push(file_name, data):
+ with open(f"{file_name}.dat", "ab") as obj:
+ pickle.dump(data, obj)
+
+def clean_file(file_name):
+ with open(f"{file_name}.dat", "wb") as clean:
+ pass
+
+def pop(file_name):
+ last_command = None
+ if os.stat(f"{file_name}.dat").st_size != 0:
+ commands = []
+ with open(f"{file_name}.dat", "rb") as objs:
+ while True:
+ try:
+ command = pickle.load(objs)
+ commands.append(command)
+ except EOFError:
+ break
+
+ last_command = commands[-1]
+ commands.remove(commands[-1])
+ if len(commands) != 0:
+ with open(f"{file_name}.dat", "wb") as updated_data:
+ for i in range(len(commands)):
+ pickle.dump(commands[i], updated_data)
+ else:
+ clean_file(file_name)
+
+ return last_command
+
+def get_last_list(file_name):
+ commands = []
+ if os.stat(f"{file_name}.dat").st_size != 0:
+ with open(f"{file_name}.dat", "rb") as objs:
+ while True:
+ try:
+ command = pickle.load(objs)
+ commands.append(command)
+ except EOFError:
+ break
+ if len(commands) != 0:
+ return commands[-1]
+ else:
+ return None
diff --git a/src/main/python/utils/Graphics.py b/src/main/python/utils/Graphics.py
new file mode 100644
index 0000000..017ebef
--- /dev/null
+++ b/src/main/python/utils/Graphics.py
@@ -0,0 +1,709 @@
+from PyQt5.QtCore import *
+from PyQt5.QtWidgets import *
+from PyQt5.QtGui import *
+import PyQt5.QtGui as QtGui
+import PyQt5.QtCore as QtCore
+import PyQt5.QtWidgets as QtWidgets
+from PyQt5.QtWidgets import QLineEdit
+import os, sys
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from python.DockWidgets.DockWidget import *
+from python.DockWidgets.DockWidgetMaterialStream import *
+from python.DockWidgets.DockWidgetDistillationColumn import *
+from python.DockWidgets.DockWidgetShortcutColumn import *
+from python.DockWidgets.DockWidgetMixer import *
+from python.DockWidgets.DockWidgetSplitter import *
+from python.DockWidgets.DockWidgetFlash import *
+from python.DockWidgets.DockWidgetCompoundSeparator import *
+from python.DockWidgets.DockWidgetCompressorExpander import *
+from python.utils.Container import *
+from python.utils.Streams import *
+from python.utils.UnitOperations import *
+from python.utils.ComponentSelector import *
+
+class Graphics(QDialog, QtWidgets.QGraphicsItem):
+
+ def __init__(self, unit_operations, graphicsView):
+ QDialog.__init__(self)
+ QtWidgets.QGraphicsItem.__init__(self)
+ self.scene = QGraphicsScene()
+ self.scene.setItemIndexMethod(QGraphicsScene.BspTreeIndex)
+ self.graphicsView = graphicsView
+ self.pos = None
+ self.unit_operations = unit_operations
+ self.graphicsView.horizontalScrollBarVal = self.graphicsView.horizontalScrollBar().value()
+
+ def get_scene(self):
+ return self.scene
+
+ def create_node_item(self,unit_operation, container):
+ tempItem = NodeItem(unit_operation, container, self.graphicsView)
+ if tempItem.ok:
+ return tempItem
+ else:
+ return None
+
+ def update_compounds(self):
+ for i in self.graphicsView.items():
+ if isinstance(i, NodeItem):
+ i.update_compounds()
+
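+ # Rebuilds a saved flowsheet: first re-creates a NodeItem for every unit
+ # operation at its stored position, then re-draws the NodeLine connections
+ # from each unit's recorded input/output stream maps.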
+ def load_canvas(self, obj, container):
+ stm = ['MaterialStream','EngStm']
+ for i in obj:
+ if i in self.unit_operations:
+ pass
+ else:
+ self.unit_operations.append(i)
+ type(i).counter += 1
+ #print(self.unit_operations)
+ new_box = self.create_node_item(i, container)
+ new_box.setPos(i.pos.toPoint().x(), i.pos.toPoint().y())
+ self.scene.addItem(new_box)
+
+ for i in obj:
+ if i.type == "MaterialStream":
+ pass
+ #print(eval(i.type))
+ elif i.type not in stm:
+ ip = i.input_stms
+ op = i.output_stms
+ for k, v in ip.items():
+ pointA = NodeItem.get_instances(v.name)
+ pointB = NodeItem.get_instances(i.name)
+ rect = pointA.output[0].boundingRect()
+ pointAA = QtCore.QPointF(rect.x() + rect.width()/(2), rect.y() + rect.height()/(2))
+ pointAA = pointA.output[0].mapToScene(pointAA)
+ socketB = next((s for s in pointB.input if k == s.id))
+ rectB = socketB.boundingRect()
+ pointBB = QtCore.QPointF(rectB.x() + rectB.width()/(2), rectB.y() + rectB.height()/(2))
+ pointBB = socketB.mapToScene(pointBB)
+ self.new_line = NodeLine(pointAA, pointBB, 'in')
+ self.new_line.source = pointA.output[0]
+ self.new_line.target = socketB
+ pointA.output[0].out_lines.append(self.new_line)
+ socketB.in_lines.append(self.new_line)
+ pointA.output[0].other_line = self.new_line
+ socketB.other_line = self.new_line
+ self.scene.addItem(self.new_line)
+ self.new_line.updatePath()
+ for k, v in op.items():
+ pointA = NodeItem.get_instances(i.name)
+ pointB = NodeItem.get_instances(v.name)
+ socketA = next(s for s in pointA.output if k == s.id)
+ rect = socketA.boundingRect()
+ pointAA = QtCore.QPointF(rect.x() + rect.width()/(2), rect.y() + rect.height()/(2))
+ pointAA = socketA.mapToScene(pointAA)
+ rectB = pointB.input[0].boundingRect()
+ pointBB = QtCore.QPointF(rectB.x() + rectB.width()/(2), rectB.y() + rectB.height()/(2))
+ pointBB = pointB.input[0].mapToScene(pointBB)
+ self.new_line = NodeLine(pointAA, pointBB, 'out')
+ self.new_line.source = socketA
+ self.new_line.target = pointB.input[0]
+ socketA.out_lines.append(self.new_line)
+ pointB.input[0].in_lines.append(self.new_line)
+ socketA.other_line = self.new_line
+ pointB.input[0].other_line = self.new_line
+ self.scene.addItem(self.new_line)
+ self.new_line.updatePath()
+
+
+class NodeLine(QtWidgets.QGraphicsPathItem):
+ def __init__(self, pointA, pointB , socket):
+ super(NodeLine, self).__init__()
+ self._pointA = pointA
+ self._pointB = pointB
+ self.socket = socket
+ self._source = None
+ self._target = None
+ self.setZValue(-1)
+ self.setBrush(QtGui.QColor(0,70,70,120))
+ self.pen = QtGui.QPen()
+ self.pen.setStyle(QtCore.Qt.SolidLine)
+ self.pen.setWidth(2)
+ self.pen.setColor(QtGui.QColor(0,70,70,220))
+ self.setPen(self.pen)
+
+ def updatePath(self):
+
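+ # Routes the connection as a series of cubic segments: when the target socket
+ # lies to the left of (or within 30 px of) the source, the path detours around
+ # the nodes; otherwise it simply bends at the horizontal midpoint between the
+ # two sockets.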
+ if (self._pointB.x() - self._pointA.x()) < 30:
+ path = QtGui.QPainterPath()
+ midptx = (self.pointA.x() + 13)
+
+ ctrl1_1 = QtCore.QPointF(self.pointA.x(), self.pointA.y())
+ ctrl2_1 = QtCore.QPointF(self.pointA.x(), self.pointA.y())
+ pt1 = QtCore.QPointF(midptx , self.pointA.y())
+ path.moveTo(pt1)
+ path.cubicTo(ctrl1_1, ctrl2_1, pt1)
+
+ if abs(self.pointB.x()-midptx) > 150:
+ ctrl1_2 = QtCore.QPointF(midptx, self.pointA.y())
+ ctrl2_2 = QtCore.QPointF(midptx, self.pointA.y())
+ pt2 = QtCore.QPointF(midptx , self.pointA.y()+100)
+ path.cubicTo(ctrl1_2, ctrl2_2, pt2)
+ path.moveTo(pt2)
+
+ ctrl1_3 = QtCore.QPointF(midptx, self.pointA.y()+100)
+ ctrl2_3 = QtCore.QPointF(midptx, self.pointA.y()+100)
+ pt3 = QtCore.QPointF(self.pointB.x()-13, self.pointA.y()+100)
+ path.cubicTo(ctrl1_3, ctrl2_3, pt3)
+ path.moveTo(pt3)
+
+ ctrl1_4 = QtCore.QPointF(self.pointB.x()-13, self.pointA.y()+100)
+ ctrl2_4 = QtCore.QPointF(self.pointB.x()-13, self.pointA.y()+100)
+ pt4 = QtCore.QPointF(self.pointB.x()-13, self.pointB.y())
+ path.cubicTo(ctrl1_4, ctrl2_4, pt4)
+ path.moveTo(pt4)
+
+ ctrl1_5 = QtCore.QPointF(self.pointB.x()-13, self.pointB.y())
+ ctrl2_5 = QtCore.QPointF(self.pointB.x()-13, self.pointB.y())
+ pt5 = QtCore.QPointF(self.pointB.x(), self.pointB.y())
+ path.cubicTo(ctrl1_5, ctrl2_5, pt5)
+ path.moveTo(pt5)
+
+ self.setPath(path)
+ return
+ else:
+ ctrl1_2 = QtCore.QPointF(midptx, self.pointA.y())
+ ctrl2_2 = QtCore.QPointF(midptx, self.pointA.y())
+ pt2 = QtCore.QPointF(midptx , max(self.pointB.y(), self.pointA.y())-(abs(self.pointA.y()-self.pointB.y())/2))
+ path.cubicTo(ctrl1_2, ctrl2_2, pt2)
+ path.moveTo(pt2)
+
+ ctrl1_3 = QtCore.QPointF(midptx, max(self.pointB.y(), self.pointA.y())-(abs(self.pointA.y()-self.pointB.y())/2))
+ ctrl2_3 = QtCore.QPointF(midptx, max(self.pointB.y(), self.pointA.y())-(abs(self.pointA.y()-self.pointB.y())/2))
+ pt3 = QtCore.QPointF(self.pointB.x()-13, max(self.pointB.y(), self.pointA.y())-(abs(self.pointA.y()-self.pointB.y())/2))
+ path.cubicTo(ctrl1_3, ctrl2_3, pt3)
+ path.moveTo(pt3)
+
+ ctrl1_4 = QtCore.QPointF(self.pointB.x()-13, max(self.pointB.y(), self.pointA.y())-(abs(self.pointA.y()-self.pointB.y())/2))
+ ctrl2_4 = QtCore.QPointF(self.pointB.x()-13, max(self.pointB.y(), self.pointA.y())-(abs(self.pointA.y()-self.pointB.y())/2))
+ pt4 = QtCore.QPointF(self.pointB.x()-13, self.pointB.y())
+ path.cubicTo(ctrl1_4, ctrl2_4, pt4)
+ path.moveTo(pt4)
+
+ ctrl1_5 = QtCore.QPointF(self.pointB.x()-13, self.pointB.y())
+ ctrl2_5 = QtCore.QPointF(self.pointB.x()-13, self.pointB.y())
+ pt5 = QtCore.QPointF(self.pointB.x(), self.pointB.y())
+ path.cubicTo(ctrl1_5, ctrl2_5, pt5)
+ path.moveTo(pt5)
+
+ self.setPath(path)
+ return
+
+ path = QtGui.QPainterPath()
+ path.moveTo(self.pointA)
+ midptx = 0.5*(self.pointA.x() + self.pointB.x())
+
+ ctrl1_1 = QtCore.QPointF(self.pointA.x(), self.pointA.y())
+ ctrl2_1 = QtCore.QPointF(self.pointA.x(), self.pointA.y())
+ pt1 = QtCore.QPointF(midptx , self.pointA.y())
+ path.cubicTo(ctrl1_1, ctrl2_1, pt1)
+ path.moveTo(pt1)
+
+ ctrl1_2 = QtCore.QPointF(midptx, self.pointA.y())
+ ctrl2_2 = QtCore.QPointF(midptx, self.pointA.y())
+ pt2 = QtCore.QPointF(midptx , self.pointB.y())
+ path.cubicTo(ctrl1_2, ctrl2_2, pt2)
+ path.moveTo(pt2)
+
+ ctrl1_3 = QtCore.QPointF(midptx, self.pointB.y())
+ ctrl2_3 = QtCore.QPointF(midptx, self.pointB.y())
+ path.cubicTo(ctrl1_3, ctrl2_3, self.pointB)
+ path.moveTo(self.pointB)
+ self.setPath(path)
+
+ def paint(self, painter, option, widget):
+ painter.setPen(self.pen)
+ painter.drawPath(self.path())
+
+ @property
+ def pointA(self):
+ return self._pointA
+
+ @pointA.setter
+ def pointA(self, point):
+ self._pointA = point
+ self.updatePath()
+
+ @property
+ def pointB(self):
+ return self._pointB
+
+ @pointB.setter
+ def pointB(self, point):
+ self._pointB = point
+ self.updatePath()
+
+ @property
+ def source(self):
+ return self._source
+
+ @source.setter
+ def source(self, widget):
+ self._source = widget
+
+ @property
+ def target(self):
+ return self._target
+
+ @target.setter
+ def target(self, widget):
+ self._target = widget
+
+ def __delete__(self,instance):
+ del self._source
+ del self._target
+ del self._pointA
+ del self._pointB
+
+class NodeSocket(QtWidgets.QGraphicsItem):
+ def __init__(self, rect, parent, socketType, id):
+ super(NodeSocket, self).__init__(parent)
+ self.rect = rect
+ self.type = socketType
+ self.parent=parent
+ self.id = id
+ self.setAcceptHoverEvents(True)
+ self.new_line=None
+ self.other_line=None
+
+ # Brush
+ self.brush = QtGui.QBrush(Qt.transparent)
+ # Pen
+ self.pen = QtGui.QPen(Qt.NoPen)
+
+ # Lines
+ self.out_lines = []
+ self.in_lines = []
+
+ def shape(self):
+ path = QtGui.QPainterPath()
+ path.addEllipse(self.boundingRect())
+ return path
+
+ def boundingRect(self):
+ return QtCore.QRectF(self.rect)
+
+ def paint(self, painter, option, widget):
+
+ painter.setPen(self.pen)
+ painter.drawEllipse(self.rect.x(),self.rect.y(),self.rect.height(),self.rect.width())
+ painter.setBrush(self.brush)
+ painter.drawEllipse(self.rect.x()+2,self.rect.y()+2,(self.rect.height()/3)*2,(self.rect.width()/3)*2)
+
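+ # Dragging from a socket starts a new NodeLine: an output ('op') socket anchors
+ # the line's start point, an input ('in') socket anchors its end point; the
+ # connection is completed or discarded in mouseReleaseEvent.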
+ def mousePressEvent(self, event):
+ cursor = QCursor( Qt.ArrowCursor )
+ QApplication.instance().setOverrideCursor(cursor)
+
+ if self.type == 'op':
+ rect = self.boundingRect()
+ pointA = QtCore.QPointF(rect.x() + rect.width()/(2), rect.y() + rect.height()/(2))
+ pointA = self.mapToScene(pointA)
+ pointB = self.mapToScene(event.pos())
+ self.new_line = NodeLine(pointA, pointB ,'op')
+ self.out_lines.append(self.new_line)
+ self.scene().addItem(self.new_line)
+ elif self.type == 'in':
+ rect = self.boundingRect()
+ pointA = self.mapToScene(event.pos())
+ pointB = QtCore.QPointF(rect.x() + rect.width()/(2), rect.y() + rect.height()/(2))
+ pointB = self.mapToScene(pointB)
+ self.new_line = NodeLine(pointA, pointB, 'in')
+ self.in_lines.append(self.new_line)
+ self.scene().addItem(self.new_line)
+ else:
+ super(NodeSocket, self).mousePressEvent(event)
+
+ def mouseMoveEvent(self, event):
+
+ if self.type == 'op':
+ item = self.scene().itemAt(event.scenePos().toPoint(),QtGui.QTransform())
+ if(isinstance(item,NodeSocket)):
+ QApplication.instance().setOverrideCursor(QCursor( Qt.PointingHandCursor))
+ else:
+ QApplication.instance().restoreOverrideCursor()
+ QApplication.instance().setOverrideCursor(QCursor( Qt.ArrowCursor))
+ pointB = self.mapToScene(event.pos())
+ self.new_line.pointB = pointB
+ if self.other_line:
+ self.other_line.pointB=pointB
+ elif self.type == 'in':
+ pointA = self.mapToScene(event.pos())
+ self.new_line.pointA = pointA
+ if self.other_line:
+ self.other_line.pointA=pointA
+ else:
+ super(NodeSocket, self).mouseMoveEvent(event)
+
+ def mouseReleaseEvent(self, event):
+ cursor = QCursor( Qt.ArrowCursor )
+ QApplication.instance().setOverrideCursor(cursor)
+
+ item = self.scene().itemAt(event.scenePos().toPoint(),QtGui.QTransform())
+ stm = ['MaterialStream','EngStm']
+ item.other_line=self.new_line
+ if (self.type == 'op') and (item.type == 'in'):
+ self.new_line.source = self
+ self.new_line.target = item
+ item.in_lines.append(self.new_line)
+ self.new_line.pointB = item.get_center()
+ #print(type(self.new_line.source))
+ if self.new_line.source.parent.obj.type not in stm:
+ self.new_line.source.parent.obj.add_connection(0, self.new_line.source.id, self.new_line.target.parent.obj)
+ if self.new_line.target.parent.obj.type not in stm:
+ self.new_line.target.parent.obj.add_connection(1, self.new_line.target.id, self.new_line.source.parent.obj) # Input stream if flag is 1
+
+ sc = self.new_line.source.parent
+ tg = self.new_line.target.parent
+ if(sc.obj.type == 'MaterialStream'):
+ sc_no_input_lines = len(sc.input[0].in_lines)
+ if(sc_no_input_lines > 0):
+ sc.obj.disableInputDataTab(sc.dock_widget)
+ if(tg.obj.type == 'MaterialStream'):
+ tg_no_input_lines = len(tg.input[0].in_lines)
+ if(tg_no_input_lines > 0):
+ tg.obj.disableInputDataTab(tg.dock_widget)
+
+ elif (self.type =='in') and (item.type == 'op'):
+ self.new_line.source = item
+ self.new_line.target = self
+ item.out_lines.append(self.new_line)
+ self.new_line.pointA = item.get_center()
+ #print(type(self.new_line.source))
+ if self.new_line.source.parent.obj.type not in stm:
+ self.new_line.source.parent.obj.add_connection(0, self.new_line.source.id, self.new_line.target.parent.obj)
+ if self.new_line.target.parent.obj.type not in stm:
+ self.new_line.target.parent.obj.add_connection(1, self.new_line.target.id, self.new_line.source.parent.obj)
+
+ sc = self.new_line.source.parent
+ tg = self.new_line.target.parent
+ if(sc.obj.type == 'MaterialStream'):
+ sc_no_input_lines = len(sc.input[0].in_lines)
+ if(sc_no_input_lines > 0):
+ sc.obj.disableInputDataTab(sc.dock_widget)
+ if(tg.obj.type == 'MaterialStream'):
+ tg_no_input_lines = len(tg.input[0].in_lines)
+ if(tg_no_input_lines > 0):
+ tg.obj.disableInputDataTab(tg.dock_widget)
+
+ else:
+ self.scene().removeItem(self.new_line)
+ if(self.new_line in self.in_lines):
+ self.in_lines.remove(self.new_line)
+ if(self.new_line in self.out_lines):
+ self.out_lines.remove(self.new_line)
+ del self.new_line
+ super(NodeSocket, self).mouseReleaseEvent(event)
+
+ try:
+ data = get_last_list('Undo')
+ comp_selected = data[-1]
+ data.remove(comp_selected)
+ for i in range(len(data)):
+ if data[i].name == self.new_line.source.parent.obj.name:
+ data[i] = self.new_line.source.parent.obj
+ elif data[i].name == self.new_line.target.parent.obj.name:
+ data[i] = self.new_line.target.parent.obj
+ data.append(comp_selected)
+ push('Undo', data)
+ except Exception as e:
+ print(e)
+
+ def get_center(self):
+ rect = self.boundingRect()
+ center = QtCore.QPointF(rect.x() + rect.width()/(2), rect.y() + rect.height()/(2))
+ center = self.mapToScene(center)
+ return center
+
+ def hoverEnterEvent(self, event):
+ cursor = QCursor( Qt.PointingHandCursor)
+ QApplication.instance().setOverrideCursor(cursor)
+
+ def hoverLeaveEvent(self, event):
+ cursor = QCursor( Qt.ArrowCursor )
+ QApplication.instance().setOverrideCursor(cursor)
+
+ def show(self):
+ # set pen to show
+ self.pen = QPen(QtGui.QColor(0,70,70,220), 1, Qt.SolidLine)
+ self.brush = QBrush(QtGui.QColor(140,199,198,255))
+
+ def hide(self):
+ # set pen to transparent
+ self.pen = QPen(Qt.NoPen)
+ self.brush = QBrush(Qt.transparent)
+
+# every created NodeItem is stored in this list;
+# it is used when recreating connection lines: get_instances() returns the NodeItem matching a unit operation's name
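+# dock_widget_lst collects every dock widget created for a node so the main window can retrieve them later
+# stack records dock widgets in the order they were shown so the previously open one can be hidden on double-click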
+lst = []
+dock_widget_lst = []
+stack = []
+
+class NodeItem(QtWidgets.QGraphicsItem):
+
+ @staticmethod
+ def get_instances(namee):
+ for i in lst:
+ if i.name == namee:
+ return i
+
+ @staticmethod
+ def get_dock_widget():
+ return dock_widget_lst
+
+ def __init__(self,unit_operation, container, graphicsView):
+ l = ['Splitter','Mixer', 'DistillationColumn', 'Flash', 'CompoundSeparator', 'ShortcutColumn']
+ stm = ['MaterialStream', 'EnergyStream']
+ super(NodeItem, self).__init__()
+ self.obj = unit_operation
+ self.container = container
+ self.graphicsView = graphicsView
+ self.setAcceptHoverEvents(True)
+ self.name = self.obj.name
+ self.type = self.obj.type
+ self.ok = True
+ if (self.obj.modes_list):
+ default_tooltip = f"{self.name}\n\n"
+ default_tooltip_dict = self.obj.param_getter_tooltip(self.obj.mode)
+ for i, j in default_tooltip_dict.items():
+ if j is not None:
+ default_tooltip = default_tooltip + f" {i} : {j}\n"
+ self.setToolTip(default_tooltip)
+
+ dlg = QMessageBox()
+ dlg.setWindowTitle("Error")
+ dlg.setIcon(QMessageBox.Critical)
+ dlg.setText('Enter valid input value!')
+
+ if self.obj.type == 'Mixer' and not self.obj.saved:
+ text, self.ok = QInputDialog.getText(self.container.graphicsView, 'Mixer', 'Enter number of inputs (2-4):',
+ echo=QLineEdit.Normal, text=str(self.obj.no_of_inputs))
+ while self.ok and (int(text)< 2 or int(text) > 4):
+ dlg.exec_()
+ text, self.ok = QInputDialog.getText(self.container.graphicsView, 'Mixer', 'Enter number of inputs (2-4):',
+ echo=QLineEdit.Normal, text=str(self.obj.no_of_inputs))
+ if self.ok:
+ self.nin = int(text)
+ self.obj.no_of_inputs = self.nin
+ self.obj.variables['NI']['value'] = self.nin
+ # elif self.obj.type == 'Splitter' and not self.obj.saved:
+ # text, ok = QInputDialog.getText(self.container.graphicsView, 'Splitter', 'Enter number of output:')
+ # if ok and text:
+ # self.nop = int(text)
+ # self.obj.no_of_outputs = self.nop
+ # self.obj.variables['No']['value'] = self.nop
+ elif self.obj.type == 'DistillationColumn'and not self.obj.saved:
+ text, self.ok = QInputDialog.getText(self.container.graphicsView, 'DistillationColumn', 'Enter number of inputs (1-8):',
+ echo=QLineEdit.Normal, text=str(self.obj.no_of_inputs))
+ while self.ok and (int(text)< 1 or int(text) > 8):
+ dlg.exec_()
+ text, self.ok = QInputDialog.getText(self.container.graphicsView, 'DistillationColumn', 'Enter number of inputs (1-8):',
+ echo=QLineEdit.Normal, text=str(self.obj.no_of_inputs))
+ if self.ok:
+ self.nin = int(text)
+ self.obj.no_of_inputs = self.nin
+ self.obj.variables['Ni']['value'] = self.nin
+
+ self.nin = self.obj.no_of_inputs
+ self.nop = self.obj.no_of_outputs
+
+ self.dock_widget = None
+ lst.append(self)
+ if self.obj.type in l:
+ self.dock_widget = eval("DockWidget"+self.obj.type)(self.obj.name,self.obj.type,self.obj,self.container)
+ elif self.obj.type in stm:
+ self.dock_widget = eval("DockWidget"+self.obj.type)(self.obj.name,self.obj.type,self.obj,self.container)
+ elif self.obj.type == "AdiabaticCompressor" or self.obj.type == "AdiabaticExpander":
+ self.dock_widget = eval("DockWidgetCompressorExpander")(self.obj.name,self.obj.type,self.obj,self.container)
+ else:
+ self.dock_widget = DockWidget(self.obj.name,self.obj.type,self.obj,self.container)
+ dock_widget_lst.append(self.dock_widget)
+ self.main_window= findMainWindow(self)
+ self.dock_widget.setFixedWidth(360)
+ self.dock_widget.setFixedHeight(640)
+ self.dock_widget.DockWidgetFeature(QDockWidget.AllDockWidgetFeatures)
+ self.main_window.addDockWidget(Qt.LeftDockWidgetArea, self.dock_widget)
+
+ # updating input values
+ if self.dock_widget.obj.type != 'MaterialStream':
+ pass
+ #print(self.dock_widget.obj.type)
+ try:
+ self.dock_widget.obj.param_setter(self.dock_widget.obj.param_getter(self.dock_widget.obj.mode))
+ except Exception as e:
+ print(e)
+ # self.dock_widget.param()
+
+ self.dock_widget.hide()
+
+ self.pic=QtGui.QPixmap(parentPath+"/resources/base/Icons/"+self.type+".png")
+ self.rect = QtCore.QRect(0,0,self.pic.width(),self.pic.height())
+ self.text = QGraphicsTextItem(self)
+ f = QFont()
+ f.setPointSize(8)
+ self.text.setFont(f)
+ self.text.setDefaultTextColor(QtGui.QColor(0,70,70,220))
+ self.text.setParentItem(self)
+ self.text.setPos(self.rect.width()-(self.rect.width()*0.9), self.rect.height())
+ self.text.setPlainText(self.name)
+
+ self.setFlag(QtWidgets.QGraphicsPixmapItem.ItemIsMovable)
+ self.setFlag(QtWidgets.QGraphicsPixmapItem.ItemIsSelectable)
+ self.setFlag(QGraphicsItem.ItemSendsGeometryChanges)
+
+ # Brush
+ self.brush = QtGui.QBrush()
+ self.brush.setStyle(QtCore.Qt.SolidPattern)
+ self.brush.setColor(QtGui.QColor(80,0,90,255))
+ # Pen
+ self.pen = QtGui.QPen()
+ self.pen.setStyle(QtCore.Qt.SolidLine)
+ self.pen.setWidth(1)
+ self.pen.setColor(QtGui.QColor(20,20,20,255))
+
+ self.sel_pen = QtGui.QPen()
+ self.sel_pen.setStyle(QtCore.Qt.SolidLine)
+ self.sel_pen.setWidth(1)
+ self.sel_pen.setColor(QtGui.QColor(220,220,220,255))
+
+ # initializing the node sockets
+ self.input , self.output = self.initialize_sockets(self.type)
+
+ def shape(self):
+ path = QtGui.QPainterPath()
+ path.addRect(self.boundingRect())
+ return path
+
+ def boundingRect(self):
+ return QtCore.QRectF(self.rect)
+
+ def paint(self, painter, option, widget):
+ if self.isSelected():
+ painter.setPen(self.sel_pen)
+ painter.drawRect(QtCore.QRectF(self.rect))
+ else:
+ painter.setPen(self.pen)
+ painter.drawPixmap(self.rect,self.pic)
+
+ def initialize_sockets(self,type):
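+ # Create the input/output NodeSocket rectangles for this unit operation. The pixel
+ # offsets below appear to be hand-tuned per icon so the sockets line up with the
+ # ports drawn in the corresponding PNG.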
+ if(self.type=="Flash" or self.type=="CompoundSeparator"):
+ input = [NodeSocket(QtCore.QRect(1,(self.rect.height()*x/(self.nin+1)-6),4*3,4*3), self, 'in', x) for x in range(1,self.nin+1) ]
+ output = [NodeSocket(QtCore.QRect(self.rect.width()-13,(self.rect.height()*x*1/(self.nop+1))-4,4*3,4*3), self, 'op', x) for x in range(1,self.nop+1)]
+ return input,output
+ elif(self.type=="AdiabaticCompressor" or self.type=="AdiabaticExpander" or self.type =="Mixer" or self.type =="Splitter" or self.type =="Valve" ):
+ input = [NodeSocket(QtCore.QRect(-6.5, (self.rect.height()*x/(self.nin+1))-6,4*3,4*3), self, 'in', x) for x in range(1,self.nin+1) ]
+ output = [NodeSocket(QtCore.QRect(self.rect.width()-6.5,(self.rect.height()*x/(self.nop+1))-6,4*3,4*3), self, 'op', x) for x in range(1,self.nop+1)]
+ return input,output
+ elif(self.type=="Cooler" or self.type=="Heater"):
+ input = [NodeSocket(QtCore.QRect(-0.5, (self.rect.height()*x/(self.nin+1))-6,4*3,4*3), self, 'in', x) for x in range(1,self.nin+1) ]
+ output = [NodeSocket(QtCore.QRect(self.rect.width()-12.0,(self.rect.height()*x/(self.nop+1))-6,4*3,4*3), self, 'op', x) for x in range(1,self.nop+1)]
+ return input,output
+ elif(self.type=="CentrifugalPump"):
+ input = [NodeSocket(QtCore.QRect(-6.5,(self.rect.height()*x/(self.nin+1))-11, 4*3,4*3), self, 'in', x) for x in range(1,self.nin+1) ]
+ output = [NodeSocket(QtCore.QRect(self.rect.width()-6.5,-5.5,4*3,4*3), self, 'op', x) for x in range(1,self.nop+1)]
+ return input,output
+ elif(self.type=="DistillationColumn" or self.type=="ShortcutColumn"):
+ input = [NodeSocket(QtCore.QRect(-6.5,(self.rect.height()*x/(self.nin+1)-4),4*3,4*3), self, 'in', x) for x in range(1,self.nin+1) ]
+ output = [NodeSocket(QtCore.QRect(self.rect.width()-9.5,(self.rect.height()*1.44*x/(self.nop+1))-59,4*3,4*3), self, 'op', x) for x in range(1,self.nop+1)]
+ return input,output
+ elif(self.type=="MaterialStream"):
+ input = [NodeSocket(QtCore.QRect(-6.5,(self.rect.height()*x/(self.nin+1)-6),4*3,4*3), self, 'in', x) for x in range(1,self.nin+1) ]
+ output = [NodeSocket(QtCore.QRect(self.rect.width()-6.5,(self.rect.height()*x/(self.nin+1)-6),4*3,4*3), self, 'op', x) for x in range(1,self.nop+1)]
+ return input,output
+
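+ # Dragging a node: re-anchor both endpoints of every connection line in the scene so
+ # the lines follow their sockets, then record the new position on the unit operation.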
+ def mouseMoveEvent(self, event):
+ super(NodeItem, self).mouseMoveEvent(event)
+ items = self.graphicsView.items()
+ for i in items:
+ if(type(i) == NodeItem):
+ for op in i.output:
+ for line in op.out_lines:
+ line.pointA = line.source.get_center()
+ line.pointB = line.target.get_center()
+ for ip in i.input:
+ for line in ip.in_lines:
+ line.pointA = line.source.get_center()
+ line.pointB = line.target.get_center()
+ self.pos = event.scenePos()
+ self.obj.set_pos(self.pos)
+
+ def mouseDoubleClickEvent(self, event):
+
+ self.graphicsView.horizontalScrollBarVal = self.graphicsView.horizontalScrollBar().value()
+ self.graphicsView.setInteractive(False)
+ if len(stack):
+ stack[-1].hide()
+ self.dock_widget.show()
+ stack.append(self.dock_widget)
+ self.graphicsView.setInteractive(True)
+
+ def update_tooltip(self):
+ default_tooltip = f"{self.name}\n\n"
+ default_tooltip_dict = self.obj.param_getter_tooltip(self.obj.mode)
+ for i, j in default_tooltip_dict.items():
+ if j is not None:
+ default_tooltip = default_tooltip + f" {i} : {j}\n"
+ self.setToolTip(default_tooltip)
+
+ def update_tooltip_selectedVar(self):
+ default_tooltip = f"{self.name}\n\n"
+ default_tooltip_dict = self.obj.param_getter_tooltip_selectedVar()
+ for i, j in default_tooltip_dict.items():
+ if j is not None:
+ default_tooltip = default_tooltip + f" {i} : {j}\n"
+ self.setToolTip(default_tooltip)
+
+ def update_compounds(self):
+ try:
+ self.obj.update_compounds()
+ self.dock_widget.update_compounds()
+ except AttributeError:
+ pass
+
+ def hoverEnterEvent(self, event):
+ super(NodeItem,self).hoverEnterEvent(event)
+ for i in self.graphicsView.items():
+ if(isinstance(i,NodeItem)):
+ for ip in i.input:
+ ip.show()
+ for op in i.output:
+ op.show()
+
+ def hoverLeaveEvent(self, event):
+ super(NodeItem,self).hoverLeaveEvent(event)
+ for i in self.graphicsView.items():
+ if(isinstance(i,NodeItem)):
+ for ip in i.input:
+ ip.hide()
+ for op in i.output:
+ op.hide()
+
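+ # Clamp node movement so the item (plus a small margin) always stays inside the
+ # scene rectangle of the flowsheet view.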
+ def itemChange(self, change, value):
+ newPos = value
+ if change == self.ItemPositionChange and self.scene():
+ rect = self.container.graphicsView.sceneRect()
+ width = self.boundingRect().width()
+ height = self.boundingRect().height()
+ eWH1 = QPointF(newPos.x()+width,newPos.y()+height)
+ eWH2 = QPointF(newPos.x()-width,newPos.y()-height)
+ if not rect.__contains__(eWH1) or not rect.__contains__(eWH2) :
+ newPos.setX(min(rect.right()-width-40, max(newPos.x(), rect.left())))
+ newPos.setY(min(rect.bottom()-height-35, max(newPos.y(), rect.top())))
+ self.obj.set_pos(newPos)
+ return super(NodeItem,self).itemChange(change, newPos)
+
+def findMainWindow(self):
+ '''
+ Global function to find the (open) QMainWindow in application
+ '''
+ app = QApplication.instance()
+ for widget in app.topLevelWidgets():
+ if isinstance(widget, QMainWindow):
+ return widget
+ return None
diff --git a/src/main/python/utils/Streams.py b/src/main/python/utils/Streams.py
new file mode 100644
index 0000000..3f9f895
--- /dev/null
+++ b/src/main/python/utils/Streams.py
@@ -0,0 +1,451 @@
+import json
+import sys,os
+
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from PyQt5.QtCore import *
+from python.utils.ComponentSelector import compound_selected
+
+class MaterialStream():
+ counter = 1
+ def __init__(self, compound_names = []):
+
+ self.name = 'MaterialStream' + str(MaterialStream.counter)
+ self.type = 'MaterialStream'
+
+ self.compound_names = compound_names
+ self.count = MaterialStream.counter
+ self.thermo_package ="RaoultsLaw"
+ self.mode1 = "P"
+ self.mode2 = "T"
+
+ self.mode1_val = ""
+ self.mode2_val = ""
+ self.OM_data_init = ''
+ self.OM_data_eqn = ''
+ self.no_of_inputs = 1
+ self.no_of_outputs = 1
+ self.x = 2500-30
+ self.y = 2500-30
+ self.pos = QPointF(self.x, self.y)
+ MaterialStream.counter+=1
+ self.start_dict = {}
+ self.eqn_dict = {}
+ self.modes_list = ["PT", "PH", "PVF", "TVF", "PS"]
+ self.saved = False
+ self.mode = self.modes_list[0]
+
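+ # Variable keys follow the simulator's OpenModelica naming convention:
+ # phase index 1 = overall mixture, 2 = liquid, 3 = vapour; the *_pc entries
+ # are per-compound values filled in by init_variables().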
+ self.variables = {
+ 'P' : {'name':'Pressure', 'value':101325, 'unit':'Pa'},
+ 'T' : {'name':'Temperature', 'value':300, 'unit':'K'},
+
+ 'xvap' : {'name':'Vapour Mole Fraction', 'value':None, 'unit':''},
+ 'H_p[1]' : {'name':'Mixture Molar Enthalpy', 'value':None, 'unit':'J/mol'},
+ 'S_p[1]' : {'name':'Mixture Molar Entropy', 'value':None, 'unit':'J/mol.K'},
+ 'F_p[1]' : {'name':'Mixture Molar Flow', 'value':100, 'unit':'mol/s'},
+ 'Fm_p[1]' : {'name':'Mixture Mass Flow', 'value':None, 'unit':'g/s'},
+
+ 'H_p[2]' : {'name':'Liquid Molar Enthalpy', 'value':None, 'unit':'J/mol'},
+ 'S_p[2]' : {'name':'Liquid Molar Entropy', 'value':None, 'unit':'J/mol.K'},
+ 'F_p[2]' : {'name':'Liquid Molar Flow', 'value':None, 'unit':'mol/s'},
+
+ 'H_p[3]' : {'name':'Vapour Molar Enthalpy', 'value':None, 'unit':'J/mol'},
+ 'S_p[3]' : {'name':'Vapour Molar Entropy', 'value':None, 'unit':'J/mol.K'},
+ 'F_p[3]' : {'name':'Vapour Molar Flow', 'value':None, 'unit':'mol/s'},
+
+ 'x_pc' : {'name':'Mole Fraction', 'value':[], 'unit':''},
+ 'xm_pc' : {'name':'Mass Fraction', 'value':None, 'unit':''},
+
+ 'F_pc' : {'name':'Mole Flow', 'value':100, 'unit':'mol/s'},
+ 'Fm_pc' : {'name':'Mass Flow', 'value':None, 'unit':'g/s'},
+ }
+ self.init_variables()
+
+ def update_compounds(self):
+ self.compound_names = compound_selected
+
+ def init_variables(self):
+ Nc = len(self.compound_names)
+ for i, val in enumerate(self.compound_names):
+ self.variables['x_pc[1,'+ str(i+1)+']'] = {'name':val + ' Mixture Mole Fraction', 'value':round(1/Nc,4), 'unit':''}
+ self.variables['xm_pc[1,'+ str(i+1)+']'] = {'name':val + ' Mixture Mass Fraction', 'value':None, 'unit':''}
+ self.variables['F_pc[1,'+ str(i+1)+']'] = {'name':val + ' Mixture Mole Flow', 'value':None, 'unit':'mol/s'}
+ self.variables['Fm_pc[1,'+ str(i+1)+']'] = {'name':val + ' Mixture Mass Flow', 'value':None, 'unit':'g/s'}
+
+ self.variables['x_pc[2,'+ str(i+1)+']'] = {'name':[val + ' Liquid Mole Fraction'], 'value':None, 'unit':''}
+ self.variables['xm_pc[2,'+ str(i+1)+']'] = {'name':[val + ' Liquid Mass Fraction'], 'value':None, 'unit':''}
+ self.variables['F_pc[2,'+ str(i+1)+']'] = {'name':[val + ' Liquid Mole Flow'], 'value':None, 'unit':'mol/s'}
+ self.variables['Fm_pc[2,'+ str(i+1)+']'] = {'name':[val + ' Liquid Mass Flow'], 'value':None, 'unit':'g/s'}
+
+ self.variables['x_pc[3,'+ str(i+1)+']'] = {'name':[val + ' Vapour Mole Fraction'], 'value':None, 'unit':''}
+ self.variables['xm_pc[3,'+ str(i+1)+']'] = {'name':[val + ' Vapour Mass Fraction'], 'value':None, 'unit':''}
+ self.variables['F_pc[3,'+ str(i+1)+']'] = {'name':[val + ' Vapour Mole Flow'], 'value':None, 'unit':'mol/s'}
+ self.variables['Fm_pc[3,'+ str(i+1)+']'] = {'name':[val + ' Vapour Mass Flow'], 'value':None, 'unit':'g/s'}
+
+ for i in self.compound_names:
+ self.variables[i] = {'value':''}
+
+ def param_getter_tooltip(self,mode):
+ dict = {}
+
+ temp = []
+ for i, val in enumerate(self.compound_names):
+ try:
+ temp.append(self.variables['x_pc[1,' + str(i+1) + ']']['value'])
+ except:
+ pass
+ self.variables['x_pc']['value'] = temp
+
+ if(mode=="PT"):
+ self.mode1 = 'P'
+ self.mode2 = 'T'
+ mode1_n = self.variables['P']['name']
+ mode2_n = self.variables['T']['name']
+ dict = {mode1_n:str(self.variables['P']['value'])+' '+self.variables['P']['unit'],
+ mode2_n:str(self.variables['T']['value'])+' '+self.variables['T']['unit']}
+ elif(mode=="PH"):
+ self.mode1 = 'P'
+ self.mode2 = 'H_p[1]'
+ mode1_n = self.variables['P']['name']
+ mode2_n = self.variables['H_p[1]']['name']
+
+ dict = {mode1_n:str(self.variables['P']['value'])+' '+self.variables['P']['unit'],
+ mode2_n:str(self.variables['H_p[1]']['value'])+' '+self.variables['H_p[1]']['unit']}
+ elif(mode=="PVF"):
+ self.mode1 = 'P'
+ self.mode2 = 'xvap'
+ mode1_n = self.variables['P']['name']
+ mode2_n = self.variables['xvap']['name']
+
+ dict = {mode1_n:str(self.variables['P']['value'])+' '+self.variables['P']['unit'],
+ mode2_n:str(self.variables['xvap']['value'])+' '+self.variables['xvap']['unit']}
+ elif(mode=="TVF"):
+ self.mode1 = 'T'
+ self.mode2 = 'xvap'
+ mode1_n = self.variables['T']['name']
+ mode2_n = self.variables['xvap']['name']
+ dict = {mode1_n:str(self.variables['T']['value'])+' '+self.variables['T']['unit'],
+ mode2_n:str(self.variables['xvap']['value'])+' '+self.variables['xvap']['unit']}
+
+ elif(mode=="PS"):
+ self.mode1 = 'P'
+ self.mode2 = 'S_p[1]'
+ mode1_n = self.variables['P']['name']
+ mode2_n = self.variables['S_p[1]']['name']
+
+ dict = {mode1_n:str(self.variables['P']['value'])+' '+self.variables['P']['unit'],
+ mode2_n:str(self.variables['S_p[1]']['value'])+' '+self.variables['S_p[1]']['unit']}
+
+ dict['Mole Flow'] = str(self.variables['F_p[1]']['value'])+' '+self.variables['F_p[1]']['unit']
+ dict[self.variables['x_pc']['name']] = str(self.variables['x_pc']['value'])+' '+self.variables['x_pc']['unit']
+ dict['Thermo Package'] = self.thermo_package
+ return dict
+
+ def param_getter_tooltip_selectedVar(self):
+ dict = {}
+
+ pressure_name = self.variables['P']['name']
+ pressure_val = self.variables['P']['value']
+ pressure_unit = self.variables['P']['unit']
+ temp_name = self.variables['T']['name']
+ temp_val = self.variables['T']['value']
+ temp_unit = self.variables['T']['unit']
+ mixMolEntal_name = self.variables['H_p[1]']['name']
+ mixMolEntal_val = round(float(self.variables['H_p[1]']['value']),2)
+ mixMolEntal_unit = self.variables['H_p[1]']['unit']
+ mixMolEntro_name = self.variables['S_p[1]']['name']
+ mixMolEntro_val = round(float(self.variables['S_p[1]']['value']),2)
+ mixMolEntro_unit = self.variables['S_p[1]']['unit']
+ vapMolFrac_name = self.variables['xvap']['name']
+ vapMolFrac_val = self.variables['xvap']['value']
+ vapMolFrac_unit = self.variables['xvap']['unit']
+ mixMolFlo_name = self.variables['F_p[1]']['name']
+ mixMolFlo_val = self.variables['F_p[1]']['value']
+ mixMolFlo_unit = self.variables['F_p[1]']['unit']
+ mixMassFlo_name = self.variables['Fm_p[1]']['name']
+ mixMassFlo_val = round(float(self.variables['Fm_p[1]']['value']),2)
+ mixMassFlo_unit = self.variables['Fm_p[1]']['unit']
+
+ dict = {pressure_name:str(pressure_val)+' '+pressure_unit,
+ temp_name:str(temp_val)+' '+temp_unit,
+ vapMolFrac_name:str(vapMolFrac_val)+' '+vapMolFrac_unit,
+ mixMolEntal_name:str(mixMolEntal_val)+' '+mixMolEntal_unit,
+ mixMolEntro_name:str(mixMolEntro_val)+' '+mixMolEntro_unit,
+ mixMolFlo_name:str(mixMolFlo_val)+' '+mixMolFlo_unit,
+ mixMassFlo_name:str(mixMassFlo_val)+' '+mixMassFlo_unit}
+ return dict
+
+ def param_getter(self,mode):
+ dict = {}
+
+ temp = []
+ for i, val in enumerate(self.compound_names):
+ try:
+ temp.append(self.variables['x_pc[1,' + str(i+1) + ']']['value'])
+ except:
+ pass
+ self.variables['x_pc']['value'] = temp
+
+ if(mode=="PT"):
+ self.mode1 = 'P'
+ self.mode2 = 'T'
+
+ dict = {self.mode1:self.variables['P']['value'], self.mode2:self.variables['T']['value'],
+ "MolFlow":self.variables['F_p[1]']['value'],"x_pc":self.variables['x_pc']['value'],
+ "Thermo Package": self.thermo_package}
+ #print('dictionary is :' + str(dict))
+
+ elif(mode=="PH"):
+ self.mode1 = 'P'
+ self.mode2 = 'H_p[1]'
+ dict = {self.mode1:self.variables['P']['value'], self.mode2:self.variables['H_p[1]']['value'],
+ "MolFlow":self.variables['F_p[1]']['value'], "x_pc":self.variables['x_pc']['value'],
+ "Thermo Package": self.thermo_package}
+ elif(mode=="PVF"):
+ self.mode1 = 'P'
+ self.mode2 = 'xvap'
+ dict = {self.mode1:self.variables['P']['value'], self.mode2:self.variables['xvap']['value'],
+ "MolFlow":self.variables['F_p[1]']['value'], "x_pc":self.variables['x_pc']['value'],
+ "Thermo Package": self.thermo_package}
+ elif(mode=="TVF"):
+ self.mode1 = 'T'
+ self.mode2 = 'xvap'
+ dict = {self.mode1:self.variables['T']['value'], self.mode2:self.variables['xvap']['value'],
+ "MolFlow":self.variables['F_p[1]']['value'], "x_pc":self.variables['x_pc']['value'],
+ "Thermo Package": self.thermo_package}
+ elif(mode=="PS"):
+ self.mode1 = 'P'
+ self.mode2 = 'S_p[1]'
+ dict = {self.mode1:self.variables['P']['value'], self.mode2: self.variables['S_p[1]']['value'],
+ "MolFlow":self.variables['F_p[1]']['value'], "x_pc":self.variables['x_pc']['value'],
+ "Thermo Package": self.thermo_package}
+
+ return dict
+
+ def param_setter(self,dict):
+ self.variables['x_pc']['value'] = dict['x_pc'].split(",")
+ #print('xpc is :' + str(self.variables['x_pc']['value']))
+ self.thermo_package = dict['Thermo Package']
+ self.variables['F_p[1]']['value'] = dict['MolFlow']
+ self.variables[self.mode1]['value'] = dict[self.mode1]
+ self.variables[self.mode2]['value'] = dict[self.mode2]
+
+ for i in range(len(self.compound_names)):
+ if self.variables['x_pc']['value'][i]:
+ self.variables['x_pc[1,'+str(i+1)+']']['value'] = self.variables['x_pc']['value'][i]
+ else:
+ self.variables['x_pc[1,'+str(i+1)+']']['value'] = None
+ self.variables['xm_pc[1,'+str(i+1)+']']['value'] = self.variables['xm_pc']['value']
+
+ self.variables['F_pc[1,'+str(i+1)+']']['value'] = None
+ self.variables['Fm_pc[1,'+str(i+1)+']']['value'] = None
+ for i in range(0,len(self.compound_names)):
+ self.variables['x_pc[2,'+str(i+1)+']']['value'] = None
+ self.variables['xm_pc[2,'+str(i+1)+']']['value'] = None
+ self.variables['F_pc[2,'+str(i+1)+']']['value'] = None
+ self.variables['Fm_pc[2,'+str(i+1)+']']['value'] = None
+
+ self.variables['x_pc[3,'+str(i+1)+']']['value'] = None
+ self.variables['xm_pc[3,'+str(i+1)+']']['value'] = None
+ self.variables['F_pc[3,'+str(i+1)+']']['value'] = None
+ self.variables['Fm_pc[3,'+str(i+1)+']']['value'] = None
+
+ def set_pos(self,pos):
+ self.pos = pos
+
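+ # Collect the minimum set of specification equations for this stream (the two mode
+ # variables, the mixture mole fractions and the total molar flow), converting Python
+ # lists to Modelica array syntax ({...}) where needed.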
+ def get_min_eqn_values(self):
+ x_pclist = []
+ for i in range(0,len(self.compound_names)):
+ x_pclist.append(self.variables['x_pc[1,'+str(i+1)+']']['value'])
+ x_pc = json.dumps(x_pclist)
+ x_pc = x_pc.replace('[','{')
+ x_pc = x_pc.replace(']','}')
+ x_pc = x_pc.replace('"','')
+
+ if self.variables[self.mode1]['value']:
+ self.eqn_dict[self.mode1] = self.variables[self.mode1]['value']
+ if self.variables[self.mode2]['value']:
+ self.eqn_dict[self.mode2] = self.variables[self.mode2]['value']
+ if self.variables['x_pc']['value']:
+ self.eqn_dict['x_pc[1,:]'] = x_pc
+ if self.variables['F_pc']['value']:
+ self.eqn_dict['F_p[1]'] = self.variables['F_p[1]']['value']
+
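+ # Build start (guess) values for the OpenModelica solver from previously computed
+ # results, again converting Python lists to Modelica array literals.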
+ def get_start_values(self):
+ try:
+ if self.variables[self.mode1]['value']:
+ self.start_dict[self.mode1] = self.variables[self.mode1]['value']
+
+ if self.variables[self.mode2]['value']:
+ self.start_dict[self.mode2] = self.variables[self.mode2]['value']
+
+
+ if self.variables['x_pc[2,1]']['value'] != None:
+ x_pcarr = []
+ for i in range(1,4):
+ cmf = []
+ for j in range(1,len(self.compound_names)+1):
+ cmf.append(str(self.variables['x_pc['+str(i)+','+str(j)+']']['value']))
+ x_pcarr.append(cmf)
+ x_pcstr = json.dumps(x_pcarr)
+ x_pcstr = x_pcstr.replace('[','{')
+ x_pcstr = x_pcstr.replace(']','}')
+ x_pcstr = x_pcstr.replace('"','')
+ self.start_dict['x_pc'] = x_pcstr
+
+ if self.variables['xm_pc[2,1]']['value'] != None:
+ xm_pcarr = []
+ for i in range(1,4):
+ cmf = []
+ for j in range(1,len(self.compound_names)+1):
+ cmf.append(str(self.variables['xm_pc['+str(i)+','+str(j)+']']['value']))
+ xm_pcarr.append(cmf)
+ xm_pcstr = json.dumps(xm_pcarr)
+ xm_pcstr = xm_pcstr.replace('[','{')
+ xm_pcstr = xm_pcstr.replace(']','}')
+ xm_pcstr = xm_pcstr.replace('"','')
+ self.start_dict['xm_pc'] = xm_pcstr
+
+ if self.variables['Fm_pc[2,1]']['value'] != None:
+ Fm_pcarr = []
+ for i in range(1,4):
+ cmf = []
+ for j in range(1,len(self.compound_names)+1):
+ cmf.append(str(self.variables['Fm_pc['+str(i)+','+str(j)+']']['value']))
+ Fm_pcarr.append(cmf)
+ Fm_pcstr = json.dumps(Fm_pcarr)
+ Fm_pcstr = Fm_pcstr.replace('[','{')
+ Fm_pcstr = Fm_pcstr.replace(']','}')
+ Fm_pcstr = Fm_pcstr.replace('"','')
+ self.start_dict['Fm_pc'] = Fm_pcstr
+
+ if self.variables['F_pc[2,1]']['value'] != None:
+ F_pcarr = []
+ for i in range(1,4):
+ cmf = []
+ for j in range(1,len(self.compound_names)+1):
+ cmf.append(str(self.variables['F_pc['+str(i)+','+str(j)+']']['value']))
+ F_pcarr.append(cmf)
+ F_pcstr = json.dumps(F_pcarr)
+ F_pcstr = F_pcstr.replace('[','{')
+ F_pcstr = F_pcstr.replace(']','}')
+ F_pcstr = F_pcstr.replace('"','')
+ self.start_dict['F_pc'] = F_pcstr
+
+ if self.variables['MW_p[2]']['value'] != None:
+ MW_pArr = []
+ for i in range(1,4):
+ MW_pArr.append(self.variables['MW_p['+str(i)+']']['value'])
+ MW_pStr = json.dumps(MW_pArr)
+ MW_pStr = MW_pStr.replace('[','{')
+ MW_pStr = MW_pStr.replace(']','}')
+ MW_pStr = MW_pStr.replace('"','')
+ self.start_dict['MW_p'] = MW_pStr
+
+ if self.variables['F_p[2]']['value'] != None:
+ F_pArr = []
+ for i in range(1,4):
+ F_pArr.append(self.variables['F_p['+str(i)+']']['value'])
+ F_pStr = json.dumps(F_pArr)
+ F_pStr = F_pStr.replace('[','{')
+ F_pStr = F_pStr.replace(']','}')
+ F_pStr = F_pStr.replace('"','')
+ self.start_dict['F_p'] = F_pStr
+
+ if self.variables['Cp_p[2]']['value'] != None:
+ Cp_pArr = []
+ for i in range(1,4):
+ Cp_pArr.append(self.variables['Cp_p['+str(i)+']']['value'])
+ Cp_pStr = json.dumps(Cp_pArr)
+ Cp_pStr = Cp_pStr.replace('[','{')
+ Cp_pStr = Cp_pStr.replace(']','}')
+ Cp_pStr = Cp_pStr.replace('"','')
+ self.start_dict['Cp_p'] = Cp_pStr
+
+ if self.variables['H_p[2]']['value'] != None:
+ H_pArr = []
+ for i in range(1,4):
+ H_pArr.append(self.variables['H_p['+str(i)+']']['value'])
+ H_pStr = json.dumps(H_pArr)
+ H_pStr = H_pStr.replace('[','{')
+ H_pStr = H_pStr.replace(']','}')
+ H_pStr = H_pStr.replace('"','')
+ self.start_dict['H_p'] = H_pStr
+
+
+ if self.variables['S_p[2]']['value'] != None:
+ S_pArr = []
+ for i in range(1,4):
+ S_pArr.append(self.variables['S_p['+str(i)+']']['value'])
+ S_pStr = json.dumps(S_pArr)
+ S_pStr = S_pStr.replace('[','{')
+ S_pStr = S_pStr.replace(']','}')
+ S_pStr = S_pStr.replace('"','')
+ self.start_dict['S_p'] = S_pStr
+
+ if self.variables['Fm_p[2]']['value'] != None:
+ Fm_pArr = []
+ for i in range(1,4):
+ Fm_pArr.append(self.variables['Fm_p['+str(i)+']']['value'])
+ Fm_pStr = json.dumps(Fm_pArr)
+ Fm_pStr = Fm_pStr.replace('[','{')
+ Fm_pStr = Fm_pStr.replace(']','}')
+ Fm_pStr = Fm_pStr.replace('"','')
+ self.start_dict['Fm_p'] = Fm_pStr
+
+ except Exception as e:
+ exc_type, exc_obj, exc_tb = sys.exc_info()
+ print(exc_type,exc_tb.tb_lineno)
+ print(e)
+ print('error')
+
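+ # Emit the Modelica declaration for this stream: a small model extending
+ # Simulator.Streams.MaterialStream with the selected thermodynamic package,
+ # followed by its instantiation. A rough sketch of the generated text, assuming
+ # two hypothetical compounds and count == 1:
+ #   model ms1
+ #   extends Simulator.Streams.MaterialStream;
+ #   extends Simulator.Files.ThermodynamicPackages.RaoultsLaw;
+ #   end ms1;
+ #   ms1 MaterialStream1(Nc = 2,C = {Water, Ethanol});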
+ def OM_Flowsheet_Initialize(self,addedcomp):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + ("model ms"+str(self.count)+"\n")
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Streams.MaterialStream;\n" )
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.Files.ThermodynamicPackages."+self.thermo_package+";\n")
+ self.OM_data_init = self.OM_data_init + ("end ms"+str(self.count)+";\n")
+ comp_count = len(addedcomp)
+
+ self.OM_data_init = self.OM_data_init + "ms"+str(self.count) +" " + self.name +"(Nc = " + str(comp_count)
+ self.OM_data_init = self.OM_data_init + ",C = {"
+ C = str(addedcomp).strip('[').strip(']')
+ C = C.replace("'","")
+ self.OM_data_init = self.OM_data_init + C + "},"
+
+ self.OM_data_init = self.OM_data_init[:-1]
+ self.OM_data_init = self.OM_data_init + ');\n'
+ return self.OM_data_init
+
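+ # Emit one 'name.key = value;' Modelica equation per entry in eqn_dict,
+ # e.g. 'MaterialStream1.P = 101325;' (illustrative values).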
+ def OM_Flowsheet_Equation(self,addedcomp,method):
+ self.OM_data_eqn = ''
+ self.comp_count = len(addedcomp)
+ if method == 'Eqn':
+ self.eqn_dict = {}
+ self.get_min_eqn_values()
+ if method == 'SM':
+ self.eqn_dict = {}
+ self.get_min_eqn_values()
+
+ for key,value in self.eqn_dict.items():
+ self.OM_data_eqn = self.OM_data_eqn + self.name + '.'+ key + ' = ' + str(value) + ';\n'
+ return self.OM_data_eqn
+
+ def disableInputDataTab(self,dockwidget):
+ # set the values in the dock widget's Input Data tab and disable the fields
+ dockwidget.comboBox.setDisabled(True)
+ dockwidget.input_dict['P'].setText(str(round(float(self.variables['P']['value']),2)))
+ dockwidget.input_dict['P'].setDisabled(True)
+ dockwidget.input_dict['T'].setText(str(round(float(self.variables['T']['value']),2)))
+ dockwidget.input_dict['T'].setDisabled(True)
+ dockwidget.input_dict['MolFlow'].setText(str(round(float(self.variables['F_p[1]']['value']),2)))
+ dockwidget.input_dict['MolFlow'].setDisabled(True)
+ dockwidget.cbTP.setCurrentText(str(self.thermo_package))
+ dockwidget.cbTP.setDisabled(True)
+ dockwidget.pushButton_2.setDisabled(True)
+ for index,k in enumerate(dockwidget.x_pclist):
+ k.setText(str(round(float(self.variables['x_pc[1,'+ str(index+1)+']']['value']),2)))
+ k.setDisabled(True)
diff --git a/src/main/python/utils/UnitOperations.py b/src/main/python/utils/UnitOperations.py
new file mode 100644
index 0000000..d3cf3d7
--- /dev/null
+++ b/src/main/python/utils/UnitOperations.py
@@ -0,0 +1,751 @@
+import os,sys
+import json
+current = os.path.dirname(os.path.realpath(__file__))
+parent = os.path.dirname(current)
+parentPath = os.path.dirname(parent)
+sys.path.append(parentPath)
+
+from python.OMChem.Flowsheet import Flowsheet
+from python.OMChem.EngStm import EngStm
+from python.utils.ComponentSelector import *
+from python.utils.Container import *
+from PyQt5.QtCore import *
+
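+# Base class for every flowsheet block. It stores the connected input/output streams,
+# the variable metadata shown in the dock widgets, and the helpers that translate a
+# block into OpenModelica declarations and equations.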
+class UnitOperation():
+ counter = 1
+ def __init__(self):
+ self.OM_data_eqn = ''
+ self.OM_data_init = ''
+ self.input_stms = {}
+ self.output_stms = {}
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+ self.name = ''
+ self.mode = None
+ self.mode_val = None
+ self.type = ''
+ self.no_of_inputs = 1
+ self.no_of_outputs = 1
+ self.x = 2500-30
+ self.y = 2500-30
+ self.pos = QPointF(self.x, self.y)
+ self.count = UnitOperation.counter
+ self.variables = {}
+ self.modes_list = []
+ self.parameters = []
+ self.extra = []
+ self.for_naming = []
+ self.multidict = []
+ self.thermo_pack_req = False
+ self.thermo_package = 'RaoultsLaw'
+ self.saved = False
+
+ def param_getter(self,mode=None):
+ params = {}
+ if mode == None and self.modes_list:
+ self.mode = self.modes_list[0]
+ else:
+ self.mode = mode
+ params[self.mode] = None
+ for i in self.parameters:
+ params[i] = self.variables[i]['value']
+
+ return params
+
+ def param_getter_tooltip(self,mode=None):
+ params = {}
+ if mode == None and self.modes_list:
+ self.mode = self.modes_list[0]
+ else:
+ self.mode = mode
+ params[self.mode] = None
+ for i in self.parameters:
+ params[self.variables[i]['name']] = str(self.variables[i]['value'])+' '+self.variables[i]['unit']
+
+ return params
+
+ def param_setter(self,params):
+ print("param_setter ", params)
+ try:
+ self.mode = list(params.keys())[0]
+ except Exception as e:
+ print(e)
+ for k,v in params.items():
+ if k == 'Thermo Package':
+ self.thermo_package = v
+ elif k != self.mode:
+ self.k = v
+ self.variables[k]['value'] = v
+ else:
+ self.variables[k]['value'] = v
+ self.mode_val = params[self.mode]
+
+
+ def add_connection(self,flag,sourceId, UnitOpr):
+ if flag==1: # Input stream if flag is 1
+ self.input_stms[sourceId] = UnitOpr
+ else :
+ self.output_stms[sourceId] = UnitOpr
+
+ def set_pos(self,pos):
+ self.pos = pos
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
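+ # Emit the Modelica declaration for this block. When a thermodynamic package is
+ # required, the unit operation is wrapped in a small model that extends both the
+ # Simulator.UnitOperations type and the chosen package; otherwise the type is
+ # instantiated directly. Parameters are appended as 'name = value' pairs with
+ # Python lists rewritten as Modelica arrays.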
+ def OM_Flowsheet_Initialize(self):
+ self.OM_data_init = ''
+
+ if(self.thermo_pack_req):
+ if len(self.extra)>1:
+ for i in range(len(self.extra)):
+ latest = ''
+ for j in range(len(self.extra[i])):
+ if self.extra[i][j]!='.':
+ latest += self.extra[i][j]
+ self.for_naming[i] = latest
+
+ if(self.thermo_pack_req):
+ if len(self.extra)==1:
+ for i in self.extra:
+ self.OM_data_init += ('model '+i+str(self.counter)+'\n')
+ self.OM_data_init += ('extends Simulator.UnitOperations.'+i+';\n')
+ self.OM_data_init += ('extends Simulator.Files.ThermodynamicPackages.'+self.thermo_package+';\n')
+ self.OM_data_init += ('end '+i+str(self.counter)+';\n')
+
+ self.OM_data_init += i+str(self.counter) + ' ' + self.name + '(Nc = ' + str(len(self.compounds))
+ else:
+ for i in range(len(self.extra)):
+ if i!=(len(self.extra)-1):
+ self.OM_data_init += ('model '+self.for_naming[i]+str(self.counter)+'\n')
+ self.OM_data_init += ('extends Simulator.UnitOperations.'+self.extra[i]+';\n')
+ self.OM_data_init += ('extends Simulator.Files.ThermodynamicPackages.'+self.thermo_package+';\n')
+ self.OM_data_init += ('end '+self.for_naming[i]+str(self.counter)+';\n')
+ else:
+ self.OM_data_init += ('model '+self.for_naming[i]+str(self.counter)+'\n')
+ self.OM_data_init += ('extends Simulator.UnitOperations.'+self.extra[i]+';\n')
+ for j in range(len(self.extra)-1):
+ self.OM_data_init += (self.for_naming[j] + str(self.counter) +' ' + self.for_naming[j] + '#' + self.multidict[j] + ';\n')
+
+ self.OM_data_init += ('end '+self.for_naming[i]+str(self.counter)+';\n')
+
+ self.OM_data_init += self.for_naming[i] + str(self.counter) + ' ' + self.name + '(Nc = ' + str(len(self.compounds))
+
+ C = str(self.compounds).strip('[').strip(']')
+ C = C.replace("'", "")
+ self.OM_data_init += ',C = {' + C + '}'
+
+ for k in self.parameters:
+ if(k == 'HKey_x_pc' or k == 'LKey_x_pc'):
+ continue
+ self.OM_data_init += ', '
+ self.OM_data_init += k + ' = ' + (json.dumps(self.variables[k]['value']) if json.dumps(self.variables[k]['value']).replace('"', '').replace('_', '').isalpha()
+ else json.dumps(self.variables[k]['value']).replace('[', '{').replace(']', '}').replace('"', ''))
+
+ self.OM_data_init += ');\n'
+
+ else:
+ self.OM_data_init += 'Simulator.UnitOperations.' + self.type + ' ' + self.name + '(Nc = ' + str(len(self.compounds))
+ C = str(self.compounds).strip('[').strip(']')
+ C = C.replace("'", "")
+ self.OM_data_init += ',C = {' + C + '}'
+
+ for k in self.parameters:
+ self.OM_data_init += ', '
+ self.OM_data_init += k + ' = ' + (json.dumps(self.variables[k]['value']) if json.dumps(self.variables[k]['value']).replace('"', '').replace('_', '').isalpha()
+ else json.dumps(self.variables[k]['value']).replace('[', '{').replace(']', '}').replace('"', ''))
+
+ self.OM_data_init += ');\n'
+ return self.OM_data_init
+
+
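+ # Emit connect() equations between this block and its attached streams, e.g.
+ # 'connect(MaterialStream1.Out,Mixer1.In[1]);' (names illustrative), followed by
+ # the selected mode specification if one is set.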
+ def OM_Flowsheet_Equation(self):
+ self.OM_data_eqn = ''
+
+ if len(self.input_stms)>1 or self.type == 'Mixer':
+ strcount = 1
+ for strm in self.input_stms.values():
+ self.OM_data_eqn += ('connect(' + strm.name + '.Out,' + self.name + '.In[' + str(strcount) + ']);\n')
+ strcount += 1
+ else:
+ print(self.input_stms)
+ self.OM_data_eqn += ('connect(' + self.name + '.In,' + self.input_stms[1].name + '.Out);\n')
+
+ if len(self.output_stms)>1:
+ strcount = 1
+ for strm in self.output_stms.values():
+ self.OM_data_eqn += ('connect(' + strm.name + '.In,' + self.name + '.Out[' + str(strcount) + ']);\n')
+ strcount += 1
+ else:
+ print("self.output_stms ", self.output_stms)
+ self.OM_data_eqn += ('connect(' + self.name + '.Out,' + self.output_stms[1].name + '.In);\n')
+
+ if self.mode:
+ self.OM_data_eqn += (self.name + '.' + self.mode + '=' + str(self.mode_val) + ';\n')
+
+ return self.OM_data_eqn
+
+class ShortcutColumn(UnitOperation):
+ def __init__(self, CompNames = [], name='ShortcutColumn'):
+ UnitOperation.__init__(self)
+ self.name = name + str(ShortcutColumn.counter)
+ self.type = 'ShortcutColumn'
+ self.no_of_inputs = 1
+ self.no_of_outputs = 2
+ self.EngStm1 = EngStm(name='EngStm1'+self.name)
+ self.EngStm2 = EngStm(name='EngStm2'+self.name)
+ self.count = ShortcutColumn.counter
+
+ self.extra = ['ShortcutColumn']
+ self.for_naming = ['ShortcutColumn']
+ self.thermo_pack_req = True
+
+ self.parameters = ['HKey', 'LKey', 'HKey_x_pc', 'LKey_x_pc', 'Ctype', 'Pcond', 'Preb', 'RR']
+ self.result_parameters = ['RRmin', 'Ntmin', 'Nt', 'Intray', 'Fliqstrip', 'Fliqrec', 'Fvapstrip', 'Fvaprec', 'Qc', 'Qr']
+ type(self).counter += 1
+
+ self.variables = {
+ 'HKey' : {'name':'Heavy Key', 'value': 0, 'unit':''},
+ 'LKey' : {'name':'Light Key', 'value': 0, 'unit':''},
+ 'HKey_x_pc' : {'name':'Heavy Key Mole Fraction', 'value':0.01, 'unit':''},
+ 'LKey_x_pc' : {'name':'Light Key Mole Fraction', 'value':0.01, 'unit':''},
+ 'Ctype' : {'name':'Condenser Type', 'value':'Total', 'unit':''},
+ 'thermo_package' : {'name':'Thermo Package', 'value':'Raoults_Law', 'unit':''},
+ 'Pcond' : {'name':'Condenser Pressure', 'value':101325, 'unit':'Pa'},
+ 'Preb' : {'name':'Reboiler Pressure', 'value':101325, 'unit':'Pa'},
+ 'RR' : {'name':'Reflux Ratio', 'value':1.5, 'unit':''},
+
+ 'RRmin' : {'name':'Minimum Reflux Ratio', 'value': None , 'unit':''},
+ 'Ntmin' : {'name':'Minimum Number of Stages', 'value': None, 'unit':''},
+ 'Nt' : {'name':'Actual Number of Stages', 'value': None, 'unit':''},
+ 'Intray' : {'name':'Optimal Feed Stage', 'value': None, 'unit':''},
+ 'Fliqstrip' : {'name':'Stripping Liquid', 'value': None, 'unit':'mol/s'},
+ 'Fliqrec' : {'name':'Rectification Liquid', 'value': None, 'unit':'mol/s'},
+ 'Fvapstrip' : {'name':'Stripping Vapor', 'value': None, 'unit':'mol/s'},
+ 'Fvaprec' : {'name':'Rectification Vapour', 'value': None, 'unit':'mol/s'},
+ 'Qc' : {'name':'Condenser Duty', 'value': None, 'unit':'W'},
+ 'Qr' : {'name':'Reboiler Duty', 'value': None, 'unit':'W'},
+
+ }
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
+ def param_setter(self,params):
+ print("param_setter ", params)
+ self.variables['HKey']['value'] = self.compounds.index(params[0]) + 1
+ self.variables['LKey']['value'] = self.compounds.index(params[1]) + 1
+ self.variables['HKey_x_pc']['value'] = params[2]
+ self.variables['LKey_x_pc']['value'] = params[3]
+ self.variables['Ctype']['value'] = params[4]
+ self.variables['Pcond']['value'] = params[5]
+ self.variables['Preb']['value'] = params[6]
+ self.variables['RR']['value'] = params[7]
+ self.variables['thermo_package']['value'] = params[8]
+
+ def OM_Flowsheet_Equation(self):
+ self.OM_data_eqn = ''
+
+ self.OM_data_eqn += ('connect(' + self.name + '.In,' + self.input_stms[1].name + '.Out);\n')
+
+ strcount = 1
+ for strm in self.output_stms.values():
+ self.OM_data_eqn += ('connect(' + strm.name + '.In,' + self.name + '.Out' + str(strcount) + ');\n')
+ strcount += 1
+
+ self.OM_data_eqn += (self.name + '.x_pc[2, ' + self.name + '.HKey] = ' + str(self.variables['HKey_x_pc']['value']) + ';\n')
+ self.OM_data_eqn += (self.name + '.x_pc[3, ' + self.name + '.LKey] = ' + str(self.variables['LKey_x_pc']['value']) + ';\n')
+
+ return self.OM_data_eqn
+
+class DistillationColumn(UnitOperation):
+ def __init__(self,name='DistillationColumn'):
+ UnitOperation.__init__(self)
+ self.name = name + str(DistillationColumn.counter)
+ self.type = 'DistillationColumn'
+ self.no_of_inputs = 1
+ self.no_of_outputs = 2
+ self.EngStm1 = EngStm(name='EngStm1'+self.name)
+ self.EngStm2 = EngStm(name='EngStm2'+self.name)
+ self.count = DistillationColumn.counter
+ self.thermo_pack_req = True
+ # self.modes_list = ['RR', 'Nout', 'T']
+ self.modes_list = []
+ self.parameters = ['Nt', 'Ni', 'Ctype', 'Pcond', 'condmode', 'C_comp', 'C_Spec', 'Preb', 'rebmode', 'rebcomp', 'R_Spec']
+ #self.parameters = ['Nt', 'InT_s', 'In_s', 'thermo_package', 'Ctype', 'Pcond', 'Preb']
+ self.result_parameters = ['Cduty.Q', 'Rduty.Q']
+ self.Cspec_list = ['Reflux Ratio','Product Molar Flow (mol/s)', 'Temperature (K)', 'Compound Molar Fraction', 'Compound Molar Flow (mol/s)']
+ self.Rspec_list = ['Product Molar Flow (mol/s)', 'Temperature (K)', 'Compound Molar Fraction', 'Compound Molar Flow (mol/s)']
+
+ type(self).counter += 1
+ self.variables = {
+ 'Ni' : {'name':'Number of Input', 'value':1, 'unit':''},
+ 'RR' : {'name':'Reflux Ratio', 'value':None, 'unit':''},
+ 'T' : {'name':'Temperature', 'value':300, 'unit':'K'},
+ 'Nout' : {'name':'No of Sidedraws', 'value':None, 'unit':''},
+ 'Nt' : {'name':'No of Stages', 'value':12, 'unit':''},
+ 'InT_s' : {'name':'Feed Stage', 'value':[], 'unit':''},
+ 'thermo_package' : {'name':'Thermo Package', 'value':'Raoults_Law', 'unit':''},
+ 'Ctype' : {'name':'Condenser Type', 'value':'Total', 'unit':''},
+ 'Pcond' : {'name':'Condenser Pressure', 'value':101325, 'unit':'Pa'},
+ 'Preb' : {'name':'Reboiler Pressure', 'value':101325, 'unit':'Pa'},
+ 'C_Spec': {'name':'Condenser Specification', 'type':self.Cspec_list[0], 'value':'', 'comp':compound_selected[0], 'unit':''},
+ 'R_Spec': {'name':'Reboiler Specification', 'type':self.Rspec_list[0], 'value':'', 'comp':compound_selected[0], 'unit':''},
+ 'Cduty.Q': {'name':'Condenser Duty', 'value': '', 'unit':'W'},
+ 'Rduty.Q': {'name':'Reboiler Duty', 'value': '', 'unit': 'W'},
+ 'Stages.T': {'name':'Stagewise Temperature', 'value':[], 'unit':'K'},
+ 'Stages.x_pc': {'name': 'Stagewise Component Mole Fraction', 'value':[],'unit':''}
+ }
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
+ def param_setter(self,params):
+ print("param_setter ", params)
+ temp = 0
+ self.variables['Nt']['value'] = params[0]
+ for i in range(self.variables['Ni']['value']):
+ self.variables['InT_s']['value'].append(params[i+1])
+ temp = i + 1
+
+ self.variables['Ctype']['value'] = params[temp+1]
+ self.variables['Pcond']['value'] = params[temp+2]
+ self.variables['C_Spec']['type'] = params[temp+3]
+ if 'Compound' in self.variables['C_Spec']['type']:
+ self.variables['C_Spec']['comp'] = params[temp+4]
+ # the C_Spec value itself is not written back to the class here; it is filled in later from the simulation results
+ self.variables['C_Spec']['value'] = params[temp+5]
+ for var in self.variables:
+ if self.variables[var]['name'] == self.variables['C_Spec']['type']:
+ self.variables[var]['value'] = params[temp+5]
+
+ self.variables['Preb']['value'] = params[temp+6]
+ # the R_Spec value itself is not written back to the class here; it is filled in later from the simulation results
+ self.variables['R_Spec']['type'] = params[temp+7]
+ if 'Compound' in self.variables['R_Spec']['type']:
+ self.variables['R_Spec']['comp'] = params[temp+8]
+ self.variables['R_Spec']['value'] = params[temp+9]
+ self.variables['thermo_package']['value'] = params[temp+10]
+ print(self.variables)
+
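+ # Build the distillation column declaration: local Condenser, Tray and Reboiler
+ # models extending the simulator's column sections plus the chosen thermodynamic
+ # package, a distColN wrapper assembling them, and finally its instantiation with
+ # the stage count, feed stages and condenser type.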
+ def OM_Flowsheet_Initialize(self):
+ self.OM_data_init = ''
+ self.OM_data_init = self.OM_data_init + 'model Condenser\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.UnitOperations.DistillationColumn.Cond;\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Files.ThermodynamicPackages.' + self.thermo_package + ';\n'
+ self.OM_data_init = self.OM_data_init + 'end Condenser;\n'
+ self.OM_data_init = self.OM_data_init + 'model Tray\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.UnitOperations.DistillationColumn.DistTray;\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Files.ThermodynamicPackages.' + self.thermo_package + ';\n'
+ self.OM_data_init = self.OM_data_init + 'end Tray;\n'
+ self.OM_data_init = self.OM_data_init + 'model Reboiler\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.UnitOperations.DistillationColumn.Reb;\n'
+ self.OM_data_init = self.OM_data_init + 'extends Simulator.Files.ThermodynamicPackages.' + self.thermo_package + ';\n'
+ self.OM_data_init = self.OM_data_init + 'end Reboiler;\n'
+ self.OM_data_init = self.OM_data_init + ("model distCol" + str(self.count) + "\n")
+ self.OM_data_init = self.OM_data_init + ("extends Simulator.UnitOperations.DistillationColumn.DistCol;\n")
+ self.OM_data_init = self.OM_data_init + (
+ "Condenser condenser(Nc = Nc, C = C, Ctype =Ctype, Bin = Bin_t[1], T(start = 300));\n")
+ self.OM_data_init = self.OM_data_init + (
+ "Reboiler reboiler(Nc = Nc, C = C, Bin = Bin_t[Nt]);\n")
+ self.OM_data_init = self.OM_data_init + (
+ "Tray tray[Nt - 2](each Nc = Nc, each C = C, Bin = Bin_t[2:Nt - 1]);\n")
+ self.OM_data_init = self.OM_data_init + ("end distCol" + str(self.count) + ";\n")
+ comp_count = len(self.compounds)
+ self.OM_data_init = self.OM_data_init + (
+ "distCol" + str(self.count) + " " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (", C= C")
+
+ self.OM_data_init = self.OM_data_init + "," + (
+ "Nt=" + str(self.variables['Nt']['value']) + "," + "Ni="
+ + str(self.variables['Ni']['value']) + ",InT_s=" + "{" +
+ str(self.variables['InT_s']['value']).strip('[').strip(']') + "}" + ',Ctype ="' +
+ self.variables['Ctype']['value'] + '");\n')
+ # self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStm1.name+';\n'
+ # self.OM_data_init = self.OM_data_init + 'Simulator.Streams.Energy_Stream '+self.EngStm2.name+';\n'
+ return self.OM_data_init
+
+ def OM_Flowsheet_Equation(self):
+ self.OM_data_eqn = ''
+ # self.OM_data_eqn = self.name + '.pressDrop = ' + str(self.PressDrop) + ';\n'
+ # self.OM_data_eqn = self.OM_data_eqn + (
+ # 'connect(' + self.name + '.' + 'condensor_duty' + ',' + self.EngStm1.name + '.inlet);\n')
+ # self.OM_data_eqn = self.OM_data_eqn + (
+ # 'connect(' + self.name + '.reboiler_duty' + ', ' + self.EngStm2.name + '.inlet);\n')
+ self.OM_data_eqn = self.OM_data_eqn + (
+ 'connect(' + self.name + '.Dist' + ", " + self.output_stms[1].name + '.In);\n')
+ self.OM_data_eqn = self.OM_data_eqn + (
+ 'connect(' + self.name + '.Bot' + ", " + self.output_stms[2].name + '.In);\n')
+ for i in range(len(self.input_stms)):
+ self.OM_data_eqn = self.OM_data_eqn + (
+ 'connect(' + self.input_stms[i +1].name + '.Out' + ", " + self.name + '.In_s[' + str(
+ i + 1) + ']);\n')
+ # ['Product Molar Flow (mol/s)', 'Temperature (K)', 'Compound Molar Fraction',
+ # 'Compound Molar Flow (mol/s)']
+ if self.variables['C_Spec']['type'] == "Reflux Ratio":
+ self.OM_data_eqn = self.OM_data_eqn + (
+ self.name + '.' + 'RR' + '=' + str(self.variables['RR']['value']) + ';\n')
+ elif self.variables['C_Spec']['type'] == "Product Molar Flow (mol/s)":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[1].name + '.' + 'F_p[1] = ' + str(
+ self.variables['C_Spec']['value']) + ';\n')
+ elif self.variables['C_Spec']['type'] == "Temperature (K)":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[1].name + '.' + 'T = ' + str(
+ self.variables['C_Spec']['value']) + ';\n')
+ elif self.variables['C_Spec']['type'] == "Compound Molar Fraction":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[1].name + '.x_pc[1,:' +
+ str(self.compounds.index(self.variables['C_Spec']['comp']) + 1) + '] = ' + str(
+ self.variables['C_Spec']['value']) + ';\n')
+ elif self.variables['C_Spec']['type'] == "Compound Molar Flow (mol/s)":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[1].name + '.F_pc[1,:' +
+ str(self.compounds.index(self.variables['C_Spec']['comp']) + 1) + '] = ' + str(
+ self.variables['C_Spec']['value']) + ';\n')
+ else:
+ self.OM_data_eqn = self.OM_data_eqn + (
+ self.name + '.Condenser.' + self.mode + '=' + str(self.mode_val) + ';\n')
+
+ if self.variables['R_Spec']['type'] == "Product Molar Flow (mol/s)":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[2].name + '.' + 'F_p[1] = ' + str(
+ self.variables['R_Spec']['value']) + ';\n')
+ elif self.variables['R_Spec']['type'] == "Temperature (K)":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[2].name + '.' + 'T = ' + str(
+ self.variables['R_Spec']['value']) + ';\n')
+ elif self.variables['R_Spec']['type'] == "Compound Molar Fraction":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[2].name + '.x_pc[1,:' +
+ str(self.compounds.index(self.variables['R_Spec']['comp']) + 1) + '] = ' + str(
+ self.variables['R_Spec']['value']) + ';\n')
+ elif self.variables['R_Spec']['type'] == "Compound Molar Flow (mol/s)":
+ self.OM_data_eqn = self.OM_data_eqn + (self.output_stms[2].name + '.F_pc[1,:' +
+ str(self.compounds.index(self.variables['R_Spec']['comp']) + 1) + '] = ' + str(
+ self.variables['R_Spec']['value']) + ';\n')
+
+
+ self.OM_data_eqn = self.OM_data_eqn + self.name + '.reboiler.P=' + str(
+ self.variables['Preb']['value']) + ';\n'
+ self.OM_data_eqn = self.OM_data_eqn + self.name + '.condenser.P=' + str(
+ self.variables['Pcond']['value']) + ';\n'
+ return self.OM_data_eqn
+
+class ConvertionReactor(UnitOperation):
+ def __init__(self,name='',Nr=None,b=None,X=None,Z=None,a=[],operation=None,Tdef=None):
+ UnitOperation.__init__(self)
+ self.name = name
+ self.type = 'ConvertionReactor'
+
+ self.Nr = str(Nr)
+ self.b = str(b)
+ self.X = str(X)
+ self.Z = str(Z)
+ self.a = json.dumps(a).replace('[','{').replace(']','}')
+ self.operation = str(operation)
+ self.Tdef = str(Tdef)
+
+class CompoundSeparator(UnitOperation):
+ def __init__(self, name='CompoundSeparator'):
+ UnitOperation.__init__(self)
+ self.name = name + str(CompoundSeparator.counter)
+ self.type = 'CompoundSeparator'
+ self.no_of_inputs = 1
+ self.no_of_outputs = 2
+
+ self.SepFact_modes = ['Molar_Flow (mol/s)', 'Mass_Flow (g/s)', 'Inlet_Molar_Flow_Percent', 'Outlet_Molar_Flow_Percent']
+
+ type(self).counter += 1
+ self.variables = {
+ 'SepStrm' : {'name':'Separation Stream', 'value':1, 'unit':''},
+ 'SepVal_c' : {'name':'Separation Value', 'value':['']*len(self.compounds), 'unit':''},
+ 'SepFact_c' : {'name':'Separation Factor', 'value':['']*len(self.compounds), 'unit':''},
+ }
+
+ def init_variables(self):
+ self.variables = {
+ 'SepStrm' : {'name':'Separation Stream', 'value':1, 'unit':''},
+ 'SepVal_c' : {'name':'Separation Value', 'value':['']*len(self.compounds), 'unit':''},
+ 'SepFact_c' : {'name':'Separation Factor', 'value':['']*len(self.compounds), 'unit':''},
+ }
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
+ def param_setter(self,params):
+ print("param_setter CompSep ", params)
+
+ if (params[0]):
+ self.variables['SepStrm']['value'] = 1
+ else:
+ self.variables['SepStrm']['value'] = 2
+ for index, i in enumerate(range(2, len(params))):
+ if (i %2 != 0):
+ self.variables['SepVal_c']['value'][index//2] = float(params[i])
+ else:
+ self.variables['SepFact_c']['value'][index//2] = params[i]
+
+ # self.variables['SepFact_c']['value'] = json.dumps(self.variables['SepFact_c']['value']).replace('[','{').replace(']','}')
+ # self.variables['SepStrm']['value'] = str(self.variables['SepStrm']['value'])
+ # self.variables['SepVal_c']['value'] = json.dumps(self.variables['SepVal_c']['value']).replace('[','{').replace(']','}')
+
+
+ def OM_Flowsheet_Initialize(self):
+ SepStrm = str(self.variables['SepStrm']['value'])
+ SepFact = []
+ for i in range(len(self.compounds)):
+ SepFact.append(self.variables['SepFact_c']['value'][i].split(' ')[0])
+ SepFact = json.dumps(SepFact).replace('[', '{').replace(']', '}')
+ self.OM_data_init = ''
+ comp_count = len(self.compounds)
+ self.OM_data_init = self.OM_data_init + (
+ "Simulator.UnitOperations.CompoundSeparator " + self.name + "(Nc = " + str(comp_count))
+ self.OM_data_init = self.OM_data_init + (", C = {")
+ comp = str(self.compounds).strip('[').strip(']')
+ comp = comp.replace("'", "")
+ self.OM_data_init = self.OM_data_init + comp + ("},")
+ self.OM_data_init = self.OM_data_init + ("SepFact_c = " + SepFact + ",SepStrm = " + SepStrm + ");\n")
+
+ return self.OM_data_init
+
+
+ def OM_Flowsheet_Equation(self):
+ SepVal = json.dumps(self.variables['SepVal_c']['value']).replace('[','{').replace(']','}')
+ self.OM_data_eqn = ''
+
+ self.OM_data_eqn += ('connect(' + self.name + '.In,' + self.input_stms[1].name + '.Out);\n')
+
+ strcount = 1
+ for strm in self.output_stms.values():
+ self.OM_data_eqn += ('connect(' + strm.name + '.In,' + self.name + '.Out' + str(strcount) + ');\n')
+ strcount += 1
+
+ self.OM_data_eqn += (self.name + '.SepVal_c ' + '=' + SepVal + ';\n')
+
+ return self.OM_data_eqn
+
+class Flash(UnitOperation):
+ def __init__(self,name='Flash'):
+ UnitOperation.__init__(self)
+ self.name = name + str(Flash.counter)
+ self.type = 'Flash'
+ self.extra = ['Flash']
+ self.for_naming = ['Flash']
+ self.no_of_inputs = 1
+ self.no_of_outputs = 2
+ self.count = Flash.counter
+ self.thermo_pack_req = True
+ self.parameters = ['BTdef', 'Tdef', 'BPdef', 'Pdef']
+
+ type(self).counter += 1
+ self.variables = {
+ 'thermo_package' : {'name':'Thermo Package', 'value':None, 'unit':''},
+ 'BTdef' : {'name':'Separation Temperature Boolean', 'value':False, 'unit':''},
+ 'BPdef' : {'name':'Separation Pressure Boolean', 'value':False, 'unit':''},
+ 'Tdef' : {'name':'Separation Temperature', 'value':298.15, 'unit':'K'},
+ 'Pdef' : {'name':'Separation Pressure', 'value':101325, 'unit':'Pa'}
+ }
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
+ def param_setter(self,params):
+ print("param_setter ", params)
+ self.variables['thermo_package']['value'] = params[0]
+ self.variables['BTdef']['value'] = params[1]
+ self.variables['Tdef']['value'] = params[2]
+ self.variables['BPdef']['value'] = params[3]
+ self.variables['Pdef']['value'] = params[4]
+
+ def OM_Flowsheet_Equation(self):
+ self.OM_data_eqn = ''
+
+ self.OM_data_eqn += ('connect(' + self.name + '.In,' + self.input_stms[1].name + '.Out);\n')
+
+ strcount = 1
+ for strm in self.output_stms.values():
+ self.OM_data_eqn += ('connect(' + strm.name + '.In,' + self.name + '.Out' + str(strcount) + ');\n')
+ strcount += 1
+
+ return self.OM_data_eqn
+
+class CentrifugalPump(UnitOperation):
+ def __init__(self,name='CentrifugalPump'):
+ UnitOperation.__init__(self)
+ self.name = name + str(CentrifugalPump.counter)
+ self.type = 'CentrifugalPump'
+ self.modes_list = ['Pdel', 'Pout', 'Q'] #"enFlo"
+ self.parameters = ['Eff']
+
+ type(self).counter += 1
+ self.variables = {
+ 'Eff' : {'name':'Efficiency', 'value':1, 'unit':''},
+ 'Pdel' : {'name':'Pressure Increase', 'value':None, 'unit':'Pa'},
+ 'Pout' : {'name':'Outlet Pressure', 'value':None, 'unit':'Pa'},
+ 'Q' : {'name':'Power Required', 'value':None, 'unit':'W'},
+ }
+
+class Valve(UnitOperation):
+ def __init__(self,name='Valve'):
+ UnitOperation.__init__(self)
+ self.name = name + str(Valve.counter)
+ self.type = 'Valve'
+ self.modes_list = ['Pdel', 'Pout']
+
+ type(self).counter += 1
+ self.variables = {
+ 'Pdel' : {'name':'Pressure Drop', 'value':None, 'unit':'Pa'},
+ 'Pout' : {'name':'Outlet Pressure', 'value':None, 'unit':'Pa'}
+ }
+
+class Splitter(UnitOperation):
+ def __init__(self,name='Splitter'):
+ UnitOperation.__init__(self)
+ self.name = name + str(Splitter.counter)
+ self.type = 'Splitter'
+ self.no_of_outputs = 2
+
+ self.CalcType_modes = ['Split_Ratio', 'Molar_Flow', 'Mass_Flow']
+
+ self.parameters = ['No', 'CalcType', 'SpecVal_s']
+ type(self).counter += 1
+
+ self.variables = {
+ 'No' : {'name':'No. of Output', 'value':2, 'unit':''},
+ 'CalcType' : {'name':'Calculation Type', 'value':self.CalcType_modes[0], 'unit':''},
+ 'SpecVal_s' : {'name':'Specification Value', 'value':[0.5,0.5], 'unit':''}
+ }
+
+ specval = self.variables['SpecVal_s']['value']
+ self.specval = json.dumps(specval).replace('[','{').replace(']','}')
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
+ def param_setter(self,params):
+ print("param_setter ", params)
+ self.variables['No']['value'] = int(params[0])
+ self.variables['CalcType']['value'] = params[1]
+ self.variables['SpecVal_s']['value'] = [float(params[2]), float(params[3])]
+ if self.variables['CalcType']['value'] == 'Molar_Flow':
+ self.variables['SpecVal_s']['unit'] = 'mol/s'
+ elif self.variables['CalcType']['value'] == 'Mass_Flow':
+ self.variables['SpecVal_s']['unit'] = 'g/s'
+ else:
+ self.variables['SpecVal_s']['unit'] = ''
+
+class Mixer(UnitOperation):
+
+ def __init__(self,name='Mixer'):
+ UnitOperation.__init__(self)
+ self.name = name + str(Mixer.counter)
+ self.type = 'Mixer'
+ self.no_of_inputs = 2
+
+ self.Pout_modes = ['Inlet_Minimum', 'Inlet_Average', 'Inlet_Maximum']
+ self.parameters = ['NI', 'outPress']
+ type(self).counter += 1
+
+ self.variables = {
+ 'NI' : {'name':'Number of Input', 'value':6, 'unit':''},
+ 'outPress' : {'name':'Outlet Pressure', 'value':'Inlet_Average', 'unit':''},
+ }
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
+ def param_setter(self, params):
+ self.variables['NI']['value'] = int(params[0])
+ self.variables['outPress']['value'] = params[1]
+
+class Heater(UnitOperation):
+
+ def __init__(self, name='Heater'):
+ UnitOperation.__init__(self)
+ self.name = name + str(type(self).counter)
+ self.type = 'Heater'
+ self.no_of_inputs = 1
+ self.no_of_outputs = 1
+ self.modes_list = ['Q','Tout','xvapout','Tdel']
+ self.parameters = ['Pdel', 'Eff']
+ self.extra = None
+ self.for_naming = None
+ type(self).counter += 1
+
+ self.variables = {
+ 'Pdel' : {'name':'Pressure Drop', 'value':0, 'unit':'Pa'},
+ 'Eff' : {'name':'Efficiency', 'value':1, 'unit':''},
+ 'Tout' : {'name':'Outlet Temperature', 'value':298.15, 'unit':'K'},
+ 'Tdel' : {'name':'Temperature Increase', 'value':0, 'unit':'K'},
+ 'Q' : {'name':'Heat Added', 'value':0, 'unit':'W'},
+ 'xvapout': {'name':'Outlet Vapour', 'value':None, 'unit':''}
+ }
+
+class Cooler(UnitOperation):
+
+ def __init__(self, name='Cooler'):
+ UnitOperation.__init__(self)
+ self.name = name + str(type(self).counter)
+ self.type = 'Cooler'
+ self.no_of_inputs = 1
+ self.no_of_outputs = 1
+ self.modes_list = ['Q','Tout','Tdel','xvap']
+ self.extra = None
+ self.for_naming = None
+ self.parameters = ['Pdel', 'Eff']
+ type(self).counter += 1
+
+ self.variables = {
+ 'Pdel' : {'name':'Pressure Drop', 'value':0, 'unit':'Pa'},
+ 'Eff' : {'name':'Efficiency', 'value':1, 'unit':''},
+ 'Tout' : {'name':'Outlet Temperature', 'value':298.15, 'unit':'K'},
+ 'Tdel' : {'name':'Temperature Drop', 'value':0, 'unit':'K'},
+ 'Q' : {'name':'Heat Removed', 'value':0, 'unit':'W'},
+ 'xvap' : {'name':'Vapour Phase Mole Fraction', 'value':None, 'unit':''},
+ }
+
+class AdiabaticCompressor(UnitOperation):
+
+ def __init__(self, name='AdiabaticCompressor'):
+ UnitOperation.__init__(self)
+ self.name = name + str(type(self).counter)
+ self.type = 'AdiabaticCompressor'
+ self.no_of_inputs = 1
+ self.no_of_outputs = 1
+ self.modes_list = ["Pdel","Pout","Q"]
+ self.extra = ['AdiabaticCompressor']
+ self.for_naming = ['AdiabaticCompressor']
+ self.thermo_pack_req = True
+ self.thermo_package ="RaoultsLaw"
+ self.parameters = ['Eff']
+ type(self).counter += 1
+ self.variables = {
+ 'Pdel' : {'name':'Pressure Increase', 'value':0, 'unit':'Pa'},
+ 'Tdel' : {'name':'Temperature Increase', 'value':0, 'unit':'K'},
+ 'Pout' : {'name':'Outlet Pressure', 'value':101325, 'unit':'Pa'},
+ 'Tout' : {'name':'Outlet Temperature', 'value':298.15, 'unit':'K'},
+ 'Q' : {'name':'Heat Added', 'value':0, 'unit':'W'},
+ 'Eff' : {'name':'Efficiency', 'value':1, 'unit':''}
+ }
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
+
+class AdiabaticExpander(UnitOperation):
+
+ def __init__(self, name='AdiabaticExpander'):
+ UnitOperation.__init__(self)
+ self.name = name + str(type(self).counter)
+ self.type = 'AdiabaticExpander'
+ self.no_of_inputs = 1
+ self.no_of_outputs = 1
+ self.modes_list = ["Pdel","Pout","Q"]
+ self.extra = ['AdiabaticExpander']
+ self.for_naming = ['AdiabaticExpander']
+ self.thermo_pack_req = True
+ self.thermo_package ="RaoultsLaw"
+ self.parameters = ['Eff']
+ type(self).counter += 1
+ self.variables = {
+ 'Pdel' : {'name':'Pressure Drop', 'value':0, 'unit':'Pa'},
+ 'Tdel' : {'name':'Temperature Increase', 'value':0, 'unit':'K'},
+ 'Pout' : {'name':'Outlet Pressure', 'value':101325, 'unit':'Pa'},
+ 'Tout' : {'name':'Outlet Temperature', 'value':298.15, 'unit':'K'},
+ 'Q' : {'name':'Heat Added', 'value':0, 'unit':'W'},
+ 'Eff' : {'name':'Efficiency', 'value':1, 'unit':''}
+ }
+
+ def update_compounds(self):
+ self.compounds = [c[:c.index('(')] for c in compound_selected]
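+
+ # Minimal usage sketch with hypothetical values (stream and flowsheet
+ # wiring is handled elsewhere in utils/ and OMChem/):
+ #   mixer = Mixer()
+ #   mixer.param_setter(['2', 'Inlet_Average'])
+ #   heater = Heater()
+ #   heater.variables['Tout']['value'] = 350.0    # K
+ #   comp = AdiabaticCompressor()
+ #   comp.variables['Pout']['value'] = 202650     # Pa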
\ No newline at end of file
diff --git a/src/main/python/utils/__init__.py b/src/main/python/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/main/python/utils/__pycache__/Bin_Phase_env.cpython-37.pyc b/src/main/python/utils/__pycache__/Bin_Phase_env.cpython-37.pyc
new file mode 100644
index 0000000..b5ecd9c
Binary files /dev/null and b/src/main/python/utils/__pycache__/Bin_Phase_env.cpython-37.pyc differ
diff --git a/src/main/python/utils/__pycache__/ComponentSelector.cpython-37.pyc b/src/main/python/utils/__pycache__/ComponentSelector.cpython-37.pyc
new file mode 100644
index 0000000..89175f7
Binary files /dev/null and b/src/main/python/utils/__pycache__/ComponentSelector.cpython-37.pyc differ
diff --git a/src/main/python/utils/__pycache__/Container.cpython-37.pyc b/src/main/python/utils/__pycache__/Container.cpython-37.pyc
new file mode 100644
index 0000000..1aa6e29
Binary files /dev/null and b/src/main/python/utils/__pycache__/Container.cpython-37.pyc differ
diff --git a/src/main/python/utils/__pycache__/Graphics.cpython-37.pyc b/src/main/python/utils/__pycache__/Graphics.cpython-37.pyc
new file mode 100644
index 0000000..f7366e5
Binary files /dev/null and b/src/main/python/utils/__pycache__/Graphics.cpython-37.pyc differ
diff --git a/src/main/python/utils/__pycache__/Streams.cpython-37.pyc b/src/main/python/utils/__pycache__/Streams.cpython-37.pyc
new file mode 100644
index 0000000..5bde993
Binary files /dev/null and b/src/main/python/utils/__pycache__/Streams.cpython-37.pyc differ
diff --git a/src/main/python/utils/__pycache__/UnitOperations.cpython-37.pyc b/src/main/python/utils/__pycache__/UnitOperations.cpython-37.pyc
new file mode 100644
index 0000000..6385ffd
Binary files /dev/null and b/src/main/python/utils/__pycache__/UnitOperations.cpython-37.pyc differ
diff --git a/src/main/python/utils/__pycache__/__init__.cpython-37.pyc b/src/main/python/utils/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..b1c23fe
Binary files /dev/null and b/src/main/python/utils/__pycache__/__init__.cpython-37.pyc differ
diff --git a/src/main/python/utils/thermopackage.txt b/src/main/python/utils/thermopackage.txt
new file mode 100644
index 0000000..f9155f5
--- /dev/null
+++ b/src/main/python/utils/thermopackage.txt
@@ -0,0 +1,6 @@
+RaoultsLaw
+NRTL
+UNIQUAC
+UNIFAC
+PengRobinson
+GraysonStreed
\ No newline at end of file
diff --git a/src/main/python/venv/Lib/site-packages/__pycache__/mccabe.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/__pycache__/mccabe.cpython-37.pyc
new file mode 100644
index 0000000..44dca75
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/__pycache__/mccabe.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/__pycache__/six.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/__pycache__/six.cpython-37.pyc
new file mode 100644
index 0000000..07b383b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/__pycache__/six.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING
new file mode 100644
index 0000000..d511905
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING
@@ -0,0 +1,339 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation. If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission. For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this. Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 2 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along
+ with this program; if not, write to the Free Software Foundation, Inc.,
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+ `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs. If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.
diff --git a/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING.LESSER b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING.LESSER
new file mode 100644
index 0000000..2d2d780
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/COPYING.LESSER
@@ -0,0 +1,510 @@
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations
+below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it
+becomes a de-facto standard. To achieve this, non-free programs must
+be allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control
+compilation and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at least
+ three years, to give the same user the materials specified in
+ Subsection 6a, above, for a charge no more than the cost of
+ performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply, and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License
+may add an explicit geographical distribution limitation excluding those
+countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms
+of the ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library.
+It is safest to attach them to the start of each source file to most
+effectively convey the exclusion of warranty; and each file should
+have at least the "copyright" line and a pointer to where the full
+notice is found.
+
+
+ <one line to give the library's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or
+your school, if any, to sign a "copyright disclaimer" for the library,
+if necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James
+ Random Hacker.
+
+ <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
+
+
diff --git a/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/INSTALLER b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/METADATA b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/METADATA
new file mode 100644
index 0000000..2805693
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/METADATA
@@ -0,0 +1,117 @@
+Metadata-Version: 2.1
+Name: astroid
+Version: 2.3.3
+Summary: An abstract syntax tree for Python with inference support.
+Home-page: https://github.com/PyCQA/astroid
+Author: Python Code Quality Authority
+Author-email: code-quality@python.org
+License: LGPL
+Platform: UNKNOWN
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Quality Assurance
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.5.*
+Requires-Dist: lazy-object-proxy (==1.4.*)
+Requires-Dist: six (~=1.12)
+Requires-Dist: wrapt (==1.11.*)
+Requires-Dist: typed-ast (<1.5,>=1.4.0) ; implementation_name == "cpython" and python_version < "3.8"
+
+Astroid
+=======
+
+.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
+ :target: https://travis-ci.org/PyCQA/astroid
+
+.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
+ :alt: AppVeyor Build Status
+ :target: https://ci.appveyor.com/project/PCManticore/astroid
+
+.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
+ :target: https://coveralls.io/github/PyCQA/astroid?branch=master
+
+.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
+ :target: http://astroid.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+.. |tideliftlogo| image:: doc/media/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+ :width: 75
+ :height: 60
+ :alt: Tidelift
+
+.. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for astroid is available as part of the `Tidelift
+ Subscription`_. Tidelift gives software development teams a single source for
+ purchasing and maintaining their software, with professional grade assurances
+ from the experts who know it best, while seamlessly integrating with existing
+ tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-astroid?utm_source=pypi-astroid&utm_medium=referral&utm_campaign=readme
+
+
+
+What's this?
+------------
+
+The aim of this module is to provide a common base representation of
+python source code. It is currently the library powering pylint's capabilities.
+
+It provides a compatible representation which comes from the `_ast`
+module. It rebuilds the tree generated by the builtin _ast module by
+recursively walking down the AST and building an extended ast. The new
+node classes have additional methods and attributes for different
+usages. They include some support for static inference and local name
+scopes. Furthermore, astroid can also build partial trees by inspecting living
+objects.
+
+
+Installation
+------------
+
+Extract the tarball, jump into the created directory and run::
+
+ pip install .
+
+
+If you want to do an editable installation, you can run::
+
+ pip install -e .
+
+
+If you have any questions, please mail the code-quality@python.org
+mailing list for support. See
+http://mail.python.org/mailman/listinfo/code-quality for subscription
+information and archives.
+
+Documentation
+-------------
+http://astroid.readthedocs.io/en/latest/
+
+
+Python Versions
+---------------
+
+astroid 2.0 is currently available for Python 3 only. If you want Python 2
+support, older versions of astroid will still supported until 2020.
+
+Test
+----
+
+Tests are in the 'test' subdirectory. To launch the whole tests suite, you can use
+either `tox` or `pytest`::
+
+ tox
+ pytest astroid
+
+
diff --git a/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/RECORD b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/RECORD
new file mode 100644
index 0000000..f5d983e
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/RECORD
@@ -0,0 +1,145 @@
+astroid-2.3.3.dist-info/COPYING,sha256=qxX9UmvY3Rip5368E5ZWv00z6X_HI4zRG_YOK5uGZsY,17987
+astroid-2.3.3.dist-info/COPYING.LESSER,sha256=qb3eVhbs3R6YC0TzYGAO6Hg7H5m4zIOivrFjoKOQ6GE,26527
+astroid-2.3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+astroid-2.3.3.dist-info/METADATA,sha256=i0Ut5kY28jjA7pIT7o-_UbHKI5HbTXA0xQubIxcHO8w,3869
+astroid-2.3.3.dist-info/RECORD,,
+astroid-2.3.3.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
+astroid-2.3.3.dist-info/top_level.txt,sha256=HsdW4O2x7ZXRj6k-agi3RaQybGLobI3VSE-jt4vQUXM,8
+astroid/__init__.py,sha256=tJJMsKzMv8hUgw3y0VQAAMx9BO-nrNUcNy_wI0XBFXo,5538
+astroid/__pkginfo__.py,sha256=vS7X-qu0abKFCIxjA0h9994nl1zj7Ziu3lEz9jniONU,2053
+astroid/__pycache__/__init__.cpython-37.pyc,,
+astroid/__pycache__/__pkginfo__.cpython-37.pyc,,
+astroid/__pycache__/_ast.cpython-37.pyc,,
+astroid/__pycache__/arguments.cpython-37.pyc,,
+astroid/__pycache__/as_string.cpython-37.pyc,,
+astroid/__pycache__/bases.cpython-37.pyc,,
+astroid/__pycache__/builder.cpython-37.pyc,,
+astroid/__pycache__/context.cpython-37.pyc,,
+astroid/__pycache__/decorators.cpython-37.pyc,,
+astroid/__pycache__/exceptions.cpython-37.pyc,,
+astroid/__pycache__/helpers.cpython-37.pyc,,
+astroid/__pycache__/inference.cpython-37.pyc,,
+astroid/__pycache__/manager.cpython-37.pyc,,
+astroid/__pycache__/mixins.cpython-37.pyc,,
+astroid/__pycache__/modutils.cpython-37.pyc,,
+astroid/__pycache__/node_classes.cpython-37.pyc,,
+astroid/__pycache__/nodes.cpython-37.pyc,,
+astroid/__pycache__/objects.cpython-37.pyc,,
+astroid/__pycache__/protocols.cpython-37.pyc,,
+astroid/__pycache__/raw_building.cpython-37.pyc,,
+astroid/__pycache__/rebuilder.cpython-37.pyc,,
+astroid/__pycache__/scoped_nodes.cpython-37.pyc,,
+astroid/__pycache__/test_utils.cpython-37.pyc,,
+astroid/__pycache__/transforms.cpython-37.pyc,,
+astroid/__pycache__/util.cpython-37.pyc,,
+astroid/_ast.py,sha256=6OGeHGRbK6oLmrsw6-UOpLFlIV1rStrA7BNpKGsu5Lw,1406
+astroid/arguments.py,sha256=cui-UmbEeywSk0eitSrOhi9F0Ci2clS4qYXTi8uXRs4,11783
+astroid/as_string.py,sha256=8SoRjh8UlDRWkbFMTvse9th8flPt6iu9xOcBip1s1f8,22411
+astroid/bases.py,sha256=G2Zs5OEHoshjLJT8e-ApDH9Q3EZtC27cKJ5yKf84_7w,18698
+astroid/brain/__pycache__/brain_argparse.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_attrs.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_collections.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_crypt.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_curses.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_functools.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_gi.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_http.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_io.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_nose.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_pytest.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_qt.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_random.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_re.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_six.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_ssl.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_threading.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_typing.cpython-37.pyc,,
+astroid/brain/__pycache__/brain_uuid.cpython-37.pyc,,
+astroid/brain/brain_argparse.py,sha256=VEeMCr3OIjHmCy35uc-kX6nJ5_NUOAimpGJMr6CChoA,1024
+astroid/brain/brain_attrs.py,sha256=k8zJqIXsIbQrncthrzyB5NtdPTktgVi9wG7nyl8xMzs,2208
+astroid/brain/brain_builtin_inference.py,sha256=Ttwr1Ekt1_czEF50uEjY0dA5S89WFqyyBl0sWPUaYnE,27206
+astroid/brain/brain_collections.py,sha256=8Vmsb9I19er3MycZtT6qWDrIMV_SEHtl87gTPC5qQHc,2651
+astroid/brain/brain_crypt.py,sha256=gA7Q4GVuAM4viuTGWM6SNTPQXv5Gr_mFapyKMTRcsJ0,875
+astroid/brain/brain_curses.py,sha256=tDnlCP1bEvleqCMz856yua9mM5um1p_JendFhT4rBFk,3303
+astroid/brain/brain_dataclasses.py,sha256=5WndOYSY0oi2v-Od6KdPte-FKt00LoNRH2riSB4S1os,1647
+astroid/brain/brain_dateutil.py,sha256=q2dyV2907Bw4n7m2W4EEdok3Ndv8NzeIQxAZwXBiS14,795
+astroid/brain/brain_fstrings.py,sha256=VKVMijgLE2pg2dtXM6GGFgONOxOg8qA9D5V6dYzWTbQ,2121
+astroid/brain/brain_functools.py,sha256=gGMs0cEMVXR9pRPeu3LqkMARE6yzymvC7pzmRbJCWIY,5400
+astroid/brain/brain_gi.py,sha256=-EpcKf9z3wT_7v0k0WXIZtgk3-213lkfUX9bxeKOM3Y,6810
+astroid/brain/brain_hashlib.py,sha256=cp30hX5HhWqbWG3zqcNu8N3aHGeQK4DPi4ac8owBonU,2163
+astroid/brain/brain_http.py,sha256=-cQohgE5uQ5eBBjjFg7P5c2OlganAK6yZOKA6EkKd6o,10317
+astroid/brain/brain_io.py,sha256=DJcTFMTexrsHaGg2-kHoXwonddu13ImT7NEjiF1xPiU,1470
+astroid/brain/brain_mechanize.py,sha256=xTBc-u2DMmMPeci7DVFs4L2T98DwwLF_Ob5YZviLPp8,889
+astroid/brain/brain_multiprocessing.py,sha256=4iLBXpB7Bgy_hGVx-xhV7spYKg5tc4OybIiBcuwNL7U,3017
+astroid/brain/brain_namedtuple_enum.py,sha256=JBRVBhPSicUAixPdeEerhnxeEJtVnS7T1FkVhvJcDZU,15722
+astroid/brain/brain_nose.py,sha256=kECw2jHmX0IUPX4Gx3XVGrflKGnlgPB79QHt6WU2cwQ,2211
+astroid/brain/brain_numpy_core_fromnumeric.py,sha256=_mtg-7jySDnDoxhtrNtimVZ_lbsm63jb7U0iqcBjgLY,626
+astroid/brain/brain_numpy_core_function_base.py,sha256=2jtHOa_RCMlig7UZVUWSmICFvotvu7bZKCdLZhbTc0Q,1173
+astroid/brain/brain_numpy_core_multiarray.py,sha256=e-igYgbLP8UhCq3VSlRhykhXyoMcO2M7UOcrbzfuWpQ,1890
+astroid/brain/brain_numpy_core_numeric.py,sha256=RP9L1GfhPBGK3KQeeDoo-OyFUvkVNksw0sc9a6t3NJ8,1389
+astroid/brain/brain_numpy_core_numerictypes.py,sha256=RBRdil8D5qtTj6yquQ6_JwYACKRM7vfh4p7nwy3MYLk,7706
+astroid/brain/brain_numpy_core_umath.py,sha256=GGTCDVNDKEAppXjjToNzawa8lpCFr9GEh0OY3eQulec,5279
+astroid/brain/brain_numpy_ndarray.py,sha256=GMDomYcpCfCoKa1amdtQPsdy_VMPot3QUaG9mxlApBk,8417
+astroid/brain/brain_numpy_random_mtrand.py,sha256=It76Xh4atuxwGtsHiXe4llvEKyKh0R5Wa7MgG5y5vVU,3284
+astroid/brain/brain_numpy_utils.py,sha256=NxY99MzQ-m2Md_nofdAU30DFmse2CjpgqfWvYoMDDOc,1622
+astroid/brain/brain_pkg_resources.py,sha256=S_5UED1Zg8ObEJumRdpYGnjxZzemh_G_NFj3p5NGPfc,2262
+astroid/brain/brain_pytest.py,sha256=RXaNUVqy2R0et0Upn4GJkVgq5SG8Pl7zLlhqQg8Xx3Q,2384
+astroid/brain/brain_qt.py,sha256=FXdziZGGzFRzukhZguFoMY4q6PSsp6ZhNJovpzDG_Kc,2464
+astroid/brain/brain_random.py,sha256=2RZY-QEXMNWp7E6h0l0-ke-DtjKTOFlTdjiQZi3XdQc,2432
+astroid/brain/brain_re.py,sha256=le7VJHUAf80HyE_aQCh7_8FyDVK6JwNWA--c9RaMVQ8,1128
+astroid/brain/brain_six.py,sha256=6QHcKXoYf8yMMXWkx3g3lK0kqB5OFeYcXwjUTdgWTMw,6146
+astroid/brain/brain_ssl.py,sha256=2quiZVA_BW8PWmkAsOuYanq9Hvb93LT7c9YVslw3r14,3634
+astroid/brain/brain_subprocess.py,sha256=iXuKDWsUJhJDdKLDm6N8EiBw78Pjn-Xw-UJFk5gvup0,3668
+astroid/brain/brain_threading.py,sha256=73Inb3j7Tps5LQDJDGZOgR-bawttS1rk1l0LUL1WR1o,818
+astroid/brain/brain_typing.py,sha256=iFw33beNCitCJjJNvccIY6SsFJcdKVDdl-56DxDioh0,2780
+astroid/brain/brain_uuid.py,sha256=flWrk1Ve7oqYrO8GTZ3To8RBYteRfYwvash-s9KiU9o,564
+astroid/builder.py,sha256=0wrC4-ausU_nEEkgI8LJTsrNFN_XCbOkqoG2DsKCsks,16023
+astroid/context.py,sha256=VsyUDVB1J9fk1o8MQoE4ygfC7gdNjVYVUD4Bhgs9JM0,5164
+astroid/decorators.py,sha256=m0v63YRiQKc66-g8ckkYeJ0d5cji8AhkUxFPbTfLVDc,4229
+astroid/exceptions.py,sha256=_IJRdLfyNSPVjxYgEd11Uu9XpdqE7uBCVOEIxt3ua70,7047
+astroid/helpers.py,sha256=3HOFwK0ieIoLu7JhrbM1r0zxPyDtTl2oNSv-tXQ2iRw,9170
+astroid/inference.py,sha256=0diHXE-ZGiWU9y31flQa3YZhg6-v4dZgD4PPFAlHJGc,33023
+astroid/interpreter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+astroid/interpreter/__pycache__/__init__.cpython-37.pyc,,
+astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc,,
+astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc,,
+astroid/interpreter/_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc,,
+astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc,,
+astroid/interpreter/_import/__pycache__/util.cpython-37.pyc,,
+astroid/interpreter/_import/spec.py,sha256=L48FismdLnk6wjyAzIzJocKVdkBmbQlJgxwzeJ2_luA,11318
+astroid/interpreter/_import/util.py,sha256=inubUz6F3_kaMFaeleKUW6E6wCMIPrhU882zvwEZ02I,255
+astroid/interpreter/dunder_lookup.py,sha256=dP-AZU_aGPNt03b1ttrMglxzeU3NtgnG0MfpSLPH6sg,2155
+astroid/interpreter/objectmodel.py,sha256=7wQbTJhoUwH89x3tBfaA9WLaudBjwKcNpsBPWBQM_7U,23935
+astroid/manager.py,sha256=p7YPLYupDzG05OxR8qqF4fWMJExFAGIjTbVunPT3ECQ,12998
+astroid/mixins.py,sha256=F2rv2Ow7AU3YT_2jitVJik95ZWRVK6hpf8BrkkspzUY,5571
+astroid/modutils.py,sha256=1mBU_-rZH5-9K4nXB9hPi4mesi-pdlDltM_A-OU3zec,23425
+astroid/node_classes.py,sha256=FVYqErzW6lEHEZz3x_ZsqpyR1nyNOvnt0_Oi86btwAQ,140093
+astroid/nodes.py,sha256=tzYNu1tTF8bemsDitnSj7RFjQR2hrwlMDTwAmULoU5A,2957
+astroid/objects.py,sha256=q6ffgYLpyHENUY8BtiZAPHhnz91LJbQFkuaQnrNtf7g,9879
+astroid/protocols.py,sha256=Y-Mupe42X_FrdDC6KwnLyUM4yByWicR_tfqaSGWopT0,26828
+astroid/raw_building.py,sha256=HKYGE5Ll3g0WKntVErqCacQFiyTa5OVuVieIhkvckbc,16808
+astroid/rebuilder.py,sha256=q1XtkOYkykbRhk2UXhuMGsnGZFMzCDxdvTaG4VEh6Mw,41835
+astroid/scoped_nodes.py,sha256=C-ZcmS7QNkIBGUb2wc-hbHaUtOvfcOkQxYhD8xPrwjQ,94141
+astroid/test_utils.py,sha256=Q9SsfJDCJqSdRzEkp_5i1xLGcbFDztqqkdRjjLH476o,2314
+astroid/transforms.py,sha256=1npwJWcQUSIjcpcWd1pc-dJhtHOyiboQHsETAIQd5co,3377
+astroid/util.py,sha256=jg5LnqbWSZTZP1KgpxGBuC6Lfwhn9Jb2T2TohXghmC0,4785
diff --git a/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/WHEEL b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/WHEEL
new file mode 100644
index 0000000..3b5c403
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/top_level.txt b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/top_level.txt
new file mode 100644
index 0000000..450d4fe
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid-2.3.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+astroid
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__init__.py b/src/main/python/venv/Lib/site-packages/astroid/__init__.py
new file mode 100644
index 0000000..d36a5b4
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/__init__.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2016 Moises Lopez
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Python Abstract Syntax Tree New Generation
+
+The aim of this module is to provide a common base representation of
+python source code for projects such as pychecker, pyreverse,
+pylint... Well, actually the development of this library is essentially
+governed by pylint's needs.
+
+It extends classes defined in Python's _ast module with some
+additional methods and attributes. Instance attributes are added by a
+builder object, which can either generate extended ast nodes (let's call
+them astroid ;) by visiting an existing ast tree or by inspecting living
+objects. Methods are added by monkey patching ast classes.
+
+Main modules are:
+
+* nodes and scoped_nodes for more information about methods and
+ attributes added to different node classes
+
+* the manager contains a high level object to get astroid trees from
+ source files and living objects. It maintains a cache of previously
+  constructed trees for quick access
+
+* builder contains the class responsible to build astroid trees
+"""
+
+import enum
+import itertools
+import os
+import sys
+
+import wrapt
+
+
+_Context = enum.Enum("Context", "Load Store Del")
+Load = _Context.Load
+Store = _Context.Store
+Del = _Context.Del
+del _Context
+
+
+from .__pkginfo__ import version as __version__
+
+# WARNING: internal imports order matters !
+
+# pylint: disable=redefined-builtin
+
+# make all exception classes accessible from astroid package
+from astroid.exceptions import *
+
+# make all node classes accessible from astroid package
+from astroid.nodes import *
+
+# trigger extra monkey-patching
+from astroid import inference
+
+# more stuff available
+from astroid import raw_building
+from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod
+from astroid.node_classes import are_exclusive, unpack_infer
+from astroid.scoped_nodes import builtin_lookup
+from astroid.builder import parse, extract_node
+from astroid.util import Uninferable
+
+# make a manager instance (borg) accessible from astroid package
+from astroid.manager import AstroidManager
+
+MANAGER = AstroidManager()
+del AstroidManager
+
+# transform utilities (filters and decorator)
+
+
+# pylint: disable=dangerous-default-value
+@wrapt.decorator
+def _inference_tip_cached(func, instance, args, kwargs, _cache={}):
+ """Cache decorator used for inference tips"""
+ node = args[0]
+ try:
+ return iter(_cache[func, node])
+ except KeyError:
+ result = func(*args, **kwargs)
+ # Need to keep an iterator around
+ original, copy = itertools.tee(result)
+ _cache[func, node] = list(copy)
+ return original
+
+
+# pylint: enable=dangerous-default-value
+
+
+def inference_tip(infer_function, raise_on_overwrite=False):
+ """Given an instance specific inference function, return a function to be
+ given to MANAGER.register_transform to set this inference function.
+
+ :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
+ if the inference tip will overwrite another. Used for debugging
+
+ Typical usage
+
+ .. sourcecode:: python
+
+ MANAGER.register_transform(Call, inference_tip(infer_named_tuple),
+ predicate)
+
+ .. Note::
+
+ Using an inference tip will override
+ any previously set inference tip for the given
+ node. Use a predicate in the transform to prevent
+ excess overwrites.
+ """
+
+ def transform(node, infer_function=infer_function):
+ if (
+ raise_on_overwrite
+ and node._explicit_inference is not None
+ and node._explicit_inference is not infer_function
+ ):
+ raise InferenceOverwriteError(
+ "Inference already set to {existing_inference}. "
+ "Trying to overwrite with {new_inference} for {node}".format(
+                    existing_inference=node._explicit_inference,
+                    new_inference=infer_function,
+ node=node,
+ )
+ )
+ # pylint: disable=no-value-for-parameter
+ node._explicit_inference = _inference_tip_cached(infer_function)
+ return node
+
+ return transform
+
+
+def register_module_extender(manager, module_name, get_extension_mod):
+    """Register a transform that merges the locals of the extension module
+    built by *get_extension_mod* into the module named *module_name*."""
+
+    def transform(node):
+ extension_module = get_extension_mod()
+ for name, objs in extension_module.locals.items():
+ node.locals[name] = objs
+ for obj in objs:
+ if obj.parent is extension_module:
+ obj.parent = node
+
+ manager.register_transform(Module, transform, lambda n: n.name == module_name)
+
+
+# load brain plugins
+BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), "brain")
+if BRAIN_MODULES_DIR not in sys.path:
+ # add it to the end of the list so user path take precedence
+ sys.path.append(BRAIN_MODULES_DIR)
+# load modules in this directory
+for module in os.listdir(BRAIN_MODULES_DIR):
+ if module.endswith(".py"):
+ __import__(module[:-3])
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pkginfo__.py b/src/main/python/venv/Lib/site-packages/astroid/__pkginfo__.py
new file mode 100644
index 0000000..4a17b5d
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/__pkginfo__.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2015-2017 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2015 Radosław Ganczarek
+# Copyright (c) 2016 Moises Lopez
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Ashley Whetter
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""astroid packaging information"""
+
+version = "2.3.3"
+numversion = tuple(int(elem) for elem in version.split(".") if elem.isdigit())
+
+extras_require = {}
+install_requires = [
+ "lazy_object_proxy==1.4.*",
+ "six~=1.12",
+ "wrapt==1.11.*",
+ 'typed-ast>=1.4.0,<1.5;implementation_name== "cpython" and python_version<"3.8"',
+]
+
+# pylint: disable=redefined-builtin; why license is a builtin anyway?
+license = "LGPL"
+
+author = "Python Code Quality Authority"
+author_email = "code-quality@python.org"
+mailinglist = "mailto://%s" % author_email
+web = "https://github.com/PyCQA/astroid"
+
+description = "An abstract syntax tree for Python with inference support."
+
+classifiers = [
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: Software Development :: Quality Assurance",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+]
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..eb28207
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/__init__.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc
new file mode 100644
index 0000000..ed3f17b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/__pkginfo__.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc
new file mode 100644
index 0000000..c6f8a74
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/_ast.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc
new file mode 100644
index 0000000..64896f7
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/arguments.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc
new file mode 100644
index 0000000..372e534
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/as_string.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc
new file mode 100644
index 0000000..366b834
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/bases.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc
new file mode 100644
index 0000000..6ff12eb
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/builder.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc
new file mode 100644
index 0000000..777eede
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/context.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc
new file mode 100644
index 0000000..1bc12f8
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/decorators.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc
new file mode 100644
index 0000000..211001b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/exceptions.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc
new file mode 100644
index 0000000..bae7ec3
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/helpers.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc
new file mode 100644
index 0000000..c9328c1
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/inference.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc
new file mode 100644
index 0000000..31b45d7
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/manager.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc
new file mode 100644
index 0000000..7b5b9e4
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/mixins.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc
new file mode 100644
index 0000000..a0f3b48
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/modutils.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc
new file mode 100644
index 0000000..7abdd4b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/node_classes.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc
new file mode 100644
index 0000000..18c04f8
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/nodes.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc
new file mode 100644
index 0000000..460886a
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/objects.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc
new file mode 100644
index 0000000..d628662
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/protocols.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc
new file mode 100644
index 0000000..0b414cf
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/raw_building.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc
new file mode 100644
index 0000000..13516ca
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/rebuilder.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc
new file mode 100644
index 0000000..d767b50
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/scoped_nodes.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc
new file mode 100644
index 0000000..4b6fba6
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/test_utils.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc
new file mode 100644
index 0000000..b2f4230
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/transforms.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc
new file mode 100644
index 0000000..b5e4fe7
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/__pycache__/util.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/_ast.py b/src/main/python/venv/Lib/site-packages/astroid/_ast.py
new file mode 100644
index 0000000..2e44c1f
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/_ast.py
@@ -0,0 +1,49 @@
+import ast
+from collections import namedtuple
+from functools import partial
+from typing import Optional
+import sys
+
+_ast_py2 = _ast_py3 = None
+try:
+ import typed_ast.ast3 as _ast_py3
+ import typed_ast.ast27 as _ast_py2
+except ImportError:
+ pass
+
+
+PY38 = sys.version_info[:2] >= (3, 8)
+if PY38:
+ # On Python 3.8, typed_ast was merged back into `ast`
+ _ast_py3 = ast
+
+
+FunctionType = namedtuple("FunctionType", ["argtypes", "returns"])
+
+
+def _get_parser_module(parse_python_two: bool = False):
+ if parse_python_two:
+ parser_module = _ast_py2
+ else:
+ parser_module = _ast_py3
+ return parser_module or ast
+
+
+def _parse(string: str, parse_python_two: bool = False):
+ parse_module = _get_parser_module(parse_python_two=parse_python_two)
+ parse_func = parse_module.parse
+ if _ast_py3:
+ if PY38:
+ parse_func = partial(parse_func, type_comments=True)
+ if not parse_python_two:
+ parse_func = partial(parse_func, feature_version=sys.version_info.minor)
+ return parse_func(string)
+
+
+def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
+ """Given a correct type comment, obtain a FunctionType object"""
+ if _ast_py3 is None:
+ return None
+
+    func_type = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
+ return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
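+
+# Usage sketch (comment only): parse_function_type_comment("(int, str) -> bool")
+# yields a FunctionType whose `argtypes` holds the two argument annotation nodes
+# and whose `returns` holds the annotation node for `bool`; None is returned
+# instead when no typed_ast-compatible parser is available.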
diff --git a/src/main/python/venv/Lib/site-packages/astroid/arguments.py b/src/main/python/venv/Lib/site-packages/astroid/arguments.py
new file mode 100644
index 0000000..c4bdc6d
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/arguments.py
@@ -0,0 +1,285 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Anthony Sottile
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+from astroid import bases
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import nodes
+from astroid import util
+
+
+class CallSite:
+ """Class for understanding arguments passed into a call site
+
+ It needs a call context, which contains the arguments and the
+ keyword arguments that were passed into a given call site.
+ In order to infer what an argument represents, call
+ :meth:`infer_argument` with the corresponding function node
+ and the argument name.
+ """
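+
+    # Illustrative sketch (comment only; `call`, `func`, and `ctx` below are
+    # assumed to be an astroid Call node, the called FunctionDef, and an
+    # inference context, respectively):
+    #
+    #   site = CallSite.from_call(call)
+    #   inferred_values = site.infer_argument(func, "x", ctx)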
+
+ def __init__(self, callcontext, argument_context_map=None):
+ if argument_context_map is None:
+ argument_context_map = {}
+ self.argument_context_map = argument_context_map
+ args = callcontext.args
+ keywords = callcontext.keywords
+ self.duplicated_keywords = set()
+ self._unpacked_args = self._unpack_args(args)
+ self._unpacked_kwargs = self._unpack_keywords(keywords)
+
+ self.positional_arguments = [
+ arg for arg in self._unpacked_args if arg is not util.Uninferable
+ ]
+ self.keyword_arguments = {
+ key: value
+ for key, value in self._unpacked_kwargs.items()
+ if value is not util.Uninferable
+ }
+
+ @classmethod
+ def from_call(cls, call_node):
+ """Get a CallSite object from the given Call node."""
+ callcontext = contextmod.CallContext(call_node.args, call_node.keywords)
+ return cls(callcontext)
+
+ def has_invalid_arguments(self):
+        """Check whether *invalid* arguments were passed to the current CallSite
+
+ This can mean multiple things. For instance, if an unpacking
+ of an invalid object was passed, then this method will return True.
+ Other cases can be when the arguments can't be inferred by astroid,
+ for example, by passing objects which aren't known statically.
+ """
+ return len(self.positional_arguments) != len(self._unpacked_args)
+
+ def has_invalid_keywords(self):
+        """Check whether *invalid* keyword arguments were passed to the current CallSite
+
+        For instance, unpacking a dictionary with integer keys is invalid
+        (**{1:2}), because the keys must be strings, which will make this
+        method return True. This might also return True when objects
+        which can't be inferred were passed.
+ """
+ return len(self.keyword_arguments) != len(self._unpacked_kwargs)
+
+ def _unpack_keywords(self, keywords):
+ values = {}
+ context = contextmod.InferenceContext()
+ context.extra_context = self.argument_context_map
+ for name, value in keywords:
+ if name is None:
+ # Then it's an unpacking operation (**)
+ try:
+ inferred = next(value.infer(context=context))
+ except exceptions.InferenceError:
+ values[name] = util.Uninferable
+ continue
+
+ if not isinstance(inferred, nodes.Dict):
+ # Not something we can work with.
+ values[name] = util.Uninferable
+ continue
+
+ for dict_key, dict_value in inferred.items:
+ try:
+ dict_key = next(dict_key.infer(context=context))
+ except exceptions.InferenceError:
+ values[name] = util.Uninferable
+ continue
+ if not isinstance(dict_key, nodes.Const):
+ values[name] = util.Uninferable
+ continue
+ if not isinstance(dict_key.value, str):
+ values[name] = util.Uninferable
+ continue
+ if dict_key.value in values:
+ # The name is already in the dictionary
+ values[dict_key.value] = util.Uninferable
+ self.duplicated_keywords.add(dict_key.value)
+ continue
+ values[dict_key.value] = dict_value
+ else:
+ values[name] = value
+ return values
+
+ def _unpack_args(self, args):
+ values = []
+ context = contextmod.InferenceContext()
+ context.extra_context = self.argument_context_map
+ for arg in args:
+ if isinstance(arg, nodes.Starred):
+ try:
+ inferred = next(arg.value.infer(context=context))
+ except exceptions.InferenceError:
+ values.append(util.Uninferable)
+ continue
+
+ if inferred is util.Uninferable:
+ values.append(util.Uninferable)
+ continue
+ if not hasattr(inferred, "elts"):
+ values.append(util.Uninferable)
+ continue
+ values.extend(inferred.elts)
+ else:
+ values.append(arg)
+ return values
+
+ def infer_argument(self, funcnode, name, context):
+ """infer a function argument value according to the call context
+
+ Arguments:
+ funcnode: The function being called.
+ name: The name of the argument whose value is being inferred.
+ context: Inference context object
+ """
+ if name in self.duplicated_keywords:
+ raise exceptions.InferenceError(
+                "The arguments passed to {func!r} have duplicate keywords.",
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+
+ # Look into the keywords first, maybe it's already there.
+ try:
+ return self.keyword_arguments[name].infer(context)
+ except KeyError:
+ pass
+
+ # Too many arguments given and no variable arguments.
+ if len(self.positional_arguments) > len(funcnode.args.args):
+ if not funcnode.args.vararg:
+ raise exceptions.InferenceError(
+ "Too many positional arguments "
+ "passed to {func!r} that does "
+ "not have *args.",
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+
+ positional = self.positional_arguments[: len(funcnode.args.args)]
+ vararg = self.positional_arguments[len(funcnode.args.args) :]
+ argindex = funcnode.args.find_argname(name)[0]
+ kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
+ kwargs = {
+ key: value
+ for key, value in self.keyword_arguments.items()
+ if key not in kwonlyargs
+ }
+ # If there are too few positionals compared to
+ # what the function expects to receive, check to see
+ # if the missing positional arguments were passed
+ # as keyword arguments and if so, place them into the
+ # positional args list.
+ if len(positional) < len(funcnode.args.args):
+ for func_arg in funcnode.args.args:
+ if func_arg.name in kwargs:
+ arg = kwargs.pop(func_arg.name)
+ positional.append(arg)
+
+ if argindex is not None:
+ # 2. first argument of instance/class method
+ if argindex == 0 and funcnode.type in ("method", "classmethod"):
+ if context.boundnode is not None:
+ boundnode = context.boundnode
+ else:
+ # XXX can do better ?
+ boundnode = funcnode.parent.frame()
+
+ if isinstance(boundnode, nodes.ClassDef):
+ # Verify that we're accessing a method
+ # of the metaclass through a class, as in
+ # `cls.metaclass_method`. In this case, the
+ # first argument is always the class.
+ method_scope = funcnode.parent.scope()
+ if method_scope is boundnode.metaclass():
+ return iter((boundnode,))
+
+ if funcnode.type == "method":
+ if not isinstance(boundnode, bases.Instance):
+ boundnode = bases.Instance(boundnode)
+ return iter((boundnode,))
+ if funcnode.type == "classmethod":
+ return iter((boundnode,))
+ # if we have a method, extract one position
+ # from the index, so we'll take in account
+ # the extra parameter represented by `self` or `cls`
+ if funcnode.type in ("method", "classmethod"):
+ argindex -= 1
+ # 2. search arg index
+ try:
+ return self.positional_arguments[argindex].infer(context)
+ except IndexError:
+ pass
+
+ if funcnode.args.kwarg == name:
+ # It wants all the keywords that were passed into
+ # the call site.
+ if self.has_invalid_keywords():
+ raise exceptions.InferenceError(
+ "Inference failed to find values for all keyword arguments "
+ "to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
+ "{keyword_arguments!r}.",
+ keyword_arguments=self.keyword_arguments,
+ unpacked_kwargs=self._unpacked_kwargs,
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+ kwarg = nodes.Dict(
+ lineno=funcnode.args.lineno,
+ col_offset=funcnode.args.col_offset,
+ parent=funcnode.args,
+ )
+ kwarg.postinit(
+ [(nodes.const_factory(key), value) for key, value in kwargs.items()]
+ )
+ return iter((kwarg,))
+ if funcnode.args.vararg == name:
+ # It wants all the args that were passed into
+ # the call site.
+ if self.has_invalid_arguments():
+ raise exceptions.InferenceError(
+ "Inference failed to find values for all positional "
+ "arguments to {func!r}: {unpacked_args!r} doesn't "
+ "correspond to {positional_arguments!r}.",
+ positional_arguments=self.positional_arguments,
+ unpacked_args=self._unpacked_args,
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
+ args = nodes.Tuple(
+ lineno=funcnode.args.lineno,
+ col_offset=funcnode.args.col_offset,
+ parent=funcnode.args,
+ )
+ args.postinit(vararg)
+ return iter((args,))
+
+ # Check if it's a default parameter.
+ try:
+ return funcnode.args.default_value(name).infer(context)
+ except exceptions.NoDefault:
+ pass
+ raise exceptions.InferenceError(
+ "No value found for argument {name} to " "{func!r}",
+ call_site=self,
+ func=funcnode,
+ arg=name,
+ context=context,
+ )
diff --git a/src/main/python/venv/Lib/site-packages/astroid/as_string.py b/src/main/python/venv/Lib/site-packages/astroid/as_string.py
new file mode 100644
index 0000000..3cd6e0d
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/as_string.py
@@ -0,0 +1,633 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2010 Daniel Harding
+# Copyright (c) 2013-2016, 2018 Claudiu Popa
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Jared Garst
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 rr-
+# Copyright (c) 2018 brendanator
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""This module renders Astroid nodes as string:
+
+* :func:`to_code` function return equivalent (hopefully valid) python string
+
+* :func:`dump` function return an internal representation of nodes found
+ in the tree, useful for debugging or understanding the tree structure
+"""
+
+# pylint: disable=unused-argument
+
+DOC_NEWLINE = "\0"
+
+
+class AsStringVisitor:
+ """Visitor to render an Astroid node as a valid python code string"""
+
+ def __init__(self, indent):
+ self.indent = indent
+
+ def __call__(self, node):
+ """Makes this visitor behave as a simple function"""
+ return node.accept(self).replace(DOC_NEWLINE, "\n")
+
+ def _docs_dedent(self, doc):
+ """Stop newlines in docs being indented by self._stmt_list"""
+ return '\n%s"""%s"""' % (self.indent, doc.replace("\n", DOC_NEWLINE))
+
+ def _stmt_list(self, stmts, indent=True):
+ """return a list of nodes to string"""
+ stmts = "\n".join(nstr for nstr in [n.accept(self) for n in stmts] if nstr)
+ if indent:
+ return self.indent + stmts.replace("\n", "\n" + self.indent)
+
+ return stmts
+
+ def _precedence_parens(self, node, child, is_left=True):
+ """Wrap child in parens only if required to keep same semantics"""
+ if self._should_wrap(node, child, is_left):
+ return "(%s)" % child.accept(self)
+
+ return child.accept(self)
+
+ def _should_wrap(self, node, child, is_left):
+ """Wrap child if:
+ - it has lower precedence
+ - same precedence with position opposite to associativity direction
+ """
+ node_precedence = node.op_precedence()
+ child_precedence = child.op_precedence()
+
+ if node_precedence > child_precedence:
+ # 3 * (4 + 5)
+ return True
+
+ if (
+ node_precedence == child_precedence
+ and is_left != node.op_left_associative()
+ ):
+ # 3 - (4 - 5)
+ # (2**3)**4
+ return True
+
+ return False
+
+ ## visit_ methods ###########################################
+
+ def visit_arguments(self, node):
+        """return an astroid.Arguments node as string"""
+ return node.format_args()
+
+ def visit_assignattr(self, node):
+ """return an astroid.AssAttr node as string"""
+ return self.visit_attribute(node)
+
+ def visit_assert(self, node):
+ """return an astroid.Assert node as string"""
+ if node.fail:
+ return "assert %s, %s" % (node.test.accept(self), node.fail.accept(self))
+ return "assert %s" % node.test.accept(self)
+
+ def visit_assignname(self, node):
+ """return an astroid.AssName node as string"""
+ return node.name
+
+ def visit_assign(self, node):
+ """return an astroid.Assign node as string"""
+ lhs = " = ".join(n.accept(self) for n in node.targets)
+ return "%s = %s" % (lhs, node.value.accept(self))
+
+ def visit_augassign(self, node):
+ """return an astroid.AugAssign node as string"""
+ return "%s %s %s" % (node.target.accept(self), node.op, node.value.accept(self))
+
+ def visit_annassign(self, node):
+        """Return an astroid.AnnAssign node as string"""
+
+ target = node.target.accept(self)
+ annotation = node.annotation.accept(self)
+ if node.value is None:
+ return "%s: %s" % (target, annotation)
+ return "%s: %s = %s" % (target, annotation, node.value.accept(self))
+
+ def visit_repr(self, node):
+ """return an astroid.Repr node as string"""
+ return "`%s`" % node.value.accept(self)
+
+ def visit_binop(self, node):
+ """return an astroid.BinOp node as string"""
+ left = self._precedence_parens(node, node.left)
+ right = self._precedence_parens(node, node.right, is_left=False)
+ if node.op == "**":
+ return "%s%s%s" % (left, node.op, right)
+
+ return "%s %s %s" % (left, node.op, right)
+
+ def visit_boolop(self, node):
+ """return an astroid.BoolOp node as string"""
+ values = ["%s" % self._precedence_parens(node, n) for n in node.values]
+ return (" %s " % node.op).join(values)
+
+ def visit_break(self, node):
+ """return an astroid.Break node as string"""
+ return "break"
+
+ def visit_call(self, node):
+ """return an astroid.Call node as string"""
+ expr_str = self._precedence_parens(node, node.func)
+ args = [arg.accept(self) for arg in node.args]
+ if node.keywords:
+ keywords = [kwarg.accept(self) for kwarg in node.keywords]
+ else:
+ keywords = []
+
+ args.extend(keywords)
+ return "%s(%s)" % (expr_str, ", ".join(args))
+
+ def visit_classdef(self, node):
+ """return an astroid.ClassDef node as string"""
+ decorate = node.decorators.accept(self) if node.decorators else ""
+ bases = ", ".join(n.accept(self) for n in node.bases)
+ metaclass = node.metaclass()
+ if metaclass and not node.has_metaclass_hack():
+ if bases:
+ bases = "(%s, metaclass=%s)" % (bases, metaclass.name)
+ else:
+ bases = "(metaclass=%s)" % metaclass.name
+ else:
+ bases = "(%s)" % bases if bases else ""
+ docs = self._docs_dedent(node.doc) if node.doc else ""
+ return "\n\n%sclass %s%s:%s\n%s\n" % (
+ decorate,
+ node.name,
+ bases,
+ docs,
+ self._stmt_list(node.body),
+ )
+
+ def visit_compare(self, node):
+ """return an astroid.Compare node as string"""
+ rhs_str = " ".join(
+ [
+ "%s %s" % (op, self._precedence_parens(node, expr, is_left=False))
+ for op, expr in node.ops
+ ]
+ )
+ return "%s %s" % (self._precedence_parens(node, node.left), rhs_str)
+
+ def visit_comprehension(self, node):
+ """return an astroid.Comprehension node as string"""
+ ifs = "".join(" if %s" % n.accept(self) for n in node.ifs)
+ return "for %s in %s%s" % (
+ node.target.accept(self),
+ node.iter.accept(self),
+ ifs,
+ )
+
+ def visit_const(self, node):
+ """return an astroid.Const node as string"""
+ if node.value is Ellipsis:
+ return "..."
+ return repr(node.value)
+
+ def visit_continue(self, node):
+ """return an astroid.Continue node as string"""
+ return "continue"
+
+ def visit_delete(self, node): # XXX check if correct
+ """return an astroid.Delete node as string"""
+ return "del %s" % ", ".join(child.accept(self) for child in node.targets)
+
+ def visit_delattr(self, node):
+ """return an astroid.DelAttr node as string"""
+ return self.visit_attribute(node)
+
+ def visit_delname(self, node):
+ """return an astroid.DelName node as string"""
+ return node.name
+
+ def visit_decorators(self, node):
+ """return an astroid.Decorators node as string"""
+ return "@%s\n" % "\n@".join(item.accept(self) for item in node.nodes)
+
+ def visit_dict(self, node):
+ """return an astroid.Dict node as string"""
+ return "{%s}" % ", ".join(self._visit_dict(node))
+
+ def _visit_dict(self, node):
+ for key, value in node.items:
+ key = key.accept(self)
+ value = value.accept(self)
+ if key == "**":
+ # It can only be a DictUnpack node.
+ yield key + value
+ else:
+ yield "%s: %s" % (key, value)
+
+ def visit_dictunpack(self, node):
+ return "**"
+
+ def visit_dictcomp(self, node):
+ """return an astroid.DictComp node as string"""
+ return "{%s: %s %s}" % (
+ node.key.accept(self),
+ node.value.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_expr(self, node):
+ """return an astroid.Discard node as string"""
+ return node.value.accept(self)
+
+ def visit_emptynode(self, node):
+ """dummy method for visiting an Empty node"""
+ return ""
+
+ def visit_excepthandler(self, node):
+ if node.type:
+ if node.name:
+ excs = "except %s, %s" % (
+ node.type.accept(self),
+ node.name.accept(self),
+ )
+ else:
+ excs = "except %s" % node.type.accept(self)
+ else:
+ excs = "except"
+ return "%s:\n%s" % (excs, self._stmt_list(node.body))
+
+ def visit_ellipsis(self, node):
+ """return an astroid.Ellipsis node as string"""
+ return "..."
+
+ def visit_empty(self, node):
+ """return an Empty node as string"""
+ return ""
+
+ def visit_exec(self, node):
+ """return an astroid.Exec node as string"""
+ if node.locals:
+ return "exec %s in %s, %s" % (
+ node.expr.accept(self),
+ node.locals.accept(self),
+ node.globals.accept(self),
+ )
+ if node.globals:
+ return "exec %s in %s" % (node.expr.accept(self), node.globals.accept(self))
+ return "exec %s" % node.expr.accept(self)
+
+ def visit_extslice(self, node):
+ """return an astroid.ExtSlice node as string"""
+ return ", ".join(dim.accept(self) for dim in node.dims)
+
+ def visit_for(self, node):
+ """return an astroid.For node as string"""
+ fors = "for %s in %s:\n%s" % (
+ node.target.accept(self),
+ node.iter.accept(self),
+ self._stmt_list(node.body),
+ )
+ if node.orelse:
+ fors = "%s\nelse:\n%s" % (fors, self._stmt_list(node.orelse))
+ return fors
+
+ def visit_importfrom(self, node):
+ """return an astroid.ImportFrom node as string"""
+ return "from %s import %s" % (
+ "." * (node.level or 0) + node.modname,
+ _import_string(node.names),
+ )
+
+ def visit_functiondef(self, node):
+ """return an astroid.Function node as string"""
+ decorate = node.decorators.accept(self) if node.decorators else ""
+ docs = self._docs_dedent(node.doc) if node.doc else ""
+ trailer = ":"
+ if node.returns:
+ return_annotation = " -> " + node.returns.as_string()
+ trailer = return_annotation + ":"
+ def_format = "\n%sdef %s(%s)%s%s\n%s"
+ return def_format % (
+ decorate,
+ node.name,
+ node.args.accept(self),
+ trailer,
+ docs,
+ self._stmt_list(node.body),
+ )
+
+ def visit_generatorexp(self, node):
+ """return an astroid.GeneratorExp node as string"""
+ return "(%s %s)" % (
+ node.elt.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_attribute(self, node):
+ """return an astroid.Getattr node as string"""
+ return "%s.%s" % (self._precedence_parens(node, node.expr), node.attrname)
+
+ def visit_global(self, node):
+ """return an astroid.Global node as string"""
+ return "global %s" % ", ".join(node.names)
+
+ def visit_if(self, node):
+ """return an astroid.If node as string"""
+ ifs = ["if %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))]
+ if node.has_elif_block():
+ ifs.append("el%s" % self._stmt_list(node.orelse, indent=False))
+ elif node.orelse:
+ ifs.append("else:\n%s" % self._stmt_list(node.orelse))
+ return "\n".join(ifs)
+
+ def visit_ifexp(self, node):
+ """return an astroid.IfExp node as string"""
+ return "%s if %s else %s" % (
+ self._precedence_parens(node, node.body, is_left=True),
+ self._precedence_parens(node, node.test, is_left=True),
+ self._precedence_parens(node, node.orelse, is_left=False),
+ )
+
+ def visit_import(self, node):
+ """return an astroid.Import node as string"""
+ return "import %s" % _import_string(node.names)
+
+ def visit_keyword(self, node):
+ """return an astroid.Keyword node as string"""
+ if node.arg is None:
+ return "**%s" % node.value.accept(self)
+ return "%s=%s" % (node.arg, node.value.accept(self))
+
+ def visit_lambda(self, node):
+ """return an astroid.Lambda node as string"""
+ args = node.args.accept(self)
+ body = node.body.accept(self)
+ if args:
+ return "lambda %s: %s" % (args, body)
+
+ return "lambda: %s" % body
+
+ def visit_list(self, node):
+ """return an astroid.List node as string"""
+ return "[%s]" % ", ".join(child.accept(self) for child in node.elts)
+
+ def visit_listcomp(self, node):
+ """return an astroid.ListComp node as string"""
+ return "[%s %s]" % (
+ node.elt.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_module(self, node):
+ """return an astroid.Module node as string"""
+ docs = '"""%s"""\n\n' % node.doc if node.doc else ""
+ return docs + "\n".join(n.accept(self) for n in node.body) + "\n\n"
+
+ def visit_name(self, node):
+ """return an astroid.Name node as string"""
+ return node.name
+
+ def visit_pass(self, node):
+ """return an astroid.Pass node as string"""
+ return "pass"
+
+ def visit_print(self, node):
+ """return an astroid.Print node as string"""
+ nodes = ", ".join(n.accept(self) for n in node.values)
+ if not node.nl:
+ nodes = "%s," % nodes
+ if node.dest:
+ return "print >> %s, %s" % (node.dest.accept(self), nodes)
+ return "print %s" % nodes
+
+ def visit_raise(self, node):
+ """return an astroid.Raise node as string"""
+ if node.exc:
+ if node.inst:
+ if node.tback:
+ return "raise %s, %s, %s" % (
+ node.exc.accept(self),
+ node.inst.accept(self),
+ node.tback.accept(self),
+ )
+ return "raise %s, %s" % (node.exc.accept(self), node.inst.accept(self))
+ return "raise %s" % node.exc.accept(self)
+ return "raise"
+
+ def visit_return(self, node):
+ """return an astroid.Return node as string"""
+ if node.is_tuple_return() and len(node.value.elts) > 1:
+ elts = [child.accept(self) for child in node.value.elts]
+ return "return %s" % ", ".join(elts)
+
+ if node.value:
+ return "return %s" % node.value.accept(self)
+
+ return "return"
+
+ def visit_index(self, node):
+ """return an astroid.Index node as string"""
+ return node.value.accept(self)
+
+ def visit_set(self, node):
+ """return an astroid.Set node as string"""
+ return "{%s}" % ", ".join(child.accept(self) for child in node.elts)
+
+ def visit_setcomp(self, node):
+ """return an astroid.SetComp node as string"""
+ return "{%s %s}" % (
+ node.elt.accept(self),
+ " ".join(n.accept(self) for n in node.generators),
+ )
+
+ def visit_slice(self, node):
+ """return an astroid.Slice node as string"""
+ lower = node.lower.accept(self) if node.lower else ""
+ upper = node.upper.accept(self) if node.upper else ""
+ step = node.step.accept(self) if node.step else ""
+ if step:
+ return "%s:%s:%s" % (lower, upper, step)
+ return "%s:%s" % (lower, upper)
+
+ def visit_subscript(self, node):
+ """return an astroid.Subscript node as string"""
+ idx = node.slice
+ if idx.__class__.__name__.lower() == "index":
+ idx = idx.value
+ idxstr = idx.accept(self)
+ if idx.__class__.__name__.lower() == "tuple" and idx.elts:
+ # Remove parenthesis in tuple and extended slice.
+ # a[(::1, 1:)] is not valid syntax.
+ idxstr = idxstr[1:-1]
+ return "%s[%s]" % (self._precedence_parens(node, node.value), idxstr)
+
+ def visit_tryexcept(self, node):
+ """return an astroid.TryExcept node as string"""
+ trys = ["try:\n%s" % self._stmt_list(node.body)]
+ for handler in node.handlers:
+ trys.append(handler.accept(self))
+ if node.orelse:
+ trys.append("else:\n%s" % self._stmt_list(node.orelse))
+ return "\n".join(trys)
+
+ def visit_tryfinally(self, node):
+ """return an astroid.TryFinally node as string"""
+ return "try:\n%s\nfinally:\n%s" % (
+ self._stmt_list(node.body),
+ self._stmt_list(node.finalbody),
+ )
+
+ def visit_tuple(self, node):
+ """return an astroid.Tuple node as string"""
+ if len(node.elts) == 1:
+ return "(%s, )" % node.elts[0].accept(self)
+ return "(%s)" % ", ".join(child.accept(self) for child in node.elts)
+
+ def visit_unaryop(self, node):
+ """return an astroid.UnaryOp node as string"""
+ if node.op == "not":
+ operator = "not "
+ else:
+ operator = node.op
+ return "%s%s" % (operator, self._precedence_parens(node, node.operand))
+
+ def visit_while(self, node):
+ """return an astroid.While node as string"""
+ whiles = "while %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))
+ if node.orelse:
+ whiles = "%s\nelse:\n%s" % (whiles, self._stmt_list(node.orelse))
+ return whiles
+
+ def visit_with(self, node): # 'with' without 'as' is possible
+ """return an astroid.With node as string"""
+ items = ", ".join(
+ ("%s" % expr.accept(self)) + (vars and " as %s" % (vars.accept(self)) or "")
+ for expr, vars in node.items
+ )
+ return "with %s:\n%s" % (items, self._stmt_list(node.body))
+
+ def visit_yield(self, node):
+        """return an astroid.Yield node as string"""
+ yi_val = (" " + node.value.accept(self)) if node.value else ""
+ expr = "yield" + yi_val
+ if node.parent.is_statement:
+ return expr
+
+ return "(%s)" % (expr,)
+
+ def visit_starred(self, node):
+ """return Starred node as string"""
+ return "*" + node.value.accept(self)
+
+ # These aren't for real AST nodes, but for inference objects.
+
+ def visit_frozenset(self, node):
+ return node.parent.accept(self)
+
+ def visit_super(self, node):
+ return node.parent.accept(self)
+
+ def visit_uninferable(self, node):
+ return str(node)
+
+
+class AsStringVisitor3(AsStringVisitor):
+ """AsStringVisitor3 overwrites some AsStringVisitor methods"""
+
+ def visit_excepthandler(self, node):
+ if node.type:
+ if node.name:
+ excs = "except %s as %s" % (
+ node.type.accept(self),
+ node.name.accept(self),
+ )
+ else:
+ excs = "except %s" % node.type.accept(self)
+ else:
+ excs = "except"
+ return "%s:\n%s" % (excs, self._stmt_list(node.body))
+
+ def visit_nonlocal(self, node):
+ """return an astroid.Nonlocal node as string"""
+ return "nonlocal %s" % ", ".join(node.names)
+
+ def visit_raise(self, node):
+ """return an astroid.Raise node as string"""
+ if node.exc:
+ if node.cause:
+ return "raise %s from %s" % (
+ node.exc.accept(self),
+ node.cause.accept(self),
+ )
+ return "raise %s" % node.exc.accept(self)
+ return "raise"
+
+ def visit_yieldfrom(self, node):
+ """ Return an astroid.YieldFrom node as string. """
+ yi_val = (" " + node.value.accept(self)) if node.value else ""
+ expr = "yield from" + yi_val
+ if node.parent.is_statement:
+ return expr
+
+ return "(%s)" % (expr,)
+
+ def visit_asyncfunctiondef(self, node):
+ function = super(AsStringVisitor3, self).visit_functiondef(node)
+ return "async " + function.strip()
+
+ def visit_await(self, node):
+ return "await %s" % node.value.accept(self)
+
+ def visit_asyncwith(self, node):
+ return "async %s" % self.visit_with(node)
+
+ def visit_asyncfor(self, node):
+ return "async %s" % self.visit_for(node)
+
+ def visit_joinedstr(self, node):
+ # Special treatment for constants,
+ # as we want to join literals not reprs
+ string = "".join(
+ value.value if type(value).__name__ == "Const" else value.accept(self)
+ for value in node.values
+ )
+ return "f'%s'" % string
+
+ def visit_formattedvalue(self, node):
+ return "{%s}" % node.value.accept(self)
+
+ def visit_comprehension(self, node):
+ """return an astroid.Comprehension node as string"""
+ return "%s%s" % (
+ "async " if node.is_async else "",
+ super(AsStringVisitor3, self).visit_comprehension(node),
+ )
+
+ def visit_namedexpr(self, node):
+ """Return an assignment expression node as string"""
+ target = node.target.accept(self)
+ value = node.value.accept(self)
+ return "%s := %s" % (target, value)
+
+
+def _import_string(names):
+ """return a list of (name, asname) formatted as a string"""
+ _names = []
+ for name, asname in names:
+ if asname is not None:
+ _names.append("%s as %s" % (name, asname))
+ else:
+ _names.append(name)
+ return ", ".join(_names)
+
+
+AsStringVisitor = AsStringVisitor3
+
+# This sets the default indent to 4 spaces.
+to_code = AsStringVisitor("    ")
diff --git a/src/main/python/venv/Lib/site-packages/astroid/bases.py b/src/main/python/venv/Lib/site-packages/astroid/bases.py
new file mode 100644
index 0000000..d5b042a
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/bases.py
@@ -0,0 +1,542 @@
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2012 FELD Boris
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016-2017 Derek Gustafson
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Daniel Colascione
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""This module contains base classes and functions for the nodes and some
+inference utils.
+"""
+
+import builtins
+import collections
+
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import util
+
+objectmodel = util.lazy_import("interpreter.objectmodel")
+helpers = util.lazy_import("helpers")
+BUILTINS = builtins.__name__
+manager = util.lazy_import("manager")
+MANAGER = manager.AstroidManager()
+
+# TODO: check if needs special treatment
+BUILTINS = "builtins"
+BOOL_SPECIAL_METHOD = "__bool__"
+
+PROPERTIES = {BUILTINS + ".property", "abc.abstractproperty"}
+# List of possible property names. We use this list in order
+# to see if a method is a property or not. This should be
+# pretty reliable and fast, the alternative being to check each
+# decorator to see if its a real property-like descriptor, which
+# can be too complicated.
+# Also, these aren't qualified, because each project can
+# define them, we shouldn't expect to know every possible
+# property-like decorator!
+POSSIBLE_PROPERTIES = {
+ "cached_property",
+ "cachedproperty",
+ "lazyproperty",
+ "lazy_property",
+ "reify",
+ "lazyattribute",
+ "lazy_attribute",
+ "LazyProperty",
+ "lazy",
+ "cache_readonly",
+}
+
+
+def _is_property(meth):
+ if PROPERTIES.intersection(meth.decoratornames()):
+ return True
+ stripped = {
+ name.split(".")[-1]
+ for name in meth.decoratornames()
+ if name is not util.Uninferable
+ }
+ if any(name in stripped for name in POSSIBLE_PROPERTIES):
+ return True
+
+ # Lookup for subclasses of *property*
+ if not meth.decorators:
+ return False
+ for decorator in meth.decorators.nodes or ():
+ inferred = helpers.safe_infer(decorator)
+ if inferred is None or inferred is util.Uninferable:
+ continue
+ if inferred.__class__.__name__ == "ClassDef":
+ for base_class in inferred.bases:
+ if base_class.__class__.__name__ != "Name":
+ continue
+ module, _ = base_class.lookup(base_class.name)
+ if module.name == BUILTINS and base_class.name == "property":
+ return True
+
+ return False
+
+
+class Proxy:
+ """a simple proxy object
+
+ Note:
+
+ Subclasses of this object will need a custom __getattr__
+ if new instance attributes are created. See the Const class
+ """
+
+ _proxied = None # proxied object may be set by class or by instance
+
+ def __init__(self, proxied=None):
+ if proxied is not None:
+ self._proxied = proxied
+
+ def __getattr__(self, name):
+ if name == "_proxied":
+ return getattr(self.__class__, "_proxied")
+ if name in self.__dict__:
+ return self.__dict__[name]
+ return getattr(self._proxied, name)
+
+ def infer(self, context=None):
+ yield self
+
+
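+# Editorial note (not upstream code): Proxy forwards unknown attribute lookups to
+# the wrapped node, so an Instance answers read-only queries like its ClassDef;
+# a minimal sketch, assuming a class built with astroid.extract_node:
+#
+#     import astroid
+#     cls = astroid.extract_node("class A:\n    x = 1")
+#     inst = cls.instantiate_class()   # an Instance, i.e. a Proxy around cls
+#     inst.name                        # delegated to cls.name -> "A"
+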
+def _infer_stmts(stmts, context, frame=None):
+ """Return an iterator on statements inferred by each statement in *stmts*."""
+ inferred = False
+ if context is not None:
+ name = context.lookupname
+ context = context.clone()
+ else:
+ name = None
+ context = contextmod.InferenceContext()
+
+ for stmt in stmts:
+ if stmt is util.Uninferable:
+ yield stmt
+ inferred = True
+ continue
+ context.lookupname = stmt._infer_name(frame, name)
+ try:
+ for inferred in stmt.infer(context=context):
+ yield inferred
+ inferred = True
+ except exceptions.NameInferenceError:
+ continue
+ except exceptions.InferenceError:
+ yield util.Uninferable
+ inferred = True
+ if not inferred:
+ raise exceptions.InferenceError(
+ "Inference failed for all members of {stmts!r}.",
+ stmts=stmts,
+ frame=frame,
+ context=context,
+ )
+
+
+def _infer_method_result_truth(instance, method_name, context):
+ # Get the method from the instance and try to infer
+ # its return's truth value.
+ meth = next(instance.igetattr(method_name, context=context), None)
+ if meth and hasattr(meth, "infer_call_result"):
+ if not meth.callable():
+ return util.Uninferable
+ try:
+ for value in meth.infer_call_result(instance, context=context):
+ if value is util.Uninferable:
+ return value
+
+ inferred = next(value.infer(context=context))
+ return inferred.bool_value()
+ except exceptions.InferenceError:
+ pass
+ return util.Uninferable
+
+
+class BaseInstance(Proxy):
+ """An instance base class, which provides lookup methods for potential instances."""
+
+ special_attributes = None
+
+ def display_type(self):
+ return "Instance of"
+
+ def getattr(self, name, context=None, lookupclass=True):
+ try:
+ values = self._proxied.instance_attr(name, context)
+ except exceptions.AttributeInferenceError as exc:
+ if self.special_attributes and name in self.special_attributes:
+ return [self.special_attributes.lookup(name)]
+
+ if lookupclass:
+ # Class attributes not available through the instance
+ # unless they are explicitly defined.
+ return self._proxied.getattr(name, context, class_context=False)
+
+ raise exceptions.AttributeInferenceError(
+ target=self, attribute=name, context=context
+ ) from exc
+ # since we've no context information, return matching class members as
+ # well
+ if lookupclass:
+ try:
+ return values + self._proxied.getattr(
+ name, context, class_context=False
+ )
+ except exceptions.AttributeInferenceError:
+ pass
+ return values
+
+ def igetattr(self, name, context=None):
+ """inferred getattr"""
+ if not context:
+ context = contextmod.InferenceContext()
+ try:
+ # avoid recursively inferring the same attr on the same class
+ if context.push((self._proxied, name)):
+ raise exceptions.InferenceError(
+ message="Cannot infer the same attribute again",
+ node=self,
+ context=context,
+ )
+
+ # XXX frame should be self._proxied, or not ?
+ get_attr = self.getattr(name, context, lookupclass=False)
+ yield from _infer_stmts(
+ self._wrap_attr(get_attr, context), context, frame=self
+ )
+ except exceptions.AttributeInferenceError as error:
+ try:
+ # fallback to class.igetattr since it has some logic to handle
+ # descriptors
+ # But only if the _proxied is the Class.
+ if self._proxied.__class__.__name__ != "ClassDef":
+ raise
+ attrs = self._proxied.igetattr(name, context, class_context=False)
+ yield from self._wrap_attr(attrs, context)
+ except exceptions.AttributeInferenceError as error:
+ raise exceptions.InferenceError(**vars(error)) from error
+
+ def _wrap_attr(self, attrs, context=None):
+ """wrap bound methods of attrs in a InstanceMethod proxies"""
+ for attr in attrs:
+ if isinstance(attr, UnboundMethod):
+ if _is_property(attr):
+ yield from attr.infer_call_result(self, context)
+ else:
+ yield BoundMethod(attr, self)
+ elif hasattr(attr, "name") and attr.name == "<lambda>":
+ if attr.args.args and attr.args.args[0].name == "self":
+ yield BoundMethod(attr, self)
+ continue
+ yield attr
+ else:
+ yield attr
+
+ def infer_call_result(self, caller, context=None):
+ """infer what a class instance is returning when called"""
+ context = contextmod.bind_context_to_node(context, self)
+ inferred = False
+ for node in self._proxied.igetattr("__call__", context):
+ if node is util.Uninferable or not node.callable():
+ continue
+ for res in node.infer_call_result(caller, context):
+ inferred = True
+ yield res
+ if not inferred:
+ raise exceptions.InferenceError(node=self, caller=caller, context=context)
+
+
+class Instance(BaseInstance):
+ """A special node representing a class instance."""
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel())
+
+ def __repr__(self):
+ return "" % (
+ self._proxied.root().name,
+ self._proxied.name,
+ id(self),
+ )
+
+ def __str__(self):
+ return "Instance of %s.%s" % (self._proxied.root().name, self._proxied.name)
+
+ def callable(self):
+ try:
+ self._proxied.getattr("__call__", class_context=False)
+ return True
+ except exceptions.AttributeInferenceError:
+ return False
+
+ def pytype(self):
+ return self._proxied.qname()
+
+ def display_type(self):
+ return "Instance of"
+
+ def bool_value(self):
+ """Infer the truth value for an Instance
+
+ The truth value of an instance is determined by these conditions:
+
+ * if it implements __bool__ on Python 3 or __nonzero__
+ on Python 2, then its bool value will be determined by
+ calling this special method and checking its result.
+ * when this method is not defined, __len__() is called, if it
+ is defined, and the object is considered true if its result is
+ nonzero. If a class defines neither __len__() nor __bool__(),
+ all its instances are considered true.
+ """
+ context = contextmod.InferenceContext()
+ context.callcontext = contextmod.CallContext(args=[])
+ context.boundnode = self
+
+ try:
+ result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
+ except (exceptions.InferenceError, exceptions.AttributeInferenceError):
+ # Fallback to __len__.
+ try:
+ result = _infer_method_result_truth(self, "__len__", context)
+ except (exceptions.AttributeInferenceError, exceptions.InferenceError):
+ return True
+ return result
+
+ # This is set in inference.py.
+ def getitem(self, index, context=None):
+ pass
+
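+# Editorial note (not upstream code): a hedged sketch of how bool_value() above is
+# expected to behave, assuming classes built through astroid:
+#
+#     import astroid
+#     cls = astroid.extract_node("class A:\n    def __bool__(self): return False")
+#     cls.instantiate_class().bool_value()    # expected: False, via __bool__
+#
+#     plain = astroid.extract_node("class B:\n    pass")
+#     plain.instantiate_class().bool_value()  # expected: True (no __bool__/__len__)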
+
+class UnboundMethod(Proxy):
+ """a special node representing a method not bound to an instance"""
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel())
+
+ def __repr__(self):
+ frame = self._proxied.parent.frame()
+ return "<%s %s of %s at 0x%s" % (
+ self.__class__.__name__,
+ self._proxied.name,
+ frame.qname(),
+ id(self),
+ )
+
+ def implicit_parameters(self):
+ return 0
+
+ def is_bound(self):
+ return False
+
+ def getattr(self, name, context=None):
+ if name in self.special_attributes:
+ return [self.special_attributes.lookup(name)]
+ return self._proxied.getattr(name, context)
+
+ def igetattr(self, name, context=None):
+ if name in self.special_attributes:
+ return iter((self.special_attributes.lookup(name),))
+ return self._proxied.igetattr(name, context)
+
+ def infer_call_result(self, caller, context):
+ """
+ The boundnode of the regular context with a function called
+ on ``object.__new__`` will be of type ``object``,
+ which is incorrect for the argument in general.
+ If no context is given the ``object.__new__`` call argument will
+ correctly inferred except when inside a call that requires
+ the additional context (such as a classmethod) of the boundnode
+ to determine which class the method was called from
+ """
+
+ # If we're unbound method __new__ of builtin object, the result is an
+ # instance of the class given as first argument.
+ if (
+ self._proxied.name == "__new__"
+ and self._proxied.parent.frame().qname() == "%s.object" % BUILTINS
+ ):
+ if caller.args:
+ node_context = context.extra_context.get(caller.args[0])
+ infer = caller.args[0].infer(context=node_context)
+ else:
+ infer = []
+ return (Instance(x) if x is not util.Uninferable else x for x in infer)
+ return self._proxied.infer_call_result(caller, context)
+
+ def bool_value(self):
+ return True
+
+
+class BoundMethod(UnboundMethod):
+ """a special node representing a method bound to an instance"""
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel())
+
+ def __init__(self, proxy, bound):
+ UnboundMethod.__init__(self, proxy)
+ self.bound = bound
+
+ def implicit_parameters(self):
+ return 1
+
+ def is_bound(self):
+ return True
+
+ def _infer_type_new_call(self, caller, context):
+ """Try to infer what type.__new__(mcs, name, bases, attrs) returns.
+
+ In order for such call to be valid, the metaclass needs to be
+ a subtype of ``type``, the name needs to be a string, the bases
+ needs to be a tuple of classes
+ """
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import node_classes
+
+ # Verify the metaclass
+ mcs = next(caller.args[0].infer(context=context))
+ if mcs.__class__.__name__ != "ClassDef":
+ # Not a valid first argument.
+ return None
+ if not mcs.is_subtype_of("%s.type" % BUILTINS):
+ # Not a valid metaclass.
+ return None
+
+ # Verify the name
+ name = next(caller.args[1].infer(context=context))
+ if name.__class__.__name__ != "Const":
+ # Not a valid name, needs to be a const.
+ return None
+ if not isinstance(name.value, str):
+ # Needs to be a string.
+ return None
+
+ # Verify the bases
+ bases = next(caller.args[2].infer(context=context))
+ if bases.__class__.__name__ != "Tuple":
+ # Needs to be a tuple.
+ return None
+ inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
+ if any(base.__class__.__name__ != "ClassDef" for base in inferred_bases):
+ # All the bases needs to be Classes
+ return None
+
+ # Verify the attributes.
+ attrs = next(caller.args[3].infer(context=context))
+ if attrs.__class__.__name__ != "Dict":
+ # Needs to be a dictionary.
+ return None
+ cls_locals = collections.defaultdict(list)
+ for key, value in attrs.items:
+ key = next(key.infer(context=context))
+ value = next(value.infer(context=context))
+ # Ignore non string keys
+ if key.__class__.__name__ == "Const" and isinstance(key.value, str):
+ cls_locals[key.value].append(value)
+
+ # Build the class from now.
+ cls = mcs.__class__(
+ name=name.value,
+ lineno=caller.lineno,
+ col_offset=caller.col_offset,
+ parent=caller,
+ )
+ empty = node_classes.Pass()
+ cls.postinit(
+ bases=bases.elts,
+ body=[empty],
+ decorators=[],
+ newstyle=True,
+ metaclass=mcs,
+ keywords=[],
+ )
+ cls.locals = cls_locals
+ return cls
+
+ def infer_call_result(self, caller, context=None):
+ context = contextmod.bind_context_to_node(context, self.bound)
+ if (
+ self.bound.__class__.__name__ == "ClassDef"
+ and self.bound.name == "type"
+ and self.name == "__new__"
+ and len(caller.args) == 4
+ ):
+ # Check if we have a ``type.__new__(mcs, name, bases, attrs)`` call.
+ new_cls = self._infer_type_new_call(caller, context)
+ if new_cls:
+ return iter((new_cls,))
+
+ return super(BoundMethod, self).infer_call_result(caller, context)
+
+ def bool_value(self):
+ return True
+
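+# Editorial note (not upstream code): _infer_type_new_call() above only fires for
+# the four-argument class-creation form; a sketch of the call shape it recognises
+# (a metaclass that subtypes type, a str name, a tuple of classes, a dict of attrs):
+#
+#     type.__new__(type, "Dynamic", (object,), {"answer": 42})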
+
+class Generator(BaseInstance):
+ """a special node representing a generator.
+
+ Proxied class is set once for all in raw_building.
+ """
+
+ # pylint: disable=unnecessary-lambda
+ special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel())
+
+ # pylint: disable=super-init-not-called
+ def __init__(self, parent=None):
+ self.parent = parent
+
+ def callable(self):
+ return False
+
+ def pytype(self):
+ return "%s.generator" % BUILTINS
+
+ def display_type(self):
+ return "Generator"
+
+ def bool_value(self):
+ return True
+
+ def __repr__(self):
+ return "" % (
+ self._proxied.name,
+ self.lineno,
+ id(self),
+ )
+
+ def __str__(self):
+ return "Generator(%s)" % (self._proxied.name)
+
+
+class AsyncGenerator(Generator):
+ """Special node representing an async generator"""
+
+ def pytype(self):
+ return "%s.async_generator" % BUILTINS
+
+ def display_type(self):
+ return "AsyncGenerator"
+
+ def __repr__(self):
+ return "" % (
+ self._proxied.name,
+ self.lineno,
+ id(self),
+ )
+
+ def __str__(self):
+ return "AsyncGenerator(%s)" % (self._proxied.name)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc
new file mode 100644
index 0000000..02f8cf7
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_argparse.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc
new file mode 100644
index 0000000..7cf4841
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_attrs.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc
new file mode 100644
index 0000000..c2a6f46
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_builtin_inference.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc
new file mode 100644
index 0000000..af5833f
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_collections.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc
new file mode 100644
index 0000000..a895bb5
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_crypt.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc
new file mode 100644
index 0000000..e33a68c
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_curses.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc
new file mode 100644
index 0000000..ead95a8
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dataclasses.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc
new file mode 100644
index 0000000..94c253f
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_dateutil.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc
new file mode 100644
index 0000000..807c54d
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_fstrings.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc
new file mode 100644
index 0000000..1d0fbe5
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_functools.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc
new file mode 100644
index 0000000..115a75b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_gi.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc
new file mode 100644
index 0000000..8cd6565
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_hashlib.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc
new file mode 100644
index 0000000..ca12de5
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_http.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc
new file mode 100644
index 0000000..5befdcd
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_io.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc
new file mode 100644
index 0000000..e02f078
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_mechanize.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc
new file mode 100644
index 0000000..4c20ea7
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_multiprocessing.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc
new file mode 100644
index 0000000..4f6155a
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_namedtuple_enum.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc
new file mode 100644
index 0000000..872060b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_nose.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc
new file mode 100644
index 0000000..275e716
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_fromnumeric.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc
new file mode 100644
index 0000000..1b3da4c
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_function_base.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc
new file mode 100644
index 0000000..4e9eb31
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_multiarray.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc
new file mode 100644
index 0000000..6f6e302
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numeric.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc
new file mode 100644
index 0000000..0c77435
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_numerictypes.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc
new file mode 100644
index 0000000..bb8593b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_core_umath.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc
new file mode 100644
index 0000000..f663c18
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_ndarray.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc
new file mode 100644
index 0000000..32a3b7b
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_random_mtrand.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc
new file mode 100644
index 0000000..0e950e7
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_numpy_utils.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc
new file mode 100644
index 0000000..bca107d
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pkg_resources.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc
new file mode 100644
index 0000000..c6647f8
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_pytest.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc
new file mode 100644
index 0000000..01d5160
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_qt.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc
new file mode 100644
index 0000000..b5d2c69
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_random.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc
new file mode 100644
index 0000000..e317433
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_re.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc
new file mode 100644
index 0000000..b5deac2
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_six.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc
new file mode 100644
index 0000000..90e94c9
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_ssl.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc
new file mode 100644
index 0000000..ac6c87d
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_subprocess.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc
new file mode 100644
index 0000000..a9214ba
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_threading.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc
new file mode 100644
index 0000000..9cb0782
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_typing.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc
new file mode 100644
index 0000000..f6850ba
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/brain/__pycache__/brain_uuid.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_argparse.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_argparse.py
new file mode 100644
index 0000000..d489911
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_argparse.py
@@ -0,0 +1,33 @@
+from astroid import MANAGER, arguments, nodes, inference_tip, UseInferenceDefault
+
+
+def infer_namespace(node, context=None):
+ callsite = arguments.CallSite.from_call(node)
+ if not callsite.keyword_arguments:
+ # Cannot make sense of it.
+ raise UseInferenceDefault()
+
+ class_node = nodes.ClassDef("Namespace", "docstring")
+ class_node.parent = node.parent
+ for attr in set(callsite.keyword_arguments):
+ fake_node = nodes.EmptyNode()
+ fake_node.parent = class_node
+ fake_node.attrname = attr
+ class_node.instance_attrs[attr] = [fake_node]
+ return iter((class_node.instantiate_class(),))
+
+
+def _looks_like_namespace(node):
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ return (
+ func.attrname == "Namespace"
+ and isinstance(func.expr, nodes.Name)
+ and func.expr.name == "argparse"
+ )
+ return False
+
+
+MANAGER.register_transform(
+ nodes.Call, inference_tip(infer_namespace), _looks_like_namespace
+)
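+
+# Editorial note (not upstream code): a sketch of the pattern this transform
+# targets -- keyword-only argparse.Namespace construction -- so attribute access
+# on the result can be inferred instead of flagged:
+#
+#     import argparse
+#     ns = argparse.Namespace(verbose=True, path="out")
+#     ns.verbose    # resolved through the synthetic "Namespace" ClassDef above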
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_attrs.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_attrs.py
new file mode 100644
index 0000000..670736f
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_attrs.py
@@ -0,0 +1,65 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+"""
+Astroid hook for the attrs library
+
+Without this hook pylint reports unsupported-assignment-operation
+for attrs classes
+"""
+
+import astroid
+from astroid import MANAGER
+
+
+ATTRIB_NAMES = frozenset(("attr.ib", "attrib", "attr.attrib"))
+ATTRS_NAMES = frozenset(("attr.s", "attrs", "attr.attrs", "attr.attributes"))
+
+
+def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES):
+ """Return True if a decorated node has
+ an attr decorator applied."""
+ if not node.decorators:
+ return False
+ for decorator_attribute in node.decorators.nodes:
+ if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
+ decorator_attribute = decorator_attribute.func
+ if decorator_attribute.as_string() in decorator_names:
+ return True
+ return False
+
+
+def attr_attributes_transform(node):
+ """Given that the ClassNode has an attr decorator,
+ rewrite class attributes as instance attributes
+ """
+ # Astroid can't infer this attribute properly
+ # Prevents https://github.com/PyCQA/pylint/issues/1884
+ node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]
+
+ for cdefbodynode in node.body:
+ if not isinstance(cdefbodynode, (astroid.Assign, astroid.AnnAssign)):
+ continue
+ if isinstance(cdefbodynode.value, astroid.Call):
+ if cdefbodynode.value.func.as_string() not in ATTRIB_NAMES:
+ continue
+ else:
+ continue
+ targets = (
+ cdefbodynode.targets
+ if hasattr(cdefbodynode, "targets")
+ else [cdefbodynode.target]
+ )
+ for target in targets:
+
+ rhs_node = astroid.Unknown(
+ lineno=cdefbodynode.lineno,
+ col_offset=cdefbodynode.col_offset,
+ parent=cdefbodynode,
+ )
+ node.locals[target.name] = [rhs_node]
+ node.instance_attrs[target.name] = [rhs_node]
+
+
+MANAGER.register_transform(
+ astroid.ClassDef, attr_attributes_transform, is_decorated_with_attrs
+)
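+
+# Editorial note (not upstream code): a sketch of the class shape the transform
+# rewrites -- attr.s classes whose attr.ib() class attributes are re-registered
+# as instance attributes for inference purposes:
+#
+#     import attr
+#
+#     @attr.s
+#     class Point:
+#         x = attr.ib(default=0)
+#         y = attr.ib(default=0)
+#
+#     Point().x = 3   # no unsupported-assignment-operation with this hook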
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py
new file mode 100644
index 0000000..2dd7cc5
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_builtin_inference.py
@@ -0,0 +1,829 @@
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014-2015 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Rene Zhang
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for various builtins."""
+
+from functools import partial
+from textwrap import dedent
+
+import six
+from astroid import (
+ MANAGER,
+ UseInferenceDefault,
+ AttributeInferenceError,
+ inference_tip,
+ InferenceError,
+ NameInferenceError,
+ AstroidTypeError,
+ MroError,
+)
+from astroid import arguments
+from astroid.builder import AstroidBuilder
+from astroid import helpers
+from astroid import nodes
+from astroid import objects
+from astroid import scoped_nodes
+from astroid import util
+
+
+OBJECT_DUNDER_NEW = "object.__new__"
+
+
+def _extend_str(class_node, rvalue):
+ """function to extend builtin str/unicode class"""
+ code = dedent(
+ """
+ class whatever(object):
+ def join(self, iterable):
+ return {rvalue}
+ def replace(self, old, new, count=None):
+ return {rvalue}
+ def format(self, *args, **kwargs):
+ return {rvalue}
+ def encode(self, encoding='ascii', errors=None):
+ return ''
+ def decode(self, encoding='ascii', errors=None):
+ return u''
+ def capitalize(self):
+ return {rvalue}
+ def title(self):
+ return {rvalue}
+ def lower(self):
+ return {rvalue}
+ def upper(self):
+ return {rvalue}
+ def swapcase(self):
+ return {rvalue}
+ def index(self, sub, start=None, end=None):
+ return 0
+ def find(self, sub, start=None, end=None):
+ return 0
+ def count(self, sub, start=None, end=None):
+ return 0
+ def strip(self, chars=None):
+ return {rvalue}
+ def lstrip(self, chars=None):
+ return {rvalue}
+ def rstrip(self, chars=None):
+ return {rvalue}
+ def rjust(self, width, fillchar=None):
+ return {rvalue}
+ def center(self, width, fillchar=None):
+ return {rvalue}
+ def ljust(self, width, fillchar=None):
+ return {rvalue}
+ """
+ )
+ code = code.format(rvalue=rvalue)
+ fake = AstroidBuilder(MANAGER).string_build(code)["whatever"]
+ for method in fake.mymethods():
+ method.parent = class_node
+ method.lineno = None
+ method.col_offset = None
+ if "__class__" in method.locals:
+ method.locals["__class__"] = [class_node]
+ class_node.locals[method.name] = [method]
+ method.parent = class_node
+
+
+def _extend_builtins(class_transforms):
+ builtin_ast = MANAGER.builtins_module
+ for class_name, transform in class_transforms.items():
+ transform(builtin_ast[class_name])
+
+
+_extend_builtins(
+ {
+ "bytes": partial(_extend_str, rvalue="b''"),
+ "str": partial(_extend_str, rvalue="''"),
+ }
+)
+
+
+def _builtin_filter_predicate(node, builtin_name):
+ if isinstance(node.func, nodes.Name) and node.func.name == builtin_name:
+ return True
+ if isinstance(node.func, nodes.Attribute):
+ return (
+ node.func.attrname == "fromkeys"
+ and isinstance(node.func.expr, nodes.Name)
+ and node.func.expr.name == "dict"
+ )
+ return False
+
+
+def register_builtin_transform(transform, builtin_name):
+ """Register a new transform function for the given *builtin_name*.
+
+ The transform function must accept two parameters, a node and
+ an optional context.
+ """
+
+ def _transform_wrapper(node, context=None):
+ result = transform(node, context=context)
+ if result:
+ if not result.parent:
+ # Let the transformation function determine
+ # the parent for its result. Otherwise,
+ # we set it to be the node we transformed from.
+ result.parent = node
+
+ if result.lineno is None:
+ result.lineno = node.lineno
+ if result.col_offset is None:
+ result.col_offset = node.col_offset
+ return iter([result])
+
+ MANAGER.register_transform(
+ nodes.Call,
+ inference_tip(_transform_wrapper),
+ partial(_builtin_filter_predicate, builtin_name=builtin_name),
+ )
+
+
+def _container_generic_inference(node, context, node_type, transform):
+ args = node.args
+ if not args:
+ return node_type()
+ if len(node.args) > 1:
+ raise UseInferenceDefault()
+
+ arg, = args
+ transformed = transform(arg)
+ if not transformed:
+ try:
+ inferred = next(arg.infer(context=context))
+ except (InferenceError, StopIteration):
+ raise UseInferenceDefault()
+ if inferred is util.Uninferable:
+ raise UseInferenceDefault()
+ transformed = transform(inferred)
+ if not transformed or transformed is util.Uninferable:
+ raise UseInferenceDefault()
+ return transformed
+
+
+def _container_generic_transform(arg, klass, iterables, build_elts):
+ if isinstance(arg, klass):
+ return arg
+ elif isinstance(arg, iterables):
+ if all(isinstance(elt, nodes.Const) for elt in arg.elts):
+ elts = [elt.value for elt in arg.elts]
+ else:
+ # TODO: Does not handle deduplication for sets.
+ elts = filter(None, map(helpers.safe_infer, arg.elts))
+ elif isinstance(arg, nodes.Dict):
+ # Dicts need to have consts as strings already.
+ if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
+ raise UseInferenceDefault()
+ elts = [item[0].value for item in arg.items]
+ elif isinstance(arg, nodes.Const) and isinstance(
+ arg.value, (six.string_types, six.binary_type)
+ ):
+ elts = arg.value
+ else:
+ return
+ return klass.from_elements(elts=build_elts(elts))
+
+
+def _infer_builtin_container(
+ node, context, klass=None, iterables=None, build_elts=None
+):
+ transform_func = partial(
+ _container_generic_transform,
+ klass=klass,
+ iterables=iterables,
+ build_elts=build_elts,
+ )
+
+ return _container_generic_inference(node, context, klass, transform_func)
+
+
+# pylint: disable=invalid-name
+infer_tuple = partial(
+ _infer_builtin_container,
+ klass=nodes.Tuple,
+ iterables=(
+ nodes.List,
+ nodes.Set,
+ objects.FrozenSet,
+ objects.DictItems,
+ objects.DictKeys,
+ objects.DictValues,
+ ),
+ build_elts=tuple,
+)
+
+infer_list = partial(
+ _infer_builtin_container,
+ klass=nodes.List,
+ iterables=(
+ nodes.Tuple,
+ nodes.Set,
+ objects.FrozenSet,
+ objects.DictItems,
+ objects.DictKeys,
+ objects.DictValues,
+ ),
+ build_elts=list,
+)
+
+infer_set = partial(
+ _infer_builtin_container,
+ klass=nodes.Set,
+ iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys),
+ build_elts=set,
+)
+
+infer_frozenset = partial(
+ _infer_builtin_container,
+ klass=objects.FrozenSet,
+ iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys),
+ build_elts=frozenset,
+)
+
+
+def _get_elts(arg, context):
+ is_iterable = lambda n: isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))
+ try:
+ inferred = next(arg.infer(context))
+ except (InferenceError, NameInferenceError):
+ raise UseInferenceDefault()
+ if isinstance(inferred, nodes.Dict):
+ items = inferred.items
+ elif is_iterable(inferred):
+ items = []
+ for elt in inferred.elts:
+ # If an item is not a pair of two items,
+ # then fallback to the default inference.
+ # Also, take in consideration only hashable items,
+ # tuples and consts. We are choosing Names as well.
+ if not is_iterable(elt):
+ raise UseInferenceDefault()
+ if len(elt.elts) != 2:
+ raise UseInferenceDefault()
+ if not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)):
+ raise UseInferenceDefault()
+ items.append(tuple(elt.elts))
+ else:
+ raise UseInferenceDefault()
+ return items
+
+
+def infer_dict(node, context=None):
+ """Try to infer a dict call to a Dict node.
+
+ The function treats the following cases:
+
+ * dict()
+ * dict(mapping)
+ * dict(iterable)
+ * dict(iterable, **kwargs)
+ * dict(mapping, **kwargs)
+ * dict(**kwargs)
+
+ If a case can't be inferred, we'll fallback to default inference.
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.has_invalid_arguments() or call.has_invalid_keywords():
+ raise UseInferenceDefault
+
+ args = call.positional_arguments
+ kwargs = list(call.keyword_arguments.items())
+
+ if not args and not kwargs:
+ # dict()
+ return nodes.Dict()
+ elif kwargs and not args:
+ # dict(a=1, b=2, c=4)
+ items = [(nodes.Const(key), value) for key, value in kwargs]
+ elif len(args) == 1 and kwargs:
+ # dict(some_iterable, b=2, c=4)
+ elts = _get_elts(args[0], context)
+ keys = [(nodes.Const(key), value) for key, value in kwargs]
+ items = elts + keys
+ elif len(args) == 1:
+ items = _get_elts(args[0], context)
+ else:
+ raise UseInferenceDefault()
+
+ value = nodes.Dict(
+ col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
+ )
+ value.postinit(items)
+ return value
+
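+# Editorial note (not upstream code): a sketch of the call forms infer_dict()
+# above resolves to concrete Dict nodes:
+#
+#     dict()                    # -> empty Dict
+#     dict(a=1, b=2)            # -> Dict with Const keys "a", "b"
+#     dict([("a", 1)], b=2)     # -> iterable items merged with keyword items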
+
+def infer_super(node, context=None):
+ """Understand super calls.
+
+ There are some restrictions for what can be understood:
+
+ * unbounded super (one argument form) is not understood.
+
+ * if the super call is not inside a function (classmethod or method),
+ then the default inference will be used.
+
+ * if the super arguments can't be inferred, the default inference
+ will be used.
+ """
+ if len(node.args) == 1:
+ # Ignore unbounded super.
+ raise UseInferenceDefault
+
+ scope = node.scope()
+ if not isinstance(scope, nodes.FunctionDef):
+ # Ignore non-method uses of super.
+ raise UseInferenceDefault
+ if scope.type not in ("classmethod", "method"):
+ # Not interested in staticmethods.
+ raise UseInferenceDefault
+
+ cls = scoped_nodes.get_wrapping_class(scope)
+ if not len(node.args):
+ mro_pointer = cls
+ # If we are in a classmethod, the interpreter automatically fills in
+ # the class itself as the second argument, not an instance.
+ if scope.type == "classmethod":
+ mro_type = cls
+ else:
+ mro_type = cls.instantiate_class()
+ else:
+ try:
+ mro_pointer = next(node.args[0].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+ try:
+ mro_type = next(node.args[1].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+
+ if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
+ # No way we could understand this.
+ raise UseInferenceDefault
+
+ super_obj = objects.Super(
+ mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
+ )
+ super_obj.parent = node
+ return super_obj
+
+
+def _infer_getattr_args(node, context):
+ if len(node.args) not in (2, 3):
+ # Not a valid getattr call.
+ raise UseInferenceDefault
+
+ try:
+ obj = next(node.args[0].infer(context=context))
+ attr = next(node.args[1].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+
+ if obj is util.Uninferable or attr is util.Uninferable:
+ # If one of the arguments is something we can't infer,
+ # then also make the result of the getattr call something
+ # which is unknown.
+ return util.Uninferable, util.Uninferable
+
+ is_string = isinstance(attr, nodes.Const) and isinstance(
+ attr.value, six.string_types
+ )
+ if not is_string:
+ raise UseInferenceDefault
+
+ return obj, attr.value
+
+
+def infer_getattr(node, context=None):
+ """Understand getattr calls
+
+ If one of the arguments is an Uninferable object, then the
+ result will be an Uninferable object. Otherwise, the normal attribute
+ lookup will be done.
+ """
+ obj, attr = _infer_getattr_args(node, context)
+ if (
+ obj is util.Uninferable
+ or attr is util.Uninferable
+ or not hasattr(obj, "igetattr")
+ ):
+ return util.Uninferable
+
+ try:
+ return next(obj.igetattr(attr, context=context))
+ except (StopIteration, InferenceError, AttributeInferenceError):
+ if len(node.args) == 3:
+ # Try to infer the default and return it instead.
+ try:
+ return next(node.args[2].infer(context=context))
+ except InferenceError:
+ raise UseInferenceDefault
+
+ raise UseInferenceDefault
+
+
+def infer_hasattr(node, context=None):
+ """Understand hasattr calls
+
+ This always guarantees three possible outcomes for calling
+ hasattr: Const(False) when we are sure that the object
+ doesn't have the intended attribute, Const(True) when
+ we know that the object has the attribute and Uninferable
+ when we are unsure of the outcome of the function call.
+ """
+ try:
+ obj, attr = _infer_getattr_args(node, context)
+ if (
+ obj is util.Uninferable
+ or attr is util.Uninferable
+ or not hasattr(obj, "getattr")
+ ):
+ return util.Uninferable
+ obj.getattr(attr, context=context)
+ except UseInferenceDefault:
+ # Can't infer something from this function call.
+ return util.Uninferable
+ except AttributeInferenceError:
+ # Doesn't have it.
+ return nodes.Const(False)
+ return nodes.Const(True)
+
+
+def infer_callable(node, context=None):
+ """Understand callable calls
+
+ This follows Python's semantics, where an object
+ is callable if it provides an attribute __call__,
+ even though that attribute is something which can't be
+ called.
+ """
+ if len(node.args) != 1:
+ # Invalid callable call.
+ raise UseInferenceDefault
+
+ argument = node.args[0]
+ try:
+ inferred = next(argument.infer(context=context))
+ except InferenceError:
+ return util.Uninferable
+ if inferred is util.Uninferable:
+ return util.Uninferable
+ return nodes.Const(inferred.callable())
+
+
+def infer_bool(node, context=None):
+ """Understand bool calls."""
+ if len(node.args) > 1:
+ # Invalid bool call.
+ raise UseInferenceDefault
+
+ if not node.args:
+ return nodes.Const(False)
+
+ argument = node.args[0]
+ try:
+ inferred = next(argument.infer(context=context))
+ except InferenceError:
+ return util.Uninferable
+ if inferred is util.Uninferable:
+ return util.Uninferable
+
+ bool_value = inferred.bool_value()
+ if bool_value is util.Uninferable:
+ return util.Uninferable
+ return nodes.Const(bool_value)
+
+
+def infer_type(node, context=None):
+ """Understand the one-argument form of *type*."""
+ if len(node.args) != 1:
+ raise UseInferenceDefault
+
+ return helpers.object_type(node.args[0], context)
+
+
+def infer_slice(node, context=None):
+ """Understand `slice` calls."""
+ args = node.args
+ if not 0 < len(args) <= 3:
+ raise UseInferenceDefault
+
+ infer_func = partial(helpers.safe_infer, context=context)
+ args = [infer_func(arg) for arg in args]
+ for arg in args:
+ if not arg or arg is util.Uninferable:
+ raise UseInferenceDefault
+ if not isinstance(arg, nodes.Const):
+ raise UseInferenceDefault
+ if not isinstance(arg.value, (type(None), int)):
+ raise UseInferenceDefault
+
+ if len(args) < 3:
+ # Make sure we have 3 arguments.
+ args.extend([None] * (3 - len(args)))
+
+ slice_node = nodes.Slice(
+ lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
+ )
+ slice_node.postinit(*args)
+ return slice_node
+
+
+def _infer_object__new__decorator(node, context=None):
+ # Instantiate class immediately
+ # since that's what @object.__new__ does
+ return iter((node.instantiate_class(),))
+
+
+def _infer_object__new__decorator_check(node):
+ """Predicate before inference_tip
+
+ Check if the given ClassDef has an @object.__new__ decorator
+ """
+ if not node.decorators:
+ return False
+
+ for decorator in node.decorators.nodes:
+ if isinstance(decorator, nodes.Attribute):
+ if decorator.as_string() == OBJECT_DUNDER_NEW:
+ return True
+ return False
+
+
+def infer_issubclass(callnode, context=None):
+ """Infer issubclass() calls
+
+ :param nodes.Call callnode: an `issubclass` call
+ :param InferenceContext: the context for the inference
+ :rtype nodes.Const: Boolean Const value of the `issubclass` call
+ :raises UseInferenceDefault: If the node cannot be inferred
+ """
+ call = arguments.CallSite.from_call(callnode)
+ if call.keyword_arguments:
+ # issubclass doesn't support keyword arguments
+ raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
+ if len(call.positional_arguments) != 2:
+ raise UseInferenceDefault(
+ "Expected two arguments, got {count}".format(
+ count=len(call.positional_arguments)
+ )
+ )
+ # The left hand argument is the obj to be checked
+ obj_node, class_or_tuple_node = call.positional_arguments
+
+ try:
+ obj_type = next(obj_node.infer(context=context))
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+ if not isinstance(obj_type, nodes.ClassDef):
+ raise UseInferenceDefault("TypeError: arg 1 must be class")
+
+ # The right hand argument is the class(es) that the given
+ # object is to be checked against.
+ try:
+ class_container = _class_or_tuple_to_container(
+ class_or_tuple_node, context=context
+ )
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+ try:
+ issubclass_bool = helpers.object_issubclass(obj_type, class_container, context)
+ except AstroidTypeError as exc:
+ raise UseInferenceDefault("TypeError: " + str(exc)) from exc
+ except MroError as exc:
+ raise UseInferenceDefault from exc
+ return nodes.Const(issubclass_bool)
+
+
+def infer_isinstance(callnode, context=None):
+ """Infer isinstance calls
+
+ :param nodes.Call callnode: an isinstance call
+ :param InferenceContext: context for call
+ (currently unused but is a common interface for inference)
+ :rtype nodes.Const: Boolean Const value of isinstance call
+
+ :raises UseInferenceDefault: If the node cannot be inferred
+ """
+ call = arguments.CallSite.from_call(callnode)
+ if call.keyword_arguments:
+ # isinstance doesn't support keyword arguments
+ raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
+ if len(call.positional_arguments) != 2:
+ raise UseInferenceDefault(
+ "Expected two arguments, got {count}".format(
+ count=len(call.positional_arguments)
+ )
+ )
+ # The left hand argument is the obj to be checked
+ obj_node, class_or_tuple_node = call.positional_arguments
+ # The right hand argument is the class(es) that the given
+ # obj is to be checked as an instance of
+ try:
+ class_container = _class_or_tuple_to_container(
+ class_or_tuple_node, context=context
+ )
+ except InferenceError:
+ raise UseInferenceDefault
+ try:
+ isinstance_bool = helpers.object_isinstance(obj_node, class_container, context)
+ except AstroidTypeError as exc:
+ raise UseInferenceDefault("TypeError: " + str(exc))
+ except MroError as exc:
+ raise UseInferenceDefault from exc
+ if isinstance_bool is util.Uninferable:
+ raise UseInferenceDefault
+ return nodes.Const(isinstance_bool)
+
+
+def _class_or_tuple_to_container(node, context=None):
+ # Move inferences results into container
+ # to simplify later logic
+ # raises InferenceError if any of the inferences fall through
+ node_infer = next(node.infer(context=context))
+ # arg2 MUST be a type or a TUPLE of types
+ # for isinstance
+ if isinstance(node_infer, nodes.Tuple):
+ class_container = [
+ next(node.infer(context=context)) for node in node_infer.elts
+ ]
+ class_container = [
+ klass_node for klass_node in class_container if klass_node is not None
+ ]
+ else:
+ class_container = [node_infer]
+ return class_container
+
+
+def infer_len(node, context=None):
+ """Infer length calls
+
+ :param nodes.Call node: len call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Const: a Const node with the inferred length, if possible
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
+ if len(call.positional_arguments) != 1:
+ raise UseInferenceDefault(
+ "TypeError: len() must take exactly one argument "
+ "({len}) given".format(len=len(call.positional_arguments))
+ )
+ [argument_node] = call.positional_arguments
+ try:
+ return nodes.Const(helpers.object_len(argument_node, context=context))
+ except (AstroidTypeError, InferenceError) as exc:
+ raise UseInferenceDefault(str(exc)) from exc
+
+
+def infer_str(node, context=None):
+ """Infer str() calls
+
+ :param nodes.Call node: str() call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Const: a Const containing an empty string
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: str() must take no keyword arguments")
+ try:
+ return nodes.Const("")
+ except (AstroidTypeError, InferenceError) as exc:
+ raise UseInferenceDefault(str(exc)) from exc
+
+
+def infer_int(node, context=None):
+ """Infer int() calls
+
+ :param nodes.Call node: int() call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Const: a Const containing the integer value of the int() call
+ """
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
+
+ if call.positional_arguments:
+ try:
+ first_value = next(call.positional_arguments[0].infer(context=context))
+ except InferenceError as exc:
+ raise UseInferenceDefault(str(exc)) from exc
+
+ if first_value is util.Uninferable:
+ raise UseInferenceDefault
+
+ if isinstance(first_value, nodes.Const) and isinstance(
+ first_value.value, (int, str)
+ ):
+ try:
+ actual_value = int(first_value.value)
+ except ValueError:
+ return nodes.Const(0)
+ return nodes.Const(actual_value)
+
+ return nodes.Const(0)
+
+
+def infer_dict_fromkeys(node, context=None):
+ """Infer dict.fromkeys
+
+ :param nodes.Call node: dict.fromkeys() call to infer
+ :param context.InferenceContext: node context
+ :rtype nodes.Dict:
+ a Dictionary containing the values that astroid was able to infer.
+ In case the inference failed for any reason, an empty dictionary
+ will be inferred instead.
+ """
+
+ def _build_dict_with_elements(elements):
+ new_node = nodes.Dict(
+ col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
+ )
+ new_node.postinit(elements)
+ return new_node
+
+ call = arguments.CallSite.from_call(node)
+ if call.keyword_arguments:
+ raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
+ if len(call.positional_arguments) not in {1, 2}:
+ raise UseInferenceDefault(
+ "TypeError: Needs between 1 and 2 positional arguments"
+ )
+
+ default = nodes.Const(None)
+ values = call.positional_arguments[0]
+ try:
+ inferred_values = next(values.infer(context=context))
+ except InferenceError:
+ return _build_dict_with_elements([])
+ if inferred_values is util.Uninferable:
+ return _build_dict_with_elements([])
+
+ # Limit to a couple of potential values, as this can become pretty complicated
+ accepted_iterable_elements = (nodes.Const,)
+ if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
+ elements = inferred_values.elts
+ for element in elements:
+ if not isinstance(element, accepted_iterable_elements):
+ # Fallback to an empty dict
+ return _build_dict_with_elements([])
+
+ elements_with_value = [(element, default) for element in elements]
+ return _build_dict_with_elements(elements_with_value)
+
+ elif isinstance(inferred_values, nodes.Const) and isinstance(
+ inferred_values.value, (str, bytes)
+ ):
+ elements = [
+ (nodes.Const(element), default) for element in inferred_values.value
+ ]
+ return _build_dict_with_elements(elements)
+ elif isinstance(inferred_values, nodes.Dict):
+ keys = inferred_values.itered()
+ for key in keys:
+ if not isinstance(key, accepted_iterable_elements):
+ # Fallback to an empty dict
+ return _build_dict_with_elements([])
+
+ elements_with_value = [(element, default) for element in keys]
+ return _build_dict_with_elements(elements_with_value)
+
+ # Fallback to an empty dictionary
+ return _build_dict_with_elements([])
+
+
+# Builtins inference
+register_builtin_transform(infer_bool, "bool")
+register_builtin_transform(infer_super, "super")
+register_builtin_transform(infer_callable, "callable")
+register_builtin_transform(infer_getattr, "getattr")
+register_builtin_transform(infer_hasattr, "hasattr")
+register_builtin_transform(infer_tuple, "tuple")
+register_builtin_transform(infer_set, "set")
+register_builtin_transform(infer_list, "list")
+register_builtin_transform(infer_dict, "dict")
+register_builtin_transform(infer_frozenset, "frozenset")
+register_builtin_transform(infer_type, "type")
+register_builtin_transform(infer_slice, "slice")
+register_builtin_transform(infer_isinstance, "isinstance")
+register_builtin_transform(infer_issubclass, "issubclass")
+register_builtin_transform(infer_len, "len")
+register_builtin_transform(infer_str, "str")
+register_builtin_transform(infer_int, "int")
+register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys")
+
+
+# Infer object.__new__ calls
+MANAGER.register_transform(
+ nodes.ClassDef,
+ inference_tip(_infer_object__new__decorator),
+ _infer_object__new__decorator_check,
+)
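For orientation only (this sketch is not part of the vendored file): once the transforms above are registered, astroid constant-folds calls to these builtins during inference. A minimal sketch, assuming this astroid copy is importable:

import astroid

call = astroid.extract_node("int('42')")
print(next(call.infer()).value)    # 42, folded by infer_int

call = astroid.extract_node("len([1, 2, 3])")
print(next(call.infer()).value)    # 3, folded by infer_len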
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_collections.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_collections.py
new file mode 100644
index 0000000..e5b09ec
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_collections.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2016-2017 Łukasz Rogalski
+# Copyright (c) 2017 Derek Gustafson
+# Copyright (c) 2018 Ioana Tagirta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import sys
+
+import astroid
+
+
+def _collections_transform():
+ return astroid.parse(
+ """
+ class defaultdict(dict):
+ default_factory = None
+ def __missing__(self, key): pass
+ def __getitem__(self, key): return default_factory
+
+ """
+ + _deque_mock()
+ + _ordered_dict_mock()
+ )
+
+
+def _deque_mock():
+ base_deque_class = """
+ class deque(object):
+ maxlen = 0
+ def __init__(self, iterable=None, maxlen=None):
+ self.iterable = iterable or []
+ def append(self, x): pass
+ def appendleft(self, x): pass
+ def clear(self): pass
+ def count(self, x): return 0
+ def extend(self, iterable): pass
+ def extendleft(self, iterable): pass
+ def pop(self): return self.iterable[0]
+ def popleft(self): return self.iterable[0]
+ def remove(self, value): pass
+ def reverse(self): return reversed(self.iterable)
+ def rotate(self, n=1): return self
+ def __iter__(self): return self
+ def __reversed__(self): return self.iterable[::-1]
+ def __getitem__(self, index): return self.iterable[index]
+ def __setitem__(self, index, value): pass
+ def __delitem__(self, index): pass
+ def __bool__(self): return bool(self.iterable)
+ def __nonzero__(self): return bool(self.iterable)
+ def __contains__(self, o): return o in self.iterable
+ def __len__(self): return len(self.iterable)
+ def __copy__(self): return deque(self.iterable)
+ def copy(self): return deque(self.iterable)
+ def index(self, x, start=0, end=0): return 0
+ def insert(self, x, i): pass
+ def __add__(self, other): pass
+ def __iadd__(self, other): pass
+ def __mul__(self, other): pass
+ def __imul__(self, other): pass
+ def __rmul__(self, other): pass"""
+ return base_deque_class
+
+
+def _ordered_dict_mock():
+ base_ordered_dict_class = """
+ class OrderedDict(dict):
+ def __reversed__(self): return self[::-1]
+ def move_to_end(self, key, last=False): pass"""
+ return base_ordered_dict_class
+
+
+astroid.register_module_extender(astroid.MANAGER, "collections", _collections_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_crypt.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_crypt.py
new file mode 100644
index 0000000..491ee23
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_crypt.py
@@ -0,0 +1,26 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import sys
+import astroid
+
+PY37 = sys.version_info >= (3, 7)
+
+if PY37:
+    # Since Python 3.7, hashing methods are added
+    # dynamically to globals()
+
+ def _re_transform():
+ return astroid.parse(
+ """
+ from collections import namedtuple
+ _Method = namedtuple('_Method', 'name ident salt_chars total_size')
+
+ METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
+ METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
+ METHOD_BLOWFISH = _Method('BLOWFISH', 2, 'b', 22)
+ METHOD_MD5 = _Method('MD5', '1', 8, 34)
+ METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
+ """
+ )
+
+ astroid.register_module_extender(astroid.MANAGER, "crypt", _re_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_curses.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_curses.py
new file mode 100644
index 0000000..68e88b9
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_curses.py
@@ -0,0 +1,179 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import astroid
+
+
+def _curses_transform():
+ return astroid.parse(
+ """
+ A_ALTCHARSET = 1
+ A_BLINK = 1
+ A_BOLD = 1
+ A_DIM = 1
+ A_INVIS = 1
+ A_ITALIC = 1
+ A_NORMAL = 1
+ A_PROTECT = 1
+ A_REVERSE = 1
+ A_STANDOUT = 1
+ A_UNDERLINE = 1
+ A_HORIZONTAL = 1
+ A_LEFT = 1
+ A_LOW = 1
+ A_RIGHT = 1
+ A_TOP = 1
+ A_VERTICAL = 1
+ A_CHARTEXT = 1
+ A_ATTRIBUTES = 1
+ A_CHARTEXT = 1
+ A_COLOR = 1
+ KEY_MIN = 1
+ KEY_BREAK = 1
+ KEY_DOWN = 1
+ KEY_UP = 1
+ KEY_LEFT = 1
+ KEY_RIGHT = 1
+ KEY_HOME = 1
+ KEY_BACKSPACE = 1
+ KEY_F0 = 1
+ KEY_Fn = 1
+ KEY_DL = 1
+ KEY_IL = 1
+ KEY_DC = 1
+ KEY_IC = 1
+ KEY_EIC = 1
+ KEY_CLEAR = 1
+ KEY_EOS = 1
+ KEY_EOL = 1
+ KEY_SF = 1
+ KEY_SR = 1
+ KEY_NPAGE = 1
+ KEY_PPAGE = 1
+ KEY_STAB = 1
+ KEY_CTAB = 1
+ KEY_CATAB = 1
+ KEY_ENTER = 1
+ KEY_SRESET = 1
+ KEY_RESET = 1
+ KEY_PRINT = 1
+ KEY_LL = 1
+ KEY_A1 = 1
+ KEY_A3 = 1
+ KEY_B2 = 1
+ KEY_C1 = 1
+ KEY_C3 = 1
+ KEY_BTAB = 1
+ KEY_BEG = 1
+ KEY_CANCEL = 1
+ KEY_CLOSE = 1
+ KEY_COMMAND = 1
+ KEY_COPY = 1
+ KEY_CREATE = 1
+ KEY_END = 1
+ KEY_EXIT = 1
+ KEY_FIND = 1
+ KEY_HELP = 1
+ KEY_MARK = 1
+ KEY_MESSAGE = 1
+ KEY_MOVE = 1
+ KEY_NEXT = 1
+ KEY_OPEN = 1
+ KEY_OPTIONS = 1
+ KEY_PREVIOUS = 1
+ KEY_REDO = 1
+ KEY_REFERENCE = 1
+ KEY_REFRESH = 1
+ KEY_REPLACE = 1
+ KEY_RESTART = 1
+ KEY_RESUME = 1
+ KEY_SAVE = 1
+ KEY_SBEG = 1
+ KEY_SCANCEL = 1
+ KEY_SCOMMAND = 1
+ KEY_SCOPY = 1
+ KEY_SCREATE = 1
+ KEY_SDC = 1
+ KEY_SDL = 1
+ KEY_SELECT = 1
+ KEY_SEND = 1
+ KEY_SEOL = 1
+ KEY_SEXIT = 1
+ KEY_SFIND = 1
+ KEY_SHELP = 1
+ KEY_SHOME = 1
+ KEY_SIC = 1
+ KEY_SLEFT = 1
+ KEY_SMESSAGE = 1
+ KEY_SMOVE = 1
+ KEY_SNEXT = 1
+ KEY_SOPTIONS = 1
+ KEY_SPREVIOUS = 1
+ KEY_SPRINT = 1
+ KEY_SREDO = 1
+ KEY_SREPLACE = 1
+ KEY_SRIGHT = 1
+ KEY_SRSUME = 1
+ KEY_SSAVE = 1
+ KEY_SSUSPEND = 1
+ KEY_SUNDO = 1
+ KEY_SUSPEND = 1
+ KEY_UNDO = 1
+ KEY_MOUSE = 1
+ KEY_RESIZE = 1
+ KEY_MAX = 1
+ ACS_BBSS = 1
+ ACS_BLOCK = 1
+ ACS_BOARD = 1
+ ACS_BSBS = 1
+ ACS_BSSB = 1
+ ACS_BSSS = 1
+ ACS_BTEE = 1
+ ACS_BULLET = 1
+ ACS_CKBOARD = 1
+ ACS_DARROW = 1
+ ACS_DEGREE = 1
+ ACS_DIAMOND = 1
+ ACS_GEQUAL = 1
+ ACS_HLINE = 1
+ ACS_LANTERN = 1
+ ACS_LARROW = 1
+ ACS_LEQUAL = 1
+ ACS_LLCORNER = 1
+ ACS_LRCORNER = 1
+ ACS_LTEE = 1
+ ACS_NEQUAL = 1
+ ACS_PI = 1
+ ACS_PLMINUS = 1
+ ACS_PLUS = 1
+ ACS_RARROW = 1
+ ACS_RTEE = 1
+ ACS_S1 = 1
+ ACS_S3 = 1
+ ACS_S7 = 1
+ ACS_S9 = 1
+ ACS_SBBS = 1
+ ACS_SBSB = 1
+ ACS_SBSS = 1
+ ACS_SSBB = 1
+ ACS_SSBS = 1
+ ACS_SSSB = 1
+ ACS_SSSS = 1
+ ACS_STERLING = 1
+ ACS_TTEE = 1
+ ACS_UARROW = 1
+ ACS_ULCORNER = 1
+ ACS_URCORNER = 1
+ ACS_VLINE = 1
+ COLOR_BLACK = 1
+ COLOR_BLUE = 1
+ COLOR_CYAN = 1
+ COLOR_GREEN = 1
+ COLOR_MAGENTA = 1
+ COLOR_RED = 1
+ COLOR_WHITE = 1
+ COLOR_YELLOW = 1
+ """
+ )
+
+
+astroid.register_module_extender(astroid.MANAGER, "curses", _curses_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_dataclasses.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_dataclasses.py
new file mode 100644
index 0000000..7a25e0c
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_dataclasses.py
@@ -0,0 +1,50 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+"""
+Astroid hook for the dataclasses library
+"""
+
+import astroid
+from astroid import MANAGER
+
+
+DATACLASSES_DECORATORS = frozenset(("dataclasses.dataclass", "dataclass"))
+
+
+def is_decorated_with_dataclass(node, decorator_names=DATACLASSES_DECORATORS):
+ """Return True if a decorated node has a `dataclass` decorator applied."""
+ if not node.decorators:
+ return False
+ for decorator_attribute in node.decorators.nodes:
+ if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
+ decorator_attribute = decorator_attribute.func
+ if decorator_attribute.as_string() in decorator_names:
+ return True
+ return False
+
+
+def dataclass_transform(node):
+ """Rewrite a dataclass to be easily understood by pylint"""
+
+ for assign_node in node.body:
+ if not isinstance(assign_node, (astroid.AnnAssign, astroid.Assign)):
+ continue
+
+ targets = (
+ assign_node.targets
+ if hasattr(assign_node, "targets")
+ else [assign_node.target]
+ )
+ for target in targets:
+ rhs_node = astroid.Unknown(
+ lineno=assign_node.lineno,
+ col_offset=assign_node.col_offset,
+ parent=assign_node,
+ )
+ node.instance_attrs[target.name] = [rhs_node]
+ node.locals[target.name] = [rhs_node]
+
+
+MANAGER.register_transform(
+ astroid.ClassDef, dataclass_transform, is_decorated_with_dataclass
+)
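A small sketch (not part of the vendored file) of the effect of dataclass_transform: annotated fields become inferable instance attributes on the class. Assumes this astroid copy is importable:

import astroid

cls_ref = astroid.extract_node('''
import dataclasses

@dataclasses.dataclass
class Point:
    x: int
    y: int = 0

Point  #@
''')
inferred = next(cls_ref.infer())
print("x" in inferred.instance_attrs, "y" in inferred.instance_attrs)   # True True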
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_dateutil.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_dateutil.py
new file mode 100644
index 0000000..a1c270f
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_dateutil.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2015 raylu
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for dateutil"""
+
+import textwrap
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def dateutil_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ textwrap.dedent(
+ """
+ import datetime
+ def parse(timestr, parserinfo=None, **kwargs):
+ return datetime.datetime()
+ """
+ )
+ )
+
+
+register_module_extender(MANAGER, "dateutil.parser", dateutil_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_fstrings.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_fstrings.py
new file mode 100644
index 0000000..7d8c7b6
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_fstrings.py
@@ -0,0 +1,51 @@
+# Copyright (c) 2017 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import collections
+import sys
+
+import astroid
+
+
+def _clone_node_with_lineno(node, parent, lineno):
+ cls = node.__class__
+ other_fields = node._other_fields
+ _astroid_fields = node._astroid_fields
+ init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
+ postinit_params = {param: getattr(node, param) for param in _astroid_fields}
+ if other_fields:
+ init_params.update({param: getattr(node, param) for param in other_fields})
+ new_node = cls(**init_params)
+ if hasattr(node, "postinit") and _astroid_fields:
+ for param, child in postinit_params.items():
+ if child and not isinstance(child, collections.Sequence):
+ cloned_child = _clone_node_with_lineno(
+ node=child, lineno=new_node.lineno, parent=new_node
+ )
+ postinit_params[param] = cloned_child
+ new_node.postinit(**postinit_params)
+ return new_node
+
+
+def _transform_formatted_value(node):
+ if node.value and node.value.lineno == 1:
+ if node.lineno != node.value.lineno:
+ new_node = astroid.FormattedValue(
+ lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
+ )
+ new_value = _clone_node_with_lineno(
+ node=node.value, lineno=node.lineno, parent=new_node
+ )
+ new_node.postinit(value=new_value, format_spec=node.format_spec)
+ return new_node
+
+
+if sys.version_info[:2] >= (3, 6):
+ # TODO: this fix tries to *patch* http://bugs.python.org/issue29051
+ # The problem is that FormattedValue.value, which is a Name node,
+ # has wrong line numbers, usually 1. This creates problems for pylint,
+ # which expects correct line numbers for things such as message control.
+ astroid.MANAGER.register_transform(
+ astroid.FormattedValue, _transform_formatted_value
+ )
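A sketch (not part of the vendored file) showing the effect of this line-number patch: the Name inside a FormattedValue reports the f-string's real line instead of 1. Assumes this astroid copy is importable:

import astroid

joined = astroid.extract_node("x = 1\ny = 2\nf'{y}'  #@")
print(joined.values[0].value.lineno)   # 3, corrected by _transform_formatted_value where needed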
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_functools.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_functools.py
new file mode 100644
index 0000000..8b594ef
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_functools.py
@@ -0,0 +1,158 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2018 Bryce Guinta
+
+"""Astroid hooks for understanding functools library module."""
+from functools import partial
+from itertools import chain
+
+import astroid
+from astroid import arguments
+from astroid import BoundMethod
+from astroid import extract_node
+from astroid import helpers
+from astroid.interpreter import objectmodel
+from astroid import MANAGER
+from astroid import objects
+
+
+LRU_CACHE = "functools.lru_cache"
+
+
+class LruWrappedModel(objectmodel.FunctionModel):
+ """Special attribute model for functions decorated with functools.lru_cache.
+
+    The lru_cache decorator patches, at decoration time, some attributes
+    (such as cache_info and cache_clear) onto the decorated function.
+ """
+
+ @property
+ def attr___wrapped__(self):
+ return self._instance
+
+ @property
+ def attr_cache_info(self):
+ cache_info = extract_node(
+ """
+ from functools import _CacheInfo
+ _CacheInfo(0, 0, 0, 0)
+ """
+ )
+
+ class CacheInfoBoundMethod(BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield helpers.safe_infer(cache_info)
+
+ return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
+
+ @property
+ def attr_cache_clear(self):
+ node = extract_node("""def cache_clear(self): pass""")
+ return BoundMethod(proxy=node, bound=self._instance.parent.scope())
+
+
+def _transform_lru_cache(node, context=None):
+ # TODO: this is not ideal, since the node should be immutable,
+ # but due to https://github.com/PyCQA/astroid/issues/354,
+ # there's not much we can do now.
+ # Replacing the node would work partially, because,
+ # in pylint, the old node would still be available, leading
+ # to spurious false positives.
+ node.special_attributes = LruWrappedModel()(node)
+ return
+
+
+def _functools_partial_inference(node, context=None):
+ call = arguments.CallSite.from_call(node)
+ number_of_positional = len(call.positional_arguments)
+ if number_of_positional < 1:
+ raise astroid.UseInferenceDefault(
+ "functools.partial takes at least one argument"
+ )
+ if number_of_positional == 1 and not call.keyword_arguments:
+ raise astroid.UseInferenceDefault(
+ "functools.partial needs at least to have some filled arguments"
+ )
+
+ partial_function = call.positional_arguments[0]
+ try:
+ inferred_wrapped_function = next(partial_function.infer(context=context))
+ except astroid.InferenceError as exc:
+ raise astroid.UseInferenceDefault from exc
+ if inferred_wrapped_function is astroid.Uninferable:
+ raise astroid.UseInferenceDefault("Cannot infer the wrapped function")
+ if not isinstance(inferred_wrapped_function, astroid.FunctionDef):
+ raise astroid.UseInferenceDefault("The wrapped function is not a function")
+
+ # Determine if the passed keywords into the callsite are supported
+ # by the wrapped function.
+ function_parameters = chain(
+ inferred_wrapped_function.args.args or (),
+ inferred_wrapped_function.args.posonlyargs or (),
+ inferred_wrapped_function.args.kwonlyargs or (),
+ )
+ parameter_names = set(
+ param.name
+ for param in function_parameters
+ if isinstance(param, astroid.AssignName)
+ )
+ if set(call.keyword_arguments) - parameter_names:
+ raise astroid.UseInferenceDefault(
+ "wrapped function received unknown parameters"
+ )
+
+ partial_function = objects.PartialFunction(
+ call,
+ name=inferred_wrapped_function.name,
+ doc=inferred_wrapped_function.doc,
+ lineno=inferred_wrapped_function.lineno,
+ col_offset=inferred_wrapped_function.col_offset,
+ parent=inferred_wrapped_function.parent,
+ )
+ partial_function.postinit(
+ args=inferred_wrapped_function.args,
+ body=inferred_wrapped_function.body,
+ decorators=inferred_wrapped_function.decorators,
+ returns=inferred_wrapped_function.returns,
+ type_comment_returns=inferred_wrapped_function.type_comment_returns,
+ type_comment_args=inferred_wrapped_function.type_comment_args,
+ )
+ return iter((partial_function,))
+
+
+def _looks_like_lru_cache(node):
+ """Check if the given function node is decorated with lru_cache."""
+ if not node.decorators:
+ return False
+ for decorator in node.decorators.nodes:
+ if not isinstance(decorator, astroid.Call):
+ continue
+ if _looks_like_functools_member(decorator, "lru_cache"):
+ return True
+ return False
+
+
+def _looks_like_functools_member(node, member):
+ """Check if the given Call node is a functools.partial call"""
+ if isinstance(node.func, astroid.Name):
+ return node.func.name == member
+ elif isinstance(node.func, astroid.Attribute):
+ return (
+ node.func.attrname == member
+ and isinstance(node.func.expr, astroid.Name)
+ and node.func.expr.name == "functools"
+ )
+
+
+_looks_like_partial = partial(_looks_like_functools_member, member="partial")
+
+
+MANAGER.register_transform(
+ astroid.FunctionDef, _transform_lru_cache, _looks_like_lru_cache
+)
+
+
+MANAGER.register_transform(
+ astroid.Call,
+ astroid.inference_tip(_functools_partial_inference),
+ _looks_like_partial,
+)
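A sketch (not part of the vendored file) of the lru_cache model in action: the attributes patched on at decoration time become inferable. Assumes this astroid copy is importable:

import astroid

attr = astroid.extract_node('''
import functools

@functools.lru_cache()
def fib(n):
    return n

fib.cache_clear  #@
''')
print(type(next(attr.infer())).__name__)   # BoundMethod, provided by LruWrappedModel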
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_gi.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_gi.py
new file mode 100644
index 0000000..0970610
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_gi.py
@@ -0,0 +1,220 @@
+# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Cole Robinson
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 David Shea
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Giuseppe Scrivano
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the Python 2 GObject introspection bindings.
+
+Helps with understanding everything imported from 'gi.repository'
+"""
+
+import inspect
+import itertools
+import sys
+import re
+import warnings
+
+from astroid import MANAGER, AstroidBuildingError, nodes
+from astroid.builder import AstroidBuilder
+
+
+_inspected_modules = {}
+
+_identifier_re = r"^[A-Za-z_]\w*$"
+
+
+def _gi_build_stub(parent):
+ """
+ Inspect the passed module recursively and build stubs for functions,
+ classes, etc.
+ """
+ classes = {}
+ functions = {}
+ constants = {}
+ methods = {}
+ for name in dir(parent):
+ if name.startswith("__"):
+ continue
+
+ # Check if this is a valid name in python
+ if not re.match(_identifier_re, name):
+ continue
+
+ try:
+ obj = getattr(parent, name)
+ except:
+ continue
+
+ if inspect.isclass(obj):
+ classes[name] = obj
+ elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
+ functions[name] = obj
+ elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
+ methods[name] = obj
+        elif (
+            str(obj).startswith("<flags")
+            or str(obj).startswith("<enum ")
+            or str(obj).startswith("<GType ")
+            or inspect.isdatadescriptor(obj)
+        ):
+            constants[name] = 0
+        elif isinstance(obj, (int, str)):
+            constants[name] = obj
+        elif callable(obj):
+            # Fall back to a function for anything callable
+            functions[name] = obj
+        else:
+            # Assume everything else is some manner of constant
+            constants[name] = 0
+
+
+def _looks_like_require_version(node):
+    # Only accept function calls with two constant arguments
+ if len(node.args) != 2:
+ return False
+
+ if not all(isinstance(arg, nodes.Const) for arg in node.args):
+ return False
+
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ if func.attrname != "require_version":
+ return False
+ if isinstance(func.expr, nodes.Name) and func.expr.name == "gi":
+ return True
+
+ return False
+
+ if isinstance(func, nodes.Name):
+ return func.name == "require_version"
+
+ return False
+
+
+def _register_require_version(node):
+ # Load the gi.require_version locally
+ try:
+ import gi
+
+ gi.require_version(node.args[0].value, node.args[1].value)
+ except Exception:
+ pass
+
+ return node
+
+
+MANAGER.register_failed_import_hook(_import_gi_module)
+MANAGER.register_transform(
+ nodes.Call, _register_require_version, _looks_like_require_version
+)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_hashlib.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_hashlib.py
new file mode 100644
index 0000000..98ae774
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_hashlib.py
@@ -0,0 +1,67 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2018 Ioana Tagirta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import sys
+
+import six
+
+import astroid
+
+PY36 = sys.version_info >= (3, 6)
+
+
+def _hashlib_transform():
+ signature = "value=''"
+ template = """
+ class %(name)s(object):
+ def __init__(self, %(signature)s): pass
+ def digest(self):
+ return %(digest)s
+ def copy(self):
+ return self
+ def update(self, value): pass
+ def hexdigest(self):
+ return ''
+ @property
+ def name(self):
+ return %(name)r
+ @property
+ def block_size(self):
+ return 1
+ @property
+ def digest_size(self):
+ return 1
+ """
+ algorithms_with_signature = dict.fromkeys(
+ ["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
+ )
+ if PY36:
+ blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
+ person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
+ node_depth=0, inner_size=0, last_node=False"
+ blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
+ person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
+ node_depth=0, inner_size=0, last_node=False"
+ new_algorithms = dict.fromkeys(
+ ["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
+ signature,
+ )
+ algorithms_with_signature.update(new_algorithms)
+ algorithms_with_signature.update(
+ {"blake2b": blake2b_signature, "blake2s": blake2s_signature}
+ )
+ classes = "".join(
+ template
+ % {
+ "name": hashfunc,
+ "digest": 'b""' if six.PY3 else '""',
+ "signature": signature,
+ }
+ for hashfunc, signature in algorithms_with_signature.items()
+ )
+ return astroid.parse(classes)
+
+
+astroid.register_module_extender(astroid.MANAGER, "hashlib", _hashlib_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_http.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_http.py
new file mode 100644
index 0000000..a3aa814
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_http.py
@@ -0,0 +1,201 @@
+# Copyright (c) 2018 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid brain hints for some of the `http` module."""
+import textwrap
+
+import astroid
+from astroid.builder import AstroidBuilder
+
+
+def _http_transform():
+ code = textwrap.dedent(
+ """
+ from collections import namedtuple
+ _HTTPStatus = namedtuple('_HTTPStatus', 'value phrase description')
+
+ class HTTPStatus:
+
+ # informational
+ CONTINUE = _HTTPStatus(100, 'Continue', 'Request received, please continue')
+ SWITCHING_PROTOCOLS = _HTTPStatus(101, 'Switching Protocols',
+ 'Switching to new protocol; obey Upgrade header')
+ PROCESSING = _HTTPStatus(102, 'Processing', '')
+ OK = _HTTPStatus(200, 'OK', 'Request fulfilled, document follows')
+ CREATED = _HTTPStatus(201, 'Created', 'Document created, URL follows')
+ ACCEPTED = _HTTPStatus(202, 'Accepted',
+ 'Request accepted, processing continues off-line')
+ NON_AUTHORITATIVE_INFORMATION = _HTTPStatus(203,
+ 'Non-Authoritative Information', 'Request fulfilled from cache')
+ NO_CONTENT = _HTTPStatus(204, 'No Content', 'Request fulfilled, nothing follows')
+ RESET_CONTENT =_HTTPStatus(205, 'Reset Content', 'Clear input form for further input')
+ PARTIAL_CONTENT = _HTTPStatus(206, 'Partial Content', 'Partial content follows')
+ MULTI_STATUS = _HTTPStatus(207, 'Multi-Status', '')
+ ALREADY_REPORTED = _HTTPStatus(208, 'Already Reported', '')
+ IM_USED = _HTTPStatus(226, 'IM Used', '')
+ MULTIPLE_CHOICES = _HTTPStatus(300, 'Multiple Choices',
+ 'Object has several resources -- see URI list')
+ MOVED_PERMANENTLY = _HTTPStatus(301, 'Moved Permanently',
+ 'Object moved permanently -- see URI list')
+ FOUND = _HTTPStatus(302, 'Found', 'Object moved temporarily -- see URI list')
+ SEE_OTHER = _HTTPStatus(303, 'See Other', 'Object moved -- see Method and URL list')
+ NOT_MODIFIED = _HTTPStatus(304, 'Not Modified',
+ 'Document has not changed since given time')
+ USE_PROXY = _HTTPStatus(305, 'Use Proxy',
+ 'You must use proxy specified in Location to access this resource')
+ TEMPORARY_REDIRECT = _HTTPStatus(307, 'Temporary Redirect',
+ 'Object moved temporarily -- see URI list')
+ PERMANENT_REDIRECT = _HTTPStatus(308, 'Permanent Redirect',
+ 'Object moved permanently -- see URI list')
+ BAD_REQUEST = _HTTPStatus(400, 'Bad Request',
+ 'Bad request syntax or unsupported method')
+ UNAUTHORIZED = _HTTPStatus(401, 'Unauthorized',
+ 'No permission -- see authorization schemes')
+ PAYMENT_REQUIRED = _HTTPStatus(402, 'Payment Required',
+ 'No payment -- see charging schemes')
+ FORBIDDEN = _HTTPStatus(403, 'Forbidden',
+ 'Request forbidden -- authorization will not help')
+ NOT_FOUND = _HTTPStatus(404, 'Not Found',
+ 'Nothing matches the given URI')
+ METHOD_NOT_ALLOWED = _HTTPStatus(405, 'Method Not Allowed',
+ 'Specified method is invalid for this resource')
+ NOT_ACCEPTABLE = _HTTPStatus(406, 'Not Acceptable',
+ 'URI not available in preferred format')
+ PROXY_AUTHENTICATION_REQUIRED = _HTTPStatus(407,
+ 'Proxy Authentication Required',
+ 'You must authenticate with this proxy before proceeding')
+ REQUEST_TIMEOUT = _HTTPStatus(408, 'Request Timeout',
+ 'Request timed out; try again later')
+ CONFLICT = _HTTPStatus(409, 'Conflict', 'Request conflict')
+ GONE = _HTTPStatus(410, 'Gone',
+ 'URI no longer exists and has been permanently removed')
+ LENGTH_REQUIRED = _HTTPStatus(411, 'Length Required',
+ 'Client must specify Content-Length')
+ PRECONDITION_FAILED = _HTTPStatus(412, 'Precondition Failed',
+ 'Precondition in headers is false')
+ REQUEST_ENTITY_TOO_LARGE = _HTTPStatus(413, 'Request Entity Too Large',
+ 'Entity is too large')
+ REQUEST_URI_TOO_LONG = _HTTPStatus(414, 'Request-URI Too Long',
+ 'URI is too long')
+ UNSUPPORTED_MEDIA_TYPE = _HTTPStatus(415, 'Unsupported Media Type',
+ 'Entity body in unsupported format')
+ REQUESTED_RANGE_NOT_SATISFIABLE = _HTTPStatus(416,
+ 'Requested Range Not Satisfiable',
+ 'Cannot satisfy request range')
+ EXPECTATION_FAILED = _HTTPStatus(417, 'Expectation Failed',
+ 'Expect condition could not be satisfied')
+ MISDIRECTED_REQUEST = _HTTPStatus(421, 'Misdirected Request',
+ 'Server is not able to produce a response')
+ UNPROCESSABLE_ENTITY = _HTTPStatus(422, 'Unprocessable Entity')
+ LOCKED = _HTTPStatus(423, 'Locked')
+ FAILED_DEPENDENCY = _HTTPStatus(424, 'Failed Dependency')
+ UPGRADE_REQUIRED = _HTTPStatus(426, 'Upgrade Required')
+ PRECONDITION_REQUIRED = _HTTPStatus(428, 'Precondition Required',
+ 'The origin server requires the request to be conditional')
+ TOO_MANY_REQUESTS = _HTTPStatus(429, 'Too Many Requests',
+ 'The user has sent too many requests in '
+ 'a given amount of time ("rate limiting")')
+ REQUEST_HEADER_FIELDS_TOO_LARGE = _HTTPStatus(431,
+ 'Request Header Fields Too Large',
+ 'The server is unwilling to process the request because its header '
+ 'fields are too large')
+ UNAVAILABLE_FOR_LEGAL_REASONS = _HTTPStatus(451,
+ 'Unavailable For Legal Reasons',
+ 'The server is denying access to the '
+ 'resource as a consequence of a legal demand')
+ INTERNAL_SERVER_ERROR = _HTTPStatus(500, 'Internal Server Error',
+ 'Server got itself in trouble')
+ NOT_IMPLEMENTED = _HTTPStatus(501, 'Not Implemented',
+ 'Server does not support this operation')
+ BAD_GATEWAY = _HTTPStatus(502, 'Bad Gateway',
+ 'Invalid responses from another server/proxy')
+ SERVICE_UNAVAILABLE = _HTTPStatus(503, 'Service Unavailable',
+ 'The server cannot process the request due to a high load')
+ GATEWAY_TIMEOUT = _HTTPStatus(504, 'Gateway Timeout',
+ 'The gateway server did not receive a timely response')
+ HTTP_VERSION_NOT_SUPPORTED = _HTTPStatus(505, 'HTTP Version Not Supported',
+ 'Cannot fulfill request')
+ VARIANT_ALSO_NEGOTIATES = _HTTPStatus(506, 'Variant Also Negotiates')
+ INSUFFICIENT_STORAGE = _HTTPStatus(507, 'Insufficient Storage')
+ LOOP_DETECTED = _HTTPStatus(508, 'Loop Detected')
+ NOT_EXTENDED = _HTTPStatus(510, 'Not Extended')
+ NETWORK_AUTHENTICATION_REQUIRED = _HTTPStatus(511,
+ 'Network Authentication Required',
+ 'The client needs to authenticate to gain network access')
+ """
+ )
+ return AstroidBuilder(astroid.MANAGER).string_build(code)
+
+
+def _http_client_transform():
+ return AstroidBuilder(astroid.MANAGER).string_build(
+ textwrap.dedent(
+ """
+ from http import HTTPStatus
+
+ CONTINUE = HTTPStatus.CONTINUE
+ SWITCHING_PROTOCOLS = HTTPStatus.SWITCHING_PROTOCOLS
+ PROCESSING = HTTPStatus.PROCESSING
+ OK = HTTPStatus.OK
+ CREATED = HTTPStatus.CREATED
+ ACCEPTED = HTTPStatus.ACCEPTED
+ NON_AUTHORITATIVE_INFORMATION = HTTPStatus.NON_AUTHORITATIVE_INFORMATION
+ NO_CONTENT = HTTPStatus.NO_CONTENT
+ RESET_CONTENT = HTTPStatus.RESET_CONTENT
+ PARTIAL_CONTENT = HTTPStatus.PARTIAL_CONTENT
+ MULTI_STATUS = HTTPStatus.MULTI_STATUS
+ ALREADY_REPORTED = HTTPStatus.ALREADY_REPORTED
+ IM_USED = HTTPStatus.IM_USED
+ MULTIPLE_CHOICES = HTTPStatus.MULTIPLE_CHOICES
+ MOVED_PERMANENTLY = HTTPStatus.MOVED_PERMANENTLY
+ FOUND = HTTPStatus.FOUND
+ SEE_OTHER = HTTPStatus.SEE_OTHER
+ NOT_MODIFIED = HTTPStatus.NOT_MODIFIED
+ USE_PROXY = HTTPStatus.USE_PROXY
+ TEMPORARY_REDIRECT = HTTPStatus.TEMPORARY_REDIRECT
+ PERMANENT_REDIRECT = HTTPStatus.PERMANENT_REDIRECT
+ BAD_REQUEST = HTTPStatus.BAD_REQUEST
+ UNAUTHORIZED = HTTPStatus.UNAUTHORIZED
+ PAYMENT_REQUIRED = HTTPStatus.PAYMENT_REQUIRED
+ FORBIDDEN = HTTPStatus.FORBIDDEN
+ NOT_FOUND = HTTPStatus.NOT_FOUND
+ METHOD_NOT_ALLOWED = HTTPStatus.METHOD_NOT_ALLOWED
+ NOT_ACCEPTABLE = HTTPStatus.NOT_ACCEPTABLE
+ PROXY_AUTHENTICATION_REQUIRED = HTTPStatus.PROXY_AUTHENTICATION_REQUIRED
+ REQUEST_TIMEOUT = HTTPStatus.REQUEST_TIMEOUT
+ CONFLICT = HTTPStatus.CONFLICT
+ GONE = HTTPStatus.GONE
+ LENGTH_REQUIRED = HTTPStatus.LENGTH_REQUIRED
+ PRECONDITION_FAILED = HTTPStatus.PRECONDITION_FAILED
+ REQUEST_ENTITY_TOO_LARGE = HTTPStatus.REQUEST_ENTITY_TOO_LARGE
+ REQUEST_URI_TOO_LONG = HTTPStatus.REQUEST_URI_TOO_LONG
+ UNSUPPORTED_MEDIA_TYPE = HTTPStatus.UNSUPPORTED_MEDIA_TYPE
+ REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE
+ EXPECTATION_FAILED = HTTPStatus.EXPECTATION_FAILED
+ UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_ENTITY
+ LOCKED = HTTPStatus.LOCKED
+ FAILED_DEPENDENCY = HTTPStatus.FAILED_DEPENDENCY
+ UPGRADE_REQUIRED = HTTPStatus.UPGRADE_REQUIRED
+ PRECONDITION_REQUIRED = HTTPStatus.PRECONDITION_REQUIRED
+ TOO_MANY_REQUESTS = HTTPStatus.TOO_MANY_REQUESTS
+ REQUEST_HEADER_FIELDS_TOO_LARGE = HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE
+ INTERNAL_SERVER_ERROR = HTTPStatus.INTERNAL_SERVER_ERROR
+ NOT_IMPLEMENTED = HTTPStatus.NOT_IMPLEMENTED
+ BAD_GATEWAY = HTTPStatus.BAD_GATEWAY
+ SERVICE_UNAVAILABLE = HTTPStatus.SERVICE_UNAVAILABLE
+ GATEWAY_TIMEOUT = HTTPStatus.GATEWAY_TIMEOUT
+ HTTP_VERSION_NOT_SUPPORTED = HTTPStatus.HTTP_VERSION_NOT_SUPPORTED
+ VARIANT_ALSO_NEGOTIATES = HTTPStatus.VARIANT_ALSO_NEGOTIATES
+ INSUFFICIENT_STORAGE = HTTPStatus.INSUFFICIENT_STORAGE
+ LOOP_DETECTED = HTTPStatus.LOOP_DETECTED
+ NOT_EXTENDED = HTTPStatus.NOT_EXTENDED
+ NETWORK_AUTHENTICATION_REQUIRED = HTTPStatus.NETWORK_AUTHENTICATION_REQUIRED
+ """
+ )
+ )
+
+
+astroid.register_module_extender(astroid.MANAGER, "http", _http_transform)
+astroid.register_module_extender(astroid.MANAGER, "http.client", _http_client_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_io.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_io.py
new file mode 100644
index 0000000..4c68922
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_io.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2016 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid brain hints for some of the _io C objects."""
+
+import astroid
+
+
+BUFFERED = {"BufferedWriter", "BufferedReader"}
+TextIOWrapper = "TextIOWrapper"
+FileIO = "FileIO"
+BufferedWriter = "BufferedWriter"
+
+
+def _generic_io_transform(node, name, cls):
+ """Transform the given name, by adding the given *class* as a member of the node."""
+
+ io_module = astroid.MANAGER.ast_from_module_name("_io")
+ attribute_object = io_module[cls]
+ instance = attribute_object.instantiate_class()
+ node.locals[name] = [instance]
+
+
+def _transform_text_io_wrapper(node):
+ # This is not always correct, since it can vary with the type of the descriptor,
+ # being stdout, stderr or stdin. But we cannot get access to the name of the
+ # stream, which is why we are using the BufferedWriter class as a default
+ # value
+ return _generic_io_transform(node, name="buffer", cls=BufferedWriter)
+
+
+def _transform_buffered(node):
+ return _generic_io_transform(node, name="raw", cls=FileIO)
+
+
+astroid.MANAGER.register_transform(
+ astroid.ClassDef, _transform_buffered, lambda node: node.name in BUFFERED
+)
+astroid.MANAGER.register_transform(
+ astroid.ClassDef,
+ _transform_text_io_wrapper,
+ lambda node: node.name == TextIOWrapper,
+)
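A sketch (not part of the vendored file) of the _io hints: the transforms add "buffer" and "raw" members to classes that only exist in C. Assumes this astroid copy is importable:

import astroid

wrapper = astroid.MANAGER.ast_from_module_name("_io")["TextIOWrapper"]
print(wrapper.locals["buffer"][0].name)   # BufferedWriter, added by _transform_text_io_wrapper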
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_mechanize.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_mechanize.py
new file mode 100644
index 0000000..93f282e
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_mechanize.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def mechanize_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ """
+
+class Browser(object):
+ def open(self, url, data=None, timeout=None):
+ return None
+ def open_novisit(self, url, data=None, timeout=None):
+ return None
+ def open_local_file(self, filename):
+ return None
+
+"""
+ )
+
+
+register_module_extender(MANAGER, "mechanize", mechanize_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py
new file mode 100644
index 0000000..71256ee
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_multiprocessing.py
@@ -0,0 +1,106 @@
+# Copyright (c) 2016 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+import sys
+
+import astroid
+from astroid import exceptions
+
+
+def _multiprocessing_transform():
+ module = astroid.parse(
+ """
+ from multiprocessing.managers import SyncManager
+ def Manager():
+ return SyncManager()
+ """
+ )
+    # Multiprocessing uses a getattr lookup inside contexts
+    # in order to get the attributes it needs. Since it's extremely
+    # dynamic, we use this approach to fake it.
+ node = astroid.parse(
+ """
+ from multiprocessing.context import DefaultContext, BaseContext
+ default = DefaultContext()
+ base = BaseContext()
+ """
+ )
+ try:
+ context = next(node["default"].infer())
+ base = next(node["base"].infer())
+ except exceptions.InferenceError:
+ return module
+
+ for node in (context, base):
+ for key, value in node.locals.items():
+ if key.startswith("_"):
+ continue
+
+ value = value[0]
+ if isinstance(value, astroid.FunctionDef):
+                # We need to rebind this, since otherwise
+ # it will have an extra argument (self).
+ value = astroid.BoundMethod(value, node)
+ module[key] = value
+ return module
+
+
+def _multiprocessing_managers_transform():
+ return astroid.parse(
+ """
+ import array
+ import threading
+ import multiprocessing.pool as pool
+
+ import six
+
+ class Namespace(object):
+ pass
+
+ class Value(object):
+ def __init__(self, typecode, value, lock=True):
+ self._typecode = typecode
+ self._value = value
+ def get(self):
+ return self._value
+ def set(self, value):
+ self._value = value
+ def __repr__(self):
+ return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
+ value = property(get, set)
+
+ def Array(typecode, sequence, lock=True):
+ return array.array(typecode, sequence)
+
+ class SyncManager(object):
+ Queue = JoinableQueue = six.moves.queue.Queue
+ Event = threading.Event
+ RLock = threading.RLock
+ BoundedSemaphore = threading.BoundedSemaphore
+ Condition = threading.Condition
+ Barrier = threading.Barrier
+ Pool = pool.Pool
+ list = list
+ dict = dict
+ Value = Value
+ Array = Array
+ Namespace = Namespace
+ __enter__ = lambda self: self
+ __exit__ = lambda *args: args
+
+ def start(self, initializer=None, initargs=None):
+ pass
+ def shutdown(self):
+ pass
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "multiprocessing.managers", _multiprocessing_managers_transform
+)
+astroid.register_module_extender(
+ astroid.MANAGER, "multiprocessing", _multiprocessing_transform
+)
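A sketch (not part of the vendored file) of the multiprocessing fake in action, assuming this astroid copy is importable:

import astroid

call = astroid.extract_node('''
import multiprocessing
multiprocessing.Manager()  #@
''')
print(any(getattr(r, "name", "") == "SyncManager" for r in call.inferred()))   # True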
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py
new file mode 100644
index 0000000..de24067
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_namedtuple_enum.py
@@ -0,0 +1,449 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Dmitry Pribysh
+# Copyright (c) 2015 David Shea
+# Copyright (c) 2015 Philip Lorenz
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Mateusz Bysiek
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2017 Łukasz Rogalski
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the Python standard library."""
+
+import functools
+import keyword
+from textwrap import dedent
+
+from astroid import MANAGER, UseInferenceDefault, inference_tip, InferenceError
+from astroid import arguments
+from astroid import exceptions
+from astroid import nodes
+from astroid.builder import AstroidBuilder, extract_node
+from astroid import util
+
+
+TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
+ENUM_BASE_NAMES = {
+ "Enum",
+ "IntEnum",
+ "enum.Enum",
+ "enum.IntEnum",
+ "IntFlag",
+ "enum.IntFlag",
+}
+
+
+def _infer_first(node, context):
+ if node is util.Uninferable:
+ raise UseInferenceDefault
+ try:
+ value = next(node.infer(context=context))
+ if value is util.Uninferable:
+ raise UseInferenceDefault()
+ else:
+ return value
+ except StopIteration:
+ raise InferenceError()
+
+
+def _find_func_form_arguments(node, context):
+ def _extract_namedtuple_arg_or_keyword(position, key_name=None):
+
+ if len(args) > position:
+ return _infer_first(args[position], context)
+ if key_name and key_name in found_keywords:
+ return _infer_first(found_keywords[key_name], context)
+
+ args = node.args
+ keywords = node.keywords
+ found_keywords = (
+ {keyword.arg: keyword.value for keyword in keywords} if keywords else {}
+ )
+
+ name = _extract_namedtuple_arg_or_keyword(position=0, key_name="typename")
+ names = _extract_namedtuple_arg_or_keyword(position=1, key_name="field_names")
+ if name and names:
+ return name.value, names
+
+ raise UseInferenceDefault()
+
+
+def infer_func_form(node, base_type, context=None, enum=False):
+ """Specific inference function for namedtuple or Python 3 enum. """
+ # node is a Call node, class name as first argument and generated class
+ # attributes as second argument
+
+    # the namedtuple or enum list of attributes can be a list of strings
+    # or a whitespace-separated string
+ try:
+ name, names = _find_func_form_arguments(node, context)
+ try:
+ attributes = names.value.replace(",", " ").split()
+ except AttributeError:
+ if not enum:
+ attributes = [
+ _infer_first(const, context).value for const in names.elts
+ ]
+ else:
+                # Enums support either an iterator of (name, value) pairs
+                # or a mapping.
+ if hasattr(names, "items") and isinstance(names.items, list):
+ attributes = [
+ _infer_first(const[0], context).value
+ for const in names.items
+ if isinstance(const[0], nodes.Const)
+ ]
+ elif hasattr(names, "elts"):
+ # Enums can support either ["a", "b", "c"]
+ # or [("a", 1), ("b", 2), ...], but they can't
+ # be mixed.
+ if all(isinstance(const, nodes.Tuple) for const in names.elts):
+ attributes = [
+ _infer_first(const.elts[0], context).value
+ for const in names.elts
+ if isinstance(const, nodes.Tuple)
+ ]
+ else:
+ attributes = [
+ _infer_first(const, context).value for const in names.elts
+ ]
+ else:
+ raise AttributeError
+ if not attributes:
+ raise AttributeError
+ except (AttributeError, exceptions.InferenceError):
+ raise UseInferenceDefault()
+
+ # If we can't infer the name of the class, don't crash, up to this point
+ # we know it is a namedtuple anyway.
+ name = name or "Uninferable"
+ # we want to return a Class node instance with proper attributes set
+ class_node = nodes.ClassDef(name, "docstring")
+ class_node.parent = node.parent
+ # set base class=tuple
+ class_node.bases.append(base_type)
+ # XXX add __init__(*attributes) method
+ for attr in attributes:
+ fake_node = nodes.EmptyNode()
+ fake_node.parent = class_node
+ fake_node.attrname = attr
+ class_node.instance_attrs[attr] = [fake_node]
+ return class_node, name, attributes
+
+
+def _has_namedtuple_base(node):
+ """Predicate for class inference tip
+
+ :type node: ClassDef
+ :rtype: bool
+ """
+ return set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES
+
+
+def _looks_like(node, name):
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ return func.attrname == name
+ if isinstance(func, nodes.Name):
+ return func.name == name
+ return False
+
+
+_looks_like_namedtuple = functools.partial(_looks_like, name="namedtuple")
+_looks_like_enum = functools.partial(_looks_like, name="Enum")
+_looks_like_typing_namedtuple = functools.partial(_looks_like, name="NamedTuple")
+
+
+def infer_named_tuple(node, context=None):
+ """Specific inference function for namedtuple Call node"""
+ tuple_base_name = nodes.Name(name="tuple", parent=node.root())
+ class_node, name, attributes = infer_func_form(
+ node, tuple_base_name, context=context
+ )
+ call_site = arguments.CallSite.from_call(node)
+ func = next(extract_node("import collections; collections.namedtuple").infer())
+ try:
+ rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
+ except InferenceError:
+ rename = False
+
+ if rename:
+ attributes = _get_renamed_namedtuple_attributes(attributes)
+
+ replace_args = ", ".join("{arg}=None".format(arg=arg) for arg in attributes)
+ field_def = (
+ " {name} = property(lambda self: self[{index:d}], "
+ "doc='Alias for field number {index:d}')"
+ )
+ field_defs = "\n".join(
+ field_def.format(name=name, index=index)
+ for index, name in enumerate(attributes)
+ )
+ fake = AstroidBuilder(MANAGER).string_build(
+ """
+class %(name)s(tuple):
+ __slots__ = ()
+ _fields = %(fields)r
+ def _asdict(self):
+ return self.__dict__
+ @classmethod
+ def _make(cls, iterable, new=tuple.__new__, len=len):
+ return new(cls, iterable)
+ def _replace(self, %(replace_args)s):
+ return self
+ def __getnewargs__(self):
+ return tuple(self)
+%(field_defs)s
+ """
+ % {
+ "name": name,
+ "fields": attributes,
+ "field_defs": field_defs,
+ "replace_args": replace_args,
+ }
+ )
+ class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
+ class_node.locals["_make"] = fake.body[0].locals["_make"]
+ class_node.locals["_replace"] = fake.body[0].locals["_replace"]
+ class_node.locals["_fields"] = fake.body[0].locals["_fields"]
+ for attr in attributes:
+ class_node.locals[attr] = fake.body[0].locals[attr]
+    # this function may raise UseInferenceDefault, so it can't be a generator; return an iterator
+ return iter([class_node])
+
+
+def _get_renamed_namedtuple_attributes(field_names):
+ names = list(field_names)
+ seen = set()
+ for i, name in enumerate(field_names):
+ if (
+ not all(c.isalnum() or c == "_" for c in name)
+ or keyword.iskeyword(name)
+ or not name
+ or name[0].isdigit()
+ or name.startswith("_")
+ or name in seen
+ ):
+ names[i] = "_%d" % i
+ seen.add(name)
+ return tuple(names)
+
+
+def infer_enum(node, context=None):
+ """ Specific inference function for enum Call node. """
+ enum_meta = extract_node(
+ """
+ class EnumMeta(object):
+ 'docstring'
+ def __call__(self, node):
+ class EnumAttribute(object):
+ name = ''
+ value = 0
+ return EnumAttribute()
+ def __iter__(self):
+ class EnumAttribute(object):
+ name = ''
+ value = 0
+ return [EnumAttribute()]
+ def __reversed__(self):
+ class EnumAttribute(object):
+ name = ''
+ value = 0
+ return (EnumAttribute, )
+ def __next__(self):
+ return next(iter(self))
+ def __getitem__(self, attr):
+ class Value(object):
+ @property
+ def name(self):
+ return ''
+ @property
+ def value(self):
+ return attr
+
+ return Value()
+ __members__ = ['']
+ """
+ )
+ class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0]
+ return iter([class_node.instantiate_class()])
+
+
+INT_FLAG_ADDITION_METHODS = """
+ def __or__(self, other):
+ return {name}(self.value | other.value)
+ def __and__(self, other):
+ return {name}(self.value & other.value)
+ def __xor__(self, other):
+ return {name}(self.value ^ other.value)
+ def __add__(self, other):
+ return {name}(self.value + other.value)
+ def __div__(self, other):
+ return {name}(self.value / other.value)
+ def __invert__(self):
+ return {name}(~self.value)
+ def __mul__(self, other):
+ return {name}(self.value * other.value)
+"""
+
+
+def infer_enum_class(node):
+ """ Specific inference for enums. """
+ for basename in node.basenames:
+ # TODO: doesn't handle subclasses yet. This implementation
+ # is a hack to support enums.
+ if basename not in ENUM_BASE_NAMES:
+ continue
+ if node.root().name == "enum":
+ # Skip if the class is directly from enum module.
+ break
+ for local, values in node.locals.items():
+ if any(not isinstance(value, nodes.AssignName) for value in values):
+ continue
+
+ targets = []
+ stmt = values[0].statement()
+ if isinstance(stmt, nodes.Assign):
+ if isinstance(stmt.targets[0], nodes.Tuple):
+ targets = stmt.targets[0].itered()
+ else:
+ targets = stmt.targets
+ elif isinstance(stmt, nodes.AnnAssign):
+ targets = [stmt.target]
+
+ inferred_return_value = None
+ if isinstance(stmt, nodes.Assign):
+ if isinstance(stmt.value, nodes.Const):
+ if isinstance(stmt.value.value, str):
+ inferred_return_value = repr(stmt.value.value)
+ else:
+ inferred_return_value = stmt.value.value
+ else:
+ inferred_return_value = stmt.value.as_string()
+
+ new_targets = []
+ for target in targets:
+ # Replace all the assignments with our mocked class.
+ classdef = dedent(
+ """
+ class {name}({types}):
+ @property
+ def value(self):
+ return {return_value}
+ @property
+ def name(self):
+ return "{name}"
+ """.format(
+ name=target.name,
+ types=", ".join(node.basenames),
+ return_value=inferred_return_value,
+ )
+ )
+ if "IntFlag" in basename:
+ # Alright, we need to add some additional methods.
+ # Unfortunately we still can't infer the resulting objects as
+                # Enum members, but once we are able to do that, the following
+ # should result in some nice symbolic execution
+ classdef += INT_FLAG_ADDITION_METHODS.format(name=target.name)
+
+ fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
+ fake.parent = target.parent
+ for method in node.mymethods():
+ fake.locals[method.name] = [method]
+ new_targets.append(fake.instantiate_class())
+ node.locals[local] = new_targets
+ break
+ return node
+
+
+def infer_typing_namedtuple_class(class_node, context=None):
+ """Infer a subclass of typing.NamedTuple"""
+ # Check if it has the corresponding bases
+ annassigns_fields = [
+ annassign.target.name
+ for annassign in class_node.body
+ if isinstance(annassign, nodes.AnnAssign)
+ ]
+ code = dedent(
+ """
+ from collections import namedtuple
+ namedtuple({typename!r}, {fields!r})
+ """
+ ).format(typename=class_node.name, fields=",".join(annassigns_fields))
+ node = extract_node(code)
+ generated_class_node = next(infer_named_tuple(node, context))
+ for method in class_node.mymethods():
+ generated_class_node.locals[method.name] = [method]
+
+ for assign in class_node.body:
+ if not isinstance(assign, nodes.Assign):
+ continue
+
+ for target in assign.targets:
+ attr = target.name
+ generated_class_node.locals[attr] = class_node.locals[attr]
+
+ return iter((generated_class_node,))
+
+
+def infer_typing_namedtuple(node, context=None):
+ """Infer a typing.NamedTuple(...) call."""
+ # This is essentially a namedtuple with different arguments
+ # so we extract the args and infer a named tuple.
+ try:
+ func = next(node.func.infer())
+ except InferenceError:
+ raise UseInferenceDefault
+
+ if func.qname() != "typing.NamedTuple":
+ raise UseInferenceDefault
+
+ if len(node.args) != 2:
+ raise UseInferenceDefault
+
+ if not isinstance(node.args[1], (nodes.List, nodes.Tuple)):
+ raise UseInferenceDefault
+
+ names = []
+ for elt in node.args[1].elts:
+ if not isinstance(elt, (nodes.List, nodes.Tuple)):
+ raise UseInferenceDefault
+ if len(elt.elts) != 2:
+ raise UseInferenceDefault
+ names.append(elt.elts[0].as_string())
+
+ typename = node.args[0].as_string()
+ if names:
+ field_names = "({},)".format(",".join(names))
+ else:
+ field_names = "''"
+ node = extract_node(
+ "namedtuple({typename}, {fields})".format(typename=typename, fields=field_names)
+ )
+ return infer_named_tuple(node, context)
+
+
+MANAGER.register_transform(
+ nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple
+)
+MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), _looks_like_enum)
+MANAGER.register_transform(
+ nodes.ClassDef,
+ infer_enum_class,
+ predicate=lambda cls: any(
+ basename for basename in cls.basenames if basename in ENUM_BASE_NAMES
+ ),
+)
+MANAGER.register_transform(
+ nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base
+)
+MANAGER.register_transform(
+ nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple
+)
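A sketch (not part of the vendored file) of the namedtuple inference above: the Call is replaced by a generated ClassDef whose fields are real properties. Assumes this astroid copy is importable:

import astroid

name_ref = astroid.extract_node('''
import collections
Point = collections.namedtuple("Point", ["x", "y"])
Point  #@
''')
cls = next(name_ref.infer())
print(cls.name, sorted(a for a in cls.locals if not a.startswith("_")))   # Point ['x', 'y']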
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_nose.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_nose.py
new file mode 100644
index 0000000..7b12d76
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_nose.py
@@ -0,0 +1,77 @@
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Hooks for nose library."""
+
+import re
+import textwrap
+
+import astroid
+import astroid.builder
+
+_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
+
+
+def _pep8(name, caps=re.compile("([A-Z])")):
+ return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)
+
+
+def _nose_tools_functions():
+ """Get an iterator of names and bound methods."""
+ module = _BUILDER.string_build(
+ textwrap.dedent(
+ """
+ import unittest
+
+ class Test(unittest.TestCase):
+ pass
+ a = Test()
+ """
+ )
+ )
+ try:
+ case = next(module["a"].infer())
+ except astroid.InferenceError:
+ return
+ for method in case.methods():
+ if method.name.startswith("assert") and "_" not in method.name:
+ pep8_name = _pep8(method.name)
+ yield pep8_name, astroid.BoundMethod(method, case)
+ if method.name == "assertEqual":
+ # nose also exports assert_equals.
+ yield "assert_equals", astroid.BoundMethod(method, case)
+
+
+def _nose_tools_transform(node):
+ for method_name, method in _nose_tools_functions():
+ node.locals[method_name] = [method]
+
+
+def _nose_tools_trivial_transform():
+ """Custom transform for the nose.tools module."""
+ stub = _BUILDER.string_build("""__all__ = []""")
+ all_entries = ["ok_", "eq_"]
+
+ for pep8_name, method in _nose_tools_functions():
+ all_entries.append(pep8_name)
+ stub[pep8_name] = method
+
+ # Update the __all__ variable, since nose.tools
+ # does this manually with .append.
+ all_assign = stub["__all__"].parent
+ all_object = astroid.List(all_entries)
+ all_object.parent = all_assign
+ all_assign.value = all_object
+ return stub
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "nose.tools.trivial", _nose_tools_trivial_transform
+)
+astroid.MANAGER.register_transform(
+ astroid.Module, _nose_tools_transform, lambda n: n.name == "nose.tools"
+)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py
new file mode 100644
index 0000000..43b30e4
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_fromnumeric.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for numpy.core.fromnumeric module."""
+
+import astroid
+
+
+def numpy_core_fromnumeric_transform():
+ return astroid.parse(
+ """
+ def sum(a, axis=None, dtype=None, out=None, keepdims=None, initial=None):
+ return numpy.ndarray([0, 0])
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.core.fromnumeric", numpy_core_fromnumeric_transform
+)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py
new file mode 100644
index 0000000..05a73d9
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_function_base.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for numpy.core.function_base module."""
+
+import functools
+import astroid
+from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
+
+
+METHODS_TO_BE_INFERRED = {
+ "linspace": """def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, axis=0):
+ return numpy.ndarray([0, 0])""",
+ "logspace": """def logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None, axis=0):
+ return numpy.ndarray([0, 0])""",
+ "geomspace": """def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
+ return numpy.ndarray([0, 0])""",
+}
+
+for func_name, func_src in METHODS_TO_BE_INFERRED.items():
+ inference_function = functools.partial(infer_numpy_member, func_src)
+ astroid.MANAGER.register_transform(
+ astroid.Attribute,
+ astroid.inference_tip(inference_function),
+ functools.partial(looks_like_numpy_member, func_name),
+ )
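A sketch (not part of the vendored file) of the registration pattern used here and in the other numpy brain modules: any Attribute that looks like the named numpy member is re-inferred from the corresponding stub source. Assumes this astroid copy and numpy are importable:

import astroid

attr = astroid.extract_node('''
import numpy
numpy.linspace  #@
''')
print(next(attr.infer()).name)   # linspace, rebuilt from the stub in METHODS_TO_BE_INFERRED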
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py
new file mode 100644
index 0000000..3032acc
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_multiarray.py
@@ -0,0 +1,55 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for numpy.core.multiarray module."""
+
+import functools
+import astroid
+from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
+
+
+def numpy_core_multiarray_transform():
+ return astroid.parse(
+ """
+ # different functions defined in multiarray.py
+ def inner(a, b):
+ return numpy.ndarray([0, 0])
+
+ def vdot(a, b):
+ return numpy.ndarray([0, 0])
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.core.multiarray", numpy_core_multiarray_transform
+)
+
+
+METHODS_TO_BE_INFERRED = {
+ "array": """def array(object, dtype=None, copy=True, order='K', subok=False, ndmin=0):
+ return numpy.ndarray([0, 0])""",
+ "dot": """def dot(a, b, out=None):
+ return numpy.ndarray([0, 0])""",
+ "empty_like": """def empty_like(a, dtype=None, order='K', subok=True):
+ return numpy.ndarray((0, 0))""",
+ "concatenate": """def concatenate(arrays, axis=None, out=None):
+ return numpy.ndarray((0, 0))""",
+ "where": """def where(condition, x=None, y=None):
+ return numpy.ndarray([0, 0])""",
+ "empty": """def empty(shape, dtype=float, order='C'):
+ return numpy.ndarray([0, 0])""",
+ "zeros": """def zeros(shape, dtype=float, order='C'):
+ return numpy.ndarray([0, 0])""",
+}
+
+for method_name, function_src in METHODS_TO_BE_INFERRED.items():
+ inference_function = functools.partial(infer_numpy_member, function_src)
+ astroid.MANAGER.register_transform(
+ astroid.Attribute,
+ astroid.inference_tip(inference_function),
+ functools.partial(looks_like_numpy_member, method_name),
+ )
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py
new file mode 100644
index 0000000..ba43c94
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numeric.py
@@ -0,0 +1,43 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for numpy.core.numeric module."""
+
+import functools
+import astroid
+from brain_numpy_utils import looks_like_numpy_member, infer_numpy_member
+
+
+def numpy_core_numeric_transform():
+ return astroid.parse(
+ """
+ # different functions defined in numeric.py
+ import numpy
+ def zeros_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
+ def ones_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
+ def full_like(a, fill_value, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.core.numeric", numpy_core_numeric_transform
+)
+
+
+METHODS_TO_BE_INFERRED = {
+ "ones": """def ones(shape, dtype=None, order='C'):
+ return numpy.ndarray([0, 0])"""
+}
+
+
+for method_name, function_src in METHODS_TO_BE_INFERRED.items():
+ inference_function = functools.partial(infer_numpy_member, function_src)
+ astroid.MANAGER.register_transform(
+ astroid.Attribute,
+ astroid.inference_tip(inference_function),
+ functools.partial(looks_like_numpy_member, method_name),
+ )
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py
new file mode 100644
index 0000000..42021fa
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_numerictypes.py
@@ -0,0 +1,250 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+# TODO(hippo91) : correct the methods signature.
+
+"""Astroid hooks for numpy.core.numerictypes module."""
+
+import astroid
+
+
+def numpy_core_numerictypes_transform():
+ return astroid.parse(
+ """
+ # different types defined in numerictypes.py
+ class generic(object):
+ def __init__(self, value):
+ self.T = None
+ self.base = None
+ self.data = None
+ self.dtype = None
+ self.flags = None
+ self.flat = None
+ self.imag = None
+ self.itemsize = None
+ self.nbytes = None
+ self.ndim = None
+ self.real = None
+ self.size = None
+ self.strides = None
+
+ def all(self): return uninferable
+ def any(self): return uninferable
+ def argmax(self): return uninferable
+ def argmin(self): return uninferable
+ def argsort(self): return uninferable
+ def astype(self): return uninferable
+ def base(self): return uninferable
+ def byteswap(self): return uninferable
+ def choose(self): return uninferable
+ def clip(self): return uninferable
+ def compress(self): return uninferable
+ def conj(self): return uninferable
+ def conjugate(self): return uninferable
+ def copy(self): return uninferable
+ def cumprod(self): return uninferable
+ def cumsum(self): return uninferable
+ def data(self): return uninferable
+ def diagonal(self): return uninferable
+ def dtype(self): return uninferable
+ def dump(self): return uninferable
+ def dumps(self): return uninferable
+ def fill(self): return uninferable
+ def flags(self): return uninferable
+ def flat(self): return uninferable
+ def flatten(self): return uninferable
+ def getfield(self): return uninferable
+ def imag(self): return uninferable
+ def item(self): return uninferable
+ def itemset(self): return uninferable
+ def itemsize(self): return uninferable
+ def max(self): return uninferable
+ def mean(self): return uninferable
+ def min(self): return uninferable
+ def nbytes(self): return uninferable
+ def ndim(self): return uninferable
+ def newbyteorder(self): return uninferable
+ def nonzero(self): return uninferable
+ def prod(self): return uninferable
+ def ptp(self): return uninferable
+ def put(self): return uninferable
+ def ravel(self): return uninferable
+ def real(self): return uninferable
+ def repeat(self): return uninferable
+ def reshape(self): return uninferable
+ def resize(self): return uninferable
+ def round(self): return uninferable
+ def searchsorted(self): return uninferable
+ def setfield(self): return uninferable
+ def setflags(self): return uninferable
+ def shape(self): return uninferable
+ def size(self): return uninferable
+ def sort(self): return uninferable
+ def squeeze(self): return uninferable
+ def std(self): return uninferable
+ def strides(self): return uninferable
+ def sum(self): return uninferable
+ def swapaxes(self): return uninferable
+ def take(self): return uninferable
+ def tobytes(self): return uninferable
+ def tofile(self): return uninferable
+ def tolist(self): return uninferable
+ def tostring(self): return uninferable
+ def trace(self): return uninferable
+ def transpose(self): return uninferable
+ def var(self): return uninferable
+ def view(self): return uninferable
+
+
+ class dtype(object):
+ def __init__(self, obj, align=False, copy=False):
+ self.alignment = None
+ self.base = None
+ self.byteorder = None
+ self.char = None
+ self.descr = None
+ self.fields = None
+ self.flags = None
+ self.hasobject = None
+ self.isalignedstruct = None
+ self.isbuiltin = None
+ self.isnative = None
+ self.itemsize = None
+ self.kind = None
+ self.metadata = None
+ self.name = None
+ self.names = None
+ self.num = None
+ self.shape = None
+ self.str = None
+ self.subdtype = None
+ self.type = None
+
+ def newbyteorder(self, new_order='S'): return uninferable
+ def __neg__(self): return uninferable
+
+ class busdaycalendar(object):
+ def __init__(self, weekmask='1111100', holidays=None):
+ self.holidays = None
+ self.weekmask = None
+
+ class flexible(generic): pass
+ class bool_(generic): pass
+ class number(generic):
+ def __neg__(self): return uninferable
+ class datetime64(generic):
+ def __init__(self, nb, unit=None): pass
+
+
+ class void(flexible):
+ def __init__(self, *args, **kwargs):
+ self.base = None
+ self.dtype = None
+ self.flags = None
+ def getfield(self): return uninferable
+ def setfield(self): return uninferable
+
+
+ class character(flexible): pass
+
+
+ class integer(number):
+ def __init__(self, value):
+ self.denominator = None
+ self.numerator = None
+
+
+ class inexact(number): pass
+
+
+ class str_(str, character):
+ def maketrans(self, x, y=None, z=None): return uninferable
+
+
+ class bytes_(bytes, character):
+ def fromhex(self, string): return uninferable
+ def maketrans(self, frm, to): return uninferable
+
+
+ class signedinteger(integer): pass
+
+
+ class unsignedinteger(integer): pass
+
+
+ class complexfloating(inexact): pass
+
+
+ class floating(inexact): pass
+
+
+ class float64(floating, float):
+ def fromhex(self, string): return uninferable
+
+
+ class uint64(unsignedinteger): pass
+ class complex64(complexfloating): pass
+ class int16(signedinteger): pass
+ class float96(floating): pass
+ class int8(signedinteger): pass
+ class uint32(unsignedinteger): pass
+ class uint8(unsignedinteger): pass
+ class _typedict(dict): pass
+ class complex192(complexfloating): pass
+ class timedelta64(signedinteger):
+ def __init__(self, nb, unit=None): pass
+ class int32(signedinteger): pass
+ class uint16(unsignedinteger): pass
+ class float32(floating): pass
+ class complex128(complexfloating, complex): pass
+ class float16(floating): pass
+ class int64(signedinteger): pass
+
+ buffer_type = memoryview
+ bool8 = bool_
+ byte = int8
+ bytes0 = bytes_
+ cdouble = complex128
+ cfloat = complex128
+ clongdouble = complex192
+ clongfloat = complex192
+ complex_ = complex128
+ csingle = complex64
+ double = float64
+ float_ = float64
+ half = float16
+ int0 = int32
+ int_ = int32
+ intc = int32
+ intp = int32
+ long = int32
+ longcomplex = complex192
+ longdouble = float96
+ longfloat = float96
+ longlong = int64
+ object0 = object_
+ object_ = object_
+ short = int16
+ single = float32
+ singlecomplex = complex64
+ str0 = str_
+ string_ = bytes_
+ ubyte = uint8
+ uint = uint32
+ uint0 = uint32
+ uintc = uint32
+ uintp = uint32
+ ulonglong = uint64
+ unicode = str_
+ unicode_ = str_
+ ushort = uint16
+ void0 = void
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.core.numerictypes", numpy_core_numerictypes_transform
+)
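
The large stub above gives checkers a static counterpart for numpy's scalar-type hierarchy. A quick check of the intended result, assuming numpy is installed in the analyzed environment (otherwise there is no real module for the extender to extend):

    import astroid

    node = astroid.extract_node("""
    import numpy.core.numerictypes as nt
    nt.uint32 #@
    """)
    cls = next(node.infer())
    print(cls.name)   # expected: 'uint32'
    # The ancestry should follow the stub's class hierarchy.
    print([ancestor.name for ancestor in cls.ancestors()])
    # expected to include 'unsignedinteger', 'integer', 'number', 'generic'
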
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py
new file mode 100644
index 0000000..459d38c
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_core_umath.py
@@ -0,0 +1,105 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for numpy.core.umath module."""
+
+import astroid
+
+
+def numpy_core_umath_transform():
+ ufunc_optional_keyword_arguments = (
+ """out=None, where=True, casting='same_kind', order='K', """
+ """dtype=None, subok=True"""
+ )
+ return astroid.parse(
+ """
+ # Constants
+ e = 2.718281828459045
+ euler_gamma = 0.5772156649015329
+
+ # No arg functions
+ def geterrobj(): return []
+
+ # One arg functions
+ def seterrobj(errobj): return None
+
+ # One arg functions with optional kwargs
+ def arccos(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def arccosh(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def arcsin(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def arcsinh(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def arctan(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def arctanh(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def cbrt(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def conj(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def conjugate(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def cosh(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def deg2rad(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def degrees(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def exp2(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def expm1(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def fabs(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def frexp(x, {opt_args:s}): return (numpy.ndarray((0, 0)), numpy.ndarray((0, 0)))
+ def isfinite(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def isinf(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def log(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def log1p(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def log2(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def logical_not(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def modf(x, {opt_args:s}): return (numpy.ndarray((0, 0)), numpy.ndarray((0, 0)))
+ def negative(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def rad2deg(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def radians(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def reciprocal(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def rint(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def sign(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def signbit(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def sinh(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def spacing(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def square(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def tan(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def tanh(x, {opt_args:s}): return numpy.ndarray((0, 0))
+ def trunc(x, {opt_args:s}): return numpy.ndarray((0, 0))
+
+ # Two args functions with optional kwargs
+ def bitwise_and(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def bitwise_or(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def bitwise_xor(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def copysign(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def divide(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def equal(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def floor_divide(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def fmax(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def fmin(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def fmod(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def greater(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def hypot(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def ldexp(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def left_shift(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def less(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def logaddexp(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def logaddexp2(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def logical_and(x1, x2, {opt_args:s}): return numpy.ndarray([0, 0])
+ def logical_or(x1, x2, {opt_args:s}): return numpy.ndarray([0, 0])
+ def logical_xor(x1, x2, {opt_args:s}): return numpy.ndarray([0, 0])
+ def maximum(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def minimum(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def nextafter(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def not_equal(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def power(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def remainder(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def right_shift(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def subtract(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ def true_divide(x1, x2, {opt_args:s}): return numpy.ndarray((0, 0))
+ """.format(
+ opt_args=ufunc_optional_keyword_arguments
+ )
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.core.umath", numpy_core_umath_transform
+)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
new file mode 100644
index 0000000..8c231a3
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_ndarray.py
@@ -0,0 +1,153 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+# Copyright (c) 2017-2018 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for numpy ndarray class."""
+
+import functools
+import astroid
+
+
+def infer_numpy_ndarray(node, context=None):
+ ndarray = """
+ class ndarray(object):
+ def __init__(self, shape, dtype=float, buffer=None, offset=0,
+ strides=None, order=None):
+ self.T = None
+ self.base = None
+ self.ctypes = None
+ self.data = None
+ self.dtype = None
+ self.flags = None
+ self.flat = None
+ self.imag = None
+ self.itemsize = None
+ self.nbytes = None
+ self.ndim = None
+ self.real = None
+ self.shape = None
+ self.size = None
+ self.strides = None
+
+ def __abs__(self): return numpy.ndarray([0, 0])
+ def __add__(self, value): return numpy.ndarray([0, 0])
+ def __and__(self, value): return numpy.ndarray([0, 0])
+ def __array__(self, dtype=None): return numpy.ndarray([0, 0])
+ def __array_wrap__(self, obj): return numpy.ndarray([0, 0])
+ def __contains__(self, key): return True
+ def __copy__(self): return numpy.ndarray([0, 0])
+ def __deepcopy__(self, memo): return numpy.ndarray([0, 0])
+ def __divmod__(self, value): return (numpy.ndarray([0, 0]), numpy.ndarray([0, 0]))
+ def __eq__(self, value): return numpy.ndarray([0, 0])
+ def __float__(self): return 0.
+ def __floordiv__(self): return numpy.ndarray([0, 0])
+ def __ge__(self, value): return numpy.ndarray([0, 0])
+ def __getitem__(self, key): return uninferable
+ def __gt__(self, value): return numpy.ndarray([0, 0])
+ def __iadd__(self, value): return numpy.ndarray([0, 0])
+ def __iand__(self, value): return numpy.ndarray([0, 0])
+ def __ifloordiv__(self, value): return numpy.ndarray([0, 0])
+ def __ilshift__(self, value): return numpy.ndarray([0, 0])
+ def __imod__(self, value): return numpy.ndarray([0, 0])
+ def __imul__(self, value): return numpy.ndarray([0, 0])
+ def __int__(self): return 0
+ def __invert__(self): return numpy.ndarray([0, 0])
+ def __ior__(self, value): return numpy.ndarray([0, 0])
+ def __ipow__(self, value): return numpy.ndarray([0, 0])
+ def __irshift__(self, value): return numpy.ndarray([0, 0])
+ def __isub__(self, value): return numpy.ndarray([0, 0])
+ def __itruediv__(self, value): return numpy.ndarray([0, 0])
+ def __ixor__(self, value): return numpy.ndarray([0, 0])
+ def __le__(self, value): return numpy.ndarray([0, 0])
+ def __len__(self): return 1
+ def __lshift__(self, value): return numpy.ndarray([0, 0])
+ def __lt__(self, value): return numpy.ndarray([0, 0])
+ def __matmul__(self, value): return numpy.ndarray([0, 0])
+ def __mod__(self, value): return numpy.ndarray([0, 0])
+ def __mul__(self, value): return numpy.ndarray([0, 0])
+ def __ne__(self, value): return numpy.ndarray([0, 0])
+ def __neg__(self): return numpy.ndarray([0, 0])
+ def __or__(self): return numpy.ndarray([0, 0])
+ def __pos__(self): return numpy.ndarray([0, 0])
+ def __pow__(self): return numpy.ndarray([0, 0])
+ def __repr__(self): return str()
+ def __rshift__(self): return numpy.ndarray([0, 0])
+ def __setitem__(self, key, value): return uninferable
+ def __str__(self): return str()
+ def __sub__(self, value): return numpy.ndarray([0, 0])
+ def __truediv__(self, value): return numpy.ndarray([0, 0])
+ def __xor__(self, value): return numpy.ndarray([0, 0])
+ def all(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
+ def any(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
+ def argmax(self, axis=None, out=None): return np.ndarray([0, 0])
+ def argmin(self, axis=None, out=None): return np.ndarray([0, 0])
+ def argpartition(self, kth, axis=-1, kind='introselect', order=None): return np.ndarray([0, 0])
+ def argsort(self, axis=-1, kind='quicksort', order=None): return np.ndarray([0, 0])
+ def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
+ def byteswap(self, inplace=False): return np.ndarray([0, 0])
+ def choose(self, choices, out=None, mode='raise'): return np.ndarray([0, 0])
+ def clip(self, min=None, max=None, out=None): return np.ndarray([0, 0])
+ def compress(self, condition, axis=None, out=None): return np.ndarray([0, 0])
+ def conj(self): return np.ndarray([0, 0])
+ def conjugate(self): return np.ndarray([0, 0])
+ def copy(self, order='C'): return np.ndarray([0, 0])
+ def cumprod(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
+ def cumsum(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
+ def diagonal(self, offset=0, axis1=0, axis2=1): return np.ndarray([0, 0])
+ def dot(self, b, out=None): return np.ndarray([0, 0])
+ def dump(self, file): return None
+ def dumps(self): return str()
+ def fill(self, value): return None
+ def flatten(self, order='C'): return np.ndarray([0, 0])
+ def getfield(self, dtype, offset=0): return np.ndarray([0, 0])
+ def item(self, *args): return uninferable
+ def itemset(self, *args): return None
+ def max(self, axis=None, out=None): return np.ndarray([0, 0])
+ def mean(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
+ def min(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
+ def newbyteorder(self, new_order='S'): return np.ndarray([0, 0])
+ def nonzero(self): return (1,)
+ def partition(self, kth, axis=-1, kind='introselect', order=None): return None
+ def prod(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
+ def ptp(self, axis=None, out=None): return np.ndarray([0, 0])
+ def put(self, indices, values, mode='raise'): return None
+ def ravel(self, order='C'): return np.ndarray([0, 0])
+ def repeat(self, repeats, axis=None): return np.ndarray([0, 0])
+ def reshape(self, shape, order='C'): return np.ndarray([0, 0])
+ def resize(self, new_shape, refcheck=True): return None
+ def round(self, decimals=0, out=None): return np.ndarray([0, 0])
+ def searchsorted(self, v, side='left', sorter=None): return np.ndarray([0, 0])
+ def setfield(self, val, dtype, offset=0): return None
+ def setflags(self, write=None, align=None, uic=None): return None
+ def sort(self, axis=-1, kind='quicksort', order=None): return None
+ def squeeze(self, axis=None): return np.ndarray([0, 0])
+ def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
+ def sum(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
+ def swapaxes(self, axis1, axis2): return np.ndarray([0, 0])
+ def take(self, indices, axis=None, out=None, mode='raise'): return np.ndarray([0, 0])
+ def tobytes(self, order='C'): return b''
+ def tofile(self, fid, sep="", format="%s"): return None
+ def tolist(self, ): return []
+ def tostring(self, order='C'): return b''
+ def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None): return np.ndarray([0, 0])
+ def transpose(self, *axes): return np.ndarray([0, 0])
+ def var(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
+ def view(self, dtype=None, type=None): return np.ndarray([0, 0])
+ """
+ node = astroid.extract_node(ndarray)
+ return node.infer(context=context)
+
+
+def _looks_like_numpy_ndarray(node):
+ return isinstance(node, astroid.Attribute) and node.attrname == "ndarray"
+
+
+astroid.MANAGER.register_transform(
+ astroid.Attribute,
+ astroid.inference_tip(infer_numpy_ndarray),
+ _looks_like_numpy_ndarray,
+)
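
`_looks_like_numpy_ndarray` matches on the attribute name alone, which is also what lets the bare `numpy.ndarray([0, 0])` returns inside the other numpy stubs resolve to ndarray instances. A sketch of a chained inference that exercises this hook together with the multiarray stub, with no numpy installation required:

    import astroid

    call = astroid.extract_node("""
    import numpy as np
    np.zeros((3, 3)).sum(axis=0) #@
    """)
    # np.zeros(...) infers via the multiarray stub to an ndarray instance,
    # and .sum(...) then resolves against the stub class above.
    inferred = next(call.infer())
    print(inferred.name)   # expected: 'ndarray'
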
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py
new file mode 100644
index 0000000..772bfc4
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_random_mtrand.py
@@ -0,0 +1,70 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+# TODO(hippo91) : correct the functions return types
+"""Astroid hooks for numpy.random.mtrand module."""
+
+import astroid
+
+
+def numpy_random_mtrand_transform():
+ return astroid.parse(
+ """
+ def beta(a, b, size=None): return uninferable
+ def binomial(n, p, size=None): return uninferable
+ def bytes(length): return uninferable
+ def chisquare(df, size=None): return uninferable
+ def choice(a, size=None, replace=True, p=None): return uninferable
+ def dirichlet(alpha, size=None): return uninferable
+ def exponential(scale=1.0, size=None): return uninferable
+ def f(dfnum, dfden, size=None): return uninferable
+ def gamma(shape, scale=1.0, size=None): return uninferable
+ def geometric(p, size=None): return uninferable
+ def get_state(): return uninferable
+ def gumbel(loc=0.0, scale=1.0, size=None): return uninferable
+ def hypergeometric(ngood, nbad, nsample, size=None): return uninferable
+ def laplace(loc=0.0, scale=1.0, size=None): return uninferable
+ def logistic(loc=0.0, scale=1.0, size=None): return uninferable
+ def lognormal(mean=0.0, sigma=1.0, size=None): return uninferable
+ def logseries(p, size=None): return uninferable
+ def multinomial(n, pvals, size=None): return uninferable
+ def multivariate_normal(mean, cov, size=None): return uninferable
+ def negative_binomial(n, p, size=None): return uninferable
+ def noncentral_chisquare(df, nonc, size=None): return uninferable
+ def noncentral_f(dfnum, dfden, nonc, size=None): return uninferable
+ def normal(loc=0.0, scale=1.0, size=None): return uninferable
+ def pareto(a, size=None): return uninferable
+ def permutation(x): return uninferable
+ def poisson(lam=1.0, size=None): return uninferable
+ def power(a, size=None): return uninferable
+ def rand(*args): return uninferable
+ def randint(low, high=None, size=None, dtype='l'):
+ import numpy
+ return numpy.ndarray((1,1))
+ def randn(*args): return uninferable
+ def random_integers(low, high=None, size=None): return uninferable
+ def random_sample(size=None): return uninferable
+ def rayleigh(scale=1.0, size=None): return uninferable
+ def seed(seed=None): return uninferable
+ def set_state(state): return uninferable
+ def shuffle(x): return uninferable
+ def standard_cauchy(size=None): return uninferable
+ def standard_exponential(size=None): return uninferable
+ def standard_gamma(shape, size=None): return uninferable
+ def standard_normal(size=None): return uninferable
+ def standard_t(df, size=None): return uninferable
+ def triangular(left, mode, right, size=None): return uninferable
+ def uniform(low=0.0, high=1.0, size=None): return uninferable
+ def vonmises(mu, kappa, size=None): return uninferable
+ def wald(mean, scale, size=None): return uninferable
+ def weibull(a, size=None): return uninferable
+ def zipf(a, size=None): return uninferable
+ """
+ )
+
+
+astroid.register_module_extender(
+ astroid.MANAGER, "numpy.random.mtrand", numpy_random_mtrand_transform
+)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py
new file mode 100644
index 0000000..2bad01e
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_numpy_utils.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2018-2019 hippo91
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Different utilities for the numpy brains"""
+
+
+import astroid
+
+
+def infer_numpy_member(src, node, context=None):
+ node = astroid.extract_node(src)
+ return node.infer(context=context)
+
+
+def _is_a_numpy_module(node: astroid.node_classes.Name) -> bool:
+ """
+ Returns True if the node is a representation of a numpy module.
+
+ For example in :
+ import numpy as np
+ x = np.linspace(1, 2)
+ The node is a representation of the numpy module.
+
+ :param node: node to test
+ :return: True if the node is a representation of the numpy module.
+ """
+ module_nickname = node.name
+ potential_import_target = [
+ x for x in node.lookup(module_nickname)[1] if isinstance(x, astroid.Import)
+ ]
+ for target in potential_import_target:
+ if ("numpy", module_nickname) in target.names:
+ return True
+ return False
+
+
+def looks_like_numpy_member(
+ member_name: str, node: astroid.node_classes.NodeNG
+) -> bool:
+ """
+ Returns True if the node is a member of numpy whose
+ name is member_name.
+
+ :param member_name: name of the member
+ :param node: node to test
+ :return: True if the node is a member of numpy
+ """
+ return (
+ isinstance(node, astroid.Attribute)
+ and node.attrname == member_name
+ and isinstance(node.expr, astroid.Name)
+ and _is_a_numpy_module(node.expr)
+ )
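
These helpers are the predicate side of the numpy brains: the check is purely syntactic, based on the import statements visible from the node. A small illustration; importing the module by its bare name works because this astroid version puts the brain/ directory on sys.path:

    import astroid
    import brain_numpy_utils  # available after astroid loads its brain plugins

    numpy_attr = astroid.extract_node("import numpy as np\nnp.linspace #@")
    other_attr = astroid.extract_node("import math as np\nnp.linspace #@")
    # Only the attribute whose receiver comes from an `import numpy` should match.
    print(brain_numpy_utils.looks_like_numpy_member("linspace", numpy_attr))   # expected: True
    print(brain_numpy_utils.looks_like_numpy_member("linspace", other_attr))   # expected: False
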
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py
new file mode 100644
index 0000000..25e7649
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_pkg_resources.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+import astroid
+from astroid import parse
+from astroid import inference_tip
+from astroid import register_module_extender
+from astroid import MANAGER
+
+
+def pkg_resources_transform():
+ return parse(
+ """
+def require(*requirements):
+ return pkg_resources.working_set.require(*requirements)
+
+def run_script(requires, script_name):
+ return pkg_resources.working_set.run_script(requires, script_name)
+
+def iter_entry_points(group, name=None):
+ return pkg_resources.working_set.iter_entry_points(group, name)
+
+def resource_exists(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).has_resource(resource_name)
+
+def resource_isdir(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).resource_isdir(
+ resource_name)
+
+def resource_filename(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).get_resource_filename(
+ self, resource_name)
+
+def resource_stream(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).get_resource_stream(
+ self, resource_name)
+
+def resource_string(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).get_resource_string(
+ self, resource_name)
+
+def resource_listdir(package_or_requirement, resource_name):
+ return get_provider(package_or_requirement).resource_listdir(
+ resource_name)
+
+def extraction_error():
+ pass
+
+def get_cache_path(archive_name, names=()):
+ extract_path = self.extraction_path or get_default_cache()
+ target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
+ return target_path
+
+def postprocess(tempname, filename):
+ pass
+
+def set_extraction_path(path):
+ pass
+
+def cleanup_resources(force=False):
+ pass
+
+def get_distribution(dist):
+ return Distribution(dist)
+
+_namespace_packages = {}
+"""
+ )
+
+
+register_module_extender(MANAGER, "pkg_resources", pkg_resources_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_pytest.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_pytest.py
new file mode 100644
index 0000000..d7e3ac8
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_pytest.py
@@ -0,0 +1,88 @@
+# Copyright (c) 2014-2016 Claudiu Popa
+# Copyright (c) 2014 Jeff Quast
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2016 Florian Bruhin
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for pytest."""
+from __future__ import absolute_import
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+
+
+def pytest_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ """
+
+try:
+ import _pytest.mark
+ import _pytest.recwarn
+ import _pytest.runner
+ import _pytest.python
+ import _pytest.skipping
+ import _pytest.assertion
+except ImportError:
+ pass
+else:
+ deprecated_call = _pytest.recwarn.deprecated_call
+ warns = _pytest.recwarn.warns
+
+ exit = _pytest.runner.exit
+ fail = _pytest.runner.fail
+ skip = _pytest.runner.skip
+ importorskip = _pytest.runner.importorskip
+
+ xfail = _pytest.skipping.xfail
+ mark = _pytest.mark.MarkGenerator()
+ raises = _pytest.python.raises
+
+ # New in pytest 3.0
+ try:
+ approx = _pytest.python.approx
+ register_assert_rewrite = _pytest.assertion.register_assert_rewrite
+ except AttributeError:
+ pass
+
+
+# Moved in pytest 3.0
+
+try:
+ import _pytest.freeze_support
+ freeze_includes = _pytest.freeze_support.freeze_includes
+except ImportError:
+ try:
+ import _pytest.genscript
+ freeze_includes = _pytest.genscript.freeze_includes
+ except ImportError:
+ pass
+
+try:
+ import _pytest.debugging
+ set_trace = _pytest.debugging.pytestPDB().set_trace
+except ImportError:
+ try:
+ import _pytest.pdb
+ set_trace = _pytest.pdb.pytestPDB().set_trace
+ except ImportError:
+ pass
+
+try:
+ import _pytest.fixtures
+ fixture = _pytest.fixtures.fixture
+ yield_fixture = _pytest.fixtures.yield_fixture
+except ImportError:
+ try:
+ import _pytest.python
+ fixture = _pytest.python.fixture
+ yield_fixture = _pytest.python.yield_fixture
+ except ImportError:
+ pass
+"""
+ )
+
+
+register_module_extender(MANAGER, "pytest", pytest_transform)
+register_module_extender(MANAGER, "py.test", pytest_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_qt.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_qt.py
new file mode 100644
index 0000000..8679d14
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_qt.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2015-2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+# Copyright (c) 2017 Roy Wright
+# Copyright (c) 2018 Ashley Whetter
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the PyQT library."""
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+from astroid import nodes
+from astroid import parse
+
+
+def _looks_like_signal(node, signal_name="pyqtSignal"):
+ if "__class__" in node.instance_attrs:
+ try:
+ cls = node.instance_attrs["__class__"][0]
+ return cls.name == signal_name
+ except AttributeError:
+ # return False if the cls does not have a name attribute
+ pass
+ return False
+
+
+def transform_pyqt_signal(node):
+ module = parse(
+ """
+ class pyqtSignal(object):
+ def connect(self, slot, type=None, no_receiver_check=False):
+ pass
+ def disconnect(self, slot):
+ pass
+ def emit(self, *args):
+ pass
+ """
+ )
+ signal_cls = module["pyqtSignal"]
+ node.instance_attrs["emit"] = signal_cls["emit"]
+ node.instance_attrs["disconnect"] = signal_cls["disconnect"]
+ node.instance_attrs["connect"] = signal_cls["connect"]
+
+
+def transform_pyside_signal(node):
+ module = parse(
+ """
+ class NotPySideSignal(object):
+ def connect(self, receiver, type=None):
+ pass
+ def disconnect(self, receiver):
+ pass
+ def emit(self, *args):
+ pass
+ """
+ )
+ signal_cls = module["NotPySideSignal"]
+ node.instance_attrs["connect"] = signal_cls["connect"]
+ node.instance_attrs["disconnect"] = signal_cls["disconnect"]
+ node.instance_attrs["emit"] = signal_cls["emit"]
+
+
+def pyqt4_qtcore_transform():
+ return AstroidBuilder(MANAGER).string_build(
+ """
+
+def SIGNAL(signal_name): pass
+
+class QObject(object):
+ def emit(self, signal): pass
+"""
+ )
+
+
+register_module_extender(MANAGER, "PyQt4.QtCore", pyqt4_qtcore_transform)
+MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal)
+MANAGER.register_transform(
+ nodes.ClassDef,
+ transform_pyside_signal,
+ lambda node: node.qname() in ("PySide.QtCore.Signal", "PySide2.QtCore.Signal"),
+)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_random.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_random.py
new file mode 100644
index 0000000..5ec858a
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_random.py
@@ -0,0 +1,75 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import random
+
+import astroid
+from astroid import helpers
+from astroid import MANAGER
+
+
+ACCEPTED_ITERABLES_FOR_SAMPLE = (astroid.List, astroid.Set, astroid.Tuple)
+
+
+def _clone_node_with_lineno(node, parent, lineno):
+ cls = node.__class__
+ other_fields = node._other_fields
+ _astroid_fields = node._astroid_fields
+ init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
+ postinit_params = {param: getattr(node, param) for param in _astroid_fields}
+ if other_fields:
+ init_params.update({param: getattr(node, param) for param in other_fields})
+ new_node = cls(**init_params)
+ if hasattr(node, "postinit") and _astroid_fields:
+ new_node.postinit(**postinit_params)
+ return new_node
+
+
+def infer_random_sample(node, context=None):
+ if len(node.args) != 2:
+ raise astroid.UseInferenceDefault
+
+ length = node.args[1]
+ if not isinstance(length, astroid.Const):
+ raise astroid.UseInferenceDefault
+ if not isinstance(length.value, int):
+ raise astroid.UseInferenceDefault
+
+ inferred_sequence = helpers.safe_infer(node.args[0], context=context)
+ if not inferred_sequence:
+ raise astroid.UseInferenceDefault
+
+ if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
+ raise astroid.UseInferenceDefault
+
+ if length.value > len(inferred_sequence.elts):
+ # In this case, this will raise a ValueError
+ raise astroid.UseInferenceDefault
+
+ try:
+ elts = random.sample(inferred_sequence.elts, length.value)
+ except ValueError:
+ raise astroid.UseInferenceDefault
+
+ new_node = astroid.List(
+ lineno=node.lineno, col_offset=node.col_offset, parent=node.scope()
+ )
+ new_elts = [
+ _clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
+ for elt in elts
+ ]
+ new_node.postinit(new_elts)
+ return iter((new_node,))
+
+
+def _looks_like_random_sample(node):
+ func = node.func
+ if isinstance(func, astroid.Attribute):
+ return func.attrname == "sample"
+ if isinstance(func, astroid.Name):
+ return func.name == "sample"
+ return False
+
+
+MANAGER.register_transform(
+ astroid.Call, astroid.inference_tip(infer_random_sample), _looks_like_random_sample
+)
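
`infer_random_sample` replaces a `random.sample(seq, k)` call with a literal list node of length k, so checkers can reason about the result's length and element types. Something along these lines is expected:

    import astroid

    call = astroid.extract_node("import random\nrandom.sample([10, 20, 30], 2) #@")
    result = next(call.infer())
    # The inference tip returns a synthesized List node with k cloned elements.
    print(type(result).__name__)   # 'List'
    print(len(result.elts))        # 2
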
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_re.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_re.py
new file mode 100644
index 0000000..c7ee51a
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_re.py
@@ -0,0 +1,36 @@
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import sys
+import astroid
+
+PY36 = sys.version_info >= (3, 6)
+
+if PY36:
+ # Since Python 3.6 there is the RegexFlag enum
+ # where every entry will be exposed via updating globals()
+
+ def _re_transform():
+ return astroid.parse(
+ """
+ import sre_compile
+ ASCII = sre_compile.SRE_FLAG_ASCII
+ IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
+ LOCALE = sre_compile.SRE_FLAG_LOCALE
+ UNICODE = sre_compile.SRE_FLAG_UNICODE
+ MULTILINE = sre_compile.SRE_FLAG_MULTILINE
+ DOTALL = sre_compile.SRE_FLAG_DOTALL
+ VERBOSE = sre_compile.SRE_FLAG_VERBOSE
+ A = ASCII
+ I = IGNORECASE
+ L = LOCALE
+ U = UNICODE
+ M = MULTILINE
+ S = DOTALL
+ X = VERBOSE
+ TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
+ T = TEMPLATE
+ DEBUG = sre_compile.SRE_FLAG_DEBUG
+ """
+ )
+
+ astroid.register_module_extender(astroid.MANAGER, "re", _re_transform)
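
Because the RegexFlag members are injected into the `re` namespace at runtime via `globals().update(...)`, astroid cannot see them statically; the stub re-creates them from sre_compile. Expected effect, roughly:

    import astroid

    flag = astroid.extract_node("import re\nre.IGNORECASE #@")
    inferred = next(flag.infer())
    # expected: a Const node holding 2, i.e. sre_compile.SRE_FLAG_IGNORECASE
    print(inferred)
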
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_six.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_six.py
new file mode 100644
index 0000000..b342fbf
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_six.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2014-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""Astroid hooks for six module."""
+
+from textwrap import dedent
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+from astroid.exceptions import (
+ AstroidBuildingError,
+ InferenceError,
+ AttributeInferenceError,
+)
+from astroid import nodes
+
+
+SIX_ADD_METACLASS = "six.add_metaclass"
+
+
+def _indent(text, prefix, predicate=None):
+ """Adds 'prefix' to the beginning of selected lines in 'text'.
+
+ If 'predicate' is provided, 'prefix' will only be added to the lines
+ where 'predicate(line)' is True. If 'predicate' is not provided,
+ it will default to adding 'prefix' to all non-empty lines that do not
+ consist solely of whitespace characters.
+ """
+ if predicate is None:
+ predicate = lambda line: line.strip()
+
+ def prefixed_lines():
+ for line in text.splitlines(True):
+ yield prefix + line if predicate(line) else line
+
+ return "".join(prefixed_lines())
+
+
+_IMPORTS = """
+import _io
+cStringIO = _io.StringIO
+filter = filter
+from itertools import filterfalse
+input = input
+from sys import intern
+map = map
+range = range
+from imp import reload as reload_module
+from functools import reduce
+from shlex import quote as shlex_quote
+from io import StringIO
+from collections import UserDict, UserList, UserString
+xrange = range
+zip = zip
+from itertools import zip_longest
+import builtins
+import configparser
+import copyreg
+import _dummy_thread
+import http.cookiejar as http_cookiejar
+import http.cookies as http_cookies
+import html.entities as html_entities
+import html.parser as html_parser
+import http.client as http_client
+import http.server as http_server
+BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
+import pickle as cPickle
+import queue
+import reprlib
+import socketserver
+import _thread
+import winreg
+import xmlrpc.server as xmlrpc_server
+import xmlrpc.client as xmlrpc_client
+import urllib.robotparser as urllib_robotparser
+import email.mime.multipart as email_mime_multipart
+import email.mime.nonmultipart as email_mime_nonmultipart
+import email.mime.text as email_mime_text
+import email.mime.base as email_mime_base
+import urllib.parse as urllib_parse
+import urllib.error as urllib_error
+import tkinter
+import tkinter.dialog as tkinter_dialog
+import tkinter.filedialog as tkinter_filedialog
+import tkinter.scrolledtext as tkinter_scrolledtext
+import tkinter.simpledialog as tkinder_simpledialog
+import tkinter.tix as tkinter_tix
+import tkinter.ttk as tkinter_ttk
+import tkinter.constants as tkinter_constants
+import tkinter.dnd as tkinter_dnd
+import tkinter.colorchooser as tkinter_colorchooser
+import tkinter.commondialog as tkinter_commondialog
+import tkinter.filedialog as tkinter_tkfiledialog
+import tkinter.font as tkinter_font
+import tkinter.messagebox as tkinter_messagebox
+import urllib
+import urllib.request as urllib_request
+import urllib.robotparser as urllib_robotparser
+import urllib.parse as urllib_parse
+import urllib.error as urllib_error
+"""
+
+
+def six_moves_transform():
+ code = dedent(
+ """
+ class Moves(object):
+ {}
+ moves = Moves()
+ """
+ ).format(_indent(_IMPORTS, " "))
+ module = AstroidBuilder(MANAGER).string_build(code)
+ module.name = "six.moves"
+ return module
+
+
+def _six_fail_hook(modname):
+ """Fix six.moves imports due to the dynamic nature of this
+ class.
+
+ Construct a pseudo-module which contains all the necessary imports
+ for six
+
+ :param modname: Name of failed module
+ :type modname: str
+
+ :return: An astroid module
+ :rtype: nodes.Module
+ """
+
+ attribute_of = modname != "six.moves" and modname.startswith("six.moves")
+ if modname != "six.moves" and not attribute_of:
+ raise AstroidBuildingError(modname=modname)
+ module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
+ module.name = "six.moves"
+ if attribute_of:
+ # Facilitate import of submodules in Moves
+ start_index = len(module.name)
+ attribute = modname[start_index:].lstrip(".").replace(".", "_")
+ try:
+ import_attr = module.getattr(attribute)[0]
+ except AttributeInferenceError:
+ raise AstroidBuildingError(modname=modname)
+ if isinstance(import_attr, nodes.Import):
+ submodule = MANAGER.ast_from_module_name(import_attr.names[0][0])
+ return submodule
+ # Let dummy submodule imports pass through
+ # This will cause an Uninferable result, which is okay
+ return module
+
+
+def _looks_like_decorated_with_six_add_metaclass(node):
+ if not node.decorators:
+ return False
+
+ for decorator in node.decorators.nodes:
+ if not isinstance(decorator, nodes.Call):
+ continue
+ if decorator.func.as_string() == SIX_ADD_METACLASS:
+ return True
+ return False
+
+
+def transform_six_add_metaclass(node):
+ """Check if the given class node is decorated with *six.add_metaclass*
+
+ If so, inject its argument as the metaclass of the underlying class.
+ """
+ if not node.decorators:
+ return
+
+ for decorator in node.decorators.nodes:
+ if not isinstance(decorator, nodes.Call):
+ continue
+
+ try:
+ func = next(decorator.func.infer())
+ except InferenceError:
+ continue
+ if func.qname() == SIX_ADD_METACLASS and decorator.args:
+ metaclass = decorator.args[0]
+ node._metaclass = metaclass
+ return node
+
+
+register_module_extender(MANAGER, "six", six_moves_transform)
+register_module_extender(
+ MANAGER, "requests.packages.urllib3.packages.six", six_moves_transform
+)
+MANAGER.register_failed_import_hook(_six_fail_hook)
+MANAGER.register_transform(
+ nodes.ClassDef,
+ transform_six_add_metaclass,
+ _looks_like_decorated_with_six_add_metaclass,
+)
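
`transform_six_add_metaclass` rewrites classes decorated with `six.add_metaclass` so the metaclass becomes visible to inference. A sketch of the expected behaviour, assuming six is installed in the environment being analyzed:

    import astroid

    cls = astroid.extract_node("""
    import abc
    import six

    @six.add_metaclass(abc.ABCMeta)
    class Base(object):
        pass
    """)
    # The transform injects the decorator argument as the declared metaclass.
    meta = cls.metaclass()
    print(meta.name if meta else meta)   # expected: 'ABCMeta'
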
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_ssl.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_ssl.py
new file mode 100644
index 0000000..893d8a2
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_ssl.py
@@ -0,0 +1,74 @@
+# Copyright (c) 2016 Claudiu Popa
+# Copyright (c) 2016 Ceridwen
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the ssl library."""
+
+from astroid import MANAGER, register_module_extender
+from astroid.builder import AstroidBuilder
+from astroid import nodes
+from astroid import parse
+
+
+def ssl_transform():
+ return parse(
+ """
+ from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
+ from _ssl import _SSLContext, MemoryBIO
+ from _ssl import (
+ SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
+ SSLSyscallError, SSLEOFError,
+ )
+ from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
+ from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
+ from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
+ try:
+ from _ssl import RAND_egd
+ except ImportError:
+ # LibreSSL does not provide RAND_egd
+ pass
+ from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
+ OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
+ OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
+ OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)
+
+ from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
+ ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
+ ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
+ ALERT_DESCRIPTION_BAD_RECORD_MAC,
+ ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
+ ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
+ ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
+ ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
+ ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
+ ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
+ ALERT_DESCRIPTION_DECRYPT_ERROR,
+ ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
+ ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
+ ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
+ ALERT_DESCRIPTION_INTERNAL_ERROR,
+ ALERT_DESCRIPTION_NO_RENEGOTIATION,
+ ALERT_DESCRIPTION_PROTOCOL_VERSION,
+ ALERT_DESCRIPTION_RECORD_OVERFLOW,
+ ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
+ ALERT_DESCRIPTION_UNKNOWN_CA,
+ ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
+ ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
+ ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
+ ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
+ ALERT_DESCRIPTION_USER_CANCELLED)
+ from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
+ SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
+ SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
+ from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
+ from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
+ from _ssl import _OPENSSL_API_VERSION
+ from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
+ from _ssl import PROTOCOL_TLS, PROTOCOL_TLS_CLIENT, PROTOCOL_TLS_SERVER
+ """
+ )
+
+
+register_module_extender(MANAGER, "ssl", ssl_transform)
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_subprocess.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_subprocess.py
new file mode 100644
index 0000000..c14dc55
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_subprocess.py
@@ -0,0 +1,111 @@
+# Copyright (c) 2016-2017 Claudiu Popa
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+import sys
+import textwrap
+
+import astroid
+
+
+PY37 = sys.version_info >= (3, 7)
+PY36 = sys.version_info >= (3, 6)
+
+
+def _subprocess_transform():
+ communicate = (bytes("string", "ascii"), bytes("string", "ascii"))
+ communicate_signature = "def communicate(self, input=None, timeout=None)"
+ if PY37:
+ init = """
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0, restore_signals=True,
+ start_new_session=False, pass_fds=(), *,
+ encoding=None, errors=None, text=None):
+ pass
+ """
+ elif PY36:
+ init = """
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0, restore_signals=True,
+ start_new_session=False, pass_fds=(), *,
+ encoding=None, errors=None):
+ pass
+ """
+ else:
+ init = """
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0, restore_signals=True,
+ start_new_session=False, pass_fds=()):
+ pass
+ """
+ wait_signature = "def wait(self, timeout=None)"
+ ctx_manager = """
+ def __enter__(self): return self
+ def __exit__(self, *args): pass
+ """
+ py3_args = "args = []"
+ code = textwrap.dedent(
+ """
+ def check_output(
+ args, *,
+ stdin=None,
+ stderr=None,
+ shell=False,
+ cwd=None,
+ encoding=None,
+ errors=None,
+ universal_newlines=False,
+ timeout=None,
+ env=None
+ ):
+
+ if universal_newlines:
+ return ""
+ return b""
+ class Popen(object):
+ returncode = pid = 0
+ stdin = stdout = stderr = file()
+ %(py3_args)s
+
+ %(communicate_signature)s:
+ return %(communicate)r
+ %(wait_signature)s:
+ return self.returncode
+ def poll(self):
+ return self.returncode
+ def send_signal(self, signal):
+ pass
+ def terminate(self):
+ pass
+ def kill(self):
+ pass
+ %(ctx_manager)s
+ """
+ % {
+ "communicate": communicate,
+ "communicate_signature": communicate_signature,
+ "wait_signature": wait_signature,
+ "ctx_manager": ctx_manager,
+ "py3_args": py3_args,
+ }
+ )
+
+ init_lines = textwrap.dedent(init).splitlines()
+ indented_init = "\n".join(" " * 4 + line for line in init_lines)
+ code += indented_init
+ return astroid.parse(code)
+
+
+astroid.register_module_extender(astroid.MANAGER, "subprocess", _subprocess_transform)
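
Because the real `subprocess.Popen` stays in the module and the stub class is appended after it, inferring a call such as `proc.communicate()` can yield several candidates; the stub's constant two-tuple of bytes should be among them. A rough illustration:

    import astroid

    call = astroid.extract_node("""
    import subprocess
    proc = subprocess.Popen(['ls'])
    proc.communicate() #@
    """)
    # Expect the stub's (b'string', b'string') tuple to appear among the results,
    # alongside whatever the real implementation yields.
    for value in call.infer():
        print(value)
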
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_threading.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_threading.py
new file mode 100644
index 0000000..dffa55a
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_threading.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016 Claudiu Popa
+# Copyright (c) 2017 Łukasz Rogalski
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+import astroid
+
+
+def _thread_transform():
+ return astroid.parse(
+ """
+ class lock(object):
+ def acquire(self, blocking=True, timeout=-1):
+ pass
+ def release(self):
+ pass
+ def __enter__(self):
+ return True
+ def __exit__(self, *args):
+ pass
+ def locked(self):
+ return False
+
+ def Lock():
+ return lock()
+ """
+ )
+
+
+astroid.register_module_extender(astroid.MANAGER, "threading", _thread_transform)
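
The stub gives `threading.Lock()` a concrete return type with acquire/release and context-manager methods, which the C-level `_thread.allocate_lock` cannot provide statically. Roughly:

    import astroid

    call = astroid.extract_node("import threading\nthreading.Lock() #@")
    # Filter out anything inference could not resolve; the stub should
    # contribute an instance of the `lock` class defined above.
    candidates = [v for v in call.infer() if v is not astroid.Uninferable]
    print(candidates[0].name)   # expected: 'lock'
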
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_typing.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_typing.py
new file mode 100644
index 0000000..9ff7227
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_typing.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2018 Claudiu Popa
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 David Euresti
+# Copyright (c) 2018 Bryce Guinta
+
+"""Astroid hooks for typing.py support."""
+import typing
+
+from astroid import (
+ MANAGER,
+ UseInferenceDefault,
+ extract_node,
+ inference_tip,
+ nodes,
+ InferenceError,
+)
+
+
+TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
+TYPING_TYPEVARS = {"TypeVar", "NewType"}
+TYPING_TYPEVARS_QUALIFIED = {"typing.TypeVar", "typing.NewType"}
+TYPING_TYPE_TEMPLATE = """
+class Meta(type):
+ def __getitem__(self, item):
+ return self
+
+ @property
+ def __args__(self):
+ return ()
+
+class {0}(metaclass=Meta):
+ pass
+"""
+TYPING_MEMBERS = set(typing.__all__)
+
+
+def looks_like_typing_typevar_or_newtype(node):
+ func = node.func
+ if isinstance(func, nodes.Attribute):
+ return func.attrname in TYPING_TYPEVARS
+ if isinstance(func, nodes.Name):
+ return func.name in TYPING_TYPEVARS
+ return False
+
+
+def infer_typing_typevar_or_newtype(node, context=None):
+ """Infer a typing.TypeVar(...) or typing.NewType(...) call"""
+ try:
+ func = next(node.func.infer(context=context))
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+
+ if func.qname() not in TYPING_TYPEVARS_QUALIFIED:
+ raise UseInferenceDefault
+ if not node.args:
+ raise UseInferenceDefault
+
+ typename = node.args[0].as_string().strip("'")
+ node = extract_node(TYPING_TYPE_TEMPLATE.format(typename))
+ return node.infer(context=context)
+
+
+def _looks_like_typing_subscript(node):
+ """Try to figure out if a Subscript node *might* be a typing-related subscript"""
+ if isinstance(node, nodes.Name):
+ return node.name in TYPING_MEMBERS
+ elif isinstance(node, nodes.Attribute):
+ return node.attrname in TYPING_MEMBERS
+ elif isinstance(node, nodes.Subscript):
+ return _looks_like_typing_subscript(node.value)
+ return False
+
+
+def infer_typing_attr(node, context=None):
+ """Infer a typing.X[...] subscript"""
+ try:
+ value = next(node.value.infer())
+ except InferenceError as exc:
+ raise UseInferenceDefault from exc
+
+ if not value.qname().startswith("typing."):
+ raise UseInferenceDefault
+
+ node = extract_node(TYPING_TYPE_TEMPLATE.format(value.qname().split(".")[-1]))
+ return node.infer(context=context)
+
+
+MANAGER.register_transform(
+ nodes.Call,
+ inference_tip(infer_typing_typevar_or_newtype),
+ looks_like_typing_typevar_or_newtype,
+)
+MANAGER.register_transform(
+ nodes.Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript
+)
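
`infer_typing_typevar_or_newtype` turns `typing.TypeVar(...)` and `typing.NewType(...)` calls into a small subscriptable class built from TYPING_TYPE_TEMPLATE. The expected shape of the result:

    import astroid

    call = astroid.extract_node("""
    import typing
    typing.TypeVar('T') #@
    """)
    inferred = next(call.infer())
    print(inferred.name)             # expected: 'T'
    print(type(inferred).__name__)   # 'ClassDef' -- the generated template class, not a real TypeVar
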
diff --git a/src/main/python/venv/Lib/site-packages/astroid/brain/brain_uuid.py b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_uuid.py
new file mode 100644
index 0000000..8bda631
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/brain/brain_uuid.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2017 Claudiu Popa
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Astroid hooks for the UUID module."""
+
+
+from astroid import MANAGER
+from astroid import nodes
+
+
+def _patch_uuid_class(node):
+ # The .int member is patched using __dict__
+ node.locals["int"] = [nodes.Const(0, parent=node)]
+
+
+MANAGER.register_transform(
+ nodes.ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID"
+)
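The effect of this transform, sketched under the assumption that the vendored astroid package is importable: the uuid.UUID ClassDef gains an "int" local, so attribute inference on UUID(...).int yields the placeholder Const.

    import astroid

    node = astroid.extract_node("""
    import uuid
    uuid.UUID('12345678123456781234567812345678').int  #@
    """)
    inferred = next(node.infer())
    print(inferred.value)  # 0, the Const installed by _patch_uuid_class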
diff --git a/src/main/python/venv/Lib/site-packages/astroid/builder.py b/src/main/python/venv/Lib/site-packages/astroid/builder.py
new file mode 100644
index 0000000..ac71093
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/builder.py
@@ -0,0 +1,435 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2013 Phil Schaf
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014-2015 Google, Inc.
+# Copyright (c) 2014 Alexander Presnyakov
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2018 Anthony Sottile
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""The AstroidBuilder makes astroid from living object and / or from _ast
+
+The builder is not thread safe and can't be used to parse different sources
+at the same time.
+"""
+
+import os
+import textwrap
+from tokenize import detect_encoding
+
+from astroid._ast import _parse
+from astroid import bases
+from astroid import exceptions
+from astroid import manager
+from astroid import modutils
+from astroid import raw_building
+from astroid import rebuilder
+from astroid import nodes
+from astroid import util
+
+# The name of the transient function that is used to
+# wrap expressions to be extracted when calling
+# extract_node.
+_TRANSIENT_FUNCTION = "__"
+
+# The comment used to select a statement to be extracted
+# when calling extract_node.
+_STATEMENT_SELECTOR = "#@"
+
+MANAGER = manager.AstroidManager()
+
+
+def open_source_file(filename):
+ with open(filename, "rb") as byte_stream:
+ encoding = detect_encoding(byte_stream.readline)[0]
+ stream = open(filename, "r", newline=None, encoding=encoding)
+ data = stream.read()
+ return stream, encoding, data
+
+
+def _can_assign_attr(node, attrname):
+ try:
+ slots = node.slots()
+ except NotImplementedError:
+ pass
+ else:
+ if slots and attrname not in {slot.value for slot in slots}:
+ return False
+ return True
+
+
+class AstroidBuilder(raw_building.InspectBuilder):
+ """Class for building an astroid tree from source code or from a live module.
+
+ The param *manager* specifies the manager class which should be used.
+ If no manager is given, then the default one will be used. The
+ param *apply_transforms* determines if the transforms should be
+ applied after the tree was built from source or from a live object,
+ by default being True.
+ """
+
+ # pylint: disable=redefined-outer-name
+ def __init__(self, manager=None, apply_transforms=True):
+ super(AstroidBuilder, self).__init__()
+ self._manager = manager or MANAGER
+ self._apply_transforms = apply_transforms
+
+ def module_build(self, module, modname=None):
+ """Build an astroid from a living module instance."""
+ node = None
+ path = getattr(module, "__file__", None)
+ if path is not None:
+ path_, ext = os.path.splitext(modutils._path_from_filename(path))
+ if ext in (".py", ".pyc", ".pyo") and os.path.exists(path_ + ".py"):
+ node = self.file_build(path_ + ".py", modname)
+ if node is None:
+ # this is a built-in module
+ # get a partial representation by introspection
+ node = self.inspect_build(module, modname=modname, path=path)
+ if self._apply_transforms:
+ # We have to handle transformation by ourselves since the
+ # rebuilder isn't called for builtin nodes
+ node = self._manager.visit_transforms(node)
+ return node
+
+ def file_build(self, path, modname=None):
+ """Build astroid from a source code file (i.e. from an ast)
+
+ *path* is expected to be a python source file
+ """
+ try:
+ stream, encoding, data = open_source_file(path)
+ except IOError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to load file {path}:\n{error}",
+ modname=modname,
+ path=path,
+ error=exc,
+ ) from exc
+ except (SyntaxError, LookupError) as exc:
+ raise exceptions.AstroidSyntaxError(
+ "Python 3 encoding specification error or unknown encoding:\n"
+ "{error}",
+ modname=modname,
+ path=path,
+ error=exc,
+ ) from exc
+ except UnicodeError as exc: # wrong encoding
+ # detect_encoding returns utf-8 if no encoding specified
+ raise exceptions.AstroidBuildingError(
+ "Wrong or no encoding specified for {filename}.", filename=path
+ ) from exc
+ with stream:
+ # get module name if necessary
+ if modname is None:
+ try:
+ modname = ".".join(modutils.modpath_from_file(path))
+ except ImportError:
+ modname = os.path.splitext(os.path.basename(path))[0]
+ # build astroid representation
+ module = self._data_build(data, modname, path)
+ return self._post_build(module, encoding)
+
+ def string_build(self, data, modname="", path=None):
+ """Build astroid from source code string."""
+ module = self._data_build(data, modname, path)
+ module.file_bytes = data.encode("utf-8")
+ return self._post_build(module, "utf-8")
+
+ def _post_build(self, module, encoding):
+ """Handles encoding and delayed nodes after a module has been built"""
+ module.file_encoding = encoding
+ self._manager.cache_module(module)
+ # post tree building steps after we stored the module in the cache:
+ for from_node in module._import_from_nodes:
+ if from_node.modname == "__future__":
+ for symbol, _ in from_node.names:
+ module.future_imports.add(symbol)
+ self.add_from_names_to_locals(from_node)
+ # handle delayed assattr nodes
+ for delayed in module._delayed_assattr:
+ self.delayed_assattr(delayed)
+
+ # Visit the transforms
+ if self._apply_transforms:
+ module = self._manager.visit_transforms(module)
+ return module
+
+ def _data_build(self, data, modname, path):
+ """Build tree node from data and add some informations"""
+ try:
+ node = _parse(data + "\n")
+ except (TypeError, ValueError, SyntaxError) as exc:
+ raise exceptions.AstroidSyntaxError(
+ "Parsing Python code failed:\n{error}",
+ source=data,
+ modname=modname,
+ path=path,
+ error=exc,
+ ) from exc
+ if path is not None:
+ node_file = os.path.abspath(path)
+ else:
+ node_file = ">"
+ if modname.endswith(".__init__"):
+ modname = modname[:-9]
+ package = True
+ else:
+ package = (
+ path is not None
+ and os.path.splitext(os.path.basename(path))[0] == "__init__"
+ )
+ builder = rebuilder.TreeRebuilder(self._manager)
+ module = builder.visit_module(node, modname, node_file, package)
+ module._import_from_nodes = builder._import_from_nodes
+ module._delayed_assattr = builder._delayed_assattr
+ return module
+
+ def add_from_names_to_locals(self, node):
+ """Store imported names to the locals
+
+ Resort the locals if coming from a delayed node
+ """
+ _key_func = lambda node: node.fromlineno
+
+ def sort_locals(my_list):
+ my_list.sort(key=_key_func)
+
+ for (name, asname) in node.names:
+ if name == "*":
+ try:
+ imported = node.do_import_module()
+ except exceptions.AstroidBuildingError:
+ continue
+ for name in imported.public_names():
+ node.parent.set_local(name, node)
+ sort_locals(node.parent.scope().locals[name])
+ else:
+ node.parent.set_local(asname or name, node)
+ sort_locals(node.parent.scope().locals[asname or name])
+
+ def delayed_assattr(self, node):
+ """Visit a AssAttr node
+
+ This adds the name to locals and handles member definitions.
+ """
+ try:
+ frame = node.frame()
+ for inferred in node.expr.infer():
+ if inferred is util.Uninferable:
+ continue
+ try:
+ if inferred.__class__ is bases.Instance:
+ inferred = inferred._proxied
+ iattrs = inferred.instance_attrs
+ if not _can_assign_attr(inferred, node.attrname):
+ continue
+ elif isinstance(inferred, bases.Instance):
+ # Const, Tuple, ... we may be wrong, may be not, but
+ # anyway we don't want to pollute builtin's namespace
+ continue
+ elif inferred.is_function:
+ iattrs = inferred.instance_attrs
+ else:
+ iattrs = inferred.locals
+ except AttributeError:
+ # XXX log error
+ continue
+ values = iattrs.setdefault(node.attrname, [])
+ if node in values:
+ continue
+ # get assign in __init__ first XXX useful ?
+ if (
+ frame.name == "__init__"
+ and values
+ and values[0].frame().name != "__init__"
+ ):
+ values.insert(0, node)
+ else:
+ values.append(node)
+ except exceptions.InferenceError:
+ pass
+
+
+def build_namespace_package_module(name, path):
+ return nodes.Module(name, doc="", path=path, package=True)
+
+
+def parse(code, module_name="", path=None, apply_transforms=True):
+ """Parses a source string in order to obtain an astroid AST from it
+
+ :param str code: The code for the module.
+ :param str module_name: The name for the module, if any
+ :param str path: The path for the module
+ :param bool apply_transforms:
+ Apply the transforms for the give code. Use it if you
+ don't want the default transforms to be applied.
+ """
+ code = textwrap.dedent(code)
+ builder = AstroidBuilder(manager=MANAGER, apply_transforms=apply_transforms)
+ return builder.string_build(code, modname=module_name, path=path)
+
+
+def _extract_expressions(node):
+ """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
+
+ The function walks the AST recursively to search for expressions that
+ are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
+ expression, it completely removes the function call node from the tree,
+ replacing it by the wrapped expression inside the parent.
+
+ :param node: An astroid node.
+ :type node: astroid.bases.NodeNG
+ :yields: The sequence of wrapped expressions on the modified tree
+ expression can be found.
+ """
+ if (
+ isinstance(node, nodes.Call)
+ and isinstance(node.func, nodes.Name)
+ and node.func.name == _TRANSIENT_FUNCTION
+ ):
+ real_expr = node.args[0]
+ real_expr.parent = node.parent
+ # Search for node in all _astng_fields (the fields checked when
+ # get_children is called) of its parent. Some of those fields may
+ # be lists or tuples, in which case the elements need to be checked.
+ # When we find it, replace it by real_expr, so that the AST looks
+ # like no call to _TRANSIENT_FUNCTION ever took place.
+ for name in node.parent._astroid_fields:
+ child = getattr(node.parent, name)
+ if isinstance(child, (list, tuple)):
+ for idx, compound_child in enumerate(child):
+ if compound_child is node:
+ child[idx] = real_expr
+ elif child is node:
+ setattr(node.parent, name, real_expr)
+ yield real_expr
+ else:
+ for child in node.get_children():
+ yield from _extract_expressions(child)
+
+
+def _find_statement_by_line(node, line):
+ """Extracts the statement on a specific line from an AST.
+
+ If the line number of node matches line, it will be returned;
+ otherwise its children are iterated and the function is called
+ recursively.
+
+ :param node: An astroid node.
+ :type node: astroid.bases.NodeNG
+ :param line: The line number of the statement to extract.
+ :type line: int
+ :returns: The statement on the line, or None if no statement for the line
+ can be found.
+ :rtype: astroid.bases.NodeNG or None
+ """
+ if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)):
+ # This is an inaccuracy in the AST: the nodes that can be
+ # decorated do not carry explicit information on which line
+ # the actual definition (class/def) starts, but .fromlineno seems to
+ # be close enough.
+ node_line = node.fromlineno
+ else:
+ node_line = node.lineno
+
+ if node_line == line:
+ return node
+
+ for child in node.get_children():
+ result = _find_statement_by_line(child, line)
+ if result:
+ return result
+
+ return None
+
+
+def extract_node(code, module_name=""):
+ """Parses some Python code as a module and extracts a designated AST node.
+
+ Statements:
+ To extract one or more statement nodes, append #@ to the end of the line
+
+ Examples:
+ >>> def x():
+ >>> def y():
+ >>> return 1 #@
+
+ The return statement will be extracted.
+
+ >>> class X(object):
+ >>> def meth(self): #@
+ >>> pass
+
+ The function object 'meth' will be extracted.
+
+ Expressions:
+ To extract arbitrary expressions, surround them with the fake
+ function call __(...). After parsing, the surrounded expression
+ will be returned and the whole AST (accessible via the returned
+ node's parent attribute) will look like the function call was
+ never there in the first place.
+
+ Examples:
+ >>> a = __(1)
+
+ The const node will be extracted.
+
+ >>> def x(d=__(foo.bar)): pass
+
+ The node containing the default argument will be extracted.
+
+ >>> def foo(a, b):
+ >>> return 0 < __(len(a)) < b
+
+ The node containing the function call 'len' will be extracted.
+
+ If no statements or expressions are selected, the last toplevel
+ statement will be returned.
+
+ If the selected statement is a discard statement (i.e. an expression
+ turned into a statement), the wrapped expression is returned instead.
+
+ For convenience, singleton lists are unpacked.
+
+ :param str code: A piece of Python code that is parsed as
+ a module. Will be passed through textwrap.dedent first.
+ :param str module_name: The name of the module.
+ :returns: The designated node from the parse tree, or a list of nodes.
+ :rtype: astroid.bases.NodeNG, or a list of nodes.
+ """
+
+ def _extract(node):
+ if isinstance(node, nodes.Expr):
+ return node.value
+
+ return node
+
+ requested_lines = []
+ for idx, line in enumerate(code.splitlines()):
+ if line.strip().endswith(_STATEMENT_SELECTOR):
+ requested_lines.append(idx + 1)
+
+ tree = parse(code, module_name=module_name)
+ if not tree.body:
+ raise ValueError("Empty tree, cannot extract from it")
+
+ extracted = []
+ if requested_lines:
+ extracted = [_find_statement_by_line(tree, line) for line in requested_lines]
+
+ # Modifies the tree.
+ extracted.extend(_extract_expressions(tree))
+
+ if not extracted:
+ extracted.append(tree.body[-1])
+
+ extracted = [_extract(node) for node in extracted]
+ if len(extracted) == 1:
+ return extracted[0]
+ return extracted
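The two entry points defined here, parse() and extract_node(), are the usual way to obtain nodes elsewhere in the package. A small usage sketch, assuming the vendored astroid package is importable:

    import astroid

    # parse() returns a Module node for the whole source string.
    module = astroid.parse("""
    def add(a, b):
        return a + b
    """)
    print(module.body[0].name)  # 'add'

    # extract_node() uses the #@ selector to pick out a single statement.
    ret = astroid.extract_node("""
    def add(a, b):
        return a + b  #@
    """)
    print(ret)  # the Return node of add()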
diff --git a/src/main/python/venv/Lib/site-packages/astroid/context.py b/src/main/python/venv/Lib/site-packages/astroid/context.py
new file mode 100644
index 0000000..70a9208
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/context.py
@@ -0,0 +1,179 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Various context related utilities, including inference and call contexts."""
+import contextlib
+import pprint
+from typing import Optional
+
+
+class InferenceContext:
+ """Provide context for inference
+
+ Store already inferred nodes to save time
+ Account for already visited nodes to stop infinite recursion
+ """
+
+ __slots__ = (
+ "path",
+ "lookupname",
+ "callcontext",
+ "boundnode",
+ "inferred",
+ "extra_context",
+ )
+
+ def __init__(self, path=None, inferred=None):
+ self.path = path or set()
+ """
+ :type: set(tuple(NodeNG, optional(str)))
+
+ Path of visited nodes and their lookupname
+
+ Currently this key is ``(node, context.lookupname)``
+ """
+ self.lookupname = None
+ """
+ :type: optional[str]
+
+ The original name of the node
+
+ e.g.
+ foo = 1
+ The inference of 'foo' is nodes.Const(1) but the lookup name is 'foo'
+ """
+ self.callcontext = None
+ """
+ :type: optional[CallContext]
+
+ The call arguments and keywords for the given context
+ """
+ self.boundnode = None
+ """
+ :type: optional[NodeNG]
+
+ The bound node of the given context
+
+ e.g. the bound node of object.__new__(cls) is the object node
+ """
+ self.inferred = inferred or {}
+ """
+ :type: dict(seq, seq)
+
+ Inferred node contexts to their mapped results
+ Currently the key is ``(node, lookupname, callcontext, boundnode)``
+ and the value is tuple of the inferred results
+ """
+ self.extra_context = {}
+ """
+ :type: dict(NodeNG, Context)
+
+ Context that needs to be passed down through call stacks
+ for call arguments
+ """
+
+ def push(self, node):
+ """Push node into inference path
+
+ :return: True if node is already in context path else False
+ :rtype: bool
+
+ Allows one to see if the given node has already
+ been looked at for this inference context"""
+ name = self.lookupname
+ if (node, name) in self.path:
+ return True
+
+ self.path.add((node, name))
+ return False
+
+ def clone(self):
+ """Clone inference path
+
+ For example, each side of a binary operation (BinOp)
+ starts with the same context but diverge as each side is inferred
+ so the InferenceContext will need to be cloned"""
+ # XXX copy lookupname/callcontext ?
+ clone = InferenceContext(self.path, inferred=self.inferred)
+ clone.callcontext = self.callcontext
+ clone.boundnode = self.boundnode
+ clone.extra_context = self.extra_context
+ return clone
+
+ def cache_generator(self, key, generator):
+ """Cache result of generator into dictionary
+
+ Used to cache inference results"""
+ results = []
+ for result in generator:
+ results.append(result)
+ yield result
+
+ self.inferred[key] = tuple(results)
+
+ @contextlib.contextmanager
+ def restore_path(self):
+ path = set(self.path)
+ yield
+ self.path = path
+
+ def __str__(self):
+ state = (
+ "%s=%s"
+ % (field, pprint.pformat(getattr(self, field), width=80 - len(field)))
+ for field in self.__slots__
+ )
+ return "%s(%s)" % (type(self).__name__, ",\n ".join(state))
+
+
+class CallContext:
+ """Holds information for a call site."""
+
+ __slots__ = ("args", "keywords")
+
+ def __init__(self, args, keywords=None):
+ """
+ :param List[NodeNG] args: Call positional arguments
+ :param Union[List[nodes.Keyword], None] keywords: Call keywords
+ """
+ self.args = args
+ if keywords:
+ keywords = [(arg.arg, arg.value) for arg in keywords]
+ else:
+ keywords = []
+ self.keywords = keywords
+
+
+def copy_context(context: Optional[InferenceContext]) -> InferenceContext:
+ """Clone a context if given, or return a fresh contexxt"""
+ if context is not None:
+ return context.clone()
+
+ return InferenceContext()
+
+
+def bind_context_to_node(context, node):
+ """Give a context a boundnode
+ to retrieve the correct function name or attribute value
+ from further inference.
+
+ Do not use an existing context since the boundnode could then
+ be incorrectly propagated higher up in the call stack.
+
+ :param context: Context to use
+ :type context: Optional(context)
+
+ :param node: Node to do name lookups from
+ :type node: NodeNG
+
+ :returns: A new context
+ :rtype: InferenceContext
+ """
+ context = copy_context(context)
+ context.boundnode = node
+ return context
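A brief sketch of how callers thread a context through inference, assuming the vendored astroid package is importable: an explicit InferenceContext can be passed to a node's infer() call, and copy_context() is what the inference code uses to branch it.

    import astroid
    from astroid import context as contextmod

    name_node = astroid.extract_node("x = [1, 2]; x")
    ctx = contextmod.InferenceContext()
    print(next(name_node.infer(context=ctx)))  # the List node assigned to x
    print(len(ctx.path) > 0)                   # nodes visited while inferring

    # copy_context() clones an existing context, or creates a fresh one for None.
    branched = contextmod.copy_context(ctx)
    print(branched is not ctx)                 # True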
diff --git a/src/main/python/venv/Lib/site-packages/astroid/decorators.py b/src/main/python/venv/Lib/site-packages/astroid/decorators.py
new file mode 100644
index 0000000..1448757
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/decorators.py
@@ -0,0 +1,141 @@
+# Copyright (c) 2015-2016, 2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Ashley Whetter
+# Copyright (c) 2018 HoverHell
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+""" A few useful function/method decorators."""
+
+import functools
+
+import wrapt
+
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import util
+
+
+@wrapt.decorator
+def cached(func, instance, args, kwargs):
+ """Simple decorator to cache result of method calls without args."""
+ cache = getattr(instance, "__cache", None)
+ if cache is None:
+ instance.__cache = cache = {}
+ try:
+ return cache[func]
+ except KeyError:
+ cache[func] = result = func(*args, **kwargs)
+ return result
+
+
+class cachedproperty:
+ """ Provides a cached property equivalent to the stacking of
+ @cached and @property, but more efficient.
+
+ After first usage, the <property name> becomes part of the object's
+ __dict__. Doing:
+
+ del obj.<property name>
+
+ empties the cache.
+
+ Idea taken from the pyramid_ framework and the mercurial_ project.
+
+ .. _pyramid: http://pypi.python.org/pypi/pyramid
+ .. _mercurial: http://pypi.python.org/pypi/Mercurial
+ """
+
+ __slots__ = ("wrapped",)
+
+ def __init__(self, wrapped):
+ try:
+ wrapped.__name__
+ except AttributeError as exc:
+ raise TypeError("%s must have a __name__ attribute" % wrapped) from exc
+ self.wrapped = wrapped
+
+ @property
+ def __doc__(self):
+ doc = getattr(self.wrapped, "__doc__", None)
+ return "%s" % (
+ "\n%s" % doc if doc else ""
+ )
+
+ def __get__(self, inst, objtype=None):
+ if inst is None:
+ return self
+ val = self.wrapped(inst)
+ setattr(inst, self.wrapped.__name__, val)
+ return val
+
+
+def path_wrapper(func):
+ """return the given infer function wrapped to handle the path
+
+ Used to stop inference if the node has already been looked
+ at for a given `InferenceContext` to prevent infinite recursion
+ """
+
+ @functools.wraps(func)
+ def wrapped(node, context=None, _func=func, **kwargs):
+ """wrapper function handling context"""
+ if context is None:
+ context = contextmod.InferenceContext()
+ if context.push(node):
+ return None
+
+ yielded = set()
+ generator = _func(node, context, **kwargs)
+ try:
+ while True:
+ res = next(generator)
+ # unproxy only true instance, not const, tuple, dict...
+ if res.__class__.__name__ == "Instance":
+ ares = res._proxied
+ else:
+ ares = res
+ if ares not in yielded:
+ yield res
+ yielded.add(ares)
+ except StopIteration as error:
+ if error.args:
+ return error.args[0]
+ return None
+
+ return wrapped
+
+
+@wrapt.decorator
+def yes_if_nothing_inferred(func, instance, args, kwargs):
+ generator = func(*args, **kwargs)
+
+ try:
+ yield next(generator)
+ except StopIteration:
+ # generator is empty
+ yield util.Uninferable
+ return
+
+ yield from generator
+
+
+@wrapt.decorator
+def raise_if_nothing_inferred(func, instance, args, kwargs):
+ generator = func(*args, **kwargs)
+
+ try:
+ yield next(generator)
+ except StopIteration as error:
+ # generator is empty
+ if error.args:
+ # pylint: disable=not-a-mapping
+ raise exceptions.InferenceError(**error.args[0])
+ raise exceptions.InferenceError(
+ "StopIteration raised without any error information."
+ )
+
+ yield from generator
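cachedproperty is the decorator most often applied to node classes elsewhere in the package. The sketch below uses a made-up Table class (not part of astroid) purely to show the caching behaviour, assuming the vendored package is importable:

    from astroid.decorators import cachedproperty

    class Table:  # hypothetical example class, not part of astroid
        def __init__(self, rows):
            self.rows = rows

        @cachedproperty
        def row_count(self):
            print("computing row_count")
            return len(self.rows)

    t = Table([1, 2, 3])
    print(t.row_count)  # prints "computing row_count", then 3
    print(t.row_count)  # 3 again, served from the instance __dict__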
diff --git a/src/main/python/venv/Lib/site-packages/astroid/exceptions.py b/src/main/python/venv/Lib/site-packages/astroid/exceptions.py
new file mode 100644
index 0000000..7e9d655
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/exceptions.py
@@ -0,0 +1,230 @@
+# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2015-2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""this module contains exceptions used in the astroid library
+"""
+from astroid import util
+
+
+class AstroidError(Exception):
+ """base exception class for all astroid related exceptions
+
+ AstroidError and its subclasses are structured, intended to hold
+ objects representing state when the exception is thrown. Field
+ values are passed to the constructor as keyword-only arguments.
+ Each subclass has its own set of standard fields, but use your
+ best judgment to decide whether a specific exception instance
+ needs more or fewer fields for debugging. Field values may be
+ used to lazily generate the error message: self.message.format()
+ will be called with the field names and values supplied as keyword
+ arguments.
+ """
+
+ def __init__(self, message="", **kws):
+ super(AstroidError, self).__init__(message)
+ self.message = message
+ for key, value in kws.items():
+ setattr(self, key, value)
+
+ def __str__(self):
+ return self.message.format(**vars(self))
+
+
+class AstroidBuildingError(AstroidError):
+ """exception class when we are unable to build an astroid representation
+
+ Standard attributes:
+ modname: Name of the module that AST construction failed for.
+ error: Exception raised during construction.
+ """
+
+ def __init__(self, message="Failed to import module {modname}.", **kws):
+ super(AstroidBuildingError, self).__init__(message, **kws)
+
+
+class AstroidImportError(AstroidBuildingError):
+ """Exception class used when a module can't be imported by astroid."""
+
+
+class TooManyLevelsError(AstroidImportError):
+ """Exception class which is raised when a relative import was beyond the top-level.
+
+ Standard attributes:
+ level: The level which was attempted.
+ name: the name of the module on which the relative import was attempted.
+ """
+
+ level = None
+ name = None
+
+ def __init__(
+ self,
+ message="Relative import with too many levels " "({level}) for module {name!r}",
+ **kws
+ ):
+ super(TooManyLevelsError, self).__init__(message, **kws)
+
+
+class AstroidSyntaxError(AstroidBuildingError):
+ """Exception class used when a module can't be parsed."""
+
+
+class NoDefault(AstroidError):
+ """raised by function's `default_value` method when an argument has
+ no default value
+
+ Standard attributes:
+ func: Function node.
+ name: Name of argument without a default.
+ """
+
+ func = None
+ name = None
+
+ def __init__(self, message="{func!r} has no default for {name!r}.", **kws):
+ super(NoDefault, self).__init__(message, **kws)
+
+
+class ResolveError(AstroidError):
+ """Base class of astroid resolution/inference error.
+
+ ResolveError is not intended to be raised.
+
+ Standard attributes:
+ context: InferenceContext object.
+ """
+
+ context = None
+
+
+class MroError(ResolveError):
+ """Error raised when there is a problem with method resolution of a class.
+
+ Standard attributes:
+ mros: A sequence of sequences containing ClassDef nodes.
+ cls: ClassDef node whose MRO resolution failed.
+ context: InferenceContext object.
+ """
+
+ mros = ()
+ cls = None
+
+ def __str__(self):
+ mro_names = ", ".join(
+ "({})".format(", ".join(b.name for b in m)) for m in self.mros
+ )
+ return self.message.format(mros=mro_names, cls=self.cls)
+
+
+class DuplicateBasesError(MroError):
+ """Error raised when there are duplicate bases in the same class bases."""
+
+
+class InconsistentMroError(MroError):
+ """Error raised when a class's MRO is inconsistent."""
+
+
+class SuperError(ResolveError):
+ """Error raised when there is a problem with a *super* call.
+
+ Standard attributes:
+ *super_*: The Super instance that raised the exception.
+ context: InferenceContext object.
+ """
+
+ super_ = None
+
+ def __str__(self):
+ return self.message.format(**vars(self.super_))
+
+
+class InferenceError(ResolveError):
+ """raised when we are unable to infer a node
+
+ Standard attributes:
+ node: The node inference was called on.
+ context: InferenceContext object.
+ """
+
+ node = None
+ context = None
+
+ def __init__(self, message="Inference failed for {node!r}.", **kws):
+ super(InferenceError, self).__init__(message, **kws)
+
+
+# Why does this inherit from InferenceError rather than ResolveError?
+# Changing it causes some inference tests to fail.
+class NameInferenceError(InferenceError):
+ """Raised when a name lookup fails, corresponds to NameError.
+
+ Standard attributes:
+ name: The name for which lookup failed, as a string.
+ scope: The node representing the scope in which the lookup occurred.
+ context: InferenceContext object.
+ """
+
+ name = None
+ scope = None
+
+ def __init__(self, message="{name!r} not found in {scope!r}.", **kws):
+ super(NameInferenceError, self).__init__(message, **kws)
+
+
+class AttributeInferenceError(ResolveError):
+ """Raised when an attribute lookup fails, corresponds to AttributeError.
+
+ Standard attributes:
+ target: The node for which lookup failed.
+ attribute: The attribute for which lookup failed, as a string.
+ context: InferenceContext object.
+ """
+
+ target = None
+ attribute = None
+
+ def __init__(self, message="{attribute!r} not found on {target!r}.", **kws):
+ super(AttributeInferenceError, self).__init__(message, **kws)
+
+
+class UseInferenceDefault(Exception):
+ """exception to be raised in custom inference function to indicate that it
+ should go back to the default behaviour
+ """
+
+
+class _NonDeducibleTypeHierarchy(Exception):
+ """Raised when is_subtype / is_supertype can't deduce the relation between two types."""
+
+
+class AstroidIndexError(AstroidError):
+ """Raised when an Indexable / Mapping does not have an index / key."""
+
+
+class AstroidTypeError(AstroidError):
+ """Raised when a TypeError would be expected in Python code."""
+
+
+class InferenceOverwriteError(AstroidError):
+ """Raised when an inference tip is overwritten
+
+ Currently only used for debugging.
+ """
+
+
+# Backwards-compatibility aliases
+OperationError = util.BadOperationMessage
+UnaryOperationError = util.BadUnaryOperationMessage
+BinaryOperationError = util.BadBinaryOperationMessage
+
+SuperArgumentTypeError = SuperError
+UnresolvableName = NameInferenceError
+NotFoundError = AttributeInferenceError
+AstroidBuildingException = AstroidBuildingError
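As a small illustration of the lazy message formatting described in AstroidError (a sketch, assuming the vendored astroid package is importable): inferring an unbound name raises NameInferenceError, whose keyword fields are both attributes and format arguments.

    import astroid
    from astroid import exceptions

    node = astroid.extract_node("undefined_name")
    try:
        next(node.infer())
    except exceptions.InferenceError as exc:  # NameInferenceError subclasses it
        print(exc.name)  # 'undefined_name', stored as an attribute
        print(exc)       # message formatted lazily from the stored fields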
diff --git a/src/main/python/venv/Lib/site-packages/astroid/helpers.py b/src/main/python/venv/Lib/site-packages/astroid/helpers.py
new file mode 100644
index 0000000..be133b3
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/helpers.py
@@ -0,0 +1,273 @@
+# Copyright (c) 2015-2018 Claudiu Popa
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2018 Bryce Guinta
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+
+"""
+Various helper utilities.
+"""
+
+import builtins as builtins_mod
+
+from astroid import bases
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import manager
+from astroid import nodes
+from astroid import raw_building
+from astroid import scoped_nodes
+from astroid import util
+
+
+BUILTINS = builtins_mod.__name__
+
+
+def _build_proxy_class(cls_name, builtins):
+ proxy = raw_building.build_class(cls_name)
+ proxy.parent = builtins
+ return proxy
+
+
+def _function_type(function, builtins):
+ if isinstance(function, scoped_nodes.Lambda):
+ if function.root().name == BUILTINS:
+ cls_name = "builtin_function_or_method"
+ else:
+ cls_name = "function"
+ elif isinstance(function, bases.BoundMethod):
+ cls_name = "method"
+ elif isinstance(function, bases.UnboundMethod):
+ cls_name = "function"
+ return _build_proxy_class(cls_name, builtins)
+
+
+def _object_type(node, context=None):
+ astroid_manager = manager.AstroidManager()
+ builtins = astroid_manager.builtins_module
+ context = context or contextmod.InferenceContext()
+
+ for inferred in node.infer(context=context):
+ if isinstance(inferred, scoped_nodes.ClassDef):
+ if inferred.newstyle:
+ metaclass = inferred.metaclass(context=context)
+ if metaclass:
+ yield metaclass
+ continue
+ yield builtins.getattr("type")[0]
+ elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
+ yield _function_type(inferred, builtins)
+ elif isinstance(inferred, scoped_nodes.Module):
+ yield _build_proxy_class("module", builtins)
+ else:
+ yield inferred._proxied
+
+
+def object_type(node, context=None):
+ """Obtain the type of the given node
+
+ This is used to implement the ``type`` builtin, which means that it's
+ used for inferring type calls, as well as used in a couple of other places
+ in the inference.
+ The node will be inferred first, so this function can support all
+ sorts of objects, as long as they support inference.
+ """
+
+ try:
+ types = set(_object_type(node, context))
+ except exceptions.InferenceError:
+ return util.Uninferable
+ if len(types) > 1 or not types:
+ return util.Uninferable
+ return list(types)[0]
+
+
+def _object_type_is_subclass(obj_type, class_or_seq, context=None):
+ if not isinstance(class_or_seq, (tuple, list)):
+ class_seq = (class_or_seq,)
+ else:
+ class_seq = class_or_seq
+
+ if obj_type is util.Uninferable:
+ return util.Uninferable
+
+ # Instances are not types
+ class_seq = [
+ item if not isinstance(item, bases.Instance) else util.Uninferable
+ for item in class_seq
+ ]
+ # strict compatibility with issubclass
+ # issubclass(type, (object, 1)) evaluates to true
+ # issubclass(object, (1, type)) raises TypeError
+ for klass in class_seq:
+ if klass is util.Uninferable:
+ raise exceptions.AstroidTypeError("arg 2 must be a type or tuple of types")
+
+ for obj_subclass in obj_type.mro():
+ if obj_subclass == klass:
+ return True
+ return False
+
+
+def object_isinstance(node, class_or_seq, context=None):
+ """Check if a node 'isinstance' any node in class_or_seq
+
+ :param node: A given node
+ :param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]]
+ :rtype: bool
+
+ :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
+ """
+ obj_type = object_type(node, context)
+ if obj_type is util.Uninferable:
+ return util.Uninferable
+ return _object_type_is_subclass(obj_type, class_or_seq, context=context)
+
+
+def object_issubclass(node, class_or_seq, context=None):
+ """Check if a type is a subclass of any node in class_or_seq
+
+ :param node: A given node
+ :param class_or_seq: Union[Nodes.NodeNG, Sequence[nodes.NodeNG]]
+ :rtype: bool
+
+ :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
+ :raises AstroidError: if the type of the given node cannot be inferred
+ or its type's mro doesn't work
+ """
+ if not isinstance(node, nodes.ClassDef):
+ raise TypeError("{node} needs to be a ClassDef node".format(node=node))
+ return _object_type_is_subclass(node, class_or_seq, context=context)
+
+
+def safe_infer(node, context=None):
+ """Return the inferred value for the given node.
+
+ Return None if inference failed or if there is some ambiguity (more than
+ one node has been inferred).
+ """
+ try:
+ inferit = node.infer(context=context)
+ value = next(inferit)
+ except exceptions.InferenceError:
+ return None
+ try:
+ next(inferit)
+ return None # None if there is ambiguity on the inferred node
+ except exceptions.InferenceError:
+ return None # there is some kind of ambiguity
+ except StopIteration:
+ return value
+
+
+def has_known_bases(klass, context=None):
+ """Return true if all base classes of a class could be inferred."""
+ try:
+ return klass._all_bases_known
+ except AttributeError:
+ pass
+ for base in klass.bases:
+ result = safe_infer(base, context=context)
+ # TODO: check for A->B->A->B pattern in class structure too?
+ if (
+ not isinstance(result, scoped_nodes.ClassDef)
+ or result is klass
+ or not has_known_bases(result, context=context)
+ ):
+ klass._all_bases_known = False
+ return False
+ klass._all_bases_known = True
+ return True
+
+
+def _type_check(type1, type2):
+ if not all(map(has_known_bases, (type1, type2))):
+ raise exceptions._NonDeducibleTypeHierarchy
+
+ if not all([type1.newstyle, type2.newstyle]):
+ return False
+ try:
+ return type1 in type2.mro()[:-1]
+ except exceptions.MroError:
+ # The MRO is invalid.
+ raise exceptions._NonDeducibleTypeHierarchy
+
+
+def is_subtype(type1, type2):
+ """Check if *type1* is a subtype of *type2*."""
+ return _type_check(type1=type2, type2=type1)
+
+
+def is_supertype(type1, type2):
+ """Check if *type2* is a supertype of *type1*."""
+ return _type_check(type1, type2)
+
+
+def class_instance_as_index(node):
+ """Get the value as an index for the given instance.
+
+ If an instance provides an __index__ method, then it can
+ be used in some scenarios where an integer is expected,
+ for instance when multiplying or subscripting a list.
+ """
+ context = contextmod.InferenceContext()
+ context.callcontext = contextmod.CallContext(args=[node])
+
+ try:
+ for inferred in node.igetattr("__index__", context=context):
+ if not isinstance(inferred, bases.BoundMethod):
+ continue
+
+ for result in inferred.infer_call_result(node, context=context):
+ if isinstance(result, nodes.Const) and isinstance(result.value, int):
+ return result
+ except exceptions.InferenceError:
+ pass
+ return None
+
+
+def object_len(node, context=None):
+ """Infer length of given node object
+
+ :param Union[nodes.ClassDef, nodes.Instance] node: Node to infer length of
+
+ :raises AstroidTypeError: If an invalid node is returned
+ from __len__ method or no __len__ method exists
+ :raises InferenceError: If the given node cannot be inferred
+ or if multiple nodes are inferred
+ :returns: Integer length of node
+ :rtype: int
+ """
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid.objects import FrozenSet
+
+ inferred_node = safe_infer(node, context=context)
+ if inferred_node is None or inferred_node is util.Uninferable:
+ raise exceptions.InferenceError(node=node)
+ if isinstance(inferred_node, nodes.Const) and isinstance(
+ inferred_node.value, (bytes, str)
+ ):
+ return len(inferred_node.value)
+ if isinstance(inferred_node, (nodes.List, nodes.Set, nodes.Tuple, FrozenSet)):
+ return len(inferred_node.elts)
+ if isinstance(inferred_node, nodes.Dict):
+ return len(inferred_node.items)
+ try:
+ node_type = object_type(inferred_node, context=context)
+ len_call = next(node_type.igetattr("__len__", context=context))
+ except exceptions.AttributeInferenceError:
+ raise exceptions.AstroidTypeError(
+ "object of type '{}' has no len()".format(len_call.pytype())
+ )
+
+ result_of_len = next(len_call.infer_call_result(node, context))
+ if (
+ isinstance(result_of_len, nodes.Const)
+ and result_of_len.pytype() == "builtins.int"
+ ):
+ return result_of_len.value
+ raise exceptions.AstroidTypeError(
+ "'{}' object cannot be interpreted as an integer".format(result_of_len)
+ )
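safe_infer() and object_type() are the helpers most commonly used from other modules. A quick sketch of both, assuming the vendored astroid package is importable:

    import astroid
    from astroid import helpers

    node = astroid.extract_node("1 + 1")
    inferred = helpers.safe_infer(node)        # a single Const, or None if ambiguous
    print(inferred.value)                      # 2
    print(helpers.object_type(inferred).name)  # 'int', mirroring the type() builtin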
diff --git a/src/main/python/venv/Lib/site-packages/astroid/inference.py b/src/main/python/venv/Lib/site-packages/astroid/inference.py
new file mode 100644
index 0000000..77c6b1d
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/inference.py
@@ -0,0 +1,943 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2012 FELD Boris
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Dmitry Pribysh
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2017 Michał Masłowski
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 Ashley Whetter
+# Copyright (c) 2018 HoverHell
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""this module contains a set of functions to handle inference on astroid trees
+"""
+
+import functools
+import itertools
+import operator
+
+from astroid import bases
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import decorators
+from astroid import helpers
+from astroid import manager
+from astroid import nodes
+from astroid.interpreter import dunder_lookup
+from astroid import protocols
+from astroid import util
+
+
+MANAGER = manager.AstroidManager()
+
+
+# .infer method ###############################################################
+
+
+def infer_end(self, context=None):
+ """inference's end for node such as Module, ClassDef, FunctionDef,
+ Const...
+
+ """
+ yield self
+
+
+nodes.Module._infer = infer_end
+nodes.ClassDef._infer = infer_end
+nodes.FunctionDef._infer = infer_end
+nodes.Lambda._infer = infer_end
+nodes.Const._infer = infer_end
+nodes.Slice._infer = infer_end
+
+
+def _infer_sequence_helper(node, context=None):
+ """Infer all values based on _BaseContainer.elts"""
+ values = []
+
+ for elt in node.elts:
+ if isinstance(elt, nodes.Starred):
+ starred = helpers.safe_infer(elt.value, context)
+ if not starred:
+ raise exceptions.InferenceError(node=node, context=context)
+ if not hasattr(starred, "elts"):
+ raise exceptions.InferenceError(node=node, context=context)
+ values.extend(_infer_sequence_helper(starred))
+ elif isinstance(elt, nodes.NamedExpr):
+ value = helpers.safe_infer(elt.value, context)
+ if not value:
+ raise exceptions.InferenceError(node=node, context=context)
+ values.append(value)
+ else:
+ values.append(elt)
+ return values
+
+
+@decorators.raise_if_nothing_inferred
+def infer_sequence(self, context=None):
+ has_starred_named_expr = any(
+ isinstance(e, (nodes.Starred, nodes.NamedExpr)) for e in self.elts
+ )
+ if has_starred_named_expr:
+ values = _infer_sequence_helper(self, context)
+ new_seq = type(self)(
+ lineno=self.lineno, col_offset=self.col_offset, parent=self.parent
+ )
+ new_seq.postinit(values)
+
+ yield new_seq
+ else:
+ yield self
+
+
+nodes.List._infer = infer_sequence
+nodes.Tuple._infer = infer_sequence
+nodes.Set._infer = infer_sequence
+
+
+def infer_map(self, context=None):
+ if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items):
+ yield self
+ else:
+ items = _infer_map(self, context)
+ new_seq = type(self)(self.lineno, self.col_offset, self.parent)
+ new_seq.postinit(list(items.items()))
+ yield new_seq
+
+
+def _update_with_replacement(lhs_dict, rhs_dict):
+ """Delete nodes that equate to duplicate keys
+
+ Since an astroid node doesn't 'equal' another node with the same value,
+ this function uses the as_string method to make sure duplicate keys
+ don't get through
+
+ Note that both the key and the value are astroid nodes
+
+ Fixes an issue with DictUnpack causing duplicate keys
+ in inferred Dict items
+
+ :param dict(nodes.NodeNG, nodes.NodeNG) lhs_dict: Dictionary to 'merge' nodes into
+ :param dict(nodes.NodeNG, nodes.NodeNG) rhs_dict: Dictionary with nodes to pull from
+ :return dict(nodes.NodeNG, nodes.NodeNG): merged dictionary of nodes
+ """
+ combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items())
+ # Overwrite keys which have the same string values
+ string_map = {key.as_string(): (key, value) for key, value in combined_dict}
+ # Return to dictionary
+ return dict(string_map.values())
+
+
+def _infer_map(node, context):
+ """Infer all values based on Dict.items"""
+ values = {}
+ for name, value in node.items:
+ if isinstance(name, nodes.DictUnpack):
+ double_starred = helpers.safe_infer(value, context)
+ if not double_starred:
+ raise exceptions.InferenceError
+ if not isinstance(double_starred, nodes.Dict):
+ raise exceptions.InferenceError(node=node, context=context)
+ unpack_items = _infer_map(double_starred, context)
+ values = _update_with_replacement(values, unpack_items)
+ else:
+ key = helpers.safe_infer(name, context=context)
+ value = helpers.safe_infer(value, context=context)
+ if any(not elem for elem in (key, value)):
+ raise exceptions.InferenceError(node=node, context=context)
+ values = _update_with_replacement(values, {key: value})
+ return values
+
+
+nodes.Dict._infer = infer_map
+
+
+def _higher_function_scope(node):
+ """ Search for the first function which encloses the given
+ scope. This can be used for looking up in that function's
+ scope, in case looking up in a lower scope for a particular
+ name fails.
+
+ :param node: A scope node.
+ :returns:
+ ``None``, if no parent function scope was found,
+ otherwise an instance of :class:`astroid.scoped_nodes.Function`,
+ which encloses the given node.
+ """
+ current = node
+ while current.parent and not isinstance(current.parent, nodes.FunctionDef):
+ current = current.parent
+ if current and current.parent:
+ return current.parent
+ return None
+
+
+def infer_name(self, context=None):
+ """infer a Name: use name lookup rules"""
+ frame, stmts = self.lookup(self.name)
+ if not stmts:
+ # Try to see if the name is enclosed in a nested function
+ # and use the higher (first function) scope for searching.
+ parent_function = _higher_function_scope(self.scope())
+ if parent_function:
+ _, stmts = parent_function.lookup(self.name)
+
+ if not stmts:
+ raise exceptions.NameInferenceError(
+ name=self.name, scope=self.scope(), context=context
+ )
+ context = contextmod.copy_context(context)
+ context.lookupname = self.name
+ return bases._infer_stmts(stmts, context, frame)
+
+
+# pylint: disable=no-value-for-parameter
+nodes.Name._infer = decorators.raise_if_nothing_inferred(
+ decorators.path_wrapper(infer_name)
+)
+nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_call(self, context=None):
+ """infer a Call node by trying to guess what the function returns"""
+ callcontext = contextmod.copy_context(context)
+ callcontext.callcontext = contextmod.CallContext(
+ args=self.args, keywords=self.keywords
+ )
+ callcontext.boundnode = None
+ if context is not None:
+ callcontext.extra_context = _populate_context_lookup(self, context.clone())
+
+ for callee in self.func.infer(context):
+ if callee is util.Uninferable:
+ yield callee
+ continue
+ try:
+ if hasattr(callee, "infer_call_result"):
+ yield from callee.infer_call_result(caller=self, context=callcontext)
+ except exceptions.InferenceError:
+ continue
+ return dict(node=self, context=context)
+
+
+nodes.Call._infer = infer_call
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_import(self, context=None, asname=True):
+ """infer an Import node: return the imported module/object"""
+ name = context.lookupname
+ if name is None:
+ raise exceptions.InferenceError(node=self, context=context)
+
+ try:
+ if asname:
+ yield self.do_import_module(self.real_name(name))
+ else:
+ yield self.do_import_module(name)
+ except exceptions.AstroidBuildingError as exc:
+ raise exceptions.InferenceError(node=self, context=context) from exc
+
+
+nodes.Import._infer = infer_import
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_import_from(self, context=None, asname=True):
+ """infer a ImportFrom node: return the imported module/object"""
+ name = context.lookupname
+ if name is None:
+ raise exceptions.InferenceError(node=self, context=context)
+ if asname:
+ name = self.real_name(name)
+
+ try:
+ module = self.do_import_module()
+ except exceptions.AstroidBuildingError as exc:
+ raise exceptions.InferenceError(node=self, context=context) from exc
+
+ try:
+ context = contextmod.copy_context(context)
+ context.lookupname = name
+ stmts = module.getattr(name, ignore_locals=module is self.root())
+ return bases._infer_stmts(stmts, context)
+ except exceptions.AttributeInferenceError as error:
+ raise exceptions.InferenceError(
+ error.message, target=self, attribute=name, context=context
+ ) from error
+
+
+nodes.ImportFrom._infer = infer_import_from
+
+
+def infer_attribute(self, context=None):
+ """infer an Attribute node by using getattr on the associated object"""
+ for owner in self.expr.infer(context):
+ if owner is util.Uninferable:
+ yield owner
+ continue
+
+ if context and context.boundnode:
+ # This handles the situation where the attribute is accessed through a subclass
+ # of a base class and the attribute is defined at the base class's level,
+ # by taking in consideration a redefinition in the subclass.
+ if isinstance(owner, bases.Instance) and isinstance(
+ context.boundnode, bases.Instance
+ ):
+ try:
+ if helpers.is_subtype(
+ helpers.object_type(context.boundnode),
+ helpers.object_type(owner),
+ ):
+ owner = context.boundnode
+ except exceptions._NonDeducibleTypeHierarchy:
+ # Can't determine anything useful.
+ pass
+
+ try:
+ context.boundnode = owner
+ yield from owner.igetattr(self.attrname, context)
+ context.boundnode = None
+ except (exceptions.AttributeInferenceError, exceptions.InferenceError):
+ context.boundnode = None
+ except AttributeError:
+ # XXX method / function
+ context.boundnode = None
+ return dict(node=self, context=context)
+
+
+nodes.Attribute._infer = decorators.raise_if_nothing_inferred(
+ decorators.path_wrapper(infer_attribute)
+)
+# won't work with a path wrapper
+nodes.AssignAttr.infer_lhs = decorators.raise_if_nothing_inferred(infer_attribute)
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_global(self, context=None):
+ if context.lookupname is None:
+ raise exceptions.InferenceError(node=self, context=context)
+ try:
+ return bases._infer_stmts(self.root().getattr(context.lookupname), context)
+ except exceptions.AttributeInferenceError as error:
+ raise exceptions.InferenceError(
+ error.message, target=self, attribute=context.lookupname, context=context
+ ) from error
+
+
+nodes.Global._infer = infer_global
+
+
+_SUBSCRIPT_SENTINEL = object()
+
+
+@decorators.raise_if_nothing_inferred
+def infer_subscript(self, context=None):
+ """Inference for subscripts
+
+ We determine whether the index is a Const or a slice,
+ then pass the result of inference to the value's `getitem`
+ method, which should handle each supported index type
+ accordingly.
+ """
+
+ found_one = False
+ for value in self.value.infer(context):
+ if value is util.Uninferable:
+ yield util.Uninferable
+ return None
+ for index in self.slice.infer(context):
+ if index is util.Uninferable:
+ yield util.Uninferable
+ return None
+
+ # Try to deduce the index value.
+ index_value = _SUBSCRIPT_SENTINEL
+ if value.__class__ == bases.Instance:
+ index_value = index
+ else:
+ if index.__class__ == bases.Instance:
+ instance_as_index = helpers.class_instance_as_index(index)
+ if instance_as_index:
+ index_value = instance_as_index
+ else:
+ index_value = index
+ if index_value is _SUBSCRIPT_SENTINEL:
+ raise exceptions.InferenceError(node=self, context=context)
+
+ try:
+ assigned = value.getitem(index_value, context)
+ except (
+ exceptions.AstroidTypeError,
+ exceptions.AstroidIndexError,
+ exceptions.AttributeInferenceError,
+ AttributeError,
+ ) as exc:
+ raise exceptions.InferenceError(node=self, context=context) from exc
+
+ # Prevent inferring if the inferred subscript
+ # is the same as the original subscripted object.
+ if self is assigned or assigned is util.Uninferable:
+ yield util.Uninferable
+ return None
+ yield from assigned.infer(context)
+ found_one = True
+
+ if found_one:
+ return dict(node=self, context=context)
+ return None
+
+
+nodes.Subscript._infer = decorators.path_wrapper(infer_subscript)
+nodes.Subscript.infer_lhs = infer_subscript
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def _infer_boolop(self, context=None):
+ """Infer a boolean operation (and / or / not).
+
+ The function will calculate the boolean operation
+ for all pairs generated through inference for each component
+ node.
+ """
+ values = self.values
+ if self.op == "or":
+ predicate = operator.truth
+ else:
+ predicate = operator.not_
+
+ try:
+ values = [value.infer(context=context) for value in values]
+ except exceptions.InferenceError:
+ yield util.Uninferable
+ return None
+
+ for pair in itertools.product(*values):
+ if any(item is util.Uninferable for item in pair):
+ # Can't infer the final result, just yield Uninferable.
+ yield util.Uninferable
+ continue
+
+ bool_values = [item.bool_value() for item in pair]
+ if any(item is util.Uninferable for item in bool_values):
+ # Can't infer the final result, just yield Uninferable.
+ yield util.Uninferable
+ continue
+
+ # Since the boolean operations are short circuited operations,
+ # this code yields the first value for which the predicate is True
+ # and if no value respected the predicate, then the last value will
+ # be returned (or Uninferable if there was no last value).
+ # This is conforming to the semantics of `and` and `or`:
+ # 1 and 0 -> 1
+ # 0 and 1 -> 0
+ # 1 or 0 -> 1
+ # 0 or 1 -> 1
+ value = util.Uninferable
+ for value, bool_value in zip(pair, bool_values):
+ if predicate(bool_value):
+ yield value
+ break
+ else:
+ yield value
+
+ return dict(node=self, context=context)
+
+
+nodes.BoolOp._infer = _infer_boolop
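A sketch of the short-circuit behaviour documented above, assuming the vendored astroid package is importable: `or` infers to the first truthy operand and `and` to the first falsy one, when every operand can be inferred.

    import astroid

    print(next(astroid.extract_node("0 or 3").infer()).value)   # 3
    print(next(astroid.extract_node("1 and 0").infer()).value)  # 0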
+
+
+# UnaryOp, BinOp and AugAssign inferences
+
+
+def _filter_operation_errors(self, infer_callable, context, error):
+ for result in infer_callable(self, context):
+ if isinstance(result, error):
+ # For the sake of .infer(), we don't care about operation
+ # errors, which is the job of pylint. So return something
+ # which shows that we can't infer the result.
+ yield util.Uninferable
+ else:
+ yield result
+
+
+def _infer_unaryop(self, context=None):
+ """Infer what an UnaryOp should return when evaluated."""
+ for operand in self.operand.infer(context):
+ try:
+ yield operand.infer_unary_op(self.op)
+ except TypeError as exc:
+ # The operand doesn't support this operation.
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ except AttributeError as exc:
+ meth = protocols.UNARY_OP_METHOD[self.op]
+ if meth is None:
+ # `not node`. Determine node's boolean
+ # value and negate its result, unless it is
+ # Uninferable, which will be returned as is.
+ bool_value = operand.bool_value()
+ if bool_value is not util.Uninferable:
+ yield nodes.const_factory(not bool_value)
+ else:
+ yield util.Uninferable
+ else:
+ if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
+ # The operation was used on something which
+ # doesn't support it.
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ continue
+
+ try:
+ try:
+ methods = dunder_lookup.lookup(operand, meth)
+ except exceptions.AttributeInferenceError:
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ continue
+
+ meth = methods[0]
+ inferred = next(meth.infer(context=context))
+ if inferred is util.Uninferable or not inferred.callable():
+ continue
+
+ context = contextmod.copy_context(context)
+ context.callcontext = contextmod.CallContext(args=[operand])
+ call_results = inferred.infer_call_result(self, context=context)
+ result = next(call_results, None)
+ if result is None:
+ # Failed to infer, return the same type.
+ yield operand
+ else:
+ yield result
+ except exceptions.AttributeInferenceError as exc:
+ # The unary operation special method was not found.
+ yield util.BadUnaryOperationMessage(operand, self.op, exc)
+ except exceptions.InferenceError:
+ yield util.Uninferable
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_unaryop(self, context=None):
+ """Infer what an UnaryOp should return when evaluated."""
+ yield from _filter_operation_errors(
+ self, _infer_unaryop, context, util.BadUnaryOperationMessage
+ )
+ return dict(node=self, context=context)
+
+
+nodes.UnaryOp._infer_unaryop = _infer_unaryop
+nodes.UnaryOp._infer = infer_unaryop
+
+
+def _is_not_implemented(const):
+ """Check if the given const node is NotImplemented."""
+ return isinstance(const, nodes.Const) and const.value is NotImplemented
+
+
+def _invoke_binop_inference(instance, opnode, op, other, context, method_name):
+ """Invoke binary operation inference on the given instance."""
+ methods = dunder_lookup.lookup(instance, method_name)
+ context = contextmod.bind_context_to_node(context, instance)
+ method = methods[0]
+ inferred = next(method.infer(context=context))
+ if inferred is util.Uninferable:
+ raise exceptions.InferenceError
+ return instance.infer_binary_op(opnode, op, other, context, inferred)
+
+
+def _aug_op(instance, opnode, op, other, context, reverse=False):
+ """Get an inference callable for an augmented binary operation."""
+ method_name = protocols.AUGMENTED_OP_METHOD[op]
+ return functools.partial(
+ _invoke_binop_inference,
+ instance=instance,
+ op=op,
+ opnode=opnode,
+ other=other,
+ context=context,
+ method_name=method_name,
+ )
+
+
+def _bin_op(instance, opnode, op, other, context, reverse=False):
+ """Get an inference callable for a normal binary operation.
+
+ If *reverse* is True, then the reflected method will be used instead.
+ """
+ if reverse:
+ method_name = protocols.REFLECTED_BIN_OP_METHOD[op]
+ else:
+ method_name = protocols.BIN_OP_METHOD[op]
+ return functools.partial(
+ _invoke_binop_inference,
+ instance=instance,
+ op=op,
+ opnode=opnode,
+ other=other,
+ context=context,
+ method_name=method_name,
+ )
+
+
+def _get_binop_contexts(context, left, right):
+ """Get contexts for binary operations.
+
+ This will return two inference contexts, the first one
+ for x.__op__(y), the other one for y.__rop__(x), where
+ only the arguments are swapped.
+ """
+ # The order is important, since the first one should be
+ # left.__op__(right).
+ for arg in (right, left):
+ new_context = context.clone()
+ new_context.callcontext = contextmod.CallContext(args=[arg])
+ new_context.boundnode = None
+ yield new_context
+
+
+def _same_type(type1, type2):
+ """Check if type1 is the same as type2."""
+ return type1.qname() == type2.qname()
+
+
+def _get_binop_flow(
+ left, left_type, binary_opnode, right, right_type, context, reverse_context
+):
+ """Get the flow for binary operations.
+
+ The rules are a bit messy:
+
+ * if left and right have the same type, then only one
+ method will be called, left.__op__(right)
+ * if left and right are unrelated typewise, then first
+ left.__op__(right) is tried and if this does not exist
+ or returns NotImplemented, then right.__rop__(left) is tried.
+ * if left is a subtype of right, then only left.__op__(right)
+ is tried.
+ * if left is a supertype of right, then right.__rop__(left)
+ is first tried and then left.__op__(right)
+ """
+ op = binary_opnode.op
+ if _same_type(left_type, right_type):
+ methods = [_bin_op(left, binary_opnode, op, right, context)]
+ elif helpers.is_subtype(left_type, right_type):
+ methods = [_bin_op(left, binary_opnode, op, right, context)]
+ elif helpers.is_supertype(left_type, right_type):
+ methods = [
+ _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
+ _bin_op(left, binary_opnode, op, right, context),
+ ]
+ else:
+ methods = [
+ _bin_op(left, binary_opnode, op, right, context),
+ _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
+ ]
+ return methods
+
+
+def _get_aug_flow(
+ left, left_type, aug_opnode, right, right_type, context, reverse_context
+):
+ """Get the flow for augmented binary operations.
+
+ The rules are a bit messy:
+
+ * if left and right have the same type, then left.__augop__(right)
+ is first tried and then left.__op__(right).
+ * if left and right are unrelated typewise, then
+ left.__augop__(right) is tried, then left.__op__(right)
+ is tried and then right.__rop__(left) is tried.
+ * if left is a subtype of right, then left.__augop__(right)
+ is tried and then left.__op__(right).
+ * if left is a supertype of right, then left.__augop__(right)
+ is tried, then right.__rop__(left) and then
+ left.__op__(right)
+ """
+ bin_op = aug_opnode.op.strip("=")
+ aug_op = aug_opnode.op
+ if _same_type(left_type, right_type):
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ ]
+ elif helpers.is_subtype(left_type, right_type):
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ ]
+ elif helpers.is_supertype(left_type, right_type):
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ ]
+ else:
+ methods = [
+ _aug_op(left, aug_opnode, aug_op, right, context),
+ _bin_op(left, aug_opnode, bin_op, right, context),
+ _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
+ ]
+ return methods
+
+
+def _infer_binary_operation(left, right, binary_opnode, context, flow_factory):
+ """Infer a binary operation between a left operand and a right operand
+
+ This is used by both normal binary operations and augmented binary
+ operations; the only difference is the flow factory used.
+ """
+
+ context, reverse_context = _get_binop_contexts(context, left, right)
+ left_type = helpers.object_type(left)
+ right_type = helpers.object_type(right)
+ methods = flow_factory(
+ left, left_type, binary_opnode, right, right_type, context, reverse_context
+ )
+ for method in methods:
+ try:
+ results = list(method())
+ except AttributeError:
+ continue
+ except exceptions.AttributeInferenceError:
+ continue
+ except exceptions.InferenceError:
+ yield util.Uninferable
+ return
+ else:
+ if any(result is util.Uninferable for result in results):
+ yield util.Uninferable
+ return
+
+ if all(map(_is_not_implemented, results)):
+ continue
+ not_implemented = sum(
+ 1 for result in results if _is_not_implemented(result)
+ )
+ if not_implemented and not_implemented != len(results):
+ # Can't infer yet what this is.
+ yield util.Uninferable
+ return
+
+ yield from results
+ return
+ # The operation doesn't seem to be supported so let the caller know about it
+ yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type)
+
+
+def _infer_binop(self, context):
+ """Binary operation inference logic."""
+ left = self.left
+ right = self.right
+
+ # We use two separate contexts for evaluating lhs and rhs because
+ # evaluating lhs may leave some undesired entries in context.path
+ # which may keep us from inferring the right value of rhs.
+ context = context or contextmod.InferenceContext()
+ lhs_context = contextmod.copy_context(context)
+ rhs_context = contextmod.copy_context(context)
+ lhs_iter = left.infer(context=lhs_context)
+ rhs_iter = right.infer(context=rhs_context)
+ for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
+ if any(value is util.Uninferable for value in (rhs, lhs)):
+ # Don't know how to process this.
+ yield util.Uninferable
+ return
+
+ try:
+ yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow)
+ except exceptions._NonDeducibleTypeHierarchy:
+ yield util.Uninferable
+
+
+@decorators.yes_if_nothing_inferred
+@decorators.path_wrapper
+def infer_binop(self, context=None):
+ return _filter_operation_errors(
+ self, _infer_binop, context, util.BadBinaryOperationMessage
+ )
+
+
+nodes.BinOp._infer_binop = _infer_binop
+nodes.BinOp._infer = infer_binop
+
+
+def _infer_augassign(self, context=None):
+ """Inference logic for augmented binary operations."""
+ if context is None:
+ context = contextmod.InferenceContext()
+
+ rhs_context = context.clone()
+
+ lhs_iter = self.target.infer_lhs(context=context)
+ rhs_iter = self.value.infer(context=rhs_context)
+ for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
+ if any(value is util.Uninferable for value in (rhs, lhs)):
+ # Don't know how to process this.
+ yield util.Uninferable
+ return
+
+ try:
+ yield from _infer_binary_operation(
+ left=lhs,
+ right=rhs,
+ binary_opnode=self,
+ context=context,
+ flow_factory=_get_aug_flow,
+ )
+ except exceptions._NonDeducibleTypeHierarchy:
+ yield util.Uninferable
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_augassign(self, context=None):
+ return _filter_operation_errors(
+ self, _infer_augassign, context, util.BadBinaryOperationMessage
+ )
+
+
+nodes.AugAssign._infer_augassign = _infer_augassign
+nodes.AugAssign._infer = infer_augassign
+
+# End of binary operation inference.
+
+
+@decorators.raise_if_nothing_inferred
+def infer_arguments(self, context=None):
+ name = context.lookupname
+ if name is None:
+ raise exceptions.InferenceError(node=self, context=context)
+ return protocols._arguments_infer_argname(self, name, context)
+
+
+nodes.Arguments._infer = infer_arguments
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_assign(self, context=None):
+ """infer a AssignName/AssignAttr: need to inspect the RHS part of the
+ assign node
+ """
+ stmt = self.statement()
+ if isinstance(stmt, nodes.AugAssign):
+ return stmt.infer(context)
+
+ stmts = list(self.assigned_stmts(context=context))
+ return bases._infer_stmts(stmts, context)
+
+
+nodes.AssignName._infer = infer_assign
+nodes.AssignAttr._infer = infer_assign
+
+
+@decorators.raise_if_nothing_inferred
+@decorators.path_wrapper
+def infer_empty_node(self, context=None):
+ if not self.has_underlying_object():
+ yield util.Uninferable
+ else:
+ try:
+ yield from MANAGER.infer_ast_from_something(self.object, context=context)
+ except exceptions.AstroidError:
+ yield util.Uninferable
+
+
+nodes.EmptyNode._infer = infer_empty_node
+
+
+@decorators.raise_if_nothing_inferred
+def infer_index(self, context=None):
+ return self.value.infer(context)
+
+
+nodes.Index._infer = infer_index
+
+# TODO: move directly into bases.Instance when the dependency hell
+# is solved.
+def instance_getitem(self, index, context=None):
+ # Rewrap index to Const for this case
+ new_context = contextmod.bind_context_to_node(context, self)
+ if not context:
+ context = new_context
+
+ # Create a new callcontext for providing index as an argument.
+ new_context.callcontext = contextmod.CallContext(args=[index])
+
+ method = next(self.igetattr("__getitem__", context=context), None)
+ if not isinstance(method, bases.BoundMethod):
+ raise exceptions.InferenceError(
+ "Could not find __getitem__ for {node!r}.", node=self, context=context
+ )
+
+ return next(method.infer_call_result(self, new_context))
+
+
+bases.Instance.getitem = instance_getitem
+
+
+def _populate_context_lookup(call, context):
+ # Allows context to be saved for later
+ # for inference inside a function
+ context_lookup = {}
+ if context is None:
+ return context_lookup
+ for arg in call.args:
+ if isinstance(arg, nodes.Starred):
+ context_lookup[arg.value] = context
+ else:
+ context_lookup[arg] = context
+ keywords = call.keywords if call.keywords is not None else []
+ for keyword in keywords:
+ context_lookup[keyword.value] = context
+ return context_lookup
+
+
+@decorators.raise_if_nothing_inferred
+def infer_ifexp(self, context=None):
+ """Support IfExp inference
+
+ If we can't infer the truthiness of the condition, we default
+ to inferring both branches. Otherwise, we infer either branch
+ depending on the condition.
+ """
+ both_branches = False
+ # We use two separate contexts for evaluating lhs and rhs because
+ # evaluating lhs may leave some undesired entries in context.path
+ # which may not let us infer right value of rhs.
+
+ context = context or contextmod.InferenceContext()
+ lhs_context = contextmod.copy_context(context)
+ rhs_context = contextmod.copy_context(context)
+ try:
+ test = next(self.test.infer(context=context.clone()))
+ except exceptions.InferenceError:
+ both_branches = True
+ else:
+ if test is not util.Uninferable:
+ if test.bool_value():
+ yield from self.body.infer(context=lhs_context)
+ else:
+ yield from self.orelse.infer(context=rhs_context)
+ else:
+ both_branches = True
+ if both_branches:
+ yield from self.body.infer(context=lhs_context)
+ yield from self.orelse.infer(context=rhs_context)
+
+
+nodes.IfExp._infer = infer_ifexp
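The dispatch rules spelled out in the `_get_binop_flow` and `_get_aug_flow` docstrings above mirror CPython's own binary-operator protocol, which this inference code tries to reproduce statically. A minimal runtime sketch of that protocol, using an illustrative `Meters` class that is not part of astroid:

    class Meters:
        def __init__(self, value):
            self.value = value

        def __add__(self, other):
            if isinstance(other, Meters):
                return Meters(self.value + other.value)
            return NotImplemented          # hand over to the reflected method

        def __radd__(self, other):
            if isinstance(other, (int, float)):
                return Meters(other + self.value)
            return NotImplemented

    print((Meters(2) + Meters(3)).value)   # 5: same type, only __add__ is tried
    print((1 + Meters(3)).value)           # 4: int.__add__ returns NotImplemented,
                                           #    so Meters.__radd__(1) is used instead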
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/__init__.py b/src/main/python/venv/Lib/site-packages/astroid/interpreter/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..1bd9d33
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/__init__.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc
new file mode 100644
index 0000000..4001903
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/dunder_lookup.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc
new file mode 100644
index 0000000..dc15f91
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/interpreter/__pycache__/objectmodel.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..6cdce33
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/__init__.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc
new file mode 100644
index 0000000..1f091df
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/spec.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc
new file mode 100644
index 0000000..2a7fdbb
Binary files /dev/null and b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/__pycache__/util.cpython-37.pyc differ
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/spec.py b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/spec.py
new file mode 100644
index 0000000..84e093b
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/spec.py
@@ -0,0 +1,344 @@
+# Copyright (c) 2016-2018 Claudiu Popa
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017 Chris Philip
+# Copyright (c) 2017 Hugo
+# Copyright (c) 2017 ioanatia
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Nick Drozd
+
+import abc
+import collections
+import distutils
+import enum
+import imp
+import os
+import sys
+import zipimport
+
+try:
+ import importlib.machinery
+
+ _HAS_MACHINERY = True
+except ImportError:
+ _HAS_MACHINERY = False
+
+try:
+ from functools import lru_cache
+except ImportError:
+ from backports.functools_lru_cache import lru_cache
+
+from . import util
+
+ModuleType = enum.Enum(
+ "ModuleType",
+ "C_BUILTIN C_EXTENSION PKG_DIRECTORY "
+ "PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE "
+ "PY_SOURCE PY_ZIPMODULE PY_NAMESPACE",
+)
+_ImpTypes = {
+ imp.C_BUILTIN: ModuleType.C_BUILTIN,
+ imp.C_EXTENSION: ModuleType.C_EXTENSION,
+ imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY,
+ imp.PY_COMPILED: ModuleType.PY_COMPILED,
+ imp.PY_FROZEN: ModuleType.PY_FROZEN,
+ imp.PY_SOURCE: ModuleType.PY_SOURCE,
+}
+if hasattr(imp, "PY_RESOURCE"):
+ _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE
+if hasattr(imp, "PY_CODERESOURCE"):
+ _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE
+
+
+def _imp_type_to_module_type(imp_type):
+ return _ImpTypes[imp_type]
+
+
+_ModuleSpec = collections.namedtuple(
+ "_ModuleSpec", "name type location " "origin submodule_search_locations"
+)
+
+
+class ModuleSpec(_ModuleSpec):
+ """Defines a class similar to PEP 420's ModuleSpec
+
+ A module spec defines a name of a module, its type, location
+ and where submodules can be found, if the module is a package.
+ """
+
+ def __new__(
+ cls,
+ name,
+ module_type,
+ location=None,
+ origin=None,
+ submodule_search_locations=None,
+ ):
+ return _ModuleSpec.__new__(
+ cls,
+ name=name,
+ type=module_type,
+ location=location,
+ origin=origin,
+ submodule_search_locations=submodule_search_locations,
+ )
+
+
+class Finder:
+ """A finder is a class which knows how to find a particular module."""
+
+ def __init__(self, path=None):
+ self._path = path or sys.path
+
+ @abc.abstractmethod
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ """Find the given module
+
+ Each finder is responsible for its own finding protocol, as long as
+ it returns a ModuleSpec.
+
+ :param str modname: The module which needs to be searched.
+ :param list module_parts: It should be a list of strings,
+ where each part contributes to the module's
+ namespace.
+ :param list processed: What parts from the module parts were processed
+ so far.
+ :param list submodule_path: A list of paths where the module
+ can be looked into.
+ :returns: A ModuleSpec, describing how and where the module was found,
+ or None otherwise.
+ """
+
+ def contribute_to_path(self, spec, processed):
+ """Get a list of extra paths where this finder can search."""
+
+
+class ImpFinder(Finder):
+ """A finder based on the imp module."""
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ if submodule_path is not None:
+ submodule_path = list(submodule_path)
+ try:
+ stream, mp_filename, mp_desc = imp.find_module(modname, submodule_path)
+ except ImportError:
+ return None
+
+ # Close resources.
+ if stream:
+ stream.close()
+
+ return ModuleSpec(
+ name=modname,
+ location=mp_filename,
+ module_type=_imp_type_to_module_type(mp_desc[2]),
+ )
+
+ def contribute_to_path(self, spec, processed):
+ if spec.location is None:
+ # Builtin.
+ return None
+
+ if _is_setuptools_namespace(spec.location):
+ # extend_path is called, search sys.path for module/packages
+ # of this name see pkgutil.extend_path documentation
+ path = [
+ os.path.join(p, *processed)
+ for p in sys.path
+ if os.path.isdir(os.path.join(p, *processed))
+ ]
+ # We already import distutils elsewhere in astroid,
+ # so if it is the same module, we can use it directly.
+ elif spec.name == "distutils" and spec.location in distutils.__path__:
+ # distutils is patched inside virtualenvs to pick up submodules
+ # from the original Python, not from the virtualenv itself.
+ path = list(distutils.__path__)
+ else:
+ path = [spec.location]
+ return path
+
+
+class ExplicitNamespacePackageFinder(ImpFinder):
+ """A finder for the explicit namespace packages, generated through pkg_resources."""
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ if processed:
+ modname = ".".join(processed + [modname])
+ if util.is_namespace(modname) and modname in sys.modules:
+ submodule_path = sys.modules[modname].__path__
+ return ModuleSpec(
+ name=modname,
+ location="",
+ origin="namespace",
+ module_type=ModuleType.PY_NAMESPACE,
+ submodule_search_locations=submodule_path,
+ )
+ return None
+
+ def contribute_to_path(self, spec, processed):
+ return spec.submodule_search_locations
+
+
+class ZipFinder(Finder):
+ """Finder that knows how to find a module inside zip files."""
+
+ def __init__(self, path):
+ super(ZipFinder, self).__init__(path)
+ self._zipimporters = _precache_zipimporters(path)
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ try:
+ file_type, filename, path = _search_zip(module_parts, self._zipimporters)
+ except ImportError:
+ return None
+
+ return ModuleSpec(
+ name=modname,
+ location=filename,
+ origin="egg",
+ module_type=file_type,
+ submodule_search_locations=path,
+ )
+
+
+class PathSpecFinder(Finder):
+ """Finder based on importlib.machinery.PathFinder."""
+
+ def find_module(self, modname, module_parts, processed, submodule_path):
+ spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path)
+ if spec:
+ # origin can be either a string on older Python versions
+ # or None in case it is a namespace package:
+ # https://github.com/python/cpython/pull/5481
+ is_namespace_pkg = spec.origin in ("namespace", None)
+ location = spec.origin if not is_namespace_pkg else None
+ module_type = ModuleType.PY_NAMESPACE if is_namespace_pkg else None
+ spec = ModuleSpec(
+ name=spec.name,
+ location=location,
+ origin=spec.origin,
+ module_type=module_type,
+ submodule_search_locations=list(spec.submodule_search_locations or []),
+ )
+ return spec
+
+ def contribute_to_path(self, spec, processed):
+ if spec.type == ModuleType.PY_NAMESPACE:
+ return spec.submodule_search_locations
+ return None
+
+
+_SPEC_FINDERS = (ImpFinder, ZipFinder)
+if _HAS_MACHINERY:
+ _SPEC_FINDERS += (PathSpecFinder,)
+_SPEC_FINDERS += (ExplicitNamespacePackageFinder,)
+
+
+def _is_setuptools_namespace(location):
+ try:
+ with open(os.path.join(location, "__init__.py"), "rb") as stream:
+ data = stream.read(4096)
+ except IOError:
+ pass
+ else:
+ extend_path = b"pkgutil" in data and b"extend_path" in data
+ declare_namespace = (
+ b"pkg_resources" in data and b"declare_namespace(__name__)" in data
+ )
+ return extend_path or declare_namespace
+
+
+@lru_cache()
+def _cached_set_diff(left, right):
+ result = set(left)
+ result.difference_update(right)
+ return result
+
+
+def _precache_zipimporters(path=None):
+ pic = sys.path_importer_cache
+
+ # When measured, despite having the same complexity (O(n)),
+ # converting to tuples and then caching the conversion to sets
+ # and the set difference is faster than converting to sets
+ # and then only caching the set difference.
+
+ req_paths = tuple(path or sys.path)
+ cached_paths = tuple(pic)
+ new_paths = _cached_set_diff(req_paths, cached_paths)
+ for entry_path in new_paths:
+ try:
+ pic[entry_path] = zipimport.zipimporter(entry_path)
+ except zipimport.ZipImportError:
+ continue
+ return pic
+
+
+def _search_zip(modpath, pic):
+ for filepath, importer in list(pic.items()):
+ if importer is not None:
+ found = importer.find_module(modpath[0])
+ if found:
+ if not importer.find_module(os.path.sep.join(modpath)):
+ raise ImportError(
+ "No module named %s in %s/%s"
+ % (".".join(modpath[1:]), filepath, modpath)
+ )
+ # import code; code.interact(local=locals())
+ return (
+ ModuleType.PY_ZIPMODULE,
+ os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
+ filepath,
+ )
+ raise ImportError("No module named %s" % ".".join(modpath))
+
+
+def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path):
+ finders = [finder(search_path) for finder in _SPEC_FINDERS]
+ for finder in finders:
+ spec = finder.find_module(modname, module_parts, processed, submodule_path)
+ if spec is None:
+ continue
+ return finder, spec
+
+ raise ImportError("No module named %s" % ".".join(module_parts))
+
+
+def find_spec(modpath, path=None):
+ """Find a spec for the given module.
+
+ :type modpath: list or tuple
+ :param modpath:
+ split module's name (i.e name of a module or package split
+ on '.'), with leading empty strings for explicit relative import
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :rtype: ModuleSpec
+ :return: A module spec, which describes how the module was
+ found and where.
+ """
+ _path = path or sys.path
+
+ # Need a copy for not mutating the argument.
+ modpath = modpath[:]
+
+ submodule_path = None
+ module_parts = modpath[:]
+ processed = []
+
+ while modpath:
+ modname = modpath.pop(0)
+ finder, spec = _find_spec_with_path(
+ _path, modname, module_parts, processed, submodule_path or path
+ )
+ processed.append(modname)
+ if modpath:
+ submodule_path = finder.contribute_to_path(spec, processed)
+
+ if spec.type == ModuleType.PKG_DIRECTORY:
+ spec = spec._replace(submodule_search_locations=submodule_path)
+
+ return spec
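As a rough usage sketch, the `find_spec` function defined above walks the dotted module path one component at a time, letting each finder contribute search locations for the next step. On a typical CPython installation the following should resolve a stdlib submodule (the printed location will vary per machine):

    from astroid.interpreter._import import spec

    found = spec.find_spec(["logging", "handlers"])
    print(found.name)        # "handlers" (the last processed component)
    print(found.type)        # ModuleType.PY_SOURCE
    print(found.location)    # e.g. ".../lib/python3.7/logging/handlers.py"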
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/util.py b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/util.py
new file mode 100644
index 0000000..a917bd3
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/interpreter/_import/util.py
@@ -0,0 +1,10 @@
+# Copyright (c) 2016, 2018 Claudiu Popa
+
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
+
+
+def is_namespace(modname):
+ return pkg_resources is not None and modname in pkg_resources._namespace_packages
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py b/src/main/python/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py
new file mode 100644
index 0000000..0ae9bc9
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/interpreter/dunder_lookup.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2016-2018 Claudiu Popa
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Contains logic for retrieving special methods.
+
+This implementation does not rely on the dot attribute access
+logic, found in ``.getattr()``. The difference between these two
+is that the dunder methods are looked up using the type slots
+(you can find more about these here
+http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/)
+As such, the lookup for the special methods is actually simpler than
+the dot attribute access.
+"""
+import itertools
+
+import astroid
+from astroid import exceptions
+
+
+def _lookup_in_mro(node, name):
+ attrs = node.locals.get(name, [])
+
+ nodes = itertools.chain.from_iterable(
+ ancestor.locals.get(name, []) for ancestor in node.ancestors(recurs=True)
+ )
+ values = list(itertools.chain(attrs, nodes))
+ if not values:
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+ return values
+
+
+def lookup(node, name):
+ """Lookup the given special method name in the given *node*
+
+ If the special method was found, then a list of attributes
+ will be returned. Otherwise, `astroid.AttributeInferenceError`
+ is going to be raised.
+ """
+ if isinstance(
+ node, (astroid.List, astroid.Tuple, astroid.Const, astroid.Dict, astroid.Set)
+ ):
+ return _builtin_lookup(node, name)
+ if isinstance(node, astroid.Instance):
+ return _lookup_in_mro(node, name)
+ if isinstance(node, astroid.ClassDef):
+ return _class_lookup(node, name)
+
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+
+def _class_lookup(node, name):
+ metaclass = node.metaclass()
+ if metaclass is None:
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+ return _lookup_in_mro(metaclass, name)
+
+
+def _builtin_lookup(node, name):
+ values = node.locals.get(name, [])
+ if not values:
+ raise exceptions.AttributeInferenceError(attribute=name, target=node)
+
+ return values
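A small sketch of how `lookup` is typically used by the inference code: the special method is resolved through the type slots of an inferred instance rather than through ordinary dot-attribute access. The `Point` snippet is illustrative only, and astroid's top-level `extract_node` test helper (available in astroid 2.x) is assumed:

    import textwrap

    import astroid
    from astroid.interpreter import dunder_lookup

    source = textwrap.dedent("""
        class Point:
            def __len__(self):
                return 2

        Point()
    """)
    instance = next(astroid.extract_node(source).infer())   # an Instance of Point
    methods = dunder_lookup.lookup(instance, "__len__")
    print(methods[0].name)                                  # "__len__"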
diff --git a/src/main/python/venv/Lib/site-packages/astroid/interpreter/objectmodel.py b/src/main/python/venv/Lib/site-packages/astroid/interpreter/objectmodel.py
new file mode 100644
index 0000000..5e488d9
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/interpreter/objectmodel.py
@@ -0,0 +1,738 @@
+# Copyright (c) 2016-2018 Claudiu Popa
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017-2018 Bryce Guinta
+# Copyright (c) 2017 Ceridwen
+# Copyright (c) 2017 Calen Pennington
+# Copyright (c) 2018 Nick Drozd
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+"""
+Data object model, as per https://docs.python.org/3/reference/datamodel.html.
+
+This module describes, at least partially, a data object model for some
+of astroid's nodes. The model contains the special attributes that nodes
+such as functions, classes and modules have (for example __doc__, __class__
+and __module__), which are used when doing attribute lookups over nodes.
+
+For instance, inferring `obj.__class__` will first trigger an inference
+of the `obj` variable. If it was successfully inferred, then an attribute
+`__class__` will be looked for in the inferred object. This is the part
+where the data model occurs. The model is attached to those nodes
+and the lookup mechanism will try to see if attributes such as
+`__class__` are defined by the model or not. If they are defined,
+the model will be requested to return the corresponding value of that
+attribute. Thus the model can be viewed as a special part of the lookup
+mechanism.
+"""
+
+import itertools
+import pprint
+import os
+import types
+from functools import lru_cache
+
+import astroid
+from astroid import context as contextmod
+from astroid import exceptions
+from astroid import node_classes
+
+
+IMPL_PREFIX = "attr_"
+
+
+def _dunder_dict(instance, attributes):
+ obj = node_classes.Dict(parent=instance)
+
+ # Convert the keys to node strings
+ keys = [
+ node_classes.Const(value=value, parent=obj) for value in list(attributes.keys())
+ ]
+
+ # The original attribute has a list of elements for each key,
+ # but that is not useful for retrieving the special attribute's value.
+ # In this case, we're picking the last value from each list.
+ values = [elem[-1] for elem in attributes.values()]
+
+ obj.postinit(list(zip(keys, values)))
+ return obj
+
+
+class ObjectModel:
+ def __init__(self):
+ self._instance = None
+
+ def __repr__(self):
+ result = []
+ cname = type(self).__name__
+ string = "%(cname)s(%(fields)s)"
+ alignment = len(cname) + 1
+ for field in sorted(self.attributes()):
+ width = 80 - len(field) - alignment
+ lines = pprint.pformat(field, indent=2, width=width).splitlines(True)
+
+ inner = [lines[0]]
+ for line in lines[1:]:
+ inner.append(" " * alignment + line)
+ result.append(field)
+
+ return string % {
+ "cname": cname,
+ "fields": (",\n" + " " * alignment).join(result),
+ }
+
+ def __call__(self, instance):
+ self._instance = instance
+ return self
+
+ def __get__(self, instance, cls=None):
+ # ObjectModel needs to be a descriptor so that just doing
+ # `special_attributes = SomeObjectModel` should be enough in the body of a node.
+ # But at the same time, node.special_attributes should return an object
+ # which can be used for manipulating the special attributes. That's the reason
+ # we pass the instance through which it got accessed to ObjectModel.__call__,
+ # returning itself afterwards, so we can still have access to the
+ # underlying data model and to the instance for which it got accessed.
+ return self(instance)
+
+ def __contains__(self, name):
+ return name in self.attributes()
+
+ @lru_cache(maxsize=None)
+ def attributes(self):
+ """Get the attributes which are exported by this object model."""
+ return [
+ obj[len(IMPL_PREFIX) :] for obj in dir(self) if obj.startswith(IMPL_PREFIX)
+ ]
+
+ def lookup(self, name):
+ """Look up the given *name* in the current model
+
+ It should return an AST or an interpreter object,
+ but if the name is not found, then an AttributeInferenceError will be raised.
+ """
+
+ if name in self.attributes():
+ return getattr(self, IMPL_PREFIX + name)
+ raise exceptions.AttributeInferenceError(target=self._instance, attribute=name)
+
+
+class ModuleModel(ObjectModel):
+ def _builtins(self):
+ builtins_ast_module = astroid.MANAGER.builtins_module
+ return builtins_ast_module.special_attributes.lookup("__dict__")
+
+ @property
+ def attr_builtins(self):
+ return self._builtins()
+
+ @property
+ def attr___path__(self):
+ if not self._instance.package:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="__path__"
+ )
+
+ path_objs = [
+ node_classes.Const(
+ value=path
+ if not path.endswith("__init__.py")
+ else os.path.dirname(path),
+ parent=self._instance,
+ )
+ for path in self._instance.path
+ ]
+
+ container = node_classes.List(parent=self._instance)
+ container.postinit(path_objs)
+
+ return container
+
+ @property
+ def attr___name__(self):
+ return node_classes.Const(value=self._instance.name, parent=self._instance)
+
+ @property
+ def attr___doc__(self):
+ return node_classes.Const(value=self._instance.doc, parent=self._instance)
+
+ @property
+ def attr___file__(self):
+ return node_classes.Const(value=self._instance.file, parent=self._instance)
+
+ @property
+ def attr___dict__(self):
+ return _dunder_dict(self._instance, self._instance.globals)
+
+ @property
+ def attr___package__(self):
+ if not self._instance.package:
+ value = ""
+ else:
+ value = self._instance.name
+
+ return node_classes.Const(value=value, parent=self._instance)
+
+ # These are related to the Python 3 implementation of the
+ # import system,
+ # https://docs.python.org/3/reference/import.html#import-related-module-attributes
+
+ @property
+ def attr___spec__(self):
+ # No handling for now.
+ return node_classes.Unknown()
+
+ @property
+ def attr___loader__(self):
+ # No handling for now.
+ return node_classes.Unknown()
+
+ @property
+ def attr___cached__(self):
+ # No handling for now.
+ return node_classes.Unknown()
+
+
+class FunctionModel(ObjectModel):
+ @property
+ def attr___name__(self):
+ return node_classes.Const(value=self._instance.name, parent=self._instance)
+
+ @property
+ def attr___doc__(self):
+ return node_classes.Const(value=self._instance.doc, parent=self._instance)
+
+ @property
+ def attr___qualname__(self):
+ return node_classes.Const(value=self._instance.qname(), parent=self._instance)
+
+ @property
+ def attr___defaults__(self):
+ func = self._instance
+ if not func.args.defaults:
+ return node_classes.Const(value=None, parent=func)
+
+ defaults_obj = node_classes.Tuple(parent=func)
+ defaults_obj.postinit(func.args.defaults)
+ return defaults_obj
+
+ @property
+ def attr___annotations__(self):
+ obj = node_classes.Dict(parent=self._instance)
+
+ if not self._instance.returns:
+ returns = None
+ else:
+ returns = self._instance.returns
+
+ args = self._instance.args
+ pair_annotations = itertools.chain(
+ zip(args.args or [], args.annotations),
+ zip(args.kwonlyargs, args.kwonlyargs_annotations),
+ zip(args.posonlyargs or [], args.posonlyargs_annotations),
+ )
+
+ annotations = {
+ arg.name: annotation for (arg, annotation) in pair_annotations if annotation
+ }
+ if args.varargannotation:
+ annotations[args.vararg] = args.varargannotation
+ if args.kwargannotation:
+ annotations[args.kwarg] = args.kwargannotation
+ if returns:
+ annotations["return"] = returns
+
+ items = [
+ (node_classes.Const(key, parent=obj), value)
+ for (key, value) in annotations.items()
+ ]
+
+ obj.postinit(items)
+ return obj
+
+ @property
+ def attr___dict__(self):
+ return node_classes.Dict(parent=self._instance)
+
+ attr___globals__ = attr___dict__
+
+ @property
+ def attr___kwdefaults__(self):
+ def _default_args(args, parent):
+ for arg in args.kwonlyargs:
+ try:
+ default = args.default_value(arg.name)
+ except exceptions.NoDefault:
+ continue
+
+ name = node_classes.Const(arg.name, parent=parent)
+ yield name, default
+
+ args = self._instance.args
+ obj = node_classes.Dict(parent=self._instance)
+ defaults = dict(_default_args(args, obj))
+
+ obj.postinit(list(defaults.items()))
+ return obj
+
+ @property
+ def attr___module__(self):
+ return node_classes.Const(self._instance.root().qname())
+
+ @property
+ def attr___get__(self):
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import bases
+
+ func = self._instance
+
+ class DescriptorBoundMethod(bases.BoundMethod):
+ """Bound method which knows how to understand calling descriptor binding."""
+
+ def implicit_parameters(self):
+ # Different than BoundMethod since the signature
+ # is different.
+ return 0
+
+ def infer_call_result(self, caller, context=None):
+ if len(caller.args) > 2 or len(caller.args) < 1:
+ raise exceptions.InferenceError(
+ "Invalid arguments for descriptor binding",
+ target=self,
+ context=context,
+ )
+
+ context = contextmod.copy_context(context)
+ cls = next(caller.args[0].infer(context=context))
+
+ if cls is astroid.Uninferable:
+ raise exceptions.InferenceError(
+ "Invalid class inferred", target=self, context=context
+ )
+
+ # For some reason func is a Node that the below
+ # code is not expecting
+ if isinstance(func, bases.BoundMethod):
+ yield func
+ return
+
+ # Rebuild the original value, but with the parent set as the
+ # class where it will be bound.
+ new_func = func.__class__(
+ name=func.name,
+ doc=func.doc,
+ lineno=func.lineno,
+ col_offset=func.col_offset,
+ parent=cls,
+ )
+ # pylint: disable=no-member
+ new_func.postinit(func.args, func.body, func.decorators, func.returns)
+
+ # Build a proper bound method that points to our newly built function.
+ proxy = bases.UnboundMethod(new_func)
+ yield bases.BoundMethod(proxy=proxy, bound=cls)
+
+ @property
+ def args(self):
+ """Overwrite the underlying args to match those of the underlying func
+
+ Usually the underlying *func* is a function/method, as in:
+
+ def test(self):
+ pass
+
+ This has only the *self* parameter but when we access test.__get__
+ we get a new object which has two parameters, *self* and *type*.
+ """
+ nonlocal func
+ positional_or_keyword_params = func.args.args.copy()
+ positional_or_keyword_params.append(astroid.AssignName(name="type"))
+
+ positional_only_params = func.args.posonlyargs.copy()
+
+ arguments = astroid.Arguments(parent=func.args.parent)
+ arguments.postinit(
+ args=positional_or_keyword_params,
+ posonlyargs=positional_only_params,
+ defaults=[],
+ kwonlyargs=[],
+ kw_defaults=[],
+ annotations=[],
+ )
+ return arguments
+
+ return DescriptorBoundMethod(proxy=self._instance, bound=self._instance)
+
+ # These are here just for completion.
+ @property
+ def attr___ne__(self):
+ return node_classes.Unknown()
+
+ attr___subclasshook__ = attr___ne__
+ attr___str__ = attr___ne__
+ attr___sizeof__ = attr___ne__
+ attr___setattr___ = attr___ne__
+ attr___repr__ = attr___ne__
+ attr___reduce__ = attr___ne__
+ attr___reduce_ex__ = attr___ne__
+ attr___new__ = attr___ne__
+ attr___lt__ = attr___ne__
+ attr___eq__ = attr___ne__
+ attr___gt__ = attr___ne__
+ attr___format__ = attr___ne__
+ attr___delattr___ = attr___ne__
+ attr___getattribute__ = attr___ne__
+ attr___hash__ = attr___ne__
+ attr___init__ = attr___ne__
+ attr___dir__ = attr___ne__
+ attr___call__ = attr___ne__
+ attr___class__ = attr___ne__
+ attr___closure__ = attr___ne__
+ attr___code__ = attr___ne__
+
+
+class ClassModel(ObjectModel):
+ @property
+ def attr___module__(self):
+ return node_classes.Const(self._instance.root().qname())
+
+ @property
+ def attr___name__(self):
+ return node_classes.Const(self._instance.name)
+
+ @property
+ def attr___qualname__(self):
+ return node_classes.Const(self._instance.qname())
+
+ @property
+ def attr___doc__(self):
+ return node_classes.Const(self._instance.doc)
+
+ @property
+ def attr___mro__(self):
+ if not self._instance.newstyle:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="__mro__"
+ )
+
+ mro = self._instance.mro()
+ obj = node_classes.Tuple(parent=self._instance)
+ obj.postinit(mro)
+ return obj
+
+ @property
+ def attr_mro(self):
+ if not self._instance.newstyle:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="mro"
+ )
+
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import bases
+
+ other_self = self
+
+ # Cls.mro is a method and we need to return one in order to have a proper inference.
+ # The method we're returning is capable of inferring the underlying MRO though.
+ class MroBoundMethod(bases.BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield other_self.attr___mro__
+
+ implicit_metaclass = self._instance.implicit_metaclass()
+ mro_method = implicit_metaclass.locals["mro"][0]
+ return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass)
+
+ @property
+ def attr___bases__(self):
+ obj = node_classes.Tuple()
+ context = contextmod.InferenceContext()
+ elts = list(self._instance._inferred_bases(context))
+ obj.postinit(elts=elts)
+ return obj
+
+ @property
+ def attr___class__(self):
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import helpers
+
+ return helpers.object_type(self._instance)
+
+ @property
+ def attr___subclasses__(self):
+ """Get the subclasses of the underlying class
+
+ This looks only in the current module for retrieving the subclasses,
+ thus it might miss a couple of them.
+ """
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import bases
+ from astroid import scoped_nodes
+
+ if not self._instance.newstyle:
+ raise exceptions.AttributeInferenceError(
+ target=self._instance, attribute="__subclasses__"
+ )
+
+ qname = self._instance.qname()
+ root = self._instance.root()
+ classes = [
+ cls
+ for cls in root.nodes_of_class(scoped_nodes.ClassDef)
+ if cls != self._instance and cls.is_subtype_of(qname)
+ ]
+
+ obj = node_classes.List(parent=self._instance)
+ obj.postinit(classes)
+
+ class SubclassesBoundMethod(bases.BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield obj
+
+ implicit_metaclass = self._instance.implicit_metaclass()
+ subclasses_method = implicit_metaclass.locals["__subclasses__"][0]
+ return SubclassesBoundMethod(proxy=subclasses_method, bound=implicit_metaclass)
+
+ @property
+ def attr___dict__(self):
+ return node_classes.Dict(parent=self._instance)
+
+
+class SuperModel(ObjectModel):
+ @property
+ def attr___thisclass__(self):
+ return self._instance.mro_pointer
+
+ @property
+ def attr___self_class__(self):
+ return self._instance._self_class
+
+ @property
+ def attr___self__(self):
+ return self._instance.type
+
+ @property
+ def attr___class__(self):
+ return self._instance._proxied
+
+
+class UnboundMethodModel(ObjectModel):
+ @property
+ def attr___class__(self):
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import helpers
+
+ return helpers.object_type(self._instance)
+
+ @property
+ def attr___func__(self):
+ return self._instance._proxied
+
+ @property
+ def attr___self__(self):
+ return node_classes.Const(value=None, parent=self._instance)
+
+ attr_im_func = attr___func__
+ attr_im_class = attr___class__
+ attr_im_self = attr___self__
+
+
+class BoundMethodModel(FunctionModel):
+ @property
+ def attr___func__(self):
+ return self._instance._proxied._proxied
+
+ @property
+ def attr___self__(self):
+ return self._instance.bound
+
+
+class GeneratorModel(FunctionModel):
+ def __new__(cls, *args, **kwargs):
+ # Append the values from the GeneratorType onto this object.
+ ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs)
+ generator = astroid.MANAGER.builtins_module["generator"]
+ for name, values in generator.locals.items():
+ method = values[0]
+ patched = lambda cls, meth=method: meth
+
+ setattr(type(ret), IMPL_PREFIX + name, property(patched))
+
+ return ret
+
+ @property
+ def attr___name__(self):
+ return node_classes.Const(
+ value=self._instance.parent.name, parent=self._instance
+ )
+
+ @property
+ def attr___doc__(self):
+ return node_classes.Const(
+ value=self._instance.parent.doc, parent=self._instance
+ )
+
+
+class AsyncGeneratorModel(GeneratorModel):
+ def __new__(cls, *args, **kwargs):
+ # Append the values from the AGeneratorType onto this object.
+ ret = super().__new__(cls, *args, **kwargs)
+ astroid_builtins = astroid.MANAGER.builtins_module
+ generator = astroid_builtins.get("async_generator")
+ if generator is None:
+ # Make it backward compatible.
+ generator = astroid_builtins.get("generator")
+
+ for name, values in generator.locals.items():
+ method = values[0]
+ patched = lambda cls, meth=method: meth
+
+ setattr(type(ret), IMPL_PREFIX + name, property(patched))
+
+ return ret
+
+
+class InstanceModel(ObjectModel):
+ @property
+ def attr___class__(self):
+ return self._instance._proxied
+
+ @property
+ def attr___module__(self):
+ return node_classes.Const(self._instance.root().qname())
+
+ @property
+ def attr___doc__(self):
+ return node_classes.Const(self._instance.doc)
+
+ @property
+ def attr___dict__(self):
+ return _dunder_dict(self._instance, self._instance.instance_attrs)
+
+
+# Exception instances
+
+
+class ExceptionInstanceModel(InstanceModel):
+ @property
+ def attr_args(self):
+ message = node_classes.Const("")
+ args = node_classes.Tuple(parent=self._instance)
+ args.postinit((message,))
+ return args
+
+ @property
+ def attr___traceback__(self):
+ builtins_ast_module = astroid.MANAGER.builtins_module
+ traceback_type = builtins_ast_module[types.TracebackType.__name__]
+ return traceback_type.instantiate_class()
+
+
+class SyntaxErrorInstanceModel(ExceptionInstanceModel):
+ @property
+ def attr_text(self):
+ return node_classes.Const("")
+
+
+class OSErrorInstanceModel(ExceptionInstanceModel):
+ @property
+ def attr_filename(self):
+ return node_classes.Const("")
+
+ @property
+ def attr_errno(self):
+ return node_classes.Const(0)
+
+ @property
+ def attr_strerror(self):
+ return node_classes.Const("")
+
+ attr_filename2 = attr_filename
+
+
+class ImportErrorInstanceModel(ExceptionInstanceModel):
+ @property
+ def attr_name(self):
+ return node_classes.Const("")
+
+ @property
+ def attr_path(self):
+ return node_classes.Const("")
+
+
+BUILTIN_EXCEPTIONS = {
+ "builtins.SyntaxError": SyntaxErrorInstanceModel,
+ "builtins.ImportError": ImportErrorInstanceModel,
+ # These are all similar to OSError in terms of attributes
+ "builtins.OSError": OSErrorInstanceModel,
+ "builtins.BlockingIOError": OSErrorInstanceModel,
+ "builtins.BrokenPipeError": OSErrorInstanceModel,
+ "builtins.ChildProcessError": OSErrorInstanceModel,
+ "builtins.ConnectionAbortedError": OSErrorInstanceModel,
+ "builtins.ConnectionError": OSErrorInstanceModel,
+ "builtins.ConnectionRefusedError": OSErrorInstanceModel,
+ "builtins.ConnectionResetError": OSErrorInstanceModel,
+ "builtins.FileExistsError": OSErrorInstanceModel,
+ "builtins.FileNotFoundError": OSErrorInstanceModel,
+ "builtins.InterruptedError": OSErrorInstanceModel,
+ "builtins.IsADirectoryError": OSErrorInstanceModel,
+ "builtins.NotADirectoryError": OSErrorInstanceModel,
+ "builtins.PermissionError": OSErrorInstanceModel,
+ "builtins.ProcessLookupError": OSErrorInstanceModel,
+ "builtins.TimeoutError": OSErrorInstanceModel,
+}
+
+
+class DictModel(ObjectModel):
+ @property
+ def attr___class__(self):
+ return self._instance._proxied
+
+ def _generic_dict_attribute(self, obj, name):
+ """Generate a bound method that can infer the given *obj*."""
+
+ class DictMethodBoundMethod(astroid.BoundMethod):
+ def infer_call_result(self, caller, context=None):
+ yield obj
+
+ meth = next(self._instance._proxied.igetattr(name))
+ return DictMethodBoundMethod(proxy=meth, bound=self._instance)
+
+ @property
+ def attr_items(self):
+ elems = []
+ obj = node_classes.List(parent=self._instance)
+ for key, value in self._instance.items:
+ elem = node_classes.Tuple(parent=obj)
+ elem.postinit((key, value))
+ elems.append(elem)
+ obj.postinit(elts=elems)
+
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import objects
+
+ obj = objects.DictItems(obj)
+ return self._generic_dict_attribute(obj, "items")
+
+ @property
+ def attr_keys(self):
+ keys = [key for (key, _) in self._instance.items]
+ obj = node_classes.List(parent=self._instance)
+ obj.postinit(elts=keys)
+
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import objects
+
+ obj = objects.DictKeys(obj)
+ return self._generic_dict_attribute(obj, "keys")
+
+ @property
+ def attr_values(self):
+
+ values = [value for (_, value) in self._instance.items]
+ obj = node_classes.List(parent=self._instance)
+ obj.postinit(values)
+
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid import objects
+
+ obj = objects.DictValues(obj)
+ return self._generic_dict_attribute(obj, "values")
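The effect of these models can be seen when inferring a special attribute on a node: for example, `__doc__` on a function is answered by `FunctionModel.attr___doc__` rather than by source-level attribute lookup. A minimal sketch, assuming astroid's `extract_node` helper is available as in astroid 2.x (the `greet` function is illustrative only):

    import textwrap

    import astroid

    source = textwrap.dedent("""
        def greet():
            '''Say hello.'''
            return "hello"

        greet.__doc__  #@
    """)
    node = astroid.extract_node(source)
    inferred = next(node.infer())
    print(inferred.value)    # "Say hello."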
diff --git a/src/main/python/venv/Lib/site-packages/astroid/manager.py b/src/main/python/venv/Lib/site-packages/astroid/manager.py
new file mode 100644
index 0000000..e5fd0d6
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/manager.py
@@ -0,0 +1,337 @@
+# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 BioGeek
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2017 Iva Miholic
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""astroid manager: avoid multiple astroid build of a same module when
+possible by providing a class responsible to get astroid representation
+from various source and using a cache of built modules)
+"""
+
+import os
+import zipimport
+
+from astroid import exceptions
+from astroid.interpreter._import import spec
+from astroid import modutils
+from astroid import transforms
+
+
+ZIP_IMPORT_EXTS = (".zip", ".egg", ".whl")
+
+
+def safe_repr(obj):
+ try:
+ return repr(obj)
+ except Exception: # pylint: disable=broad-except
+ return "???"
+
+
+class AstroidManager:
+ """the astroid manager, responsible to build astroid from files
+ or modules.
+
+ Use the Borg pattern.
+ """
+
+ name = "astroid loader"
+ brain = {}
+
+ def __init__(self):
+ self.__dict__ = AstroidManager.brain
+ if not self.__dict__:
+ # NOTE: cache entries are added by the [re]builder
+ self.astroid_cache = {}
+ self._mod_file_cache = {}
+ self._failed_import_hooks = []
+ self.always_load_extensions = False
+ self.optimize_ast = False
+ self.extension_package_whitelist = set()
+ self._transform = transforms.TransformVisitor()
+
+ # Export these APIs for convenience
+ self.register_transform = self._transform.register_transform
+ self.unregister_transform = self._transform.unregister_transform
+ self.max_inferable_values = 100
+
+ @property
+ def builtins_module(self):
+ return self.astroid_cache["builtins"]
+
+ def visit_transforms(self, node):
+ """Visit the transforms and apply them to the given *node*."""
+ return self._transform.visit(node)
+
+ def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
+ """given a module name, return the astroid object"""
+ try:
+ filepath = modutils.get_source_file(filepath, include_no_ext=True)
+ source = True
+ except modutils.NoSourceFile:
+ pass
+ if modname is None:
+ try:
+ modname = ".".join(modutils.modpath_from_file(filepath))
+ except ImportError:
+ modname = filepath
+ if (
+ modname in self.astroid_cache
+ and self.astroid_cache[modname].file == filepath
+ ):
+ return self.astroid_cache[modname]
+ if source:
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid.builder import AstroidBuilder
+
+ return AstroidBuilder(self).file_build(filepath, modname)
+ if fallback and modname:
+ return self.ast_from_module_name(modname)
+ raise exceptions.AstroidBuildingError(
+ "Unable to build an AST for {path}.", path=filepath
+ )
+
+ def _build_stub_module(self, modname):
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid.builder import AstroidBuilder
+
+ return AstroidBuilder(self).string_build("", modname)
+
+ def _build_namespace_module(self, modname, path):
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid.builder import build_namespace_package_module
+
+ return build_namespace_package_module(modname, path)
+
+ def _can_load_extension(self, modname):
+ if self.always_load_extensions:
+ return True
+ if modutils.is_standard_module(modname):
+ return True
+ parts = modname.split(".")
+ return any(
+ ".".join(parts[:x]) in self.extension_package_whitelist
+ for x in range(1, len(parts) + 1)
+ )
+
+ def ast_from_module_name(self, modname, context_file=None):
+ """given a module name, return the astroid object"""
+ if modname in self.astroid_cache:
+ return self.astroid_cache[modname]
+ if modname == "__main__":
+ return self._build_stub_module(modname)
+ old_cwd = os.getcwd()
+ if context_file:
+ os.chdir(os.path.dirname(context_file))
+ try:
+ found_spec = self.file_from_module_name(modname, context_file)
+ if found_spec.type == spec.ModuleType.PY_ZIPMODULE:
+ module = self.zip_import_data(found_spec.location)
+ if module is not None:
+ return module
+
+ elif found_spec.type in (
+ spec.ModuleType.C_BUILTIN,
+ spec.ModuleType.C_EXTENSION,
+ ):
+ if (
+ found_spec.type == spec.ModuleType.C_EXTENSION
+ and not self._can_load_extension(modname)
+ ):
+ return self._build_stub_module(modname)
+ try:
+ module = modutils.load_module_from_name(modname)
+ except Exception as ex:
+ raise exceptions.AstroidImportError(
+ "Loading {modname} failed with:\n{error}",
+ modname=modname,
+ path=found_spec.location,
+ ) from ex
+ return self.ast_from_module(module, modname)
+
+ elif found_spec.type == spec.ModuleType.PY_COMPILED:
+ raise exceptions.AstroidImportError(
+ "Unable to load compiled module {modname}.",
+ modname=modname,
+ path=found_spec.location,
+ )
+
+ elif found_spec.type == spec.ModuleType.PY_NAMESPACE:
+ return self._build_namespace_module(
+ modname, found_spec.submodule_search_locations
+ )
+
+ if found_spec.location is None:
+ raise exceptions.AstroidImportError(
+ "Can't find a file for module {modname}.", modname=modname
+ )
+
+ return self.ast_from_file(found_spec.location, modname, fallback=False)
+ except exceptions.AstroidBuildingError as e:
+ for hook in self._failed_import_hooks:
+ try:
+ return hook(modname)
+ except exceptions.AstroidBuildingError:
+ pass
+ raise e
+ finally:
+ os.chdir(old_cwd)
+
+ def zip_import_data(self, filepath):
+ if zipimport is None:
+ return None
+
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid.builder import AstroidBuilder
+
+ builder = AstroidBuilder(self)
+ for ext in ZIP_IMPORT_EXTS:
+ try:
+ eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
+ except ValueError:
+ continue
+ try:
+ importer = zipimport.zipimporter(eggpath + ext)
+ zmodname = resource.replace(os.path.sep, ".")
+ if importer.is_package(resource):
+ zmodname = zmodname + ".__init__"
+ module = builder.string_build(
+ importer.get_source(resource), zmodname, filepath
+ )
+ return module
+ except Exception: # pylint: disable=broad-except
+ continue
+ return None
+
+ def file_from_module_name(self, modname, contextfile):
+ try:
+ value = self._mod_file_cache[(modname, contextfile)]
+ except KeyError:
+ try:
+ value = modutils.file_info_from_modpath(
+ modname.split("."), context_file=contextfile
+ )
+ except ImportError as ex:
+ value = exceptions.AstroidImportError(
+ "Failed to import module {modname} with error:\n{error}.",
+ modname=modname,
+ error=ex,
+ )
+ self._mod_file_cache[(modname, contextfile)] = value
+ if isinstance(value, exceptions.AstroidBuildingError):
+ raise value
+ return value
+
+ def ast_from_module(self, module, modname=None):
+ """given an imported module, return the astroid object"""
+ modname = modname or module.__name__
+ if modname in self.astroid_cache:
+ return self.astroid_cache[modname]
+ try:
+ # some builtin modules don't have __file__ attribute
+ filepath = module.__file__
+ if modutils.is_python_source(filepath):
+ return self.ast_from_file(filepath, modname)
+ except AttributeError:
+ pass
+
+ # pylint: disable=import-outside-toplevel; circular import
+ from astroid.builder import AstroidBuilder
+
+ return AstroidBuilder(self).module_build(module, modname)
+
+ def ast_from_class(self, klass, modname=None):
+ """get astroid for the given class"""
+ if modname is None:
+ try:
+ modname = klass.__module__
+ except AttributeError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to get module for class {class_name}.",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ modname=modname,
+ ) from exc
+ modastroid = self.ast_from_module_name(modname)
+ return modastroid.getattr(klass.__name__)[0] # XXX
+
+ def infer_ast_from_something(self, obj, context=None):
+ """infer astroid for the given class"""
+ if hasattr(obj, "__class__") and not isinstance(obj, type):
+ klass = obj.__class__
+ else:
+ klass = obj
+ try:
+ modname = klass.__module__
+ except AttributeError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to get module for {class_repr}.",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ except Exception as exc:
+ raise exceptions.AstroidImportError(
+ "Unexpected error while retrieving module for {class_repr}:\n"
+ "{error}",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ try:
+ name = klass.__name__
+ except AttributeError as exc:
+ raise exceptions.AstroidBuildingError(
+ "Unable to get name for {class_repr}:\n",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ except Exception as exc:
+ raise exceptions.AstroidImportError(
+ "Unexpected error while retrieving name for {class_repr}:\n" "{error}",
+ cls=klass,
+ class_repr=safe_repr(klass),
+ ) from exc
+ # take care, on living object __module__ is regularly wrong :(
+ modastroid = self.ast_from_module_name(modname)
+ if klass is obj:
+ for inferred in modastroid.igetattr(name, context):
+ yield inferred
+ else:
+ for inferred in modastroid.igetattr(name, context):
+ yield inferred.instantiate_class()
+
+ def register_failed_import_hook(self, hook):
+ """Registers a hook to resolve imports that cannot be found otherwise.
+
+ `hook` must be a function that accepts a single argument `modname` which
+ contains the name of the module or package that could not be imported.
+ If `hook` can resolve the import, it must return a node of type `astroid.Module`;
+ otherwise, it must raise `AstroidBuildingError`.
+ """
+ self._failed_import_hooks.append(hook)
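+
+        # Editor's illustration (not part of upstream astroid): a hook receives
+        # the dotted module name and must build an ``astroid.Module`` or raise.
+        # ``resolve_stub`` is a hypothetical helper used only for this sketch::
+        #
+        #     def resolve_stub(modname):
+        #         if modname == "fake_module":
+        #             from astroid.builder import AstroidBuilder
+        #             return AstroidBuilder(AstroidManager()).string_build("X = 1", modname)
+        #         raise exceptions.AstroidBuildingError(modname=modname)
+        #
+        #     AstroidManager().register_failed_import_hook(resolve_stub)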
+
+ def cache_module(self, module):
+ """Cache a module if no module with the same name is known yet."""
+ self.astroid_cache.setdefault(module.name, module)
+
+ def bootstrap(self):
+ """Bootstrap the required AST modules needed for the manager to work
+
+ The bootstrap usually involves building the AST for the builtins
+ module, which is required by the rest of astroid to work correctly.
+ """
+ from astroid import raw_building # pylint: disable=import-outside-toplevel
+
+ raw_building._astroid_bootstrapping()
+
+ def clear_cache(self):
+ """Clear the underlying cache. Also bootstraps the builtins module."""
+ self.astroid_cache.clear()
+ self.bootstrap()
diff --git a/src/main/python/venv/Lib/site-packages/astroid/mixins.py b/src/main/python/venv/Lib/site-packages/astroid/mixins.py
new file mode 100644
index 0000000..497a840
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/mixins.py
@@ -0,0 +1,160 @@
+# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014-2016, 2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2018 Nick Drozd
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""This module contains some mixins for the different nodes.
+"""
+import itertools
+
+from astroid import decorators
+from astroid import exceptions
+
+
+class BlockRangeMixIn:
+ """override block range """
+
+ @decorators.cachedproperty
+ def blockstart_tolineno(self):
+ return self.lineno
+
+ def _elsed_block_range(self, lineno, orelse, last=None):
+ """handle block line numbers range for try/finally, for, if and while
+ statements
+ """
+ if lineno == self.fromlineno:
+ return lineno, lineno
+ if orelse:
+ if lineno >= orelse[0].fromlineno:
+ return lineno, orelse[-1].tolineno
+ return lineno, orelse[0].fromlineno - 1
+ return lineno, last or self.tolineno
+
+
+class FilterStmtsMixin:
+ """Mixin for statement filtering and assignment type"""
+
+ def _get_filtered_stmts(self, _, node, _stmts, mystmt):
+ """method used in _filter_stmts to get statements and trigger break"""
+ if self.statement() is mystmt:
+ # original node's statement is the assignment, only keep
+ # current node (gen exp, list comp)
+ return [node], True
+ return _stmts, False
+
+ def assign_type(self):
+ return self
+
+
+class AssignTypeMixin:
+ def assign_type(self):
+ return self
+
+ def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
+ """method used in filter_stmts"""
+ if self is mystmt:
+ return _stmts, True
+ if self.statement() is mystmt:
+ # original node's statement is the assignment, only keep
+ # current node (gen exp, list comp)
+ return [node], True
+ return _stmts, False
+
+
+class ParentAssignTypeMixin(AssignTypeMixin):
+ def assign_type(self):
+ return self.parent.assign_type()
+
+
+class ImportFromMixin(FilterStmtsMixin):
+ """MixIn for From and Import Nodes"""
+
+ def _infer_name(self, frame, name):
+ return name
+
+ def do_import_module(self, modname=None):
+ """return the ast for a module whose name is imported by
+ """
+ # handle special case where we are on a package node importing a module
+ # using the same name as the package, which may end in an infinite loop
+ # on relative imports
+ # XXX: no more needed ?
+ mymodule = self.root()
+ level = getattr(self, "level", None) # Import as no level
+ if modname is None:
+ modname = self.modname
+ # XXX we should investigate deeper if we really want to check
+        # importing itself: modname and mymodule.name may be relative or absolute
+ if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
+ # FIXME: we used to raise InferenceError here, but why ?
+ return mymodule
+
+ return mymodule.import_module(
+ modname, level=level, relative_only=level and level >= 1
+ )
+
+ def real_name(self, asname):
+ """get name from 'as' name"""
+ for name, _asname in self.names:
+ if name == "*":
+ return asname
+ if not _asname:
+ name = name.split(".", 1)[0]
+ _asname = name
+ if asname == _asname:
+ return name
+ raise exceptions.AttributeInferenceError(
+ "Could not find original name for {attribute} in {target!r}",
+ target=self,
+ attribute=asname,
+ )
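+
+    # Editor's illustration (not part of upstream astroid): for
+    # ``import os.path as osp`` the node's ``real_name("osp")`` is "os.path",
+    # while for ``from os import path`` (no alias) ``real_name("path")`` is
+    # simply "path".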
+
+
+class MultiLineBlockMixin:
+ """Mixin for nodes with multi-line blocks, e.g. For and FunctionDef.
+ Note that this does not apply to every node with a `body` field.
+ For instance, an If node has a multi-line body, but the body of an
+ IfExpr is not multi-line, and hence cannot contain Return nodes,
+ Assign nodes, etc.
+ """
+
+ @decorators.cachedproperty
+ def _multi_line_blocks(self):
+ return tuple(getattr(self, field) for field in self._multi_line_block_fields)
+
+ def _get_return_nodes_skip_functions(self):
+ for block in self._multi_line_blocks:
+ for child_node in block:
+ if child_node.is_function:
+ continue
+ yield from child_node._get_return_nodes_skip_functions()
+
+ def _get_yield_nodes_skip_lambdas(self):
+ for block in self._multi_line_blocks:
+ for child_node in block:
+ if child_node.is_lambda:
+ continue
+ yield from child_node._get_yield_nodes_skip_lambdas()
+
+ @decorators.cached
+ def _get_assign_nodes(self):
+ children_assign_nodes = (
+ child_node._get_assign_nodes()
+ for block in self._multi_line_blocks
+ for child_node in block
+ )
+ return list(itertools.chain.from_iterable(children_assign_nodes))
+
+
+class NoChildrenMixin:
+ """Mixin for nodes with no children, e.g. Pass."""
+
+ def get_children(self):
+ yield from ()
diff --git a/src/main/python/venv/Lib/site-packages/astroid/modutils.py b/src/main/python/venv/Lib/site-packages/astroid/modutils.py
new file mode 100644
index 0000000..0c009b1
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/modutils.py
@@ -0,0 +1,698 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Google, Inc.
+# Copyright (c) 2014 Denis Laxalde
+# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2015 Radosław Ganczarek
+# Copyright (c) 2016 Derek Gustafson
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Ceridwen
+# Copyright (c) 2018 Mario Corchero
+# Copyright (c) 2018 Mario Corchero
+# Copyright (c) 2018 Anthony Sottile
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+"""Python modules manipulation utility functions.
+
+:type PY_SOURCE_EXTS: tuple(str)
+:var PY_SOURCE_EXTS: list of possible python source file extensions
+
+:type STD_LIB_DIRS: set of str
+:var STD_LIB_DIRS: directories where standard modules are located
+
+:type BUILTIN_MODULES: dict
+:var BUILTIN_MODULES: dictionary with builtin module names as keys
+"""
+import imp
+import os
+import platform
+import sys
+import itertools
+from distutils.sysconfig import get_python_lib # pylint: disable=import-error
+
+# pylint: disable=import-error, no-name-in-module
+from distutils.errors import DistutilsPlatformError
+
+# distutils is replaced by virtualenv with a module that does
+# weird path manipulations in order to get to the
+# real distutils module.
+
+from .interpreter._import import spec
+from .interpreter._import import util
+
+if sys.platform.startswith("win"):
+ PY_SOURCE_EXTS = ("py", "pyw")
+ PY_COMPILED_EXTS = ("dll", "pyd")
+else:
+ PY_SOURCE_EXTS = ("py",)
+ PY_COMPILED_EXTS = ("so",)
+
+
+try:
+ # The explicit sys.prefix is to work around a patch in virtualenv that
+ # replaces the 'real' sys.prefix (i.e. the location of the binary)
+ # with the prefix from which the virtualenv was created. This throws
+ # off the detection logic for standard library modules, thus the
+ # workaround.
+ STD_LIB_DIRS = {
+ get_python_lib(standard_lib=True, prefix=sys.prefix),
+ # Take care of installations where exec_prefix != prefix.
+ get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
+ get_python_lib(standard_lib=True),
+ }
+# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
+# non-valid path, see https://bugs.pypy.org/issue1164
+except DistutilsPlatformError:
+ STD_LIB_DIRS = set()
+
+if os.name == "nt":
+ STD_LIB_DIRS.add(os.path.join(sys.prefix, "dlls"))
+ try:
+ # real_prefix is defined when running inside virtual environments,
+ # created with the **virtualenv** library.
+ STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "dlls"))
+ except AttributeError:
+ # sys.base_exec_prefix is always defined, but in a virtual environment
+ # created with the stdlib **venv** module, it points to the original
+ # installation, if the virtual env is activated.
+ try:
+ STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, "dlls"))
+ except AttributeError:
+ pass
+
+if platform.python_implementation() == "PyPy":
+ _root = os.path.join(sys.prefix, "lib_pypy")
+ STD_LIB_DIRS.add(_root)
+ try:
+ # real_prefix is defined when running inside virtualenv.
+ STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "lib_pypy"))
+ except AttributeError:
+ pass
+ del _root
+if os.name == "posix":
+    # Need the real prefix if we're under a virtualenv, otherwise
+ # the usual one will do.
+ try:
+ prefix = sys.real_prefix
+ except AttributeError:
+ prefix = sys.prefix
+
+ def _posix_path(path):
+ base_python = "python%d.%d" % sys.version_info[:2]
+ return os.path.join(prefix, path, base_python)
+
+ STD_LIB_DIRS.add(_posix_path("lib"))
+ if sys.maxsize > 2 ** 32:
+ # This tries to fix a problem with /usr/lib64 builds,
+ # where systems are running both 32-bit and 64-bit code
+ # on the same machine, which reflects into the places where
+ # standard library could be found. More details can be found
+ # here http://bugs.python.org/issue1294959.
+ # An easy reproducing case would be
+ # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753
+ STD_LIB_DIRS.add(_posix_path("lib64"))
+
+EXT_LIB_DIRS = {get_python_lib(), get_python_lib(True)}
+IS_JYTHON = platform.python_implementation() == "Jython"
+BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True)
+
+
+class NoSourceFile(Exception):
+ """exception raised when we are not able to get a python
+ source file for a precompiled file
+ """
+
+
+def _normalize_path(path):
+ return os.path.normcase(os.path.abspath(path))
+
+
+def _canonicalize_path(path):
+ return os.path.realpath(os.path.expanduser(path))
+
+
+def _path_from_filename(filename, is_jython=IS_JYTHON):
+ if not is_jython:
+ return filename
+ head, has_pyclass, _ = filename.partition("$py.class")
+ if has_pyclass:
+ return head + ".py"
+ return filename
+
+
+def _handle_blacklist(blacklist, dirnames, filenames):
+ """remove files/directories in the black list
+
+ dirnames/filenames are usually from os.walk
+ """
+ for norecurs in blacklist:
+ if norecurs in dirnames:
+ dirnames.remove(norecurs)
+ elif norecurs in filenames:
+ filenames.remove(norecurs)
+
+
+_NORM_PATH_CACHE = {}
+
+
+def _cache_normalize_path(path):
+ """abspath with caching"""
+ # _module_file calls abspath on every path in sys.path every time it's
+ # called; on a larger codebase this easily adds up to half a second just
+ # assembling path components. This cache alleviates that.
+ try:
+ return _NORM_PATH_CACHE[path]
+ except KeyError:
+ if not path: # don't cache result for ''
+ return _normalize_path(path)
+ result = _NORM_PATH_CACHE[path] = _normalize_path(path)
+ return result
+
+
+def load_module_from_name(dotted_name, path=None, use_sys=True):
+ """Load a Python module from its name.
+
+ :type dotted_name: str
+ :param dotted_name: python name of a module or package
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be
+ used or not
+
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ return load_module_from_modpath(dotted_name.split("."), path, use_sys)
+
+
+def load_module_from_modpath(parts, path=None, use_sys=1):
+ """Load a python module from its split name.
+
+ :type parts: list(str) or tuple(str)
+ :param parts:
+ python name of a module or package split on '.'
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be used or not
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ if use_sys:
+ try:
+ return sys.modules[".".join(parts)]
+ except KeyError:
+ pass
+ modpath = []
+ prevmodule = None
+ for part in parts:
+ modpath.append(part)
+ curname = ".".join(modpath)
+ module = None
+ if len(modpath) != len(parts):
+ # even with use_sys=False, should try to get outer packages from sys.modules
+ module = sys.modules.get(curname)
+ elif use_sys:
+ # because it may have been indirectly loaded through a parent
+ module = sys.modules.get(curname)
+ if module is None:
+ mp_file, mp_filename, mp_desc = imp.find_module(part, path)
+ module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
+ # mp_file still needs to be closed.
+ if mp_file:
+ mp_file.close()
+ if prevmodule:
+ setattr(prevmodule, part, module)
+ _file = getattr(module, "__file__", "")
+ prevmodule = module
+ if not _file and util.is_namespace(curname):
+ continue
+ if not _file and len(modpath) != len(parts):
+ raise ImportError("no module in %s" % ".".join(parts[len(modpath) :]))
+ path = [os.path.dirname(_file)]
+ return module
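+
+# Editor's illustration (not part of upstream astroid): loading a module by its
+# split name returns the live module object, e.g.
+#
+#     >>> import os
+#     >>> load_module_from_modpath(["os", "path"]) is os.path
+#     True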
+
+
+def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None):
+ """Load a Python module from it's path.
+
+ :type filepath: str
+ :param filepath: path to the python module or package
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type use_sys: bool
+ :param use_sys:
+ boolean indicating whether the sys.modules dictionary should be
+ used or not
+
+
+ :raise ImportError: if the module or package is not found
+
+ :rtype: module
+ :return: the loaded module
+ """
+ modpath = modpath_from_file(filepath, extrapath)
+ return load_module_from_modpath(modpath, path, use_sys)
+
+
+def check_modpath_has_init(path, mod_path):
+ """check there are some __init__.py all along the way"""
+ modpath = []
+ for part in mod_path:
+ modpath.append(part)
+ path = os.path.join(path, part)
+ if not _has_init(path):
+ old_namespace = util.is_namespace(".".join(modpath))
+ if not old_namespace:
+ return False
+ return True
+
+
+def _get_relative_base_path(filename, path_to_check):
+ """Extracts the relative mod path of the file to import from
+
+ Check if a file is within the passed in path and if so, returns the
+ relative mod path from the one passed in.
+
+    If the filename is not in path_to_check, returns None
+
+    Note this function will look for both the abspath and the realpath of the
+    file; this allows finding the relative base path even if the file is a
+    symlink to a file in the passed in path
+
+ Examples:
+ _get_relative_base_path("/a/b/c/d.py", "/a/b") -> ["c","d"]
+ _get_relative_base_path("/a/b/c/d.py", "/dev") -> None
+ """
+ importable_path = None
+ path_to_check = os.path.normcase(path_to_check)
+ abs_filename = os.path.abspath(filename)
+ if os.path.normcase(abs_filename).startswith(path_to_check):
+ importable_path = abs_filename
+
+ real_filename = os.path.realpath(filename)
+ if os.path.normcase(real_filename).startswith(path_to_check):
+ importable_path = real_filename
+
+ if importable_path:
+ base_path = os.path.splitext(importable_path)[0]
+ relative_base_path = base_path[len(path_to_check) :]
+ return [pkg for pkg in relative_base_path.split(os.sep) if pkg]
+
+ return None
+
+
+def modpath_from_file_with_callback(filename, extrapath=None, is_package_cb=None):
+ filename = os.path.expanduser(_path_from_filename(filename))
+
+ if extrapath is not None:
+ for path_ in itertools.chain(map(_canonicalize_path, extrapath), extrapath):
+ path = os.path.abspath(path_)
+ if not path:
+ continue
+ submodpath = _get_relative_base_path(filename, path)
+ if not submodpath:
+ continue
+ if is_package_cb(path, submodpath[:-1]):
+ return extrapath[path_].split(".") + submodpath
+
+ for path in itertools.chain(map(_canonicalize_path, sys.path), sys.path):
+ path = _cache_normalize_path(path)
+ if not path:
+ continue
+ modpath = _get_relative_base_path(filename, path)
+ if not modpath:
+ continue
+ if is_package_cb(path, modpath[:-1]):
+ return modpath
+
+ raise ImportError(
+ "Unable to find module for %s in %s" % (filename, ", \n".join(sys.path))
+ )
+
+
+def modpath_from_file(filename, extrapath=None):
+ """given a file path return the corresponding split module's name
+ (i.e name of a module or package split on '.')
+
+ :type filename: str
+ :param filename: file's path for which we want the module's name
+
+ :type extrapath: dict
+ :param extrapath:
+ optional extra search path, with path as key and package name for the path
+ as value. This is usually useful to handle package split in multiple
+ directories using __path__ trick.
+
+
+ :raise ImportError:
+ if the corresponding module's name has not been found
+
+ :rtype: list(str)
+ :return: the corresponding split module's name
+ """
+ return modpath_from_file_with_callback(filename, extrapath, check_modpath_has_init)
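+
+# Editor's illustration (not part of upstream astroid), assuming astroid is
+# installed on sys.path with regular __init__.py files:
+#
+#     >>> modpath_from_file(__file__)
+#     ['astroid', 'modutils']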
+
+
+def file_from_modpath(modpath, path=None, context_file=None):
+ return file_info_from_modpath(modpath, path, context_file).location
+
+
+def file_info_from_modpath(modpath, path=None, context_file=None):
+ """given a mod path (i.e. split module / package name), return the
+ corresponding file, giving priority to source file over precompiled
+ file if it exists
+
+ :type modpath: list or tuple
+ :param modpath:
+ split module's name (i.e name of a module or package split
+ on '.')
+ (this means explicit relative imports that start with dots have
+ empty strings in this list!)
+
+ :type path: list or None
+ :param path:
+ optional list of path where the module or package should be
+ searched (use sys.path if nothing or None is given)
+
+ :type context_file: str or None
+ :param context_file:
+ context file to consider, necessary if the identifier has been
+ introduced using a relative import unresolvable in the actual
+ context (i.e. modutils)
+
+ :raise ImportError: if there is no such module in the directory
+
+ :rtype: (str or None, import type)
+ :return:
+ the path to the module's file or None if it's an integrated
+ builtin module such as 'sys'
+ """
+ if context_file is not None:
+ context = os.path.dirname(context_file)
+ else:
+ context = context_file
+ if modpath[0] == "xml":
+ # handle _xmlplus
+ try:
+ return _spec_from_modpath(["_xmlplus"] + modpath[1:], path, context)
+ except ImportError:
+ return _spec_from_modpath(modpath, path, context)
+ elif modpath == ["os", "path"]:
+ # FIXME: currently ignoring search_path...
+ return spec.ModuleSpec(
+ name="os.path", location=os.path.__file__, module_type=imp.PY_SOURCE
+ )
+ return _spec_from_modpath(modpath, path, context)
+
+
+def get_module_part(dotted_name, context_file=None):
+ """given a dotted name return the module part of the name :
+
+ >>> get_module_part('astroid.as_string.dump')
+ 'astroid.as_string'
+
+ :type dotted_name: str
+ :param dotted_name: full name of the identifier we are interested in
+
+ :type context_file: str or None
+ :param context_file:
+ context file to consider, necessary if the identifier has been
+ introduced using a relative import unresolvable in the actual
+ context (i.e. modutils)
+
+
+ :raise ImportError: if there is no such module in the directory
+
+ :rtype: str or None
+ :return:
+ the module part of the name or None if we have not been able at
+ all to import the given name
+
+ XXX: deprecated, since it doesn't handle package precedence over module
+ (see #10066)
+ """
+ # os.path trick
+ if dotted_name.startswith("os.path"):
+ return "os.path"
+ parts = dotted_name.split(".")
+ if context_file is not None:
+        # first check for builtin module which won't be considered later
+ # in that case (path != None)
+ if parts[0] in BUILTIN_MODULES:
+ if len(parts) > 2:
+ raise ImportError(dotted_name)
+ return parts[0]
+ # don't use += or insert, we want a new list to be created !
+ path = None
+ starti = 0
+ if parts[0] == "":
+ assert (
+ context_file is not None
+ ), "explicit relative import, but no context_file?"
+ path = [] # prevent resolving the import non-relatively
+ starti = 1
+ while parts[starti] == "": # for all further dots: change context
+ starti += 1
+ context_file = os.path.dirname(context_file)
+ for i in range(starti, len(parts)):
+ try:
+ file_from_modpath(
+ parts[starti : i + 1], path=path, context_file=context_file
+ )
+ except ImportError:
+ if i < max(1, len(parts) - 2):
+ raise
+ return ".".join(parts[:i])
+ return dotted_name
+
+
+def get_module_files(src_directory, blacklist, list_all=False):
+ """given a package directory return a list of all available python
+ module's files in the package and its subpackages
+
+ :type src_directory: str
+ :param src_directory:
+ path of the directory corresponding to the package
+
+ :type blacklist: list or tuple
+    :param blacklist:
+      iterable of files or directories to ignore.
+
+ :type list_all: bool
+ :param list_all:
+ get files from all paths, including ones without __init__.py
+
+ :rtype: list
+ :return:
+ the list of all available python module's files in the package and
+ its subpackages
+ """
+ files = []
+ for directory, dirnames, filenames in os.walk(src_directory):
+ if directory in blacklist:
+ continue
+ _handle_blacklist(blacklist, dirnames, filenames)
+ # check for __init__.py
+ if not list_all and "__init__.py" not in filenames:
+ dirnames[:] = ()
+ continue
+ for filename in filenames:
+ if _is_python_file(filename):
+ src = os.path.join(directory, filename)
+ files.append(src)
+ return files
+
+
+def get_source_file(filename, include_no_ext=False):
+ """given a python module's file name return the matching source file
+ name (the filename will be returned identically if it's already an
+ absolute path to a python source file...)
+
+ :type filename: str
+ :param filename: python module's file name
+
+
+ :raise NoSourceFile: if no source file exists on the file system
+
+ :rtype: str
+ :return: the absolute path of the source file if it exists
+ """
+ filename = os.path.abspath(_path_from_filename(filename))
+ base, orig_ext = os.path.splitext(filename)
+ for ext in PY_SOURCE_EXTS:
+ source_path = "%s.%s" % (base, ext)
+ if os.path.exists(source_path):
+ return source_path
+ if include_no_ext and not orig_ext and os.path.exists(base):
+ return base
+ raise NoSourceFile(filename)
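+
+# Editor's illustration (not part of upstream astroid): mapping a compiled file
+# back to its source, assuming the .py file exists next to it
+# (the path below is hypothetical):
+#
+#     >>> get_source_file("/usr/lib/python3.7/os.pyc")
+#     '/usr/lib/python3.7/os.py'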
+
+
+def is_python_source(filename):
+ """
+    :rtype: bool
+    :return: True if the filename is a python source file
+ """
+ return os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS
+
+
+def is_standard_module(modname, std_path=None):
+ """try to guess if a module is a standard python module (by default,
+ see `std_path` parameter's description)
+
+ :type modname: str
+ :param modname: name of the module we are interested in
+
+ :type std_path: list(str) or tuple(str)
+    :param std_path: list of paths considered as standard
+
+
+ :rtype: bool
+ :return:
+ true if the module:
+        - is located in one of the directories listed in `std_path`
+ - is a built-in module
+ """
+ modname = modname.split(".")[0]
+ try:
+ filename = file_from_modpath([modname])
+ except ImportError:
+ # import failed, i'm probably not so wrong by supposing it's
+ # not standard...
+ return False
+ # modules which are not living in a file are considered standard
+ # (sys and __builtin__ for instance)
+ if filename is None:
+ # we assume there are no namespaces in stdlib
+ return not util.is_namespace(modname)
+ filename = _normalize_path(filename)
+ for path in EXT_LIB_DIRS:
+ if filename.startswith(_cache_normalize_path(path)):
+ return False
+ if std_path is None:
+ std_path = STD_LIB_DIRS
+ for path in std_path:
+ if filename.startswith(_cache_normalize_path(path)):
+ return True
+ return False
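+
+# Editor's illustration (not part of upstream astroid); results depend on the
+# local installation, but typically:
+#
+#     >>> is_standard_module("os")
+#     True
+#     >>> is_standard_module("astroid")
+#     False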
+
+
+def is_relative(modname, from_file):
+ """return true if the given module name is relative to the given
+ file name
+
+ :type modname: str
+ :param modname: name of the module we are interested in
+
+ :type from_file: str
+ :param from_file:
+ path of the module from which modname has been imported
+
+ :rtype: bool
+ :return:
+ true if the module has been imported relatively to `from_file`
+ """
+ if not os.path.isdir(from_file):
+ from_file = os.path.dirname(from_file)
+ if from_file in sys.path:
+ return False
+ try:
+ stream, _, _ = imp.find_module(modname.split(".")[0], [from_file])
+
+ # Close the stream to avoid ResourceWarnings.
+ if stream:
+ stream.close()
+ return True
+ except ImportError:
+ return False
+
+
+# internal only functions #####################################################
+
+
+def _spec_from_modpath(modpath, path=None, context=None):
+ """given a mod path (i.e. split module / package name), return the
+ corresponding spec
+
+ this function is used internally, see `file_from_modpath`'s
+ documentation for more information
+ """
+ assert modpath
+ location = None
+ if context is not None:
+ try:
+ found_spec = spec.find_spec(modpath, [context])
+ location = found_spec.location
+ except ImportError:
+ found_spec = spec.find_spec(modpath, path)
+ location = found_spec.location
+ else:
+ found_spec = spec.find_spec(modpath, path)
+ if found_spec.type == spec.ModuleType.PY_COMPILED:
+ try:
+ location = get_source_file(found_spec.location)
+ return found_spec._replace(
+ location=location, type=spec.ModuleType.PY_SOURCE
+ )
+ except NoSourceFile:
+ return found_spec._replace(location=location)
+ elif found_spec.type == spec.ModuleType.C_BUILTIN:
+ # integrated builtin module
+ return found_spec._replace(location=None)
+ elif found_spec.type == spec.ModuleType.PKG_DIRECTORY:
+ location = _has_init(found_spec.location)
+ return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE)
+ return found_spec
+
+
+def _is_python_file(filename):
+ """return true if the given filename should be considered as a python file
+
+ .pyc and .pyo are ignored
+ """
+ return filename.endswith((".py", ".so", ".pyd", ".pyw"))
+
+
+def _has_init(directory):
+ """if the given directory has a valid __init__ file, return its path,
+ else return None
+ """
+ mod_or_pack = os.path.join(directory, "__init__")
+ for ext in PY_SOURCE_EXTS + ("pyc", "pyo"):
+ if os.path.exists(mod_or_pack + "." + ext):
+ return mod_or_pack + "." + ext
+ return None
+
+
+def is_namespace(specobj):
+ return specobj.type == spec.ModuleType.PY_NAMESPACE
+
+
+def is_directory(specobj):
+ return specobj.type == spec.ModuleType.PKG_DIRECTORY
diff --git a/src/main/python/venv/Lib/site-packages/astroid/node_classes.py b/src/main/python/venv/Lib/site-packages/astroid/node_classes.py
new file mode 100644
index 0000000..994c96b
--- /dev/null
+++ b/src/main/python/venv/Lib/site-packages/astroid/node_classes.py
@@ -0,0 +1,4775 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE)
+# Copyright (c) 2010 Daniel Harding
+# Copyright (c) 2012 FELD Boris
+# Copyright (c) 2013-2014 Google, Inc.
+# Copyright (c) 2014-2018 Claudiu Popa
+# Copyright (c) 2014 Eevee (Alex Munroe)
+# Copyright (c) 2015-2016 Ceridwen
+# Copyright (c) 2015 Florian Bruhin
+# Copyright (c) 2016-2017 Derek Gustafson
+# Copyright (c) 2016 Jared Garst
+# Copyright (c) 2016 Jakub Wilk
+# Copyright (c) 2016 Dave Baum
+# Copyright (c) 2017-2018 Ashley Whetter
+# Copyright (c) 2017 Łukasz Rogalski
+# Copyright (c) 2017 rr-
+# Copyright (c) 2018 Bryce Guinta
+# Copyright (c) 2018 brendanator
+# Copyright (c) 2018 Nick Drozd
+# Copyright (c) 2018 HoverHell
+
+# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
+# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
+
+# pylint: disable=too-many-lines; https://github.com/PyCQA/astroid/issues/465
+
+"""Module for some node classes. More nodes in scoped_nodes.py
+"""
+
+import abc
+import builtins as builtins_mod
+import itertools
+import pprint
+import sys
+from functools import lru_cache, singledispatch as _singledispatch
+
+from astroid import as_string
+from astroid import bases
+from astroid import context as contextmod
+from astroid import decorators
+from astroid import exceptions
+from astroid import manager
+from astroid import mixins
+from astroid import util
+
+
+BUILTINS = builtins_mod.__name__
+MANAGER = manager.AstroidManager()
+PY38 = sys.version_info[:2] >= (3, 8)
+
+
+def _is_const(value):
+ return isinstance(value, tuple(CONST_CLS))
+
+
+@decorators.raise_if_nothing_inferred
+def unpack_infer(stmt, context=None):
+ """recursively generate nodes inferred by the given statement.
+ If the inferred value is a list or a tuple, recurse on the elements
+ """
+ if isinstance(stmt, (List, Tuple)):
+ for elt in stmt.elts:
+ if elt is util.Uninferable:
+ yield elt
+ continue
+ yield from unpack_infer(elt, context)
+ return dict(node=stmt, context=context)
+ # if inferred is a final node, return it and stop
+ inferred = next(stmt.infer(context))
+ if inferred is stmt:
+ yield inferred
+ return dict(node=stmt, context=context)
+ # else, infer recursively, except Uninferable object that should be returned as is
+ for inferred in stmt.infer(context):
+ if inferred is util.Uninferable:
+ yield inferred
+ else:
+ yield from unpack_infer(inferred, context)
+
+ return dict(node=stmt, context=context)
+
+
+def are_exclusive(
+ stmt1, stmt2, exceptions=None
+): # pylint: disable=redefined-outer-name
+ """return true if the two given statements are mutually exclusive
+
+ `exceptions` may be a list of exception names. If specified, discard If
+ branches and check one of the statement is in an exception handler catching
+ one of the given exceptions.
+
+ algorithm :
+ 1) index stmt1's parents
+ 2) climb among stmt2's parents until we find a common parent
+ 3) if the common parent is a If or TryExcept statement, look if nodes are
+ in exclusive branches
+ """
+ # index stmt1's parents
+ stmt1_parents = {}
+ children = {}
+ node = stmt1.parent
+ previous = stmt1
+ while node:
+ stmt1_parents[node] = 1
+ children[node] = previous
+ previous = node
+ node = node.parent
+ # climb among stmt2's parents until we find a common parent
+ node = stmt2.parent
+ previous = stmt2
+ while node:
+ if node in stmt1_parents:
+ # if the common parent is a If or TryExcept statement, look if
+ # nodes are in exclusive branches
+ if isinstance(node, If) and exceptions is None:
+ if (
+ node.locate_child(previous)[1]
+ is not node.locate_child(children[node])[1]
+ ):
+ return True
+ elif isinstance(node, TryExcept):
+ c2attr, c2node = node.locate_child(previous)
+ c1attr, c1node = node.locate_child(children[node])
+ if c1node is not c2node:
+ first_in_body_caught_by_handlers = (
+ c2attr == "handlers"
+ and c1attr == "body"
+ and previous.catch(exceptions)
+ )
+ second_in_body_caught_by_handlers = (
+ c2attr == "body"
+ and c1attr == "handlers"
+ and children[node].catch(exceptions)
+ )
+ first_in_else_other_in_handlers = (
+ c2attr == "handlers" and c1attr == "orelse"
+ )
+ second_in_else_other_in_handlers = (
+ c2attr == "orelse" and c1attr == "handlers"
+ )
+ if any(
+ (
+ first_in_body_caught_by_handlers,
+ second_in_body_caught_by_handlers,
+ first_in_else_other_in_handlers,
+ second_in_else_other_in_handlers,
+ )
+ ):
+ return True
+ elif c2attr == "handlers" and c1attr == "handlers":
+ return previous is not children[node]
+ return False
+ previous = node
+ node = node.parent
+ return False
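+
+# Editor's illustration (not part of upstream astroid): for statements placed
+# in opposite branches of the same ``if``::
+#
+#     if cond:
+#         x = 1      # stmt1
+#     else:
+#         x = 2      # stmt2
+#
+# ``are_exclusive(stmt1, stmt2)`` is True, while two statements in the same
+# branch yield False.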
+
+
+# getitem() helpers.
+
+_SLICE_SENTINEL = object()
+
+
+def _slice_value(index, context=None):
+ """Get the value of the given slice index."""
+
+ if isinstance(index, Const):
+ if isinstance(index.value, (int, type(None))):
+ return index.value
+ elif index is None:
+ return None
+ else:
+ # Try to infer what the index actually is.
+ # Since we can't return all the possible values,
+ # we'll stop at the first possible value.
+ try:
+ inferred = next(index.infer(context=context))
+ except exceptions.InferenceError:
+ pass
+ else:
+ if isinstance(inferred, Const):
+ if isinstance(inferred.value, (int, type(None))):
+ return inferred.value
+
+ # Use a sentinel, because None can be a valid
+ # value that this function can return,
+ # as it is the case for unspecified bounds.
+ return _SLICE_SENTINEL
+
+
+def _infer_slice(node, context=None):
+ lower = _slice_value(node.lower, context)
+ upper = _slice_value(node.upper, context)
+ step = _slice_value(node.step, context)
+ if all(elem is not _SLICE_SENTINEL for elem in (lower, upper, step)):
+ return slice(lower, upper, step)
+
+ raise exceptions.AstroidTypeError(
+ message="Could not infer slice used in subscript",
+ node=node,
+ index=node.parent,
+ context=context,
+ )
+
+
+def _container_getitem(instance, elts, index, context=None):
+ """Get a slice or an item, using the given *index*, for the given sequence."""
+ try:
+ if isinstance(index, Slice):
+ index_slice = _infer_slice(index, context=context)
+ new_cls = instance.__class__()
+ new_cls.elts = elts[index_slice]
+ new_cls.parent = instance.parent
+ return new_cls
+ if isinstance(index, Const):
+ return elts[index.value]
+ except IndexError as exc:
+ raise exceptions.AstroidIndexError(
+ message="Index {index!s} out of range",
+ node=instance,
+ index=index,
+ context=context,
+ ) from exc
+ except TypeError as exc:
+ raise exceptions.AstroidTypeError(
+ message="Type error {error!r}", node=instance, index=index, context=context
+ ) from exc
+
+ raise exceptions.AstroidTypeError("Could not use %s as subscript index" % index)
+
+
+OP_PRECEDENCE = {
+ op: precedence
+ for precedence, ops in enumerate(
+ [
+ ["Lambda"], # lambda x: x + 1
+ ["IfExp"], # 1 if True else 2
+ ["or"],
+ ["and"],
+ ["not"],
+ ["Compare"], # in, not in, is, is not, <, <=, >, >=, !=, ==
+ ["|"],
+ ["^"],
+ ["&"],
+ ["<<", ">>"],
+ ["+", "-"],
+ ["*", "@", "/", "//", "%"],
+ ["UnaryOp"], # +, -, ~
+ ["**"],
+ ["Await"],
+ ]
+ )
+ for op in ops
+}
+
+
+class NodeNG:
+ """ A node of the new Abstract Syntax Tree (AST).
+
+ This is the base class for all Astroid node classes.
+ """
+
+ is_statement = False
+ """Whether this node indicates a statement.
+
+ :type: bool
+ """
+ optional_assign = False # True for For (and for Comprehension if py <3.0)
+ """Whether this node optionally assigns a variable.
+
+ This is for loop assignments because loop won't necessarily perform an
+ assignment if the loop has no iterations.
+    This is also the case for comprehensions in Python 2.
+
+ :type: bool
+ """
+ is_function = False # True for FunctionDef nodes
+ """Whether this node indicates a function.
+
+ :type: bool
+ """
+ is_lambda = False
+ # Attributes below are set by the builder module or by raw factories
+ lineno = None
+ """The line that this node appears on in the source code.
+
+ :type: int or None
+ """
+ col_offset = None
+ """The column that this node appears on in the source code.
+
+ :type: int or None
+ """
+ parent = None
+ """The parent node in the syntax tree.
+
+ :type: NodeNG or None
+ """
+ _astroid_fields = ()
+ """Node attributes that contain child nodes.
+
+ This is redefined in most concrete classes.
+
+ :type: tuple(str)
+ """
+ _other_fields = ()
+ """Node attributes that do not contain child nodes.
+
+ :type: tuple(str)
+ """
+ _other_other_fields = ()
+ """Attributes that contain AST-dependent fields.
+
+ :type: tuple(str)
+ """
+ # instance specific inference function infer(node, context)
+ _explicit_inference = None
+
+ def __init__(self, lineno=None, col_offset=None, parent=None):
+ """
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.lineno = lineno
+ self.col_offset = col_offset
+ self.parent = parent
+
+ def infer(self, context=None, **kwargs):
+ """Get a generator of the inferred values.
+
+ This is the main entry point to the inference system.
+
+ .. seealso:: :ref:`inference`
+
+ If the instance has some explicit inference function set, it will be
+ called instead of the default interface.
+
+ :returns: The inferred values.
+ :rtype: iterable
+ """
+ if context is not None:
+ context = context.extra_context.get(self, context)
+ if self._explicit_inference is not None:
+ # explicit_inference is not bound, give it self explicitly
+ try:
+ # pylint: disable=not-callable
+ return self._explicit_inference(self, context, **kwargs)
+ except exceptions.UseInferenceDefault:
+ pass
+
+ if not context:
+ return self._infer(context, **kwargs)
+
+ key = (self, context.lookupname, context.callcontext, context.boundnode)
+ if key in context.inferred:
+ return iter(context.inferred[key])
+
+ gen = context.cache_generator(key, self._infer(context, **kwargs))
+ return util.limit_inference(gen, MANAGER.max_inferable_values)
+
+ def _repr_name(self):
+ """Get a name for nice representation.
+
+ This is either :attr:`name`, :attr:`attrname`, or the empty string.
+
+ :returns: The nice name.
+ :rtype: str
+ """
+ names = {"name", "attrname"}
+ if all(name not in self._astroid_fields for name in names):
+ return getattr(self, "name", getattr(self, "attrname", ""))
+ return ""
+
+ def __str__(self):
+ rname = self._repr_name()
+ cname = type(self).__name__
+ if rname:
+ string = "%(cname)s.%(rname)s(%(fields)s)"
+ alignment = len(cname) + len(rname) + 2
+ else:
+ string = "%(cname)s(%(fields)s)"
+ alignment = len(cname) + 1
+ result = []
+ for field in self._other_fields + self._astroid_fields:
+ value = getattr(self, field)
+ width = 80 - len(field) - alignment
+ lines = pprint.pformat(value, indent=2, width=width).splitlines(True)
+
+ inner = [lines[0]]
+ for line in lines[1:]:
+ inner.append(" " * alignment + line)
+ result.append("%s=%s" % (field, "".join(inner)))
+
+ return string % {
+ "cname": cname,
+ "rname": rname,
+ "fields": (",\n" + " " * alignment).join(result),
+ }
+
+ def __repr__(self):
+ rname = self._repr_name()
+ if rname:
+ string = "<%(cname)s.%(rname)s l.%(lineno)s at 0x%(id)x>"
+ else:
+ string = "<%(cname)s l.%(lineno)s at 0x%(id)x>"
+ return string % {
+ "cname": type(self).__name__,
+ "rname": rname,
+ "lineno": self.fromlineno,
+ "id": id(self),
+ }
+
+ def accept(self, visitor):
+ """Visit this node using the given visitor."""
+ func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
+ return func(self)
+
+ def get_children(self):
+ """Get the child nodes below this node.
+
+ :returns: The children.
+ :rtype: iterable(NodeNG)
+ """
+ for field in self._astroid_fields:
+ attr = getattr(self, field)
+ if attr is None:
+ continue
+ if isinstance(attr, (list, tuple)):
+ yield from attr
+ else:
+ yield attr
+
+ def last_child(self):
+ """An optimized version of list(get_children())[-1]
+
+ :returns: The last child, or None if no children exist.
+ :rtype: NodeNG or None
+ """
+ for field in self._astroid_fields[::-1]:
+ attr = getattr(self, field)
+            if not attr:  # None or empty list / tuple
+ continue
+ if isinstance(attr, (list, tuple)):
+ return attr[-1]
+
+ return attr
+ return None
+
+ def parent_of(self, node):
+ """Check if this node is the parent of the given node.
+
+ :param node: The node to check if it is the child.
+ :type node: NodeNG
+
+ :returns: True if this node is the parent of the given node,
+ False otherwise.
+ :rtype: bool
+ """
+ parent = node.parent
+ while parent is not None:
+ if self is parent:
+ return True
+ parent = parent.parent
+ return False
+
+ def statement(self):
+ """The first parent node, including self, marked as statement node.
+
+ :returns: The first parent statement.
+ :rtype: NodeNG
+ """
+ if self.is_statement:
+ return self
+ return self.parent.statement()
+
+ def frame(self):
+ """The first parent frame node.
+
+ A frame node is a :class:`Module`, :class:`FunctionDef`,
+ or :class:`ClassDef`.
+
+ :returns: The first parent frame node.
+ :rtype: Module or FunctionDef or ClassDef
+ """
+ return self.parent.frame()
+
+ def scope(self):
+ """The first parent node defining a new scope.
+
+ :returns: The first parent scope node.
+ :rtype: Module or FunctionDef or ClassDef or Lambda or GenExpr
+ """
+ if self.parent:
+ return self.parent.scope()
+ return None
+
+ def root(self):
+ """Return the root node of the syntax tree.
+
+ :returns: The root node.
+ :rtype: Module
+ """
+ if self.parent:
+ return self.parent.root()
+ return self
+
+ def child_sequence(self, child):
+ """Search for the sequence that contains this child.
+
+ :param child: The child node to search sequences for.
+ :type child: NodeNG
+
+ :returns: The sequence containing the given child node.
+ :rtype: iterable(NodeNG)
+
+ :raises AstroidError: If no sequence could be found that contains
+ the given child.
+ """
+ for field in self._astroid_fields:
+ node_or_sequence = getattr(self, field)
+ if node_or_sequence is child:
+ return [node_or_sequence]
+ # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
+ if (
+ isinstance(node_or_sequence, (tuple, list))
+ and child in node_or_sequence
+ ):
+ return node_or_sequence
+
+ msg = "Could not find %s in %s's children"
+ raise exceptions.AstroidError(msg % (repr(child), repr(self)))
+
+ def locate_child(self, child):
+ """Find the field of this node that contains the given child.
+
+ :param child: The child node to search fields for.
+ :type child: NodeNG
+
+ :returns: A tuple of the name of the field that contains the child,
+ and the sequence or node that contains the child node.
+ :rtype: tuple(str, iterable(NodeNG) or NodeNG)
+
+ :raises AstroidError: If no field could be found that contains
+ the given child.
+ """
+ for field in self._astroid_fields:
+ node_or_sequence = getattr(self, field)
+ # /!\ compiler.ast Nodes have an __iter__ walking over child nodes
+ if child is node_or_sequence:
+ return field, child
+ if (
+ isinstance(node_or_sequence, (tuple, list))
+ and child in node_or_sequence
+ ):
+ return field, node_or_sequence
+ msg = "Could not find %s in %s's children"
+ raise exceptions.AstroidError(msg % (repr(child), repr(self)))
+
+ # FIXME : should we merge child_sequence and locate_child ? locate_child
+ # is only used in are_exclusive, child_sequence one time in pylint.
+
+ def next_sibling(self):
+ """The next sibling statement node.
+
+ :returns: The next sibling statement node.
+ :rtype: NodeNG or None
+ """
+ return self.parent.next_sibling()
+
+ def previous_sibling(self):
+ """The previous sibling statement.
+
+ :returns: The previous sibling statement node.
+ :rtype: NodeNG or None
+ """
+ return self.parent.previous_sibling()
+
+ # these are lazy because they're relatively expensive to compute for every
+ # single node, and they rarely get looked at
+
+ @decorators.cachedproperty
+ def fromlineno(self):
+ """The first line that this node appears on in the source code.
+
+ :type: int or None
+ """
+ if self.lineno is None:
+ return self._fixed_source_line()
+
+ return self.lineno
+
+ @decorators.cachedproperty
+ def tolineno(self):
+ """The last line that this node appears on in the source code.
+
+ :type: int or None
+ """
+ if not self._astroid_fields:
+ # can't have children
+ lastchild = None
+ else:
+ lastchild = self.last_child()
+ if lastchild is None:
+ return self.fromlineno
+
+ return lastchild.tolineno
+
+ def _fixed_source_line(self):
+ """Attempt to find the line that this node appears on.
+
+ We need this method since not all nodes have :attr:`lineno` set.
+
+ :returns: The line number of this node,
+ or None if this could not be determined.
+ :rtype: int or None
+ """
+ line = self.lineno
+ _node = self
+ try:
+ while line is None:
+ _node = next(_node.get_children())
+ line = _node.lineno
+ except StopIteration:
+ _node = self.parent
+ while _node and line is None:
+ line = _node.lineno
+ _node = _node.parent
+ return line
+
+ def block_range(self, lineno):
+ """Get a range from the given line number to where this node ends.
+
+ :param lineno: The line number to start the range at.
+ :type lineno: int
+
+ :returns: The range of line numbers that this node belongs to,
+ starting at the given line number.
+ :rtype: tuple(int, int or None)
+ """
+ return lineno, self.tolineno
+
+ def set_local(self, name, stmt):
+ """Define that the given name is declared in the given statement node.
+
+ This definition is stored on the parent scope node.
+
+ .. seealso:: :meth:`scope`
+
+ :param name: The name that is being defined.
+ :type name: str
+
+ :param stmt: The statement that defines the given name.
+ :type stmt: NodeNG
+ """
+ self.parent.set_local(name, stmt)
+
+ def nodes_of_class(self, klass, skip_klass=None):
+ """Get the nodes (including this one or below) of the given types.
+
+ :param klass: The types of node to search for.
+ :type klass: builtins.type or tuple(builtins.type)
+
+ :param skip_klass: The types of node to ignore. This is useful to ignore
+ subclasses of :attr:`klass`.
+ :type skip_klass: builtins.type or tuple(builtins.type)
+
+ :returns: The node of the given types.
+ :rtype: iterable(NodeNG)
+ """
+ if isinstance(self, klass):
+ yield self
+
+ if skip_klass is None:
+ for child_node in self.get_children():
+ yield from child_node.nodes_of_class(klass, skip_klass)
+
+ return
+
+ for child_node in self.get_children():
+ if isinstance(child_node, skip_klass):
+ continue
+ yield from child_node.nodes_of_class(klass, skip_klass)
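+
+    # Editor's illustration (not part of upstream astroid): collecting every
+    # ``Name`` node in a subtree while pruning nested functions::
+    #
+    #     names = list(module_node.nodes_of_class(Name, skip_klass=FunctionDef))
+    #
+    # ``module_node`` is a hypothetical Module node; ``FunctionDef`` lives in
+    # astroid.scoped_nodes.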
+
+ @decorators.cached
+ def _get_assign_nodes(self):
+ return []
+
+ def _get_name_nodes(self):
+ for child_node in self.get_children():
+ yield from child_node._get_name_nodes()
+
+ def _get_return_nodes_skip_functions(self):
+ yield from ()
+
+ def _get_yield_nodes_skip_lambdas(self):
+ yield from ()
+
+ def _infer_name(self, frame, name):
+ # overridden for ImportFrom, Import, Global, TryExcept and Arguments
+ pass
+
+ def _infer(self, context=None):
+ """we don't know how to resolve a statement by default"""
+ # this method is overridden by most concrete classes
+ raise exceptions.InferenceError(
+ "No inference function for {node!r}.", node=self, context=context
+ )
+
+ def inferred(self):
+ """Get a list of the inferred values.
+
+ .. seealso:: :ref:`inference`
+
+ :returns: The inferred values.
+ :rtype: list
+ """
+ return list(self.infer())
+
+ def instantiate_class(self):
+ """Instantiate an instance of the defined class.
+
+ .. note::
+
+ On anything other than a :class:`ClassDef` this will return self.
+
+ :returns: An instance of the defined class.
+ :rtype: object
+ """
+ return self
+
+ def has_base(self, node):
+ """Check if this node inherits from the given type.
+
+ :param node: The node defining the base to look for.
+ Usually this is a :class:`Name` node.
+ :type node: NodeNG
+ """
+ return False
+
+ def callable(self):
+ """Whether this node defines something that is callable.
+
+ :returns: True if this defines something that is callable,
+ False otherwise.
+ :rtype: bool
+ """
+ return False
+
+ def eq(self, value):
+ return False
+
+ def as_string(self):
+ """Get the source code that this node represents.
+
+ :returns: The source code.
+ :rtype: str
+ """
+ return as_string.to_code(self)
+
+ def repr_tree(
+ self,
+ ids=False,
+ include_linenos=False,
+ ast_state=False,
+ indent=" ",
+ max_depth=0,
+ max_width=80,
+ ):
+ """Get a string representation of the AST from this node.
+
+ :param ids: If true, includes the ids with the node type names.
+ :type ids: bool
+
+ :param include_linenos: If true, includes the line numbers and
+ column offsets.
+ :type include_linenos: bool
+
+ :param ast_state: If true, includes information derived from
+ the whole AST like local and global variables.
+ :type ast_state: bool
+
+ :param indent: A string to use to indent the output string.
+ :type indent: str
+
+ :param max_depth: If set to a positive integer, won't return
+ nodes deeper than max_depth in the string.
+ :type max_depth: int
+
+ :param max_width: Attempt to format the output string to stay
+ within this number of characters, but can exceed it under some
+ circumstances. Only positive integer values are valid, the default is 80.
+ :type max_width: int
+
+ :returns: The string representation of the AST.
+ :rtype: str
+ """
+ # pylint: disable=too-many-statements
+ @_singledispatch
+ def _repr_tree(node, result, done, cur_indent="", depth=1):
+ """Outputs a representation of a non-tuple/list, non-node that's
+ contained within an AST, including strings.
+ """
+ lines = pprint.pformat(
+ node, width=max(max_width - len(cur_indent), 1)
+ ).splitlines(True)
+ result.append(lines[0])
+ result.extend([cur_indent + line for line in lines[1:]])
+ return len(lines) != 1
+
+ # pylint: disable=unused-variable; doesn't understand singledispatch
+ @_repr_tree.register(tuple)
+ @_repr_tree.register(list)
+ def _repr_seq(node, result, done, cur_indent="", depth=1):
+ """Outputs a representation of a sequence that's contained within an AST."""
+ cur_indent += indent
+ result.append("[")
+ if not node:
+ broken = False
+ elif len(node) == 1:
+ broken = _repr_tree(node[0], result, done, cur_indent, depth)
+ elif len(node) == 2:
+ broken = _repr_tree(node[0], result, done, cur_indent, depth)
+ if not broken:
+ result.append(", ")
+ else:
+ result.append(",\n")
+ result.append(cur_indent)
+ broken = _repr_tree(node[1], result, done, cur_indent, depth) or broken
+ else:
+ result.append("\n")
+ result.append(cur_indent)
+ for child in node[:-1]:
+ _repr_tree(child, result, done, cur_indent, depth)
+ result.append(",\n")
+ result.append(cur_indent)
+ _repr_tree(node[-1], result, done, cur_indent, depth)
+ broken = True
+ result.append("]")
+ return broken
+
+ # pylint: disable=unused-variable; doesn't understand singledispatch
+ @_repr_tree.register(NodeNG)
+ def _repr_node(node, result, done, cur_indent="", depth=1):
+ """Outputs a strings representation of an astroid node."""
+ if node in done:
+ result.append(
+ indent
+                    + "<Recursion on %s with id=%s" % (type(node).__name__, id(node))
+                )
+                return False
+            done.add(node)
+            if max_depth and depth > max_depth:
+                result.append("...")
+                return False
+ depth += 1
+ cur_indent += indent
+ if ids:
+ result.append("%s<0x%x>(\n" % (type(node).__name__, id(node)))
+ else:
+ result.append("%s(" % type(node).__name__)
+ fields = []
+ if include_linenos:
+ fields.extend(("lineno", "col_offset"))
+ fields.extend(node._other_fields)
+ fields.extend(node._astroid_fields)
+ if ast_state:
+ fields.extend(node._other_other_fields)
+ if not fields:
+ broken = False
+ elif len(fields) == 1:
+ result.append("%s=" % fields[0])
+ broken = _repr_tree(
+ getattr(node, fields[0]), result, done, cur_indent, depth
+ )
+ else:
+ result.append("\n")
+ result.append(cur_indent)
+ for field in fields[:-1]:
+ result.append("%s=" % field)
+ _repr_tree(getattr(node, field), result, done, cur_indent, depth)
+ result.append(",\n")
+ result.append(cur_indent)
+ result.append("%s=" % fields[-1])
+ _repr_tree(getattr(node, fields[-1]), result, done, cur_indent, depth)
+ broken = True
+ result.append(")")
+ return broken
+
+ result = []
+ _repr_tree(self, result, set())
+ return "".join(result)
+
+ def bool_value(self):
+ """Determine the boolean value of this node.
+
+ The boolean value of a node can have three
+ possible values:
+
+ * False: For instance, empty data structures,
+ False, empty strings, instances which return
+ explicitly False from the __nonzero__ / __bool__
+ method.
+        * True: Most constructs are True by default:
+ classes, functions, modules etc
+ * Uninferable: The inference engine is uncertain of the
+ node's value.
+
+ :returns: The boolean value of this node.
+ :rtype: bool or Uninferable
+ """
+ return util.Uninferable
+
+ def op_precedence(self):
+ # Look up by class name or default to highest precedence
+ return OP_PRECEDENCE.get(self.__class__.__name__, len(OP_PRECEDENCE))
+
+ def op_left_associative(self):
+ # Everything is left associative except `**` and IfExp
+ return True
+
+
+class Statement(NodeNG):
+ """Statement node adding a few attributes"""
+
+ is_statement = True
+ """Whether this node indicates a statement.
+
+ :type: bool
+ """
+
+ def next_sibling(self):
+ """The next sibling statement node.
+
+ :returns: The next sibling statement node.
+ :rtype: NodeNG or None
+ """
+ stmts = self.parent.child_sequence(self)
+ index = stmts.index(self)
+ try:
+ return stmts[index + 1]
+ except IndexError:
+ pass
+
+ def previous_sibling(self):
+ """The previous sibling statement.
+
+ :returns: The previous sibling statement node.
+ :rtype: NodeNG or None
+ """
+ stmts = self.parent.child_sequence(self)
+ index = stmts.index(self)
+ if index >= 1:
+ return stmts[index - 1]
+ return None
+
+
+class _BaseContainer(
+ mixins.ParentAssignTypeMixin, NodeNG, bases.Instance, metaclass=abc.ABCMeta
+):
+ """Base class for Set, FrozenSet, Tuple and List."""
+
+ _astroid_fields = ("elts",)
+
+ def __init__(self, lineno=None, col_offset=None, parent=None):
+ """
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.elts = []
+ """The elements in the node.
+
+ :type: list(NodeNG)
+ """
+
+ super(_BaseContainer, self).__init__(lineno, col_offset, parent)
+
+ def postinit(self, elts):
+ """Do some setup after initialisation.
+
+        :param elts: The list of elements that the node contains.
+ :type elts: list(NodeNG)
+ """
+ self.elts = elts
+
+ @classmethod
+ def from_elements(cls, elts=None):
+ """Create a node of this type from the given list of elements.
+
+ :param elts: The list of elements that the node should contain.
+ :type elts: list(NodeNG)
+
+ :returns: A new node containing the given elements.
+ :rtype: NodeNG
+ """
+ node = cls()
+ if elts is None:
+ node.elts = []
+ else:
+ node.elts = [const_factory(e) if _is_const(e) else e for e in elts]
+ return node
+
+ def itered(self):
+ """An iterator over the elements this node contains.
+
+ :returns: The contents of this node.
+ :rtype: iterable(NodeNG)
+ """
+ return self.elts
+
+ def bool_value(self):
+ """Determine the boolean value of this node.
+
+ :returns: The boolean value of this node.
+ :rtype: bool or Uninferable
+ """
+ return bool(self.elts)
+
+ @abc.abstractmethod
+ def pytype(self):
+ """Get the name of the type that this node represents.
+
+ :returns: The name of the type.
+ :rtype: str
+ """
+
+ def get_children(self):
+ yield from self.elts
+
+
+class LookupMixIn:
+ """Mixin to look up a name in the right scope."""
+
+ @lru_cache(maxsize=None)
+ def lookup(self, name):
+ """Lookup where the given variable is assigned.
+
+ The lookup starts from self's scope. If self is not a frame itself
+ and the name is found in the inner frame locals, statements will be
+ filtered to remove ignorable statements according to self's location.
+
+ :param name: The name of the variable to find assignments for.
+ :type name: str
+
+ :returns: The scope node and the list of assignments associated to the
+ given name according to the scope where it has been found (locals,
+ globals or builtin).
+ :rtype: tuple(str, list(NodeNG))
+ """
+ return self.scope().scope_lookup(self, name)
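+
+    # Editor's illustration (not part of upstream astroid): for a Name node
+    # ``n`` referring to a local variable, ``n.lookup(n.name)`` typically
+    # returns ``(<enclosing FunctionDef>, [<AssignName nodes binding it>])``.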
+
+ def ilookup(self, name):
+ """Lookup the inferred values of the given variable.
+
+ :param name: The variable name to find values for.
+ :type name: str
+
+ :returns: The inferred values of the statements returned from
+ :meth:`lookup`.
+ :rtype: iterable
+ """
+ frame, stmts = self.lookup(name)
+ context = contextmod.InferenceContext()
+ return bases._infer_stmts(stmts, context, frame)
+
+ def _get_filtered_node_statements(self, nodes):
+ statements = [(node, node.statement()) for node in nodes]
+ # Next we check if we have ExceptHandlers that are parent
+ # of the underlying variable, in which case the last one survives
+ if len(statements) > 1 and all(
+ isinstance(stmt, ExceptHandler) for _, stmt in statements
+ ):
+ statements = [
+ (node, stmt) for node, stmt in statements if stmt.parent_of(self)
+ ]
+ return statements
+
+ def _filter_stmts(self, stmts, frame, offset):
+ """Filter the given list of statements to remove ignorable statements.
+
+ If self is not a frame itself and the name is found in the inner
+ frame locals, statements will be filtered to remove ignorable
+ statements according to self's location.
+
+ :param stmts: The statements to filter.
+ :type stmts: list(NodeNG)
+
+ :param frame: The frame that all of the given statements belong to.
+ :type frame: NodeNG
+
+ :param offset: The line offset to filter statements up to.
+ :type offset: int
+
+ :returns: The filtered statements.
+ :rtype: list(NodeNG)
+ """
+ # if offset == -1, my actual frame is not the inner frame but its parent
+ #
+ # class A(B): pass
+ #
+ # we need this to resolve B correctly
+ if offset == -1:
+ myframe = self.frame().parent.frame()
+ else:
+ myframe = self.frame()
+ # If the frame of this node is the same as the statement
+ # of this node, then the node is part of a class or
+ # a function definition and the frame of this node should be the
+ # upper frame, not the frame of the definition.
+ # For more information why this is important,
+ # see Pylint issue #295.
+ # For example, for 'b', the statement is the same
+ # as the frame / scope:
+ #
+ # def test(b=1):
+ # ...
+
+ if self.statement() is myframe and myframe.parent:
+ myframe = myframe.parent.frame()
+ mystmt = self.statement()
+ # line filtering if we are in the same frame
+ #
+ # take care: the node may be missing lineno information (this is the case for
+ # nodes inserted for living objects)
+ if myframe is frame and mystmt.fromlineno is not None:
+ assert mystmt.fromlineno is not None, mystmt
+ mylineno = mystmt.fromlineno + offset
+ else:
+ # disabling lineno filtering
+ mylineno = 0
+
+ _stmts = []
+ _stmt_parents = []
+ statements = self._get_filtered_node_statements(stmts)
+
+ for node, stmt in statements:
+ # line filtering is on and we have reached our location, break
+ if stmt.fromlineno > mylineno > 0:
+ break
+ # Ignore decorators with the same name as the
+ # decorated function
+ # Fixes issue #375
+ if mystmt is stmt and is_from_decorator(self):
+ continue
+ assert hasattr(node, "assign_type"), (
+ node,
+ node.scope(),
+ node.scope().locals,
+ )
+ assign_type = node.assign_type()
+ if node.has_base(self):
+ break
+
+ _stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt)
+ if done:
+ break
+
+ optional_assign = assign_type.optional_assign
+ if optional_assign and assign_type.parent_of(self):
+ # we are inside a loop, loop var assignment is hiding previous
+ # assignment
+ _stmts = [node]
+ _stmt_parents = [stmt.parent]
+ continue
+
+ if isinstance(assign_type, NamedExpr):
+ _stmts = [node]
+ continue
+
+ # XXX comment various branches below!!!
+ try:
+ pindex = _stmt_parents.index(stmt.parent)
+ except ValueError:
+ pass
+ else:
+ # we got a parent index, this means the currently visited node
+ # is at the same block level as a previously visited node
+ if _stmts[pindex].assign_type().parent_of(assign_type):
+ # both statements are not at the same block level
+ continue
+ # if currently visited node is following previously considered
+ # assignment and both are not exclusive, we can drop the
+ # previous one. For instance in the following code ::
+ #
+ # if a:
+ # x = 1
+ # else:
+ # x = 2
+ # print x
+ #
+ # we can remove neither x = 1 nor x = 2 when looking for 'x'
+ # of 'print x'; while in the following ::
+ #
+ # x = 1
+ # x = 2
+ # print x
+ #
+ # we can remove x = 1 when we see x = 2
+ #
+ # moreover, on loop assignment types, assignment won't
+ # necessarily be done if the loop has no iteration, so we don't
+ # want to clear previous assignments if any (hence the test on
+ # optional_assign)
+ if not (optional_assign or are_exclusive(_stmts[pindex], node)):
+ if (
+ # In the case of a partial function node, if the statement differs
+ # from the origin function it can be deleted; otherwise it must remain
+ # so that the call to the origin function can still be inferred.
+ not node.is_function
+ or node.qname() != "PartialFunction"
+ or node.name != _stmts[pindex].name
+ ):
+ del _stmt_parents[pindex]
+ del _stmts[pindex]
+ if isinstance(node, AssignName):
+ if not optional_assign and stmt.parent is mystmt.parent:
+ _stmts = []
+ _stmt_parents = []
+ elif isinstance(node, DelName):
+ _stmts = []
+ _stmt_parents = []
+ continue
+ if not are_exclusive(self, node):
+ _stmts.append(node)
+ _stmt_parents.append(stmt.parent)
+ return _stmts
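+ # Illustrative sketch (assumption, not upstream code) of the filtering effect
+ # described in the comments above, as observed through lookup():
+ #
+ #     n = astroid.extract_node("if a:\n    x = 1\nelse:\n    x = 2\nx #@")
+ #     n.lookup("x")[1]   # both AssignName nodes survive: the branches are
+ #                        # exclusive, so neither assignment hides the other
+ #
+ #     m = astroid.extract_node("x = 1\nx = 2\nx #@")
+ #     m.lookup("x")[1]   # only the second AssignName survives: sequential
+ #                        # assignments in the same block shadow earlier ones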
+
+
+# Name classes
+
+
+class AssignName(
+ mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG
+):
+ """Variation of :class:`ast.Assign` representing assignment to a name.
+
+ An :class:`AssignName` is the name of something that is assigned to.
+ This includes variables defined in a function signature or in a loop.
+
+ >>> node = astroid.extract_node('variable = range(10)')
+ >>> node
+ <Assign l.1 at 0x...>
+ >>> list(node.get_children())
+ [<AssignName.variable l.1 at 0x...>, <Call l.1 at 0x...>]
+ >>> list(node.get_children())[0].as_string()
+ 'variable'
+ """
+
+ _other_fields = ("name",)
+
+ def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param name: The name that is assigned to.
+ :type name: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.name = name
+ """The name that is assigned to.
+
+ :type: str or None
+ """
+
+ super(AssignName, self).__init__(lineno, col_offset, parent)
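+ # Illustrative sketch (not upstream code): the `name` attribute holds the
+ # assignment target as a plain string.
+ #
+ #     node = astroid.extract_node("variable = range(10)")
+ #     target = list(node.get_children())[0]   # the AssignName node
+ #     target.name                             # -> 'variable'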
+
+
+class DelName(
+ mixins.NoChildrenMixin, LookupMixIn, mixins.ParentAssignTypeMixin, NodeNG
+):
+ """Variation of :class:`ast.Delete` representing deletion of a name.
+
+ A :class:`DelName` is the name of something that is deleted.
+
+ >>> node = astroid.extract_node("del variable #@")
+ >>> list(node.get_children())
+ [<DelName.variable l.1 at 0x...>]
+ >>> list(node.get_children())[0].as_string()
+ 'variable'
+ """
+
+ _other_fields = ("name",)
+
+ def __init__(self, name=None, lineno=None, col_offset=None, parent=None):
+ """
+ :param name: The name that is being deleted.
+ :type name: str or None
+
+ :param lineno: The line that this node appears on in the source code.
+ :type lineno: int or None
+
+ :param col_offset: The column that this node appears on in the
+ source code.
+ :type col_offset: int or None
+
+ :param parent: The parent node in the syntax tree.
+ :type parent: NodeNG or None
+ """
+ self.name = name
+ """The name that is being deleted.
+
+ :type: str or None
+ """
+
+ super(DelName, self).__init__(lineno, col_offset, parent)
+
+
+class Name(mixins.NoChildrenMixin, LookupMixIn, NodeNG):
+ """Class representing an :class:`ast.Name` node.
+
+ A :class:`Name` node is something that is named, but not covered by
+ :class:`AssignName` or :class:`DelName`.
+
+ >>> node = astroid.extract_node('range(10)')
+ >>> node
+ <Call l.1 at 0x...>
+ >>> list(node.get_children())
+ [