Diffstat (limited to 'venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal')
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__init__.py | 78
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/__init__.cpython-37.pyc | bin 1859 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/build_env.cpython-37.pyc | bin 7472 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/cache.cpython-37.pyc | bin 7067 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/configuration.cpython-37.pyc | bin 9852 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/download.cpython-37.pyc | bin 21188 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/exceptions.cpython-37.pyc | bin 11759 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/index.cpython-37.pyc | bin 25269 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/locations.cpython-37.pyc | bin 4446 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pep425tags.cpython-37.pyc | bin 8178 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pyproject.cpython-37.pyc | bin 3195 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/resolve.cpython-37.pyc | bin 9113 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/wheel.cpython-37.pyc | bin 25946 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/build_env.py | 215
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cache.py | 224
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__init__.py | 4
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc | bin 290 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc | bin 5103 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc | bin 7831 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc | bin 16906 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc | bin 2376 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/parser.cpython-37.pyc | bin 8954 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc | bin 419 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/autocompletion.py | 152
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/base_command.py | 341
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/cmdoptions.py | 809
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/main_parser.py | 104
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/parser.py | 261
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/status_codes.py | 8
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__init__.py | 79
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc | bin 2518 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/check.cpython-37.pyc | bin 1343 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/completion.cpython-37.pyc | bin 3092 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc | bin 6444 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/download.cpython-37.pyc | bin 4731 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc | bin 2888 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/hash.cpython-37.pyc | bin 2082 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/help.cpython-37.pyc | bin 1258 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/install.cpython-37.pyc | bin 12475 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/list.cpython-37.pyc | bin 8724 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/search.cpython-37.pyc | bin 4324 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/show.cpython-37.pyc | bin 5905 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc | bin 2714 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc | bin 5017 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/check.py | 41
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/completion.py | 94
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/configuration.py | 227
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/download.py | 176
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/freeze.py | 96
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/hash.py | 57
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/help.py | 37
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/install.py | 566
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/list.py | 301
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/search.py | 135
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/show.py | 168
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/uninstall.py | 78
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/wheel.py | 186
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/configuration.py | 387
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/download.py | 971
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/exceptions.py | 274
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/index.py | 990
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/locations.py | 211
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__init__.py | 2
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/__init__.cpython-37.pyc | bin 278 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/candidate.cpython-37.pyc | bin 1327 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/format_control.cpython-37.pyc | bin 2281 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/index.cpython-37.pyc | bin 1182 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/link.cpython-37.pyc | bin 5016 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/candidate.py | 31
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/format_control.py | 73
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/index.py | 31
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/link.py | 163
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py | 0
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc | bin 214 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/check.cpython-37.pyc | bin 3644 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc | bin 5629 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc | bin 10290 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/check.py | 155
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/freeze.py | 247
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/prepare.py | 413
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pep425tags.py | 381
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pyproject.py | 171
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py | 77
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc | bin 1711 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc | bin 7631 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc | bin 9209 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc | bin 25046 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc | bin 6046 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc | bin 3160 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc | bin 17003 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py | 339
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py | 382
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py | 1021
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py | 197
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py | 88
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py | 596
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/resolve.py | 393
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py | 0
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc | bin 209 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc | bin 8056 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc | bin 6157 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc | bin 2583 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc | bin 1286 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc | bin 678 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc | bin 1697 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc | bin 3616 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc | bin 7843 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc | bin 25748 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc | bin 1958 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc | bin 4094 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc | bin 2630 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc | bin 404 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc | bin 4931 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc | bin 1353 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc | bin 12315 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py | 270
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py | 264
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py | 90
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py | 39
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py | 30
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py | 93
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py | 115
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py | 318
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py | 1040
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py | 40
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py | 164
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py | 85
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py | 8
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py | 155
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py | 29
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py | 441
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__init__.py | 534
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc | bin 15400 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc | bin 3817 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/git.cpython-37.pyc | bin 9459 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc | bin 3790 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc | bin 6002 -> 0 bytes
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/bazaar.py | 114
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/git.py | 369
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/mercurial.py | 103
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/subversion.py | 200
-rw-r--r-- venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/wheel.py was never a path; see below
142 files changed, 0 insertions, 17626 deletions
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__init__.py
deleted file mode 100644
index 276124d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__init__.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-from __future__ import absolute_import
-
-import locale
-import logging
-import os
-import warnings
-
-import sys
-
-# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
-# but if invoked (i.e. imported), it will issue a warning to stderr if socks
-# isn't available. requests unconditionally imports urllib3's socks contrib
-# module, triggering this warning. The warning breaks DEP-8 tests (because of
-# the stderr output) and is just plain annoying in normal usage. I don't want
-# to add socks as yet another dependency for pip, nor do I want to allow-stderr
-# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
-# be done before the import of pip.vcs.
-from pip._vendor.urllib3.exceptions import DependencyWarning
-warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa
-
-# We want to inject the use of SecureTransport as early as possible so that any
-# references or sessions or what have you are ensured to have it, however we
-# only want to do this in the case that we're running on macOS and the linked
-# OpenSSL is too old to handle TLSv1.2
-try:
-    import ssl
-except ImportError:
-    pass
-else:
-    # Checks for OpenSSL 1.0.1 on MacOS
-    if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
-        try:
-            from pip._vendor.urllib3.contrib import securetransport
-        except (ImportError, OSError):
-            pass
-        else:
-            securetransport.inject_into_urllib3()
-
-from pip._internal.cli.autocompletion import autocomplete
-from pip._internal.cli.main_parser import parse_command
-from pip._internal.commands import commands_dict
-from pip._internal.exceptions import PipError
-from pip._internal.utils import deprecation
-from pip._internal.vcs import git, mercurial, subversion, bazaar  # noqa
-from pip._vendor.urllib3.exceptions import InsecureRequestWarning
-
-logger = logging.getLogger(__name__)
-
-# Hide the InsecureRequestWarning from urllib3
-warnings.filterwarnings("ignore", category=InsecureRequestWarning)
-
-
-def main(args=None):
-    if args is None:
-        args = sys.argv[1:]
-
-    # Configure our deprecation warnings to be sent through loggers
-    deprecation.install_warning_logger()
-
-    autocomplete()
-
-    try:
-        cmd_name, cmd_args = parse_command(args)
-    except PipError as exc:
-        sys.stderr.write("ERROR: %s" % exc)
-        sys.stderr.write(os.linesep)
-        sys.exit(1)
-
-    # Needed for locale.getpreferredencoding(False) to work
-    # in pip._internal.utils.encoding.auto_decode
-    try:
-        locale.setlocale(locale.LC_ALL, '')
-    except locale.Error as e:
-        # setlocale can apparently crash if locales are uninitialized
-        logger.debug("Ignoring error %s when setting locale", e)
-    command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
-    return command.main(cmd_args)
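For reference, the main() function removed above was the dispatcher behind the pip console script in this release: it runs autocompletion, parses the subcommand, sets the locale, then hands off to the matching Command. A minimal sketch of driving it directly (hypothetical caller code, not part of the deleted tree):

    import sys
    from pip._internal import main

    # Equivalent to what the `pip` console-script wrapper does: forward
    # argv (minus the program name) into pip's command dispatcher.
    sys.exit(main(["list"]))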
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 652b82d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/build_env.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/build_env.cpython-37.pyc
deleted file mode 100644
index 9265cac..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/build_env.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/cache.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/cache.cpython-37.pyc
deleted file mode 100644
index 317369b..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/cache.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/configuration.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/configuration.cpython-37.pyc
deleted file mode 100644
index 3663edd..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/configuration.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/download.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/download.cpython-37.pyc
deleted file mode 100644
index b325759..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/download.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/exceptions.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/exceptions.cpython-37.pyc
deleted file mode 100644
index 1d80026..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/exceptions.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/index.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/index.cpython-37.pyc
deleted file mode 100644
index 33e8429..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/index.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/locations.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/locations.cpython-37.pyc
deleted file mode 100644
index a1dc40a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/locations.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pep425tags.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pep425tags.cpython-37.pyc
deleted file mode 100644
index 0d4bd32..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pep425tags.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pyproject.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pyproject.cpython-37.pyc
deleted file mode 100644
index e0bed54..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/pyproject.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/resolve.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/resolve.cpython-37.pyc
deleted file mode 100644
index ca401eb..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/resolve.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/wheel.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/wheel.cpython-37.pyc
deleted file mode 100644
index 037cf93..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__pycache__/wheel.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/build_env.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/build_env.py
deleted file mode 100644
index d744cc7..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/build_env.py
+++ /dev/null
@@ -1,215 +0,0 @@
-"""Build Environment used for isolation during sdist building
-"""
-
-import logging
-import os
-import sys
-import textwrap
-from collections import OrderedDict
-from distutils.sysconfig import get_python_lib
-from sysconfig import get_paths
-
-from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
-
-from pip import __file__ as pip_location
-from pip._internal.utils.misc import call_subprocess
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-
-if MYPY_CHECK_RUNNING:
- from typing import Tuple, Set, Iterable, Optional, List # noqa: F401
- from pip._internal.index import PackageFinder # noqa: F401
-
-logger = logging.getLogger(__name__)
-
-
-class _Prefix:
-
- def __init__(self, path):
- # type: (str) -> None
- self.path = path
- self.setup = False
- self.bin_dir = get_paths(
- 'nt' if os.name == 'nt' else 'posix_prefix',
- vars={'base': path, 'platbase': path}
- )['scripts']
- # Note: prefer distutils' sysconfig to get the
- # library paths so PyPy is correctly supported.
- purelib = get_python_lib(plat_specific=False, prefix=path)
- platlib = get_python_lib(plat_specific=True, prefix=path)
- if purelib == platlib:
- self.lib_dirs = [purelib]
- else:
- self.lib_dirs = [purelib, platlib]
-
-
-class BuildEnvironment(object):
- """Creates and manages an isolated environment to install build deps
- """
-
- def __init__(self):
- # type: () -> None
- self._temp_dir = TempDirectory(kind="build-env")
- self._temp_dir.create()
-
- self._prefixes = OrderedDict((
- (name, _Prefix(os.path.join(self._temp_dir.path, name)))
- for name in ('normal', 'overlay')
- ))
-
- self._bin_dirs = [] # type: List[str]
- self._lib_dirs = [] # type: List[str]
- for prefix in reversed(list(self._prefixes.values())):
- self._bin_dirs.append(prefix.bin_dir)
- self._lib_dirs.extend(prefix.lib_dirs)
-
- # Customize site to:
- # - ensure .pth files are honored
- # - prevent access to system site packages
- system_sites = {
- os.path.normcase(site) for site in (
- get_python_lib(plat_specific=False),
- get_python_lib(plat_specific=True),
- )
- }
- self._site_dir = os.path.join(self._temp_dir.path, 'site')
- if not os.path.exists(self._site_dir):
- os.mkdir(self._site_dir)
- with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
- fp.write(textwrap.dedent(
- '''
- import os, site, sys
-
- # First, drop system-sites related paths.
- original_sys_path = sys.path[:]
- known_paths = set()
- for path in {system_sites!r}:
- site.addsitedir(path, known_paths=known_paths)
- system_paths = set(
- os.path.normcase(path)
- for path in sys.path[len(original_sys_path):]
- )
- original_sys_path = [
- path for path in original_sys_path
- if os.path.normcase(path) not in system_paths
- ]
- sys.path = original_sys_path
-
- # Second, add lib directories.
- # ensuring .pth file are processed.
- for path in {lib_dirs!r}:
- assert not path in sys.path
- site.addsitedir(path)
- '''
- ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
-
- def __enter__(self):
- self._save_env = {
- name: os.environ.get(name, None)
- for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
- }
-
- path = self._bin_dirs[:]
- old_path = self._save_env['PATH']
- if old_path:
- path.extend(old_path.split(os.pathsep))
-
- pythonpath = [self._site_dir]
-
- os.environ.update({
- 'PATH': os.pathsep.join(path),
- 'PYTHONNOUSERSITE': '1',
- 'PYTHONPATH': os.pathsep.join(pythonpath),
- })
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- for varname, old_value in self._save_env.items():
- if old_value is None:
- os.environ.pop(varname, None)
- else:
- os.environ[varname] = old_value
-
- def cleanup(self):
- # type: () -> None
- self._temp_dir.cleanup()
-
- def check_requirements(self, reqs):
- # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
- """Return 2 sets:
- - conflicting requirements: set of (installed, wanted) reqs tuples
- - missing requirements: set of reqs
- """
- missing = set()
- conflicting = set()
- if reqs:
- ws = WorkingSet(self._lib_dirs)
- for req in reqs:
- try:
- if ws.find(Requirement.parse(req)) is None:
- missing.add(req)
- except VersionConflict as e:
- conflicting.add((str(e.args[0].as_requirement()),
- str(e.args[1])))
- return conflicting, missing
-
- def install_requirements(
- self,
- finder, # type: PackageFinder
- requirements, # type: Iterable[str]
- prefix_as_string, # type: str
- message # type: Optional[str]
- ):
- # type: (...) -> None
- prefix = self._prefixes[prefix_as_string]
- assert not prefix.setup
- prefix.setup = True
- if not requirements:
- return
- args = [
- sys.executable, os.path.dirname(pip_location), 'install',
- '--ignore-installed', '--no-user', '--prefix', prefix.path,
- '--no-warn-script-location',
- ] # type: List[str]
- if logger.getEffectiveLevel() <= logging.DEBUG:
- args.append('-v')
- for format_control in ('no_binary', 'only_binary'):
- formats = getattr(finder.format_control, format_control)
- args.extend(('--' + format_control.replace('_', '-'),
- ','.join(sorted(formats or {':none:'}))))
- if finder.index_urls:
- args.extend(['-i', finder.index_urls[0]])
- for extra_index in finder.index_urls[1:]:
- args.extend(['--extra-index-url', extra_index])
- else:
- args.append('--no-index')
- for link in finder.find_links:
- args.extend(['--find-links', link])
- for _, host, _ in finder.secure_origins:
- args.extend(['--trusted-host', host])
- if finder.allow_all_prereleases:
- args.append('--pre')
- args.append('--')
- args.extend(requirements)
- with open_spinner(message) as spinner:
- call_subprocess(args, show_stdout=False, spinner=spinner)
-
-
-class NoOpBuildEnvironment(BuildEnvironment):
- """A no-op drop-in replacement for BuildEnvironment
- """
-
- def __init__(self):
- pass
-
- def __enter__(self):
- pass
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- pass
-
- def cleanup(self):
- pass
-
- def install_requirements(self, finder, requirements, prefix, message):
- raise NotImplementedError()
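The BuildEnvironment above is what powers PEP 518 build isolation in this release: install_requirements() shells out to the enclosing pip to populate a temporary prefix, and entering the context manager points PATH/PYTHONPATH at that prefix while the generated sitecustomize.py hides the system site-packages. A hedged usage sketch; the `finder` here is assumed to be an already-configured PackageFinder, whose construction is elided:

    from pip._internal.build_env import BuildEnvironment

    build_env = BuildEnvironment()
    try:
        # `finder` is an assumption: building one needs a PipSession
        # and index options, as in RequirementCommand below.
        build_env.install_requirements(
            finder, ["setuptools>=40.8.0", "wheel"], "normal",
            "Installing build dependencies",
        )
        with build_env:  # patches PATH, PYTHONPATH, PYTHONNOUSERSITE
            conflicting, missing = build_env.check_requirements(
                ["setuptools>=40.8.0", "wheel"]
            )
    finally:
        build_env.cleanup()  # removes the temporary prefixes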
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cache.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cache.py
deleted file mode 100644
index eb295c4..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cache.py
+++ /dev/null
@@ -1,224 +0,0 @@
-"""Cache Management
-"""
-
-import errno
-import hashlib
-import logging
-import os
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.download import path_to_url
-from pip._internal.models.link import Link
-from pip._internal.utils.compat import expanduser
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.wheel import InvalidWheelFilename, Wheel
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, Set, List, Any # noqa: F401
- from pip._internal.index import FormatControl # noqa: F401
-
-logger = logging.getLogger(__name__)
-
-
-class Cache(object):
- """An abstract class - provides cache directories for data from links
-
-
- :param cache_dir: The root of the cache.
- :param format_control: An object of FormatControl class to limit
- binaries being read from the cache.
- :param allowed_formats: which formats of files the cache should store.
- ('binary' and 'source' are the only allowed values)
- """
-
- def __init__(self, cache_dir, format_control, allowed_formats):
- # type: (str, FormatControl, Set[str]) -> None
- super(Cache, self).__init__()
- self.cache_dir = expanduser(cache_dir) if cache_dir else None
- self.format_control = format_control
- self.allowed_formats = allowed_formats
-
- _valid_formats = {"source", "binary"}
- assert self.allowed_formats.union(_valid_formats) == _valid_formats
-
- def _get_cache_path_parts(self, link):
- # type: (Link) -> List[str]
- """Get parts of part that must be os.path.joined with cache_dir
- """
-
- # We want to generate an url to use as our cache key, we don't want to
- # just re-use the URL because it might have other items in the fragment
- # and we don't care about those.
- key_parts = [link.url_without_fragment]
- if link.hash_name is not None and link.hash is not None:
- key_parts.append("=".join([link.hash_name, link.hash]))
- key_url = "#".join(key_parts)
-
- # Encode our key url with sha224, we'll use this because it has similar
- # security properties to sha256, but with a shorter total output (and
- # thus less secure). However the differences don't make a lot of
- # difference for our use case here.
- hashed = hashlib.sha224(key_url.encode()).hexdigest()
-
- # We want to nest the directories some to prevent having a ton of top
- # level directories where we might run out of sub directories on some
- # FS.
- parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
-
- return parts
-
- def _get_candidates(self, link, package_name):
- # type: (Link, Optional[str]) -> List[Any]
- can_not_cache = (
- not self.cache_dir or
- not package_name or
- not link
- )
- if can_not_cache:
- return []
-
- canonical_name = canonicalize_name(package_name)
- formats = self.format_control.get_allowed_formats(
- canonical_name
- )
- if not self.allowed_formats.intersection(formats):
- return []
-
- root = self.get_path_for_link(link)
- try:
- return os.listdir(root)
- except OSError as err:
- if err.errno in {errno.ENOENT, errno.ENOTDIR}:
- return []
- raise
-
- def get_path_for_link(self, link):
- # type: (Link) -> str
- """Return a directory to store cached items in for link.
- """
- raise NotImplementedError()
-
- def get(self, link, package_name):
- # type: (Link, Optional[str]) -> Link
- """Returns a link to a cached item if it exists, otherwise returns the
- passed link.
- """
- raise NotImplementedError()
-
- def _link_for_candidate(self, link, candidate):
- # type: (Link, str) -> Link
- root = self.get_path_for_link(link)
- path = os.path.join(root, candidate)
-
- return Link(path_to_url(path))
-
- def cleanup(self):
- # type: () -> None
- pass
-
-
-class SimpleWheelCache(Cache):
- """A cache of wheels for future installs.
- """
-
- def __init__(self, cache_dir, format_control):
- # type: (str, FormatControl) -> None
- super(SimpleWheelCache, self).__init__(
- cache_dir, format_control, {"binary"}
- )
-
- def get_path_for_link(self, link):
- # type: (Link) -> str
- """Return a directory to store cached wheels for link
-
- Because there are M wheels for any one sdist, we provide a directory
- to cache them in, and then consult that directory when looking up
- cache hits.
-
- We only insert things into the cache if they have plausible version
- numbers, so that we don't contaminate the cache with things that were
- not unique. E.g. ./package might have dozens of installs done for it
- and build a version of 0.0...and if we built and cached a wheel, we'd
- end up using the same wheel even if the source has been edited.
-
- :param link: The link of the sdist for which this will cache wheels.
- """
- parts = self._get_cache_path_parts(link)
-
- # Store wheels within the root cache_dir
- return os.path.join(self.cache_dir, "wheels", *parts)
-
- def get(self, link, package_name):
- # type: (Link, Optional[str]) -> Link
- candidates = []
-
- for wheel_name in self._get_candidates(link, package_name):
- try:
- wheel = Wheel(wheel_name)
- except InvalidWheelFilename:
- continue
- if not wheel.supported():
- # Built for a different python/arch/etc
- continue
- candidates.append((wheel.support_index_min(), wheel_name))
-
- if not candidates:
- return link
-
- return self._link_for_candidate(link, min(candidates)[1])
-
-
-class EphemWheelCache(SimpleWheelCache):
- """A SimpleWheelCache that creates it's own temporary cache directory
- """
-
- def __init__(self, format_control):
- # type: (FormatControl) -> None
- self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
- self._temp_dir.create()
-
- super(EphemWheelCache, self).__init__(
- self._temp_dir.path, format_control
- )
-
- def cleanup(self):
- # type: () -> None
- self._temp_dir.cleanup()
-
-
-class WheelCache(Cache):
- """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
-
- This Cache allows for gracefully degradation, using the ephem wheel cache
- when a certain link is not found in the simple wheel cache first.
- """
-
- def __init__(self, cache_dir, format_control):
- # type: (str, FormatControl) -> None
- super(WheelCache, self).__init__(
- cache_dir, format_control, {'binary'}
- )
- self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
- self._ephem_cache = EphemWheelCache(format_control)
-
- def get_path_for_link(self, link):
- # type: (Link) -> str
- return self._wheel_cache.get_path_for_link(link)
-
- def get_ephem_path_for_link(self, link):
- # type: (Link) -> str
- return self._ephem_cache.get_path_for_link(link)
-
- def get(self, link, package_name):
- # type: (Link, Optional[str]) -> Link
- retval = self._wheel_cache.get(link, package_name)
- if retval is link:
- retval = self._ephem_cache.get(link, package_name)
- return retval
-
- def cleanup(self):
- # type: () -> None
- self._wheel_cache.cleanup()
- self._ephem_cache.cleanup()
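The cache layout encoded in _get_cache_path_parts() above is easy to reproduce on its own: the key is the link URL without its fragment (plus an optional hash pin), digested with SHA-224 and split into nested two-character directories. A standalone sketch of the same derivation:

    import hashlib

    def cache_path_parts(url_without_fragment, hash_name=None, hash_value=None):
        # Mirrors Cache._get_cache_path_parts() from the file above.
        key_parts = [url_without_fragment]
        if hash_name is not None and hash_value is not None:
            key_parts.append("=".join([hash_name, hash_value]))
        key_url = "#".join(key_parts)
        hashed = hashlib.sha224(key_url.encode()).hexdigest()
        # Nest as aa/bb/cc/rest so no single directory grows unboundedly.
        return [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    print("/".join(cache_path_parts("https://example.com/pkg-1.0.tar.gz")))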
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__init__.py
deleted file mode 100644
index e589bb9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-"""Subpackage containing all of pip's command line interface related code
-"""
-
-# This file intentionally does not import submodules
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 2a9f1c0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc
deleted file mode 100644
index 5368708..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc
deleted file mode 100644
index 0a13ac6..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/base_command.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc
deleted file mode 100644
index 79e406d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc
deleted file mode 100644
index b4248f2..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/parser.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/parser.cpython-37.pyc
deleted file mode 100644
index cb35c86..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/parser.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc
deleted file mode 100644
index 217d9ca..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/autocompletion.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/autocompletion.py
deleted file mode 100644
index 0a04199..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/autocompletion.py
+++ /dev/null
@@ -1,152 +0,0 @@
-"""Logic that powers autocompletion installed by ``pip completion``.
-"""
-
-import optparse
-import os
-import sys
-
-from pip._internal.cli.main_parser import create_main_parser
-from pip._internal.commands import commands_dict, get_summaries
-from pip._internal.utils.misc import get_installed_distributions
-
-
-def autocomplete():
- """Entry Point for completion of main and subcommand options.
- """
- # Don't complete if user hasn't sourced bash_completion file.
- if 'PIP_AUTO_COMPLETE' not in os.environ:
- return
- cwords = os.environ['COMP_WORDS'].split()[1:]
- cword = int(os.environ['COMP_CWORD'])
- try:
- current = cwords[cword - 1]
- except IndexError:
- current = ''
-
- subcommands = [cmd for cmd, summary in get_summaries()]
- options = []
- # subcommand
- try:
- subcommand_name = [w for w in cwords if w in subcommands][0]
- except IndexError:
- subcommand_name = None
-
- parser = create_main_parser()
- # subcommand options
- if subcommand_name:
- # special case: 'help' subcommand has no options
- if subcommand_name == 'help':
- sys.exit(1)
- # special case: list locally installed dists for show and uninstall
- should_list_installed = (
- subcommand_name in ['show', 'uninstall'] and
- not current.startswith('-')
- )
- if should_list_installed:
- installed = []
- lc = current.lower()
- for dist in get_installed_distributions(local_only=True):
- if dist.key.startswith(lc) and dist.key not in cwords[1:]:
- installed.append(dist.key)
- # if there are no dists installed, fall back to option completion
- if installed:
- for dist in installed:
- print(dist)
- sys.exit(1)
-
- subcommand = commands_dict[subcommand_name]()
-
- for opt in subcommand.parser.option_list_all:
- if opt.help != optparse.SUPPRESS_HELP:
- for opt_str in opt._long_opts + opt._short_opts:
- options.append((opt_str, opt.nargs))
-
- # filter out previously specified options from available options
- prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
- options = [(x, v) for (x, v) in options if x not in prev_opts]
- # filter options by current input
- options = [(k, v) for k, v in options if k.startswith(current)]
- # get completion type given cwords and available subcommand options
- completion_type = get_path_completion_type(
- cwords, cword, subcommand.parser.option_list_all,
- )
- # get completion files and directories if ``completion_type`` is
- # ``<file>``, ``<dir>`` or ``<path>``
- if completion_type:
- options = auto_complete_paths(current, completion_type)
- options = ((opt, 0) for opt in options)
- for option in options:
- opt_label = option[0]
- # append '=' to options which require args
- if option[1] and option[0][:2] == "--":
- opt_label += '='
- print(opt_label)
- else:
- # show main parser options only when necessary
-
- opts = [i.option_list for i in parser.option_groups]
- opts.append(parser.option_list)
- opts = (o for it in opts for o in it)
- if current.startswith('-'):
- for opt in opts:
- if opt.help != optparse.SUPPRESS_HELP:
- subcommands += opt._long_opts + opt._short_opts
- else:
- # get completion type given cwords and all available options
- completion_type = get_path_completion_type(cwords, cword, opts)
- if completion_type:
- subcommands = auto_complete_paths(current, completion_type)
-
- print(' '.join([x for x in subcommands if x.startswith(current)]))
- sys.exit(1)
-
-
-def get_path_completion_type(cwords, cword, opts):
- """Get the type of path completion (``file``, ``dir``, ``path`` or None)
-
- :param cwords: same as the environmental variable ``COMP_WORDS``
- :param cword: same as the environmental variable ``COMP_CWORD``
- :param opts: The available options to check
- :return: path completion type (``file``, ``dir``, ``path`` or None)
- """
- if cword < 2 or not cwords[cword - 2].startswith('-'):
- return
- for opt in opts:
- if opt.help == optparse.SUPPRESS_HELP:
- continue
- for o in str(opt).split('/'):
- if cwords[cword - 2].split('=')[0] == o:
- if not opt.metavar or any(
- x in ('path', 'file', 'dir')
- for x in opt.metavar.split('/')):
- return opt.metavar
-
-
-def auto_complete_paths(current, completion_type):
- """If ``completion_type`` is ``file`` or ``path``, list all regular files
- and directories starting with ``current``; otherwise only list directories
- starting with ``current``.
-
- :param current: The word to be completed
- :param completion_type: path completion type(`file`, `path` or `dir`)i
- :return: A generator of regular files and/or directories
- """
- directory, filename = os.path.split(current)
- current_path = os.path.abspath(directory)
- # Don't complete paths if they can't be accessed
- if not os.access(current_path, os.R_OK):
- return
- filename = os.path.normcase(filename)
- # list all files that start with ``filename``
- file_list = (x for x in os.listdir(current_path)
- if os.path.normcase(x).startswith(filename))
- for f in file_list:
- opt = os.path.join(current_path, f)
- comp_file = os.path.normcase(os.path.join(directory, f))
- # complete regular files when there is not ``<dir>`` after option
- # complete directories when there is ``<file>``, ``<path>`` or
- # ``<dir>``after option
- if completion_type != 'dir' and os.path.isfile(opt):
- yield comp_file
- elif os.path.isdir(opt):
- yield os.path.join(comp_file, '')
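autocomplete() above implements the protocol set up by the shell hook from `pip completion`: the hook exports PIP_AUTO_COMPLETE=1 plus COMP_WORDS/COMP_CWORD, and pip prints matching candidates to stdout before exiting. A sketch that simulates the protocol without a shell, assuming this pip is importable as `python -m pip`:

    import os
    import subprocess
    import sys

    env = dict(
        os.environ,
        PIP_AUTO_COMPLETE="1",
        COMP_WORDS="pip inst",  # the partial command line
        COMP_CWORD="1",         # index of the word being completed
    )
    out = subprocess.run(
        [sys.executable, "-m", "pip"], env=env,
        stdout=subprocess.PIPE, universal_newlines=True,
    ).stdout
    print(out)  # expected to include "install"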
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/base_command.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/base_command.py
deleted file mode 100644
index 3ceea49..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/base_command.py
+++ /dev/null
@@ -1,341 +0,0 @@
-"""Base Command class, and related routines"""
-from __future__ import absolute_import, print_function
-
-import logging
-import logging.config
-import optparse
-import os
-import platform
-import sys
-import traceback
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.parser import (
- ConfigOptionParser, UpdatingDefaultsHelpFormatter,
-)
-from pip._internal.cli.status_codes import (
- ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
- VIRTUALENV_NOT_FOUND,
-)
-from pip._internal.download import PipSession
-from pip._internal.exceptions import (
- BadCommand, CommandError, InstallationError, PreviousBuildDirError,
- UninstallationError,
-)
-from pip._internal.index import PackageFinder
-from pip._internal.locations import running_under_virtualenv
-from pip._internal.req.constructors import (
- install_req_from_editable, install_req_from_line,
-)
-from pip._internal.req.req_file import parse_requirements
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
-from pip._internal.utils.misc import (
- get_prog, normalize_path, redact_password_from_url,
-)
-from pip._internal.utils.outdated import pip_version_check
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, List, Tuple, Any # noqa: F401
- from optparse import Values # noqa: F401
- from pip._internal.cache import WheelCache # noqa: F401
- from pip._internal.req.req_set import RequirementSet # noqa: F401
-
-__all__ = ['Command']
-
-logger = logging.getLogger(__name__)
-
-
-class Command(object):
- name = None # type: Optional[str]
- usage = None # type: Optional[str]
- hidden = False # type: bool
- ignore_require_venv = False # type: bool
-
- def __init__(self, isolated=False):
- # type: (bool) -> None
- parser_kw = {
- 'usage': self.usage,
- 'prog': '%s %s' % (get_prog(), self.name),
- 'formatter': UpdatingDefaultsHelpFormatter(),
- 'add_help_option': False,
- 'name': self.name,
- 'description': self.__doc__,
- 'isolated': isolated,
- }
-
- self.parser = ConfigOptionParser(**parser_kw)
-
- # Commands should add options to this option group
- optgroup_name = '%s Options' % self.name.capitalize()
- self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
-
- # Add the general options
- gen_opts = cmdoptions.make_option_group(
- cmdoptions.general_group,
- self.parser,
- )
- self.parser.add_option_group(gen_opts)
-
- def run(self, options, args):
- # type: (Values, List[Any]) -> Any
- raise NotImplementedError
-
- def _build_session(self, options, retries=None, timeout=None):
- # type: (Values, Optional[int], Optional[int]) -> PipSession
- session = PipSession(
- cache=(
- normalize_path(os.path.join(options.cache_dir, "http"))
- if options.cache_dir else None
- ),
- retries=retries if retries is not None else options.retries,
- insecure_hosts=options.trusted_hosts,
- )
-
- # Handle custom ca-bundles from the user
- if options.cert:
- session.verify = options.cert
-
- # Handle SSL client certificate
- if options.client_cert:
- session.cert = options.client_cert
-
- # Handle timeouts
- if options.timeout or timeout:
- session.timeout = (
- timeout if timeout is not None else options.timeout
- )
-
- # Handle configured proxies
- if options.proxy:
- session.proxies = {
- "http": options.proxy,
- "https": options.proxy,
- }
-
- # Determine if we can prompt the user for authentication or not
- session.auth.prompting = not options.no_input
-
- return session
-
- def parse_args(self, args):
- # type: (List[str]) -> Tuple
- # factored out for testability
- return self.parser.parse_args(args)
-
- def main(self, args):
- # type: (List[str]) -> int
- options, args = self.parse_args(args)
-
- # Set verbosity so that it can be used elsewhere.
- self.verbosity = options.verbose - options.quiet
-
- level_number = setup_logging(
- verbosity=self.verbosity,
- no_color=options.no_color,
- user_log_file=options.log,
- )
-
- if sys.version_info[:2] == (3, 4):
- deprecated(
- "Python 3.4 support has been deprecated. pip 19.1 will be the "
- "last one supporting it. Please upgrade your Python as Python "
- "3.4 won't be maintained after March 2019 (cf PEP 429).",
- replacement=None,
- gone_in='19.2',
- )
- elif sys.version_info[:2] == (2, 7):
- message = (
- "A future version of pip will drop support for Python 2.7."
- )
- if platform.python_implementation() == "CPython":
- message = (
- "Python 2.7 will reach the end of its life on January "
- "1st, 2020. Please upgrade your Python as Python 2.7 "
- "won't be maintained after that date. "
- ) + message
- deprecated(message, replacement=None, gone_in=None)
-
- # TODO: Try to get these passing down from the command?
- # without resorting to os.environ to hold these.
- # This also affects isolated builds and it should.
-
- if options.no_input:
- os.environ['PIP_NO_INPUT'] = '1'
-
- if options.exists_action:
- os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
-
- if options.require_venv and not self.ignore_require_venv:
- # If a venv is required check if it can really be found
- if not running_under_virtualenv():
- logger.critical(
- 'Could not find an activated virtualenv (required).'
- )
- sys.exit(VIRTUALENV_NOT_FOUND)
-
- try:
- status = self.run(options, args)
- # FIXME: all commands should return an exit status
- # and when it is done, isinstance is not needed anymore
- if isinstance(status, int):
- return status
- except PreviousBuildDirError as exc:
- logger.critical(str(exc))
- logger.debug('Exception information:', exc_info=True)
-
- return PREVIOUS_BUILD_DIR_ERROR
- except (InstallationError, UninstallationError, BadCommand) as exc:
- logger.critical(str(exc))
- logger.debug('Exception information:', exc_info=True)
-
- return ERROR
- except CommandError as exc:
- logger.critical('ERROR: %s', exc)
- logger.debug('Exception information:', exc_info=True)
-
- return ERROR
- except BrokenStdoutLoggingError:
- # Bypass our logger and write any remaining messages to stderr
- # because stdout no longer works.
- print('ERROR: Pipe to stdout was broken', file=sys.stderr)
- if level_number <= logging.DEBUG:
- traceback.print_exc(file=sys.stderr)
-
- return ERROR
- except KeyboardInterrupt:
- logger.critical('Operation cancelled by user')
- logger.debug('Exception information:', exc_info=True)
-
- return ERROR
- except BaseException:
- logger.critical('Exception:', exc_info=True)
-
- return UNKNOWN_ERROR
- finally:
- allow_version_check = (
- # Does this command have the index_group options?
- hasattr(options, "no_index") and
- # Is this command allowed to perform this check?
- not (options.disable_pip_version_check or options.no_index)
- )
- # Check if we're using the latest version of pip available
- if allow_version_check:
- session = self._build_session(
- options,
- retries=0,
- timeout=min(5, options.timeout)
- )
- with session:
- pip_version_check(session, options)
-
- # Shutdown the logging module
- logging.shutdown()
-
- return SUCCESS
-
-
-class RequirementCommand(Command):
-
- @staticmethod
- def populate_requirement_set(requirement_set, # type: RequirementSet
- args, # type: List[str]
- options, # type: Values
- finder, # type: PackageFinder
- session, # type: PipSession
- name, # type: str
- wheel_cache # type: Optional[WheelCache]
- ):
- # type: (...) -> None
- """
- Marshal cmd line args into a requirement set.
- """
- # NOTE: As a side-effect, options.require_hashes and
- # requirement_set.require_hashes may be updated
-
- for filename in options.constraints:
- for req_to_add in parse_requirements(
- filename,
- constraint=True, finder=finder, options=options,
- session=session, wheel_cache=wheel_cache):
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
-
- for req in args:
- req_to_add = install_req_from_line(
- req, None, isolated=options.isolated_mode,
- use_pep517=options.use_pep517,
- wheel_cache=wheel_cache
- )
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
-
- for req in options.editables:
- req_to_add = install_req_from_editable(
- req,
- isolated=options.isolated_mode,
- use_pep517=options.use_pep517,
- wheel_cache=wheel_cache
- )
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
-
- for filename in options.requirements:
- for req_to_add in parse_requirements(
- filename,
- finder=finder, options=options, session=session,
- wheel_cache=wheel_cache,
- use_pep517=options.use_pep517):
- req_to_add.is_direct = True
- requirement_set.add_requirement(req_to_add)
- # If --require-hashes was a line in a requirements file, tell
- # RequirementSet about it:
- requirement_set.require_hashes = options.require_hashes
-
- if not (args or options.editables or options.requirements):
- opts = {'name': name}
- if options.find_links:
- raise CommandError(
- 'You must give at least one requirement to %(name)s '
- '(maybe you meant "pip %(name)s %(links)s"?)' %
- dict(opts, links=' '.join(options.find_links)))
- else:
- raise CommandError(
- 'You must give at least one requirement to %(name)s '
- '(see "pip help %(name)s")' % opts)
-
- def _build_package_finder(
- self,
- options, # type: Values
- session, # type: PipSession
- platform=None, # type: Optional[str]
- python_versions=None, # type: Optional[List[str]]
- abi=None, # type: Optional[str]
- implementation=None # type: Optional[str]
- ):
- # type: (...) -> PackageFinder
- """
- Create a package finder appropriate to this requirement command.
- """
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index:
- logger.debug(
- 'Ignoring indexes: %s',
- ','.join(redact_password_from_url(url) for url in index_urls),
- )
- index_urls = []
-
- return PackageFinder(
- find_links=options.find_links,
- format_control=options.format_control,
- index_urls=index_urls,
- trusted_hosts=options.trusted_hosts,
- allow_all_prereleases=options.pre,
- session=session,
- platform=platform,
- versions=python_versions,
- abi=abi,
- implementation=implementation,
- prefer_binary=options.prefer_binary,
- )
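Command above is the template-method base for every pip subcommand: subclasses set name/usage and override run(), while main() supplies option parsing, logging setup, and exception-to-status-code mapping. A minimal subclass sketch (HelloCommand is hypothetical and is not registered in commands_dict):

    import sys

    from pip._internal.cli.base_command import Command
    from pip._internal.cli.status_codes import SUCCESS

    class HelloCommand(Command):
        """Print a greeting."""
        name = "hello"
        usage = "%prog"
        summary = "Say hello"  # the attribute `pip help` listings read

        def run(self, options, args):
            print("hello from a pip command")
            return SUCCESS

    # main() parses args, configures logging, then dispatches to run().
    sys.exit(HelloCommand().main([]))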
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/cmdoptions.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/cmdoptions.py
deleted file mode 100644
index 5cf5ee9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/cmdoptions.py
+++ /dev/null
@@ -1,809 +0,0 @@
-"""
-shared options and groups
-
-The principle here is to define options once, but *not* instantiate them
-globally. One reason being that options with action='append' can carry state
-between parses. pip parses general options twice internally, and shouldn't
-pass on state. To be consistent, all options will follow this design.
-
-"""
-from __future__ import absolute_import
-
-import textwrap
-import warnings
-from distutils.util import strtobool
-from functools import partial
-from optparse import SUPPRESS_HELP, Option, OptionGroup
-
-from pip._internal.exceptions import CommandError
-from pip._internal.locations import USER_CACHE_DIR, src_prefix
-from pip._internal.models.format_control import FormatControl
-from pip._internal.models.index import PyPI
-from pip._internal.utils.hashes import STRONG_HASHES
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import BAR_TYPES
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Callable, Dict, List, Optional, Union # noqa: F401
- from optparse import OptionParser, Values # noqa: F401
- from pip._internal.cli.parser import ConfigOptionParser # noqa: F401
-
-
-def raise_option_error(parser, option, msg):
- """
- Raise an option parsing error using parser.error().
-
- Args:
- parser: an OptionParser instance.
- option: an Option instance.
- msg: the error text.
- """
- msg = '{} error: {}'.format(option, msg)
- msg = textwrap.fill(' '.join(msg.split()))
- parser.error(msg)
-
-
-def make_option_group(group, parser):
- # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
- """
- Return an OptionGroup object
- group -- assumed to be dict with 'name' and 'options' keys
- parser -- an optparse Parser
- """
- option_group = OptionGroup(parser, group['name'])
- for option in group['options']:
- option_group.add_option(option())
- return option_group
-
-
-def check_install_build_global(options, check_options=None):
- # type: (Values, Optional[Values]) -> None
- """Disable wheels if per-setup.py call options are set.
-
- :param options: The OptionParser options to update.
- :param check_options: The options to check, if not supplied defaults to
- options.
- """
- if check_options is None:
- check_options = options
-
- def getname(n):
- return getattr(check_options, n, None)
- names = ["build_options", "global_options", "install_options"]
- if any(map(getname, names)):
- control = options.format_control
- control.disallow_binaries()
- warnings.warn(
- 'Disabling all use of wheels due to the use of --build-option '
- '/ --global-option / --install-option.', stacklevel=2,
- )
-
-
-def check_dist_restriction(options, check_target=False):
- # type: (Values, bool) -> None
- """Function for determining if custom platform options are allowed.
-
- :param options: The OptionParser options.
- :param check_target: Whether or not to check if --target is being used.
- """
- dist_restriction_set = any([
- options.python_version,
- options.platform,
- options.abi,
- options.implementation,
- ])
-
- binary_only = FormatControl(set(), {':all:'})
- sdist_dependencies_allowed = (
- options.format_control != binary_only and
- not options.ignore_dependencies
- )
-
- # Installations or downloads using dist restrictions must not combine
- # source distributions and dist-specific wheels, as they are not
- # guaranteed to be locally compatible.
- if dist_restriction_set and sdist_dependencies_allowed:
- raise CommandError(
- "When restricting platform and interpreter constraints using "
- "--python-version, --platform, --abi, or --implementation, "
- "either --no-deps must be set, or --only-binary=:all: must be "
- "set and --no-binary must not be set (or must be set to "
- ":none:)."
- )
-
- if check_target:
- if dist_restriction_set and not options.target_dir:
- raise CommandError(
- "Can not use any platform or abi specific options unless "
- "installing via '--target'"
- )
-
-
-###########
-# options #
-###########
-
-help_ = partial(
- Option,
- '-h', '--help',
- dest='help',
- action='help',
- help='Show help.',
-) # type: Callable[..., Option]
-
-isolated_mode = partial(
- Option,
- "--isolated",
- dest="isolated_mode",
- action="store_true",
- default=False,
- help=(
- "Run pip in an isolated mode, ignoring environment variables and user "
- "configuration."
- ),
-) # type: Callable[..., Option]
-
-require_virtualenv = partial(
- Option,
- # Run only if inside a virtualenv, bail if not.
- '--require-virtualenv', '--require-venv',
- dest='require_venv',
- action='store_true',
- default=False,
- help=SUPPRESS_HELP
-) # type: Callable[..., Option]
-
-verbose = partial(
- Option,
- '-v', '--verbose',
- dest='verbose',
- action='count',
- default=0,
- help='Give more output. Option is additive, and can be used up to 3 times.'
-) # type: Callable[..., Option]
-
-no_color = partial(
- Option,
- '--no-color',
- dest='no_color',
- action='store_true',
- default=False,
- help="Suppress colored output",
-) # type: Callable[..., Option]
-
-version = partial(
- Option,
- '-V', '--version',
- dest='version',
- action='store_true',
- help='Show version and exit.',
-) # type: Callable[..., Option]
-
-quiet = partial(
- Option,
- '-q', '--quiet',
- dest='quiet',
- action='count',
- default=0,
- help=(
- 'Give less output. Option is additive, and can be used up to 3'
- ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
- ' levels).'
- ),
-) # type: Callable[..., Option]
-
-progress_bar = partial(
- Option,
- '--progress-bar',
- dest='progress_bar',
- type='choice',
- choices=list(BAR_TYPES.keys()),
- default='on',
- help=(
- 'Specify type of progress to be displayed [' +
- '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
- ),
-) # type: Callable[..., Option]
-
-log = partial(
- Option,
- "--log", "--log-file", "--local-log",
- dest="log",
- metavar="path",
- help="Path to a verbose appending log."
-) # type: Callable[..., Option]
-
-no_input = partial(
- Option,
- # Don't ask for input
- '--no-input',
- dest='no_input',
- action='store_true',
- default=False,
- help=SUPPRESS_HELP
-) # type: Callable[..., Option]
-
-proxy = partial(
- Option,
- '--proxy',
- dest='proxy',
- type='str',
- default='',
- help="Specify a proxy in the form [user:passwd@]proxy.server:port."
-) # type: Callable[..., Option]
-
-retries = partial(
- Option,
- '--retries',
- dest='retries',
- type='int',
- default=5,
- help="Maximum number of retries each connection should attempt "
- "(default %default times).",
-) # type: Callable[..., Option]
-
-timeout = partial(
- Option,
- '--timeout', '--default-timeout',
- metavar='sec',
- dest='timeout',
- type='float',
- default=15,
- help='Set the socket timeout (default %default seconds).',
-) # type: Callable[..., Option]
-
-skip_requirements_regex = partial(
- Option,
- # A regex to be used to skip requirements
- '--skip-requirements-regex',
- dest='skip_requirements_regex',
- type='str',
- default='',
- help=SUPPRESS_HELP,
-) # type: Callable[..., Option]
-
-
-def exists_action():
- # type: () -> Option
- return Option(
- # Option when path already exists
- '--exists-action',
- dest='exists_action',
- type='choice',
- choices=['s', 'i', 'w', 'b', 'a'],
- default=[],
- action='append',
- metavar='action',
- help="Default action when a path already exists: "
- "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).",
- )
-
-
-cert = partial(
- Option,
- '--cert',
- dest='cert',
- type='str',
- metavar='path',
- help="Path to alternate CA bundle.",
-) # type: Callable[..., Option]
-
-client_cert = partial(
- Option,
- '--client-cert',
- dest='client_cert',
- type='str',
- default=None,
- metavar='path',
- help="Path to SSL client certificate, a single file containing the "
- "private key and the certificate in PEM format.",
-) # type: Callable[..., Option]
-
-index_url = partial(
- Option,
- '-i', '--index-url', '--pypi-url',
- dest='index_url',
- metavar='URL',
- default=PyPI.simple_url,
- help="Base URL of Python Package Index (default %default). "
- "This should point to a repository compliant with PEP 503 "
- "(the simple repository API) or a local directory laid out "
- "in the same format.",
-) # type: Callable[..., Option]
-
-
-def extra_index_url():
- return Option(
- '--extra-index-url',
- dest='extra_index_urls',
- metavar='URL',
- action='append',
- default=[],
- help="Extra URLs of package indexes to use in addition to "
- "--index-url. Should follow the same rules as "
- "--index-url.",
- )
-
-
-no_index = partial(
- Option,
- '--no-index',
- dest='no_index',
- action='store_true',
- default=False,
- help='Ignore package index (only looking at --find-links URLs instead).',
-) # type: Callable[..., Option]
-
-
-def find_links():
- # type: () -> Option
- return Option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='url',
- help="If a url or path to an html file, then parse for links to "
- "archives. If a local path or file:// url that's a directory, "
- "then look for archives in the directory listing.",
- )
-
-
-def trusted_host():
- # type: () -> Option
- return Option(
- "--trusted-host",
- dest="trusted_hosts",
- action="append",
- metavar="HOSTNAME",
- default=[],
- help="Mark this host as trusted, even though it does not have valid "
- "or any HTTPS.",
- )
-
-
-def constraints():
- # type: () -> Option
- return Option(
- '-c', '--constraint',
- dest='constraints',
- action='append',
- default=[],
- metavar='file',
- help='Constrain versions using the given constraints file. '
- 'This option can be used multiple times.'
- )
-
-
-def requirements():
- # type: () -> Option
- return Option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='file',
- help='Install from the given requirements file. '
- 'This option can be used multiple times.'
- )
-
-
-def editable():
- # type: () -> Option
- return Option(
- '-e', '--editable',
- dest='editables',
- action='append',
- default=[],
- metavar='path/url',
- help=('Install a project in editable mode (i.e. setuptools '
- '"develop mode") from a local project path or a VCS url.'),
- )
-
-
-src = partial(
- Option,
- '--src', '--source', '--source-dir', '--source-directory',
- dest='src_dir',
- metavar='dir',
- default=src_prefix,
- help='Directory to check out editable projects into. '
- 'The default in a virtualenv is "<venv path>/src". '
- 'The default for global installs is "<current dir>/src".'
-) # type: Callable[..., Option]
-
-
-def _get_format_control(values, option):
- # type: (Values, Option) -> Any
- """Get a format_control object."""
- return getattr(values, option.dest)
-
-
-def _handle_no_binary(option, opt_str, value, parser):
- # type: (Option, str, str, OptionParser) -> None
- existing = _get_format_control(parser.values, option)
- FormatControl.handle_mutual_excludes(
- value, existing.no_binary, existing.only_binary,
- )
-
-
-def _handle_only_binary(option, opt_str, value, parser):
- # type: (Option, str, str, OptionParser) -> None
- existing = _get_format_control(parser.values, option)
- FormatControl.handle_mutual_excludes(
- value, existing.only_binary, existing.no_binary,
- )
-
-
-def no_binary():
- # type: () -> Option
- format_control = FormatControl(set(), set())
- return Option(
- "--no-binary", dest="format_control", action="callback",
- callback=_handle_no_binary, type="str",
- default=format_control,
- help="Do not use binary packages. Can be supplied multiple times, and "
- "each time adds to the existing value. Accepts either :all: to "
- "disable all binary packages, :none: to empty the set, or one or "
- "more package names with commas between them. Note that some "
- "packages are tricky to compile and may fail to install when "
- "this option is used on them.",
- )
-
-
-def only_binary():
- # type: () -> Option
- format_control = FormatControl(set(), set())
- return Option(
- "--only-binary", dest="format_control", action="callback",
- callback=_handle_only_binary, type="str",
- default=format_control,
- help="Do not use source packages. Can be supplied multiple times, and "
- "each time adds to the existing value. Accepts either :all: to "
- "disable all source packages, :none: to empty the set, or one or "
- "more package names with commas between them. Packages without "
- "binary distributions will fail to install when this option is "
- "used on them.",
- )
-
-
-platform = partial(
- Option,
- '--platform',
- dest='platform',
- metavar='platform',
- default=None,
- help=("Only use wheels compatible with <platform>. "
- "Defaults to the platform of the running system."),
-) # type: Callable[..., Option]
-
-
-python_version = partial(
- Option,
- '--python-version',
- dest='python_version',
- metavar='python_version',
- default=None,
- help=("Only use wheels compatible with Python "
- "interpreter version <version>. If not specified, then the "
- "current system interpreter minor version is used. A major "
- "version (e.g. '2') can be specified to match all "
- "minor revs of that major version. A minor version "
- "(e.g. '34') can also be specified."),
-) # type: Callable[..., Option]
-
-
-implementation = partial(
- Option,
- '--implementation',
- dest='implementation',
- metavar='implementation',
- default=None,
- help=("Only use wheels compatible with Python "
- "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
- " or 'ip'. If not specified, then the current "
- "interpreter implementation is used. Use 'py' to force "
- "implementation-agnostic wheels."),
-) # type: Callable[..., Option]
-
-
-abi = partial(
- Option,
- '--abi',
- dest='abi',
- metavar='abi',
- default=None,
- help=("Only use wheels compatible with Python "
- "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
- "current interpreter abi tag is used. Generally "
- "you will need to specify --implementation, "
- "--platform, and --python-version when using "
- "this option."),
-) # type: Callable[..., Option]
-
-
-def prefer_binary():
- # type: () -> Option
- return Option(
- "--prefer-binary",
- dest="prefer_binary",
- action="store_true",
- default=False,
- help="Prefer older binary packages over newer source packages."
- )
-
-
-cache_dir = partial(
- Option,
- "--cache-dir",
- dest="cache_dir",
- default=USER_CACHE_DIR,
- metavar="dir",
- help="Store the cache data in <dir>."
-) # type: Callable[..., Option]
-
-
-def no_cache_dir_callback(option, opt, value, parser):
- """
- Process a value provided for the --no-cache-dir option.
-
- This is an optparse.Option callback for the --no-cache-dir option.
- """
- # The value argument will be None if --no-cache-dir is passed via the
- # command-line, since the option doesn't accept arguments. However,
- # the value can be non-None if the option is triggered e.g. by an
- # environment variable, like PIP_NO_CACHE_DIR=true.
- if value is not None:
- # Then parse the string value to get argument error-checking.
- try:
- strtobool(value)
- except ValueError as exc:
- raise_option_error(parser, option=option, msg=str(exc))
-
- # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
- # converted to 0 (like "false" or "no") caused cache_dir to be disabled
- # rather than enabled (logic would say the latter). Thus, we disable
- # the cache directory not just on values that parse to True, but (for
- # backwards compatibility reasons) also on values that parse to False.
- # In other words, always set it to False if the option is provided in
- # some (valid) form.
- parser.values.cache_dir = False
-
-
-no_cache = partial(
- Option,
- "--no-cache-dir",
- dest="cache_dir",
- action="callback",
- callback=no_cache_dir_callback,
- help="Disable the cache.",
-) # type: Callable[..., Option]
-
-no_deps = partial(
- Option,
- '--no-deps', '--no-dependencies',
- dest='ignore_dependencies',
- action='store_true',
- default=False,
- help="Don't install package dependencies.",
-) # type: Callable[..., Option]
-
-build_dir = partial(
- Option,
- '-b', '--build', '--build-dir', '--build-directory',
- dest='build_dir',
- metavar='dir',
- help='Directory to unpack packages into and build in. Note that '
- 'an initial build still takes place in a temporary directory. '
- 'The location of temporary directories can be controlled by setting '
- 'the TMPDIR environment variable (TEMP on Windows) appropriately. '
- 'When passed, build directories are not cleaned in case of failures.'
-) # type: Callable[..., Option]
-
-ignore_requires_python = partial(
- Option,
- '--ignore-requires-python',
- dest='ignore_requires_python',
- action='store_true',
- help='Ignore the Requires-Python information.'
-) # type: Callable[..., Option]
-
-no_build_isolation = partial(
- Option,
- '--no-build-isolation',
- dest='build_isolation',
- action='store_false',
- default=True,
- help='Disable isolation when building a modern source distribution. '
- 'Build dependencies specified by PEP 518 must be already installed '
- 'if this option is used.'
-) # type: Callable[..., Option]
-
-
-def no_use_pep517_callback(option, opt, value, parser):
- """
- Process a value provided for the --no-use-pep517 option.
-
- This is an optparse.Option callback for the --no-use-pep517 option.
- """
- # Since --no-use-pep517 doesn't accept arguments, the value argument
- # will be None if --no-use-pep517 is passed via the command-line.
- # However, the value can be non-None if the option is triggered e.g.
- # by an environment variable, for example "PIP_NO_USE_PEP517=true".
- if value is not None:
- msg = """A value was passed for --no-use-pep517,
- probably using either the PIP_NO_USE_PEP517 environment variable
- or the "no-use-pep517" config file option. Use an appropriate value
- of the PIP_USE_PEP517 environment variable or the "use-pep517"
- config file option instead.
- """
- raise_option_error(parser, option=option, msg=msg)
-
- # Otherwise, --no-use-pep517 was passed via the command-line.
- parser.values.use_pep517 = False
-
-
-use_pep517 = partial(
- Option,
- '--use-pep517',
- dest='use_pep517',
- action='store_true',
- default=None,
- help='Use PEP 517 for building source distributions '
- '(use --no-use-pep517 to force legacy behaviour).'
-) # type: Any
-
-no_use_pep517 = partial(
- Option,
- '--no-use-pep517',
- dest='use_pep517',
- action='callback',
- callback=no_use_pep517_callback,
- default=None,
- help=SUPPRESS_HELP
-) # type: Any
-
-install_options = partial(
- Option,
- '--install-option',
- dest='install_options',
- action='append',
- metavar='options',
- help="Extra arguments to be supplied to the setup.py install "
- "command (use like --install-option=\"--install-scripts=/usr/local/"
- "bin\"). Use multiple --install-option options to pass multiple "
- "options to setup.py install. If you are using an option with a "
- "directory path, be sure to use absolute path.",
-) # type: Callable[..., Option]
-
-global_options = partial(
- Option,
- '--global-option',
- dest='global_options',
- action='append',
- metavar='options',
- help="Extra global options to be supplied to the setup.py "
- "call before the install command.",
-) # type: Callable[..., Option]
-
-no_clean = partial(
- Option,
- '--no-clean',
- action='store_true',
- default=False,
- help="Don't clean up build directories."
-) # type: Callable[..., Option]
-
-pre = partial(
- Option,
- '--pre',
- action='store_true',
- default=False,
- help="Include pre-release and development versions. By default, "
- "pip only finds stable versions.",
-) # type: Callable[..., Option]
-
-disable_pip_version_check = partial(
- Option,
- "--disable-pip-version-check",
- dest="disable_pip_version_check",
- action="store_true",
- default=False,
- help="Don't periodically check PyPI to determine whether a new version "
- "of pip is available for download. Implied with --no-index.",
-) # type: Callable[..., Option]
-
-
-# Deprecated, Remove later
-always_unzip = partial(
- Option,
- '-Z', '--always-unzip',
- dest='always_unzip',
- action='store_true',
- help=SUPPRESS_HELP,
-) # type: Callable[..., Option]
-
-
-def _merge_hash(option, opt_str, value, parser):
- # type: (Option, str, str, OptionParser) -> None
- """Given a value spelled "algo:digest", append the digest to a list
- pointed to in a dict by the algo name."""
- if not parser.values.hashes:
- parser.values.hashes = {} # type: ignore
- try:
- algo, digest = value.split(':', 1)
- except ValueError:
- parser.error('Arguments to %s must be a hash name '
- 'followed by a value, like --hash=sha256:abcde...' %
- opt_str)
- if algo not in STRONG_HASHES:
- parser.error('Allowed hash algorithms for %s are %s.' %
- (opt_str, ', '.join(STRONG_HASHES)))
- parser.values.hashes.setdefault(algo, []).append(digest)
-
-
-hash = partial(
- Option,
- '--hash',
- # Hash values eventually end up in InstallRequirement.hashes due to
- # __dict__ copying in process_line().
- dest='hashes',
- action='callback',
- callback=_merge_hash,
- type='string',
- help="Verify that the package's archive matches this "
- 'hash before installing. Example: --hash=sha256:abcdef...',
-) # type: Callable[..., Option]
-
-
-require_hashes = partial(
- Option,
- '--require-hashes',
- dest='require_hashes',
- action='store_true',
- default=False,
- help='Require a hash to check each requirement against, for '
- 'repeatable installs. This option is implied when any package in a '
- 'requirements file has a --hash option.',
-) # type: Callable[..., Option]
-
-
-##########
-# groups #
-##########
-
-general_group = {
- 'name': 'General Options',
- 'options': [
- help_,
- isolated_mode,
- require_virtualenv,
- verbose,
- version,
- quiet,
- log,
- no_input,
- proxy,
- retries,
- timeout,
- skip_requirements_regex,
- exists_action,
- trusted_host,
- cert,
- client_cert,
- cache_dir,
- no_cache,
- disable_pip_version_check,
- no_color,
- ]
-} # type: Dict[str, Any]
-
-index_group = {
- 'name': 'Package Index Options',
- 'options': [
- index_url,
- extra_index_url,
- no_index,
- find_links,
- ]
-} # type: Dict[str, Any]
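
The module docstring above warns that options with action='append' can carry state between parses; that is why the append-style options in this file (find_links, constraints, requirements, editable, ...) are plain functions that build a fresh Option per call. A self-contained sketch of the leak that design avoids (the shared default below is deliberately wrong, for illustration):

from optparse import Option, OptionParser

shared_default = []  # one list shared by every Option built below

def leaky_find_links():
    return Option('-f', '--find-links', dest='find_links',
                  action='append', default=shared_default)

parser_a = OptionParser()
parser_a.add_option(leaky_find_links())
parser_a.parse_args(['-f', 'https://mirror.example/simple'])

parser_b = OptionParser()
parser_b.add_option(leaky_find_links())
opts_b, _ = parser_b.parse_args([])
print(opts_b.find_links)  # ['https://mirror.example/simple'] -- leaked

find_links() above sidesteps this by writing default=[] inside the function body, so each call builds an Option starting from an empty list.
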
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/main_parser.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/main_parser.py
deleted file mode 100644
index b17c749..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/main_parser.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""A single place for constructing and exposing the main parser
-"""
-
-import os
-import sys
-
-from pip import __version__
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.parser import (
- ConfigOptionParser, UpdatingDefaultsHelpFormatter,
-)
-from pip._internal.commands import (
- commands_dict, get_similar_commands, get_summaries,
-)
-from pip._internal.exceptions import CommandError
-from pip._internal.utils.misc import get_prog
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Tuple, List # noqa: F401
-
-
-__all__ = ["create_main_parser", "parse_command"]
-
-
-def create_main_parser():
- # type: () -> ConfigOptionParser
- """Creates and returns the main parser for pip's CLI
- """
-
- parser_kw = {
- 'usage': '\n%prog <command> [options]',
- 'add_help_option': False,
- 'formatter': UpdatingDefaultsHelpFormatter(),
- 'name': 'global',
- 'prog': get_prog(),
- }
-
- parser = ConfigOptionParser(**parser_kw)
- parser.disable_interspersed_args()
-
- pip_pkg_dir = os.path.abspath(os.path.join(
- os.path.dirname(__file__), "..", "..",
- ))
- parser.version = 'pip %s from %s (python %s)' % (
- __version__, pip_pkg_dir, sys.version[:3],
- )
-
- # add the general options
- gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
- parser.add_option_group(gen_opts)
-
- # so the help formatter knows
- parser.main = True # type: ignore
-
- # create command listing for description
- command_summaries = get_summaries()
- description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
- parser.description = '\n'.join(description)
-
- return parser
-
-
-def parse_command(args):
- # type: (List[str]) -> Tuple[str, List[str]]
- parser = create_main_parser()
-
- # Note: parser calls disable_interspersed_args(), so the result of this
- # call is to split the initial args into the general options before the
- # subcommand and everything else.
- # For example:
- # args: ['--timeout=5', 'install', '--user', 'INITools']
- #   general_options: ['--timeout=5']
- # args_else: ['install', '--user', 'INITools']
- general_options, args_else = parser.parse_args(args)
-
- # --version
- if general_options.version:
- sys.stdout.write(parser.version) # type: ignore
- sys.stdout.write(os.linesep)
- sys.exit()
-
- # pip || pip help -> print_help()
- if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
- parser.print_help()
- sys.exit()
-
- # the subcommand name
- cmd_name = args_else[0]
-
- if cmd_name not in commands_dict:
- guess = get_similar_commands(cmd_name)
-
- msg = ['unknown command "%s"' % cmd_name]
- if guess:
- msg.append('maybe you meant "%s"' % guess)
-
- raise CommandError(' - '.join(msg))
-
- # all the args without the subcommand
- cmd_args = args[:]
- cmd_args.remove(cmd_name)
-
- return cmd_name, cmd_args
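
The worked example in the comments of parse_command() is easy to reproduce with a throwaway parser; disable_interspersed_args() makes optparse stop at the first positional argument, leaving the subcommand and its own flags untouched:

from optparse import OptionParser

parser = OptionParser()
parser.add_option('--timeout', type='float', default=15)
parser.disable_interspersed_args()

general_options, args_else = parser.parse_args(
    ['--timeout=5', 'install', '--user', 'INITools'])
print(general_options.timeout)  # 5.0
print(args_else)                # ['install', '--user', 'INITools']
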
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/parser.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/parser.py
deleted file mode 100644
index e1eaac4..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/parser.py
+++ /dev/null
@@ -1,261 +0,0 @@
-"""Base option parser setup"""
-from __future__ import absolute_import
-
-import logging
-import optparse
-import sys
-import textwrap
-from distutils.util import strtobool
-
-from pip._vendor.six import string_types
-
-from pip._internal.cli.status_codes import UNKNOWN_ERROR
-from pip._internal.configuration import Configuration, ConfigurationError
-from pip._internal.utils.compat import get_terminal_size
-
-logger = logging.getLogger(__name__)
-
-
-class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
- """A prettier/less verbose help formatter for optparse."""
-
- def __init__(self, *args, **kwargs):
- # help position must be aligned with __init__.parseopts.description
- kwargs['max_help_position'] = 30
- kwargs['indent_increment'] = 1
- kwargs['width'] = get_terminal_size()[0] - 2
- optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
-
- def format_option_strings(self, option):
- return self._format_option_strings(option, ' <%s>', ', ')
-
- def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
- """
- Return a comma-separated list of option strings and metavars.
-
- :param option: tuple of (short opt, long opt), e.g.: ('-f', '--format')
- :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
- :param optsep: separator
- """
- opts = []
-
- if option._short_opts:
- opts.append(option._short_opts[0])
- if option._long_opts:
- opts.append(option._long_opts[0])
- if len(opts) > 1:
- opts.insert(1, optsep)
-
- if option.takes_value():
- metavar = option.metavar or option.dest.lower()
- opts.append(mvarfmt % metavar.lower())
-
- return ''.join(opts)
-
- def format_heading(self, heading):
- if heading == 'Options':
- return ''
- return heading + ':\n'
-
- def format_usage(self, usage):
- """
- Ensure there is only one newline between usage and the first heading
- if there is no description.
- """
- msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
- return msg
-
- def format_description(self, description):
- # leave full control over description to us
- if description:
- if hasattr(self.parser, 'main'):
- label = 'Commands'
- else:
- label = 'Description'
- # some doc strings have initial newlines, some don't
- description = description.lstrip('\n')
- # some doc strings have final newlines and spaces, some don't
- description = description.rstrip()
- # dedent, then reindent
- description = self.indent_lines(textwrap.dedent(description), " ")
- description = '%s:\n%s\n' % (label, description)
- return description
- else:
- return ''
-
- def format_epilog(self, epilog):
- # leave full control over epilog to us
- if epilog:
- return epilog
- else:
- return ''
-
- def indent_lines(self, text, indent):
- new_lines = [indent + line for line in text.split('\n')]
- return "\n".join(new_lines)
-
-
-class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
- """Custom help formatter for use in ConfigOptionParser.
-
- This updates the defaults before expanding them, allowing
- them to show up correctly in the help listing.
- """
-
- def expand_default(self, option):
- if self.parser is not None:
- self.parser._update_defaults(self.parser.defaults)
- return optparse.IndentedHelpFormatter.expand_default(self, option)
-
-
-class CustomOptionParser(optparse.OptionParser):
-
- def insert_option_group(self, idx, *args, **kwargs):
- """Insert an OptionGroup at a given position."""
- group = self.add_option_group(*args, **kwargs)
-
- self.option_groups.pop()
- self.option_groups.insert(idx, group)
-
- return group
-
- @property
- def option_list_all(self):
- """Get a list of all options, including those in option groups."""
- res = self.option_list[:]
- for i in self.option_groups:
- res.extend(i.option_list)
-
- return res
-
-
-class ConfigOptionParser(CustomOptionParser):
- """Custom option parser which updates its defaults by checking the
- configuration files and environment variables"""
-
- def __init__(self, *args, **kwargs):
- self.name = kwargs.pop('name')
-
- isolated = kwargs.pop("isolated", False)
- self.config = Configuration(isolated)
-
- assert self.name
- optparse.OptionParser.__init__(self, *args, **kwargs)
-
- def check_default(self, option, key, val):
- try:
- return option.check_value(key, val)
- except optparse.OptionValueError as exc:
- print("An error occurred during configuration: %s" % exc)
- sys.exit(3)
-
- def _get_ordered_configuration_items(self):
- # Configuration gives keys in an unordered manner. Order them.
- override_order = ["global", self.name, ":env:"]
-
- # Pool the options into different groups
- section_items = {name: [] for name in override_order}
- for section_key, val in self.config.items():
- # ignore empty values
- if not val:
- logger.debug(
- "Ignoring configuration key '%s' as it's value is empty.",
- section_key
- )
- continue
-
- section, key = section_key.split(".", 1)
- if section in override_order:
- section_items[section].append((key, val))
-
- # Yield each group in their override order
- for section in override_order:
- for key, val in section_items[section]:
- yield key, val
-
- def _update_defaults(self, defaults):
- """Updates the given defaults with values from the config files and
- the environ. Does a little special handling for certain types of
- options (lists)."""
-
- # Accumulate complex default state.
- self.values = optparse.Values(self.defaults)
- late_eval = set()
- # Then set the options with those values
- for key, val in self._get_ordered_configuration_items():
- # '--' because configuration supports only long names
- option = self.get_option('--' + key)
-
- # Ignore options not present in this parser. E.g. non-globals put
- # in [global] by users that want them to apply to all applicable
- # commands.
- if option is None:
- continue
-
- if option.action in ('store_true', 'store_false', 'count'):
- try:
- val = strtobool(val)
- except ValueError:
- error_msg = invalid_config_error_message(
- option.action, key, val
- )
- self.error(error_msg)
-
- elif option.action == 'append':
- val = val.split()
- val = [self.check_default(option, key, v) for v in val]
- elif option.action == 'callback':
- late_eval.add(option.dest)
- opt_str = option.get_opt_string()
- val = option.convert_value(opt_str, val)
- # From take_action
- args = option.callback_args or ()
- kwargs = option.callback_kwargs or {}
- option.callback(option, opt_str, val, self, *args, **kwargs)
- else:
- val = self.check_default(option, key, val)
-
- defaults[option.dest] = val
-
- for key in late_eval:
- defaults[key] = getattr(self.values, key)
- self.values = None
- return defaults
-
- def get_default_values(self):
- """Overriding to make updating the defaults after instantiation of
- the option parser possible, _update_defaults() does the dirty work."""
- if not self.process_default_values:
- # Old, pre-Optik 1.5 behaviour.
- return optparse.Values(self.defaults)
-
- # Load the configuration, or error out in case of an error
- try:
- self.config.load()
- except ConfigurationError as err:
- self.exit(UNKNOWN_ERROR, str(err))
-
- defaults = self._update_defaults(self.defaults.copy()) # ours
- for option in self._get_all_options():
- default = defaults.get(option.dest)
- if isinstance(default, string_types):
- opt_str = option.get_opt_string()
- defaults[option.dest] = option.check_value(opt_str, default)
- return optparse.Values(defaults)
-
- def error(self, msg):
- self.print_usage(sys.stderr)
- self.exit(UNKNOWN_ERROR, "%s\n" % msg)
-
-
-def invalid_config_error_message(action, key, val):
- """Returns a better error message when invalid configuration option
- is provided."""
- if action in ('store_true', 'store_false'):
- return ("{0} is not a valid value for {1} option, "
- "please specify a boolean value like yes/no, "
- "true/false or 1/0 instead.").format(val, key)
-
- return ("{0} is not a valid value for {1} option, "
- "please specify a numerical value like 1/0 "
- "instead.").format(val, key)
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/status_codes.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/status_codes.py
deleted file mode 100644
index 275360a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/status_codes.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from __future__ import absolute_import
-
-SUCCESS = 0
-ERROR = 1
-UNKNOWN_ERROR = 2
-VIRTUALENV_NOT_FOUND = 3
-PREVIOUS_BUILD_DIR_ERROR = 4
-NO_MATCHES_FOUND = 23
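
These constants are the return values of each command's run() method (check.py below returns 1 on broken requirements) and become the process exit status; a sketch of that contract, assuming the base command's convention that a falsy return means success:

import sys

SUCCESS, ERROR = 0, 1  # mirrors the constants above

def run_sketch(found_problems):
    return ERROR if found_problems else SUCCESS

sys.exit(run_sketch(found_problems=False))  # shell sees exit status 0
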
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__init__.py
deleted file mode 100644
index c7d1da3..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__init__.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
-Package containing all pip commands
-"""
-from __future__ import absolute_import
-
-from pip._internal.commands.completion import CompletionCommand
-from pip._internal.commands.configuration import ConfigurationCommand
-from pip._internal.commands.download import DownloadCommand
-from pip._internal.commands.freeze import FreezeCommand
-from pip._internal.commands.hash import HashCommand
-from pip._internal.commands.help import HelpCommand
-from pip._internal.commands.list import ListCommand
-from pip._internal.commands.check import CheckCommand
-from pip._internal.commands.search import SearchCommand
-from pip._internal.commands.show import ShowCommand
-from pip._internal.commands.install import InstallCommand
-from pip._internal.commands.uninstall import UninstallCommand
-from pip._internal.commands.wheel import WheelCommand
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import List, Type # noqa: F401
- from pip._internal.cli.base_command import Command # noqa: F401
-
-commands_order = [
- InstallCommand,
- DownloadCommand,
- UninstallCommand,
- FreezeCommand,
- ListCommand,
- ShowCommand,
- CheckCommand,
- ConfigurationCommand,
- SearchCommand,
- WheelCommand,
- HashCommand,
- CompletionCommand,
- HelpCommand,
-] # type: List[Type[Command]]
-
-commands_dict = {c.name: c for c in commands_order}
-
-
-def get_summaries(ordered=True):
- """Yields sorted (command name, command summary) tuples."""
-
- if ordered:
- cmditems = _sort_commands(commands_dict, commands_order)
- else:
- cmditems = commands_dict.items()
-
- for name, command_class in cmditems:
- yield (name, command_class.summary)
-
-
-def get_similar_commands(name):
- """Command name auto-correct."""
- from difflib import get_close_matches
-
- name = name.lower()
-
- close_commands = get_close_matches(name, commands_dict.keys())
-
- if close_commands:
- return close_commands[0]
- else:
- return False
-
-
-def _sort_commands(cmddict, order):
- def keyfn(key):
- try:
- return order.index(key[1])
- except ValueError:
- # unordered items should come last
- return 0xff
-
- return sorted(cmddict.items(), key=keyfn)
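
get_similar_commands() above is a thin wrapper over difflib; its behaviour is easy to see in isolation (command list abbreviated):

from difflib import get_close_matches

commands = ['install', 'download', 'uninstall', 'freeze', 'list', 'show']
print(get_close_matches('instal', commands))  # ['install', 'uninstall']
print(get_close_matches('xyzzy', commands))   # [] -> caller returns False
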
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 81f2d6a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/check.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/check.cpython-37.pyc
deleted file mode 100644
index 4cf332a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/check.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/completion.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/completion.cpython-37.pyc
deleted file mode 100644
index 2f10e9a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/completion.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc
deleted file mode 100644
index b33e6c9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/configuration.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/download.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/download.cpython-37.pyc
deleted file mode 100644
index 8c02ceb..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/download.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc
deleted file mode 100644
index 351ab9f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/freeze.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/hash.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/hash.cpython-37.pyc
deleted file mode 100644
index 2b6183f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/hash.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/help.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/help.cpython-37.pyc
deleted file mode 100644
index 0eaefaf..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/help.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/install.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/install.cpython-37.pyc
deleted file mode 100644
index 4587dd5..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/install.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/list.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/list.cpython-37.pyc
deleted file mode 100644
index 219febe..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/list.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/search.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/search.cpython-37.pyc
deleted file mode 100644
index 7809bf0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/search.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/show.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/show.cpython-37.pyc
deleted file mode 100644
index bb07126..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/show.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc
deleted file mode 100644
index b528ae4..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc
deleted file mode 100644
index 8198357..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__pycache__/wheel.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/check.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/check.py
deleted file mode 100644
index 801cecc..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/check.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import logging
-
-from pip._internal.cli.base_command import Command
-from pip._internal.operations.check import (
- check_package_set, create_package_set_from_installed,
-)
-
-logger = logging.getLogger(__name__)
-
-
-class CheckCommand(Command):
- """Verify installed packages have compatible dependencies."""
- name = 'check'
- usage = """
- %prog [options]"""
- summary = 'Verify installed packages have compatible dependencies.'
-
- def run(self, options, args):
- package_set, parsing_probs = create_package_set_from_installed()
- missing, conflicting = check_package_set(package_set)
-
- for project_name in missing:
- version = package_set[project_name].version
- for dependency in missing[project_name]:
- logger.info(
- "%s %s requires %s, which is not installed.",
- project_name, version, dependency[0],
- )
-
- for project_name in conflicting:
- version = package_set[project_name].version
- for dep_name, dep_version, req in conflicting[project_name]:
- logger.info(
- "%s %s has requirement %s, but you have %s %s.",
- project_name, version, req, dep_name, dep_version,
- )
-
- if missing or conflicting or parsing_probs:
- return 1
- else:
- logger.info("No broken requirements found.")
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/completion.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/completion.py
deleted file mode 100644
index 2fcdd39..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/completion.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from __future__ import absolute_import
-
-import sys
-import textwrap
-
-from pip._internal.cli.base_command import Command
-from pip._internal.utils.misc import get_prog
-
-BASE_COMPLETION = """
-# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
-"""
-
-COMPLETION_SCRIPTS = {
- 'bash': """
- _pip_completion()
- {
- COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
- COMP_CWORD=$COMP_CWORD \\
- PIP_AUTO_COMPLETE=1 $1 ) )
- }
- complete -o default -F _pip_completion %(prog)s
- """,
- 'zsh': """
- function _pip_completion {
- local words cword
- read -Ac words
- read -cn cword
- reply=( $( COMP_WORDS="$words[*]" \\
- COMP_CWORD=$(( cword-1 )) \\
- PIP_AUTO_COMPLETE=1 $words[1] ) )
- }
- compctl -K _pip_completion %(prog)s
- """,
- 'fish': """
- function __fish_complete_pip
- set -lx COMP_WORDS (commandline -o) ""
- set -lx COMP_CWORD ( \\
- math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
- )
- set -lx PIP_AUTO_COMPLETE 1
- string split \\ -- (eval $COMP_WORDS[1])
- end
- complete -fa "(__fish_complete_pip)" -c %(prog)s
- """,
-}
-
-
-class CompletionCommand(Command):
- """A helper command to be used for command completion."""
- name = 'completion'
- summary = 'A helper command used for command completion.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(CompletionCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(
- '--bash', '-b',
- action='store_const',
- const='bash',
- dest='shell',
- help='Emit completion code for bash')
- cmd_opts.add_option(
- '--zsh', '-z',
- action='store_const',
- const='zsh',
- dest='shell',
- help='Emit completion code for zsh')
- cmd_opts.add_option(
- '--fish', '-f',
- action='store_const',
- const='fish',
- dest='shell',
- help='Emit completion code for fish')
-
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- """Prints the completion code of the given shell"""
- shells = COMPLETION_SCRIPTS.keys()
- shell_options = ['--' + shell for shell in sorted(shells)]
- if options.shell in shells:
- script = textwrap.dedent(
- COMPLETION_SCRIPTS.get(options.shell, '') % {
- 'prog': get_prog(),
- }
- )
- print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
- else:
- sys.stderr.write(
- 'ERROR: You must pass %s\n' % ' or '.join(shell_options)
- )
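
All three shell snippets speak the same protocol: re-invoke pip with PIP_AUTO_COMPLETE=1 and the current command line in COMP_WORDS / COMP_CWORD, then use whatever it prints as completion candidates. A toy consumer of that protocol (pip's real one is autocomplete() in pip._internal.cli.autocompletion; the subcommand list here is abbreviated):

import os

if os.environ.get('PIP_AUTO_COMPLETE') == '1':
    cwords = os.environ.get('COMP_WORDS', '').split()[1:]
    cword = int(os.environ.get('COMP_CWORD', '1'))
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''
    subcommands = ['install', 'download', 'freeze', 'list']
    print(' '.join(c for c in subcommands if c.startswith(current)))
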
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/configuration.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/configuration.py
deleted file mode 100644
index 826c08d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/configuration.py
+++ /dev/null
@@ -1,227 +0,0 @@
-import logging
-import os
-import subprocess
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.configuration import Configuration, kinds
-from pip._internal.exceptions import PipError
-from pip._internal.locations import venv_config_file
-from pip._internal.utils.misc import get_prog
-
-logger = logging.getLogger(__name__)
-
-
-class ConfigurationCommand(Command):
- """Manage local and global configuration.
-
- Subcommands:
-
- list: List the active configuration (or from the file specified)
- edit: Edit the configuration file in an editor
- get: Get the value associated with name
- set: Set the name=value
- unset: Unset the value associated with name
-
- If none of --user, --global and --venv are passed, a virtual
- environment configuration file is used if one is active and the file
- exists. Otherwise, all modifications happen to the user file by
- default.
- """
-
- name = 'config'
- usage = """
- %prog [<file-option>] list
- %prog [<file-option>] [--editor <editor-path>] edit
-
- %prog [<file-option>] get name
- %prog [<file-option>] set name value
- %prog [<file-option>] unset name
- """
-
- summary = "Manage local and global configuration."
-
- def __init__(self, *args, **kwargs):
- super(ConfigurationCommand, self).__init__(*args, **kwargs)
-
- self.configuration = None
-
- self.cmd_opts.add_option(
- '--editor',
- dest='editor',
- action='store',
- default=None,
- help=(
- 'Editor to use to edit the file. Uses VISUAL or EDITOR '
- 'environment variables if not provided.'
- )
- )
-
- self.cmd_opts.add_option(
- '--global',
- dest='global_file',
- action='store_true',
- default=False,
- help='Use the system-wide configuration file only'
- )
-
- self.cmd_opts.add_option(
- '--user',
- dest='user_file',
- action='store_true',
- default=False,
- help='Use the user configuration file only'
- )
-
- self.cmd_opts.add_option(
- '--venv',
- dest='venv_file',
- action='store_true',
- default=False,
- help='Use the virtualenv configuration file only'
- )
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- handlers = {
- "list": self.list_values,
- "edit": self.open_in_editor,
- "get": self.get_name,
- "set": self.set_name_value,
- "unset": self.unset_name
- }
-
- # Determine action
- if not args or args[0] not in handlers:
- logger.error("Need an action ({}) to perform.".format(
- ", ".join(sorted(handlers)))
- )
- return ERROR
-
- action = args[0]
-
- # Determine which configuration files are to be loaded
- # Depends on whether the command is modifying.
- try:
- load_only = self._determine_file(
- options, need_value=(action in ["get", "set", "unset", "edit"])
- )
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- # Load a new configuration
- self.configuration = Configuration(
- isolated=options.isolated_mode, load_only=load_only
- )
- self.configuration.load()
-
- # Error handling happens here, not in the action-handlers.
- try:
- handlers[action](options, args[1:])
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- return SUCCESS
-
- def _determine_file(self, options, need_value):
- file_options = {
- kinds.USER: options.user_file,
- kinds.GLOBAL: options.global_file,
- kinds.VENV: options.venv_file
- }
-
- if sum(file_options.values()) == 0:
- if not need_value:
- return None
- # Default to user, unless there's a virtualenv file.
- elif os.path.exists(venv_config_file):
- return kinds.VENV
- else:
- return kinds.USER
- elif sum(file_options.values()) == 1:
- # There's probably a better expression for this.
- return [key for key in file_options if file_options[key]][0]
-
- raise PipError(
- "Need exactly one file to operate upon "
- "(--user, --venv, --global) to perform."
- )
-
- def list_values(self, options, args):
- self._get_n_args(args, "list", n=0)
-
- for key, value in sorted(self.configuration.items()):
- logger.info("%s=%r", key, value)
-
- def get_name(self, options, args):
- key = self._get_n_args(args, "get [name]", n=1)
- value = self.configuration.get_value(key)
-
- logger.info("%s", value)
-
- def set_name_value(self, options, args):
- key, value = self._get_n_args(args, "set [name] [value]", n=2)
- self.configuration.set_value(key, value)
-
- self._save_configuration()
-
- def unset_name(self, options, args):
- key = self._get_n_args(args, "unset [name]", n=1)
- self.configuration.unset_value(key)
-
- self._save_configuration()
-
- def open_in_editor(self, options, args):
- editor = self._determine_editor(options)
-
- fname = self.configuration.get_file_to_edit()
- if fname is None:
- raise PipError("Could not determine appropriate file.")
-
- try:
- subprocess.check_call([editor, fname])
- except subprocess.CalledProcessError as e:
- raise PipError(
- "Editor Subprocess exited with exit code {}"
- .format(e.returncode)
- )
-
- def _get_n_args(self, args, example, n):
- """Helper to make sure the command got the right number of arguments
- """
- if len(args) != n:
- msg = (
- 'Got unexpected number of arguments, expected {}. '
- '(example: "{} config {}")'
- ).format(n, get_prog(), example)
- raise PipError(msg)
-
- if n == 1:
- return args[0]
- else:
- return args
-
- def _save_configuration(self):
- # We successfully ran a modifying command. Need to save the
- # configuration.
- try:
- self.configuration.save()
- except Exception:
- logger.error(
- "Unable to save configuration. Please report this as a bug.",
- exc_info=1
- )
- raise PipError("Internal Error.")
-
- def _determine_editor(self, options):
- if options.editor is not None:
- return options.editor
- elif "VISUAL" in os.environ:
- return os.environ["VISUAL"]
- elif "EDITOR" in os.environ:
- return os.environ["EDITOR"]
- else:
- raise PipError("Could not determine editor to use.")
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/download.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/download.py
deleted file mode 100644
index a57e4bc..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/download.py
+++ /dev/null
@@ -1,176 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import RequirementCommand
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req import RequirementSet
-from pip._internal.req.req_tracker import RequirementTracker
-from pip._internal.resolve import Resolver
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import ensure_dir, normalize_path
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-class DownloadCommand(RequirementCommand):
- """
- Download packages from:
-
- - PyPI (and other indexes) using requirement specifiers.
- - VCS project urls.
- - Local project directories.
- - Local or remote source archives.
-
- pip also supports downloading from "requirements files", which provide
- an easy way to specify a whole environment to be downloaded.
- """
- name = 'download'
-
- usage = """
- %prog [options] <requirement specifier> [package-index-options] ...
- %prog [options] -r <requirements file> [package-index-options] ...
- %prog [options] <vcs project url> ...
- %prog [options] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- summary = 'Download packages.'
-
- def __init__(self, *args, **kw):
- super(DownloadCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(cmdoptions.constraints())
- cmd_opts.add_option(cmdoptions.requirements())
- cmd_opts.add_option(cmdoptions.build_dir())
- cmd_opts.add_option(cmdoptions.no_deps())
- cmd_opts.add_option(cmdoptions.global_options())
- cmd_opts.add_option(cmdoptions.no_binary())
- cmd_opts.add_option(cmdoptions.only_binary())
- cmd_opts.add_option(cmdoptions.prefer_binary())
- cmd_opts.add_option(cmdoptions.src())
- cmd_opts.add_option(cmdoptions.pre())
- cmd_opts.add_option(cmdoptions.no_clean())
- cmd_opts.add_option(cmdoptions.require_hashes())
- cmd_opts.add_option(cmdoptions.progress_bar())
- cmd_opts.add_option(cmdoptions.no_build_isolation())
- cmd_opts.add_option(cmdoptions.use_pep517())
- cmd_opts.add_option(cmdoptions.no_use_pep517())
-
- cmd_opts.add_option(
- '-d', '--dest', '--destination-dir', '--destination-directory',
- dest='download_dir',
- metavar='dir',
- default=os.curdir,
- help=("Download packages into <dir>."),
- )
-
- cmd_opts.add_option(cmdoptions.platform())
- cmd_opts.add_option(cmdoptions.python_version())
- cmd_opts.add_option(cmdoptions.implementation())
- cmd_opts.add_option(cmdoptions.abi())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- options.ignore_installed = True
- # editable doesn't really make sense for `pip download`, but the bowels
- # of the RequirementSet code require that property.
- options.editables = []
-
- if options.python_version:
- python_versions = [options.python_version]
- else:
- python_versions = None
-
- cmdoptions.check_dist_restriction(options)
-
- options.src_dir = os.path.abspath(options.src_dir)
- options.download_dir = normalize_path(options.download_dir)
-
- ensure_dir(options.download_dir)
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(
- options=options,
- session=session,
- platform=options.platform,
- python_versions=python_versions,
- abi=options.abi,
- implementation=options.implementation,
- )
- build_delete = (not (options.no_clean or options.build_dir))
- if options.cache_dir and not check_path_owner(options.cache_dir):
- logger.warning(
- "The directory '%s' or its parent directory is not owned "
- "by the current user and caching wheels has been "
- "disabled. check the permissions and owner of that "
- "directory. If executing pip with sudo, you may want "
- "sudo's -H flag.",
- options.cache_dir,
- )
- options.cache_dir = None
-
- with RequirementTracker() as req_tracker, TempDirectory(
- options.build_dir, delete=build_delete, kind="download"
- ) as directory:
-
- requirement_set = RequirementSet(
- require_hashes=options.require_hashes,
- )
- self.populate_requirement_set(
- requirement_set,
- args,
- options,
- finder,
- session,
- self.name,
- None
- )
-
- preparer = RequirementPreparer(
- build_dir=directory.path,
- src_dir=options.src_dir,
- download_dir=options.download_dir,
- wheel_download_dir=None,
- progress_bar=options.progress_bar,
- build_isolation=options.build_isolation,
- req_tracker=req_tracker,
- )
-
- resolver = Resolver(
- preparer=preparer,
- finder=finder,
- session=session,
- wheel_cache=None,
- use_user_site=False,
- upgrade_strategy="to-satisfy-only",
- force_reinstall=False,
- ignore_dependencies=options.ignore_dependencies,
- ignore_requires_python=False,
- ignore_installed=True,
- isolated=options.isolated_mode,
- )
- resolver.resolve(requirement_set)
-
- downloaded = ' '.join([
- req.name for req in requirement_set.successfully_downloaded
- ])
- if downloaded:
- logger.info('Successfully downloaded %s', downloaded)
-
- # Clean up
- if not options.no_clean:
- requirement_set.cleanup_files()
-
- return requirement_set
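The run() above wires a PackageFinder, RequirementPreparer, and Resolver together, then reports what was fetched. For reference, a minimal sketch of driving the same command through its CLI surface, using only options registered above; the package name 'requests' and the destination directory are placeholders:

    import subprocess

    # Fetch a package and its dependencies into ./downloads; --dest maps
    # to the download_dir option defined above.
    subprocess.check_call(
        ["pip", "download", "--dest", "./downloads", "requests"]
    )

    # Cross-environment fetch: the platform/ABI restriction options above
    # require wheels only, since nothing can be built for a foreign target.
    subprocess.check_call([
        "pip", "download", "--only-binary", ":all:",
        "--platform", "manylinux1_x86_64", "--python-version", "37",
        "--implementation", "cp", "--dest", "./downloads", "requests",
    ])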
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/freeze.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/freeze.py
deleted file mode 100644
index dc9c53a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/freeze.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from __future__ import absolute_import
-
-import sys
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli.base_command import Command
-from pip._internal.models.format_control import FormatControl
-from pip._internal.operations.freeze import freeze
-from pip._internal.utils.compat import stdlib_pkgs
-
-DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
-
-
-class FreezeCommand(Command):
- """
- Output installed packages in requirements format.
-
-    Packages are listed in a case-insensitive sorted order.
- """
- name = 'freeze'
- usage = """
- %prog [options]"""
- summary = 'Output installed packages in requirements format.'
- log_streams = ("ext://sys.stderr", "ext://sys.stderr")
-
- def __init__(self, *args, **kw):
- super(FreezeCommand, self).__init__(*args, **kw)
-
- self.cmd_opts.add_option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='file',
- help="Use the order in the given requirements file and its "
- "comments when generating output. This option can be "
- "used multiple times.")
- self.cmd_opts.add_option(
- '-f', '--find-links',
- dest='find_links',
- action='append',
- default=[],
- metavar='URL',
- help='URL for finding packages, which will be added to the '
- 'output.')
- self.cmd_opts.add_option(
- '-l', '--local',
- dest='local',
- action='store_true',
- default=False,
- help='If in a virtualenv that has global access, do not output '
- 'globally-installed packages.')
- self.cmd_opts.add_option(
- '--user',
- dest='user',
- action='store_true',
- default=False,
- help='Only output packages installed in user-site.')
- self.cmd_opts.add_option(
- '--all',
- dest='freeze_all',
- action='store_true',
- help='Do not skip these packages in the output:'
- ' %s' % ', '.join(DEV_PKGS))
- self.cmd_opts.add_option(
- '--exclude-editable',
- dest='exclude_editable',
- action='store_true',
-            help='Exclude editable packages from output.')
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- format_control = FormatControl(set(), set())
- wheel_cache = WheelCache(options.cache_dir, format_control)
- skip = set(stdlib_pkgs)
- if not options.freeze_all:
- skip.update(DEV_PKGS)
-
- freeze_kwargs = dict(
- requirement=options.requirements,
- find_links=options.find_links,
- local_only=options.local,
- user_only=options.user,
- skip_regex=options.skip_requirements_regex,
- isolated=options.isolated_mode,
- wheel_cache=wheel_cache,
- skip=skip,
- exclude_editable=options.exclude_editable,
- )
-
- try:
- for line in freeze(**freeze_kwargs):
- sys.stdout.write(line + '\n')
- finally:
- wheel_cache.cleanup()
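run() above funnels everything through the freeze() generator imported at the top of the file. A short sketch of calling it directly; note that pip._internal is not a stable API, and the keyword defaults are assumed from the call site above:

    import sys

    from pip._internal.operations.freeze import freeze

    # Emit requirement lines for the local environment, skipping the same
    # development packages the command skips by default.
    for line in freeze(local_only=True,
                       skip={'pip', 'setuptools', 'distribute', 'wheel'}):
        sys.stdout.write(line + '\n')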
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/hash.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/hash.py
deleted file mode 100644
index 423440e..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/hash.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from __future__ import absolute_import
-
-import hashlib
-import logging
-import sys
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR
-from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
-from pip._internal.utils.misc import read_chunks
-
-logger = logging.getLogger(__name__)
-
-
-class HashCommand(Command):
- """
- Compute a hash of a local package archive.
-
- These can be used with --hash in a requirements file to do repeatable
- installs.
-
- """
- name = 'hash'
- usage = '%prog [options] <file> ...'
- summary = 'Compute hashes of package archives.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(HashCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-a', '--algorithm',
- dest='algorithm',
- choices=STRONG_HASHES,
- action='store',
- default=FAVORITE_HASH,
- help='The hash algorithm to use: one of %s' %
- ', '.join(STRONG_HASHES))
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- if not args:
- self.parser.print_usage(sys.stderr)
- return ERROR
-
- algorithm = options.algorithm
- for path in args:
- logger.info('%s:\n--hash=%s:%s',
- path, algorithm, _hash_of_file(path, algorithm))
-
-
-def _hash_of_file(path, algorithm):
- """Return the hash digest of a file."""
- with open(path, 'rb') as archive:
- hash = hashlib.new(algorithm)
- for chunk in read_chunks(archive):
- hash.update(chunk)
- return hash.hexdigest()
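Since _hash_of_file() leans only on hashlib plus a chunked read, the same behaviour can be reproduced with the standard library alone; a sketch, where 'example.tar.gz' is a placeholder path:

    import hashlib

    def hash_of_file(path, algorithm='sha256', chunk_size=8192):
        """Chunked digest of a file, mirroring _hash_of_file above."""
        digest = hashlib.new(algorithm)
        with open(path, 'rb') as archive:
            for chunk in iter(lambda: archive.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    # Printed in the same '--hash=<alg>:<digest>' shape the command logs,
    # ready to paste into a requirements file.
    print('--hash=sha256:%s' % hash_of_file('example.tar.gz'))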
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/help.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/help.py
deleted file mode 100644
index 49a81cb..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/help.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from __future__ import absolute_import
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.exceptions import CommandError
-
-
-class HelpCommand(Command):
- """Show help for commands"""
- name = 'help'
- usage = """
- %prog <command>"""
- summary = 'Show help for commands.'
- ignore_require_venv = True
-
- def run(self, options, args):
- from pip._internal.commands import commands_dict, get_similar_commands
-
- try:
- # 'pip help' with no args is handled by pip.__init__.parseopt()
- cmd_name = args[0] # the command we need help for
- except IndexError:
- return SUCCESS
-
- if cmd_name not in commands_dict:
- guess = get_similar_commands(cmd_name)
-
- msg = ['unknown command "%s"' % cmd_name]
- if guess:
- msg.append('maybe you meant "%s"' % guess)
-
- raise CommandError(' - '.join(msg))
-
- command = commands_dict[cmd_name]()
- command.parser.print_help()
-
- return SUCCESS
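get_similar_commands() is imported lazily above to avoid a circular import; conceptually it is a close-match lookup over the command registry. A plausible stdlib sketch of that idea (the command list here is illustrative, not the real registry):

    import difflib

    COMMANDS = ['install', 'uninstall', 'freeze', 'list', 'show', 'help']

    def similar_command(name):
        """Return the closest known command name, or None."""
        matches = difflib.get_close_matches(name, COMMANDS, n=1)
        return matches[0] if matches else None

    print(similar_command('instal'))  # -> 'install'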
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/install.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/install.py
deleted file mode 100644
index 1c244d2..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/install.py
+++ /dev/null
@@ -1,566 +0,0 @@
-from __future__ import absolute_import
-
-import errno
-import logging
-import operator
-import os
-import shutil
-from optparse import SUPPRESS_HELP
-
-from pip._vendor import pkg_resources
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import RequirementCommand
-from pip._internal.cli.status_codes import ERROR
-from pip._internal.exceptions import (
- CommandError, InstallationError, PreviousBuildDirError,
-)
-from pip._internal.locations import distutils_scheme, virtualenv_no_global
-from pip._internal.operations.check import check_install_conflicts
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req import RequirementSet, install_given_reqs
-from pip._internal.req.req_tracker import RequirementTracker
-from pip._internal.resolve import Resolver
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import (
- ensure_dir, get_installed_version,
- protect_pip_from_modification_on_windows,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.wheel import WheelBuilder
-
-logger = logging.getLogger(__name__)
-
-
-class InstallCommand(RequirementCommand):
- """
- Install packages from:
-
- - PyPI (and other indexes) using requirement specifiers.
- - VCS project urls.
- - Local project directories.
- - Local or remote source archives.
-
- pip also supports installing from "requirements files", which provide
- an easy way to specify a whole environment to be installed.
- """
- name = 'install'
-
- usage = """
- %prog [options] <requirement specifier> [package-index-options] ...
- %prog [options] -r <requirements file> [package-index-options] ...
- %prog [options] [-e] <vcs project url> ...
- %prog [options] [-e] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- summary = 'Install packages.'
-
- def __init__(self, *args, **kw):
- super(InstallCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(cmdoptions.requirements())
- cmd_opts.add_option(cmdoptions.constraints())
- cmd_opts.add_option(cmdoptions.no_deps())
- cmd_opts.add_option(cmdoptions.pre())
-
- cmd_opts.add_option(cmdoptions.editable())
- cmd_opts.add_option(
- '-t', '--target',
- dest='target_dir',
- metavar='dir',
- default=None,
- help='Install packages into <dir>. '
- 'By default this will not replace existing files/folders in '
- '<dir>. Use --upgrade to replace existing packages in <dir> '
- 'with new versions.'
- )
- cmd_opts.add_option(cmdoptions.platform())
- cmd_opts.add_option(cmdoptions.python_version())
- cmd_opts.add_option(cmdoptions.implementation())
- cmd_opts.add_option(cmdoptions.abi())
-
- cmd_opts.add_option(
- '--user',
- dest='use_user_site',
- action='store_true',
- help="Install to the Python user install directory for your "
- "platform. Typically ~/.local/, or %APPDATA%\\Python on "
- "Windows. (See the Python documentation for site.USER_BASE "
- "for full details.)")
- cmd_opts.add_option(
- '--no-user',
- dest='use_user_site',
- action='store_false',
- help=SUPPRESS_HELP)
- cmd_opts.add_option(
- '--root',
- dest='root_path',
- metavar='dir',
- default=None,
- help="Install everything relative to this alternate root "
- "directory.")
- cmd_opts.add_option(
- '--prefix',
- dest='prefix_path',
- metavar='dir',
- default=None,
- help="Installation prefix where lib, bin and other top-level "
- "folders are placed")
-
- cmd_opts.add_option(cmdoptions.build_dir())
-
- cmd_opts.add_option(cmdoptions.src())
-
- cmd_opts.add_option(
- '-U', '--upgrade',
- dest='upgrade',
- action='store_true',
- help='Upgrade all specified packages to the newest available '
- 'version. The handling of dependencies depends on the '
- 'upgrade-strategy used.'
- )
-
- cmd_opts.add_option(
- '--upgrade-strategy',
- dest='upgrade_strategy',
- default='only-if-needed',
- choices=['only-if-needed', 'eager'],
- help='Determines how dependency upgrading should be handled '
- '[default: %default]. '
- '"eager" - dependencies are upgraded regardless of '
- 'whether the currently installed version satisfies the '
- 'requirements of the upgraded package(s). '
-                 '"only-if-needed" - dependencies are upgraded only when they do not '
- 'satisfy the requirements of the upgraded package(s).'
- )
-
- cmd_opts.add_option(
- '--force-reinstall',
- dest='force_reinstall',
- action='store_true',
- help='Reinstall all packages even if they are already '
- 'up-to-date.')
-
- cmd_opts.add_option(
- '-I', '--ignore-installed',
- dest='ignore_installed',
- action='store_true',
- help='Ignore the installed packages (reinstalling instead).')
-
- cmd_opts.add_option(cmdoptions.ignore_requires_python())
- cmd_opts.add_option(cmdoptions.no_build_isolation())
- cmd_opts.add_option(cmdoptions.use_pep517())
- cmd_opts.add_option(cmdoptions.no_use_pep517())
-
- cmd_opts.add_option(cmdoptions.install_options())
- cmd_opts.add_option(cmdoptions.global_options())
-
- cmd_opts.add_option(
- "--compile",
- action="store_true",
- dest="compile",
- default=True,
- help="Compile Python source files to bytecode",
- )
-
- cmd_opts.add_option(
- "--no-compile",
- action="store_false",
- dest="compile",
- help="Do not compile Python source files to bytecode",
- )
-
- cmd_opts.add_option(
- "--no-warn-script-location",
- action="store_false",
- dest="warn_script_location",
- default=True,
- help="Do not warn when installing scripts outside PATH",
- )
- cmd_opts.add_option(
- "--no-warn-conflicts",
- action="store_false",
- dest="warn_about_conflicts",
- default=True,
- help="Do not warn about broken dependencies",
- )
-
- cmd_opts.add_option(cmdoptions.no_binary())
- cmd_opts.add_option(cmdoptions.only_binary())
- cmd_opts.add_option(cmdoptions.prefer_binary())
- cmd_opts.add_option(cmdoptions.no_clean())
- cmd_opts.add_option(cmdoptions.require_hashes())
- cmd_opts.add_option(cmdoptions.progress_bar())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- cmdoptions.check_install_build_global(options)
- upgrade_strategy = "to-satisfy-only"
- if options.upgrade:
- upgrade_strategy = options.upgrade_strategy
-
- if options.build_dir:
- options.build_dir = os.path.abspath(options.build_dir)
-
- cmdoptions.check_dist_restriction(options, check_target=True)
-
- if options.python_version:
- python_versions = [options.python_version]
- else:
- python_versions = None
-
- options.src_dir = os.path.abspath(options.src_dir)
- install_options = options.install_options or []
- if options.use_user_site:
- if options.prefix_path:
- raise CommandError(
- "Can not combine '--user' and '--prefix' as they imply "
- "different installation locations"
- )
- if virtualenv_no_global():
- raise InstallationError(
- "Can not perform a '--user' install. User site-packages "
- "are not visible in this virtualenv."
- )
- install_options.append('--user')
- install_options.append('--prefix=')
-
- target_temp_dir = TempDirectory(kind="target")
- if options.target_dir:
- options.ignore_installed = True
- options.target_dir = os.path.abspath(options.target_dir)
- if (os.path.exists(options.target_dir) and not
- os.path.isdir(options.target_dir)):
- raise CommandError(
- "Target path exists but is not a directory, will not "
- "continue."
- )
-
-            # Create a target directory for use with the --target option
- target_temp_dir.create()
- install_options.append('--home=' + target_temp_dir.path)
-
- global_options = options.global_options or []
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(
- options=options,
- session=session,
- platform=options.platform,
- python_versions=python_versions,
- abi=options.abi,
- implementation=options.implementation,
- )
- build_delete = (not (options.no_clean or options.build_dir))
- wheel_cache = WheelCache(options.cache_dir, options.format_control)
-
- if options.cache_dir and not check_path_owner(options.cache_dir):
- logger.warning(
- "The directory '%s' or its parent directory is not owned "
- "by the current user and caching wheels has been "
-                    "disabled. Check the permissions and owner of that "
- "directory. If executing pip with sudo, you may want "
- "sudo's -H flag.",
- options.cache_dir,
- )
- options.cache_dir = None
-
- with RequirementTracker() as req_tracker, TempDirectory(
- options.build_dir, delete=build_delete, kind="install"
- ) as directory:
- requirement_set = RequirementSet(
- require_hashes=options.require_hashes,
- check_supported_wheels=not options.target_dir,
- )
-
- try:
- self.populate_requirement_set(
- requirement_set, args, options, finder, session,
- self.name, wheel_cache
- )
- preparer = RequirementPreparer(
- build_dir=directory.path,
- src_dir=options.src_dir,
- download_dir=None,
- wheel_download_dir=None,
- progress_bar=options.progress_bar,
- build_isolation=options.build_isolation,
- req_tracker=req_tracker,
- )
-
- resolver = Resolver(
- preparer=preparer,
- finder=finder,
- session=session,
- wheel_cache=wheel_cache,
- use_user_site=options.use_user_site,
- upgrade_strategy=upgrade_strategy,
- force_reinstall=options.force_reinstall,
- ignore_dependencies=options.ignore_dependencies,
- ignore_requires_python=options.ignore_requires_python,
- ignore_installed=options.ignore_installed,
- isolated=options.isolated_mode,
- use_pep517=options.use_pep517
- )
- resolver.resolve(requirement_set)
-
- protect_pip_from_modification_on_windows(
- modifying_pip=requirement_set.has_requirement("pip")
- )
-
- # Consider legacy and PEP517-using requirements separately
- legacy_requirements = []
- pep517_requirements = []
- for req in requirement_set.requirements.values():
- if req.use_pep517:
- pep517_requirements.append(req)
- else:
- legacy_requirements.append(req)
-
- # We don't build wheels for legacy requirements if we
- # don't have wheel installed or we don't have a cache dir
- try:
- import wheel # noqa: F401
- build_legacy = bool(options.cache_dir)
- except ImportError:
- build_legacy = False
-
- wb = WheelBuilder(
- finder, preparer, wheel_cache,
- build_options=[], global_options=[],
- )
-
- # Always build PEP 517 requirements
- build_failures = wb.build(
- pep517_requirements,
- session=session, autobuilding=True
- )
-
- if build_legacy:
- # We don't care about failures building legacy
- # requirements, as we'll fall through to a direct
- # install for those.
- wb.build(
- legacy_requirements,
- session=session, autobuilding=True
- )
-
- # If we're using PEP 517, we cannot do a direct install
- # so we fail here.
- if build_failures:
- raise InstallationError(
- "Could not build wheels for {} which use"
- " PEP 517 and cannot be installed directly".format(
- ", ".join(r.name for r in build_failures)))
-
- to_install = resolver.get_installation_order(
- requirement_set
- )
-
-                    # Consistency checking of the package set we're installing.
- should_warn_about_conflicts = (
- not options.ignore_dependencies and
- options.warn_about_conflicts
- )
- if should_warn_about_conflicts:
- self._warn_about_conflicts(to_install)
-
- # Don't warn about script install locations if
- # --target has been specified
- warn_script_location = options.warn_script_location
- if options.target_dir:
- warn_script_location = False
-
- installed = install_given_reqs(
- to_install,
- install_options,
- global_options,
- root=options.root_path,
- home=target_temp_dir.path,
- prefix=options.prefix_path,
- pycompile=options.compile,
- warn_script_location=warn_script_location,
- use_user_site=options.use_user_site,
- )
-
- lib_locations = get_lib_location_guesses(
- user=options.use_user_site,
- home=target_temp_dir.path,
- root=options.root_path,
- prefix=options.prefix_path,
- isolated=options.isolated_mode,
- )
- working_set = pkg_resources.WorkingSet(lib_locations)
-
- reqs = sorted(installed, key=operator.attrgetter('name'))
- items = []
- for req in reqs:
- item = req.name
- try:
- installed_version = get_installed_version(
- req.name, working_set=working_set
- )
- if installed_version:
- item += '-' + installed_version
- except Exception:
- pass
- items.append(item)
- installed = ' '.join(items)
- if installed:
- logger.info('Successfully installed %s', installed)
- except EnvironmentError as error:
- show_traceback = (self.verbosity >= 1)
-
- message = create_env_error_message(
- error, show_traceback, options.use_user_site,
- )
- logger.error(message, exc_info=show_traceback)
-
- return ERROR
- except PreviousBuildDirError:
- options.no_clean = True
- raise
- finally:
- # Clean up
- if not options.no_clean:
- requirement_set.cleanup_files()
- wheel_cache.cleanup()
-
- if options.target_dir:
- self._handle_target_dir(
- options.target_dir, target_temp_dir, options.upgrade
- )
- return requirement_set
-
- def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
- ensure_dir(target_dir)
-
- # Checking both purelib and platlib directories for installed
- # packages to be moved to target directory
- lib_dir_list = []
-
- with target_temp_dir:
- scheme = distutils_scheme('', home=target_temp_dir.path)
- purelib_dir = scheme['purelib']
- platlib_dir = scheme['platlib']
- data_dir = scheme['data']
-
- if os.path.exists(purelib_dir):
- lib_dir_list.append(purelib_dir)
- if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
- lib_dir_list.append(platlib_dir)
- if os.path.exists(data_dir):
- lib_dir_list.append(data_dir)
-
- for lib_dir in lib_dir_list:
- for item in os.listdir(lib_dir):
- if lib_dir == data_dir:
- ddir = os.path.join(data_dir, item)
- if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
- continue
- target_item_dir = os.path.join(target_dir, item)
- if os.path.exists(target_item_dir):
- if not upgrade:
- logger.warning(
- 'Target directory %s already exists. Specify '
- '--upgrade to force replacement.',
- target_item_dir
- )
- continue
- if os.path.islink(target_item_dir):
- logger.warning(
- 'Target directory %s already exists and is '
- 'a link. Pip will not automatically replace '
- 'links, please remove if replacement is '
- 'desired.',
- target_item_dir
- )
- continue
- if os.path.isdir(target_item_dir):
- shutil.rmtree(target_item_dir)
- else:
- os.remove(target_item_dir)
-
- shutil.move(
- os.path.join(lib_dir, item),
- target_item_dir
- )
-
- def _warn_about_conflicts(self, to_install):
- try:
- package_set, _dep_info = check_install_conflicts(to_install)
- except Exception:
- logger.error("Error checking for conflicts.", exc_info=True)
- return
- missing, conflicting = _dep_info
-
- # NOTE: There is some duplication here from pip check
- for project_name in missing:
- version = package_set[project_name][0]
- for dependency in missing[project_name]:
- logger.critical(
- "%s %s requires %s, which is not installed.",
- project_name, version, dependency[1],
- )
-
- for project_name in conflicting:
- version = package_set[project_name][0]
- for dep_name, dep_version, req in conflicting[project_name]:
- logger.critical(
- "%s %s has requirement %s, but you'll have %s %s which is "
- "incompatible.",
- project_name, version, req, dep_name, dep_version,
- )
-
-
-def get_lib_location_guesses(*args, **kwargs):
- scheme = distutils_scheme('', *args, **kwargs)
- return [scheme['purelib'], scheme['platlib']]
-
-
-def create_env_error_message(error, show_traceback, using_user_site):
- """Format an error message for an EnvironmentError
-
- It may occur anytime during the execution of the install command.
- """
- parts = []
-
- # Mention the error if we are not going to show a traceback
- parts.append("Could not install packages due to an EnvironmentError")
- if not show_traceback:
- parts.append(": ")
- parts.append(str(error))
- else:
- parts.append(".")
-
-    # Split the error indication from a helper message (if any)
- parts[-1] += "\n"
-
- # Suggest useful actions to the user:
- # (1) using user site-packages or (2) verifying the permissions
- if error.errno == errno.EACCES:
- user_option_part = "Consider using the `--user` option"
- permissions_part = "Check the permissions"
-
- if not using_user_site:
- parts.extend([
- user_option_part, " or ",
- permissions_part.lower(),
- ])
- else:
- parts.append(permissions_part)
- parts.append(".\n")
-
- return "".join(parts).strip() + "\n"
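A worked example of create_env_error_message() above for the common permission-denied case; the import path is the 19.0.x module shown here and not a stable API, so this is illustrative only:

    import errno

    from pip._internal.commands.install import create_env_error_message

    err = EnvironmentError(errno.EACCES, 'Permission denied')
    print(create_env_error_message(err, show_traceback=False,
                                   using_user_site=False))
    # Could not install packages due to an EnvironmentError: [Errno 13]
    # Permission denied
    # Consider using the `--user` option or check the permissions.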
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/list.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/list.py
deleted file mode 100644
index a640274..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/list.py
+++ /dev/null
@@ -1,301 +0,0 @@
-from __future__ import absolute_import
-
-import json
-import logging
-
-from pip._vendor import six
-from pip._vendor.six.moves import zip_longest
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import Command
-from pip._internal.exceptions import CommandError
-from pip._internal.index import PackageFinder
-from pip._internal.utils.misc import (
- dist_is_editable, get_installed_distributions,
-)
-from pip._internal.utils.packaging import get_installer
-
-logger = logging.getLogger(__name__)
-
-
-class ListCommand(Command):
- """
- List installed packages, including editables.
-
- Packages are listed in a case-insensitive sorted order.
- """
- name = 'list'
- usage = """
- %prog [options]"""
- summary = 'List installed packages.'
-
- def __init__(self, *args, **kw):
- super(ListCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(
- '-o', '--outdated',
- action='store_true',
- default=False,
- help='List outdated packages')
- cmd_opts.add_option(
- '-u', '--uptodate',
- action='store_true',
- default=False,
- help='List uptodate packages')
- cmd_opts.add_option(
- '-e', '--editable',
- action='store_true',
- default=False,
- help='List editable projects.')
- cmd_opts.add_option(
- '-l', '--local',
- action='store_true',
- default=False,
- help=('If in a virtualenv that has global access, do not list '
- 'globally-installed packages.'),
- )
- self.cmd_opts.add_option(
- '--user',
- dest='user',
- action='store_true',
- default=False,
- help='Only output packages installed in user-site.')
-
- cmd_opts.add_option(
- '--pre',
- action='store_true',
- default=False,
- help=("Include pre-release and development versions. By default, "
- "pip only finds stable versions."),
- )
-
- cmd_opts.add_option(
- '--format',
- action='store',
- dest='list_format',
- default="columns",
- choices=('columns', 'freeze', 'json'),
- help="Select the output format among: columns (default), freeze, "
- "or json",
- )
-
- cmd_opts.add_option(
- '--not-required',
- action='store_true',
- dest='not_required',
- help="List packages that are not dependencies of "
- "installed packages.",
- )
-
- cmd_opts.add_option(
- '--exclude-editable',
- action='store_false',
- dest='include_editable',
-            help='Exclude editable packages from output.',
- )
- cmd_opts.add_option(
- '--include-editable',
- action='store_true',
- dest='include_editable',
-            help='Include editable packages in the output.',
- default=True,
- )
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group, self.parser
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def _build_package_finder(self, options, index_urls, session):
- """
- Create a package finder appropriate to this list command.
- """
- return PackageFinder(
- find_links=options.find_links,
- index_urls=index_urls,
- allow_all_prereleases=options.pre,
- trusted_hosts=options.trusted_hosts,
- session=session,
- )
-
- def run(self, options, args):
- if options.outdated and options.uptodate:
- raise CommandError(
- "Options --outdated and --uptodate cannot be combined.")
-
- packages = get_installed_distributions(
- local_only=options.local,
- user_only=options.user,
- editables_only=options.editable,
- include_editables=options.include_editable,
- )
-
-        # get_not_required must be called first in order to find and
-        # filter out all dependencies correctly. Otherwise a package
-        # can't be identified as a requirement because some parent
-        # packages could be filtered out before it is examined.
- if options.not_required:
- packages = self.get_not_required(packages, options)
-
- if options.outdated:
- packages = self.get_outdated(packages, options)
- elif options.uptodate:
- packages = self.get_uptodate(packages, options)
-
- self.output_package_listing(packages, options)
-
- def get_outdated(self, packages, options):
- return [
- dist for dist in self.iter_packages_latest_infos(packages, options)
- if dist.latest_version > dist.parsed_version
- ]
-
- def get_uptodate(self, packages, options):
- return [
- dist for dist in self.iter_packages_latest_infos(packages, options)
- if dist.latest_version == dist.parsed_version
- ]
-
- def get_not_required(self, packages, options):
- dep_keys = set()
- for dist in packages:
- dep_keys.update(requirement.key for requirement in dist.requires())
- return {pkg for pkg in packages if pkg.key not in dep_keys}
-
- def iter_packages_latest_infos(self, packages, options):
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index:
- logger.debug('Ignoring indexes: %s', ','.join(index_urls))
- index_urls = []
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(options, index_urls, session)
-
- for dist in packages:
- typ = 'unknown'
- all_candidates = finder.find_all_candidates(dist.key)
- if not options.pre:
- # Remove prereleases
- all_candidates = [candidate for candidate in all_candidates
- if not candidate.version.is_prerelease]
-
- if not all_candidates:
- continue
- best_candidate = max(all_candidates,
- key=finder._candidate_sort_key)
- remote_version = best_candidate.version
- if best_candidate.location.is_wheel:
- typ = 'wheel'
- else:
- typ = 'sdist'
- # This is dirty but makes the rest of the code much cleaner
- dist.latest_version = remote_version
- dist.latest_filetype = typ
- yield dist
-
- def output_package_listing(self, packages, options):
- packages = sorted(
- packages,
- key=lambda dist: dist.project_name.lower(),
- )
- if options.list_format == 'columns' and packages:
- data, header = format_for_columns(packages, options)
- self.output_package_listing_columns(data, header)
- elif options.list_format == 'freeze':
- for dist in packages:
- if options.verbose >= 1:
- logger.info("%s==%s (%s)", dist.project_name,
- dist.version, dist.location)
- else:
- logger.info("%s==%s", dist.project_name, dist.version)
- elif options.list_format == 'json':
- logger.info(format_for_json(packages, options))
-
- def output_package_listing_columns(self, data, header):
- # insert the header first: we need to know the size of column names
- if len(data) > 0:
- data.insert(0, header)
-
- pkg_strings, sizes = tabulate(data)
-
- # Create and add a separator.
- if len(data) > 0:
- pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
-
- for val in pkg_strings:
- logger.info(val)
-
-
-def tabulate(vals):
- # From pfmoore on GitHub:
- # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
- assert len(vals) > 0
-
- sizes = [0] * max(len(x) for x in vals)
- for row in vals:
- sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
-
- result = []
- for row in vals:
- display = " ".join([str(c).ljust(s) if c is not None else ''
- for s, c in zip_longest(sizes, row)])
- result.append(display)
-
- return result, sizes
-
-
-def format_for_columns(pkgs, options):
- """
- Convert the package data into something usable
- by output_package_listing_columns.
- """
- running_outdated = options.outdated
- # Adjust the header for the `pip list --outdated` case.
- if running_outdated:
- header = ["Package", "Version", "Latest", "Type"]
- else:
- header = ["Package", "Version"]
-
- data = []
- if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
- header.append("Location")
- if options.verbose >= 1:
- header.append("Installer")
-
- for proj in pkgs:
- # if we're working on the 'outdated' list, separate out the
- # latest_version and type
- row = [proj.project_name, proj.version]
-
- if running_outdated:
- row.append(proj.latest_version)
- row.append(proj.latest_filetype)
-
- if options.verbose >= 1 or dist_is_editable(proj):
- row.append(proj.location)
- if options.verbose >= 1:
- row.append(get_installer(proj))
-
- data.append(row)
-
- return data, header
-
-
-def format_for_json(packages, options):
- data = []
- for dist in packages:
- info = {
- 'name': dist.project_name,
- 'version': six.text_type(dist.version),
- }
- if options.verbose >= 1:
- info['location'] = dist.location
- info['installer'] = get_installer(dist)
- if options.outdated:
- info['latest_version'] = six.text_type(dist.latest_version)
- info['latest_filetype'] = dist.latest_filetype
- data.append(info)
- return json.dumps(data)
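tabulate() above derives each column width from its longest cell and pads short rows via zip_longest. A worked example with placeholder version numbers, importing from this 19.0.x module (not a stable API):

    from pip._internal.commands.list import tabulate

    rows = [
        ['Package', 'Version'],
        ['requests', '2.21.0'],
        ['six', '1.12.0'],
    ]
    lines, sizes = tabulate(rows)
    # sizes == [8, 7]: 'requests' is the widest cell in column one.
    for line in lines:
        print(line)
    # Package  Version
    # requests 2.21.0
    # six      1.12.0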
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/search.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/search.py
deleted file mode 100644
index c157a31..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/search.py
+++ /dev/null
@@ -1,135 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import sys
-import textwrap
-from collections import OrderedDict
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.version import parse as parse_version
-# NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
-# why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client # type: ignore
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
-from pip._internal.download import PipXmlrpcTransport
-from pip._internal.exceptions import CommandError
-from pip._internal.models.index import PyPI
-from pip._internal.utils.compat import get_terminal_size
-from pip._internal.utils.logging import indent_log
-
-logger = logging.getLogger(__name__)
-
-
-class SearchCommand(Command):
- """Search for PyPI packages whose name or summary contains <query>."""
- name = 'search'
- usage = """
- %prog [options] <query>"""
- summary = 'Search PyPI for packages.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(SearchCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-i', '--index',
- dest='index',
- metavar='URL',
- default=PyPI.pypi_url,
- help='Base URL of Python Package Index (default %default)')
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- if not args:
- raise CommandError('Missing required argument (search query).')
- query = args
- pypi_hits = self.search(query, options)
- hits = transform_hits(pypi_hits)
-
- terminal_width = None
- if sys.stdout.isatty():
- terminal_width = get_terminal_size()[0]
-
- print_results(hits, terminal_width=terminal_width)
- if pypi_hits:
- return SUCCESS
- return NO_MATCHES_FOUND
-
- def search(self, query, options):
- index_url = options.index
- with self._build_session(options) as session:
- transport = PipXmlrpcTransport(index_url, session)
- pypi = xmlrpc_client.ServerProxy(index_url, transport)
- hits = pypi.search({'name': query, 'summary': query}, 'or')
- return hits
-
-
-def transform_hits(hits):
- """
- The list from pypi is really a list of versions. We want a list of
- packages with the list of versions stored inline. This converts the
- list from pypi into one we can use.
- """
- packages = OrderedDict()
- for hit in hits:
- name = hit['name']
- summary = hit['summary']
- version = hit['version']
-
- if name not in packages.keys():
- packages[name] = {
- 'name': name,
- 'summary': summary,
- 'versions': [version],
- }
- else:
- packages[name]['versions'].append(version)
-
-        # if this is the highest version, replace the stored summary
- if version == highest_version(packages[name]['versions']):
- packages[name]['summary'] = summary
-
- return list(packages.values())
-
-
-def print_results(hits, name_column_width=None, terminal_width=None):
- if not hits:
- return
- if name_column_width is None:
- name_column_width = max([
- len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
- for hit in hits
- ]) + 4
-
- installed_packages = [p.project_name for p in pkg_resources.working_set]
- for hit in hits:
- name = hit['name']
- summary = hit['summary'] or ''
- latest = highest_version(hit.get('versions', ['-']))
- if terminal_width is not None:
- target_width = terminal_width - name_column_width - 5
- if target_width > 10:
- # wrap and indent summary to fit terminal
- summary = textwrap.wrap(summary, target_width)
- summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
-
- line = '%-*s - %s' % (name_column_width,
- '%s (%s)' % (name, latest), summary)
- try:
- logger.info(line)
- if name in installed_packages:
- dist = pkg_resources.get_distribution(name)
- with indent_log():
- if dist.version == latest:
- logger.info('INSTALLED: %s (latest)', dist.version)
- else:
- logger.info('INSTALLED: %s', dist.version)
- logger.info('LATEST: %s', latest)
- except UnicodeEncodeError:
- pass
-
-
-def highest_version(versions):
- return max(versions, key=parse_version)
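highest_version() above sorts by PEP 440 semantics rather than string order, which matters once a version component reaches two digits; a quick check using the same vendored parser imported at the top of the file:

    from pip._vendor.packaging.version import parse as parse_version

    versions = ['1.9.0', '1.10.0', '1.10.0rc1']
    print(max(versions, key=parse_version))  # -> '1.10.0'
    print(max(versions))                     # naive string max -> '1.9.0'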
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/show.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/show.py
deleted file mode 100644
index f92c9bc..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/show.py
+++ /dev/null
@@ -1,168 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os
-from email.parser import FeedParser # type: ignore
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-
-logger = logging.getLogger(__name__)
-
-
-class ShowCommand(Command):
- """
- Show information about one or more installed packages.
-
- The output is in RFC-compliant mail header format.
- """
- name = 'show'
- usage = """
- %prog [options] <package> ..."""
- summary = 'Show information about installed packages.'
- ignore_require_venv = True
-
- def __init__(self, *args, **kw):
- super(ShowCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-f', '--files',
- dest='files',
- action='store_true',
- default=False,
- help='Show the full list of installed files for each package.')
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- if not args:
- logger.warning('ERROR: Please provide a package name or names.')
- return ERROR
- query = args
-
- results = search_packages_info(query)
- if not print_results(
- results, list_files=options.files, verbose=options.verbose):
- return ERROR
- return SUCCESS
-
-
-def search_packages_info(query):
- """
-    Gather details from installed distributions. Print distribution name,
-    version, location, and installed files. Listing installed files requires
-    a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
-    directory.
- """
- installed = {}
- for p in pkg_resources.working_set:
- installed[canonicalize_name(p.project_name)] = p
-
- query_names = [canonicalize_name(name) for name in query]
-
- for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
- package = {
- 'name': dist.project_name,
- 'version': dist.version,
- 'location': dist.location,
- 'requires': [dep.project_name for dep in dist.requires()],
- }
- file_list = None
- metadata = None
- if isinstance(dist, pkg_resources.DistInfoDistribution):
-            # RECORD files should be part of .dist-info metadata
- if dist.has_metadata('RECORD'):
- lines = dist.get_metadata_lines('RECORD')
- paths = [l.split(',')[0] for l in lines]
- paths = [os.path.join(dist.location, p) for p in paths]
- file_list = [os.path.relpath(p, dist.location) for p in paths]
-
- if dist.has_metadata('METADATA'):
- metadata = dist.get_metadata('METADATA')
- else:
- # Otherwise use pip's log for .egg-info's
- if dist.has_metadata('installed-files.txt'):
- paths = dist.get_metadata_lines('installed-files.txt')
- paths = [os.path.join(dist.egg_info, p) for p in paths]
- file_list = [os.path.relpath(p, dist.location) for p in paths]
-
- if dist.has_metadata('PKG-INFO'):
- metadata = dist.get_metadata('PKG-INFO')
-
- if dist.has_metadata('entry_points.txt'):
- entry_points = dist.get_metadata_lines('entry_points.txt')
- package['entry_points'] = entry_points
-
- if dist.has_metadata('INSTALLER'):
- for line in dist.get_metadata_lines('INSTALLER'):
- if line.strip():
- package['installer'] = line.strip()
- break
-
- # @todo: Should pkg_resources.Distribution have a
- # `get_pkg_info` method?
- feed_parser = FeedParser()
- feed_parser.feed(metadata)
- pkg_info_dict = feed_parser.close()
- for key in ('metadata-version', 'summary',
- 'home-page', 'author', 'author-email', 'license'):
- package[key] = pkg_info_dict.get(key)
-
- # It looks like FeedParser cannot deal with repeated headers
- classifiers = []
- for line in metadata.splitlines():
- if line.startswith('Classifier: '):
- classifiers.append(line[len('Classifier: '):])
- package['classifiers'] = classifiers
-
- if file_list:
- package['files'] = sorted(file_list)
- yield package
-
-
-def print_results(distributions, list_files=False, verbose=False):
- """
-    Print the information from the installed distributions found.
- """
- results_printed = False
- for i, dist in enumerate(distributions):
- results_printed = True
- if i > 0:
- logger.info("---")
-
- name = dist.get('name', '')
- required_by = [
- pkg.project_name for pkg in pkg_resources.working_set
- if name in [required.name for required in pkg.requires()]
- ]
-
- logger.info("Name: %s", name)
- logger.info("Version: %s", dist.get('version', ''))
- logger.info("Summary: %s", dist.get('summary', ''))
- logger.info("Home-page: %s", dist.get('home-page', ''))
- logger.info("Author: %s", dist.get('author', ''))
- logger.info("Author-email: %s", dist.get('author-email', ''))
- logger.info("License: %s", dist.get('license', ''))
- logger.info("Location: %s", dist.get('location', ''))
- logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
- logger.info("Required-by: %s", ', '.join(required_by))
-
- if verbose:
- logger.info("Metadata-Version: %s",
- dist.get('metadata-version', ''))
- logger.info("Installer: %s", dist.get('installer', ''))
- logger.info("Classifiers:")
- for classifier in dist.get('classifiers', []):
- logger.info(" %s", classifier)
- logger.info("Entry-points:")
- for entry in dist.get('entry_points', []):
- logger.info(" %s", entry.strip())
- if list_files:
- logger.info("Files:")
- for line in dist.get('files', []):
- logger.info(" %s", line.strip())
- if "files" not in dist:
- logger.info("Cannot locate installed-files.txt")
- return results_printed
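search_packages_info() above hands the raw METADATA/PKG-INFO text to the stdlib FeedParser, which reads it as RFC 822-style headers; a minimal illustration with fabricated metadata:

    from email.parser import FeedParser

    feed_parser = FeedParser()
    feed_parser.feed('Metadata-Version: 1.2\nSummary: An example package\n')
    pkg_info = feed_parser.close()

    # Header lookup is case-insensitive, matching the keys used above.
    print(pkg_info.get('summary'))           # -> 'An example package'
    print(pkg_info.get('metadata-version'))  # -> '1.2'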
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/uninstall.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/uninstall.py
deleted file mode 100644
index 0cd6f54..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/uninstall.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from __future__ import absolute_import
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli.base_command import Command
-from pip._internal.exceptions import InstallationError
-from pip._internal.req import parse_requirements
-from pip._internal.req.constructors import install_req_from_line
-from pip._internal.utils.misc import protect_pip_from_modification_on_windows
-
-
-class UninstallCommand(Command):
- """
- Uninstall packages.
-
- pip is able to uninstall most installed packages. Known exceptions are:
-
- - Pure distutils packages installed with ``python setup.py install``, which
- leave behind no metadata to determine what files were installed.
- - Script wrappers installed by ``python setup.py develop``.
- """
- name = 'uninstall'
- usage = """
- %prog [options] <package> ...
- %prog [options] -r <requirements file> ..."""
- summary = 'Uninstall packages.'
-
- def __init__(self, *args, **kw):
- super(UninstallCommand, self).__init__(*args, **kw)
- self.cmd_opts.add_option(
- '-r', '--requirement',
- dest='requirements',
- action='append',
- default=[],
- metavar='file',
- help='Uninstall all the packages listed in the given requirements '
- 'file. This option can be used multiple times.',
- )
- self.cmd_opts.add_option(
- '-y', '--yes',
- dest='yes',
- action='store_true',
- help="Don't ask for confirmation of uninstall deletions.")
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options, args):
- with self._build_session(options) as session:
- reqs_to_uninstall = {}
- for name in args:
- req = install_req_from_line(
- name, isolated=options.isolated_mode,
- )
- if req.name:
- reqs_to_uninstall[canonicalize_name(req.name)] = req
- for filename in options.requirements:
- for req in parse_requirements(
- filename,
- options=options,
- session=session):
- if req.name:
- reqs_to_uninstall[canonicalize_name(req.name)] = req
- if not reqs_to_uninstall:
- raise InstallationError(
- 'You must give at least one requirement to %(name)s (see '
- '"pip help %(name)s")' % dict(name=self.name)
- )
-
- protect_pip_from_modification_on_windows(
- modifying_pip="pip" in reqs_to_uninstall
- )
-
- for req in reqs_to_uninstall.values():
- uninstall_pathset = req.uninstall(
- auto_confirm=options.yes, verbose=self.verbosity > 0,
- )
- if uninstall_pathset:
- uninstall_pathset.commit()
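Requirement names are canonicalized before deduplication above, so differently spelled references collapse to a single uninstall target; a quick demonstration with the same vendored helper:

    from pip._vendor.packaging.utils import canonicalize_name

    print(canonicalize_name('Django'))          # -> 'django'
    print(canonicalize_name('zope.interface'))  # -> 'zope-interface'

    # Both spellings would land on the same reqs_to_uninstall key.
    assert canonicalize_name('PyYAML') == canonicalize_name('pyyaml')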
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/wheel.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/wheel.py
deleted file mode 100644
index cd72a3d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/wheel.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import RequirementCommand
-from pip._internal.exceptions import CommandError, PreviousBuildDirError
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req import RequirementSet
-from pip._internal.req.req_tracker import RequirementTracker
-from pip._internal.resolve import Resolver
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.wheel import WheelBuilder
-
-logger = logging.getLogger(__name__)
-
-
-class WheelCommand(RequirementCommand):
- """
- Build Wheel archives for your requirements and dependencies.
-
- Wheel is a built-package format, and offers the advantage of not
- recompiling your software during every install. For more details, see the
- wheel docs: https://wheel.readthedocs.io/en/latest/
-
- Requirements: setuptools>=0.8, and wheel.
-
- 'pip wheel' uses the bdist_wheel setuptools extension from the wheel
- package to build individual wheels.
-
- """
-
- name = 'wheel'
- usage = """
- %prog [options] <requirement specifier> ...
- %prog [options] -r <requirements file> ...
- %prog [options] [-e] <vcs project url> ...
- %prog [options] [-e] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- summary = 'Build wheels from your requirements.'
-
- def __init__(self, *args, **kw):
- super(WheelCommand, self).__init__(*args, **kw)
-
- cmd_opts = self.cmd_opts
-
- cmd_opts.add_option(
- '-w', '--wheel-dir',
- dest='wheel_dir',
- metavar='dir',
- default=os.curdir,
- help=("Build wheels into <dir>, where the default is the "
- "current working directory."),
- )
- cmd_opts.add_option(cmdoptions.no_binary())
- cmd_opts.add_option(cmdoptions.only_binary())
- cmd_opts.add_option(cmdoptions.prefer_binary())
- cmd_opts.add_option(
- '--build-option',
- dest='build_options',
- metavar='options',
- action='append',
- help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
- )
- cmd_opts.add_option(cmdoptions.no_build_isolation())
- cmd_opts.add_option(cmdoptions.use_pep517())
- cmd_opts.add_option(cmdoptions.no_use_pep517())
- cmd_opts.add_option(cmdoptions.constraints())
- cmd_opts.add_option(cmdoptions.editable())
- cmd_opts.add_option(cmdoptions.requirements())
- cmd_opts.add_option(cmdoptions.src())
- cmd_opts.add_option(cmdoptions.ignore_requires_python())
- cmd_opts.add_option(cmdoptions.no_deps())
- cmd_opts.add_option(cmdoptions.build_dir())
- cmd_opts.add_option(cmdoptions.progress_bar())
-
- cmd_opts.add_option(
- '--global-option',
- dest='global_options',
- action='append',
- metavar='options',
- help="Extra global options to be supplied to the setup.py "
- "call before the 'bdist_wheel' command.")
-
- cmd_opts.add_option(
- '--pre',
- action='store_true',
- default=False,
- help=("Include pre-release and development versions. By default, "
- "pip only finds stable versions."),
- )
-
- cmd_opts.add_option(cmdoptions.no_clean())
- cmd_opts.add_option(cmdoptions.require_hashes())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, cmd_opts)
-
- def run(self, options, args):
- cmdoptions.check_install_build_global(options)
-
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index:
- logger.debug('Ignoring indexes: %s', ','.join(index_urls))
- index_urls = []
-
- if options.build_dir:
- options.build_dir = os.path.abspath(options.build_dir)
-
- options.src_dir = os.path.abspath(options.src_dir)
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(options, session)
- build_delete = (not (options.no_clean or options.build_dir))
- wheel_cache = WheelCache(options.cache_dir, options.format_control)
-
- with RequirementTracker() as req_tracker, TempDirectory(
- options.build_dir, delete=build_delete, kind="wheel"
- ) as directory:
-
- requirement_set = RequirementSet(
- require_hashes=options.require_hashes,
- )
-
- try:
- self.populate_requirement_set(
- requirement_set, args, options, finder, session,
- self.name, wheel_cache
- )
-
- preparer = RequirementPreparer(
- build_dir=directory.path,
- src_dir=options.src_dir,
- download_dir=None,
- wheel_download_dir=options.wheel_dir,
- progress_bar=options.progress_bar,
- build_isolation=options.build_isolation,
- req_tracker=req_tracker,
- )
-
- resolver = Resolver(
- preparer=preparer,
- finder=finder,
- session=session,
- wheel_cache=wheel_cache,
- use_user_site=False,
- upgrade_strategy="to-satisfy-only",
- force_reinstall=False,
- ignore_dependencies=options.ignore_dependencies,
- ignore_requires_python=options.ignore_requires_python,
- ignore_installed=True,
- isolated=options.isolated_mode,
- use_pep517=options.use_pep517
- )
- resolver.resolve(requirement_set)
-
- # build wheels
- wb = WheelBuilder(
- finder, preparer, wheel_cache,
- build_options=options.build_options or [],
- global_options=options.global_options or [],
- no_clean=options.no_clean,
- )
- build_failures = wb.build(
- requirement_set.requirements.values(), session=session,
- )
- if len(build_failures) != 0:
- raise CommandError(
- "Failed to build one or more wheels"
- )
- except PreviousBuildDirError:
- options.no_clean = True
- raise
- finally:
- if not options.no_clean:
- requirement_set.cleanup_files()
- wheel_cache.cleanup()
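A sketch of typical invocations for the options registered above; the requirements file name is a placeholder:

    import subprocess

    # Build wheels for a whole requirements file into ./wheelhouse
    # (-w is the shorthand for --wheel-dir defined above).
    subprocess.check_call([
        "pip", "wheel", "--wheel-dir", "./wheelhouse",
        "-r", "requirements.txt",
    ])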
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/configuration.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/configuration.py
deleted file mode 100644
index fe6df9b..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/configuration.py
+++ /dev/null
@@ -1,387 +0,0 @@
-"""Configuration management setup
-
-Some terminology:
-- name
- As written in config files.
-- value
- Value associated with a name
-- key
-  Name combined with its section (section.name)
-- variant
- A single word describing where the configuration key-value pair came from
-"""
-
-import locale
-import logging
-import os
-
-from pip._vendor import six
-from pip._vendor.six.moves import configparser
-
-from pip._internal.exceptions import (
- ConfigurationError, ConfigurationFileCouldNotBeLoaded,
-)
-from pip._internal.locations import (
- legacy_config_file, new_config_file, running_under_virtualenv,
- site_config_files, venv_config_file,
-)
-from pip._internal.utils.misc import ensure_dir, enum
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Any, Dict, Iterable, List, NewType, Optional, Tuple
- )
-
- RawConfigParser = configparser.RawConfigParser # Shorthand
- Kind = NewType("Kind", str)
-
-logger = logging.getLogger(__name__)
-
-
-# NOTE: Maybe use the optionxform attribute to normalize key names.
-def _normalize_name(name):
- # type: (str) -> str
- """Make a name consistent regardless of source (environment or file)
- """
- name = name.lower().replace('_', '-')
- if name.startswith('--'):
- name = name[2:] # only prefer long opts
- return name
-
-
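A quick worked example of _normalize_name() above, doctest-style (the input value is hypothetical): lower() yields '--pre_release', the underscore becomes a hyphen, and the leading '--' is dropped so long-option spellings match config-file names.

    >>> _normalize_name('--PRE_RELEASE')
    'pre-release'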
-def _disassemble_key(name):
- # type: (str) -> List[str]
- return name.split(".", 1)
-
-
-# The kinds of configurations there are.
-kinds = enum(
- USER="user", # User Specific
- GLOBAL="global", # System Wide
- VENV="venv", # Virtual Environment Specific
- ENV="env", # from PIP_CONFIG_FILE
- ENV_VAR="env-var", # from Environment Variables
-)
-
-
-class Configuration(object):
- """Handles management of configuration.
-
-    Provides an interface for accessing and managing configuration files.
-
-    This class provides an API that takes "section.key-name" style
- keys and stores the value associated with it as "key-name" under the
- section "section".
-
-    This allows for a clean interface wherein both the section and the
-    key-name are preserved in an easy-to-manage form in the configuration
-    files, and the stored data stays readable.
- """
-
- def __init__(self, isolated, load_only=None):
- # type: (bool, Kind) -> None
- super(Configuration, self).__init__()
-
- _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
- if load_only not in _valid_load_only:
- raise ConfigurationError(
- "Got invalid value for load_only - should be one of {}".format(
- ", ".join(map(repr, _valid_load_only[:-1]))
- )
- )
- self.isolated = isolated # type: bool
- self.load_only = load_only # type: Optional[Kind]
-
- # The order here determines the override order.
- self._override_order = [
- kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
- ]
-
- self._ignore_env_names = ["version", "help"]
-
- # Because we keep track of where we got the data from
- self._parsers = {
- variant: [] for variant in self._override_order
- } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
- self._config = {
- variant: {} for variant in self._override_order
- } # type: Dict[Kind, Dict[str, Any]]
- self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
-
- def load(self):
- # type: () -> None
- """Loads configuration from configuration files and environment
- """
- self._load_config_files()
- if not self.isolated:
- self._load_environment_vars()
-
- def get_file_to_edit(self):
- # type: () -> Optional[str]
-        """Returns the file with the highest priority in the configuration
- """
- assert self.load_only is not None, \
-            "Need to specify a file to edit"
-
- try:
- return self._get_parser_to_modify()[0]
- except IndexError:
- return None
-
- def items(self):
- # type: () -> Iterable[Tuple[str, Any]]
- """Returns key-value pairs like dict.items() representing the loaded
- configuration
- """
- return self._dictionary.items()
-
- def get_value(self, key):
- # type: (str) -> Any
- """Get a value from the configuration.
- """
- try:
- return self._dictionary[key]
- except KeyError:
- raise ConfigurationError("No such key - {}".format(key))
-
- def set_value(self, key, value):
- # type: (str, Any) -> None
- """Modify a value in the configuration.
- """
- self._ensure_have_load_only()
-
- fname, parser = self._get_parser_to_modify()
-
- if parser is not None:
- section, name = _disassemble_key(key)
-
- # Modify the parser and the configuration
- if not parser.has_section(section):
- parser.add_section(section)
- parser.set(section, name, value)
-
- self._config[self.load_only][key] = value
- self._mark_as_modified(fname, parser)
-
- def unset_value(self, key):
- # type: (str) -> None
- """Unset a value in the configuration.
- """
- self._ensure_have_load_only()
-
- if key not in self._config[self.load_only]:
- raise ConfigurationError("No such key - {}".format(key))
-
- fname, parser = self._get_parser_to_modify()
-
- if parser is not None:
- section, name = _disassemble_key(key)
-
- # Remove the key in the parser
- modified_something = False
- if parser.has_section(section):
- # Returns whether the option was removed or not
- modified_something = parser.remove_option(section, name)
-
- if modified_something:
- # name removed from parser, section may now be empty
- section_iter = iter(parser.items(section))
- try:
- val = six.next(section_iter)
- except StopIteration:
- val = None
-
- if val is None:
- parser.remove_section(section)
-
- self._mark_as_modified(fname, parser)
- else:
- raise ConfigurationError(
- "Fatal Internal error [id=1]. Please report as a bug."
- )
-
- del self._config[self.load_only][key]
-
- def save(self):
- # type: () -> None
-        """Save the current in-memory state.
- """
- self._ensure_have_load_only()
-
- for fname, parser in self._modified_parsers:
- logger.info("Writing to %s", fname)
-
- # Ensure directory exists.
- ensure_dir(os.path.dirname(fname))
-
- with open(fname, "w") as f:
- parser.write(f) # type: ignore
-
- #
- # Private routines
- #
-
- def _ensure_have_load_only(self):
- # type: () -> None
- if self.load_only is None:
- raise ConfigurationError("Needed a specific file to be modifying.")
- logger.debug("Will be working with %s variant only", self.load_only)
-
- @property
- def _dictionary(self):
- # type: () -> Dict[str, Any]
- """A dictionary representing the loaded configuration.
- """
- # NOTE: Dictionaries are not populated if not loaded. So, conditionals
- # are not needed here.
- retval = {}
-
- for variant in self._override_order:
- retval.update(self._config[variant])
-
- return retval
-
- def _load_config_files(self):
- # type: () -> None
- """Loads configuration from configuration files
- """
- config_files = dict(self._iter_config_files())
- if config_files[kinds.ENV][0:1] == [os.devnull]:
- logger.debug(
- "Skipping loading configuration files due to "
- "environment's PIP_CONFIG_FILE being os.devnull"
- )
- return
-
- for variant, files in config_files.items():
- for fname in files:
- # If there's specific variant set in `load_only`, load only
- # that variant, not the others.
- if self.load_only is not None and variant != self.load_only:
- logger.debug(
- "Skipping file '%s' (variant: %s)", fname, variant
- )
- continue
-
- parser = self._load_file(variant, fname)
-
- # Keeping track of the parsers used
- self._parsers[variant].append((fname, parser))
-
- def _load_file(self, variant, fname):
- # type: (Kind, str) -> RawConfigParser
- logger.debug("For variant '%s', will try loading '%s'", variant, fname)
- parser = self._construct_parser(fname)
-
- for section in parser.sections():
- items = parser.items(section)
- self._config[variant].update(self._normalized_keys(section, items))
-
- return parser
-
- def _construct_parser(self, fname):
- # type: (str) -> RawConfigParser
- parser = configparser.RawConfigParser()
- # If there is no such file, don't bother reading it but create the
- # parser anyway, to hold the data.
- # Doing this is useful when modifying and saving files, where we don't
- # need to construct a parser.
- if os.path.exists(fname):
- try:
- parser.read(fname)
- except UnicodeDecodeError:
- # See https://github.com/pypa/pip/issues/4963
- raise ConfigurationFileCouldNotBeLoaded(
- reason="contains invalid {} characters".format(
- locale.getpreferredencoding(False)
- ),
- fname=fname,
- )
- except configparser.Error as error:
- # See https://github.com/pypa/pip/issues/4893
- raise ConfigurationFileCouldNotBeLoaded(error=error)
- return parser
-
- def _load_environment_vars(self):
- # type: () -> None
- """Loads configuration from environment variables
- """
- self._config[kinds.ENV_VAR].update(
- self._normalized_keys(":env:", self._get_environ_vars())
- )
-
- def _normalized_keys(self, section, items):
- # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
- """Normalizes items to construct a dictionary with normalized keys.
-
- This routine is where the names become keys and are made the same
- regardless of source - configuration files or environment.
- """
- normalized = {}
- for name, val in items:
- key = section + "." + _normalize_name(name)
- normalized[key] = val
- return normalized
-
- def _get_environ_vars(self):
- # type: () -> Iterable[Tuple[str, str]]
- """Returns a generator with all environmental vars with prefix PIP_"""
- for key, val in os.environ.items():
- should_be_yielded = (
- key.startswith("PIP_") and
- key[4:].lower() not in self._ignore_env_names
- )
- if should_be_yielded:
- yield key[4:].lower(), val
-
- # XXX: This is patched in the tests.
- def _iter_config_files(self):
- # type: () -> Iterable[Tuple[Kind, List[str]]]
- """Yields variant and configuration files associated with it.
-
- This should be treated like items of a dictionary.
- """
- # SMELL: Move the conditions out of this function
-
- # environment variables have the lowest priority
- config_file = os.environ.get('PIP_CONFIG_FILE', None)
- if config_file is not None:
- yield kinds.ENV, [config_file]
- else:
- yield kinds.ENV, []
-
- # at the base we have any global configuration
- yield kinds.GLOBAL, list(site_config_files)
-
- # per-user configuration next
- should_load_user_config = not self.isolated and not (
- config_file and os.path.exists(config_file)
- )
- if should_load_user_config:
- # The legacy config file is overridden by the new config file
- yield kinds.USER, [legacy_config_file, new_config_file]
-
- # finally, virtualenv configuration, which trumps all the others
- if running_under_virtualenv():
- yield kinds.VENV, [venv_config_file]
-
- def _get_parser_to_modify(self):
- # type: () -> Tuple[str, RawConfigParser]
- # Determine which parser to modify
- parsers = self._parsers[self.load_only]
- if not parsers:
- # This should not happen if everything works correctly.
- raise ConfigurationError(
- "Fatal Internal error [id=2]. Please report as a bug."
- )
-
- # Use the highest priority parser.
- return parsers[-1]
-
- # XXX: This is patched in the tests.
- def _mark_as_modified(self, fname, parser):
- # type: (str, RawConfigParser) -> None
- file_parser_tuple = (fname, parser)
- if file_parser_tuple not in self._modified_parsers:
- self._modified_parsers.append(file_parser_tuple)
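The override behaviour of this deleted Configuration class reduces to the dictionary merge in _dictionary: variants later in _override_order overwrite earlier ones. A minimal standalone sketch of that merge (the variant names and values are illustrative, not pip's actual kinds constants):

    # Later variants win, mirroring Configuration._dictionary above.
    override_order = ["global", "user", "venv", "env", "env-var"]
    config = {
        "global": {"global.timeout": "60"},
        "user": {"global.timeout": "15", "install.user": "true"},
        "venv": {},
        "env": {},
        "env-var": {"global.timeout": "5"},
    }

    merged = {}
    for variant in override_order:
        merged.update(config[variant])

    print(merged["global.timeout"])  # "5" - the env-var variant overrides all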
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/download.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/download.py
deleted file mode 100644
index 2bbe176..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/download.py
+++ /dev/null
@@ -1,971 +0,0 @@
-from __future__ import absolute_import
-
-import cgi
-import email.utils
-import getpass
-import json
-import logging
-import mimetypes
-import os
-import platform
-import re
-import shutil
-import sys
-
-from pip._vendor import requests, six, urllib3
-from pip._vendor.cachecontrol import CacheControlAdapter
-from pip._vendor.cachecontrol.caches import FileCache
-from pip._vendor.lockfile import LockError
-from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
-from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
-from pip._vendor.requests.structures import CaseInsensitiveDict
-from pip._vendor.requests.utils import get_netrc_auth
- # NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
-# why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client # type: ignore
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-from pip._vendor.urllib3.util import IS_PYOPENSSL
-
-import pip
-from pip._internal.exceptions import HashMismatch, InstallationError
-from pip._internal.locations import write_delete_marker_file
-from pip._internal.models.index import PyPI
-from pip._internal.utils.encoding import auto_decode
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.glibc import libc_ver
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
- display_path, format_size, get_installed_version, rmtree,
- split_auth_from_netloc, splitext, unpack_file,
-)
-from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import DownloadProgressProvider
-from pip._internal.vcs import vcs
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Optional, Tuple, Dict, IO, Text, Union
- )
- from pip._internal.models.link import Link # noqa: F401
- from pip._internal.utils.hashes import Hashes # noqa: F401
- from pip._internal.vcs import AuthInfo # noqa: F401
-
-try:
- import ssl # noqa
-except ImportError:
- ssl = None
-
-HAS_TLS = (ssl is not None) or IS_PYOPENSSL
-
-__all__ = ['get_file_content',
- 'is_url', 'url_to_path', 'path_to_url',
- 'is_archive_file', 'unpack_vcs_link',
- 'unpack_file_url', 'is_vcs_url', 'is_file_url',
- 'unpack_http_url', 'unpack_url']
-
-
-logger = logging.getLogger(__name__)
-
-
-def user_agent():
- """
- Return a string representing the user agent.
- """
- data = {
- "installer": {"name": "pip", "version": pip.__version__},
- "python": platform.python_version(),
- "implementation": {
- "name": platform.python_implementation(),
- },
- }
-
- if data["implementation"]["name"] == 'CPython':
- data["implementation"]["version"] = platform.python_version()
- elif data["implementation"]["name"] == 'PyPy':
- if sys.pypy_version_info.releaselevel == 'final':
- pypy_version_info = sys.pypy_version_info[:3]
- else:
- pypy_version_info = sys.pypy_version_info
- data["implementation"]["version"] = ".".join(
- [str(x) for x in pypy_version_info]
- )
- elif data["implementation"]["name"] == 'Jython':
- # Complete Guess
- data["implementation"]["version"] = platform.python_version()
- elif data["implementation"]["name"] == 'IronPython':
- # Complete Guess
- data["implementation"]["version"] = platform.python_version()
-
- if sys.platform.startswith("linux"):
- from pip._vendor import distro
- distro_infos = dict(filter(
- lambda x: x[1],
- zip(["name", "version", "id"], distro.linux_distribution()),
- ))
- libc = dict(filter(
- lambda x: x[1],
- zip(["lib", "version"], libc_ver()),
- ))
- if libc:
- distro_infos["libc"] = libc
- if distro_infos:
- data["distro"] = distro_infos
-
- if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
- data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
-
- if platform.system():
- data.setdefault("system", {})["name"] = platform.system()
-
- if platform.release():
- data.setdefault("system", {})["release"] = platform.release()
-
- if platform.machine():
- data["cpu"] = platform.machine()
-
- if HAS_TLS:
- data["openssl_version"] = ssl.OPENSSL_VERSION
-
- setuptools_version = get_installed_version("setuptools")
- if setuptools_version is not None:
- data["setuptools_version"] = setuptools_version
-
- return "{data[installer][name]}/{data[installer][version]} {json}".format(
- data=data,
- json=json.dumps(data, separators=(",", ":"), sort_keys=True),
- )
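user_agent() yields a pip/<version> token followed by a compact JSON blob of environment details. A sketch of the same shape using only the standard library (the version string is an example; the real function also records distro, libc, OpenSSL, and setuptools details):

    import json
    import platform

    data = {
        "installer": {"name": "pip", "version": "19.0.3"},
        "python": platform.python_version(),
        "implementation": {"name": platform.python_implementation()},
    }
    # e.g. pip/19.0.3 {"implementation":{"name":"CPython"},"installer":...}
    ua = "{}/{} {}".format(
        data["installer"]["name"],
        data["installer"]["version"],
        json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
    print(ua)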
-
-
-class MultiDomainBasicAuth(AuthBase):
-
- def __init__(self, prompting=True):
- # type: (bool) -> None
- self.prompting = prompting
- self.passwords = {} # type: Dict[str, AuthInfo]
-
- def __call__(self, req):
- parsed = urllib_parse.urlparse(req.url)
-
- # Split the credentials from the netloc.
- netloc, url_user_password = split_auth_from_netloc(parsed.netloc)
-
- # Set the url of the request to the url without any credentials
- req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
-
- # Use any stored credentials that we have for this netloc
- username, password = self.passwords.get(netloc, (None, None))
-
- # Use the credentials embedded in the url if we have none stored
- if username is None:
- username, password = url_user_password
-
- # Get creds from netrc if we still don't have them
- if username is None and password is None:
- netrc_auth = get_netrc_auth(req.url)
- username, password = netrc_auth if netrc_auth else (None, None)
-
- if username or password:
- # Store the username and password
- self.passwords[netloc] = (username, password)
-
- # Send the basic auth with this request
- req = HTTPBasicAuth(username or "", password or "")(req)
-
- # Attach a hook to handle 401 responses
- req.register_hook("response", self.handle_401)
-
- return req
-
- def handle_401(self, resp, **kwargs):
- # We only care about 401 responses, anything else we want to just
- # pass through the actual response
- if resp.status_code != 401:
- return resp
-
- # We are not able to prompt the user so simply return the response
- if not self.prompting:
- return resp
-
- parsed = urllib_parse.urlparse(resp.url)
-
- # Prompt the user for a new username and password
- username = six.moves.input("User for %s: " % parsed.netloc)
- password = getpass.getpass("Password: ")
-
- # Store the new username and password to use for future requests
- if username or password:
- self.passwords[parsed.netloc] = (username, password)
-
- # Consume content and release the original connection to allow our new
- # request to reuse the same one.
- resp.content
- resp.raw.release_conn()
-
- # Add our new username and password to the request
- req = HTTPBasicAuth(username or "", password or "")(resp.request)
- req.register_hook("response", self.warn_on_401)
-
- # Send our new request
- new_resp = resp.connection.send(req, **kwargs)
- new_resp.history.append(resp)
-
- return new_resp
-
- def warn_on_401(self, resp, **kwargs):
- # warn user that they provided incorrect credentials
- if resp.status_code == 401:
- logger.warning('401 Error, Credentials not correct for %s',
- resp.request.url)
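__call__ strips any user:pass@ credentials out of the request URL before sending and re-attaches them as HTTP Basic auth, so credentials never travel as part of the URL itself. pip's split_auth_from_netloc helper is imported above; the same split can be sketched with the Python 3 standard library:

    from urllib.parse import urlsplit, urlunsplit

    url = "https://user:s3cret@pypi.example.org/simple/"  # example URL
    parts = urlsplit(url)

    # hostname/port identify the server; username/password are the credentials
    netloc = parts.hostname + (":%d" % parts.port if parts.port else "")
    clean_url = urlunsplit(
        (parts.scheme, netloc, parts.path, parts.query, parts.fragment)
    )

    print(clean_url)                       # https://pypi.example.org/simple/
    print(parts.username, parts.password)  # user s3cret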
-
-
-class LocalFSAdapter(BaseAdapter):
-
- def send(self, request, stream=None, timeout=None, verify=None, cert=None,
- proxies=None):
- pathname = url_to_path(request.url)
-
- resp = Response()
- resp.status_code = 200
- resp.url = request.url
-
- try:
- stats = os.stat(pathname)
- except OSError as exc:
- resp.status_code = 404
- resp.raw = exc
- else:
- modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
- content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
- resp.headers = CaseInsensitiveDict({
- "Content-Type": content_type,
- "Content-Length": stats.st_size,
- "Last-Modified": modified,
- })
-
- resp.raw = open(pathname, "rb")
- resp.close = resp.raw.close
-
- return resp
-
- def close(self):
- pass
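Mounted under the file:// prefix (as PipSession.__init__ does further down), this adapter lets one requests session serve local files and remote URLs through the same interface. A hedged usage sketch, assuming the module is importable as pip._internal.download and that the example file exists:

    import requests
    from pip._internal.download import LocalFSAdapter

    session = requests.Session()
    session.mount("file://", LocalFSAdapter())

    # Served from disk, but consumed like any HTTP response.
    resp = session.get("file:///tmp/requirements.txt")  # example path
    print(resp.status_code, resp.headers.get("Content-Type"))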
-
-
-class SafeFileCache(FileCache):
- """
- A file based cache which is safe to use even when the target directory may
- not be accessible or writable.
- """
-
- def __init__(self, *args, **kwargs):
- super(SafeFileCache, self).__init__(*args, **kwargs)
-
- # Check to ensure that the directory containing our cache directory
- # is owned by the user currently executing pip. If it does not exist
- # we will check the parent directory until we find one that does exist.
- # If it is not owned by the user executing pip then we will disable
- # the cache and log a warning.
- if not check_path_owner(self.directory):
- logger.warning(
- "The directory '%s' or its parent directory is not owned by "
- "the current user and the cache has been disabled. Please "
- "check the permissions and owner of that directory. If "
- "executing pip with sudo, you may want sudo's -H flag.",
- self.directory,
- )
-
- # Set our directory to None to disable the Cache
- self.directory = None
-
- def get(self, *args, **kwargs):
- # If we don't have a directory, then the cache should be a no-op.
- if self.directory is None:
- return
-
- try:
- return super(SafeFileCache, self).get(*args, **kwargs)
- except (LockError, OSError, IOError):
- # We intentionally silence this error, if we can't access the cache
- # then we can just skip caching and process the request as if
- # caching wasn't enabled.
- pass
-
- def set(self, *args, **kwargs):
- # If we don't have a directory, then the cache should be a no-op.
- if self.directory is None:
- return
-
- try:
- return super(SafeFileCache, self).set(*args, **kwargs)
- except (LockError, OSError, IOError):
- # We intentionally silence this error, if we can't access the cache
- # then we can just skip caching and process the request as if
- # caching wasn't enabled.
- pass
-
- def delete(self, *args, **kwargs):
- # If we don't have a directory, then the cache should be a no-op.
- if self.directory is None:
- return
-
- try:
- return super(SafeFileCache, self).delete(*args, **kwargs)
- except (LockError, OSError, IOError):
- # We intentionally silence this error, if we can't access the cache
- # then we can just skip caching and process the request as if
- # caching wasn't enabled.
- pass
-
-
-class InsecureHTTPAdapter(HTTPAdapter):
-
- def cert_verify(self, conn, url, verify, cert):
- conn.cert_reqs = 'CERT_NONE'
- conn.ca_certs = None
-
-
-class PipSession(requests.Session):
-
- timeout = None # type: Optional[int]
-
- def __init__(self, *args, **kwargs):
- retries = kwargs.pop("retries", 0)
- cache = kwargs.pop("cache", None)
- insecure_hosts = kwargs.pop("insecure_hosts", [])
-
- super(PipSession, self).__init__(*args, **kwargs)
-
- # Attach our User Agent to the request
- self.headers["User-Agent"] = user_agent()
-
- # Attach our Authentication handler to the session
- self.auth = MultiDomainBasicAuth()
-
- # Create our urllib3.Retry instance which will allow us to customize
- # how we handle retries.
- retries = urllib3.Retry(
- # Set the total number of retries that a particular request can
- # have.
- total=retries,
-
- # A 503 error from PyPI typically means that the Fastly -> Origin
- # connection got interrupted in some way. A 503 error in general
- # is typically considered a transient error so we'll go ahead and
- # retry it.
- # A 500 may indicate transient error in Amazon S3
- # A 520 or 527 - may indicate transient error in CloudFlare
- status_forcelist=[500, 503, 520, 527],
-
- # Add a small amount of back off between failed requests in
- # order to prevent hammering the service.
- backoff_factor=0.25,
- )
-
- # We want to _only_ cache responses on securely fetched origins. We do
- # this because we can't validate the response of an insecurely fetched
- # origin, and we don't want someone to be able to poison the cache and
- # require manual eviction from the cache to fix it.
- if cache:
- secure_adapter = CacheControlAdapter(
- cache=SafeFileCache(cache, use_dir_lock=True),
- max_retries=retries,
- )
- else:
- secure_adapter = HTTPAdapter(max_retries=retries)
-
- # Our Insecure HTTPAdapter disables HTTPS validation. It does not
- # support caching (see above) so we'll use it for all http:// URLs as
- # well as any https:// host that we've marked as ignoring TLS errors
- # for.
- insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
-
- self.mount("https://", secure_adapter)
- self.mount("http://", insecure_adapter)
-
- # Enable file:// urls
- self.mount("file://", LocalFSAdapter())
-
- # We want to use a non-validating adapter for any requests which are
- # deemed insecure.
- for host in insecure_hosts:
- self.mount("https://{}/".format(host), insecure_adapter)
-
- def request(self, method, url, *args, **kwargs):
- # Allow setting a default timeout on a session
- kwargs.setdefault("timeout", self.timeout)
-
- # Dispatch the actual request
- return super(PipSession, self).request(method, url, *args, **kwargs)
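PipSession ties the pieces together: the user agent, the multi-domain auth handler, a retry policy, an optional caching adapter for securely fetched origins, and file:// support. A usage sketch (the cache path and host name are examples):

    from pip._internal.download import PipSession

    session = PipSession(
        retries=3,                                 # wrapped in urllib3.Retry
        cache="/tmp/pip-http-cache",               # enables CacheControlAdapter
        insecure_hosts=["internal.example.org"],   # TLS errors ignored here
    )
    session.timeout = 15  # default timeout applied by request() above

    resp = session.get("https://pypi.org/simple/")
    resp.raise_for_status()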
-
-
-def get_file_content(url, comes_from=None, session=None):
- # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
- """Gets the content of a file; it may be a filename, file: URL, or
- http: URL. Returns (location, content). Content is unicode.
-
- :param url: File path or url.
- :param comes_from: Origin description of requirements.
- :param session: Instance of pip._internal.download.PipSession.
- """
- if session is None:
- raise TypeError(
- "get_file_content() missing 1 required keyword argument: 'session'"
- )
-
- match = _scheme_re.search(url)
- if match:
- scheme = match.group(1).lower()
- if (scheme == 'file' and comes_from and
- comes_from.startswith('http')):
- raise InstallationError(
- 'Requirements file %s references URL %s, which is local'
- % (comes_from, url))
- if scheme == 'file':
- path = url.split(':', 1)[1]
- path = path.replace('\\', '/')
- match = _url_slash_drive_re.match(path)
- if match:
- path = match.group(1) + ':' + path.split('|', 1)[1]
- path = urllib_parse.unquote(path)
- if path.startswith('/'):
- path = '/' + path.lstrip('/')
- url = path
- else:
- # FIXME: catch some errors
- resp = session.get(url)
- resp.raise_for_status()
- return resp.url, resp.text
- try:
- with open(url, 'rb') as f:
- content = auto_decode(f.read())
- except IOError as exc:
- raise InstallationError(
- 'Could not open requirements file: %s' % str(exc)
- )
- return url, content
-
-
-_scheme_re = re.compile(r'^(http|https|file):', re.I)
-_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
-
-
-def is_url(name):
- # type: (Union[str, Text]) -> bool
- """Returns true if the name looks like a URL"""
- if ':' not in name:
- return False
- scheme = name.split(':', 1)[0].lower()
- return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
-
-
-def url_to_path(url):
- # type: (str) -> str
- """
- Convert a file: URL to a path.
- """
- assert url.startswith('file:'), (
- "You can only turn file: urls into filenames (not %r)" % url)
-
- _, netloc, path, _, _ = urllib_parse.urlsplit(url)
-
- # if we have a UNC path, prepend UNC share notation
- if netloc:
- netloc = '\\\\' + netloc
-
- path = urllib_request.url2pathname(netloc + path)
- return path
-
-
-def path_to_url(path):
- # type: (Union[str, Text]) -> str
- """
- Convert a path to a file: URL. The path will be made absolute and have
- quoted path parts.
- """
- path = os.path.normpath(os.path.abspath(path))
- url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
- return url
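path_to_url and url_to_path are intended to be inverses, built on the standard library's pathname2url/url2pathname pair; the round trip is easy to check:

    import os
    from pip._internal.download import path_to_url, url_to_path

    path = os.path.abspath("setup.py")  # any example path
    url = path_to_url(path)             # e.g. file:///home/user/setup.py
    assert url_to_path(url) == path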
-
-
-def is_archive_file(name):
- # type: (str) -> bool
- """Return True if `name` is a considered as an archive file."""
- ext = splitext(name)[1].lower()
- if ext in ARCHIVE_EXTENSIONS:
- return True
- return False
-
-
-def unpack_vcs_link(link, location):
- vcs_backend = _get_used_vcs_backend(link)
- vcs_backend.unpack(location)
-
-
-def _get_used_vcs_backend(link):
- for backend in vcs.backends:
- if link.scheme in backend.schemes:
- vcs_backend = backend(link.url)
- return vcs_backend
-
-
-def is_vcs_url(link):
- # type: (Link) -> bool
- return bool(_get_used_vcs_backend(link))
-
-
-def is_file_url(link):
- # type: (Link) -> bool
- return link.url.lower().startswith('file:')
-
-
-def is_dir_url(link):
- # type: (Link) -> bool
- """Return whether a file:// Link points to a directory.
-
- ``link`` must not have any other scheme but file://. Call is_file_url()
- first.
-
- """
- link_path = url_to_path(link.url_without_fragment)
- return os.path.isdir(link_path)
-
-
-def _progress_indicator(iterable, *args, **kwargs):
- return iterable
-
-
-def _download_url(
- resp, # type: Response
- link, # type: Link
- content_file, # type: IO
- hashes, # type: Hashes
- progress_bar # type: str
-):
- # type: (...) -> None
- try:
- total_length = int(resp.headers['content-length'])
- except (ValueError, KeyError, TypeError):
- total_length = 0
-
- cached_resp = getattr(resp, "from_cache", False)
- if logger.getEffectiveLevel() > logging.INFO:
- show_progress = False
- elif cached_resp:
- show_progress = False
- elif total_length > (40 * 1000):
- show_progress = True
- elif not total_length:
- show_progress = True
- else:
- show_progress = False
-
- show_url = link.show_url
-
- def resp_read(chunk_size):
- try:
- # Special case for urllib3.
- for chunk in resp.raw.stream(
- chunk_size,
- # We use decode_content=False here because we don't
- # want urllib3 to mess with the raw bytes we get
- # from the server. If we decompress inside of
- # urllib3 then we cannot verify the checksum
- # because the checksum will be of the compressed
- # file. This breakage will only occur if the
- # server adds a Content-Encoding header, which
- # depends on how the server was configured:
- # - Some servers will notice that the file isn't a
- # compressible file and will leave the file alone
- # and with an empty Content-Encoding
- # - Some servers will notice that the file is
- # already compressed and will leave the file
- # alone and will add a Content-Encoding: gzip
- # header
- # - Some servers won't notice anything at all and
- # will take a file that's already been compressed
- # and compress it again and set the
- # Content-Encoding: gzip header
- #
- # By setting this not to decode automatically we
- # hope to eliminate problems with the second case.
- decode_content=False):
- yield chunk
- except AttributeError:
- # Standard file-like object.
- while True:
- chunk = resp.raw.read(chunk_size)
- if not chunk:
- break
- yield chunk
-
- def written_chunks(chunks):
- for chunk in chunks:
- content_file.write(chunk)
- yield chunk
-
- progress_indicator = _progress_indicator
-
- if link.netloc == PyPI.netloc:
- url = show_url
- else:
- url = link.url_without_fragment
-
- if show_progress: # We don't show progress on cached responses
- progress_indicator = DownloadProgressProvider(progress_bar,
- max=total_length)
- if total_length:
- logger.info("Downloading %s (%s)", url, format_size(total_length))
- else:
- logger.info("Downloading %s", url)
- elif cached_resp:
- logger.info("Using cached %s", url)
- else:
- logger.info("Downloading %s", url)
-
- logger.debug('Downloading from URL %s', link)
-
- downloaded_chunks = written_chunks(
- progress_indicator(
- resp_read(CONTENT_CHUNK_SIZE),
- CONTENT_CHUNK_SIZE
- )
- )
- if hashes:
- hashes.check_against_chunks(downloaded_chunks)
- else:
- consume(downloaded_chunks)
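The trick in _download_url is that written_chunks is a generator: each chunk is written to disk as it streams past, while Hashes.check_against_chunks pulls the same iterator and feeds every chunk into the digests, so downloading, saving, and verifying happen in a single pass. The core pattern, sketched with hashlib and plain requests (the URL and digest are placeholders):

    import hashlib
    import requests

    EXPECTED_SHA256 = "..."  # placeholder for the pinned digest

    resp = requests.get("https://example.org/pkg.tar.gz", stream=True)
    resp.raise_for_status()

    digest = hashlib.sha256()
    with open("pkg.tar.gz", "wb") as f:
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)        # persist the chunk...
            digest.update(chunk)  # ...and hash it in the same pass

    if digest.hexdigest() != EXPECTED_SHA256:
        raise ValueError("downloaded file does not match the expected hash")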
-
-
-def _copy_file(filename, location, link):
- copy = True
- download_location = os.path.join(location, link.filename)
- if os.path.exists(download_location):
- response = ask_path_exists(
- 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
- display_path(download_location), ('i', 'w', 'b', 'a'))
- if response == 'i':
- copy = False
- elif response == 'w':
- logger.warning('Deleting %s', display_path(download_location))
- os.remove(download_location)
- elif response == 'b':
- dest_file = backup_dir(download_location)
- logger.warning(
- 'Backing up %s to %s',
- display_path(download_location),
- display_path(dest_file),
- )
- shutil.move(download_location, dest_file)
- elif response == 'a':
- sys.exit(-1)
- if copy:
- shutil.copy(filename, download_location)
- logger.info('Saved %s', display_path(download_location))
-
-
-def unpack_http_url(
- link, # type: Link
- location, # type: str
- download_dir=None, # type: Optional[str]
- session=None, # type: Optional[PipSession]
- hashes=None, # type: Optional[Hashes]
- progress_bar="on" # type: str
-):
- # type: (...) -> None
- if session is None:
- raise TypeError(
- "unpack_http_url() missing 1 required keyword argument: 'session'"
- )
-
- with TempDirectory(kind="unpack") as temp_dir:
- # If a download dir is specified, is the file already downloaded there?
- already_downloaded_path = None
- if download_dir:
- already_downloaded_path = _check_download_dir(link,
- download_dir,
- hashes)
-
- if already_downloaded_path:
- from_path = already_downloaded_path
- content_type = mimetypes.guess_type(from_path)[0]
- else:
- # let's download to a tmp dir
- from_path, content_type = _download_http_url(link,
- session,
- temp_dir.path,
- hashes,
- progress_bar)
-
- # unpack the archive to the build dir location. even when only
- # downloading archives, they have to be unpacked to parse dependencies
- unpack_file(from_path, location, content_type, link)
-
- # a download dir is specified; let's copy the archive there
- if download_dir and not already_downloaded_path:
- _copy_file(from_path, download_dir, link)
-
- if not already_downloaded_path:
- os.unlink(from_path)
-
-
-def unpack_file_url(
- link, # type: Link
- location, # type: str
- download_dir=None, # type: Optional[str]
- hashes=None # type: Optional[Hashes]
-):
- # type: (...) -> None
- """Unpack link into location.
-
- If download_dir is provided and link points to a file, make a copy
- of the link file inside download_dir.
- """
- link_path = url_to_path(link.url_without_fragment)
-
- # If it's a url to a local directory
- if is_dir_url(link):
- if os.path.isdir(location):
- rmtree(location)
- shutil.copytree(link_path, location, symlinks=True)
- if download_dir:
- logger.info('Link is a directory, ignoring download_dir')
- return
-
- # If --require-hashes is off, `hashes` is either empty, the
- # link's embedded hash, or MissingHashes; it is required to
- # match. If --require-hashes is on, we are satisfied by any
- # hash in `hashes` matching: a URL-based or an option-based
- # one; no internet-sourced hash will be in `hashes`.
- if hashes:
- hashes.check_against_path(link_path)
-
- # If a download dir is specified, is the file already there and valid?
- already_downloaded_path = None
- if download_dir:
- already_downloaded_path = _check_download_dir(link,
- download_dir,
- hashes)
-
- if already_downloaded_path:
- from_path = already_downloaded_path
- else:
- from_path = link_path
-
- content_type = mimetypes.guess_type(from_path)[0]
-
- # unpack the archive to the build dir location. even when only downloading
- # archives, they have to be unpacked to parse dependencies
- unpack_file(from_path, location, content_type, link)
-
- # a download dir is specified and not already downloaded
- if download_dir and not already_downloaded_path:
- _copy_file(from_path, download_dir, link)
-
-
-def _copy_dist_from_dir(link_path, location):
- """Copy distribution files in `link_path` to `location`.
-
- Invoked when the user requests to install a local directory. E.g.:
-
- pip install .
- pip install ~/dev/git-repos/python-prompt-toolkit
-
- """
-
- # Note: This is currently VERY SLOW if you have a lot of data in the
- # directory, because it copies everything with `shutil.copytree`.
- # What it should really do is build an sdist and install that.
- # See https://github.com/pypa/pip/issues/2195
-
- if os.path.isdir(location):
- rmtree(location)
-
- # build an sdist
- setup_py = 'setup.py'
- sdist_args = [sys.executable]
- sdist_args.append('-c')
- sdist_args.append(SETUPTOOLS_SHIM % setup_py)
- sdist_args.append('sdist')
- sdist_args += ['--dist-dir', location]
- logger.info('Running setup.py sdist for %s', link_path)
-
- with indent_log():
- call_subprocess(sdist_args, cwd=link_path, show_stdout=False)
-
- # unpack sdist into `location`
- sdist = os.path.join(location, os.listdir(location)[0])
- logger.info('Unpacking sdist %s into %s', sdist, location)
- unpack_file(sdist, location, content_type=None, link=None)
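The note above notwithstanding, the code path shown builds an sdist in `location` via the setuptools shim and then unpacks it there. Roughly the same invocation without pip's shim (paths are placeholders; pip itself runs python -c '<shim>' sdist ... to force setuptools):

    import subprocess
    import sys

    project_dir = "/path/to/project"  # placeholder
    dist_dir = "/tmp/build-location"  # placeholder

    subprocess.check_call(
        [sys.executable, "setup.py", "sdist", "--dist-dir", dist_dir],
        cwd=project_dir,
    )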
-
-
-class PipXmlrpcTransport(xmlrpc_client.Transport):
- """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
- object.
- """
-
- def __init__(self, index_url, session, use_datetime=False):
- xmlrpc_client.Transport.__init__(self, use_datetime)
- index_parts = urllib_parse.urlparse(index_url)
- self._scheme = index_parts.scheme
- self._session = session
-
- def request(self, host, handler, request_body, verbose=False):
- parts = (self._scheme, host, handler, None, None, None)
- url = urllib_parse.urlunparse(parts)
- try:
- headers = {'Content-Type': 'text/xml'}
- response = self._session.post(url, data=request_body,
- headers=headers, stream=True)
- response.raise_for_status()
- self.verbose = verbose
- return self.parse_response(response.raw)
- except requests.HTTPError as exc:
- logger.critical(
- "HTTP error %s while getting %s",
- exc.response.status_code, url,
- )
- raise
-
-
-def unpack_url(
- link, # type: Optional[Link]
- location, # type: Optional[str]
- download_dir=None, # type: Optional[str]
- only_download=False, # type: bool
- session=None, # type: Optional[PipSession]
- hashes=None, # type: Optional[Hashes]
- progress_bar="on" # type: str
-):
- # type: (...) -> None
- """Unpack link.
- If link is a VCS link:
- if only_download, export into download_dir and ignore location
- else unpack into location
- for other types of link:
- - unpack into location
- - if download_dir, copy the file into download_dir
- - if only_download, mark location for deletion
-
- :param hashes: A Hashes object, one of whose embedded hashes must match,
- or HashMismatch will be raised. If the Hashes is empty, no matches are
- required, and unhashable types of requirements (like VCS ones, which
- would ordinarily raise HashUnsupported) are allowed.
- """
- # non-editable vcs urls
- if is_vcs_url(link):
- unpack_vcs_link(link, location)
-
- # file urls
- elif is_file_url(link):
- unpack_file_url(link, location, download_dir, hashes=hashes)
-
- # http urls
- else:
- if session is None:
- session = PipSession()
-
- unpack_http_url(
- link,
- location,
- download_dir,
- session,
- hashes=hashes,
- progress_bar=progress_bar
- )
- if only_download:
- write_delete_marker_file(location)
-
-
-def _download_http_url(
- link, # type: Link
- session, # type: PipSession
- temp_dir, # type: str
- hashes, # type: Hashes
- progress_bar # type: str
-):
- # type: (...) -> Tuple[str, str]
- """Download link url into temp_dir using provided session"""
- target_url = link.url.split('#', 1)[0]
- try:
- resp = session.get(
- target_url,
- # We use Accept-Encoding: identity here because requests
- # defaults to accepting compressed responses. This breaks in
- # a variety of ways depending on how the server is configured.
- # - Some servers will notice that the file isn't a compressible
- # file and will leave the file alone and with an empty
- # Content-Encoding
- # - Some servers will notice that the file is already
- # compressed and will leave the file alone and will add a
- # Content-Encoding: gzip header
- # - Some servers won't notice anything at all and will take
- # a file that's already been compressed and compress it again
- # and set the Content-Encoding: gzip header
- # By setting this to request only the identity encoding We're
- # hoping to eliminate the third case. Hopefully there does not
- # exist a server which when given a file will notice it is
- # already compressed and that you're not asking for a
- # compressed file and will then decompress it before sending
- # because if that's the case I don't think it'll ever be
- # possible to make this work.
- headers={"Accept-Encoding": "identity"},
- stream=True,
- )
- resp.raise_for_status()
- except requests.HTTPError as exc:
- logger.critical(
- "HTTP error %s while getting %s", exc.response.status_code, link,
- )
- raise
-
- content_type = resp.headers.get('content-type', '')
- filename = link.filename # fallback
- # Have a look at the Content-Disposition header for a better guess
- content_disposition = resp.headers.get('content-disposition')
- if content_disposition:
- type, params = cgi.parse_header(content_disposition)
- # We use ``or`` here because we don't want to use an "empty" value
- # from the filename param.
- filename = params.get('filename') or filename
- ext = splitext(filename)[1]
- if not ext:
- ext = mimetypes.guess_extension(content_type)
- if ext:
- filename += ext
- if not ext and link.url != resp.url:
- ext = os.path.splitext(resp.url)[1]
- if ext:
- filename += ext
- file_path = os.path.join(temp_dir, filename)
- with open(file_path, 'wb') as content_file:
- _download_url(resp, link, content_file, hashes, progress_bar)
- return file_path, content_type
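Filename selection above prefers the Content-Disposition header over the URL's last path segment, and the parsing is plain cgi.parse_header:

    import cgi

    header = 'attachment; filename="requests-2.21.0.tar.gz"'  # example value
    _, params = cgi.parse_header(header)
    print(params.get("filename"))  # requests-2.21.0.tar.gz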
-
-
-def _check_download_dir(link, download_dir, hashes):
- # type: (Link, str, Hashes) -> Optional[str]
- """ Check download_dir for previously downloaded file with correct hash
- If a correct file is found return its path else None
- """
- download_path = os.path.join(download_dir, link.filename)
- if os.path.exists(download_path):
- # If already downloaded, does its hash match?
- logger.info('File was already downloaded %s', download_path)
- if hashes:
- try:
- hashes.check_against_path(download_path)
- except HashMismatch:
- logger.warning(
- 'Previously-downloaded file %s has bad hash. '
- 'Re-downloading.',
- download_path
- )
- os.unlink(download_path)
- return None
- return download_path
- return None
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/exceptions.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/exceptions.py
deleted file mode 100644
index 38ceeea..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/exceptions.py
+++ /dev/null
@@ -1,274 +0,0 @@
-"""Exceptions used throughout package"""
-from __future__ import absolute_import
-
-from itertools import chain, groupby, repeat
-
-from pip._vendor.six import iteritems
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
-
-
-class PipError(Exception):
- """Base pip exception"""
-
-
-class ConfigurationError(PipError):
- """General exception in configuration"""
-
-
-class InstallationError(PipError):
- """General exception during installation"""
-
-
-class UninstallationError(PipError):
- """General exception during uninstallation"""
-
-
-class DistributionNotFound(InstallationError):
- """Raised when a distribution cannot be found to satisfy a requirement"""
-
-
-class RequirementsFileParseError(InstallationError):
- """Raised when a general error occurs parsing a requirements file line."""
-
-
-class BestVersionAlreadyInstalled(PipError):
- """Raised when the most up-to-date version of a package is already
- installed."""
-
-
-class BadCommand(PipError):
- """Raised when virtualenv or a command is not found"""
-
-
-class CommandError(PipError):
- """Raised when there is an error in command-line arguments"""
-
-
-class PreviousBuildDirError(PipError):
- """Raised when there's a previous conflicting build directory"""
-
-
-class InvalidWheelFilename(InstallationError):
- """Invalid wheel filename."""
-
-
-class UnsupportedWheel(InstallationError):
- """Unsupported wheel."""
-
-
-class HashErrors(InstallationError):
- """Multiple HashError instances rolled into one for reporting"""
-
- def __init__(self):
- self.errors = []
-
- def append(self, error):
- self.errors.append(error)
-
- def __str__(self):
- lines = []
- self.errors.sort(key=lambda e: e.order)
- for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
- lines.append(cls.head)
- lines.extend(e.body() for e in errors_of_cls)
- if lines:
- return '\n'.join(lines)
-
- def __nonzero__(self):
- return bool(self.errors)
-
- def __bool__(self):
- return self.__nonzero__()
-
-
-class HashError(InstallationError):
- """
- A failure to verify a package against known-good hashes
-
- :cvar order: An int sorting hash exception classes by difficulty of
- recovery (lower being harder), so the user doesn't bother fretting
- about unpinned packages when they have deeper issues, like VCS
- dependencies, to deal with. Also keeps error reports in a
- deterministic order.
- :cvar head: A section heading for display above potentially many
- exceptions of this kind
- :ivar req: The InstallRequirement that triggered this error. This is
- pasted on after the exception is instantiated, because it's not
- typically available earlier.
-
- """
- req = None # type: Optional[InstallRequirement]
- head = ''
-
- def body(self):
- """Return a summary of me for display under the heading.
-
- This default implementation simply prints a description of the
- triggering requirement.
-
- :param req: The InstallRequirement that provoked this error, with
- populate_link() having already been called
-
- """
- return ' %s' % self._requirement_name()
-
- def __str__(self):
- return '%s\n%s' % (self.head, self.body())
-
- def _requirement_name(self):
- """Return a description of the requirement that triggered me.
-
- This default implementation returns long description of the req, with
- line numbers
-
- """
- return str(self.req) if self.req else 'unknown package'
-
-
-class VcsHashUnsupported(HashError):
- """A hash was provided for a version-control-system-based requirement, but
- we don't have a method for hashing those."""
-
- order = 0
- head = ("Can't verify hashes for these requirements because we don't "
- "have a way to hash version control repositories:")
-
-
-class DirectoryUrlHashUnsupported(HashError):
- """A hash was provided for a version-control-system-based requirement, but
- we don't have a method for hashing those."""
-
- order = 1
- head = ("Can't verify hashes for these file:// requirements because they "
- "point to directories:")
-
-
-class HashMissing(HashError):
- """A hash was needed for a requirement but is absent."""
-
- order = 2
- head = ('Hashes are required in --require-hashes mode, but they are '
- 'missing from some requirements. Here is a list of those '
- 'requirements along with the hashes their downloaded archives '
- 'actually had. Add lines like these to your requirements files to '
- 'prevent tampering. (If you did not enable --require-hashes '
- 'manually, note that it turns on automatically when any package '
- 'has a hash.)')
-
- def __init__(self, gotten_hash):
- """
- :param gotten_hash: The hash of the (possibly malicious) archive we
- just downloaded
- """
- self.gotten_hash = gotten_hash
-
- def body(self):
- # Dodge circular import.
- from pip._internal.utils.hashes import FAVORITE_HASH
-
- package = None
- if self.req:
- # In the case of URL-based requirements, display the original URL
- # seen in the requirements file rather than the package name,
- # so the output can be directly copied into the requirements file.
- package = (self.req.original_link if self.req.original_link
- # In case someone feeds something downright stupid
- # to InstallRequirement's constructor.
- else getattr(self.req, 'req', None))
- return ' %s --hash=%s:%s' % (package or 'unknown package',
- FAVORITE_HASH,
- self.gotten_hash)
-
-
-class HashUnpinned(HashError):
- """A requirement had a hash specified but was not pinned to a specific
- version."""
-
- order = 3
- head = ('In --require-hashes mode, all requirements must have their '
- 'versions pinned with ==. These do not:')
-
-
-class HashMismatch(HashError):
- """
- Distribution file hash values don't match.
-
- :ivar package_name: The name of the package that triggered the hash
- mismatch. Feel free to write to this after the exception is raised to
- improve its error message.
-
- """
- order = 4
- head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
- 'FILE. If you have updated the package versions, please update '
- 'the hashes. Otherwise, examine the package contents carefully; '
- 'someone may have tampered with them.')
-
- def __init__(self, allowed, gots):
- """
- :param allowed: A dict of algorithm names pointing to lists of allowed
- hex digests
- :param gots: A dict of algorithm names pointing to hashes we
- actually got from the files under suspicion
- """
- self.allowed = allowed
- self.gots = gots
-
- def body(self):
- return ' %s:\n%s' % (self._requirement_name(),
- self._hash_comparison())
-
- def _hash_comparison(self):
- """
- Return a comparison of actual and expected hash values.
-
- Example::
-
- Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
- or 123451234512345123451234512345123451234512345
- Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
-
- """
- def hash_then_or(hash_name):
- # For now, all the decent hashes have 6-char names, so we can get
- # away with hard-coding space literals.
- return chain([hash_name], repeat(' or'))
-
- lines = []
- for hash_name, expecteds in iteritems(self.allowed):
- prefix = hash_then_or(hash_name)
- lines.extend((' Expected %s %s' % (next(prefix), e))
- for e in expecteds)
- lines.append(' Got %s\n' %
- self.gots[hash_name].hexdigest())
- prefix = ' or'
- return '\n'.join(lines)
-
-
-class UnsupportedPythonVersion(InstallationError):
- """Unsupported python version according to Requires-Python package
- metadata."""
-
-
-class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
- """When there are errors while loading a configuration file
- """
-
- def __init__(self, reason="could not be loaded", fname=None, error=None):
- super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
- self.reason = reason
- self.fname = fname
- self.error = error
-
- def __str__(self):
- if self.fname is not None:
- message_part = " in {}.".format(self.fname)
- else:
- assert self.error is not None
- message_part = ".\n{}\n".format(self.error.message)
- return "Configuration file {}{}".format(self.reason, message_part)
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/index.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/index.py
deleted file mode 100644
index 9eda3a3..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/index.py
+++ /dev/null
@@ -1,990 +0,0 @@
-"""Routines related to PyPI, indexes"""
-from __future__ import absolute_import
-
-import cgi
-import itertools
-import logging
-import mimetypes
-import os
-import posixpath
-import re
-import sys
-from collections import namedtuple
-
-from pip._vendor import html5lib, requests, six
-from pip._vendor.distlib.compat import unescape
-from pip._vendor.packaging import specifiers
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.requests.exceptions import RetryError, SSLError
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-
-from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
-from pip._internal.exceptions import (
- BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
- UnsupportedWheel,
-)
-from pip._internal.models.candidate import InstallationCandidate
-from pip._internal.models.format_control import FormatControl
-from pip._internal.models.index import PyPI
-from pip._internal.models.link import Link
-from pip._internal.pep425tags import get_supported
-from pip._internal.utils.compat import ipaddress
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path,
- redact_password_from_url,
-)
-from pip._internal.utils.packaging import check_requires_python
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
- from logging import Logger # noqa: F401
- from typing import ( # noqa: F401
- Tuple, Optional, Any, List, Union, Callable, Set, Sequence,
- Iterable, MutableMapping
- )
- from pip._vendor.packaging.version import _BaseVersion # noqa: F401
- from pip._vendor.requests import Response # noqa: F401
- from pip._internal.req import InstallRequirement # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
-
- SecureOrigin = Tuple[str, str, Optional[str]]
- BuildTag = Tuple[Any, ...] # either empty tuple or Tuple[int, str]
- CandidateSortingKey = Tuple[int, _BaseVersion, BuildTag, Optional[int]]
-
-__all__ = ['FormatControl', 'PackageFinder']
-
-
-SECURE_ORIGINS = [
- # protocol, hostname, port
- # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
- ("https", "*", "*"),
- ("*", "localhost", "*"),
- ("*", "127.0.0.0/8", "*"),
- ("*", "::1/128", "*"),
- ("file", "*", None),
- # ssh is always secure.
- ("ssh", "*", "*"),
-] # type: List[SecureOrigin]
-
-
-logger = logging.getLogger(__name__)
-
-
-def _match_vcs_scheme(url):
- # type: (str) -> Optional[str]
- """Look for VCS schemes in the URL.
-
- Returns the matched VCS scheme, or None if there's no match.
- """
- from pip._internal.vcs import VcsSupport
- for scheme in VcsSupport.schemes:
- if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
- return scheme
- return None
-
-
-def _is_url_like_archive(url):
- # type: (str) -> bool
- """Return whether the URL looks like an archive.
- """
- filename = Link(url).filename
- for bad_ext in ARCHIVE_EXTENSIONS:
- if filename.endswith(bad_ext):
- return True
- return False
-
-
-class _NotHTML(Exception):
- def __init__(self, content_type, request_desc):
- # type: (str, str) -> None
- super(_NotHTML, self).__init__(content_type, request_desc)
- self.content_type = content_type
- self.request_desc = request_desc
-
-
-def _ensure_html_header(response):
- # type: (Response) -> None
- """Check the Content-Type header to ensure the response contains HTML.
-
- Raises `_NotHTML` if the content type is not text/html.
- """
- content_type = response.headers.get("Content-Type", "")
- if not content_type.lower().startswith("text/html"):
- raise _NotHTML(content_type, response.request.method)
-
-
-class _NotHTTP(Exception):
- pass
-
-
-def _ensure_html_response(url, session):
- # type: (str, PipSession) -> None
- """Send a HEAD request to the URL, and ensure the response contains HTML.
-
- Raises `_NotHTTP` if the URL is not available for a HEAD request, or
- `_NotHTML` if the content type is not text/html.
- """
- scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
- if scheme not in {'http', 'https'}:
- raise _NotHTTP()
-
- resp = session.head(url, allow_redirects=True)
- resp.raise_for_status()
-
- _ensure_html_header(resp)
-
-
-def _get_html_response(url, session):
- # type: (str, PipSession) -> Response
- """Access an HTML page with GET, and return the response.
-
- This consists of three parts:
-
- 1. If the URL looks suspiciously like an archive, send a HEAD first to
- check the Content-Type is HTML, to avoid downloading a large file.
- Raise `_NotHTTP` if the content type cannot be determined, or
- `_NotHTML` if it is not HTML.
- 2. Actually perform the request. Raise HTTP exceptions on network failures.
- 3. Check the Content-Type header to make sure we got HTML, and raise
- `_NotHTML` otherwise.
- """
- if _is_url_like_archive(url):
- _ensure_html_response(url, session=session)
-
- logger.debug('Getting page %s', url)
-
- resp = session.get(
- url,
- headers={
- "Accept": "text/html",
- # We don't want to blindly return cached data for
- # /simple/, because authors generally expect that
- # twine upload && pip install will function, but if
- # they've done a pip install in the last ~10 minutes
- # it won't. Thus by setting this to zero we will not
- # blindly use any cached data, however the benefit of
- # using max-age=0 instead of no-cache, is that we will
- # still support conditional requests, so we will still
- # minimize traffic sent in cases where the page hasn't
- # changed at all, we will just always incur the round
- # trip for the conditional GET now instead of only
- # once per 10 minutes.
- # For more information, please see pypa/pip#5670.
- "Cache-Control": "max-age=0",
- },
- )
- resp.raise_for_status()
-
- # The check for archives above only works if the url ends with
- # something that looks like an archive. However that is not a
- # requirement of a url. Unless we issue a HEAD request on every
- # url we cannot know ahead of time for sure if something is HTML
- # or not. However we can check after we've downloaded it.
- _ensure_html_header(resp)
-
- return resp
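Cache-Control: max-age=0 keeps conditional requests alive while refusing stale bodies: a cache may answer 304 Not Modified after revalidating, but it never serves an old /simple/ page outright. The request shape, sketched with plain requests:

    import requests

    resp = requests.get(
        "https://pypi.org/simple/requests/",
        headers={
            "Accept": "text/html",
            # Always revalidate, but still allow cheap conditional GETs.
            "Cache-Control": "max-age=0",
        },
    )
    resp.raise_for_status()
    print(resp.headers.get("Content-Type"))  # e.g. text/html; charset=utf-8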
-
-
-def _handle_get_page_fail(
- link, # type: Link
- reason, # type: Union[str, Exception]
- meth=None # type: Optional[Callable[..., None]]
-):
- # type: (...) -> None
- if meth is None:
- meth = logger.debug
- meth("Could not fetch URL %s: %s - skipping", link, reason)
-
-
-def _get_html_page(link, session=None):
- # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
- if session is None:
- raise TypeError(
- "_get_html_page() missing 1 required keyword argument: 'session'"
- )
-
- url = link.url.split('#', 1)[0]
-
- # Check for VCS schemes that do not support lookup as web pages.
- vcs_scheme = _match_vcs_scheme(url)
- if vcs_scheme:
- logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
- return None
-
- # Tack index.html onto file:// URLs that point to directories
- scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
- if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
- # add trailing slash if not present so urljoin doesn't trim
- # final segment
- if not url.endswith('/'):
- url += '/'
- url = urllib_parse.urljoin(url, 'index.html')
- logger.debug(' file: URL is directory, getting %s', url)
-
- try:
- resp = _get_html_response(url, session=session)
- except _NotHTTP:
- logger.debug(
- 'Skipping page %s because it looks like an archive, and cannot '
- 'be checked by HEAD.', link,
- )
- except _NotHTML as exc:
- logger.debug(
- 'Skipping page %s because the %s request got Content-Type: %s',
- link, exc.request_desc, exc.content_type,
- )
- except requests.HTTPError as exc:
- _handle_get_page_fail(link, exc)
- except RetryError as exc:
- _handle_get_page_fail(link, exc)
- except SSLError as exc:
- reason = "There was a problem confirming the ssl certificate: "
- reason += str(exc)
- _handle_get_page_fail(link, reason, meth=logger.info)
- except requests.ConnectionError as exc:
- _handle_get_page_fail(link, "connection error: %s" % exc)
- except requests.Timeout:
- _handle_get_page_fail(link, "timed out")
- else:
- return HTMLPage(resp.content, resp.url, resp.headers)
- return None
-
-
-class PackageFinder(object):
- """This finds packages.
-
- This is meant to match easy_install's technique for looking for
- packages, by reading pages and looking for appropriate links.
- """
-
- def __init__(
- self,
- find_links, # type: List[str]
- index_urls, # type: List[str]
- allow_all_prereleases=False, # type: bool
- trusted_hosts=None, # type: Optional[Iterable[str]]
- session=None, # type: Optional[PipSession]
- format_control=None, # type: Optional[FormatControl]
- platform=None, # type: Optional[str]
- versions=None, # type: Optional[List[str]]
- abi=None, # type: Optional[str]
- implementation=None, # type: Optional[str]
- prefer_binary=False # type: bool
- ):
- # type: (...) -> None
- """Create a PackageFinder.
-
- :param format_control: A FormatControl object or None. Used to control
- the selection of source packages / binary packages when consulting
- the index and links.
- :param platform: A string or None. If None, searches for packages
- that are supported by the current system. Otherwise, will find
- packages that can be built on the platform passed in. These
- packages will only be downloaded for distribution: they will
- not be built locally.
- :param versions: A list of strings or None. This is passed directly
- to pep425tags.py in the get_supported() method.
- :param abi: A string or None. This is passed directly
- to pep425tags.py in the get_supported() method.
- :param implementation: A string or None. This is passed directly
- to pep425tags.py in the get_supported() method.
- """
- if session is None:
- raise TypeError(
- "PackageFinder() missing 1 required keyword argument: "
- "'session'"
- )
-
- # Build find_links. If an argument starts with ~, it may be
- # a local file relative to a home directory. So try normalizing
- # it and if it exists, use the normalized version.
- # This is deliberately conservative - it might be fine just to
- # blindly normalize anything starting with a ~...
- self.find_links = [] # type: List[str]
- for link in find_links:
- if link.startswith('~'):
- new_link = normalize_path(link)
- if os.path.exists(new_link):
- link = new_link
- self.find_links.append(link)
-
- self.index_urls = index_urls
-
- # These are boring links that have already been logged somehow:
- self.logged_links = set() # type: Set[Link]
-
- self.format_control = format_control or FormatControl(set(), set())
-
- # Domains that we won't emit warnings for when not using HTTPS
- self.secure_origins = [
- ("*", host, "*")
- for host in (trusted_hosts if trusted_hosts else [])
- ] # type: List[SecureOrigin]
-
- # Do we want to allow _all_ pre-releases?
- self.allow_all_prereleases = allow_all_prereleases
-
- # The Session we'll use to make requests
- self.session = session
-
- # The valid tags to check potential found wheel candidates against
- self.valid_tags = get_supported(
- versions=versions,
- platform=platform,
- abi=abi,
- impl=implementation,
- )
-
- # Do we prefer old, but valid, binary dist over new source dist
- self.prefer_binary = prefer_binary
-
- # If we don't have TLS enabled, then WARN if anyplace we're looking
- # relies on TLS.
- if not HAS_TLS:
- for link in itertools.chain(self.index_urls, self.find_links):
- parsed = urllib_parse.urlparse(link)
- if parsed.scheme == "https":
- logger.warning(
- "pip is configured with locations that require "
- "TLS/SSL, however the ssl module in Python is not "
- "available."
- )
- break
-
- def get_formatted_locations(self):
- # type: () -> str
- lines = []
- if self.index_urls and self.index_urls != [PyPI.simple_url]:
- lines.append(
- "Looking in indexes: {}".format(", ".join(
- redact_password_from_url(url) for url in self.index_urls))
- )
- if self.find_links:
- lines.append(
- "Looking in links: {}".format(", ".join(self.find_links))
- )
- return "\n".join(lines)
-
- @staticmethod
- def _sort_locations(locations, expand_dir=False):
- # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
- """
- Sort locations into "files" (archives) and "urls", and return
- a pair of lists (files,urls)
- """
- files = []
- urls = []
-
- # puts the url for the given file path into the appropriate list
- def sort_path(path):
- url = path_to_url(path)
- if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
- urls.append(url)
- else:
- files.append(url)
-
- for url in locations:
-
- is_local_path = os.path.exists(url)
- is_file_url = url.startswith('file:')
-
- if is_local_path or is_file_url:
- if is_local_path:
- path = url
- else:
- path = url_to_path(url)
- if os.path.isdir(path):
- if expand_dir:
- path = os.path.realpath(path)
- for item in os.listdir(path):
- sort_path(os.path.join(path, item))
- elif is_file_url:
- urls.append(url)
- else:
- logger.warning(
- "Path '{0}' is ignored: "
- "it is a directory.".format(path),
- )
- elif os.path.isfile(path):
- sort_path(path)
- else:
- logger.warning(
- "Url '%s' is ignored: it is neither a file "
- "nor a directory.", url,
- )
- elif is_url(url):
-            # Only add URLs with a clear scheme
- urls.append(url)
- else:
- logger.warning(
- "Url '%s' is ignored. It is either a non-existing "
- "path or lacks a specific scheme.", url,
- )
-
- return files, urls
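The files/urls split above hinges on mimetypes.guess_type: anything that looks like an HTML page is treated as an index URL to scrape, everything else as an archive candidate. A quick stdlib illustration (the paths are made up):

    import mimetypes

    # HTML-looking paths become "urls" (index pages) ...
    print(mimetypes.guess_type('file:///tmp/index.html', strict=False)[0])
    # text/html

    # ... while archives become "files" (direct candidates).
    print(mimetypes.guess_type('file:///tmp/pkg-1.0.tar.gz', strict=False)[0])
    # application/x-tar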
-
- def _candidate_sort_key(self, candidate):
- # type: (InstallationCandidate) -> CandidateSortingKey
- """
- Function used to generate link sort key for link tuples.
- The greater the return value, the more preferred it is.
- If not finding wheels, then sorted by version only.
- If finding wheels, then the sort order is by version, then:
- 1. existing installs
- 2. wheels ordered via Wheel.support_index_min(self.valid_tags)
- 3. source archives
- If prefer_binary was set, then all wheels are sorted above sources.
- Note: it was considered to embed this logic into the Link
- comparison operators, but then different sdist links
-              with the same version would have to be considered equal
- """
- support_num = len(self.valid_tags)
- build_tag = tuple() # type: BuildTag
- binary_preference = 0
- if candidate.location.is_wheel:
- # can raise InvalidWheelFilename
- wheel = Wheel(candidate.location.filename)
- if not wheel.supported(self.valid_tags):
- raise UnsupportedWheel(
- "%s is not a supported wheel for this platform. It "
- "can't be sorted." % wheel.filename
- )
- if self.prefer_binary:
- binary_preference = 1
- pri = -(wheel.support_index_min(self.valid_tags))
- if wheel.build_tag is not None:
- match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
- build_tag_groups = match.groups()
- build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
- else: # sdist
- pri = -(support_num)
- return (binary_preference, candidate.version, build_tag, pri)
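Because Python compares tuples element-wise, binary_preference dominates version, which in turn dominates the tag-support rank. A minimal self-contained sketch of the same ordering (the (version, is_wheel) candidate shape is illustrative, not pip's; assumes the 'packaging' library):

    from packaging.version import parse  # assumes the 'packaging' library

    def sort_key(candidate, prefer_binary=False):
        version, is_wheel = candidate  # simplified stand-in for pip's candidate
        binary_preference = 1 if (is_wheel and prefer_binary) else 0
        # Wheels outrank sdists on the final element when versions tie.
        pri = 0 if is_wheel else -1
        return (binary_preference, parse(version), pri)

    candidates = [('1.0', True), ('1.1', False), ('1.1', True)]
    print(max(candidates, key=lambda c: sort_key(c)))
    # ('1.1', True): newest version, and the wheel wins the tie-break
    print(max(candidates, key=lambda c: sort_key(c, prefer_binary=True)))
    # ('1.1', True) again; but a 1.0-only wheel would now beat a 1.1 sdist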
-
- def _validate_secure_origin(self, logger, location):
- # type: (Logger, Link) -> bool
- # Determine if this url used a secure transport mechanism
- parsed = urllib_parse.urlparse(str(location))
- origin = (parsed.scheme, parsed.hostname, parsed.port)
-
-        # Determine the protocol to compare against. Don't count the
-        # repository type as part of the protocol: in cases such as
-        # "git+ssh", only use "ssh" (i.e., only verify against the
-        # last scheme).
- protocol = origin[0].rsplit('+', 1)[-1]
-
- # Determine if our origin is a secure origin by looking through our
- # hardcoded list of secure origins, as well as any additional ones
- # configured on this PackageFinder instance.
- for secure_origin in (SECURE_ORIGINS + self.secure_origins):
- if protocol != secure_origin[0] and secure_origin[0] != "*":
- continue
-
- try:
- # We need to do this decode dance to ensure that we have a
- # unicode object, even on Python 2.x.
- addr = ipaddress.ip_address(
- origin[1]
- if (
- isinstance(origin[1], six.text_type) or
- origin[1] is None
- )
- else origin[1].decode("utf8")
- )
- network = ipaddress.ip_network(
- secure_origin[1]
- if isinstance(secure_origin[1], six.text_type)
- # setting secure_origin[1] to proper Union[bytes, str]
- # creates problems in other places
- else secure_origin[1].decode("utf8") # type: ignore
- )
- except ValueError:
- # We don't have both a valid address or a valid network, so
- # we'll check this origin against hostnames.
- if (origin[1] and
- origin[1].lower() != secure_origin[1].lower() and
- secure_origin[1] != "*"):
- continue
- else:
- # We have a valid address and network, so see if the address
- # is contained within the network.
- if addr not in network:
- continue
-
-        # Check to see if the port matches
- if (origin[2] != secure_origin[2] and
- secure_origin[2] != "*" and
- secure_origin[2] is not None):
- continue
-
- # If we've gotten here, then this origin matches the current
- # secure origin and we should return True
- return True
-
- # If we've gotten to this point, then the origin isn't secure and we
- # will not accept it as a valid location to search. We will however
- # log a warning that we are ignoring it.
- logger.warning(
- "The repository located at %s is not a trusted or secure host and "
- "is being ignored. If this repository is available via HTTPS we "
- "recommend you use HTTPS instead, otherwise you may silence "
- "this warning and allow it anyway with '--trusted-host %s'.",
- parsed.hostname,
- parsed.hostname,
- )
-
- return False
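Stripped of the Python 2 text/bytes handling, the matching loop reduces to the sketch below. The SECURE_ORIGINS values shown are an illustrative subset; pip's real hardcoded list lives elsewhere in this module:

    import ipaddress

    SECURE_ORIGINS = [          # illustrative subset, not pip's full list
        ("https", "*", "*"),
        ("*", "localhost", "*"),
        ("*", "127.0.0.0/8", "*"),
    ]

    def is_secure(scheme, host, port, extra_origins=()):
        protocol = scheme.rsplit('+', 1)[-1]        # "git+ssh" -> "ssh"
        for sec_scheme, sec_host, sec_port in SECURE_ORIGINS + list(extra_origins):
            if protocol != sec_scheme and sec_scheme != "*":
                continue
            try:
                addr = ipaddress.ip_address(host)
                network = ipaddress.ip_network(sec_host)
            except ValueError:
                # Not an IP literal/network: fall back to hostname comparison.
                if host.lower() != sec_host.lower() and sec_host != "*":
                    continue
            else:
                if addr not in network:
                    continue
            if port != sec_port and sec_port != "*" and sec_port is not None:
                continue
            return True
        return False

    print(is_secure("http", "localhost", 8080))  # True: trusted host
    print(is_secure("http", "example.com", 80))  # False: plain HTTP elsewhere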
-
- def _get_index_urls_locations(self, project_name):
- # type: (str) -> List[str]
- """Returns the locations found via self.index_urls
-
-        Checks the url_name on the main (first in the list) index and
-        uses this url_name to produce all locations
- """
-
- def mkurl_pypi_url(url):
- loc = posixpath.join(
- url,
- urllib_parse.quote(canonicalize_name(project_name)))
- # For maximum compatibility with easy_install, ensure the path
- # ends in a trailing slash. Although this isn't in the spec
- # (and PyPI can handle it without the slash) some other index
- # implementations might break if they relied on easy_install's
- # behavior.
- if not loc.endswith('/'):
- loc = loc + '/'
- return loc
-
- return [mkurl_pypi_url(url) for url in self.index_urls]
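For a PEP 503 "simple" index, the resulting location is just the index URL plus the canonicalized, quoted project name and a trailing slash. A standalone sketch (assumes the 'packaging' library; the URL is PyPI's real simple index):

    import posixpath
    try:
        from urllib.parse import quote      # Python 3
    except ImportError:
        from urllib import quote            # Python 2

    from packaging.utils import canonicalize_name

    def simple_index_url(index_url, project_name):
        loc = posixpath.join(index_url, quote(canonicalize_name(project_name)))
        return loc if loc.endswith('/') else loc + '/'

    print(simple_index_url('https://pypi.org/simple', 'Django'))
    # https://pypi.org/simple/django/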
-
- def find_all_candidates(self, project_name):
-        # type: (str) -> List[InstallationCandidate]
-        """Find all available InstallationCandidates for project_name
-
- This checks index_urls and find_links.
- All versions found are returned as an InstallationCandidate list.
-
- See _link_package_versions for details on which files are accepted
- """
- index_locations = self._get_index_urls_locations(project_name)
- index_file_loc, index_url_loc = self._sort_locations(index_locations)
- fl_file_loc, fl_url_loc = self._sort_locations(
- self.find_links, expand_dir=True,
- )
-
- file_locations = (Link(url) for url in itertools.chain(
- index_file_loc, fl_file_loc,
- ))
-
- # We trust every url that the user has given us whether it was given
- # via --index-url or --find-links.
- # We want to filter out any thing which does not have a secure origin.
- url_locations = [
- link for link in itertools.chain(
- (Link(url) for url in index_url_loc),
- (Link(url) for url in fl_url_loc),
- )
- if self._validate_secure_origin(logger, link)
- ]
-
- logger.debug('%d location(s) to search for versions of %s:',
- len(url_locations), project_name)
-
- for location in url_locations:
- logger.debug('* %s', location)
-
- canonical_name = canonicalize_name(project_name)
- formats = self.format_control.get_allowed_formats(canonical_name)
- search = Search(project_name, canonical_name, formats)
- find_links_versions = self._package_versions(
- # We trust every directly linked archive in find_links
- (Link(url, '-f') for url in self.find_links),
- search
- )
-
- page_versions = []
- for page in self._get_pages(url_locations, project_name):
- logger.debug('Analyzing links from page %s', page.url)
- with indent_log():
- page_versions.extend(
- self._package_versions(page.iter_links(), search)
- )
-
- file_versions = self._package_versions(file_locations, search)
- if file_versions:
- file_versions.sort(reverse=True)
- logger.debug(
- 'Local files found: %s',
- ', '.join([
- url_to_path(candidate.location.url)
- for candidate in file_versions
- ])
- )
-
- # This is an intentional priority ordering
- return file_versions + find_links_versions + page_versions
-
- def find_requirement(self, req, upgrade):
- # type: (InstallRequirement, bool) -> Optional[Link]
- """Try to find a Link matching req
-
- Expects req, an InstallRequirement and upgrade, a boolean
- Returns a Link if found,
- Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
- """
- all_candidates = self.find_all_candidates(req.name)
-
- # Filter out anything which doesn't match our specifier
- compatible_versions = set(
- req.specifier.filter(
- # We turn the version object into a str here because otherwise
- # when we're debundled but setuptools isn't, Python will see
- # packaging.version.Version and
- # pkg_resources._vendor.packaging.version.Version as different
- # types. This way we'll use a str as a common data interchange
- # format. If we stop using the pkg_resources provided specifier
- # and start using our own, we can drop the cast to str().
- [str(c.version) for c in all_candidates],
- prereleases=(
- self.allow_all_prereleases
- if self.allow_all_prereleases else None
- ),
- )
- )
- applicable_candidates = [
- # Again, converting to str to deal with debundling.
- c for c in all_candidates if str(c.version) in compatible_versions
- ]
-
- if applicable_candidates:
- best_candidate = max(applicable_candidates,
- key=self._candidate_sort_key)
- else:
- best_candidate = None
-
- if req.satisfied_by is not None:
- installed_version = parse_version(req.satisfied_by.version)
- else:
- installed_version = None
-
- if installed_version is None and best_candidate is None:
- logger.critical(
- 'Could not find a version that satisfies the requirement %s '
- '(from versions: %s)',
- req,
- ', '.join(
- sorted(
- {str(c.version) for c in all_candidates},
- key=parse_version,
- )
- )
- )
-
- raise DistributionNotFound(
- 'No matching distribution found for %s' % req
- )
-
- best_installed = False
- if installed_version and (
- best_candidate is None or
- best_candidate.version <= installed_version):
- best_installed = True
-
- if not upgrade and installed_version is not None:
- if best_installed:
- logger.debug(
- 'Existing installed version (%s) is most up-to-date and '
- 'satisfies requirement',
- installed_version,
- )
- else:
- logger.debug(
- 'Existing installed version (%s) satisfies requirement '
- '(most up-to-date version is %s)',
- installed_version,
- best_candidate.version,
- )
- return None
-
- if best_installed:
-            # We have an existing version, and it's the best version
- logger.debug(
- 'Installed version (%s) is most up-to-date (past versions: '
- '%s)',
- installed_version,
- ', '.join(sorted(compatible_versions, key=parse_version)) or
- "none",
- )
- raise BestVersionAlreadyInstalled
-
- logger.debug(
- 'Using version %s (newest of versions: %s)',
- best_candidate.version,
- ', '.join(sorted(compatible_versions, key=parse_version))
- )
- return best_candidate.location
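The specifier filtering above, minus the debundling cast, behaves like this small sketch built directly on the 'packaging' library (assumed installed):

    from packaging.specifiers import SpecifierSet
    from packaging.version import parse

    available = ['1.0', '1.1', '2.0b1', '2.0']
    spec = SpecifierSet('>=1.0,<2.0')

    compatible = sorted(spec.filter(available), key=parse)
    print(compatible)                  # ['1.0', '1.1']: prereleases are
                                       # excluded unless explicitly allowed
    print(max(compatible, key=parse))  # '1.1', the "best candidate"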
-
- def _get_pages(self, locations, project_name):
- # type: (Iterable[Link], str) -> Iterable[HTMLPage]
- """
- Yields (page, page_url) from the given locations, skipping
- locations that have errors.
- """
- seen = set() # type: Set[Link]
- for location in locations:
- if location in seen:
- continue
- seen.add(location)
-
- page = _get_html_page(location, session=self.session)
- if page is None:
- continue
-
- yield page
-
- _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
-
- def _sort_links(self, links):
- # type: (Iterable[Link]) -> List[Link]
- """
- Returns elements of links in order, non-egg links first, egg links
- second, while eliminating duplicates
- """
- eggs, no_eggs = [], []
- seen = set() # type: Set[Link]
- for link in links:
- if link not in seen:
- seen.add(link)
- if link.egg_fragment:
- eggs.append(link)
- else:
- no_eggs.append(link)
- return no_eggs + eggs
-
- def _package_versions(
- self,
- links, # type: Iterable[Link]
- search # type: Search
- ):
-        # type: (...) -> List[InstallationCandidate]
- result = []
- for link in self._sort_links(links):
- v = self._link_package_versions(link, search)
- if v is not None:
- result.append(v)
- return result
-
- def _log_skipped_link(self, link, reason):
- # type: (Link, str) -> None
- if link not in self.logged_links:
- logger.debug('Skipping link %s; %s', link, reason)
- self.logged_links.add(link)
-
- def _link_package_versions(self, link, search):
- # type: (Link, Search) -> Optional[InstallationCandidate]
- """Return an InstallationCandidate or None"""
- version = None
- if link.egg_fragment:
- egg_info = link.egg_fragment
- ext = link.ext
- else:
- egg_info, ext = link.splitext()
- if not ext:
- self._log_skipped_link(link, 'not a file')
- return None
- if ext not in SUPPORTED_EXTENSIONS:
- self._log_skipped_link(
- link, 'unsupported archive format: %s' % ext,
- )
- return None
- if "binary" not in search.formats and ext == WHEEL_EXTENSION:
- self._log_skipped_link(
- link, 'No binaries permitted for %s' % search.supplied,
- )
- return None
- if "macosx10" in link.path and ext == '.zip':
- self._log_skipped_link(link, 'macosx10 one')
- return None
- if ext == WHEEL_EXTENSION:
- try:
- wheel = Wheel(link.filename)
- except InvalidWheelFilename:
- self._log_skipped_link(link, 'invalid wheel filename')
- return None
- if canonicalize_name(wheel.name) != search.canonical:
- self._log_skipped_link(
- link, 'wrong project name (not %s)' % search.supplied)
- return None
-
- if not wheel.supported(self.valid_tags):
- self._log_skipped_link(
- link, 'it is not compatible with this Python')
- return None
-
- version = wheel.version
-
- # This should be up by the search.ok_binary check, but see issue 2700.
- if "source" not in search.formats and ext != WHEEL_EXTENSION:
- self._log_skipped_link(
- link, 'No sources permitted for %s' % search.supplied,
- )
- return None
-
- if not version:
- version = _egg_info_matches(egg_info, search.canonical)
- if not version:
- self._log_skipped_link(
- link, 'Missing project version for %s' % search.supplied)
- return None
-
- match = self._py_version_re.search(version)
- if match:
- version = version[:match.start()]
- py_version = match.group(1)
- if py_version != sys.version[:3]:
- self._log_skipped_link(
- link, 'Python version is incorrect')
- return None
- try:
- support_this_python = check_requires_python(link.requires_python)
- except specifiers.InvalidSpecifier:
- logger.debug("Package %s has an invalid Requires-Python entry: %s",
- link.filename, link.requires_python)
- support_this_python = True
-
- if not support_this_python:
- logger.debug("The package %s is incompatible with the python "
- "version in use. Acceptable python versions are: %s",
- link, link.requires_python)
- return None
- logger.debug('Found link %s, version: %s', link, version)
-
- return InstallationCandidate(search.supplied, version, link)
-
-
-def _find_name_version_sep(egg_info, canonical_name):
- # type: (str, str) -> int
- """Find the separator's index based on the package's canonical name.
-
- `egg_info` must be an egg info string for the given package, and
- `canonical_name` must be the package's canonical name.
-
- This function is needed since the canonicalized name does not necessarily
- have the same length as the egg info's name part. An example::
-
- >>> egg_info = 'foo__bar-1.0'
- >>> canonical_name = 'foo-bar'
- >>> _find_name_version_sep(egg_info, canonical_name)
- 8
- """
- # Project name and version must be separated by one single dash. Find all
- # occurrences of dashes; if the string in front of it matches the canonical
- # name, this is the one separating the name and version parts.
- for i, c in enumerate(egg_info):
- if c != "-":
- continue
- if canonicalize_name(egg_info[:i]) == canonical_name:
- return i
- raise ValueError("{} does not match {}".format(egg_info, canonical_name))
-
-
-def _egg_info_matches(egg_info, canonical_name):
- # type: (str, str) -> Optional[str]
- """Pull the version part out of a string.
-
- :param egg_info: The string to parse. E.g. foo-2.1
- :param canonical_name: The canonicalized name of the package this
- belongs to.
- """
- try:
- version_start = _find_name_version_sep(egg_info, canonical_name) + 1
- except ValueError:
- return None
- version = egg_info[version_start:]
- if not version:
- return None
- return version
-
-
-def _determine_base_url(document, page_url):
- """Determine the HTML document's base URL.
-
- This looks for a ``<base>`` tag in the HTML document. If present, its href
- attribute denotes the base URL of anchor tags in the document. If there is
- no such tag (or if it does not have a valid href attribute), the HTML
- file's URL is used as the base URL.
-
- :param document: An HTML document representation. The current
- implementation expects the result of ``html5lib.parse()``.
- :param page_url: The URL of the HTML document.
- """
- for base in document.findall(".//base"):
- href = base.get("href")
- if href is not None:
- return href
- return page_url
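A self-contained demonstration of the base-URL lookup (assumes the standalone html5lib package; the mirror hostname is made up):

    import html5lib  # assumes the 'html5lib' package is installed

    def base_url(content, page_url):
        document = html5lib.parse(content, namespaceHTMLElements=False)
        for base in document.findall(".//base"):
            href = base.get("href")
            if href is not None:
                return href
        return page_url

    html = b'<html><head><base href="https://mirror.example/simple/"></head></html>'
    print(base_url(html, "https://pypi.org/simple/"))
    # https://mirror.example/simple/ (the <base> tag wins over the page URL)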
-
-
-def _get_encoding_from_headers(headers):
- """Determine if we have any encoding information in our headers.
- """
- if headers and "Content-Type" in headers:
- content_type, params = cgi.parse_header(headers["Content-Type"])
- if "charset" in params:
- return params['charset']
- return None
-
-
-_CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
-
-
-def _clean_link(url):
- # type: (str) -> str
- """Makes sure a link is fully encoded. That is, if a ' ' shows up in
- the link, it will be rewritten to %20 (while not over-quoting
- % or other characters)."""
-    # Zero-pad with '%02x': '%2x' would space-pad code points below 0x10.
-    return _CLEAN_LINK_RE.sub(
-        lambda match: '%%%02x' % ord(match.group(0)), url)
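For example, a space in an archive name is rewritten while existing percent-escapes pass through untouched ('%' is in the allowed set). A standalone sketch (the hostname is made up):

    import re

    CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    def clean_link(url):
        # Percent-encode anything outside the allowed character set.
        return CLEAN_LINK_RE.sub(lambda m: '%%%02x' % ord(m.group(0)), url)

    print(clean_link('https://host.example/some file.tar.gz'))
    # https://host.example/some%20file.tar.gz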
-
-
-class HTMLPage(object):
- """Represents one page, along with its URL"""
-
- def __init__(self, content, url, headers=None):
- # type: (bytes, str, MutableMapping[str, str]) -> None
- self.content = content
- self.url = url
- self.headers = headers
-
- def __str__(self):
- return redact_password_from_url(self.url)
-
- def iter_links(self):
- # type: () -> Iterable[Link]
- """Yields all links in the page"""
- document = html5lib.parse(
- self.content,
- transport_encoding=_get_encoding_from_headers(self.headers),
- namespaceHTMLElements=False,
- )
- base_url = _determine_base_url(document, self.url)
- for anchor in document.findall(".//a"):
- if anchor.get("href"):
- href = anchor.get("href")
- url = _clean_link(urllib_parse.urljoin(base_url, href))
- pyrequire = anchor.get('data-requires-python')
- pyrequire = unescape(pyrequire) if pyrequire else None
- yield Link(url, self.url, requires_python=pyrequire)
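End to end, scraping one anchor from a simple-index page looks like the sketch below (html5lib assumed installed; the page bytes are inline test data):

    import html5lib
    try:
        from urllib.parse import urljoin    # Python 3
    except ImportError:
        from urlparse import urljoin        # Python 2

    def iter_hrefs(content, page_url):
        document = html5lib.parse(content, namespaceHTMLElements=False)
        for anchor in document.findall(".//a"):
            href = anchor.get("href")
            if href:
                yield urljoin(page_url, href), anchor.get("data-requires-python")

    page = b'<a href="pkg-1.0.tar.gz" data-requires-python=">=3.5">pkg</a>'
    for url, requires_python in iter_hrefs(page, "https://pypi.org/simple/pkg/"):
        print(url, requires_python)
    # https://pypi.org/simple/pkg/pkg-1.0.tar.gz >=3.5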
-
-
-Search = namedtuple('Search', 'supplied canonical formats')
-"""Capture key aspects of a search.
-
-:attribute supplied: The user supplied package.
-:attribute canonical: The canonical package name.
-:attribute formats: The formats allowed for this package. Should be a set
- with 'binary' or 'source' or both in it.
-"""
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/locations.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/locations.py
deleted file mode 100644
index c6e2a3e..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/locations.py
+++ /dev/null
@@ -1,211 +0,0 @@
-"""Locations where we look for configs, install stuff, etc"""
-from __future__ import absolute_import
-
-import os
-import os.path
-import platform
-import site
-import sys
-import sysconfig
-from distutils import sysconfig as distutils_sysconfig
-from distutils.command.install import SCHEME_KEYS # type: ignore
-
-from pip._internal.utils import appdirs
-from pip._internal.utils.compat import WINDOWS, expanduser
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Union, Dict, List, Optional # noqa: F401
-
-
-# Application Directories
-USER_CACHE_DIR = appdirs.user_cache_dir("pip")
-
-
-DELETE_MARKER_MESSAGE = '''\
-This file is placed here by pip to indicate the source was put
-here by pip.
-
-Once this package is successfully installed this source code will be
-deleted (unless you remove this file).
-'''
-PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
-
-
-def write_delete_marker_file(directory):
- # type: (str) -> None
- """
- Write the pip delete marker file into this directory.
- """
- filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
- with open(filepath, 'w') as marker_fp:
- marker_fp.write(DELETE_MARKER_MESSAGE)
-
-
-def running_under_virtualenv():
- # type: () -> bool
- """
- Return True if we're running inside a virtualenv, False otherwise.
-
- """
- if hasattr(sys, 'real_prefix'):
- return True
- elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
- return True
-
- return False
-
-
-def virtualenv_no_global():
- # type: () -> bool
- """
- Return True if in a venv and no system site packages.
- """
- # this mirrors the logic in virtualenv.py for locating the
- # no-global-site-packages.txt file
- site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
- no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
- if running_under_virtualenv() and os.path.isfile(no_global_file):
- return True
- else:
- return False
-
-
-if running_under_virtualenv():
- src_prefix = os.path.join(sys.prefix, 'src')
-else:
- # FIXME: keep src in cwd for now (it is not a temporary folder)
- try:
- src_prefix = os.path.join(os.getcwd(), 'src')
- except OSError:
- # In case the current working directory has been renamed or deleted
- sys.exit(
- "The folder you are executing pip from can no longer be found."
- )
-
-# Under macOS + virtualenv, sys.prefix is not properly resolved;
-# it is something like /path/to/python/bin/..
-# Note: using realpath due to tmp dirs on OSX being symlinks
-src_prefix = os.path.abspath(src_prefix)
-
-# FIXME doesn't account for venv linked to global site-packages
-
-site_packages = sysconfig.get_path("purelib") # type: Optional[str]
-
-# This is because of a bug in PyPy's sysconfig module, see
-# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
-# for more information.
-if platform.python_implementation().lower() == "pypy":
- site_packages = distutils_sysconfig.get_python_lib()
-try:
- # Use getusersitepackages if this is present, as it ensures that the
- # value is initialised properly.
- user_site = site.getusersitepackages()
-except AttributeError:
- user_site = site.USER_SITE
-user_dir = expanduser('~')
-if WINDOWS:
- bin_py = os.path.join(sys.prefix, 'Scripts')
- bin_user = os.path.join(user_site, 'Scripts')
- # buildout uses 'bin' on Windows too?
- if not os.path.exists(bin_py):
- bin_py = os.path.join(sys.prefix, 'bin')
- bin_user = os.path.join(user_site, 'bin')
-
- config_basename = 'pip.ini'
-
- legacy_storage_dir = os.path.join(user_dir, 'pip')
- legacy_config_file = os.path.join(
- legacy_storage_dir,
- config_basename,
- )
-else:
- bin_py = os.path.join(sys.prefix, 'bin')
- bin_user = os.path.join(user_site, 'bin')
-
- config_basename = 'pip.conf'
-
- legacy_storage_dir = os.path.join(user_dir, '.pip')
- legacy_config_file = os.path.join(
- legacy_storage_dir,
- config_basename,
- )
- # Forcing to use /usr/local/bin for standard macOS framework installs
- # Also log to ~/Library/Logs/ for use with the Console.app log viewer
- if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
- bin_py = '/usr/local/bin'
-
-site_config_files = [
- os.path.join(path, config_basename)
- for path in appdirs.site_config_dirs('pip')
-]
-
-venv_config_file = os.path.join(sys.prefix, config_basename)
-new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)
-
-
-def distutils_scheme(dist_name, user=False, home=None, root=None,
- isolated=False, prefix=None):
-    # type: (str, bool, str, str, bool, str) -> dict
- """
- Return a distutils install scheme
- """
- from distutils.dist import Distribution
-
- scheme = {}
-
- if isolated:
- extra_dist_args = {"script_args": ["--no-user-cfg"]}
- else:
- extra_dist_args = {}
- dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]]
- dist_args.update(extra_dist_args)
-
- d = Distribution(dist_args)
- # Ignoring, typeshed issue reported python/typeshed/issues/2567
- d.parse_config_files()
- # NOTE: Ignoring type since mypy can't find attributes on 'Command'
- i = d.get_command_obj('install', create=True) # type: Any
- assert i is not None
- # NOTE: setting user or home has the side-effect of creating the home dir
- # or user base for installations during finalize_options()
- # ideally, we'd prefer a scheme class that has no side-effects.
- assert not (user and prefix), "user={} prefix={}".format(user, prefix)
- i.user = user or i.user
- if user:
- i.prefix = ""
- i.prefix = prefix or i.prefix
- i.home = home or i.home
- i.root = root or i.root
- i.finalize_options()
- for key in SCHEME_KEYS:
- scheme[key] = getattr(i, 'install_' + key)
-
- # install_lib specified in setup.cfg should install *everything*
- # into there (i.e. it takes precedence over both purelib and
- # platlib). Note, i.install_lib is *always* set after
- # finalize_options(); we only want to override here if the user
- # has explicitly requested it hence going back to the config
-
- # Ignoring, typeshed issue reported python/typeshed/issues/2567
- if 'install_lib' in d.get_option_dict('install'): # type: ignore
- scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
-
- if running_under_virtualenv():
- scheme['headers'] = os.path.join(
- sys.prefix,
- 'include',
- 'site',
- 'python' + sys.version[:3],
- dist_name,
- )
-
- if root is not None:
- path_no_drive = os.path.splitdrive(
- os.path.abspath(scheme["headers"]))[1]
- scheme["headers"] = os.path.join(
- root,
- path_no_drive[1:],
- )
-
- return scheme
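Stripped of the user/home/root/prefix overrides, the core of distutils_scheme is just finalizing a default 'install' command and reading its install_* attributes. A minimal, hedged sketch using plain distutils rather than pip's helper:

    from distutils.command.install import SCHEME_KEYS
    from distutils.dist import Distribution

    def minimal_scheme(dist_name):
        d = Distribution({'name': dist_name})
        i = d.get_command_obj('install', create=True)
        i.finalize_options()
        return {key: getattr(i, 'install_' + key) for key in SCHEME_KEYS}

    for key, path in sorted(minimal_scheme('example').items()):
        print(key, '->', path)
    # prints the purelib/platlib/headers/scripts/data paths for this interpreter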
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__init__.py
deleted file mode 100644
index 7855226..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""A package that contains models that represent entities.
-"""
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 8b92b6b..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/candidate.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/candidate.cpython-37.pyc
deleted file mode 100644
index 445bffe..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/candidate.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/format_control.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/format_control.cpython-37.pyc
deleted file mode 100644
index 304897a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/format_control.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/index.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/index.cpython-37.pyc
deleted file mode 100644
index cad3ad0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/index.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/link.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/link.cpython-37.pyc
deleted file mode 100644
index e74697d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__pycache__/link.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/candidate.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/candidate.py
deleted file mode 100644
index 4475458..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/candidate.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.utils.models import KeyBasedCompareMixin
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from pip._vendor.packaging.version import _BaseVersion # noqa: F401
- from pip._internal.models.link import Link # noqa: F401
- from typing import Any, Union # noqa: F401
-
-
-class InstallationCandidate(KeyBasedCompareMixin):
- """Represents a potential "candidate" for installation.
- """
-
- def __init__(self, project, version, location):
- # type: (Any, str, Link) -> None
- self.project = project
- self.version = parse_version(version) # type: _BaseVersion
- self.location = location
-
- super(InstallationCandidate, self).__init__(
- key=(self.project, self.version, self.location),
- defining_class=InstallationCandidate
- )
-
- def __repr__(self):
- # type: () -> str
- return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
- self.project, self.version, self.location,
- )
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/format_control.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/format_control.py
deleted file mode 100644
index 971a391..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/format_control.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, Set, FrozenSet # noqa: F401
-
-
-class FormatControl(object):
- """Helper for managing formats from which a package can be installed.
- """
-
- def __init__(self, no_binary=None, only_binary=None):
- # type: (Optional[Set], Optional[Set]) -> None
- if no_binary is None:
- no_binary = set()
- if only_binary is None:
- only_binary = set()
-
- self.no_binary = no_binary
- self.only_binary = only_binary
-
- def __eq__(self, other):
- return self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __repr__(self):
- return "{}({}, {})".format(
- self.__class__.__name__,
- self.no_binary,
- self.only_binary
- )
-
- @staticmethod
- def handle_mutual_excludes(value, target, other):
- # type: (str, Optional[Set], Optional[Set]) -> None
- new = value.split(',')
- while ':all:' in new:
- other.clear()
- target.clear()
- target.add(':all:')
- del new[:new.index(':all:') + 1]
- # Without a none, we want to discard everything as :all: covers it
- if ':none:' not in new:
- return
- for name in new:
- if name == ':none:':
- target.clear()
- continue
- name = canonicalize_name(name)
- other.discard(name)
- target.add(name)
-
- def get_allowed_formats(self, canonical_name):
- # type: (str) -> FrozenSet
- result = {"binary", "source"}
- if canonical_name in self.only_binary:
- result.discard('source')
- elif canonical_name in self.no_binary:
- result.discard('binary')
- elif ':all:' in self.only_binary:
- result.discard('source')
- elif ':all:' in self.no_binary:
- result.discard('binary')
- return frozenset(result)
-
- def disallow_binaries(self):
- # type: () -> None
- self.handle_mutual_excludes(
- ':all:', self.no_binary, self.only_binary,
- )
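The :all:/:none: handling is easiest to see with plain sets. This standalone sketch reproduces handle_mutual_excludes (assumes the 'packaging' library for canonicalize_name):

    from packaging.utils import canonicalize_name

    def handle_mutual_excludes(value, target, other):
        new = value.split(',')
        while ':all:' in new:
            other.clear()
            target.clear()
            target.add(':all:')
            del new[:new.index(':all:') + 1]
            # Without a :none:, :all: covers everything that follows.
            if ':none:' not in new:
                return
        for name in new:
            if name == ':none:':
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    no_binary, only_binary = set(), set()
    handle_mutual_excludes(':all:,:none:,Foo.Bar', no_binary, only_binary)
    print(no_binary)    # {'foo-bar'}: :none: reset :all:, then Foo.Bar was added
    print(only_binary)  # set()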
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/index.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/index.py
deleted file mode 100644
index ead1efb..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/index.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-
-class PackageIndex(object):
- """Represents a Package Index and provides easier access to endpoints
- """
-
- def __init__(self, url, file_storage_domain):
- # type: (str, str) -> None
- super(PackageIndex, self).__init__()
- self.url = url
- self.netloc = urllib_parse.urlsplit(url).netloc
- self.simple_url = self._url_for_path('simple')
- self.pypi_url = self._url_for_path('pypi')
-
- # This is part of a temporary hack used to block installs of PyPI
- # packages which depend on external urls only necessary until PyPI can
- # block such packages themselves
- self.file_storage_domain = file_storage_domain
-
- def _url_for_path(self, path):
- # type: (str) -> str
- return urllib_parse.urljoin(self.url, path)
-
-
-PyPI = PackageIndex(
- 'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
-)
-TestPyPI = PackageIndex(
- 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
-)
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/link.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/link.py
deleted file mode 100644
index ad2f93e..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/link.py
+++ /dev/null
@@ -1,163 +0,0 @@
-import posixpath
-import re
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.download import path_to_url
-from pip._internal.utils.misc import (
- WHEEL_EXTENSION, redact_password_from_url, splitext,
-)
-from pip._internal.utils.models import KeyBasedCompareMixin
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, Tuple, Union, Text # noqa: F401
- from pip._internal.index import HTMLPage # noqa: F401
-
-
-class Link(KeyBasedCompareMixin):
- """Represents a parsed link from a Package Index's simple URL
- """
-
- def __init__(self, url, comes_from=None, requires_python=None):
- # type: (str, Optional[Union[str, HTMLPage]], Optional[str]) -> None
- """
- url:
- url of the resource pointed to (href of the link)
- comes_from:
- instance of HTMLPage where the link was found, or string.
- requires_python:
- String containing the `Requires-Python` metadata field, specified
- in PEP 345. This may be specified by a data-requires-python
- attribute in the HTML link tag, as described in PEP 503.
- """
-
- # url can be a UNC windows share
- if url.startswith('\\\\'):
- url = path_to_url(url)
-
- self.url = url
- self.comes_from = comes_from
- self.requires_python = requires_python if requires_python else None
-
- super(Link, self).__init__(
-            key=self.url,
- defining_class=Link
- )
-
- def __str__(self):
- if self.requires_python:
- rp = ' (requires-python:%s)' % self.requires_python
- else:
- rp = ''
- if self.comes_from:
- return '%s (from %s)%s' % (redact_password_from_url(self.url),
- self.comes_from, rp)
- else:
- return redact_password_from_url(str(self.url))
-
- def __repr__(self):
- return '<Link %s>' % self
-
- @property
- def filename(self):
- # type: () -> str
- _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
- name = posixpath.basename(path.rstrip('/')) or netloc
- name = urllib_parse.unquote(name)
- assert name, ('URL %r produced no filename' % self.url)
- return name
-
- @property
- def scheme(self):
- # type: () -> str
- return urllib_parse.urlsplit(self.url)[0]
-
- @property
- def netloc(self):
- # type: () -> str
- return urllib_parse.urlsplit(self.url)[1]
-
- @property
- def path(self):
- # type: () -> str
- return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
-
- def splitext(self):
- # type: () -> Tuple[str, str]
- return splitext(posixpath.basename(self.path.rstrip('/')))
-
- @property
- def ext(self):
- # type: () -> str
- return self.splitext()[1]
-
- @property
- def url_without_fragment(self):
- # type: () -> str
- scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
- return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
-
- _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
-
- @property
- def egg_fragment(self):
- # type: () -> Optional[str]
- match = self._egg_fragment_re.search(self.url)
- if not match:
- return None
- return match.group(1)
-
- _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
-
- @property
- def subdirectory_fragment(self):
- # type: () -> Optional[str]
- match = self._subdirectory_fragment_re.search(self.url)
- if not match:
- return None
- return match.group(1)
-
- _hash_re = re.compile(
- r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
- )
-
- @property
- def hash(self):
- # type: () -> Optional[str]
- match = self._hash_re.search(self.url)
- if match:
- return match.group(2)
- return None
-
- @property
- def hash_name(self):
- # type: () -> Optional[str]
- match = self._hash_re.search(self.url)
- if match:
- return match.group(1)
- return None
-
- @property
- def show_url(self):
- # type: () -> Optional[str]
- return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
-
- @property
- def is_wheel(self):
- # type: () -> bool
- return self.ext == WHEEL_EXTENSION
-
- @property
- def is_artifact(self):
- # type: () -> bool
- """
- Determines if this points to an actual artifact (e.g. a tarball) or if
- it points to an "abstract" thing like a path or a VCS location.
- """
- from pip._internal.vcs import vcs
-
- if self.scheme in vcs.all_schemes:
- return False
-
- return True
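The fragment and hash properties are thin wrappers over the regexes above; a quick standalone check (the URL is made up):

    import re

    egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
    hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')

    url = 'https://files.example/pkg-1.0.tar.gz#egg=pkg&sha256=0a1b2c'
    print(egg_fragment_re.search(url).group(1))  # pkg
    print(hash_re.search(url).group(1, 2))       # ('sha256', '0a1b2c')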
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py
+++ /dev/null
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 07a1eb7..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/check.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/check.cpython-37.pyc
deleted file mode 100644
index a85fd1b..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/check.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc
deleted file mode 100644
index 7d8bed4..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/freeze.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc
deleted file mode 100644
index 952f933..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__pycache__/prepare.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/check.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/check.py
deleted file mode 100644
index 0b56eda..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/check.py
+++ /dev/null
@@ -1,155 +0,0 @@
-"""Validation of dependencies of packages
-"""
-
-import logging
-from collections import namedtuple
-
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import RequirementParseError
-
-from pip._internal.operations.prepare import make_abstract_dist
-from pip._internal.utils.misc import get_installed_distributions
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-logger = logging.getLogger(__name__)
-
-if MYPY_CHECK_RUNNING:
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
- from typing import ( # noqa: F401
- Any, Callable, Dict, Optional, Set, Tuple, List
- )
-
- # Shorthands
- PackageSet = Dict[str, 'PackageDetails']
- Missing = Tuple[str, Any]
- Conflicting = Tuple[str, str, Any]
-
- MissingDict = Dict[str, List[Missing]]
- ConflictingDict = Dict[str, List[Conflicting]]
- CheckResult = Tuple[MissingDict, ConflictingDict]
-
-PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
-
-
-def create_package_set_from_installed(**kwargs):
- # type: (**Any) -> Tuple[PackageSet, bool]
- """Converts a list of distributions into a PackageSet.
- """
- # Default to using all packages installed on the system
-    if not kwargs:
- kwargs = {"local_only": False, "skip": ()}
-
- package_set = {}
- problems = False
- for dist in get_installed_distributions(**kwargs):
- name = canonicalize_name(dist.project_name)
- try:
- package_set[name] = PackageDetails(dist.version, dist.requires())
- except RequirementParseError as e:
- # Don't crash on broken metadata
- logging.warning("Error parsing requirements for %s: %s", name, e)
- problems = True
- return package_set, problems
-
-
-def check_package_set(package_set, should_ignore=None):
- # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
- """Check if a package set is consistent
-
- If should_ignore is passed, it should be a callable that takes a
- package name and returns a boolean.
- """
- if should_ignore is None:
- def should_ignore(name):
- return False
-
- missing = dict()
- conflicting = dict()
-
- for package_name in package_set:
- # Info about dependencies of package_name
- missing_deps = set() # type: Set[Missing]
- conflicting_deps = set() # type: Set[Conflicting]
-
- if should_ignore(package_name):
- continue
-
- for req in package_set[package_name].requires:
- name = canonicalize_name(req.project_name) # type: str
-
- # Check if it's missing
- if name not in package_set:
- missed = True
- if req.marker is not None:
- missed = req.marker.evaluate()
- if missed:
- missing_deps.add((name, req))
- continue
-
- # Check if there's a conflict
- version = package_set[name].version # type: str
- if not req.specifier.contains(version, prereleases=True):
- conflicting_deps.add((name, version, req))
-
- if missing_deps:
- missing[package_name] = sorted(missing_deps, key=str)
- if conflicting_deps:
- conflicting[package_name] = sorted(conflicting_deps, key=str)
-
- return missing, conflicting
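The same missing/conflicting split can be reproduced with the 'packaging' library in place of pkg_resources. A toy sketch with a fabricated two-package set (names and versions are illustrative):

    from collections import namedtuple
    from packaging.requirements import Requirement
    from packaging.utils import canonicalize_name

    PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])

    package_set = {
        'flask': PackageDetails('1.0', [Requirement('jinja2>=2.10')]),
        'jinja2': PackageDetails('2.8', []),
    }

    missing, conflicting = {}, {}
    for name, details in package_set.items():
        for req in details.requires:
            dep = canonicalize_name(req.name)
            if dep not in package_set:
                missing.setdefault(name, []).append(dep)
            elif not req.specifier.contains(package_set[dep].version,
                                            prereleases=True):
                conflicting.setdefault(name, []).append(
                    (dep, package_set[dep].version, str(req)))

    print(missing)      # {}
    print(conflicting)  # {'flask': [('jinja2', '2.8', 'jinja2>=2.10')]}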
-
-
-def check_install_conflicts(to_install):
- # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
- """For checking if the dependency graph would be consistent after \
- installing given requirements
- """
- # Start from the current state
- package_set, _ = create_package_set_from_installed()
- # Install packages
- would_be_installed = _simulate_installation_of(to_install, package_set)
-
- # Only warn about directly-dependent packages; create a whitelist of them
- whitelist = _create_whitelist(would_be_installed, package_set)
-
- return (
- package_set,
- check_package_set(
- package_set, should_ignore=lambda name: name not in whitelist
- )
- )
-
-
-def _simulate_installation_of(to_install, package_set):
- # type: (List[InstallRequirement], PackageSet) -> Set[str]
- """Computes the version of packages after installing to_install.
- """
-
- # Keep track of packages that were installed
- installed = set()
-
- # Modify it as installing requirement_set would (assuming no errors)
- for inst_req in to_install:
- dist = make_abstract_dist(inst_req).dist()
- name = canonicalize_name(dist.key)
- package_set[name] = PackageDetails(dist.version, dist.requires())
-
- installed.add(name)
-
- return installed
-
-
-def _create_whitelist(would_be_installed, package_set):
- # type: (Set[str], PackageSet) -> Set[str]
- packages_affected = set(would_be_installed)
-
- for package_name in package_set:
- if package_name in packages_affected:
- continue
-
- for req in package_set[package_name].requires:
- if canonicalize_name(req.name) in packages_affected:
- packages_affected.add(package_name)
- break
-
- return packages_affected
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/freeze.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/freeze.py
deleted file mode 100644
index 388bb73..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/freeze.py
+++ /dev/null
@@ -1,247 +0,0 @@
-from __future__ import absolute_import
-
-import collections
-import logging
-import os
-import re
-
-from pip._vendor import six
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import RequirementParseError
-
-from pip._internal.exceptions import BadCommand, InstallationError
-from pip._internal.req.constructors import (
- install_req_from_editable, install_req_from_line,
-)
-from pip._internal.req.req_file import COMMENT_RE
-from pip._internal.utils.misc import (
- dist_is_editable, get_installed_distributions,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
- )
- from pip._internal.cache import WheelCache # noqa: F401
- from pip._vendor.pkg_resources import ( # noqa: F401
- Distribution, Requirement
- )
-
- RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
-
-
-logger = logging.getLogger(__name__)
-
-
-def freeze(
- requirement=None, # type: Optional[List[str]]
- find_links=None, # type: Optional[List[str]]
- local_only=None, # type: Optional[bool]
- user_only=None, # type: Optional[bool]
- skip_regex=None, # type: Optional[str]
- isolated=False, # type: bool
- wheel_cache=None, # type: Optional[WheelCache]
- exclude_editable=False, # type: bool
- skip=() # type: Container[str]
-):
- # type: (...) -> Iterator[str]
- find_links = find_links or []
- skip_match = None
-
- if skip_regex:
- skip_match = re.compile(skip_regex).search
-
- for link in find_links:
- yield '-f %s' % link
- installations = {} # type: Dict[str, FrozenRequirement]
- for dist in get_installed_distributions(local_only=local_only,
- skip=(),
- user_only=user_only):
- try:
- req = FrozenRequirement.from_dist(dist)
- except RequirementParseError:
- logger.warning(
- "Could not parse requirement: %s",
- dist.project_name
- )
- continue
- if exclude_editable and req.editable:
- continue
- installations[req.name] = req
-
- if requirement:
- # the options that don't get turned into an InstallRequirement
- # should only be emitted once, even if the same option is in multiple
- # requirements files, so we need to keep track of what has been emitted
- # so that we don't emit it again if it's seen again
- emitted_options = set() # type: Set[str]
- # keep track of which files a requirement is in so that we can
- # give an accurate warning if a requirement appears multiple times.
- req_files = collections.defaultdict(list) # type: Dict[str, List[str]]
- for req_file_path in requirement:
- with open(req_file_path) as req_file:
- for line in req_file:
- if (not line.strip() or
- line.strip().startswith('#') or
- (skip_match and skip_match(line)) or
- line.startswith((
- '-r', '--requirement',
- '-Z', '--always-unzip',
- '-f', '--find-links',
- '-i', '--index-url',
- '--pre',
- '--trusted-host',
- '--process-dependency-links',
- '--extra-index-url'))):
- line = line.rstrip()
- if line not in emitted_options:
- emitted_options.add(line)
- yield line
- continue
-
- if line.startswith('-e') or line.startswith('--editable'):
- if line.startswith('-e'):
- line = line[2:].strip()
- else:
- line = line[len('--editable'):].strip().lstrip('=')
- line_req = install_req_from_editable(
- line,
- isolated=isolated,
- wheel_cache=wheel_cache,
- )
- else:
- line_req = install_req_from_line(
- COMMENT_RE.sub('', line).strip(),
- isolated=isolated,
- wheel_cache=wheel_cache,
- )
-
- if not line_req.name:
- logger.info(
- "Skipping line in requirement file [%s] because "
- "it's not clear what it would install: %s",
- req_file_path, line.strip(),
- )
- logger.info(
- " (add #egg=PackageName to the URL to avoid"
- " this warning)"
- )
- elif line_req.name not in installations:
- # either it's not installed, or it is installed
- # but has been processed already
- if not req_files[line_req.name]:
- logger.warning(
- "Requirement file [%s] contains %s, but "
- "package %r is not installed",
- req_file_path,
- COMMENT_RE.sub('', line).strip(), line_req.name
- )
- else:
- req_files[line_req.name].append(req_file_path)
- else:
- yield str(installations[line_req.name]).rstrip()
- del installations[line_req.name]
- req_files[line_req.name].append(req_file_path)
-
- # Warn about requirements that were included multiple times (in a
- # single requirements file or in different requirements files).
- for name, files in six.iteritems(req_files):
- if len(files) > 1:
- logger.warning("Requirement %s included multiple times [%s]",
- name, ', '.join(sorted(set(files))))
-
-    yield (
-        '## The following requirements were added by '
-        'pip freeze:'
-    )
- for installation in sorted(
- installations.values(), key=lambda x: x.name.lower()):
- if canonicalize_name(installation.name) not in skip:
- yield str(installation).rstrip()
-
-
-def get_requirement_info(dist):
- # type: (Distribution) -> RequirementInfo
- """
- Compute and return values (req, editable, comments) for use in
- FrozenRequirement.from_dist().
- """
- if not dist_is_editable(dist):
- return (None, False, [])
-
- location = os.path.normcase(os.path.abspath(dist.location))
-
- from pip._internal.vcs import vcs, RemoteNotFoundError
- vc_type = vcs.get_backend_type(location)
-
- if not vc_type:
- req = dist.as_requirement()
- logger.debug(
-            'No VCS found for editable requirement %r in: %r', req,
- location,
- )
- comments = [
- '# Editable install with no version control ({})'.format(req)
- ]
- return (location, True, comments)
-
- try:
- req = vc_type.get_src_requirement(location, dist.project_name)
- except RemoteNotFoundError:
- req = dist.as_requirement()
- comments = [
- '# Editable {} install with no remote ({})'.format(
- vc_type.__name__, req,
- )
- ]
- return (location, True, comments)
-
- except BadCommand:
- logger.warning(
- 'cannot determine version of editable source in %s '
- '(%s command not found in path)',
- location,
- vc_type.name,
- )
- return (None, True, [])
-
- except InstallationError as exc:
- logger.warning(
- "Error when trying to get requirement for VCS system %s, "
- "falling back to uneditable format", exc
- )
- else:
- if req is not None:
- return (req, True, [])
-
- logger.warning(
- 'Could not determine repository location of %s', location
- )
- comments = ['## !! Could not determine repository location']
-
- return (None, False, comments)
-
-
-class FrozenRequirement(object):
- def __init__(self, name, req, editable, comments=()):
- # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
- self.name = name
- self.req = req
- self.editable = editable
- self.comments = comments
-
- @classmethod
- def from_dist(cls, dist):
- # type: (Distribution) -> FrozenRequirement
- req, editable, comments = get_requirement_info(dist)
- if req is None:
- req = dist.as_requirement()
-
- return cls(dist.project_name, req, editable, comments=comments)
-
- def __str__(self):
- req = self.req
- if self.editable:
- req = '-e %s' % req
- return '\n'.join(list(self.comments) + [str(req)]) + '\n'
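The resulting freeze lines, including the editable form, come out like this. A self-contained sketch of __str__ (the repository URL and comment text are placeholders):

    import sys

    def freeze_line(req, editable=False, comments=()):
        if editable:
            req = '-e %s' % req
        return '\n'.join(list(comments) + [str(req)]) + '\n'

    sys.stdout.write(freeze_line('example==1.0'))
    # example==1.0
    sys.stdout.write(freeze_line(
        'git+https://host.example/proj.git#egg=proj',
        editable=True,
        comments=['# Editable Git install with no remote (proj==0.1)'],
    ))
    # # Editable Git install with no remote (proj==0.1)
    # -e git+https://host.example/proj.git#egg=proj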
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/prepare.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/prepare.py
deleted file mode 100644
index 4f31dd5..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/prepare.py
+++ /dev/null
@@ -1,413 +0,0 @@
-"""Prepares a distribution for installation
-"""
-
-import logging
-import os
-
-from pip._vendor import pkg_resources, requests
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.download import (
- is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
-)
-from pip._internal.exceptions import (
- DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
- PreviousBuildDirError, VcsHashUnsupported,
-)
-from pip._internal.utils.compat import expanduser
-from pip._internal.utils.hashes import MissingHashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import display_path, normalize_path
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.vcs import vcs
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Optional # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
- from pip._internal.index import PackageFinder # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
- from pip._internal.req.req_tracker import RequirementTracker # noqa: F401
-
-logger = logging.getLogger(__name__)
-
-
-def make_abstract_dist(req):
- # type: (InstallRequirement) -> DistAbstraction
- """Factory to make an abstract dist object.
-
- Preconditions: Either an editable req with a source_dir, or satisfied_by or
- a wheel link, or a non-editable req with a source_dir.
-
- :return: A concrete DistAbstraction.
- """
- if req.editable:
- return IsSDist(req)
- elif req.link and req.link.is_wheel:
- return IsWheel(req)
- else:
- return IsSDist(req)
-
-
-class DistAbstraction(object):
- """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.
-
- The requirements for anything installable are as follows:
- - we must be able to determine the requirement name
- (or we can't correctly handle the non-upgrade case).
- - we must be able to generate a list of run-time dependencies
- without installing any additional packages (or we would
- have to either burn time by doing temporary isolated installs
-          or alternatively violate pip's 'don't start installing unless
- all requirements are available' rule - neither of which are
- desirable).
- - for packages with setup requirements, we must also be able
- to determine their requirements without installing additional
- packages (for the same reason as run-time dependencies)
- - we must be able to create a Distribution object exposing the
- above metadata.
- """
-
- def __init__(self, req):
- # type: (InstallRequirement) -> None
- self.req = req # type: InstallRequirement
-
- def dist(self):
- # type: () -> Any
- """Return a setuptools Dist object."""
- raise NotImplementedError
-
- def prep_for_dist(self, finder, build_isolation):
- # type: (PackageFinder, bool) -> Any
- """Ensure that we can get a Dist for this requirement."""
- raise NotImplementedError
-
-
-class IsWheel(DistAbstraction):
-
- def dist(self):
- # type: () -> pkg_resources.Distribution
- return list(pkg_resources.find_distributions(
- self.req.source_dir))[0]
-
- def prep_for_dist(self, finder, build_isolation):
- # type: (PackageFinder, bool) -> Any
- # FIXME:https://github.com/pypa/pip/issues/1112
- pass
-
-
-class IsSDist(DistAbstraction):
-
- def dist(self):
- return self.req.get_dist()
-
- def prep_for_dist(self, finder, build_isolation):
- # type: (PackageFinder, bool) -> None
- # Prepare for building. We need to:
- # 1. Load pyproject.toml (if it exists)
- # 2. Set up the build environment
-
- self.req.load_pyproject_toml()
- should_isolate = self.req.use_pep517 and build_isolation
-
- def _raise_conflicts(conflicting_with, conflicting_reqs):
- raise InstallationError(
- "Some build dependencies for %s conflict with %s: %s." % (
- self.req, conflicting_with, ', '.join(
- '%s is incompatible with %s' % (installed, wanted)
-                    for installed, wanted in sorted(conflicting_reqs))))
-
- if should_isolate:
- # Isolate in a BuildEnvironment and install the build-time
- # requirements.
- self.req.build_env = BuildEnvironment()
- self.req.build_env.install_requirements(
- finder, self.req.pyproject_requires, 'overlay',
- "Installing build dependencies"
- )
- conflicting, missing = self.req.build_env.check_requirements(
- self.req.requirements_to_check
- )
- if conflicting:
- _raise_conflicts("PEP 517/518 supported requirements",
- conflicting)
- if missing:
- logger.warning(
- "Missing build requirements in pyproject.toml for %s.",
- self.req,
- )
- logger.warning(
- "The project does not specify a build backend, and "
- "pip cannot fall back to setuptools without %s.",
- " and ".join(map(repr, sorted(missing)))
- )
- # Install any extra build dependencies that the backend requests.
- # This must be done in a second pass, as the pyproject.toml
- # dependencies must be installed before we can call the backend.
- with self.req.build_env:
- # We need to have the env active when calling the hook.
- self.req.spin_message = "Getting requirements to build wheel"
- reqs = self.req.pep517_backend.get_requires_for_build_wheel()
- conflicting, missing = self.req.build_env.check_requirements(reqs)
- if conflicting:
- _raise_conflicts("the backend dependencies", conflicting)
- self.req.build_env.install_requirements(
- finder, missing, 'normal',
- "Installing backend dependencies"
- )
-
- self.req.prepare_metadata()
- self.req.assert_source_matches_version()
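-
- # A minimal illustration of the two passes above: for a project whose
- # pyproject.toml declares
- #
- #     [build-system]
- #     requires = ["setuptools>=40.8.0", "wheel"]
- #     build-backend = "setuptools.build_meta"
- #
- # the first install_requirements() call seeds the isolated overlay with
- # setuptools and wheel, and the second pass installs whatever extra
- # requirements get_requires_for_build_wheel() reports (often none).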
-
-
-class Installed(DistAbstraction):
-
- def dist(self):
- # type: () -> pkg_resources.Distribution
- return self.req.satisfied_by
-
- def prep_for_dist(self, finder, build_isolation):
- # type: (PackageFinder, bool) -> Any
- pass
-
-
-class RequirementPreparer(object):
- """Prepares a Requirement
- """
-
- def __init__(
- self,
- build_dir, # type: str
- download_dir, # type: Optional[str]
- src_dir, # type: str
- wheel_download_dir, # type: Optional[str]
- progress_bar, # type: str
- build_isolation, # type: bool
- req_tracker # type: RequirementTracker
- ):
- # type: (...) -> None
- super(RequirementPreparer, self).__init__()
-
- self.src_dir = src_dir
- self.build_dir = build_dir
- self.req_tracker = req_tracker
-
- # Where still-packed archives should be written to. If None, they are
- # not saved, and are deleted immediately after unpacking.
- self.download_dir = download_dir
-
- # Where still-packed .whl files should be written to. If None, they are
- # written to the download_dir parameter. Kept separate from download_dir
- # to permit keeping only wheel archives for 'pip wheel'.
- if wheel_download_dir:
- wheel_download_dir = normalize_path(wheel_download_dir)
- self.wheel_download_dir = wheel_download_dir
-
- # NOTE
- # download_dir and wheel_download_dir overlap semantically and may
- # be combined if we're willing to have non-wheel archives present in
- # the wheelhouse output by 'pip wheel'.
-
- self.progress_bar = progress_bar
-
- # Is build isolation allowed?
- self.build_isolation = build_isolation
-
- @property
- def _download_should_save(self):
- # type: () -> bool
- # TODO: Modify to reduce indentation needed
- if self.download_dir:
- self.download_dir = expanduser(self.download_dir)
- if os.path.exists(self.download_dir):
- return True
- else:
- logger.critical('Could not find download directory')
- raise InstallationError(
- "Could not find or access download directory '%s'"
- % display_path(self.download_dir))
- return False
-
- def prepare_linked_requirement(
- self,
- req, # type: InstallRequirement
- session, # type: PipSession
- finder, # type: PackageFinder
- upgrade_allowed, # type: bool
- require_hashes # type: bool
- ):
- # type: (...) -> DistAbstraction
- """Prepare a requirement that would be obtained from req.link
- """
- # TODO: Break this up into smaller functions
- if req.link and req.link.scheme == 'file':
- path = url_to_path(req.link.url)
- logger.info('Processing %s', display_path(path))
- else:
- logger.info('Collecting %s', req)
-
- with indent_log():
- # @@ if filesystem packages are not marked
- # editable in a req, a non-deterministic error
- # occurs when the script attempts to unpack the
- # build directory
- req.ensure_has_source_dir(self.build_dir)
- # If a checkout exists, it's unwise to keep going. Version
- # inconsistencies are logged later, but do not fail the
- # installation.
- # FIXME: this won't upgrade when there's an existing
- # package unpacked in `req.source_dir`
- if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
- raise PreviousBuildDirError(
- "pip can't proceed with requirements '%s' due to a"
- " pre-existing build directory (%s). This is "
- "likely due to a previous installation that failed"
- ". pip is being responsible and not assuming it "
- "can delete this. Please delete it and try again."
- % (req, req.source_dir)
- )
- req.populate_link(finder, upgrade_allowed, require_hashes)
-
- # We can't hit this spot and have populate_link return None.
- # req.satisfied_by is None here (because we're
- # guarded) and upgrade has no impact except when satisfied_by
- # is not None.
- # Then inside find_requirement existing_applicable -> False
- # If no new versions are found, DistributionNotFound is raised,
- # otherwise a result is guaranteed.
- assert req.link
- link = req.link
-
- # Now that we have the real link, we can tell what kind of
- # requirements we have and raise some more informative errors
- # than otherwise. (For example, we can raise VcsHashUnsupported
- # for a VCS URL rather than HashMissing.)
- if require_hashes:
- # We could check these first 2 conditions inside
- # unpack_url and save repetition of conditions, but then
- # we would report less-useful error messages for
- # unhashable requirements, complaining that there's no
- # hash provided.
- if is_vcs_url(link):
- raise VcsHashUnsupported()
- elif is_file_url(link) and is_dir_url(link):
- raise DirectoryUrlHashUnsupported()
- if not req.original_link and not req.is_pinned:
- # Unpinned packages are asking for trouble when a new
- # version is uploaded. This isn't a security check, but
- # it saves users a surprising hash mismatch in the
- # future.
- #
- # file:/// URLs aren't pinnable, so don't complain
- # about them not being pinned.
- raise HashUnpinned()
-
- hashes = req.hashes(trust_internet=not require_hashes)
- if require_hashes and not hashes:
- # Known-good hashes are missing for this requirement, so
- # shim it with a facade object that will provoke hash
- # computation and then raise a HashMissing exception
- # showing the user what the hash should be.
- hashes = MissingHashes()
-
- try:
- download_dir = self.download_dir
- # We always delete unpacked sdists after pip has run.
- autodelete_unpacked = True
- if req.link.is_wheel and self.wheel_download_dir:
- # when doing 'pip wheel' we download wheels to a
- # dedicated dir.
- download_dir = self.wheel_download_dir
- if req.link.is_wheel:
- if download_dir:
- # When downloading, we only unpack wheels to get
- # metadata.
- autodelete_unpacked = True
- else:
- # When installing a wheel, we use the unpacked
- # wheel.
- autodelete_unpacked = False
- unpack_url(
- req.link, req.source_dir,
- download_dir, autodelete_unpacked,
- session=session, hashes=hashes,
- progress_bar=self.progress_bar
- )
- except requests.HTTPError as exc:
- logger.critical(
- 'Could not install requirement %s because of error %s',
- req,
- exc,
- )
- raise InstallationError(
- 'Could not install requirement %s because of HTTP '
- 'error %s for URL %s' %
- (req, exc, req.link)
- )
- abstract_dist = make_abstract_dist(req)
- with self.req_tracker.track(req):
- abstract_dist.prep_for_dist(finder, self.build_isolation)
- if self._download_should_save:
- # Make a .zip of the source_dir we already created.
- if req.link.scheme in vcs.all_schemes:
- req.archive(self.download_dir)
- return abstract_dist
-
- def prepare_editable_requirement(
- self,
- req, # type: InstallRequirement
- require_hashes, # type: bool
- use_user_site, # type: bool
- finder # type: PackageFinder
- ):
- # type: (...) -> DistAbstraction
- """Prepare an editable requirement
- """
- assert req.editable, "cannot prepare a non-editable req as editable"
-
- logger.info('Obtaining %s', req)
-
- with indent_log():
- if require_hashes:
- raise InstallationError(
- 'The editable requirement %s cannot be installed when '
- 'requiring hashes, because there is no single file to '
- 'hash.' % req
- )
- req.ensure_has_source_dir(self.src_dir)
- req.update_editable(not self._download_should_save)
-
- abstract_dist = make_abstract_dist(req)
- with self.req_tracker.track(req):
- abstract_dist.prep_for_dist(finder, self.build_isolation)
-
- if self._download_should_save:
- req.archive(self.download_dir)
- req.check_if_exists(use_user_site)
-
- return abstract_dist
-
- def prepare_installed_requirement(self, req, require_hashes, skip_reason):
- # type: (InstallRequirement, bool, Optional[str]) -> DistAbstraction
- """Prepare an already-installed requirement
- """
- assert req.satisfied_by, "req should have been satisfied but isn't"
- assert skip_reason is not None, (
- "did not get skip reason skipped but req.satisfied_by "
- "is set to %r" % (req.satisfied_by,)
- )
- logger.info(
- 'Requirement %s: %s (%s)',
- skip_reason, req, req.satisfied_by.version
- )
- with indent_log():
- if require_hashes:
- logger.debug(
- 'Since it is already installed, we are trusting this '
- 'package without checking its hash. To ensure a '
- 'completely repeatable environment, install into an '
- 'empty virtualenv.'
- )
- abstract_dist = Installed(req)
-
- return abstract_dist
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pep425tags.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pep425tags.py
deleted file mode 100644
index 1e782d1..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pep425tags.py
+++ /dev/null
@@ -1,381 +0,0 @@
-"""Generate and work with PEP 425 Compatibility Tags."""
-from __future__ import absolute_import
-
-import distutils.util
-import logging
-import platform
-import re
-import sys
-import sysconfig
-import warnings
-from collections import OrderedDict
-
-import pip._internal.utils.glibc
-from pip._internal.utils.compat import get_extension_suffixes
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Tuple, Callable, List, Optional, Union, Dict
- )
-
- Pep425Tag = Tuple[str, str, str]
-
-logger = logging.getLogger(__name__)
-
-_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
-
-
-def get_config_var(var):
- # type: (str) -> Optional[str]
- try:
- return sysconfig.get_config_var(var)
- except IOError as e: # Issue #1074
- warnings.warn("{}".format(e), RuntimeWarning)
- return None
-
-
-def get_abbr_impl():
- # type: () -> str
- """Return abbreviated implementation name."""
- if hasattr(sys, 'pypy_version_info'):
- pyimpl = 'pp'
- elif sys.platform.startswith('java'):
- pyimpl = 'jy'
- elif sys.platform == 'cli':
- pyimpl = 'ip'
- else:
- pyimpl = 'cp'
- return pyimpl
-
-
-def get_impl_ver():
- # type: () -> str
- """Return implementation version."""
- impl_ver = get_config_var("py_version_nodot")
- if not impl_ver or get_abbr_impl() == 'pp':
- impl_ver = ''.join(map(str, get_impl_version_info()))
- return impl_ver
-
-
-def get_impl_version_info():
- # type: () -> Tuple[int, ...]
- """Return sys.version_info-like tuple for use in decrementing the minor
- version."""
- if get_abbr_impl() == 'pp':
- # as per https://github.com/pypa/pip/issues/2882
- # attrs exist only on pypy
- return (sys.version_info[0],
- sys.pypy_version_info.major, # type: ignore
- sys.pypy_version_info.minor) # type: ignore
- else:
- return sys.version_info[0], sys.version_info[1]
-
-
-def get_impl_tag():
- # type: () -> str
- """
- Returns the Tag for this specific implementation.
- """
- return "{}{}".format(get_abbr_impl(), get_impl_ver())
-
-
-def get_flag(var, fallback, expected=True, warn=True):
- # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool
- """Use a fallback method for determining SOABI flags if the needed config
- var is unset or unavailable."""
- val = get_config_var(var)
- if val is None:
- if warn:
- logger.debug("Config variable '%s' is unset, Python ABI tag may "
- "be incorrect", var)
- return fallback()
- return val == expected
-
-
-def get_abi_tag():
- # type: () -> Optional[str]
- """Return the ABI tag based on SOABI (if available) or emulate SOABI
- (CPython 2, PyPy)."""
- soabi = get_config_var('SOABI')
- impl = get_abbr_impl()
- if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
- d = ''
- m = ''
- u = ''
- if get_flag('Py_DEBUG',
- lambda: hasattr(sys, 'gettotalrefcount'),
- warn=(impl == 'cp')):
- d = 'd'
- if get_flag('WITH_PYMALLOC',
- lambda: impl == 'cp',
- warn=(impl == 'cp')):
- m = 'm'
- if get_flag('Py_UNICODE_SIZE',
- lambda: sys.maxunicode == 0x10ffff,
- expected=4,
- warn=(impl == 'cp' and
- sys.version_info < (3, 3))) \
- and sys.version_info < (3, 3):
- u = 'u'
- abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
- elif soabi and soabi.startswith('cpython-'):
- abi = 'cp' + soabi.split('-')[1]
- elif soabi:
- abi = soabi.replace('.', '_').replace('-', '_')
- else:
- abi = None
- return abi
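-
-
-# Illustrative values: with SOABI = 'cpython-37m-x86_64-linux-gnu' the branch
-# above yields 'cp37m'; on CPython 2, where SOABI is unavailable, the d/m/u
-# flags are reconstructed instead, giving e.g. 'cp27mu' on a wide-unicode
-# build.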
-
-
-def _is_running_32bit():
- # type: () -> bool
- return sys.maxsize == 2147483647
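-
-
-# Note: sys.maxsize is 2**31 - 1 (2147483647) on a 32-bit build and 2**63 - 1
-# on a 64-bit build, so this detects a 32-bit interpreter even when the
-# operating system itself is 64-bit.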
-
-
-def get_platform():
- # type: () -> str
- """Return our platform name 'win32', 'linux_x86_64'"""
- if sys.platform == 'darwin':
- # distutils.util.get_platform() returns the release based on the value
- # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
- # be significantly older than the user's current machine.
- release, _, machine = platform.mac_ver()
- split_ver = release.split('.')
-
- if machine == "x86_64" and _is_running_32bit():
- machine = "i386"
- elif machine == "ppc64" and _is_running_32bit():
- machine = "ppc"
-
- return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
-
- # XXX remove distutils dependency
- result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
- if result == "linux_x86_64" and _is_running_32bit():
- # 32 bit Python program (running on a 64 bit Linux): pip should only
- # install and run 32 bit compiled extensions in that case.
- result = "linux_i686"
-
- return result
-
-
-def is_manylinux1_compatible():
- # type: () -> bool
- # Only Linux, and only x86-64 / i686
- if get_platform() not in {"linux_x86_64", "linux_i686"}:
- return False
-
- # Check for presence of _manylinux module
- try:
- import _manylinux
- return bool(_manylinux.manylinux1_compatible)
- except (ImportError, AttributeError):
- # Fall through to heuristic check below
- pass
-
- # Check glibc version. CentOS 5 uses glibc 2.5.
- return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
-
-
-def is_manylinux2010_compatible():
- # type: () -> bool
- # Only Linux, and only x86-64 / i686
- if get_platform() not in {"linux_x86_64", "linux_i686"}:
- return False
-
- # Check for presence of _manylinux module
- try:
- import _manylinux
- return bool(_manylinux.manylinux2010_compatible)
- except (ImportError, AttributeError):
- # Fall through to heuristic check below
- pass
-
- # Check glibc version. CentOS 6 uses glibc 2.12.
- return pip._internal.utils.glibc.have_compatible_glibc(2, 12)
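-
-
-# Sketch of the override hook used above (hypothetical _manylinux module, per
-# PEP 513/571): a platform that is not actually manylinux-compatible can ship
-#
-#   # _manylinux.py
-#   manylinux1_compatible = False
-#   manylinux2010_compatible = False
-#
-# to short-circuit the glibc heuristics.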
-
-
-def get_darwin_arches(major, minor, machine):
- # type: (int, int, str) -> List[str]
- """Return a list of supported arches (including group arches) for
- the given major, minor and machine architecture of a macOS machine.
- """
- arches = []
-
- def _supports_arch(major, minor, arch):
- # type: (int, int, str) -> bool
- # Looking at the application support for macOS versions in the chart
- # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
- # our timeline looks roughly like:
- #
- # 10.0 - Introduces ppc support.
- # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
- # and x86_64 support is CLI only, and cannot be used for GUI
- # applications.
- # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
- # 10.6 - Drops support for ppc64
- # 10.7 - Drops support for ppc
- #
- # Given that we do not know if we're installing a CLI or a GUI
- # application, we must be conservative and assume it might be a GUI
- # application and behave as if ppc64 and x86_64 support did not occur
- # until 10.5.
- #
- # Note: The above information is taken from the "Application support"
- # column in the chart not the "Processor support" since I believe
- # that we care about what instruction sets an application can use
- # not which processors the OS supports.
- if arch == 'ppc':
- return (major, minor) <= (10, 5)
- if arch == 'ppc64':
- return (major, minor) == (10, 5)
- if arch == 'i386':
- return (major, minor) >= (10, 4)
- if arch == 'x86_64':
- return (major, minor) >= (10, 5)
- if arch in groups:
- for garch in groups[arch]:
- if _supports_arch(major, minor, garch):
- return True
- return False
-
- groups = OrderedDict([
- ("fat", ("i386", "ppc")),
- ("intel", ("x86_64", "i386")),
- ("fat64", ("x86_64", "ppc64")),
- ("fat32", ("x86_64", "i386", "ppc")),
- ]) # type: Dict[str, Tuple[str, ...]]
-
- if _supports_arch(major, minor, machine):
- arches.append(machine)
-
- for garch in groups:
- if machine in groups[garch] and _supports_arch(major, minor, garch):
- arches.append(garch)
-
- arches.append('universal')
-
- return arches
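-
-
-# Illustrative example (values assume the support table encoded above):
-#
-#   >>> get_darwin_arches(10, 13, 'x86_64')
-#   ['x86_64', 'intel', 'fat64', 'fat32', 'universal']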
-
-
-def get_all_minor_versions_as_strings(version_info):
- # type: (Tuple[int, ...]) -> List[str]
- versions = []
- major = version_info[:-1]
- # Support all previous minor Python versions.
- for minor in range(version_info[-1], -1, -1):
- versions.append(''.join(map(str, major + (minor,))))
- return versions
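-
-
-# Illustrative example:
-#
-#   >>> get_all_minor_versions_as_strings((3, 7))
-#   ['37', '36', '35', '34', '33', '32', '31', '30']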
-
-
-def get_supported(
- versions=None, # type: Optional[List[str]]
- noarch=False, # type: bool
- platform=None, # type: Optional[str]
- impl=None, # type: Optional[str]
- abi=None # type: Optional[str]
-):
- # type: (...) -> List[Pep425Tag]
- """Return a list of supported tags for each version specified in
- `versions`.
-
- :param versions: a list of string versions, of the form ["33", "32"],
- or None. The first version will be assumed to support our ABI.
- :param platform: specify the exact platform you want valid
- tags for, or None. If None, use the local system platform.
- :param impl: specify the exact implementation you want valid
- tags for, or None. If None, use the local interpreter impl.
- :param abi: specify the exact abi you want valid
- tags for, or None. If None, use the local interpreter abi.
- """
- supported = []
-
- # Versions must be given with respect to the preference
- if versions is None:
- version_info = get_impl_version_info()
- versions = get_all_minor_versions_as_strings(version_info)
-
- impl = impl or get_abbr_impl()
-
- abis = [] # type: List[str]
-
- abi = abi or get_abi_tag()
- if abi:
- abis[0:0] = [abi]
-
- abi3s = set()
- for suffix in get_extension_suffixes():
- if suffix.startswith('.abi'):
- abi3s.add(suffix.split('.', 2)[1])
-
- abis.extend(sorted(list(abi3s)))
-
- abis.append('none')
-
- if not noarch:
- arch = platform or get_platform()
- arch_prefix, arch_sep, arch_suffix = arch.partition('_')
- if arch.startswith('macosx'):
- # support macosx-10.6-intel on macosx-10.9-x86_64
- match = _osx_arch_pat.match(arch)
- if match:
- name, major, minor, actual_arch = match.groups()
- tpl = '{}_{}_%i_%s'.format(name, major)
- arches = []
- for m in reversed(range(int(minor) + 1)):
- for a in get_darwin_arches(int(major), m, actual_arch):
- arches.append(tpl % (m, a))
- else:
- # arch pattern didn't match (?!)
- arches = [arch]
- elif arch_prefix == 'manylinux2010':
- # manylinux1 wheels run on most manylinux2010 systems with the
- # exception of wheels depending on ncurses. PEP 571 states
- # manylinux1 wheels should be considered manylinux2010 wheels:
- # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
- arches = [arch, 'manylinux1' + arch_sep + arch_suffix]
- elif platform is None:
- arches = []
- if is_manylinux2010_compatible():
- arches.append('manylinux2010' + arch_sep + arch_suffix)
- if is_manylinux1_compatible():
- arches.append('manylinux1' + arch_sep + arch_suffix)
- arches.append(arch)
- else:
- arches = [arch]
-
- # Current version, current API (built specifically for our Python):
- for abi in abis:
- for arch in arches:
- supported.append(('%s%s' % (impl, versions[0]), abi, arch))
-
- # abi3 modules compatible with older version of Python
- for version in versions[1:]:
- # abi3 was introduced in Python 3.2
- if version in {'31', '30'}:
- break
- for abi in abi3s: # empty set if not Python 3
- for arch in arches:
- supported.append(("%s%s" % (impl, version), abi, arch))
-
- # Has binaries, does not use the Python API:
- for arch in arches:
- supported.append(('py%s' % (versions[0][0]), 'none', arch))
-
- # No abi / arch, but requires our implementation:
- supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
- # Tagged specifically as being cross-version compatible
- # (with just the major version specified)
- supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
-
- # No abi / arch, generic Python
- for i, version in enumerate(versions):
- supported.append(('py%s' % (version,), 'none', 'any'))
- if i == 0:
- supported.append(('py%s' % (version[0]), 'none', 'any'))
-
- return supported
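-
-# Illustrative sketch of a typical result (assuming 64-bit CPython 3.7 on a
-# manylinux1-compatible Linux): the list runs from the most specific tag to
-# the most generic one, roughly
-#
-#   ('cp37', 'cp37m', 'manylinux1_x86_64'), ('cp37', 'cp37m', 'linux_x86_64'),
-#   ..., ('py37', 'none', 'any'), ('py3', 'none', 'any'), ...,
-#   ('py30', 'none', 'any')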
-
-
-implementation_tag = get_impl_tag()
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pyproject.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pyproject.py
deleted file mode 100644
index 8d739a6..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/pyproject.py
+++ /dev/null
@@ -1,171 +0,0 @@
-from __future__ import absolute_import
-
-import io
-import os
-import sys
-
-from pip._vendor import pytoml, six
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Tuple, Optional, List # noqa: F401
-
-
-def _is_list_of_str(obj):
- # type: (Any) -> bool
- return (
- isinstance(obj, list) and
- all(isinstance(item, six.string_types) for item in obj)
- )
-
-
-def make_pyproject_path(setup_py_dir):
- # type: (str) -> str
- path = os.path.join(setup_py_dir, 'pyproject.toml')
-
- # Python2 __file__ should not be unicode
- if six.PY2 and isinstance(path, six.text_type):
- path = path.encode(sys.getfilesystemencoding())
-
- return path
-
-
-def load_pyproject_toml(
- use_pep517, # type: Optional[bool]
- pyproject_toml, # type: str
- setup_py, # type: str
- req_name # type: str
-):
- # type: (...) -> Optional[Tuple[List[str], str, List[str]]]
- """Load the pyproject.toml file.
-
- Parameters:
- use_pep517 - Has the user requested PEP 517 processing? None
- means the user hasn't explicitly specified.
- pyproject_toml - Location of the project's pyproject.toml file
- setup_py - Location of the project's setup.py file
- req_name - The name of the requirement we're processing (for
- error reporting)
-
- Returns:
- None if we should use the legacy code path, otherwise a tuple
- (
- requirements from pyproject.toml,
- name of PEP 517 backend,
- requirements we should check are installed after setting
- up the build environment
- )
- """
- has_pyproject = os.path.isfile(pyproject_toml)
- has_setup = os.path.isfile(setup_py)
-
- if has_pyproject:
- with io.open(pyproject_toml, encoding="utf-8") as f:
- pp_toml = pytoml.load(f)
- build_system = pp_toml.get("build-system")
- else:
- build_system = None
-
- # The following cases must use PEP 517
- # We check for use_pep517 being non-None and falsey because that means
- # the user explicitly requested --no-use-pep517. The value 0 as
- # opposed to False can occur when the value is provided via an
- # environment variable or config file option (due to the quirk of
- # strtobool() returning an integer in pip's configuration code).
- if has_pyproject and not has_setup:
- if use_pep517 is not None and not use_pep517:
- raise InstallationError(
- "Disabling PEP 517 processing is invalid: "
- "project does not have a setup.py"
- )
- use_pep517 = True
- elif build_system and "build-backend" in build_system:
- if use_pep517 is not None and not use_pep517:
- raise InstallationError(
- "Disabling PEP 517 processing is invalid: "
- "project specifies a build backend of {} "
- "in pyproject.toml".format(
- build_system["build-backend"]
- )
- )
- use_pep517 = True
-
- # If we haven't worked out whether to use PEP 517 yet,
- # and the user hasn't explicitly stated a preference,
- # we do so if the project has a pyproject.toml file.
- elif use_pep517 is None:
- use_pep517 = has_pyproject
-
- # At this point, we know whether we're going to use PEP 517.
- assert use_pep517 is not None
-
- # If we're using the legacy code path, there is nothing further
- # for us to do here.
- if not use_pep517:
- return None
-
- if build_system is None:
- # Either the user has a pyproject.toml with no build-system
- # section, or the user has no pyproject.toml, but has opted in
- # explicitly via --use-pep517.
- # In the absence of any explicit backend specification, we
- # assume the setuptools backend that most closely emulates the
- # traditional direct setup.py execution, and require wheel and
- # a version of setuptools that supports that backend.
-
- build_system = {
- "requires": ["setuptools>=40.8.0", "wheel"],
- "build-backend": "setuptools.build_meta:__legacy__",
- }
-
- # If we're using PEP 517, we have build system information (either
- # from pyproject.toml, or defaulted by the code above).
- # Note that at this point, we do not know if the user has actually
- # specified a backend, though.
- assert build_system is not None
-
- # Ensure that the build-system section in pyproject.toml conforms
- # to PEP 518.
- error_template = (
- "{package} has a pyproject.toml file that does not comply "
- "with PEP 518: {reason}"
- )
-
- # Specifying the build-system table but not the requires key is invalid
- if "requires" not in build_system:
- raise InstallationError(
- error_template.format(package=req_name, reason=(
- "it has a 'build-system' table but not "
- "'build-system.requires' which is mandatory in the table"
- ))
- )
-
- # Error out if requires is not a list of strings
- requires = build_system["requires"]
- if not _is_list_of_str(requires):
- raise InstallationError(error_template.format(
- package=req_name,
- reason="'build-system.requires' is not a list of strings.",
- ))
-
- backend = build_system.get("build-backend")
- check = [] # type: List[str]
- if backend is None:
- # If the user didn't specify a backend, we assume they want to use
- # the setuptools backend. But we can't be sure they have included
- # a version of setuptools which supplies the backend, or wheel
- # (which is needed by the backend) in their requirements. So we
- # make a note to check that those requirements are present once
- # we have set up the environment.
- # This is quite a lot of work to check for a very specific case. But
- # the problem is, that case is potentially quite common - projects that
- # adopted PEP 518 early for the ability to specify requirements to
- # execute setup.py, but never considered needing to mention the build
- # tools themselves. The original PEP 518 code had a similar check (but
- # implemented in a different way).
- backend = "setuptools.build_meta:__legacy__"
- check = ["setuptools>=40.8.0", "wheel"]
-
- return (requires, backend, check)
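-
-
-# Minimal usage sketch (the paths and project name are hypothetical):
-#
-#   result = load_pyproject_toml(
-#       use_pep517=None,
-#       pyproject_toml='/src/demo/pyproject.toml',
-#       setup_py='/src/demo/setup.py',
-#       req_name='demo',
-#   )
-#   if result is None:
-#       pass  # legacy setup.py code path
-#   else:
-#       requires, backend, check = result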
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py
deleted file mode 100644
index 5e4eb92..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-
-from .req_install import InstallRequirement
-from .req_set import RequirementSet
-from .req_file import parse_requirements
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import List, Sequence # noqa: F401
-
-__all__ = [
- "RequirementSet", "InstallRequirement",
- "parse_requirements", "install_given_reqs",
-]
-
-logger = logging.getLogger(__name__)
-
-
-def install_given_reqs(
- to_install, # type: List[InstallRequirement]
- install_options, # type: List[str]
- global_options=(), # type: Sequence[str]
- *args, **kwargs
-):
- # type: (...) -> List[InstallRequirement]
- """
- Install everything in the given list.
-
- (to be called after having downloaded and unpacked the packages)
- """
-
- if to_install:
- logger.info(
- 'Installing collected packages: %s',
- ', '.join([req.name for req in to_install]),
- )
-
- with indent_log():
- for requirement in to_install:
- if requirement.conflicts_with:
- logger.info(
- 'Found existing installation: %s',
- requirement.conflicts_with,
- )
- with indent_log():
- uninstalled_pathset = requirement.uninstall(
- auto_confirm=True
- )
- try:
- requirement.install(
- install_options,
- global_options,
- *args,
- **kwargs
- )
- except Exception:
- should_rollback = (
- requirement.conflicts_with and
- not requirement.install_succeeded
- )
- # if install did not succeed, rollback previous uninstall
- if should_rollback:
- uninstalled_pathset.rollback()
- raise
- else:
- should_commit = (
- requirement.conflicts_with and
- requirement.install_succeeded
- )
- if should_commit:
- uninstalled_pathset.commit()
- requirement.remove_temporary_source()
-
- return to_install
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 3d591d3..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
deleted file mode 100644
index b60d743..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
deleted file mode 100644
index d5eb5fc..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
deleted file mode 100644
index ef28d33..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
deleted file mode 100644
index c6e6d28..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
deleted file mode 100644
index f054d9f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
deleted file mode 100644
index 9aca886..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py
deleted file mode 100644
index 1eed1dd..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py
+++ /dev/null
@@ -1,339 +0,0 @@
-"""Backing implementation for InstallRequirement's various constructors
-
-The idea here is that these formed a major chunk of InstallRequirement's size,
-so moving them and the support code dedicated to them out of that class
-makes the rest of the code easier to understand.
-
-These are meant to be used elsewhere within pip to create instances of
-InstallRequirement.
-"""
-
-import logging
-import os
-import re
-
-from pip._vendor.packaging.markers import Marker
-from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
-from pip._vendor.packaging.specifiers import Specifier
-from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
-
-from pip._internal.download import (
- is_archive_file, is_url, path_to_url, url_to_path,
-)
-from pip._internal.exceptions import InstallationError
-from pip._internal.models.index import PyPI, TestPyPI
-from pip._internal.models.link import Link
-from pip._internal.pyproject import make_pyproject_path
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.misc import is_installable_dir
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.vcs import vcs
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Optional, Tuple, Set, Any, Union, Text, Dict,
- )
- from pip._internal.cache import WheelCache # noqa: F401
-
-
-__all__ = [
- "install_req_from_editable", "install_req_from_line",
- "parse_editable"
-]
-
-logger = logging.getLogger(__name__)
-operators = Specifier._operators.keys()
-
-
-def _strip_extras(path):
- # type: (str) -> Tuple[str, Optional[str]]
- m = re.match(r'^(.+)(\[[^\]]+\])$', path)
- extras = None
- if m:
- path_no_extras = m.group(1)
- extras = m.group(2)
- else:
- path_no_extras = path
-
- return path_no_extras, extras
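-
-
-# Illustrative examples:
-#
-#   >>> _strip_extras('./proj[dev,test]')
-#   ('./proj', '[dev,test]')
-#   >>> _strip_extras('./proj')
-#   ('./proj', None)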
-
-
-def parse_editable(editable_req):
- # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
- """Parses an editable requirement into:
- - a requirement name
- - a URL
- - extras
- Accepted requirements:
- svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
- .[some_extra]
- """
-
- url = editable_req
-
- # If a file path is specified with extras, strip off the extras.
- url_no_extras, extras = _strip_extras(url)
-
- if os.path.isdir(url_no_extras):
- if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
- msg = (
- 'File "setup.py" not found. Directory cannot be installed '
- 'in editable mode: {}'.format(os.path.abspath(url_no_extras))
- )
- pyproject_path = make_pyproject_path(url_no_extras)
- if os.path.isfile(pyproject_path):
- msg += (
- '\n(A "pyproject.toml" file was found, but editable '
- 'mode currently requires a setup.py based build.)'
- )
- raise InstallationError(msg)
-
- # Treating it as code that has already been checked out
- url_no_extras = path_to_url(url_no_extras)
-
- if url_no_extras.lower().startswith('file:'):
- package_name = Link(url_no_extras).egg_fragment
- if extras:
- return (
- package_name,
- url_no_extras,
- Requirement("placeholder" + extras.lower()).extras,
- )
- else:
- return package_name, url_no_extras, None
-
- for version_control in vcs:
- if url.lower().startswith('%s:' % version_control):
- url = '%s+%s' % (version_control, url)
- break
-
- if '+' not in url:
- raise InstallationError(
- '%s should either be a path to a local project or a VCS url '
- 'beginning with svn+, git+, hg+, or bzr+' %
- editable_req
- )
-
- vc_type = url.split('+', 1)[0].lower()
-
- if not vcs.get_backend(vc_type):
- error_message = 'For --editable=%s only ' % editable_req + \
- ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
- ' is currently supported'
- raise InstallationError(error_message)
-
- package_name = Link(url).egg_fragment
- if not package_name:
- raise InstallationError(
- "Could not detect requirement name for '%s', please specify one "
- "with #egg=your_package_name" % editable_req
- )
- return package_name, url, None
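-
-
-# Illustrative example (assuming the standard VCS backends are registered and
-# the argument is not a local directory):
-#
-#   >>> parse_editable('git+https://github.com/pypa/pip.git#egg=pip')
-#   ('pip', 'git+https://github.com/pypa/pip.git#egg=pip', None)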
-
-
-def deduce_helpful_msg(req):
- # type: (str) -> str
- """Returns helpful msg in case requirements file does not exist,
- or cannot be parsed.
-
- :param req: Requirements file path
- """
- msg = ""
- if os.path.exists(req):
- msg = " It does exist."
- # Try to parse and check if it is a requirements file.
- try:
- with open(req, 'r') as fp:
- # parse first line only
- next(parse_requirements(fp.read()))
- msg += " The argument you provided " + \
- "(%s) appears to be a" % (req) + \
- " requirements file. If that is the" + \
- " case, use the '-r' flag to install" + \
- " the packages specified within it."
- except RequirementParseError:
- logger.debug("Cannot parse '%s' as requirements \
- file" % (req), exc_info=True)
- else:
- msg += " File '%s' does not exist." % (req)
- return msg
-
-
-# ---- The actual constructors follow ----
-
-
-def install_req_from_editable(
- editable_req, # type: str
- comes_from=None, # type: Optional[str]
- use_pep517=None, # type: Optional[bool]
- isolated=False, # type: bool
- options=None, # type: Optional[Dict[str, Any]]
- wheel_cache=None, # type: Optional[WheelCache]
- constraint=False # type: bool
-):
- # type: (...) -> InstallRequirement
- name, url, extras_override = parse_editable(editable_req)
- if url.startswith('file:'):
- source_dir = url_to_path(url)
- else:
- source_dir = None
-
- if name is not None:
- try:
- req = Requirement(name)
- except InvalidRequirement:
- raise InstallationError("Invalid requirement: '%s'" % name)
- else:
- req = None
- return InstallRequirement(
- req, comes_from, source_dir=source_dir,
- editable=True,
- link=Link(url),
- constraint=constraint,
- use_pep517=use_pep517,
- isolated=isolated,
- options=options if options else {},
- wheel_cache=wheel_cache,
- extras=extras_override or (),
- )
-
-
-def install_req_from_line(
- name, # type: str
- comes_from=None, # type: Optional[Union[str, InstallRequirement]]
- use_pep517=None, # type: Optional[bool]
- isolated=False, # type: bool
- options=None, # type: Optional[Dict[str, Any]]
- wheel_cache=None, # type: Optional[WheelCache]
- constraint=False # type: bool
-):
- # type: (...) -> InstallRequirement
- """Creates an InstallRequirement from a name, which might be a
- requirement specifier, a directory containing 'setup.py', a filename, or a URL.
- """
- if is_url(name):
- marker_sep = '; '
- else:
- marker_sep = ';'
- if marker_sep in name:
- name, markers_as_string = name.split(marker_sep, 1)
- markers_as_string = markers_as_string.strip()
- if not markers_as_string:
- markers = None
- else:
- markers = Marker(markers_as_string)
- else:
- markers = None
- name = name.strip()
- req_as_string = None
- path = os.path.normpath(os.path.abspath(name))
- link = None
- extras_as_string = None
-
- if is_url(name):
- link = Link(name)
- else:
- p, extras_as_string = _strip_extras(path)
- looks_like_dir = os.path.isdir(p) and (
- os.path.sep in name or
- (os.path.altsep is not None and os.path.altsep in name) or
- name.startswith('.')
- )
- if looks_like_dir:
- if not is_installable_dir(p):
- raise InstallationError(
- "Directory %r is not installable. Neither 'setup.py' "
- "nor 'pyproject.toml' found." % name
- )
- link = Link(path_to_url(p))
- elif is_archive_file(p):
- if not os.path.isfile(p):
- logger.warning(
- 'Requirement %r looks like a filename, but the '
- 'file does not exist',
- name
- )
- link = Link(path_to_url(p))
-
- # it's a local file, dir, or url
- if link:
- # Handle relative file URLs
- if link.scheme == 'file' and re.search(r'\.\./', link.url):
- link = Link(
- path_to_url(os.path.normpath(os.path.abspath(link.path))))
- # wheel file
- if link.is_wheel:
- wheel = Wheel(link.filename) # can raise InvalidWheelFilename
- req_as_string = "%s==%s" % (wheel.name, wheel.version)
- else:
- # set the req to the egg fragment. when it's not there, this
- # will become an 'unnamed' requirement
- req_as_string = link.egg_fragment
-
- # a requirement specifier
- else:
- req_as_string = name
-
- if extras_as_string:
- extras = Requirement("placeholder" + extras_as_string.lower()).extras
- else:
- extras = ()
- if req_as_string is not None:
- try:
- req = Requirement(req_as_string)
- except InvalidRequirement:
- if os.path.sep in req_as_string:
- add_msg = "It looks like a path."
- add_msg += deduce_helpful_msg(req_as_string)
- elif ('=' in req_as_string and
- not any(op in req_as_string for op in operators)):
- add_msg = "= is not a valid operator. Did you mean == ?"
- else:
- add_msg = ""
- raise InstallationError(
- "Invalid requirement: '%s'\n%s" % (req_as_string, add_msg)
- )
- else:
- req = None
-
- return InstallRequirement(
- req, comes_from, link=link, markers=markers,
- use_pep517=use_pep517, isolated=isolated,
- options=options if options else {},
- wheel_cache=wheel_cache,
- constraint=constraint,
- extras=extras,
- )
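-
-# Usage sketch of the three main shapes this constructor accepts (the path
-# and URL are hypothetical):
-#
-#   install_req_from_line('requests==2.21.0')         # requirement specifier
-#   install_req_from_line('./downloads/foo-1.0.whl')  # local archive
-#   install_req_from_line('https://example.com/foo-1.0.tar.gz')  # URL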
-
-
-def install_req_from_req_string(
- req_string, # type: str
- comes_from=None, # type: Optional[InstallRequirement]
- isolated=False, # type: bool
- wheel_cache=None, # type: Optional[WheelCache]
- use_pep517=None # type: Optional[bool]
-):
- # type: (...) -> InstallRequirement
- try:
- req = Requirement(req_string)
- except InvalidRequirement:
- raise InstallationError("Invalid requirement: '%s'" % req)
-
- domains_not_allowed = [
- PyPI.file_storage_domain,
- TestPyPI.file_storage_domain,
- ]
- if (req.url and comes_from and comes_from.link and
- comes_from.link.netloc in domains_not_allowed):
- # Explicitly disallow pypi packages that depend on external urls
- raise InstallationError(
- "Packages installed from PyPI cannot depend on packages "
- "which are not also hosted on PyPI.\n"
- "%s depends on %s " % (comes_from.name, req)
- )
-
- return InstallRequirement(
- req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
- use_pep517=use_pep517
- )
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py
deleted file mode 100644
index 726f2f6..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py
+++ /dev/null
@@ -1,382 +0,0 @@
-"""
-Requirements file parsing
-"""
-
-from __future__ import absolute_import
-
-import optparse
-import os
-import re
-import shlex
-import sys
-
-from pip._vendor.six.moves import filterfalse
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.cli import cmdoptions
-from pip._internal.download import get_file_content
-from pip._internal.exceptions import RequirementsFileParseError
-from pip._internal.req.constructors import (
- install_req_from_editable, install_req_from_line,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Iterator, Tuple, Optional, List, Callable, Text
- )
- from pip._internal.req import InstallRequirement # noqa: F401
- from pip._internal.cache import WheelCache # noqa: F401
- from pip._internal.index import PackageFinder # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
-
- ReqFileLines = Iterator[Tuple[int, Text]]
-
-__all__ = ['parse_requirements']
-
-SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
-COMMENT_RE = re.compile(r'(^|\s)+#.*$')
-
-# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
-# variable name consisting of only uppercase letters, digits or the '_'
-# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
-# 2013 Edition.
-ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
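-
-# For illustration: this pattern matches '${API_TOKEN}' (var='${API_TOKEN}',
-# name='API_TOKEN') but deliberately not '$API_TOKEN' or '${api_token}',
-# since only uppercase letters, digits and '_' are allowed in the name.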
-
-SUPPORTED_OPTIONS = [
- cmdoptions.constraints,
- cmdoptions.editable,
- cmdoptions.requirements,
- cmdoptions.no_index,
- cmdoptions.index_url,
- cmdoptions.find_links,
- cmdoptions.extra_index_url,
- cmdoptions.always_unzip,
- cmdoptions.no_binary,
- cmdoptions.only_binary,
- cmdoptions.pre,
- cmdoptions.trusted_host,
- cmdoptions.require_hashes,
-] # type: List[Callable[..., optparse.Option]]
-
-# options to be passed to requirements
-SUPPORTED_OPTIONS_REQ = [
- cmdoptions.install_options,
- cmdoptions.global_options,
- cmdoptions.hash,
-] # type: List[Callable[..., optparse.Option]]
-
-# the 'dest' string values
-SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
-
-
-def parse_requirements(
- filename, # type: str
- finder=None, # type: Optional[PackageFinder]
- comes_from=None, # type: Optional[str]
- options=None, # type: Optional[optparse.Values]
- session=None, # type: Optional[PipSession]
- constraint=False, # type: bool
- wheel_cache=None, # type: Optional[WheelCache]
- use_pep517=None # type: Optional[bool]
-):
- # type: (...) -> Iterator[InstallRequirement]
- """Parse a requirements file and yield InstallRequirement instances.
-
- :param filename: Path or url of requirements file.
- :param finder: Instance of pip.index.PackageFinder.
- :param comes_from: Origin description of requirements.
- :param options: cli options.
- :param session: Instance of pip.download.PipSession.
- :param constraint: If true, parsing a constraint file rather than
- requirements file.
- :param wheel_cache: Instance of pip.wheel.WheelCache
- :param use_pep517: Value of the --use-pep517 option.
- """
- if session is None:
- raise TypeError(
- "parse_requirements() missing 1 required keyword argument: "
- "'session'"
- )
-
- _, content = get_file_content(
- filename, comes_from=comes_from, session=session
- )
-
- lines_enum = preprocess(content, options)
-
- for line_number, line in lines_enum:
- req_iter = process_line(line, filename, line_number, finder,
- comes_from, options, session, wheel_cache,
- use_pep517=use_pep517, constraint=constraint)
- for req in req_iter:
- yield req
-
-
-def preprocess(content, options):
- # type: (Text, Optional[optparse.Values]) -> ReqFileLines
- """Split, filter, and join lines, and return a line iterator
-
- :param content: the content of the requirements file
- :param options: cli options
- """
- lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
- lines_enum = join_lines(lines_enum)
- lines_enum = ignore_comments(lines_enum)
- lines_enum = skip_regex(lines_enum, options)
- lines_enum = expand_env_variables(lines_enum)
- return lines_enum
-
-
-def process_line(
- line, # type: Text
- filename, # type: str
- line_number, # type: int
- finder=None, # type: Optional[PackageFinder]
- comes_from=None, # type: Optional[str]
- options=None, # type: Optional[optparse.Values]
- session=None, # type: Optional[PipSession]
- wheel_cache=None, # type: Optional[WheelCache]
- use_pep517=None, # type: Optional[bool]
- constraint=False # type: bool
-):
- # type: (...) -> Iterator[InstallRequirement]
- """Process a single requirements line; This can result in creating/yielding
- requirements, or updating the finder.
-
- For lines that contain requirements, the only options that have an effect
- are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
- requirement. Other options from SUPPORTED_OPTIONS may be present, but are
- ignored.
-
- For lines that do not contain requirements, the only options that have an
- effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
- be present, but are ignored. These lines may contain multiple options
- (although our docs imply only one is supported), and all are parsed and
- affect the finder.
-
- :param constraint: If True, parsing a constraints file.
- :param options: OptionParser options that we may update
- """
- parser = build_parser(line)
- defaults = parser.get_default_values()
- defaults.index_url = None
- if finder:
- defaults.format_control = finder.format_control
- args_str, options_str = break_args_options(line)
- # Prior to 2.7.3, shlex cannot deal with unicode entries
- if sys.version_info < (2, 7, 3):
- # https://github.com/python/mypy/issues/1174
- options_str = options_str.encode('utf8') # type: ignore
- # https://github.com/python/mypy/issues/1174
- opts, _ = parser.parse_args(
- shlex.split(options_str), defaults) # type: ignore
-
- # preserve for the nested code path
- line_comes_from = '%s %s (line %s)' % (
- '-c' if constraint else '-r', filename, line_number,
- )
-
- # yield a line requirement
- if args_str:
- isolated = options.isolated_mode if options else False
- if options:
- cmdoptions.check_install_build_global(options, opts)
- # get the options that apply to requirements
- req_options = {}
- for dest in SUPPORTED_OPTIONS_REQ_DEST:
- if dest in opts.__dict__ and opts.__dict__[dest]:
- req_options[dest] = opts.__dict__[dest]
- yield install_req_from_line(
- args_str, line_comes_from, constraint=constraint,
- use_pep517=use_pep517,
- isolated=isolated, options=req_options, wheel_cache=wheel_cache
- )
-
- # yield an editable requirement
- elif opts.editables:
- isolated = options.isolated_mode if options else False
- yield install_req_from_editable(
- opts.editables[0], comes_from=line_comes_from,
- use_pep517=use_pep517,
- constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
- )
-
- # parse a nested requirements file
- elif opts.requirements or opts.constraints:
- if opts.requirements:
- req_path = opts.requirements[0]
- nested_constraint = False
- else:
- req_path = opts.constraints[0]
- nested_constraint = True
- # original file is over http
- if SCHEME_RE.search(filename):
- # do a url join so relative paths work
- req_path = urllib_parse.urljoin(filename, req_path)
- # original file and nested file are paths
- elif not SCHEME_RE.search(req_path):
- # do a join so relative paths work
- req_path = os.path.join(os.path.dirname(filename), req_path)
- # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
- parsed_reqs = parse_requirements(
- req_path, finder, comes_from, options, session,
- constraint=nested_constraint, wheel_cache=wheel_cache
- )
- for req in parsed_reqs:
- yield req
-
- # percolate hash-checking option upward
- elif opts.require_hashes:
- options.require_hashes = opts.require_hashes
-
- # set finder options
- elif finder:
- if opts.index_url:
- finder.index_urls = [opts.index_url]
- if opts.no_index is True:
- finder.index_urls = []
- if opts.extra_index_urls:
- finder.index_urls.extend(opts.extra_index_urls)
- if opts.find_links:
- # FIXME: it would be nice to keep track of the source
- # of the find_links: support a find-links local path
- # relative to a requirements file.
- value = opts.find_links[0]
- req_dir = os.path.dirname(os.path.abspath(filename))
- relative_to_reqs_file = os.path.join(req_dir, value)
- if os.path.exists(relative_to_reqs_file):
- value = relative_to_reqs_file
- finder.find_links.append(value)
- if opts.pre:
- finder.allow_all_prereleases = True
- if opts.trusted_hosts:
- finder.secure_origins.extend(
- ("*", host, "*") for host in opts.trusted_hosts)
-
-
-def break_args_options(line):
- # type: (Text) -> Tuple[str, Text]
- """Break up the line into an args and options string. We only want to shlex
- (and then optparse) the options, not the args. args can contain markers
- which are corrupted by shlex.
- """
- tokens = line.split(' ')
- args = []
- options = tokens[:]
- for token in tokens:
- if token.startswith('-') or token.startswith('--'):
- break
- else:
- args.append(token)
- options.pop(0)
- return ' '.join(args), ' '.join(options) # type: ignore
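-
-
-# Illustrative example (the hash value is hypothetical):
-#
-#   >>> break_args_options('foo >= 1.2 --hash=sha256:deadbeef')
-#   ('foo >= 1.2', '--hash=sha256:deadbeef')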
-
-
-def build_parser(line):
- # type: (Text) -> optparse.OptionParser
- """
- Return a parser for parsing requirement lines
- """
- parser = optparse.OptionParser(add_help_option=False)
-
- option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
- for option_factory in option_factories:
- option = option_factory()
- parser.add_option(option)
-
- # By default optparse sys.exits on parsing errors. We want to wrap
- # that in our own exception.
- def parser_exit(self, msg):
- # add offending line
- msg = 'Invalid requirement: %s\n%s' % (line, msg)
- raise RequirementsFileParseError(msg)
- # NOTE: mypy disallows assigning to a method
- # https://github.com/python/mypy/issues/2427
- parser.exit = parser_exit # type: ignore
-
- return parser
-
-
-def join_lines(lines_enum):
- # type: (ReqFileLines) -> ReqFileLines
- """Joins a line ending in '\' with the previous line (except when following
- comments). The joined line takes on the index of the first line.
- """
- primary_line_number = None
- new_line = [] # type: List[Text]
- for line_number, line in lines_enum:
- if not line.endswith('\\') or COMMENT_RE.match(line):
- if COMMENT_RE.match(line):
- # this ensures comments are always matched later
- line = ' ' + line
- if new_line:
- new_line.append(line)
- yield primary_line_number, ''.join(new_line)
- new_line = []
- else:
- yield line_number, line
- else:
- if not new_line:
- primary_line_number = line_number
- new_line.append(line.strip('\\'))
-
- # last line contains \
- if new_line:
- yield primary_line_number, ''.join(new_line)
-
- # TODO: handle space after '\'.
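-
-
-# Illustrative example: a trailing backslash joins a line with the next one,
-# e.g.
-#
-#   >>> list(join_lines(iter([(1, 'foo\\'), (2, 'bar')])))
-#   [(1, 'foobar')]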
-
-
-def ignore_comments(lines_enum):
- # type: (ReqFileLines) -> ReqFileLines
- """
- Strips comments and filters out empty lines.
- """
- for line_number, line in lines_enum:
- line = COMMENT_RE.sub('', line)
- line = line.strip()
- if line:
- yield line_number, line
-
-
-def skip_regex(lines_enum, options):
- # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines
- """
- Skip lines that match '--skip-requirements-regex' pattern
-
- Note: the regex pattern is only built once
- """
- skip_regex = options.skip_requirements_regex if options else None
- if skip_regex:
- pattern = re.compile(skip_regex)
- lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
- return lines_enum
-
-
-def expand_env_variables(lines_enum):
- # type: (ReqFileLines) -> ReqFileLines
- """Replace all environment variables that can be retrieved via `os.getenv`.
-
- The only allowed format for environment variables defined in the
- requirement file is `${MY_VARIABLE_1}` to ensure two things:
-
- 1. Strings that contain a `$` aren't accidentally (partially) expanded.
- 2. Ensure consistency across platforms for requirement files.
-
- These points are the result of a discussion on the `github pull
- request #3514 <https://github.com/pypa/pip/pull/3514>`_.
-
- Valid characters in variable names follow the `POSIX standard
- <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
- to uppercase letters, digits and the `_` (underscore).
- """
- for line_number, line in lines_enum:
- for env_var, var_name in ENV_VAR_RE.findall(line):
- value = os.getenv(var_name)
- if not value:
- continue
-
- line = line.replace(env_var, value)
-
- yield line_number, line
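-
-
-# For illustration (INDEX_TOKEN is a hypothetical variable): given the line
-#
-#   https://user:${INDEX_TOKEN}@example.com/simple/pkg-1.0.tar.gz
-#
-# and INDEX_TOKEN=secret in the environment, the line yielded above becomes
-#
-#   https://user:secret@example.com/simple/pkg-1.0.tar.gz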
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py
deleted file mode 100644
index a4834b0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py
+++ /dev/null
@@ -1,1021 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os
-import shutil
-import sys
-import sysconfig
-import zipfile
-from distutils.util import change_root
-
-from pip._vendor import pkg_resources, six
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import Version
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.pep517.wrappers import Pep517HookCaller
-
-from pip._internal import wheel
-from pip._internal.build_env import NoOpBuildEnvironment
-from pip._internal.exceptions import InstallationError
-from pip._internal.locations import (
- PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
-)
-from pip._internal.models.link import Link
-from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
-from pip._internal.req.req_uninstall import UninstallPathSet
-from pip._internal.utils.compat import native_str
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
- display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
- get_installed_version, redact_password_from_url, rmtree,
-)
-from pip._internal.utils.packaging import get_metadata
-from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-from pip._internal.vcs import vcs
-from pip._internal.wheel import move_wheel_files
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Optional, Iterable, List, Union, Any, Text, Sequence, Dict
- )
- from pip._internal.build_env import BuildEnvironment # noqa: F401
- from pip._internal.cache import WheelCache # noqa: F401
- from pip._internal.index import PackageFinder # noqa: F401
- from pip._vendor.pkg_resources import Distribution # noqa: F401
- from pip._vendor.packaging.specifiers import SpecifierSet # noqa: F401
- from pip._vendor.packaging.markers import Marker # noqa: F401
-
-
-logger = logging.getLogger(__name__)
-
-
-class InstallRequirement(object):
- """
-    Represents something that may be installed later on; it may carry
-    information about where to fetch the relevant distribution and contains
-    the logic for installing that requirement.
- """
-
- def __init__(
- self,
- req, # type: Optional[Requirement]
- comes_from, # type: Optional[Union[str, InstallRequirement]]
- source_dir=None, # type: Optional[str]
- editable=False, # type: bool
- link=None, # type: Optional[Link]
- update=True, # type: bool
- markers=None, # type: Optional[Marker]
- use_pep517=None, # type: Optional[bool]
- isolated=False, # type: bool
- options=None, # type: Optional[Dict[str, Any]]
- wheel_cache=None, # type: Optional[WheelCache]
- constraint=False, # type: bool
- extras=() # type: Iterable[str]
- ):
- # type: (...) -> None
- assert req is None or isinstance(req, Requirement), req
- self.req = req
- self.comes_from = comes_from
- self.constraint = constraint
- if source_dir is not None:
- self.source_dir = os.path.normpath(os.path.abspath(source_dir))
- else:
- self.source_dir = None
- self.editable = editable
-
- self._wheel_cache = wheel_cache
- if link is None and req and req.url:
- # PEP 508 URL requirement
- link = Link(req.url)
- self.link = self.original_link = link
-
- if extras:
- self.extras = extras
- elif req:
- self.extras = {
- pkg_resources.safe_extra(extra) for extra in req.extras
- }
- else:
- self.extras = set()
- if markers is None and req:
- markers = req.marker
- self.markers = markers
-
- self._egg_info_path = None # type: Optional[str]
- # This holds the pkg_resources.Distribution object if this requirement
- # is already available:
- self.satisfied_by = None
-        # This holds the pkg_resources.Distribution object if this requirement
- # conflicts with another installed distribution:
- self.conflicts_with = None
- # Temporary build location
- self._temp_build_dir = TempDirectory(kind="req-build")
- # Used to store the global directory where the _temp_build_dir should
- # have been created. Cf _correct_build_location method.
- self._ideal_build_dir = None # type: Optional[str]
- # True if the editable should be updated:
- self.update = update
- # Set to True after successful installation
- self.install_succeeded = None # type: Optional[bool]
- # UninstallPathSet of uninstalled distribution (for possible rollback)
- self.uninstalled_pathset = None
- self.options = options if options else {}
- # Set to True after successful preparation of this requirement
- self.prepared = False
- self.is_direct = False
-
- self.isolated = isolated
- self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment
-
-        # For PEP 517, the directory where the requested project metadata
-        # is stored. We need to pass this to build_wheel, so the backend
- # can ensure that the wheel matches the metadata (see the PEP for
- # details).
- self.metadata_directory = None # type: Optional[str]
-
- # The static build requirements (from pyproject.toml)
- self.pyproject_requires = None # type: Optional[List[str]]
-
- # Build requirements that we will check are available
- self.requirements_to_check = [] # type: List[str]
-
- # The PEP 517 backend we should use to build the project
- self.pep517_backend = None # type: Optional[Pep517HookCaller]
-
- # Are we using PEP 517 for this requirement?
- # After pyproject.toml has been loaded, the only valid values are True
- # and False. Before loading, None is valid (meaning "use the default").
- # Setting an explicit value before loading pyproject.toml is supported,
- # but after loading this flag should be treated as read only.
- self.use_pep517 = use_pep517
-
- def __str__(self):
- if self.req:
- s = str(self.req)
- if self.link:
- s += ' from %s' % redact_password_from_url(self.link.url)
- elif self.link:
- s = redact_password_from_url(self.link.url)
- else:
- s = '<InstallRequirement>'
- if self.satisfied_by is not None:
- s += ' in %s' % display_path(self.satisfied_by.location)
- if self.comes_from:
- if isinstance(self.comes_from, six.string_types):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += ' (from %s)' % comes_from
- return s
-
- def __repr__(self):
- return '<%s object: %s editable=%r>' % (
- self.__class__.__name__, str(self), self.editable)
-
- def populate_link(self, finder, upgrade, require_hashes):
- # type: (PackageFinder, bool, bool) -> None
- """Ensure that if a link can be found for this, that it is found.
-
-        Note that self.link may still be None - if upgrade is False and the
- requirement is already installed.
-
- If require_hashes is True, don't use the wheel cache, because cached
- wheels, always built locally, have different hashes than the files
- downloaded from the index server and thus throw false hash mismatches.
-        Furthermore, cached wheels at present have non-deterministic contents due
- to file modification times.
- """
- if self.link is None:
- self.link = finder.find_requirement(self, upgrade)
- if self._wheel_cache is not None and not require_hashes:
- old_link = self.link
- self.link = self._wheel_cache.get(self.link, self.name)
- if old_link != self.link:
- logger.debug('Using cached wheel link: %s', self.link)
-
- # Things that are valid for all kinds of requirements?
- @property
- def name(self):
- # type: () -> Optional[str]
- if self.req is None:
- return None
- return native_str(pkg_resources.safe_name(self.req.name))
-
- @property
- def specifier(self):
- # type: () -> SpecifierSet
- return self.req.specifier
-
- @property
- def is_pinned(self):
- # type: () -> bool
- """Return whether I am pinned to an exact version.
-
- For example, some-package==1.2 is pinned; some-package>1.2 is not.
- """
- specifiers = self.specifier
- return (len(specifiers) == 1 and
- next(iter(specifiers)).operator in {'==', '==='})
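
The same check can be tried against the standalone packaging library (the un-vendored counterpart of pip._vendor.packaging); a sketch:

    from packaging.requirements import Requirement

    def is_pinned(req):
        # Pinned means exactly one specifier whose operator is == or ===.
        specs = req.specifier
        return len(specs) == 1 and next(iter(specs)).operator in {'==', '==='}

    print(is_pinned(Requirement('some-package==1.2')))   # True
    print(is_pinned(Requirement('some-package>1.2')))    # False
    print(is_pinned(Requirement('some-package>=1,<2')))  # False: two specifiers
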
-
- @property
- def installed_version(self):
- return get_installed_version(self.name)
-
- def match_markers(self, extras_requested=None):
- # type: (Optional[Iterable[str]]) -> bool
- if not extras_requested:
- # Provide an extra to safely evaluate the markers
- # without matching any extra
- extras_requested = ('',)
- if self.markers is not None:
- return any(
- self.markers.evaluate({'extra': extra})
- for extra in extras_requested)
- else:
- return True
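
The ('',) fallback exists so a marker such as extra == "socks" evaluates safely to False when no extras were requested; for example, with the standalone packaging library:

    from packaging.markers import Marker

    marker = Marker('extra == "socks"')
    print(marker.evaluate({'extra': ''}))       # False: no extra requested
    print(marker.evaluate({'extra': 'socks'}))  # True: the extra was asked for
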
-
- @property
- def has_hash_options(self):
- # type: () -> bool
- """Return whether any known-good hashes are specified as options.
-
- These activate --require-hashes mode; hashes specified as part of a
- URL do not.
-
- """
- return bool(self.options.get('hashes', {}))
-
- def hashes(self, trust_internet=True):
- # type: (bool) -> Hashes
- """Return a hash-comparer that considers my option- and URL-based
- hashes to be known-good.
-
- Hashes in URLs--ones embedded in the requirements file, not ones
- downloaded from an index server--are almost peers with ones from
- flags. They satisfy --require-hashes (whether it was implicitly or
- explicitly activated) but do not activate it. md5 and sha224 are not
- allowed in flags, which should nudge people toward good algos. We
- always OR all hashes together, even ones from URLs.
-
- :param trust_internet: Whether to trust URL-based (#md5=...) hashes
- downloaded from the internet, as by populate_link()
-
- """
- good_hashes = self.options.get('hashes', {}).copy()
- link = self.link if trust_internet else self.original_link
- if link and link.hash:
- good_hashes.setdefault(link.hash_name, []).append(link.hash)
- return Hashes(good_hashes)
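
How hashes() merges the two sources can be pictured with plain dictionaries; the digests below are shortened, hypothetical values:

    # Hashes supplied via --hash options in a requirements file:
    option_hashes = {'sha256': ['abc123']}
    # Hash embedded in the link URL as a #sha256=... fragment:
    link_hash_name, link_hash = 'sha256', 'def456'

    # Copy the inner lists so appending the URL hash does not mutate
    # option_hashes through a shared list.
    good_hashes = {name: list(digests) for name, digests in option_hashes.items()}
    good_hashes.setdefault(link_hash_name, []).append(link_hash)
    print(good_hashes)  # {'sha256': ['abc123', 'def456']} -- either digest matches
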
-
- def from_path(self):
- # type: () -> Optional[str]
- """Format a nice indicator to show where this "comes from"
- """
- if self.req is None:
- return None
- s = str(self.req)
- if self.comes_from:
- if isinstance(self.comes_from, six.string_types):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += '->' + comes_from
- return s
-
- def build_location(self, build_dir):
- # type: (str) -> Optional[str]
- assert build_dir is not None
- if self._temp_build_dir.path is not None:
- return self._temp_build_dir.path
- if self.req is None:
- # for requirement via a path to a directory: the name of the
- # package is not available yet so we create a temp directory
-            # Once run_egg_info has run, we'll be able
- # to fix it via _correct_build_location
- # Some systems have /tmp as a symlink which confuses custom
- # builds (such as numpy). Thus, we ensure that the real path
- # is returned.
- self._temp_build_dir.create()
- self._ideal_build_dir = build_dir
-
- return self._temp_build_dir.path
- if self.editable:
- name = self.name.lower()
- else:
- name = self.name
- # FIXME: Is there a better place to create the build_dir? (hg and bzr
- # need this)
- if not os.path.exists(build_dir):
- logger.debug('Creating directory %s', build_dir)
- _make_build_dir(build_dir)
- return os.path.join(build_dir, name)
-
- def _correct_build_location(self):
- # type: () -> None
- """Move self._temp_build_dir to self._ideal_build_dir/self.req.name
-
- For some requirements (e.g. a path to a directory), the name of the
- package is not available until we run egg_info, so the build_location
- will return a temporary directory and store the _ideal_build_dir.
-
- This is only called by self.run_egg_info to fix the temporary build
- directory.
- """
- if self.source_dir is not None:
- return
- assert self.req is not None
- assert self._temp_build_dir.path
- assert (self._ideal_build_dir is not None and
- self._ideal_build_dir.path) # type: ignore
- old_location = self._temp_build_dir.path
- self._temp_build_dir.path = None
-
- new_location = self.build_location(self._ideal_build_dir)
- if os.path.exists(new_location):
- raise InstallationError(
- 'A package already exists in %s; please remove it to continue'
- % display_path(new_location))
- logger.debug(
- 'Moving package %s from %s to new location %s',
- self, display_path(old_location), display_path(new_location),
- )
- shutil.move(old_location, new_location)
- self._temp_build_dir.path = new_location
- self._ideal_build_dir = None
- self.source_dir = os.path.normpath(os.path.abspath(new_location))
- self._egg_info_path = None
-
- # Correct the metadata directory, if it exists
- if self.metadata_directory:
- old_meta = self.metadata_directory
- rel = os.path.relpath(old_meta, start=old_location)
- new_meta = os.path.join(new_location, rel)
- new_meta = os.path.normpath(os.path.abspath(new_meta))
- self.metadata_directory = new_meta
-
- def remove_temporary_source(self):
- # type: () -> None
- """Remove the source files from this requirement, if they are marked
- for deletion"""
- if self.source_dir and os.path.exists(
- os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
- logger.debug('Removing source in %s', self.source_dir)
- rmtree(self.source_dir)
- self.source_dir = None
- self._temp_build_dir.cleanup()
- self.build_env.cleanup()
-
- def check_if_exists(self, use_user_site):
- # type: (bool) -> bool
- """Find an installed distribution that satisfies or conflicts
- with this requirement, and set self.satisfied_by or
- self.conflicts_with appropriately.
- """
- if self.req is None:
- return False
- try:
- # get_distribution() will resolve the entire list of requirements
- # anyway, and we've already determined that we need the requirement
- # in question, so strip the marker so that we don't try to
- # evaluate it.
- no_marker = Requirement(str(self.req))
- no_marker.marker = None
- self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
- if self.editable and self.satisfied_by:
- self.conflicts_with = self.satisfied_by
- # when installing editables, nothing pre-existing should ever
- # satisfy
- self.satisfied_by = None
- return True
- except pkg_resources.DistributionNotFound:
- return False
- except pkg_resources.VersionConflict:
- existing_dist = pkg_resources.get_distribution(
- self.req.name
- )
- if use_user_site:
- if dist_in_usersite(existing_dist):
- self.conflicts_with = existing_dist
- elif (running_under_virtualenv() and
- dist_in_site_packages(existing_dist)):
- raise InstallationError(
- "Will not install to the user site because it will "
- "lack sys.path precedence to %s in %s" %
- (existing_dist.project_name, existing_dist.location)
- )
- else:
- self.conflicts_with = existing_dist
- return True
-
- # Things valid for wheels
- @property
- def is_wheel(self):
- # type: () -> bool
- if not self.link:
- return False
- return self.link.is_wheel
-
- def move_wheel_files(
- self,
- wheeldir, # type: str
- root=None, # type: Optional[str]
- home=None, # type: Optional[str]
- prefix=None, # type: Optional[str]
- warn_script_location=True, # type: bool
- use_user_site=False, # type: bool
- pycompile=True # type: bool
- ):
- # type: (...) -> None
- move_wheel_files(
- self.name, self.req, wheeldir,
- user=use_user_site,
- home=home,
- root=root,
- prefix=prefix,
- pycompile=pycompile,
- isolated=self.isolated,
- warn_script_location=warn_script_location,
- )
-
- # Things valid for sdists
- @property
- def setup_py_dir(self):
- # type: () -> str
- return os.path.join(
- self.source_dir,
- self.link and self.link.subdirectory_fragment or '')
-
- @property
- def setup_py(self):
- # type: () -> str
- assert self.source_dir, "No source dir for %s" % self
-
- setup_py = os.path.join(self.setup_py_dir, 'setup.py')
-
- # Python2 __file__ should not be unicode
- if six.PY2 and isinstance(setup_py, six.text_type):
- setup_py = setup_py.encode(sys.getfilesystemencoding())
-
- return setup_py
-
- @property
- def pyproject_toml(self):
- # type: () -> str
- assert self.source_dir, "No source dir for %s" % self
-
- return make_pyproject_path(self.setup_py_dir)
-
- def load_pyproject_toml(self):
- # type: () -> None
- """Load the pyproject.toml file.
-
- After calling this routine, all of the attributes related to PEP 517
- processing for this requirement have been set. In particular, the
- use_pep517 attribute can be used to determine whether we should
- follow the PEP 517 or legacy (setup.py) code path.
- """
- pep517_data = load_pyproject_toml(
- self.use_pep517,
- self.pyproject_toml,
- self.setup_py,
- str(self)
- )
-
- if pep517_data is None:
- self.use_pep517 = False
- else:
- self.use_pep517 = True
- requires, backend, check = pep517_data
- self.requirements_to_check = check
- self.pyproject_requires = requires
- self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend)
-
- # Use a custom function to call subprocesses
- self.spin_message = ""
-
- def runner(cmd, cwd=None, extra_environ=None):
- with open_spinner(self.spin_message) as spinner:
- call_subprocess(
- cmd,
- cwd=cwd,
- extra_environ=extra_environ,
- show_stdout=False,
- spinner=spinner
- )
- self.spin_message = ""
-
- self.pep517_backend._subprocess_runner = runner
-
- def prepare_metadata(self):
- # type: () -> None
- """Ensure that project metadata is available.
-
- Under PEP 517, call the backend hook to prepare the metadata.
- Under legacy processing, call setup.py egg-info.
- """
- assert self.source_dir
-
- with indent_log():
- if self.use_pep517:
- self.prepare_pep517_metadata()
- else:
- self.run_egg_info()
-
- if not self.req:
- if isinstance(parse_version(self.metadata["Version"]), Version):
- op = "=="
- else:
- op = "==="
- self.req = Requirement(
- "".join([
- self.metadata["Name"],
- op,
- self.metadata["Version"],
- ])
- )
- self._correct_build_location()
- else:
- metadata_name = canonicalize_name(self.metadata["Name"])
- if canonicalize_name(self.req.name) != metadata_name:
- logger.warning(
- 'Generating metadata for package %s '
- 'produced metadata for project name %s. Fix your '
- '#egg=%s fragments.',
- self.name, metadata_name, self.name
- )
- self.req = Requirement(metadata_name)
-
- def prepare_pep517_metadata(self):
- # type: () -> None
- assert self.pep517_backend is not None
-
- metadata_dir = os.path.join(
- self.setup_py_dir,
- 'pip-wheel-metadata'
- )
- ensure_dir(metadata_dir)
-
- with self.build_env:
- # Note that Pep517HookCaller implements a fallback for
- # prepare_metadata_for_build_wheel, so we don't have to
- # consider the possibility that this hook doesn't exist.
- backend = self.pep517_backend
- self.spin_message = "Preparing wheel metadata"
- distinfo_dir = backend.prepare_metadata_for_build_wheel(
- metadata_dir
- )
-
- self.metadata_directory = os.path.join(metadata_dir, distinfo_dir)
-
- def run_egg_info(self):
- # type: () -> None
- if self.name:
- logger.debug(
- 'Running setup.py (path:%s) egg_info for package %s',
- self.setup_py, self.name,
- )
- else:
- logger.debug(
- 'Running setup.py (path:%s) egg_info for package from %s',
- self.setup_py, self.link,
- )
- script = SETUPTOOLS_SHIM % self.setup_py
- base_cmd = [sys.executable, '-c', script]
- if self.isolated:
- base_cmd += ["--no-user-cfg"]
- egg_info_cmd = base_cmd + ['egg_info']
- # We can't put the .egg-info files at the root, because then the
- # source code will be mistaken for an installed egg, causing
- # problems
- if self.editable:
- egg_base_option = [] # type: List[str]
- else:
- egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
- ensure_dir(egg_info_dir)
- egg_base_option = ['--egg-base', 'pip-egg-info']
- with self.build_env:
- call_subprocess(
- egg_info_cmd + egg_base_option,
- cwd=self.setup_py_dir,
- show_stdout=False,
- command_desc='python setup.py egg_info')
-
- @property
- def egg_info_path(self):
- # type: () -> str
- if self._egg_info_path is None:
- if self.editable:
- base = self.source_dir
- else:
- base = os.path.join(self.setup_py_dir, 'pip-egg-info')
- filenames = os.listdir(base)
- if self.editable:
- filenames = []
- for root, dirs, files in os.walk(base):
- for dir in vcs.dirnames:
- if dir in dirs:
- dirs.remove(dir)
- # Iterate over a copy of ``dirs``, since mutating
- # a list while iterating over it can cause trouble.
- # (See https://github.com/pypa/pip/pull/462.)
- for dir in list(dirs):
- # Don't search in anything that looks like a virtualenv
- # environment
- if (
- os.path.lexists(
- os.path.join(root, dir, 'bin', 'python')
- ) or
- os.path.exists(
- os.path.join(
- root, dir, 'Scripts', 'Python.exe'
- )
- )):
- dirs.remove(dir)
- # Also don't search through tests
- elif dir == 'test' or dir == 'tests':
- dirs.remove(dir)
- filenames.extend([os.path.join(root, dir)
- for dir in dirs])
- filenames = [f for f in filenames if f.endswith('.egg-info')]
-
- if not filenames:
- raise InstallationError(
- "Files/directories not found in %s" % base
- )
-            # If we have more than one match, we pick the top-level one.
-            # This can easily be the case if there is a dist folder which
-            # contains an extracted tarball for testing purposes.
- if len(filenames) > 1:
- filenames.sort(
- key=lambda x: x.count(os.path.sep) +
- (os.path.altsep and x.count(os.path.altsep) or 0)
- )
- self._egg_info_path = os.path.join(base, filenames[0])
- return self._egg_info_path
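
The sort key counts path separators so that, among multiple matches, the shallowest .egg-info wins; roughly:

    import os

    filenames = ['dist/pkg-1.0/pkg.egg-info', 'pkg.egg-info']
    filenames.sort(key=lambda x: x.count(os.path.sep) +
                   (os.path.altsep and x.count(os.path.altsep) or 0))
    print(filenames[0])  # pkg.egg-info -- fewest separators, i.e. topmost
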
-
- @property
- def metadata(self):
- if not hasattr(self, '_metadata'):
- self._metadata = get_metadata(self.get_dist())
-
- return self._metadata
-
- def get_dist(self):
- # type: () -> Distribution
- """Return a pkg_resources.Distribution for this requirement"""
- if self.metadata_directory:
- base_dir, distinfo = os.path.split(self.metadata_directory)
- metadata = pkg_resources.PathMetadata(
- base_dir, self.metadata_directory
- )
- dist_name = os.path.splitext(distinfo)[0]
- typ = pkg_resources.DistInfoDistribution
- else:
- egg_info = self.egg_info_path.rstrip(os.path.sep)
- base_dir = os.path.dirname(egg_info)
- metadata = pkg_resources.PathMetadata(base_dir, egg_info)
- dist_name = os.path.splitext(os.path.basename(egg_info))[0]
- # https://github.com/python/mypy/issues/1174
- typ = pkg_resources.Distribution # type: ignore
-
- return typ(
- base_dir,
- project_name=dist_name,
- metadata=metadata,
- )
-
- def assert_source_matches_version(self):
- # type: () -> None
- assert self.source_dir
- version = self.metadata['version']
- if self.req.specifier and version not in self.req.specifier:
- logger.warning(
- 'Requested %s, but installing version %s',
- self,
- version,
- )
- else:
- logger.debug(
- 'Source in %s has version %s, which satisfies requirement %s',
- display_path(self.source_dir),
- version,
- self,
- )
-
- # For both source distributions and editables
- def ensure_has_source_dir(self, parent_dir):
- # type: (str) -> str
- """Ensure that a source_dir is set.
-
- This will create a temporary build dir if the name of the requirement
- isn't known yet.
-
- :param parent_dir: The ideal pip parent_dir for the source_dir.
- Generally src_dir for editables and build_dir for sdists.
- :return: self.source_dir
- """
- if self.source_dir is None:
- self.source_dir = self.build_location(parent_dir)
- return self.source_dir
-
- # For editable installations
- def install_editable(
- self,
- install_options, # type: List[str]
- global_options=(), # type: Sequence[str]
- prefix=None # type: Optional[str]
- ):
- # type: (...) -> None
- logger.info('Running setup.py develop for %s', self.name)
-
- if self.isolated:
- global_options = list(global_options) + ["--no-user-cfg"]
-
- if prefix:
- prefix_param = ['--prefix={}'.format(prefix)]
- install_options = list(install_options) + prefix_param
-
- with indent_log():
- # FIXME: should we do --install-headers here too?
- with self.build_env:
- call_subprocess(
- [
- sys.executable,
- '-c',
- SETUPTOOLS_SHIM % self.setup_py
- ] +
- list(global_options) +
- ['develop', '--no-deps'] +
- list(install_options),
-
- cwd=self.setup_py_dir,
- show_stdout=False,
- )
-
- self.install_succeeded = True
-
- def update_editable(self, obtain=True):
- # type: (bool) -> None
- if not self.link:
- logger.debug(
- "Cannot update repository at %s; repository location is "
- "unknown",
- self.source_dir,
- )
- return
- assert self.editable
- assert self.source_dir
- if self.link.scheme == 'file':
- # Static paths don't get updated
- return
- assert '+' in self.link.url, "bad url: %r" % self.link.url
- if not self.update:
- return
- vc_type, url = self.link.url.split('+', 1)
- backend = vcs.get_backend(vc_type)
- if backend:
- vcs_backend = backend(self.link.url)
- if obtain:
- vcs_backend.obtain(self.source_dir)
- else:
- vcs_backend.export(self.source_dir)
- else:
- assert 0, (
- 'Unexpected version control type (in %s): %s'
- % (self.link, vc_type))
-
- # Top-level Actions
- def uninstall(self, auto_confirm=False, verbose=False,
- use_user_site=False):
- # type: (bool, bool, bool) -> Optional[UninstallPathSet]
- """
- Uninstall the distribution currently satisfying this requirement.
-
- Prompts before removing or modifying files unless
- ``auto_confirm`` is True.
-
- Refuses to delete or modify files outside of ``sys.prefix`` -
- thus uninstallation within a virtual environment can only
- modify that virtual environment, even if the virtualenv is
- linked to global site-packages.
-
- """
- if not self.check_if_exists(use_user_site):
- logger.warning("Skipping %s as it is not installed.", self.name)
- return None
- dist = self.satisfied_by or self.conflicts_with
-
- uninstalled_pathset = UninstallPathSet.from_dist(dist)
- uninstalled_pathset.remove(auto_confirm, verbose)
- return uninstalled_pathset
-
- def _clean_zip_name(self, name, prefix): # only used by archive.
- assert name.startswith(prefix + os.path.sep), (
- "name %r doesn't start with prefix %r" % (name, prefix)
- )
- name = name[len(prefix) + 1:]
- name = name.replace(os.path.sep, '/')
- return name
-
- def _get_archive_name(self, path, parentdir, rootdir):
- # type: (str, str, str) -> str
- path = os.path.join(parentdir, path)
- name = self._clean_zip_name(path, rootdir)
- return self.name + '/' + name
-
- # TODO: Investigate if this should be kept in InstallRequirement
- # Seems to be used only when VCS + downloads
- def archive(self, build_dir):
- # type: (str) -> None
- assert self.source_dir
- create_archive = True
- archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
- archive_path = os.path.join(build_dir, archive_name)
- if os.path.exists(archive_path):
- response = ask_path_exists(
- 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
- display_path(archive_path), ('i', 'w', 'b', 'a'))
- if response == 'i':
- create_archive = False
- elif response == 'w':
- logger.warning('Deleting %s', display_path(archive_path))
- os.remove(archive_path)
- elif response == 'b':
- dest_file = backup_dir(archive_path)
- logger.warning(
- 'Backing up %s to %s',
- display_path(archive_path),
- display_path(dest_file),
- )
- shutil.move(archive_path, dest_file)
- elif response == 'a':
- sys.exit(-1)
- if create_archive:
- zip = zipfile.ZipFile(
- archive_path, 'w', zipfile.ZIP_DEFLATED,
- allowZip64=True
- )
- dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
- for dirpath, dirnames, filenames in os.walk(dir):
- if 'pip-egg-info' in dirnames:
- dirnames.remove('pip-egg-info')
- for dirname in dirnames:
- dir_arcname = self._get_archive_name(dirname,
- parentdir=dirpath,
- rootdir=dir)
- zipdir = zipfile.ZipInfo(dir_arcname + '/')
- zipdir.external_attr = 0x1ED << 16 # 0o755
- zip.writestr(zipdir, '')
- for filename in filenames:
- if filename == PIP_DELETE_MARKER_FILENAME:
- continue
- file_arcname = self._get_archive_name(filename,
- parentdir=dirpath,
- rootdir=dir)
- filename = os.path.join(dirpath, filename)
- zip.write(filename, file_arcname)
- zip.close()
- logger.info('Saved %s', display_path(archive_path))
-
- def install(
- self,
- install_options, # type: List[str]
- global_options=None, # type: Optional[Sequence[str]]
- root=None, # type: Optional[str]
- home=None, # type: Optional[str]
- prefix=None, # type: Optional[str]
- warn_script_location=True, # type: bool
- use_user_site=False, # type: bool
- pycompile=True # type: bool
- ):
- # type: (...) -> None
- global_options = global_options if global_options is not None else []
- if self.editable:
- self.install_editable(
- install_options, global_options, prefix=prefix,
- )
- return
- if self.is_wheel:
- version = wheel.wheel_version(self.source_dir)
- wheel.check_compatibility(version, self.name)
-
- self.move_wheel_files(
- self.source_dir, root=root, prefix=prefix, home=home,
- warn_script_location=warn_script_location,
- use_user_site=use_user_site, pycompile=pycompile,
- )
- self.install_succeeded = True
- return
-
- # Extend the list of global and install options passed on to
- # the setup.py call with the ones from the requirements file.
- # Options specified in requirements file override those
- # specified on the command line, since the last option given
- # to setup.py is the one that is used.
- global_options = list(global_options) + \
- self.options.get('global_options', [])
- install_options = list(install_options) + \
- self.options.get('install_options', [])
-
- if self.isolated:
- # https://github.com/python/mypy/issues/1174
- global_options = global_options + ["--no-user-cfg"] # type: ignore
-
- with TempDirectory(kind="record") as temp_dir:
- record_filename = os.path.join(temp_dir.path, 'install-record.txt')
- install_args = self.get_install_args(
- global_options, record_filename, root, prefix, pycompile,
- )
- msg = 'Running setup.py install for %s' % (self.name,)
- with open_spinner(msg) as spinner:
- with indent_log():
- with self.build_env:
- call_subprocess(
- install_args + install_options,
- cwd=self.setup_py_dir,
- show_stdout=False,
- spinner=spinner,
- )
-
- if not os.path.exists(record_filename):
- logger.debug('Record file %s not found', record_filename)
- return
- self.install_succeeded = True
-
- def prepend_root(path):
- if root is None or not os.path.isabs(path):
- return path
- else:
- return change_root(root, path)
-
- with open(record_filename) as f:
- for line in f:
- directory = os.path.dirname(line)
- if directory.endswith('.egg-info'):
- egg_info_dir = prepend_root(directory)
- break
- else:
- logger.warning(
- 'Could not find .egg-info directory in install record'
- ' for %s',
- self,
- )
- # FIXME: put the record somewhere
- # FIXME: should this be an error?
- return
- new_lines = []
- with open(record_filename) as f:
- for line in f:
- filename = line.strip()
- if os.path.isdir(filename):
- filename += os.path.sep
- new_lines.append(
- os.path.relpath(prepend_root(filename), egg_info_dir)
- )
- new_lines.sort()
- ensure_dir(egg_info_dir)
- inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
- with open(inst_files_path, 'w') as f:
- f.write('\n'.join(new_lines) + '\n')
-
- def get_install_args(
- self,
- global_options, # type: Sequence[str]
- record_filename, # type: str
- root, # type: Optional[str]
- prefix, # type: Optional[str]
- pycompile # type: bool
- ):
- # type: (...) -> List[str]
- install_args = [sys.executable, "-u"]
- install_args.append('-c')
- install_args.append(SETUPTOOLS_SHIM % self.setup_py)
- install_args += list(global_options) + \
- ['install', '--record', record_filename]
- install_args += ['--single-version-externally-managed']
-
- if root is not None:
- install_args += ['--root', root]
- if prefix is not None:
- install_args += ['--prefix', prefix]
-
- if pycompile:
- install_args += ["--compile"]
- else:
- install_args += ["--no-compile"]
-
- if running_under_virtualenv():
- py_ver_str = 'python' + sysconfig.get_python_version()
- install_args += ['--install-headers',
- os.path.join(sys.prefix, 'include', 'site',
- py_ver_str, self.name)]
-
- return install_args
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py
deleted file mode 100644
index d1410e9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py
+++ /dev/null
@@ -1,197 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-from collections import OrderedDict
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, List, Tuple, Dict, Iterable # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
-
-
-logger = logging.getLogger(__name__)
-
-
-class RequirementSet(object):
-
- def __init__(self, require_hashes=False, check_supported_wheels=True):
- # type: (bool, bool) -> None
- """Create a RequirementSet.
- """
-
- self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501
- self.require_hashes = require_hashes
- self.check_supported_wheels = check_supported_wheels
-
- # Mapping of alias: real_name
- self.requirement_aliases = {} # type: Dict[str, str]
- self.unnamed_requirements = [] # type: List[InstallRequirement]
- self.successfully_downloaded = [] # type: List[InstallRequirement]
- self.reqs_to_cleanup = [] # type: List[InstallRequirement]
-
- def __str__(self):
- reqs = [req for req in self.requirements.values()
- if not req.comes_from]
- reqs.sort(key=lambda req: req.name.lower())
- return ' '.join([str(req.req) for req in reqs])
-
- def __repr__(self):
- reqs = [req for req in self.requirements.values()]
- reqs.sort(key=lambda req: req.name.lower())
- reqs_str = ', '.join([str(req.req) for req in reqs])
- return ('<%s object; %d requirement(s): %s>'
- % (self.__class__.__name__, len(reqs), reqs_str))
-
- def add_requirement(
- self,
- install_req, # type: InstallRequirement
- parent_req_name=None, # type: Optional[str]
- extras_requested=None # type: Optional[Iterable[str]]
- ):
- # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]] # noqa: E501
- """Add install_req as a requirement to install.
-
- :param parent_req_name: The name of the requirement that needed this
- added. The name is used because when multiple unnamed requirements
- resolve to the same name, we could otherwise end up with dependency
- links that point outside the Requirements set. parent_req must
- already be added. Note that None implies that this is a user
- supplied requirement, vs an inferred one.
- :param extras_requested: an iterable of extras used to evaluate the
- environment markers.
- :return: Additional requirements to scan. That is either [] if
- the requirement is not applicable, or [install_req] if the
- requirement is applicable and has just been added.
- """
- name = install_req.name
-
- # If the markers do not match, ignore this requirement.
- if not install_req.match_markers(extras_requested):
- logger.info(
- "Ignoring %s: markers '%s' don't match your environment",
- name, install_req.markers,
- )
- return [], None
-
- # If the wheel is not supported, raise an error.
- # Should check this after filtering out based on environment markers to
- # allow specifying different wheels based on the environment/OS, in a
- # single requirements file.
- if install_req.link and install_req.link.is_wheel:
- wheel = Wheel(install_req.link.filename)
- if self.check_supported_wheels and not wheel.supported():
- raise InstallationError(
- "%s is not a supported wheel on this platform." %
- wheel.filename
- )
-
- # This next bit is really a sanity check.
- assert install_req.is_direct == (parent_req_name is None), (
- "a direct req shouldn't have a parent and also, "
- "a non direct req should have a parent"
- )
-
- # Unnamed requirements are scanned again and the requirement won't be
- # added as a dependency until after scanning.
- if not name:
- # url or path requirement w/o an egg fragment
- self.unnamed_requirements.append(install_req)
- return [install_req], None
-
- try:
- existing_req = self.get_requirement(name)
- except KeyError:
- existing_req = None
-
- has_conflicting_requirement = (
- parent_req_name is None and
- existing_req and
- not existing_req.constraint and
- existing_req.extras == install_req.extras and
- existing_req.req.specifier != install_req.req.specifier
- )
- if has_conflicting_requirement:
- raise InstallationError(
- "Double requirement given: %s (already in %s, name=%r)"
- % (install_req, existing_req, name)
- )
-
- # When no existing requirement exists, add the requirement as a
- # dependency and it will be scanned again after.
- if not existing_req:
- self.requirements[name] = install_req
- # FIXME: what about other normalizations? E.g., _ vs. -?
- if name.lower() != name:
- self.requirement_aliases[name.lower()] = name
-            # We'll want to rescan this requirement later
- return [install_req], install_req
-
- # Assume there's no need to scan, and that we've already
- # encountered this for scanning.
- if install_req.constraint or not existing_req.constraint:
- return [], existing_req
-
- does_not_satisfy_constraint = (
- install_req.link and
- not (
- existing_req.link and
- install_req.link.path == existing_req.link.path
- )
- )
- if does_not_satisfy_constraint:
- self.reqs_to_cleanup.append(install_req)
- raise InstallationError(
- "Could not satisfy constraints for '%s': "
- "installation from path or url cannot be "
- "constrained to a version" % name,
- )
- # If we're now installing a constraint, mark the existing
- # object for real installation.
- existing_req.constraint = False
- existing_req.extras = tuple(sorted(
- set(existing_req.extras) | set(install_req.extras)
- ))
- logger.debug(
- "Setting %s extras to: %s",
- existing_req, existing_req.extras,
- )
- # Return the existing requirement for addition to the parent and
- # scanning again.
- return [existing_req], existing_req
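
Reduced to its core, the "double requirement" check above compares extras and specifiers on two parentless requirements; a simplified sketch using the standalone packaging library:

    from packaging.requirements import Requirement

    def is_double_requirement(existing, incoming):
        # Conflict: same name and extras, but different version constraints.
        return (existing.extras == incoming.extras and
                existing.specifier != incoming.specifier)

    print(is_double_requirement(Requirement('pkg==1.0'), Requirement('pkg==2.0')))  # True
    print(is_double_requirement(Requirement('pkg==1.0'), Requirement('pkg==1.0')))  # False
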
-
- def has_requirement(self, project_name):
- # type: (str) -> bool
- name = project_name.lower()
- if (name in self.requirements and
- not self.requirements[name].constraint or
- name in self.requirement_aliases and
- not self.requirements[self.requirement_aliases[name]].constraint):
- return True
- return False
-
- @property
- def has_requirements(self):
- # type: () -> List[InstallRequirement]
- return list(req for req in self.requirements.values() if not
- req.constraint) or self.unnamed_requirements
-
- def get_requirement(self, project_name):
- # type: (str) -> InstallRequirement
- for name in project_name, project_name.lower():
- if name in self.requirements:
- return self.requirements[name]
- if name in self.requirement_aliases:
- return self.requirements[self.requirement_aliases[name]]
- raise KeyError("No project with the name %r" % project_name)
-
- def cleanup_files(self):
- # type: () -> None
- """Clean up files, remove builds."""
- logger.debug('Cleaning up...')
- with indent_log():
- for req in self.reqs_to_cleanup:
- req.remove_temporary_source()
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py
deleted file mode 100644
index 82e084a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import hashlib
-import logging
-import os
-
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Set, Iterator # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
- from pip._internal.models.link import Link # noqa: F401
-
-logger = logging.getLogger(__name__)
-
-
-class RequirementTracker(object):
-
- def __init__(self):
- # type: () -> None
- self._root = os.environ.get('PIP_REQ_TRACKER')
- if self._root is None:
- self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
- self._temp_dir.create()
- self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
- logger.debug('Created requirements tracker %r', self._root)
- else:
- self._temp_dir = None
- logger.debug('Re-using requirements tracker %r', self._root)
- self._entries = set() # type: Set[InstallRequirement]
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.cleanup()
-
- def _entry_path(self, link):
- # type: (Link) -> str
- hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
- return os.path.join(self._root, hashed)
-
- def add(self, req):
- # type: (InstallRequirement) -> None
- link = req.link
- info = str(req)
- entry_path = self._entry_path(link)
- try:
- with open(entry_path) as fp:
-                # Error: there's already a build in progress.
- raise LookupError('%s is already being built: %s'
- % (link, fp.read()))
- except IOError as e:
- if e.errno != errno.ENOENT:
- raise
- assert req not in self._entries
- with open(entry_path, 'w') as fp:
- fp.write(info)
- self._entries.add(req)
- logger.debug('Added %s to build tracker %r', req, self._root)
-
- def remove(self, req):
- # type: (InstallRequirement) -> None
- link = req.link
- self._entries.remove(req)
- os.unlink(self._entry_path(link))
- logger.debug('Removed %s from build tracker %r', req, self._root)
-
- def cleanup(self):
- # type: () -> None
- for req in set(self._entries):
- self.remove(req)
- remove = self._temp_dir is not None
- if remove:
- self._temp_dir.cleanup()
- logger.debug('%s build tracker %r',
- 'Removed' if remove else 'Cleaned',
- self._root)
-
- @contextlib.contextmanager
- def track(self, req):
- # type: (InstallRequirement) -> Iterator[None]
- self.add(req)
- yield
- self.remove(req)
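
The marker-file scheme boils down to naming a file after the SHA-224 hash of the link URL; a sketch of that mapping (the directory is illustrative):

    import hashlib
    import os

    def entry_path(root, url_without_fragment):
        # One marker file per link; its presence means a build is in progress.
        hashed = hashlib.sha224(url_without_fragment.encode()).hexdigest()
        return os.path.join(root, hashed)

    print(entry_path('/tmp/pip-req-tracker', 'https://example.com/pkg-1.0.tar.gz'))
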
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py
deleted file mode 100644
index c80959e..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py
+++ /dev/null
@@ -1,596 +0,0 @@
-from __future__ import absolute_import
-
-import csv
-import functools
-import logging
-import os
-import sys
-import sysconfig
-
-from pip._vendor import pkg_resources
-
-from pip._internal.exceptions import UninstallationError
-from pip._internal.locations import bin_py, bin_user
-from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
- normalize_path, renames, rmtree,
-)
-from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-def _script_names(dist, script_name, is_gui):
- """Create the fully qualified name of the files created by
- {console,gui}_scripts for the given ``dist``.
- Returns the list of file names
- """
- if dist_in_usersite(dist):
- bin_dir = bin_user
- else:
- bin_dir = bin_py
- exe_name = os.path.join(bin_dir, script_name)
- paths_to_remove = [exe_name]
- if WINDOWS:
- paths_to_remove.append(exe_name + '.exe')
- paths_to_remove.append(exe_name + '.exe.manifest')
- if is_gui:
- paths_to_remove.append(exe_name + '-script.pyw')
- else:
- paths_to_remove.append(exe_name + '-script.py')
- return paths_to_remove
-
-
-def _unique(fn):
- @functools.wraps(fn)
- def unique(*args, **kw):
- seen = set()
- for item in fn(*args, **kw):
- if item not in seen:
- seen.add(item)
- yield item
- return unique
-
-
-@_unique
-def uninstallation_paths(dist):
- """
- Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
-
- Yield paths to all the files in RECORD. For each .py file in RECORD, add
- the .pyc and .pyo in the same directory.
-
- UninstallPathSet.add() takes care of the __pycache__ .py[co].
- """
- r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
- for row in r:
- path = os.path.join(dist.location, row[0])
- yield path
- if path.endswith('.py'):
- dn, fn = os.path.split(path)
- base = fn[:-3]
- path = os.path.join(dn, base + '.pyc')
- yield path
- path = os.path.join(dn, base + '.pyo')
- yield path
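
The .py-to-bytecode expansion can be shown in isolation:

    import os

    def with_bytecode(path):
        # Mirrors uninstallation_paths(): every .py entry also yields its
        # sibling .pyc and .pyo paths.
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            yield os.path.join(dn, base + '.pyc')
            yield os.path.join(dn, base + '.pyo')

    print(list(with_bytecode('pkg/mod.py')))
    # ['pkg/mod.py', 'pkg/mod.pyc', 'pkg/mod.pyo']
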
-
-
-def compact(paths):
- """Compact a path set to contain the minimal number of paths
- necessary to contain all paths in the set. If /a/path/ and
- /a/path/to/a/file.txt are both in the set, leave only the
- shorter path."""
-
- sep = os.path.sep
- short_paths = set()
- for path in sorted(paths, key=len):
- should_skip = any(
- path.startswith(shortpath.rstrip("*")) and
- path[len(shortpath.rstrip("*").rstrip(sep))] == sep
- for shortpath in short_paths
- )
- if not should_skip:
- short_paths.add(path)
- return short_paths
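
Setting aside the '*' wildcard handling, the core of compact() is a shortest-prefix filter; a simplified sketch:

    import os

    def compact_simple(paths):
        # Keep the shortest paths; drop anything nested under one already kept.
        sep = os.path.sep
        kept = set()
        for path in sorted(paths, key=len):
            if not any(path.startswith(k.rstrip(sep) + sep) for k in kept):
                kept.add(path)
        return kept

    print(sorted(compact_simple({'/a/path/', '/a/path/to/a/file.txt', '/b/f.txt'})))
    # ['/a/path/', '/b/f.txt']
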
-
-
-def compress_for_rename(paths):
- """Returns a set containing the paths that need to be renamed.
-
- This set may include directories when the original sequence of paths
- included every file on disk.
- """
- case_map = dict((os.path.normcase(p), p) for p in paths)
- remaining = set(case_map)
- unchecked = sorted(set(os.path.split(p)[0]
- for p in case_map.values()), key=len)
- wildcards = set()
-
- def norm_join(*a):
- return os.path.normcase(os.path.join(*a))
-
- for root in unchecked:
- if any(os.path.normcase(root).startswith(w)
- for w in wildcards):
- # This directory has already been handled.
- continue
-
- all_files = set()
- all_subdirs = set()
- for dirname, subdirs, files in os.walk(root):
- all_subdirs.update(norm_join(root, dirname, d)
- for d in subdirs)
- all_files.update(norm_join(root, dirname, f)
- for f in files)
- # If all the files we found are in our remaining set of files to
- # remove, then remove them from the latter set and add a wildcard
- # for the directory.
- if not (all_files - remaining):
- remaining.difference_update(all_files)
- wildcards.add(root + os.sep)
-
- return set(map(case_map.__getitem__, remaining)) | wildcards
-
-
-def compress_for_output_listing(paths):
- """Returns a tuple of 2 sets of which paths to display to user
-
- The first set contains paths that would be deleted. Files of a package
- are not added and the top-level directory of the package has a '*' added
- at the end - to signify that all it's contents are removed.
-
- The second set contains files that would have been skipped in the above
- folders.
- """
-
- will_remove = list(paths)
- will_skip = set()
-
- # Determine folders and files
- folders = set()
- files = set()
- for path in will_remove:
- if path.endswith(".pyc"):
- continue
- if path.endswith("__init__.py") or ".dist-info" in path:
- folders.add(os.path.dirname(path))
- files.add(path)
-
- _normcased_files = set(map(os.path.normcase, files))
-
- folders = compact(folders)
-
- # This walks the tree using os.walk to not miss extra folders
- # that might get added.
- for folder in folders:
- for dirpath, _, dirfiles in os.walk(folder):
- for fname in dirfiles:
- if fname.endswith(".pyc"):
- continue
-
- file_ = os.path.join(dirpath, fname)
- if (os.path.isfile(file_) and
- os.path.normcase(file_) not in _normcased_files):
- # We are skipping this file. Add it to the set.
- will_skip.add(file_)
-
- will_remove = files | {
- os.path.join(folder, "*") for folder in folders
- }
-
- return will_remove, will_skip
-
-
-class StashedUninstallPathSet(object):
- """A set of file rename operations to stash files while
- tentatively uninstalling them."""
- def __init__(self):
- # Mapping from source file root to [Adjacent]TempDirectory
- # for files under that directory.
- self._save_dirs = {}
- # (old path, new path) tuples for each move that may need
- # to be undone.
- self._moves = []
-
- def _get_directory_stash(self, path):
- """Stashes a directory.
-
- Directories are stashed adjacent to their original location if
- possible, or else moved/copied into the user's temp dir."""
-
- try:
- save_dir = AdjacentTempDirectory(path)
- save_dir.create()
- except OSError:
- save_dir = TempDirectory(kind="uninstall")
- save_dir.create()
- self._save_dirs[os.path.normcase(path)] = save_dir
-
- return save_dir.path
-
- def _get_file_stash(self, path):
- """Stashes a file.
-
- If no root has been provided, one will be created for the directory
- in the user's temp directory."""
- path = os.path.normcase(path)
- head, old_head = os.path.dirname(path), None
- save_dir = None
-
- while head != old_head:
- try:
- save_dir = self._save_dirs[head]
- break
- except KeyError:
- pass
- head, old_head = os.path.dirname(head), head
- else:
- # Did not find any suitable root
- head = os.path.dirname(path)
- save_dir = TempDirectory(kind='uninstall')
- save_dir.create()
- self._save_dirs[head] = save_dir
-
- relpath = os.path.relpath(path, head)
- if relpath and relpath != os.path.curdir:
- return os.path.join(save_dir.path, relpath)
- return save_dir.path
-
- def stash(self, path):
- """Stashes the directory or file and returns its new location.
- """
- if os.path.isdir(path):
- new_path = self._get_directory_stash(path)
- else:
- new_path = self._get_file_stash(path)
-
- self._moves.append((path, new_path))
- if os.path.isdir(path) and os.path.isdir(new_path):
- # If we're moving a directory, we need to
- # remove the destination first or else it will be
- # moved to inside the existing directory.
- # We just created new_path ourselves, so it will
- # be removable.
- os.rmdir(new_path)
- renames(path, new_path)
- return new_path
-
- def commit(self):
- """Commits the uninstall by removing stashed files."""
- for _, save_dir in self._save_dirs.items():
- save_dir.cleanup()
- self._moves = []
- self._save_dirs = {}
-
- def rollback(self):
- """Undoes the uninstall by moving stashed files back."""
- for p in self._moves:
-            logger.info("Moving to %s\n from %s", *p)
-
- for new_path, path in self._moves:
- try:
- logger.debug('Replacing %s from %s', new_path, path)
- if os.path.isfile(new_path):
- os.unlink(new_path)
- elif os.path.isdir(new_path):
- rmtree(new_path)
- renames(path, new_path)
- except OSError as ex:
- logger.error("Failed to restore %s", new_path)
- logger.debug("Exception: %s", ex)
-
- self.commit()
-
- @property
- def can_rollback(self):
- return bool(self._moves)
-
-
-class UninstallPathSet(object):
- """A set of file paths to be removed in the uninstallation of a
- requirement."""
- def __init__(self, dist):
- self.paths = set()
- self._refuse = set()
- self.pth = {}
- self.dist = dist
- self._moved_paths = StashedUninstallPathSet()
-
- def _permitted(self, path):
- """
- Return True if the given path is one we are permitted to
- remove/modify, False otherwise.
-
- """
- return is_local(path)
-
- def add(self, path):
- head, tail = os.path.split(path)
-
- # we normalize the head to resolve parent directory symlinks, but not
- # the tail, since we only want to uninstall symlinks, not their targets
- path = os.path.join(normalize_path(head), os.path.normcase(tail))
-
- if not os.path.exists(path):
- return
- if self._permitted(path):
- self.paths.add(path)
- else:
- self._refuse.add(path)
-
- # __pycache__ files can show up after 'installed-files.txt' is created,
- # due to imports
- if os.path.splitext(path)[1] == '.py' and uses_pycache:
- self.add(cache_from_source(path))
-
- def add_pth(self, pth_file, entry):
- pth_file = normalize_path(pth_file)
- if self._permitted(pth_file):
- if pth_file not in self.pth:
- self.pth[pth_file] = UninstallPthEntries(pth_file)
- self.pth[pth_file].add(entry)
- else:
- self._refuse.add(pth_file)
-
- def remove(self, auto_confirm=False, verbose=False):
- """Remove paths in ``self.paths`` with confirmation (unless
- ``auto_confirm`` is True)."""
-
- if not self.paths:
- logger.info(
- "Can't uninstall '%s'. No files were found to uninstall.",
- self.dist.project_name,
- )
- return
-
- dist_name_version = (
- self.dist.project_name + "-" + self.dist.version
- )
- logger.info('Uninstalling %s:', dist_name_version)
-
- with indent_log():
- if auto_confirm or self._allowed_to_proceed(verbose):
- moved = self._moved_paths
-
- for_rename = compress_for_rename(self.paths)
-
- for path in sorted(compact(for_rename)):
- moved.stash(path)
- logger.debug('Removing file or directory %s', path)
-
- for pth in self.pth.values():
- pth.remove()
-
- logger.info('Successfully uninstalled %s', dist_name_version)
-
- def _allowed_to_proceed(self, verbose):
- """Display which files would be deleted and prompt for confirmation
- """
-
- def _display(msg, paths):
- if not paths:
- return
-
- logger.info(msg)
- with indent_log():
- for path in sorted(compact(paths)):
- logger.info(path)
-
- if not verbose:
- will_remove, will_skip = compress_for_output_listing(self.paths)
- else:
- # In verbose mode, display all the files that are going to be
- # deleted.
- will_remove = list(self.paths)
- will_skip = set()
-
- _display('Would remove:', will_remove)
- _display('Would not remove (might be manually added):', will_skip)
- _display('Would not remove (outside of prefix):', self._refuse)
- if verbose:
- _display('Will actually move:', compress_for_rename(self.paths))
-
- return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
-
- def rollback(self):
- """Rollback the changes previously made by remove()."""
- if not self._moved_paths.can_rollback:
- logger.error(
- "Can't roll back %s; was not uninstalled",
- self.dist.project_name,
- )
- return False
- logger.info('Rolling back uninstall of %s', self.dist.project_name)
- self._moved_paths.rollback()
- for pth in self.pth.values():
- pth.rollback()
-
- def commit(self):
- """Remove temporary save dir: rollback will no longer be possible."""
- self._moved_paths.commit()
-
- @classmethod
- def from_dist(cls, dist):
- dist_path = normalize_path(dist.location)
- if not dist_is_local(dist):
- logger.info(
- "Not uninstalling %s at %s, outside environment %s",
- dist.key,
- dist_path,
- sys.prefix,
- )
- return cls(dist)
-
- if dist_path in {p for p in {sysconfig.get_path("stdlib"),
- sysconfig.get_path("platstdlib")}
- if p}:
- logger.info(
- "Not uninstalling %s at %s, as it is in the standard library.",
- dist.key,
- dist_path,
- )
- return cls(dist)
-
- paths_to_remove = cls(dist)
- develop_egg_link = egg_link_path(dist)
- develop_egg_link_egg_info = '{}.egg-info'.format(
- pkg_resources.to_filename(dist.project_name))
- egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
- # Special case for distutils installed package
- distutils_egg_info = getattr(dist._provider, 'path', None)
-
-    # The order of the uninstall cases matters: given 2 installs of the
-    # same package, pip needs to uninstall the currently detected version
- if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
- not dist.egg_info.endswith(develop_egg_link_egg_info)):
- # if dist.egg_info.endswith(develop_egg_link_egg_info), we
- # are in fact in the develop_egg_link case
- paths_to_remove.add(dist.egg_info)
- if dist.has_metadata('installed-files.txt'):
- for installed_file in dist.get_metadata(
- 'installed-files.txt').splitlines():
- path = os.path.normpath(
- os.path.join(dist.egg_info, installed_file)
- )
- paths_to_remove.add(path)
- # FIXME: need a test for this elif block
- # occurs with --single-version-externally-managed/--record outside
- # of pip
- elif dist.has_metadata('top_level.txt'):
- if dist.has_metadata('namespace_packages.txt'):
- namespaces = dist.get_metadata('namespace_packages.txt')
- else:
- namespaces = []
- for top_level_pkg in [
- p for p
- in dist.get_metadata('top_level.txt').splitlines()
- if p and p not in namespaces]:
- path = os.path.join(dist.location, top_level_pkg)
- paths_to_remove.add(path)
- paths_to_remove.add(path + '.py')
- paths_to_remove.add(path + '.pyc')
- paths_to_remove.add(path + '.pyo')
-
- elif distutils_egg_info:
- raise UninstallationError(
- "Cannot uninstall {!r}. It is a distutils installed project "
- "and thus we cannot accurately determine which files belong "
- "to it which would lead to only a partial uninstall.".format(
- dist.project_name,
- )
- )
-
- elif dist.location.endswith('.egg'):
- # package installed by easy_install
- # We cannot match on dist.egg_name because it can slightly vary
- # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
- paths_to_remove.add(dist.location)
- easy_install_egg = os.path.split(dist.location)[1]
- easy_install_pth = os.path.join(os.path.dirname(dist.location),
- 'easy-install.pth')
- paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
-
- elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
- for path in uninstallation_paths(dist):
- paths_to_remove.add(path)
-
- elif develop_egg_link:
- # develop egg
- with open(develop_egg_link, 'r') as fh:
- link_pointer = os.path.normcase(fh.readline().strip())
- assert (link_pointer == dist.location), (
- 'Egg-link %s does not match installed location of %s '
- '(at %s)' % (link_pointer, dist.project_name, dist.location)
- )
- paths_to_remove.add(develop_egg_link)
- easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
- 'easy-install.pth')
- paths_to_remove.add_pth(easy_install_pth, dist.location)
-
- else:
- logger.debug(
- 'Not sure how to uninstall: %s - Check: %s',
- dist, dist.location,
- )
-
- # find distutils scripts= scripts
- if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
- for script in dist.metadata_listdir('scripts'):
- if dist_in_usersite(dist):
- bin_dir = bin_user
- else:
- bin_dir = bin_py
- paths_to_remove.add(os.path.join(bin_dir, script))
- if WINDOWS:
- paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
-
- # find console_scripts
- _scripts_to_remove = []
- console_scripts = dist.get_entry_map(group='console_scripts')
- for name in console_scripts.keys():
- _scripts_to_remove.extend(_script_names(dist, name, False))
- # find gui_scripts
- gui_scripts = dist.get_entry_map(group='gui_scripts')
- for name in gui_scripts.keys():
- _scripts_to_remove.extend(_script_names(dist, name, True))
-
- for s in _scripts_to_remove:
- paths_to_remove.add(s)
-
- return paths_to_remove
-
-
-class UninstallPthEntries(object):
- def __init__(self, pth_file):
- if not os.path.isfile(pth_file):
- raise UninstallationError(
- "Cannot remove entries from nonexistent file %s" % pth_file
- )
- self.file = pth_file
- self.entries = set()
- self._saved_lines = None
-
- def add(self, entry):
- entry = os.path.normcase(entry)
- # On Windows, os.path.normcase converts the entry to use
- # backslashes. This is correct for entries that describe absolute
- # paths outside of site-packages, but all the others use forward
- # slashes.
- if WINDOWS and not os.path.splitdrive(entry)[0]:
- entry = entry.replace('\\', '/')
- self.entries.add(entry)
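
The Windows-only slash fix-up can be demonstrated on any platform via ntpath, the Windows implementation behind os.path:

    import ntpath  # os.path's Windows implementation, importable everywhere

    entry = ntpath.normcase('./pkg-1.0-py3.7.egg')  # -> '.\\pkg-1.0-py3.7.egg'
    if not ntpath.splitdrive(entry)[0]:
        # Relative .pth entries keep forward slashes.
        entry = entry.replace('\\', '/')
    print(entry)  # ./pkg-1.0-py3.7.egg
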
-
- def remove(self):
- logger.debug('Removing pth entries from %s:', self.file)
- with open(self.file, 'rb') as fh:
-            # Windows uses '\r\n' with py3k, but '\n' with py2.x
- lines = fh.readlines()
- self._saved_lines = lines
- if any(b'\r\n' in line for line in lines):
- endline = '\r\n'
- else:
- endline = '\n'
- # handle missing trailing newline
- if lines and not lines[-1].endswith(endline.encode("utf-8")):
- lines[-1] = lines[-1] + endline.encode("utf-8")
- for entry in self.entries:
- try:
- logger.debug('Removing entry: %s', entry)
- lines.remove((entry + endline).encode("utf-8"))
- except ValueError:
- pass
- with open(self.file, 'wb') as fh:
- fh.writelines(lines)
-
- def rollback(self):
- if self._saved_lines is None:
- logger.error(
- 'Cannot roll back changes to %s, none were made', self.file
- )
- return False
- logger.debug('Rolling %s back to previous state', self.file)
- with open(self.file, 'wb') as fh:
- fh.writelines(self._saved_lines)
- return True
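
For context on the UninstallPthEntries class above: remove() is a plain read-modify-write over easy-install.pth, and rollback() rewrites the lines saved before modification. A minimal standalone sketch of that flow, using a hypothetical throwaway .pth file (all names illustrative, not pip's API):

    import os
    import tempfile

    pth = os.path.join(tempfile.mkdtemp(), 'easy-install.pth')
    with open(pth, 'wb') as fh:
        fh.write(b'./demo-1.0-py3.7.egg\n./keep-2.0-py3.7.egg\n')

    entry = './demo-1.0-py3.7.egg'
    with open(pth, 'rb') as fh:
        lines = fh.readlines()
    saved_lines = list(lines)  # kept aside, as rollback() would need them
    lines.remove((entry + '\n').encode('utf-8'))  # drop the matching entry
    with open(pth, 'wb') as fh:
        fh.writelines(lines)  # file now contains only the surviving entry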
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/resolve.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/resolve.py
deleted file mode 100644
index 33f572f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/resolve.py
+++ /dev/null
@@ -1,393 +0,0 @@
-"""Dependency Resolution
-
-The dependency resolution in pip is performed as follows:
-
-for top-level requirements:
- a. only one spec allowed per project, regardless of conflicts or not.
- otherwise a "double requirement" exception is raised
- b. they override sub-dependency requirements.
-for sub-dependencies
- a. "first found, wins" (where the order is breadth first)
-"""
-
-import logging
-from collections import defaultdict
-from itertools import chain
-
-from pip._internal.exceptions import (
- BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
- UnsupportedPythonVersion,
-)
-from pip._internal.req.constructors import install_req_from_req_string
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import dist_in_usersite, ensure_dir
-from pip._internal.utils.packaging import check_dist_requires_python
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, DefaultDict, List, Set # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
- from pip._internal.index import PackageFinder # noqa: F401
- from pip._internal.req.req_set import RequirementSet # noqa: F401
- from pip._internal.operations.prepare import ( # noqa: F401
- DistAbstraction, RequirementPreparer
- )
- from pip._internal.cache import WheelCache # noqa: F401
-
-logger = logging.getLogger(__name__)
-
-
-class Resolver(object):
- """Resolves which packages need to be installed/uninstalled to perform \
- the requested operation without breaking the requirements of any package.
- """
-
- _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
-
- def __init__(
- self,
- preparer, # type: RequirementPreparer
- session, # type: PipSession
- finder, # type: PackageFinder
- wheel_cache, # type: Optional[WheelCache]
- use_user_site, # type: bool
- ignore_dependencies, # type: bool
- ignore_installed, # type: bool
- ignore_requires_python, # type: bool
- force_reinstall, # type: bool
- isolated, # type: bool
- upgrade_strategy, # type: str
- use_pep517=None # type: Optional[bool]
- ):
- # type: (...) -> None
- super(Resolver, self).__init__()
- assert upgrade_strategy in self._allowed_strategies
-
- self.preparer = preparer
- self.finder = finder
- self.session = session
-
- # NOTE: This would eventually be replaced with a cache that can give
- # information about both sdist and wheels transparently.
- self.wheel_cache = wheel_cache
-
- # This is set in resolve
- self.require_hashes = None # type: Optional[bool]
-
- self.upgrade_strategy = upgrade_strategy
- self.force_reinstall = force_reinstall
- self.isolated = isolated
- self.ignore_dependencies = ignore_dependencies
- self.ignore_installed = ignore_installed
- self.ignore_requires_python = ignore_requires_python
- self.use_user_site = use_user_site
- self.use_pep517 = use_pep517
-
- self._discovered_dependencies = \
- defaultdict(list) # type: DefaultDict[str, List]
-
- def resolve(self, requirement_set):
- # type: (RequirementSet) -> None
- """Resolve what operations need to be done
-
- As a side-effect of this method, the packages (and their dependencies)
- are downloaded, unpacked and prepared for installation. This
- preparation is done by ``pip.operations.prepare``.
-
- Once PyPI has static dependency metadata available, it would be
- possible to move the preparation to become a step separated from
- dependency resolution.
- """
- # make the wheelhouse
- if self.preparer.wheel_download_dir:
- ensure_dir(self.preparer.wheel_download_dir)
-
- # If any top-level requirement has a hash specified, enter
- # hash-checking mode, which requires hashes from all.
- root_reqs = (
- requirement_set.unnamed_requirements +
- list(requirement_set.requirements.values())
- )
- self.require_hashes = (
- requirement_set.require_hashes or
- any(req.has_hash_options for req in root_reqs)
- )
-
- # Display where finder is looking for packages
- locations = self.finder.get_formatted_locations()
- if locations:
- logger.info(locations)
-
- # Actually prepare the files, and collect any exceptions. Most hash
- # exceptions cannot be checked ahead of time, because
- # req.populate_link() needs to be called before we can make decisions
- # based on link type.
- discovered_reqs = [] # type: List[InstallRequirement]
- hash_errors = HashErrors()
- for req in chain(root_reqs, discovered_reqs):
- try:
- discovered_reqs.extend(
- self._resolve_one(requirement_set, req)
- )
- except HashError as exc:
- exc.req = req
- hash_errors.append(exc)
-
- if hash_errors:
- raise hash_errors
-
- def _is_upgrade_allowed(self, req):
- # type: (InstallRequirement) -> bool
- if self.upgrade_strategy == "to-satisfy-only":
- return False
- elif self.upgrade_strategy == "eager":
- return True
- else:
- assert self.upgrade_strategy == "only-if-needed"
- return req.is_direct
-
- def _set_req_to_reinstall(self, req):
- # type: (InstallRequirement) -> None
- """
-        Mark a requirement for reinstallation, recording any existing install as a conflict.
- """
- # Don't uninstall the conflict if doing a user install and the
- # conflict is not a user install.
- if not self.use_user_site or dist_in_usersite(req.satisfied_by):
- req.conflicts_with = req.satisfied_by
- req.satisfied_by = None
-
- # XXX: Stop passing requirement_set for options
- def _check_skip_installed(self, req_to_install):
- # type: (InstallRequirement) -> Optional[str]
- """Check if req_to_install should be skipped.
-
- This will check if the req is installed, and whether we should upgrade
- or reinstall it, taking into account all the relevant user options.
-
- After calling this req_to_install will only have satisfied_by set to
- None if the req_to_install is to be upgraded/reinstalled etc. Any
- other value will be a dist recording the current thing installed that
- satisfies the requirement.
-
- Note that for vcs urls and the like we can't assess skipping in this
- routine - we simply identify that we need to pull the thing down,
- then later on it is pulled down and introspected to assess upgrade/
- reinstalls etc.
-
- :return: A text reason for why it was skipped, or None.
- """
- if self.ignore_installed:
- return None
-
- req_to_install.check_if_exists(self.use_user_site)
- if not req_to_install.satisfied_by:
- return None
-
- if self.force_reinstall:
- self._set_req_to_reinstall(req_to_install)
- return None
-
- if not self._is_upgrade_allowed(req_to_install):
- if self.upgrade_strategy == "only-if-needed":
- return 'already satisfied, skipping upgrade'
- return 'already satisfied'
-
- # Check for the possibility of an upgrade. For link-based
- # requirements we have to pull the tree down and inspect to assess
- # the version #, so it's handled way down.
- if not req_to_install.link:
- try:
- self.finder.find_requirement(req_to_install, upgrade=True)
- except BestVersionAlreadyInstalled:
- # Then the best version is installed.
- return 'already up-to-date'
- except DistributionNotFound:
- # No distribution found, so we squash the error. It will
-                # be raised again later, when we retry the install.
- # Why don't we just raise here?
- pass
-
- self._set_req_to_reinstall(req_to_install)
- return None
-
- def _get_abstract_dist_for(self, req):
- # type: (InstallRequirement) -> DistAbstraction
- """Takes a InstallRequirement and returns a single AbstractDist \
- representing a prepared variant of the same.
- """
- assert self.require_hashes is not None, (
- "require_hashes should have been set in Resolver.resolve()"
- )
-
- if req.editable:
- return self.preparer.prepare_editable_requirement(
- req, self.require_hashes, self.use_user_site, self.finder,
- )
-
- # satisfied_by is only evaluated by calling _check_skip_installed,
- # so it must be None here.
- assert req.satisfied_by is None
- skip_reason = self._check_skip_installed(req)
-
- if req.satisfied_by:
- return self.preparer.prepare_installed_requirement(
- req, self.require_hashes, skip_reason
- )
-
- upgrade_allowed = self._is_upgrade_allowed(req)
- abstract_dist = self.preparer.prepare_linked_requirement(
- req, self.session, self.finder, upgrade_allowed,
- self.require_hashes
- )
-
- # NOTE
- # The following portion is for determining if a certain package is
- # going to be re-installed/upgraded or not and reporting to the user.
- # This should probably get cleaned up in a future refactor.
-
-        # req.req is only available after unpack for URL packages; repeat
-        # check_if_exists to support uninstall-on-upgrade (#14)
- if not self.ignore_installed:
- req.check_if_exists(self.use_user_site)
-
- if req.satisfied_by:
- should_modify = (
- self.upgrade_strategy != "to-satisfy-only" or
- self.force_reinstall or
- self.ignore_installed or
- req.link.scheme == 'file'
- )
- if should_modify:
- self._set_req_to_reinstall(req)
- else:
- logger.info(
- 'Requirement already satisfied (use --upgrade to upgrade):'
- ' %s', req,
- )
-
- return abstract_dist
-
- def _resolve_one(
- self,
- requirement_set, # type: RequirementSet
- req_to_install # type: InstallRequirement
- ):
- # type: (...) -> List[InstallRequirement]
- """Prepare a single requirements file.
-
- :return: A list of additional InstallRequirements to also install.
- """
- # Tell user what we are doing for this requirement:
- # obtain (editable), skipping, processing (local url), collecting
- # (remote url or package name)
- if req_to_install.constraint or req_to_install.prepared:
- return []
-
- req_to_install.prepared = True
-
- # register tmp src for cleanup in case something goes wrong
- requirement_set.reqs_to_cleanup.append(req_to_install)
-
- abstract_dist = self._get_abstract_dist_for(req_to_install)
-
- # Parse and return dependencies
- dist = abstract_dist.dist()
- try:
- check_dist_requires_python(dist)
- except UnsupportedPythonVersion as err:
- if self.ignore_requires_python:
- logger.warning(err.args[0])
- else:
- raise
-
- more_reqs = [] # type: List[InstallRequirement]
-
- def add_req(subreq, extras_requested):
- sub_install_req = install_req_from_req_string(
- str(subreq),
- req_to_install,
- isolated=self.isolated,
- wheel_cache=self.wheel_cache,
- use_pep517=self.use_pep517
- )
- parent_req_name = req_to_install.name
- to_scan_again, add_to_parent = requirement_set.add_requirement(
- sub_install_req,
- parent_req_name=parent_req_name,
- extras_requested=extras_requested,
- )
- if parent_req_name and add_to_parent:
- self._discovered_dependencies[parent_req_name].append(
- add_to_parent
- )
- more_reqs.extend(to_scan_again)
-
- with indent_log():
- # We add req_to_install before its dependencies, so that we
- # can refer to it when adding dependencies.
- if not requirement_set.has_requirement(req_to_install.name):
- # 'unnamed' requirements will get added here
- req_to_install.is_direct = True
- requirement_set.add_requirement(
- req_to_install, parent_req_name=None,
- )
-
- if not self.ignore_dependencies:
- if req_to_install.extras:
- logger.debug(
- "Installing extra requirements: %r",
- ','.join(req_to_install.extras),
- )
- missing_requested = sorted(
- set(req_to_install.extras) - set(dist.extras)
- )
- for missing in missing_requested:
- logger.warning(
- '%s does not provide the extra \'%s\'',
- dist, missing
- )
-
- available_requested = sorted(
- set(dist.extras) & set(req_to_install.extras)
- )
- for subreq in dist.requires(available_requested):
- add_req(subreq, extras_requested=available_requested)
-
- if not req_to_install.editable and not req_to_install.satisfied_by:
- # XXX: --no-install leads this to report 'Successfully
- # downloaded' for only non-editable reqs, even though we took
- # action on them.
- requirement_set.successfully_downloaded.append(req_to_install)
-
- return more_reqs
-
- def get_installation_order(self, req_set):
- # type: (RequirementSet) -> List[InstallRequirement]
- """Create the installation order.
-
- The installation order is topological - requirements are installed
- before the requiring thing. We break cycles at an arbitrary point,
- and make no other guarantees.
- """
-        # The current implementation, which we may change at any point,
- # installs the user specified things in the order given, except when
- # dependencies must come earlier to achieve topological order.
- order = []
- ordered_reqs = set() # type: Set[InstallRequirement]
-
- def schedule(req):
- if req.satisfied_by or req in ordered_reqs:
- return
- if req.constraint:
- return
- ordered_reqs.add(req)
- for dep in self._discovered_dependencies[req.name]:
- schedule(dep)
- order.append(req)
-
- for install_req in req_set.requirements.values():
- schedule(install_req)
- return order
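
The get_installation_order() method above is a depth-first post-order walk: each requirement is appended only after its discovered dependencies, and the visited set breaks cycles. A self-contained sketch of the same traversal over a toy dependency map (names made up):

    # toy graph: 'app' requires 'lib' and 'util'; 'lib' requires 'util'
    deps = {'app': ['lib', 'util'], 'lib': ['util'], 'util': []}
    order = []
    seen = set()

    def schedule(name):
        if name in seen:
            return  # already scheduled, or we are inside a cycle
        seen.add(name)
        for dep in deps.get(name, []):
            schedule(dep)
        order.append(name)  # post-order: after all of its dependencies

    schedule('app')
    print(order)  # ['util', 'lib', 'app']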
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py
+++ /dev/null
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 964b1b1..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
deleted file mode 100644
index 7357554..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
deleted file mode 100644
index 8b19861..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
deleted file mode 100644
index 80d7f26..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
deleted file mode 100644
index d09bef0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
deleted file mode 100644
index 5fc3e1d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
deleted file mode 100644
index 678cffb..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
deleted file mode 100644
index e6fc295..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
deleted file mode 100644
index 20fa454..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
deleted file mode 100644
index 6000d5f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc
deleted file mode 100644
index 8fb6ad9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
deleted file mode 100644
index 43fdcf0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
deleted file mode 100644
index af6c672..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
deleted file mode 100644
index 96efb71..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
deleted file mode 100644
index f2795a2..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
deleted file mode 100644
index c512b9f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
deleted file mode 100644
index d31671f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py
deleted file mode 100644
index 9af9fa7..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py
+++ /dev/null
@@ -1,270 +0,0 @@
-"""
-This code was taken from https://github.com/ActiveState/appdirs and modified
-to suit our purposes.
-"""
-from __future__ import absolute_import
-
-import os
-import sys
-
-from pip._vendor.six import PY2, text_type
-
-from pip._internal.utils.compat import WINDOWS, expanduser
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- List, Union
- )
-
-
-def user_cache_dir(appname):
- # type: (str) -> str
- r"""
- Return full path to the user-specific cache dir for this application.
-
- "appname" is the name of application.
-
- Typical user cache directories are:
- macOS: ~/Library/Caches/<AppName>
- Unix: ~/.cache/<AppName> (XDG default)
- Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
-
- On Windows the only suggestion in the MSDN docs is that local settings go
- in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
- non-roaming app data dir (the default returned by `user_data_dir`). Apps
- typically put cache data somewhere *under* the given dir here. Some
- examples:
- ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
- ...\Acme\SuperApp\Cache\1.0
-
- OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
- """
- if WINDOWS:
- # Get the base path
- path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
-
- # When using Python 2, return paths as bytes on Windows like we do on
- # other operating systems. See helper function docs for more details.
- if PY2 and isinstance(path, text_type):
- path = _win_path_to_bytes(path)
-
- # Add our app name and Cache directory to it
- path = os.path.join(path, appname, "Cache")
- elif sys.platform == "darwin":
- # Get the base path
- path = expanduser("~/Library/Caches")
-
- # Add our app name to it
- path = os.path.join(path, appname)
- else:
- # Get the base path
- path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
-
- # Add our app name to it
- path = os.path.join(path, appname)
-
- return path
-
-
-def user_data_dir(appname, roaming=False):
- # type: (str, bool) -> str
- r"""
- Return full path to the user-specific data dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default False) can be set True to use the Windows
- roaming appdata directory. That means that for users on a Windows
- network setup for roaming profiles, this user data will be
- sync'd on login. See
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
- for a discussion of issues.
-
- Typical user data directories are:
- macOS: ~/Library/Application Support/<AppName>
- if it exists, else ~/.config/<AppName>
- Unix: ~/.local/share/<AppName> # or in
- $XDG_DATA_HOME, if defined
-        Win XP (not roaming): C:\Documents and Settings\<username>\Local ...
-            ...Settings\Application Data\<AppName>
-        Win XP (roaming): C:\Documents and Settings\<username>\ ...
-            ...Application Data\<AppName>
- Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName>
- Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName>
-
- For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
- That means, by default "~/.local/share/<AppName>".
- """
- if WINDOWS:
-        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
- path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
- elif sys.platform == "darwin":
-        path = os.path.join(
-            expanduser('~/Library/Application Support/'),
-            appname,
-        )
-        if not os.path.isdir(path):
-            path = os.path.join(expanduser('~/.config/'), appname)
- else:
- path = os.path.join(
- os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
- appname,
- )
-
- return path
-
-
-def user_config_dir(appname, roaming=True):
- # type: (str, bool) -> str
- """Return full path to the user-specific config dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default True) can be set False to not use the
- Windows roaming appdata directory. That means that for users on a
- Windows network setup for roaming profiles, this user data will be
- sync'd on login. See
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
- for a discussion of issues.
-
-    Typical user config directories are:
- macOS: same as user_data_dir
- Unix: ~/.config/<AppName>
- Win *: same as user_data_dir
-
- For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
- That means, by default "~/.config/<AppName>".
- """
- if WINDOWS:
- path = user_data_dir(appname, roaming=roaming)
- elif sys.platform == "darwin":
- path = user_data_dir(appname)
- else:
- path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
- path = os.path.join(path, appname)
-
- return path
-
-
-# for the discussion regarding site_config_dirs locations
-# see <https://github.com/pypa/pip/issues/1733>
-def site_config_dirs(appname):
- # type: (str) -> List[str]
- r"""Return a list of potential user-shared config dirs for this application.
-
- "appname" is the name of application.
-
- Typical user config directories are:
- macOS: /Library/Application Support/<AppName>/
- Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
- $XDG_CONFIG_DIRS
- Win XP: C:\Documents and Settings\All Users\Application ...
- ...Data\<AppName>\
- Vista: (Fail! "C:\ProgramData" is a hidden *system* directory
- on Vista.)
- Win 7: Hidden, but writeable on Win 7:
- C:\ProgramData\<AppName>\
- """
- if WINDOWS:
- path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
- pathlist = [os.path.join(path, appname)]
- elif sys.platform == 'darwin':
- pathlist = [os.path.join('/Library/Application Support', appname)]
- else:
- # try looking in $XDG_CONFIG_DIRS
- xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
- if xdg_config_dirs:
- pathlist = [
- os.path.join(expanduser(x), appname)
- for x in xdg_config_dirs.split(os.pathsep)
- ]
- else:
- pathlist = []
-
- # always look in /etc directly as well
- pathlist.append('/etc')
-
- return pathlist
-
-
-# -- Windows support functions --
-
-def _get_win_folder_from_registry(csidl_name):
- # type: (str) -> str
- """
- This is a fallback technique at best. I'm not sure if using the
- registry for this guarantees us the correct answer for all CSIDL_*
- names.
- """
- import _winreg
-
- shell_folder_name = {
- "CSIDL_APPDATA": "AppData",
- "CSIDL_COMMON_APPDATA": "Common AppData",
- "CSIDL_LOCAL_APPDATA": "Local AppData",
- }[csidl_name]
-
- key = _winreg.OpenKey(
- _winreg.HKEY_CURRENT_USER,
- r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
- )
- directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
- return directory
-
-
-def _get_win_folder_with_ctypes(csidl_name):
- # type: (str) -> str
- csidl_const = {
- "CSIDL_APPDATA": 26,
- "CSIDL_COMMON_APPDATA": 35,
- "CSIDL_LOCAL_APPDATA": 28,
- }[csidl_name]
-
- buf = ctypes.create_unicode_buffer(1024)
- ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
-    # Downgrade to short path name if it has high-bit chars. See
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
- has_high_char = False
- for c in buf:
- if ord(c) > 255:
- has_high_char = True
- break
- if has_high_char:
- buf2 = ctypes.create_unicode_buffer(1024)
- if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
- buf = buf2
-
- return buf.value
-
-
-if WINDOWS:
- try:
- import ctypes
- _get_win_folder = _get_win_folder_with_ctypes
- except ImportError:
- _get_win_folder = _get_win_folder_from_registry
-
-
-def _win_path_to_bytes(path):
- """Encode Windows paths to bytes. Only used on Python 2.
-
- Motivation is to be consistent with other operating systems where paths
- are also returned as bytes. This avoids problems mixing bytes and Unicode
- elsewhere in the codebase. For more details and discussion see
- <https://github.com/pypa/pip/issues/3463>.
-
- If encoding using ASCII and MBCS fails, return the original Unicode path.
- """
- for encoding in ('ASCII', 'MBCS'):
- try:
- return path.encode(encoding)
- except (UnicodeEncodeError, LookupError):
- pass
- return path
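
The platform branching in user_cache_dir() above reduces, for the non-Windows cases, to an XDG lookup with a home-directory fallback. A rough sketch (the 'pip' application name is just an example):

    import os
    import sys

    appname = 'pip'
    if sys.platform == 'darwin':
        cache_dir = os.path.join(os.path.expanduser('~/Library/Caches'), appname)
    else:
        # honor $XDG_CACHE_HOME, defaulting to ~/.cache as the XDG spec does
        base = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        cache_dir = os.path.join(base, appname)
    print(cache_dir)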
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py
deleted file mode 100644
index 2d8b3bf..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py
+++ /dev/null
@@ -1,264 +0,0 @@
-"""Stuff that differs in different Python versions and platform
-distributions."""
-from __future__ import absolute_import, division
-
-import codecs
-import locale
-import logging
-import os
-import shutil
-import sys
-
-from pip._vendor.six import text_type
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Tuple, Text # noqa: F401
-
-try:
- import ipaddress
-except ImportError:
- try:
- from pip._vendor import ipaddress # type: ignore
- except ImportError:
- import ipaddr as ipaddress # type: ignore
- ipaddress.ip_address = ipaddress.IPAddress # type: ignore
- ipaddress.ip_network = ipaddress.IPNetwork # type: ignore
-
-
-__all__ = [
- "ipaddress", "uses_pycache", "console_to_str", "native_str",
- "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
- "get_extension_suffixes",
-]
-
-
-logger = logging.getLogger(__name__)
-
-if sys.version_info >= (3, 4):
- uses_pycache = True
- from importlib.util import cache_from_source
-else:
- import imp
-
- try:
- cache_from_source = imp.cache_from_source # type: ignore
- except AttributeError:
- # does not use __pycache__
- cache_from_source = None
-
- uses_pycache = cache_from_source is not None
-
-
-if sys.version_info >= (3, 5):
- backslashreplace_decode = "backslashreplace"
-else:
- # In version 3.4 and older, backslashreplace exists
- # but does not support use for decoding.
- # We implement our own replace handler for this
- # situation, so that we can consistently use
- # backslash replacement for all versions.
- def backslashreplace_decode_fn(err):
- raw_bytes = (err.object[i] for i in range(err.start, err.end))
- if sys.version_info[0] == 2:
- # Python 2 gave us characters - convert to numeric bytes
- raw_bytes = (ord(b) for b in raw_bytes)
- return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
- codecs.register_error(
- "backslashreplace_decode",
- backslashreplace_decode_fn,
- )
- backslashreplace_decode = "backslashreplace_decode"
-
-
-def console_to_str(data):
- # type: (bytes) -> Text
- """Return a string, safe for output, of subprocess output.
-
- We assume the data is in the locale preferred encoding.
- If it won't decode properly, we warn the user but decode as
- best we can.
-
- We also ensure that the output can be safely written to
- standard output without encoding errors.
- """
-
- # First, get the encoding we assume. This is the preferred
- # encoding for the locale, unless that is not found, or
- # it is ASCII, in which case assume UTF-8
- encoding = locale.getpreferredencoding()
- if (not encoding) or codecs.lookup(encoding).name == "ascii":
- encoding = "utf-8"
-
- # Now try to decode the data - if we fail, warn the user and
- # decode with replacement.
- try:
- decoded_data = data.decode(encoding)
- except UnicodeDecodeError:
- logger.warning(
- "Subprocess output does not appear to be encoded as %s",
- encoding,
- )
- decoded_data = data.decode(encoding, errors=backslashreplace_decode)
-
- # Make sure we can print the output, by encoding it to the output
- # encoding with replacement of unencodable characters, and then
- # decoding again.
- # We use stderr's encoding because it's less likely to be
- # redirected and if we don't find an encoding we skip this
- # step (on the assumption that output is wrapped by something
- # that won't fail).
- # The double getattr is to deal with the possibility that we're
- # being called in a situation where sys.__stderr__ doesn't exist,
- # or doesn't have an encoding attribute. Neither of these cases
- # should occur in normal pip use, but there's no harm in checking
- # in case people use pip in (unsupported) unusual situations.
- output_encoding = getattr(getattr(sys, "__stderr__", None),
- "encoding", None)
-
- if output_encoding:
- output_encoded = decoded_data.encode(
- output_encoding,
- errors="backslashreplace"
- )
- decoded_data = output_encoded.decode(output_encoding)
-
- return decoded_data
-
-
-if sys.version_info >= (3,):
- def native_str(s, replace=False):
- # type: (str, bool) -> str
- if isinstance(s, bytes):
- return s.decode('utf-8', 'replace' if replace else 'strict')
- return s
-
-else:
- def native_str(s, replace=False):
- # type: (str, bool) -> str
- # Replace is ignored -- unicode to UTF-8 can't fail
- if isinstance(s, text_type):
- return s.encode('utf-8')
- return s
-
-
-def get_path_uid(path):
- # type: (str) -> int
- """
- Return path's uid.
-
- Does not follow symlinks:
- https://github.com/pypa/pip/pull/935#discussion_r5307003
-
- Placed this function in compat due to differences on AIX and
- Jython, that should eventually go away.
-
- :raises OSError: When path is a symlink or can't be read.
- """
- if hasattr(os, 'O_NOFOLLOW'):
- fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
- file_uid = os.fstat(fd).st_uid
- os.close(fd)
- else: # AIX and Jython
- # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
- if not os.path.islink(path):
- # older versions of Jython don't have `os.fstat`
- file_uid = os.stat(path).st_uid
- else:
- # raise OSError for parity with os.O_NOFOLLOW above
- raise OSError(
- "%s is a symlink; Will not return uid for symlinks" % path
- )
- return file_uid
-
-
-if sys.version_info >= (3, 4):
- from importlib.machinery import EXTENSION_SUFFIXES
-
- def get_extension_suffixes():
- return EXTENSION_SUFFIXES
-else:
- from imp import get_suffixes
-
- def get_extension_suffixes():
- return [suffix[0] for suffix in get_suffixes()]
-
-
-def expanduser(path):
- # type: (str) -> str
- """
- Expand ~ and ~user constructions.
-
- Includes a workaround for https://bugs.python.org/issue14768
- """
- expanded = os.path.expanduser(path)
- if path.startswith('~/') and expanded.startswith('//'):
- expanded = expanded[1:]
- return expanded
-
-
-# packages in the stdlib that may have installation metadata, but should not be
-# considered 'installed'. this theoretically could be determined based on
-# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
-# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
-# make this ineffective, so hard-coding
-stdlib_pkgs = {"python", "wsgiref", "argparse"}
-
-
-# windows detection, covers cpython and ironpython
-WINDOWS = (sys.platform.startswith("win") or
- (sys.platform == 'cli' and os.name == 'nt'))
-
-
-def samefile(file1, file2):
- # type: (str, str) -> bool
- """Provide an alternative for os.path.samefile on Windows/Python2"""
- if hasattr(os.path, 'samefile'):
- return os.path.samefile(file1, file2)
- else:
- path1 = os.path.normcase(os.path.abspath(file1))
- path2 = os.path.normcase(os.path.abspath(file2))
- return path1 == path2
-
-
-if hasattr(shutil, 'get_terminal_size'):
- def get_terminal_size():
- # type: () -> Tuple[int, int]
- """
- Returns a tuple (x, y) representing the width(x) and the height(y)
- in characters of the terminal window.
- """
- return tuple(shutil.get_terminal_size()) # type: ignore
-else:
- def get_terminal_size():
- # type: () -> Tuple[int, int]
- """
- Returns a tuple (x, y) representing the width(x) and the height(y)
- in characters of the terminal window.
- """
- def ioctl_GWINSZ(fd):
- try:
- import fcntl
- import termios
- import struct
- cr = struct.unpack_from(
- 'hh',
- fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
- )
- except Exception:
- return None
- if cr == (0, 0):
- return None
- return cr
- cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
- if not cr:
- try:
- fd = os.open(os.ctermid(), os.O_RDONLY)
- cr = ioctl_GWINSZ(fd)
- os.close(fd)
- except Exception:
- pass
- if not cr:
- cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
- return int(cr[1]), int(cr[0])
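
The heart of console_to_str() above is: guess an encoding from the locale, then decode with a lossy fallback rather than crash on bad bytes. A stripped-down sketch of that idea (Python 3.5+ assumed, since decoding with errors='backslashreplace' is unavailable earlier — which is exactly why the module registers its own handler for older versions):

    import locale

    def decode_subprocess_output(data):
        # best-guess encoding, mirroring the locale lookup above
        encoding = locale.getpreferredencoding() or 'utf-8'
        try:
            return data.decode(encoding)
        except UnicodeDecodeError:
            # lossy but safe: undecodable bytes become \xNN escapes
            return data.decode(encoding, errors='backslashreplace')

    print(decode_subprocess_output(b'ok \xff'))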
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py
deleted file mode 100644
index 0beaf74..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py
+++ /dev/null
@@ -1,90 +0,0 @@
-"""
-A module that implements tooling to enable easy warnings about deprecations.
-"""
-from __future__ import absolute_import
-
-import logging
-import warnings
-
-from pip._vendor.packaging.version import parse
-
-from pip import __version__ as current_version
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Optional # noqa: F401
-
-
-class PipDeprecationWarning(Warning):
- pass
-
-
-_original_showwarning = None # type: Any
-
-
-# Warnings <-> Logging Integration
-def _showwarning(message, category, filename, lineno, file=None, line=None):
- if file is not None:
- if _original_showwarning is not None:
- _original_showwarning(
- message, category, filename, lineno, file, line,
- )
- elif issubclass(category, PipDeprecationWarning):
- # We use a specially named logger which will handle all of the
- # deprecation messages for pip.
- logger = logging.getLogger("pip._internal.deprecations")
- logger.warning(message)
- else:
- _original_showwarning(
- message, category, filename, lineno, file, line,
- )
-
-
-def install_warning_logger():
- # type: () -> None
- # Enable our Deprecation Warnings
- warnings.simplefilter("default", PipDeprecationWarning, append=True)
-
- global _original_showwarning
-
- if _original_showwarning is None:
- _original_showwarning = warnings.showwarning
- warnings.showwarning = _showwarning
-
-
-def deprecated(reason, replacement, gone_in, issue=None):
- # type: (str, Optional[str], Optional[str], Optional[int]) -> None
- """Helper to deprecate existing functionality.
-
- reason:
- Textual reason shown to the user about why this functionality has
- been deprecated.
- replacement:
- Textual suggestion shown to the user about what alternative
- functionality they can use.
- gone_in:
-        The version of pip in which this functionality should be removed.
-        An error is raised if pip's current version is greater than or
-        equal to this.
- issue:
- Issue number on the tracker that would serve as a useful place for
- users to find related discussion and provide feedback.
-
- Always pass replacement, gone_in and issue as keyword arguments for clarity
- at the call site.
- """
-
- # Construct a nice message.
- # This is purposely eagerly formatted as we want it to appear as if someone
- # typed this entire message out.
- message = "DEPRECATION: " + reason
- if replacement is not None:
- message += " A possible replacement is {}.".format(replacement)
- if issue is not None:
- url = "https://github.com/pypa/pip/issues/" + str(issue)
- message += " You can find discussion regarding this at {}.".format(url)
-
- # Raise as an error if it has to be removed.
- if gone_in is not None and parse(current_version) >= parse(gone_in):
- raise PipDeprecationWarning(message)
- warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
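
A typical call to the deprecated() helper above might look like the following; the reason, replacement, and gone_in values are invented for illustration, and the import assumes this pip 19.0.3 tree is on the path:

    from pip._internal.utils.deprecation import deprecated

    deprecated(
        reason="The --demo-flag option is no longer maintained.",  # hypothetical
        replacement="--new-demo-flag",                             # hypothetical
        gone_in="99.0",  # far in the future, so this warns rather than raises
        issue=None,
    )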
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py
deleted file mode 100644
index d36defa..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import codecs
-import locale
-import re
-import sys
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import List, Tuple, Text # noqa: F401
-
-BOMS = [
-    (codecs.BOM_UTF8, 'utf8'),
-    # check the 4-byte UTF-32 BOMs before UTF-16: BOM_UTF16_LE (ff fe) is a
-    # prefix of BOM_UTF32_LE (ff fe 00 00), so the longer BOM must win
-    (codecs.BOM_UTF32, 'utf32'),
-    (codecs.BOM_UTF32_BE, 'utf32-be'),
-    (codecs.BOM_UTF32_LE, 'utf32-le'),
-    (codecs.BOM_UTF16, 'utf16'),
-    (codecs.BOM_UTF16_BE, 'utf16-be'),
-    (codecs.BOM_UTF16_LE, 'utf16-le'),
-] # type: List[Tuple[bytes, Text]]
-
-ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
-
-
-def auto_decode(data):
- # type: (bytes) -> Text
- """Check a bytes string for a BOM to correctly detect the encoding
-
-    Falls back to locale.getpreferredencoding(False), as open() does on Python 3."""
- for bom, encoding in BOMS:
- if data.startswith(bom):
- return data[len(bom):].decode(encoding)
-    # Let's check the first two lines, as in PEP 263
-    for line in data.split(b'\n')[:2]:
-        if line[0:1] == b'#':
-            match = ENCODING_RE.search(line)
-            if match:
-                encoding = match.groups()[0].decode('ascii')
-                return data.decode(encoding)
- return data.decode(
- locale.getpreferredencoding(False) or sys.getdefaultencoding(),
- )
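
The BOM handling in auto_decode() above comes down to one startswith check per known BOM, stripping the marker before decoding. A tiny self-contained demonstration:

    import codecs

    data = codecs.BOM_UTF8 + u'caf\xe9'.encode('utf-8')
    if data.startswith(codecs.BOM_UTF8):
        text = data[len(codecs.BOM_UTF8):].decode('utf8')  # BOM stripped first
        print(text)  # cafe (with accented e), decoded without a stray BOM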
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py
deleted file mode 100644
index 1e6b033..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import os
-import os.path
-
-from pip._internal.utils.compat import get_path_uid
-
-
-def check_path_owner(path):
- # type: (str) -> bool
- # If we don't have a way to check the effective uid of this process, then
- # we'll just assume that we own the directory.
- if not hasattr(os, "geteuid"):
- return True
-
- previous = None
- while path != previous:
- if os.path.lexists(path):
- # Check if path is writable by current user.
- if os.geteuid() == 0:
- # Special handling for root user in order to handle properly
- # cases where users use sudo without -H flag.
- try:
- path_uid = get_path_uid(path)
- except OSError:
- return False
- return path_uid == 0
- else:
- return os.access(path, os.W_OK)
- else:
- previous, path = path, os.path.dirname(path)
- return False # assume we don't own the path
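
check_path_owner() above walks from the target path toward the filesystem root until it finds something that exists, then inspects that ancestor. The walk itself, isolated (POSIX paths assumed):

    import os

    def first_existing_ancestor(path):
        previous = None
        while path != previous:  # os.path.dirname('/') == '/', so this terminates
            if os.path.lexists(path):
                return path
            previous, path = path, os.path.dirname(path)
        return None

    # for a path that does not exist, the loop climbs until it hits '/'
    print(first_existing_ancestor('/definitely/missing/dir/file.txt'))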
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py
deleted file mode 100644
index 8a51f69..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py
+++ /dev/null
@@ -1,93 +0,0 @@
-from __future__ import absolute_import
-
-import ctypes
-import re
-import warnings
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, Tuple # noqa: F401
-
-
-def glibc_version_string():
- # type: () -> Optional[str]
- "Returns glibc version string, or None if not using glibc."
-
- # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
- # manpage says, "If filename is NULL, then the returned handle is for the
- # main program". This way we can let the linker do the work to figure out
- # which libc our process is actually using.
- process_namespace = ctypes.CDLL(None)
- try:
- gnu_get_libc_version = process_namespace.gnu_get_libc_version
- except AttributeError:
- # Symbol doesn't exist -> therefore, we are not linked to
- # glibc.
- return None
-
- # Call gnu_get_libc_version, which returns a string like "2.5"
- gnu_get_libc_version.restype = ctypes.c_char_p
- version_str = gnu_get_libc_version()
- # py2 / py3 compatibility:
- if not isinstance(version_str, str):
- version_str = version_str.decode("ascii")
-
- return version_str
-
-
-# Separated out from have_compatible_glibc for easier unit testing
-def check_glibc_version(version_str, required_major, minimum_minor):
- # type: (str, int, int) -> bool
- # Parse string and check against requested version.
- #
- # We use a regexp instead of str.split because we want to discard any
- # random junk that might come after the minor version -- this might happen
- # in patched/forked versions of glibc (e.g. Linaro's version of glibc
- # uses version strings like "2.20-2014.11"). See gh-3588.
- m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
- if not m:
- warnings.warn("Expected glibc version with 2 components major.minor,"
- " got: %s" % version_str, RuntimeWarning)
- return False
- return (int(m.group("major")) == required_major and
- int(m.group("minor")) >= minimum_minor)
-
-
-def have_compatible_glibc(required_major, minimum_minor):
- # type: (int, int) -> bool
- version_str = glibc_version_string() # type: Optional[str]
- if version_str is None:
- return False
- return check_glibc_version(version_str, required_major, minimum_minor)
-
-
-# platform.libc_ver regularly returns completely nonsensical glibc
-# versions. E.g. on my computer, platform says:
-#
-# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.7')
-# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.9')
-#
-# But the truth is:
-#
-# ~$ ldd --version
-# ldd (Debian GLIBC 2.22-11) 2.22
-#
-# This is unfortunate, because it means that the linehaul data on libc
-# versions that was generated by pip 8.1.2 and earlier is useless and
-# misleading. Solution: instead of using platform, use our code that actually
-# works.
-def libc_ver():
- # type: () -> Tuple[str, str]
- """Try to determine the glibc version
-
- Returns a tuple of strings (lib, version) which default to empty strings
- in case the lookup fails.
- """
- glibc_version = glibc_version_string()
- if glibc_version is None:
- return ("", "")
- else:
- return ("glibc", glibc_version)
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py
deleted file mode 100644
index c6df7a1..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py
+++ /dev/null
@@ -1,115 +0,0 @@
-from __future__ import absolute_import
-
-import hashlib
-
-from pip._vendor.six import iteritems, iterkeys, itervalues
-
-from pip._internal.exceptions import (
- HashMismatch, HashMissing, InstallationError,
-)
-from pip._internal.utils.misc import read_chunks
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Dict, List, BinaryIO, NoReturn, Iterator
- )
- from pip._vendor.six import PY3
- if PY3:
- from hashlib import _Hash # noqa: F401
- else:
- from hashlib import _hash as _Hash # noqa: F401
-
-
-# The recommended hash algo of the moment. Change this whenever the state of
-# the art changes; it won't hurt backward compatibility.
-FAVORITE_HASH = 'sha256'
-
-
-# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
-# Currently, those are the ones at least as collision-resistant as sha256.
-STRONG_HASHES = ['sha256', 'sha384', 'sha512']
-
-
-class Hashes(object):
- """A wrapper that builds multiple hashes at once and checks them against
- known-good values
-
- """
- def __init__(self, hashes=None):
- # type: (Dict[str, List[str]]) -> None
- """
- :param hashes: A dict of algorithm names pointing to lists of allowed
- hex digests
- """
- self._allowed = {} if hashes is None else hashes
-
- def check_against_chunks(self, chunks):
- # type: (Iterator[bytes]) -> None
- """Check good hashes against ones built from iterable of chunks of
- data.
-
- Raise HashMismatch if none match.
-
- """
- gots = {}
- for hash_name in iterkeys(self._allowed):
- try:
- gots[hash_name] = hashlib.new(hash_name)
- except (ValueError, TypeError):
- raise InstallationError('Unknown hash name: %s' % hash_name)
-
- for chunk in chunks:
- for hash in itervalues(gots):
- hash.update(chunk)
-
- for hash_name, got in iteritems(gots):
- if got.hexdigest() in self._allowed[hash_name]:
- return
- self._raise(gots)
-
- def _raise(self, gots):
- # type: (Dict[str, _Hash]) -> NoReturn
- raise HashMismatch(self._allowed, gots)
-
- def check_against_file(self, file):
- # type: (BinaryIO) -> None
- """Check good hashes against a file-like object
-
- Raise HashMismatch if none match.
-
- """
- return self.check_against_chunks(read_chunks(file))
-
- def check_against_path(self, path):
- # type: (str) -> None
- with open(path, 'rb') as file:
- return self.check_against_file(file)
-
- def __nonzero__(self):
- # type: () -> bool
- """Return whether I know any known-good hashes."""
- return bool(self._allowed)
-
- def __bool__(self):
- # type: () -> bool
- return self.__nonzero__()
-
-
-class MissingHashes(Hashes):
- """A workalike for Hashes used when we're missing a hash for a requirement
-
- It computes the actual hash of the requirement and raises a HashMissing
- exception showing it to the user.
-
- """
- def __init__(self):
- # type: () -> None
- """Don't offer the ``hashes`` kwarg."""
- # Pass our favorite hash in to generate a "gotten hash". With the
- # empty list, it will never match, so an error will always raise.
- super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
-
- def _raise(self, gots):
- # type: (Dict[str, _Hash]) -> NoReturn
- raise HashMissing(gots[FAVORITE_HASH].hexdigest())
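
check_against_chunks() above feeds every chunk to one hashlib object per allowed algorithm and accepts if any resulting digest is on the allow-list. The same logic, standalone:

    import hashlib

    # allow-list: sha256 of the expected content (computed here for the demo)
    allowed = {'sha256': [hashlib.sha256(b'hello world').hexdigest()]}
    chunks = [b'hello ', b'world']  # content arriving in pieces

    gots = {name: hashlib.new(name) for name in allowed}
    for chunk in chunks:
        for hasher in gots.values():
            hasher.update(chunk)  # every algorithm sees every chunk

    matched = any(gots[name].hexdigest() in digests
                  for name, digests in allowed.items())
    print(matched)  # True -- a mismatch is where _raise() would fire above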
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py
deleted file mode 100644
index 579d696..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py
+++ /dev/null
@@ -1,318 +0,0 @@
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import logging
-import logging.config
-import logging.handlers
-import os
-import sys
-
-from pip._vendor.six import PY2
-
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.misc import ensure_dir
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading # type: ignore
-
-
-try:
- from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
- colorama = None
-
-
-_log_state = threading.local()
-_log_state.indentation = 0
-
-
-class BrokenStdoutLoggingError(Exception):
- """
- Raised if BrokenPipeError occurs for the stdout stream while logging.
- """
- pass
-
-
-# BrokenPipeError does not exist in Python 2 and, in addition, manifests
-# differently in Windows and non-Windows.
-if WINDOWS:
- # In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
- # https://bugs.python.org/issue19612
- # https://bugs.python.org/issue30418
- if PY2:
- def _is_broken_pipe_error(exc_class, exc):
- """See the docstring for non-Windows Python 3 below."""
- return (exc_class is IOError and
- exc.errno in (errno.EINVAL, errno.EPIPE))
- else:
- # In Windows, a broken pipe IOError became OSError in Python 3.
- def _is_broken_pipe_error(exc_class, exc):
- """See the docstring for non-Windows Python 3 below."""
- return ((exc_class is BrokenPipeError) or # noqa: F821
- (exc_class is OSError and
- exc.errno in (errno.EINVAL, errno.EPIPE)))
-elif PY2:
- def _is_broken_pipe_error(exc_class, exc):
- """See the docstring for non-Windows Python 3 below."""
- return (exc_class is IOError and exc.errno == errno.EPIPE)
-else:
- # Then we are in the non-Windows Python 3 case.
- def _is_broken_pipe_error(exc_class, exc):
- """
- Return whether an exception is a broken pipe error.
-
- Args:
- exc_class: an exception class.
- exc: an exception instance.
- """
- return (exc_class is BrokenPipeError) # noqa: F821
-
-
-@contextlib.contextmanager
-def indent_log(num=2):
- """
- A context manager which will cause the log output to be indented for any
- log messages emitted inside it.
- """
- _log_state.indentation += num
- try:
- yield
- finally:
- _log_state.indentation -= num
-
-
-def get_indentation():
- return getattr(_log_state, 'indentation', 0)
-
-
-class IndentingFormatter(logging.Formatter):
- def __init__(self, *args, **kwargs):
- """
-        A logging.Formatter that indents its output according to indent_log() contexts.
-
- :param add_timestamp: A bool indicating output lines should be prefixed
- with their record's timestamp.
- """
- self.add_timestamp = kwargs.pop("add_timestamp", False)
- super(IndentingFormatter, self).__init__(*args, **kwargs)
-
- def format(self, record):
- """
- Calls the standard formatter, but will indent all of the log messages
- by our current indentation level.
- """
- formatted = super(IndentingFormatter, self).format(record)
- prefix = ''
- if self.add_timestamp:
- prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ")
- prefix += " " * get_indentation()
- formatted = "".join([
- prefix + line
- for line in formatted.splitlines(True)
- ])
- return formatted
-
-
-def _color_wrap(*colors):
- def wrapped(inp):
- return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
- return wrapped
-
-
-class ColorizedStreamHandler(logging.StreamHandler):
-
- # Don't build up a list of colors if we don't have colorama
- if colorama:
- COLORS = [
- # This needs to be in order from highest logging level to lowest.
- (logging.ERROR, _color_wrap(colorama.Fore.RED)),
- (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
- ]
- else:
- COLORS = []
-
- def __init__(self, stream=None, no_color=None):
- logging.StreamHandler.__init__(self, stream)
- self._no_color = no_color
-
- if WINDOWS and colorama:
- self.stream = colorama.AnsiToWin32(self.stream)
-
- def _using_stdout(self):
- """
- Return whether the handler is using sys.stdout.
- """
- if WINDOWS and colorama:
- # Then self.stream is an AnsiToWin32 object.
- return self.stream.wrapped is sys.stdout
-
- return self.stream is sys.stdout
-
- def should_color(self):
- # Don't colorize things if we do not have colorama or if told not to
- if not colorama or self._no_color:
- return False
-
- real_stream = (
- self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
- else self.stream.wrapped
- )
-
- # If the stream is a tty we should color it
- if hasattr(real_stream, "isatty") and real_stream.isatty():
- return True
-
- # If we have an ANSI term we should color it
- if os.environ.get("TERM") == "ANSI":
- return True
-
- # If anything else we should not color it
- return False
-
- def format(self, record):
- msg = logging.StreamHandler.format(self, record)
-
- if self.should_color():
- for level, color in self.COLORS:
- if record.levelno >= level:
- msg = color(msg)
- break
-
- return msg
-
- # The logging module says handleError() can be customized.
- def handleError(self, record):
- exc_class, exc = sys.exc_info()[:2]
- # If a broken pipe occurred while calling write() or flush() on the
- # stdout stream in logging's Handler.emit(), then raise our special
- # exception so we can handle it in main() instead of logging the
- # broken pipe error and continuing.
- if (exc_class and self._using_stdout() and
- _is_broken_pipe_error(exc_class, exc)):
- raise BrokenStdoutLoggingError()
-
- return super(ColorizedStreamHandler, self).handleError(record)
-
-
-class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
-
- def _open(self):
- ensure_dir(os.path.dirname(self.baseFilename))
- return logging.handlers.RotatingFileHandler._open(self)
-
-
-class MaxLevelFilter(logging.Filter):
-
- def __init__(self, level):
- self.level = level
-
- def filter(self, record):
- return record.levelno < self.level
-
-
-def setup_logging(verbosity, no_color, user_log_file):
- """Configures and sets up all of the logging
-
- Returns the requested logging level, as its integer value.
- """
-
- # Determine the level to be logging at.
- if verbosity >= 1:
- level = "DEBUG"
- elif verbosity == -1:
- level = "WARNING"
- elif verbosity == -2:
- level = "ERROR"
- elif verbosity <= -3:
- level = "CRITICAL"
- else:
- level = "INFO"
-
- level_number = getattr(logging, level)
-
- # The "root" logger should match the "console" level *unless* we also need
- # to log to a user log file.
- include_user_log = user_log_file is not None
- if include_user_log:
- additional_log_file = user_log_file
- root_level = "DEBUG"
- else:
- additional_log_file = "/dev/null"
- root_level = level
-
- # For vendored libraries, disable any logging besides WARNING unless
- # we have DEBUG level logging enabled.
- vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
-
- # Shorthands for clarity
- log_streams = {
- "stdout": "ext://sys.stdout",
- "stderr": "ext://sys.stderr",
- }
- handler_classes = {
- "stream": "pip._internal.utils.logging.ColorizedStreamHandler",
- "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
- }
-
- logging.config.dictConfig({
- "version": 1,
- "disable_existing_loggers": False,
- "filters": {
- "exclude_warnings": {
- "()": "pip._internal.utils.logging.MaxLevelFilter",
- "level": logging.WARNING,
- },
- },
- "formatters": {
- "indent": {
- "()": IndentingFormatter,
- "format": "%(message)s",
- },
- "indent_with_timestamp": {
- "()": IndentingFormatter,
- "format": "%(message)s",
- "add_timestamp": True,
- },
- },
- "handlers": {
- "console": {
- "level": level,
- "class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stdout"],
- "filters": ["exclude_warnings"],
- "formatter": "indent",
- },
- "console_errors": {
- "level": "WARNING",
- "class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stderr"],
- "formatter": "indent",
- },
- "user_log": {
- "level": "DEBUG",
- "class": handler_classes["file"],
- "filename": additional_log_file,
- "delay": True,
- "formatter": "indent_with_timestamp",
- },
- },
- "root": {
- "level": root_level,
- "handlers": ["console", "console_errors"] + (
- ["user_log"] if include_user_log else []
- ),
- },
- "loggers": {
- "pip._vendor": {
- "level": vendored_log_level
- }
- },
- })
-
- return level_number
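
The configuration above routes DEBUG/INFO to stdout and WARNING-and-above to stderr by pairing a filtered stdout handler with a WARNING-level stderr handler. A minimal, self-contained sketch of the same split-stream pattern, using plain handler objects instead of dictConfig (Python 3 only; the filter name is illustrative):

    import logging
    import sys

    class BelowLevelFilter(logging.Filter):
        """Pass only records strictly below the given level."""
        def __init__(self, level):
            super().__init__()
            self.level = level

        def filter(self, record):
            return record.levelno < self.level

    out = logging.StreamHandler(sys.stdout)
    out.addFilter(BelowLevelFilter(logging.WARNING))  # DEBUG/INFO -> stdout
    err = logging.StreamHandler(sys.stderr)
    err.setLevel(logging.WARNING)                     # WARNING+ -> stderr
    logging.basicConfig(level=logging.INFO, handlers=[out, err],
                        format="%(message)s")
    logging.info("routed to stdout")
    logging.warning("routed to stderr")
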
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py
deleted file mode 100644
index 84605ee..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py
+++ /dev/null
@@ -1,1040 +0,0 @@
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import io
-import locale
-# we have a submodule named 'logging' which would shadow this if we used the
-# regular name:
-import logging as std_logging
-import os
-import posixpath
-import re
-import shutil
-import stat
-import subprocess
-import sys
-import tarfile
-import zipfile
-from collections import deque
-
-from pip._vendor import pkg_resources
-# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
-# why we ignore the type on this import.
-from pip._vendor.retrying import retry # type: ignore
-from pip._vendor.six import PY2
-from pip._vendor.six.moves import input
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
-
-from pip._internal.exceptions import CommandError, InstallationError
-from pip._internal.locations import (
- running_under_virtualenv, site_packages, user_site, virtualenv_no_global,
- write_delete_marker_file,
-)
-from pip._internal.utils.compat import (
- WINDOWS, console_to_str, expanduser, stdlib_pkgs,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if PY2:
- from io import BytesIO as StringIO
-else:
- from io import StringIO
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Optional, Tuple, Iterable, List, Match, Union, Any, Mapping, Text,
- AnyStr, Container
- )
- from pip._vendor.pkg_resources import Distribution # noqa: F401
- from pip._internal.models.link import Link # noqa: F401
- from pip._internal.utils.ui import SpinnerInterface # noqa: F401
-
-
-__all__ = ['rmtree', 'display_path', 'backup_dir',
- 'ask', 'splitext',
- 'format_size', 'is_installable_dir',
- 'is_svn_page', 'file_contents',
- 'split_leading_dir', 'has_leading_dir',
- 'normalize_path',
- 'renames', 'get_prog',
- 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
- 'captured_stdout', 'ensure_dir',
- 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'WHEEL_EXTENSION',
- 'get_installed_version', 'remove_auth_from_url']
-
-
-logger = std_logging.getLogger(__name__)
-
-WHEEL_EXTENSION = '.whl'
-BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
-XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
-ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION)
-TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
-ARCHIVE_EXTENSIONS = (
- ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
-SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
-
-try:
- import bz2 # noqa
- SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
-except ImportError:
- logger.debug('bz2 module is not available')
-
-try:
- # Only for Python 3.3+
- import lzma # noqa
- SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
-except ImportError:
- logger.debug('lzma module is not available')
-
-
-def ensure_dir(path):
- # type: (AnyStr) -> None
- """os.path.makedirs without EEXIST."""
- try:
- os.makedirs(path)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
-
-def get_prog():
- # type: () -> str
- try:
- prog = os.path.basename(sys.argv[0])
- if prog in ('__main__.py', '-c'):
- return "%s -m pip" % sys.executable
- else:
- return prog
- except (AttributeError, TypeError, IndexError):
- pass
- return 'pip'
-
-
-# Retry every half second for up to 3 seconds
-@retry(stop_max_delay=3000, wait_fixed=500)
-def rmtree(dir, ignore_errors=False):
- # type: (str, bool) -> None
- shutil.rmtree(dir, ignore_errors=ignore_errors,
- onerror=rmtree_errorhandler)
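
The @retry decorator comes from pip's vendored `retrying` library; with stop_max_delay=3000 and wait_fixed=500 it re-invokes rmtree() every half second for up to three seconds before giving up. A simplified stand-in decorator, illustrative only and not the vendored implementation:

    import time

    def retry(stop_max_delay=3000, wait_fixed=500):
        # Re-run the wrapped function until it succeeds or the deadline
        # (milliseconds) expires, sleeping wait_fixed ms between attempts.
        def decorator(fn):
            def wrapper(*args, **kwargs):
                deadline = time.monotonic() + stop_max_delay / 1000.0
                while True:
                    try:
                        return fn(*args, **kwargs)
                    except Exception:
                        if time.monotonic() >= deadline:
                            raise
                        time.sleep(wait_fixed / 1000.0)
            return wrapper
        return decorator
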
-
-
-def rmtree_errorhandler(func, path, exc_info):
- """On Windows, the files in .svn are read-only, so when rmtree() tries to
- remove them, an exception is thrown. We catch that here, remove the
- read-only attribute, and hopefully continue without problems."""
- # if the file is currently read-only
- if os.stat(path).st_mode & stat.S_IREAD:
- # convert to read/write
- os.chmod(path, stat.S_IWRITE)
- # use the original function to repeat the operation
- func(path)
- return
- else:
- raise
-
-
-def display_path(path):
- # type: (Union[str, Text]) -> str
- """Gives the display value for a given path, making it relative to cwd
- if possible."""
- path = os.path.normcase(os.path.abspath(path))
- if sys.version_info[0] == 2:
- path = path.decode(sys.getfilesystemencoding(), 'replace')
- path = path.encode(sys.getdefaultencoding(), 'replace')
- if path.startswith(os.getcwd() + os.path.sep):
- path = '.' + path[len(os.getcwd()):]
- return path
-
-
-def backup_dir(dir, ext='.bak'):
- # type: (str, str) -> str
- """Figure out the name of a directory to back up the given dir to
- (adding .bak, .bak2, etc)"""
- n = 1
- extension = ext
- while os.path.exists(dir + extension):
- n += 1
- extension = ext + str(n)
- return dir + extension
-
-
-def ask_path_exists(message, options):
- # type: (str, Iterable[str]) -> str
- for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
- if action in options:
- return action
- return ask(message, options)
-
-
-def ask(message, options):
- # type: (str, Iterable[str]) -> str
- """Ask the message interactively, with the given possible responses"""
- while 1:
- if os.environ.get('PIP_NO_INPUT'):
- raise Exception(
- 'No input was expected ($PIP_NO_INPUT set); question: %s' %
- message
- )
- response = input(message)
- response = response.strip().lower()
- if response not in options:
- print(
- 'Your response (%r) was not one of the expected responses: '
- '%s' % (response, ', '.join(options))
- )
- else:
- return response
-
-
-def format_size(bytes):
- # type: (float) -> str
- if bytes > 1000 * 1000:
- return '%.1fMB' % (bytes / 1000.0 / 1000)
- elif bytes > 10 * 1000:
- return '%ikB' % (bytes / 1000)
- elif bytes > 1000:
- return '%.1fkB' % (bytes / 1000.0)
- else:
- return '%ibytes' % bytes
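
The thresholds are decimal (1000-based), so the boundaries fall at 1kB, 10kB and 1MB rather than at powers of two. For example:

    >>> format_size(999)
    '999bytes'
    >>> format_size(1234)
    '1.2kB'
    >>> format_size(20000)
    '20kB'
    >>> format_size(5000000)
    '5.0MB'
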
-
-
-def is_installable_dir(path):
- # type: (str) -> bool
- """Is path is a directory containing setup.py or pyproject.toml?
- """
- if not os.path.isdir(path):
- return False
- setup_py = os.path.join(path, 'setup.py')
- if os.path.isfile(setup_py):
- return True
- pyproject_toml = os.path.join(path, 'pyproject.toml')
- if os.path.isfile(pyproject_toml):
- return True
- return False
-
-
-def is_svn_page(html):
- # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]]
- """
- Returns true if the page appears to be the index page of an svn repository
- """
- return (re.search(r'<title>[^<]*Revision \d+:', html) and
- re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
-
-
-def file_contents(filename):
- # type: (str) -> Text
- with open(filename, 'rb') as fp:
- return fp.read().decode('utf-8')
-
-
-def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
- """Yield pieces of data from a file-like object until EOF."""
- while True:
- chunk = file.read(size)
- if not chunk:
- break
- yield chunk
-
-
-def split_leading_dir(path):
- # type: (Union[str, Text]) -> List[Union[str, Text]]
- path = path.lstrip('/').lstrip('\\')
- if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
- '\\' not in path):
- return path.split('/', 1)
- elif '\\' in path:
- return path.split('\\', 1)
- else:
- return [path, '']
-
-
-def has_leading_dir(paths):
- # type: (Iterable[Union[str, Text]]) -> bool
- """Returns true if all the paths have the same leading path name
- (i.e., everything is in one subdirectory in an archive)"""
- common_prefix = None
- for path in paths:
- prefix, rest = split_leading_dir(path)
- if not prefix:
- return False
- elif common_prefix is None:
- common_prefix = prefix
- elif prefix != common_prefix:
- return False
- return True
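
These two helpers together decide whether an extracted archive can be "flattened" by stripping a shared top-level directory. For example:

    >>> split_leading_dir('pkg-1.0/setup.py')
    ['pkg-1.0', 'setup.py']
    >>> has_leading_dir(['pkg-1.0/setup.py', 'pkg-1.0/pkg/__init__.py'])
    True
    >>> has_leading_dir(['setup.py', 'pkg-1.0/pkg/__init__.py'])
    False
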
-
-
-def normalize_path(path, resolve_symlinks=True):
- # type: (str, bool) -> str
- """
- Convert a path to its canonical, case-normalized, absolute version.
-
- """
- path = expanduser(path)
- if resolve_symlinks:
- path = os.path.realpath(path)
- else:
- path = os.path.abspath(path)
- return os.path.normcase(path)
-
-
-def splitext(path):
- # type: (str) -> Tuple[str, str]
- """Like os.path.splitext, but take off .tar too"""
- base, ext = posixpath.splitext(path)
- if base.lower().endswith('.tar'):
- ext = base[-4:] + ext
- base = base[:-4]
- return base, ext
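
Unlike os.path.splitext, compound tar suffixes are treated as a single extension:

    >>> splitext('dist/foo-1.0.tar.gz')
    ('dist/foo-1.0', '.tar.gz')
    >>> splitext('foo-1.0.whl')
    ('foo-1.0', '.whl')
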
-
-
-def renames(old, new):
- # type: (str, str) -> None
- """Like os.renames(), but handles renaming across devices."""
- # Implementation borrowed from os.renames().
- head, tail = os.path.split(new)
- if head and tail and not os.path.exists(head):
- os.makedirs(head)
-
- shutil.move(old, new)
-
- head, tail = os.path.split(old)
- if head and tail:
- try:
- os.removedirs(head)
- except OSError:
- pass
-
-
-def is_local(path):
- # type: (str) -> bool
- """
- Return True if path is within sys.prefix when running in a virtualenv.
-
- If we're not in a virtualenv, all paths are considered "local."
-
- """
- if not running_under_virtualenv():
- return True
- return normalize_path(path).startswith(normalize_path(sys.prefix))
-
-
-def dist_is_local(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution object is installed locally
- (i.e. within current virtualenv).
-
- Always True if we're not in a virtualenv.
-
- """
- return is_local(dist_location(dist))
-
-
-def dist_in_usersite(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution is installed in user site.
- """
- norm_path = normalize_path(dist_location(dist))
- return norm_path.startswith(normalize_path(user_site))
-
-
-def dist_in_site_packages(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution is installed in
- sysconfig.get_python_lib().
- """
- return normalize_path(
- dist_location(dist)
- ).startswith(normalize_path(site_packages))
-
-
-def dist_is_editable(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution is an editable install.
- """
- for path_item in sys.path:
- egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
- if os.path.isfile(egg_link):
- return True
- return False
-
-
-def get_installed_distributions(local_only=True,
- skip=stdlib_pkgs,
- include_editables=True,
- editables_only=False,
- user_only=False):
- # type: (bool, Container[str], bool, bool, bool) -> List[Distribution]
- """
- Return a list of installed Distribution objects.
-
- If ``local_only`` is True (default), only return installations
- local to the current virtualenv, if in a virtualenv.
-
- ``skip`` argument is an iterable of lower-case project names to
- ignore; defaults to stdlib_pkgs
-
- If ``include_editables`` is False, don't report editables.
-
- If ``editables_only`` is True, only report editables.
-
- If ``user_only`` is True, only report installations in the user
- site directory.
-
- """
- if local_only:
- local_test = dist_is_local
- else:
- def local_test(d):
- return True
-
- if include_editables:
- def editable_test(d):
- return True
- else:
- def editable_test(d):
- return not dist_is_editable(d)
-
- if editables_only:
- def editables_only_test(d):
- return dist_is_editable(d)
- else:
- def editables_only_test(d):
- return True
-
- if user_only:
- user_test = dist_in_usersite
- else:
- def user_test(d):
- return True
-
- # because of pkg_resources vendoring, mypy cannot find stub in typeshed
- return [d for d in pkg_resources.working_set # type: ignore
- if local_test(d) and
- d.key not in skip and
- editable_test(d) and
- editables_only_test(d) and
- user_test(d)
- ]
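
A minimal usage sketch (assuming this module is importable, i.e. pip 19.x is installed): list every distribution visible to pkg_resources, not just those local to the active virtualenv:

    from pip._internal.utils.misc import get_installed_distributions

    for dist in get_installed_distributions(local_only=False):
        print(dist.project_name, dist.version)
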
-
-
-def egg_link_path(dist):
- # type: (Distribution) -> Optional[str]
- """
- Return the path for the .egg-link file if it exists, otherwise, None.
-
- There are 3 scenarios:
- 1) not in a virtualenv
- try to find in site.USER_SITE, then site_packages
- 2) in a no-global virtualenv
- try to find in site_packages
- 3) in a yes-global virtualenv
- try to find in site_packages, then site.USER_SITE
- (don't look in global location)
-
- For #1 and #3, there could be odd cases where there's an egg-link in
- 2 locations.
-
- This method will just return the first one found.
- """
- sites = []
- if running_under_virtualenv():
- if virtualenv_no_global():
- sites.append(site_packages)
- else:
- sites.append(site_packages)
- if user_site:
- sites.append(user_site)
- else:
- if user_site:
- sites.append(user_site)
- sites.append(site_packages)
-
- for site in sites:
- egglink = os.path.join(site, dist.project_name) + '.egg-link'
- if os.path.isfile(egglink):
- return egglink
- return None
-
-
-def dist_location(dist):
- # type: (Distribution) -> str
- """
- Get the site-packages location of this distribution. Generally
- this is dist.location, except in the case of develop-installed
- packages, where dist.location is the source code location, and we
- want to know where the egg-link file is.
-
- """
- egg_link = egg_link_path(dist)
- if egg_link:
- return egg_link
- return dist.location
-
-
-def current_umask():
- """Get the current umask which involves having to set it temporarily."""
- mask = os.umask(0)
- os.umask(mask)
- return mask
-
-
-def unzip_file(filename, location, flatten=True):
- # type: (str, str, bool) -> None
- """
- Unzip the file (with path `filename`) to the destination `location`. All
- files are written based on system defaults and umask (i.e. permissions are
- not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that on Windows, any execute changes using os.chmod are
- no-ops per the Python docs.
- """
- ensure_dir(location)
- zipfp = open(filename, 'rb')
- try:
- zip = zipfile.ZipFile(zipfp, allowZip64=True)
- leading = has_leading_dir(zip.namelist()) and flatten
- for info in zip.infolist():
- name = info.filename
- fn = name
- if leading:
- fn = split_leading_dir(name)[1]
- fn = os.path.join(location, fn)
- dir = os.path.dirname(fn)
- if fn.endswith('/') or fn.endswith('\\'):
- # A directory
- ensure_dir(fn)
- else:
- ensure_dir(dir)
- # Don't use read() to avoid allocating an arbitrarily large
- # chunk of memory for the file's content
- fp = zip.open(name)
- try:
- with open(fn, 'wb') as destfp:
- shutil.copyfileobj(fp, destfp)
- finally:
- fp.close()
- mode = info.external_attr >> 16
- # does the entry have a mode, is it a regular file, and does it
- # have any execute permission for user/group/world?
- if mode and stat.S_ISREG(mode) and mode & 0o111:
- # make dest file have execute for user/group/world
- # (chmod +x) no-op on windows per python docs
- os.chmod(fn, (0o777 - current_umask() | 0o111))
- finally:
- zipfp.close()
-
-
-def untar_file(filename, location):
- # type: (str, str) -> None
- """
- Untar the file (with path `filename`) to the destination `location`.
- All files are written based on system defaults and umask (i.e. permissions
- are not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that on Windows, any execute changes using os.chmod are
- no-ops per the Python docs.
- """
- ensure_dir(location)
- if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
- mode = 'r:gz'
- elif filename.lower().endswith(BZ2_EXTENSIONS):
- mode = 'r:bz2'
- elif filename.lower().endswith(XZ_EXTENSIONS):
- mode = 'r:xz'
- elif filename.lower().endswith('.tar'):
- mode = 'r'
- else:
- logger.warning(
- 'Cannot determine compression type for file %s', filename,
- )
- mode = 'r:*'
- tar = tarfile.open(filename, mode)
- try:
- leading = has_leading_dir([
- member.name for member in tar.getmembers()
- ])
- for member in tar.getmembers():
- fn = member.name
- if leading:
- # https://github.com/python/mypy/issues/1174
- fn = split_leading_dir(fn)[1] # type: ignore
- path = os.path.join(location, fn)
- if member.isdir():
- ensure_dir(path)
- elif member.issym():
- try:
- # https://github.com/python/typeshed/issues/2673
- tar._extract_member(member, path) # type: ignore
- except Exception as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- 'In the tar file %s the member %s is invalid: %s',
- filename, member.name, exc,
- )
- continue
- else:
- try:
- fp = tar.extractfile(member)
- except (KeyError, AttributeError) as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- 'In the tar file %s the member %s is invalid: %s',
- filename, member.name, exc,
- )
- continue
- ensure_dir(os.path.dirname(path))
- with open(path, 'wb') as destfp:
- shutil.copyfileobj(fp, destfp)
- fp.close()
- # Update the timestamp (useful for cython compiled files)
- # https://github.com/python/typeshed/issues/2673
- tar.utime(member, path) # type: ignore
- # does the member have any execute permissions for user/group/world?
- if member.mode & 0o111:
- # make dest file have execute for user/group/world
- # no-op on windows per python docs
- os.chmod(path, (0o777 - current_umask() | 0o111))
- finally:
- tar.close()
-
-
-def unpack_file(
- filename, # type: str
- location, # type: str
- content_type, # type: Optional[str]
- link # type: Optional[Link]
-):
- # type: (...) -> None
- filename = os.path.realpath(filename)
- if (content_type == 'application/zip' or
- filename.lower().endswith(ZIP_EXTENSIONS) or
- zipfile.is_zipfile(filename)):
- unzip_file(
- filename,
- location,
- flatten=not filename.endswith('.whl')
- )
- elif (content_type == 'application/x-gzip' or
- tarfile.is_tarfile(filename) or
- filename.lower().endswith(
- TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
- untar_file(filename, location)
- elif (content_type and content_type.startswith('text/html') and
- is_svn_page(file_contents(filename))):
- # We don't really care about this
- from pip._internal.vcs.subversion import Subversion
- Subversion('svn+' + link.url).unpack(location)
- else:
- # FIXME: handle?
- # FIXME: magic signatures?
- logger.critical(
- 'Cannot unpack file %s (downloaded from %s, content-type: %s); '
- 'cannot detect archive format',
- filename, location, content_type,
- )
- raise InstallationError(
- 'Cannot determine archive format of %s' % location
- )
-
-
-def call_subprocess(
- cmd, # type: List[str]
- show_stdout=True, # type: bool
- cwd=None, # type: Optional[str]
- on_returncode='raise', # type: str
- extra_ok_returncodes=None, # type: Optional[Iterable[int]]
- command_desc=None, # type: Optional[str]
- extra_environ=None, # type: Optional[Mapping[str, Any]]
- unset_environ=None, # type: Optional[Iterable[str]]
- spinner=None # type: Optional[SpinnerInterface]
-):
- # type: (...) -> Optional[Text]
- """
- Args:
- extra_ok_returncodes: an iterable of integer return codes that are
- acceptable, in addition to 0. Defaults to None, which means [].
- unset_environ: an iterable of environment variable names to unset
- prior to calling subprocess.Popen().
- """
- if extra_ok_returncodes is None:
- extra_ok_returncodes = []
- if unset_environ is None:
- unset_environ = []
- # This function's handling of subprocess output is confusing and I
- # previously broke it terribly, so as penance I will write a long comment
- # explaining things.
- #
- # The obvious thing that affects output is the show_stdout=
- # kwarg. show_stdout=True means, let the subprocess write directly to our
- # stdout. Even though it is nominally the default, it is almost never used
- # inside pip (and should not be used in new code without a very good
- # reason); as of 2016-02-22 it is only used in a few places inside the VCS
- # wrapper code. Ideally we should get rid of it entirely, because it
- # creates a lot of complexity here for a rarely used feature.
- #
- # Most places in pip set show_stdout=False. What this means is:
- # - We connect the child stdout to a pipe, which we read.
- # - By default, we hide the output but show a spinner -- unless the
- # subprocess exits with an error, in which case we show the output.
- # - If the --verbose option was passed (= loglevel is DEBUG), then we show
- # the output unconditionally. (But in this case we don't want to show
- # the output a second time if it turns out that there was an error.)
- #
- # stderr is always merged with stdout (even if show_stdout=True).
- if show_stdout:
- stdout = None
- else:
- stdout = subprocess.PIPE
- if command_desc is None:
- cmd_parts = []
- for part in cmd:
- if ' ' in part or '\n' in part or '"' in part or "'" in part:
- part = '"%s"' % part.replace('"', '\\"')
- cmd_parts.append(part)
- command_desc = ' '.join(cmd_parts)
- logger.debug("Running command %s", command_desc)
- env = os.environ.copy()
- if extra_environ:
- env.update(extra_environ)
- for name in unset_environ:
- env.pop(name, None)
- try:
- proc = subprocess.Popen(
- cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
- stdout=stdout, cwd=cwd, env=env,
- )
- proc.stdin.close()
- except Exception as exc:
- logger.critical(
- "Error %s while executing command %s", exc, command_desc,
- )
- raise
- all_output = []
- if stdout is not None:
- while True:
- line = console_to_str(proc.stdout.readline())
- if not line:
- break
- line = line.rstrip()
- all_output.append(line + '\n')
- if logger.getEffectiveLevel() <= std_logging.DEBUG:
- # Show the line immediately
- logger.debug(line)
- else:
- # Update the spinner
- if spinner is not None:
- spinner.spin()
- try:
- proc.wait()
- finally:
- if proc.stdout:
- proc.stdout.close()
- if spinner is not None:
- if proc.returncode:
- spinner.finish("error")
- else:
- spinner.finish("done")
- if proc.returncode and proc.returncode not in extra_ok_returncodes:
- if on_returncode == 'raise':
- if (logger.getEffectiveLevel() > std_logging.DEBUG and
- not show_stdout):
- logger.info(
- 'Complete output from command %s:', command_desc,
- )
- logger.info(
- ''.join(all_output) +
- '\n----------------------------------------'
- )
- raise InstallationError(
- 'Command "%s" failed with error code %s in %s'
- % (command_desc, proc.returncode, cwd))
- elif on_returncode == 'warn':
- logger.warning(
- 'Command "%s" had error code %s in %s',
- command_desc, proc.returncode, cwd,
- )
- elif on_returncode == 'ignore':
- pass
- else:
- raise ValueError('Invalid value: on_returncode=%s' %
- repr(on_returncode))
- if not show_stdout:
- return ''.join(all_output)
- return None
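
A typical captured invocation looks like the sketch below (illustrative; with show_stdout=False the merged stdout/stderr is returned, and only echoed on failure or at DEBUG level):

    import sys

    output = call_subprocess(
        [sys.executable, '-c', 'print("hello")'],
        show_stdout=False,
    )
    # output == 'hello\n' (lines are rstripped, then '\n' re-appended)
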
-
-
-def read_text_file(filename):
- # type: (str) -> str
- """Return the contents of *filename*.
-
- Try to decode the file contents with utf-8, the preferred system encoding
- (e.g., cp1252 on some Windows machines), and latin1, in that order.
- Decoding a byte string with latin1 will never raise an error. In the worst
- case, the returned string will contain some garbage characters.
-
- """
- with open(filename, 'rb') as fp:
- data = fp.read()
-
- encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
- for enc in encodings:
- try:
- # https://github.com/python/mypy/issues/1174
- data = data.decode(enc) # type: ignore
- except UnicodeDecodeError:
- continue
- break
-
- assert not isinstance(data, bytes) # Latin1 should have worked.
- return data
-
-
-def _make_build_dir(build_dir):
- os.makedirs(build_dir)
- write_delete_marker_file(build_dir)
-
-
-class FakeFile(object):
- """Wrap a list of lines in an object with readline() to make
- ConfigParser happy."""
- def __init__(self, lines):
- self._gen = (l for l in lines)
-
- def readline(self):
- try:
- try:
- return next(self._gen)
- except NameError:
- return self._gen.next()
- except StopIteration:
- return ''
-
- def __iter__(self):
- return self._gen
-
-
-class StreamWrapper(StringIO):
-
- @classmethod
- def from_stream(cls, orig_stream):
- cls.orig_stream = orig_stream
- return cls()
-
- # compileall.compile_dir() needs stdout.encoding to print to stdout
- @property
- def encoding(self):
- return self.orig_stream.encoding
-
-
-@contextlib.contextmanager
-def captured_output(stream_name):
- """Return a context manager used by captured_stdout/stdin/stderr
- that temporarily replaces the sys stream *stream_name* with a StringIO.
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- orig_stdout = getattr(sys, stream_name)
- setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
- try:
- yield getattr(sys, stream_name)
- finally:
- setattr(sys, stream_name, orig_stdout)
-
-
-def captured_stdout():
- """Capture the output of sys.stdout:
-
- with captured_stdout() as stdout:
- print('hello')
- self.assertEqual(stdout.getvalue(), 'hello\n')
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- return captured_output('stdout')
-
-
-def captured_stderr():
- """
- See captured_stdout().
- """
- return captured_output('stderr')
-
-
-class cached_property(object):
- """A property that is only computed once per instance and then replaces
- itself with an ordinary attribute. Deleting the attribute resets the
- property.
-
- Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
- """
-
- def __init__(self, func):
- self.__doc__ = getattr(func, '__doc__')
- self.func = func
-
- def __get__(self, obj, cls):
- if obj is None:
- # We're being accessed from the class itself, not from an object
- return self
- value = obj.__dict__[self.func.__name__] = self.func(obj)
- return value
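
A usage sketch with a hypothetical class: the first access computes and stores the value in the instance __dict__, which thereafter shadows the descriptor; deleting the attribute re-arms it:

    class Circle(object):
        def __init__(self, radius):
            self.radius = radius

        @cached_property
        def area(self):
            print('computing...')        # runs only on first access
            return 3.14159 * self.radius ** 2

    c = Circle(2)
    c.area        # prints 'computing...' and returns 12.56636
    c.area        # served from c.__dict__, no recomputation
    del c.area    # resets; the next access recomputes
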
-
-
-def get_installed_version(dist_name, working_set=None):
- """Get the installed version of dist_name avoiding pkg_resources cache"""
- # Create a requirement that we'll look for inside of setuptools.
- req = pkg_resources.Requirement.parse(dist_name)
-
- if working_set is None:
- # We want to avoid having this cached, so we need to construct a new
- # working set each time.
- working_set = pkg_resources.WorkingSet()
-
- # Get the installed distribution from our working set
- dist = working_set.find(req)
-
- # Check to see if we got an installed distribution or not; if we did,
- # we want to return its version.
- return dist.version if dist else None
-
-
-def consume(iterator):
- """Consume an iterable at C speed."""
- deque(iterator, maxlen=0)
-
-
-# Simulates an enum
-def enum(*sequential, **named):
- enums = dict(zip(sequential, range(len(sequential))), **named)
- reverse = {value: key for key, value in enums.items()}
- enums['reverse_mapping'] = reverse
- return type('Enum', (), enums)
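
Positional names are numbered from zero and keyword names keep their given values; reverse_mapping goes from value back to name:

    >>> Color = enum('RED', 'GREEN', BLUE=10)
    >>> (Color.RED, Color.GREEN, Color.BLUE)
    (0, 1, 10)
    >>> Color.reverse_mapping[10]
    'BLUE'
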
-
-
-def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
- """
- Return the URL for a VCS requirement.
-
- Args:
- repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
- project_name: the (unescaped) project name.
- """
- egg_project_name = pkg_resources.to_filename(project_name)
- req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name)
- if subdir:
- req += '&subdirectory={}'.format(subdir)
-
- return req
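
pkg_resources.to_filename() replaces dashes with underscores in the egg fragment. For example:

    >>> make_vcs_requirement_url(
    ...     'git+https://github.com/pypa/pip.git', 'abc123', 'pip')
    'git+https://github.com/pypa/pip.git@abc123#egg=pip'
    >>> make_vcs_requirement_url(
    ...     'git+https://example.com/repo.git', 'v1.0', 'my-pkg', subdir='src')
    'git+https://example.com/repo.git@v1.0#egg=my_pkg&subdirectory=src'
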
-
-
-def split_auth_from_netloc(netloc):
- """
- Parse out and remove the auth information from a netloc.
-
- Returns: (netloc, (username, password)).
- """
- if '@' not in netloc:
- return netloc, (None, None)
-
- # Split from the right because that's how urllib.parse.urlsplit()
- # behaves if more than one @ is present (which can be checked using
- # the password attribute of urlsplit()'s return value).
- auth, netloc = netloc.rsplit('@', 1)
- if ':' in auth:
- # Split from the left because that's how urllib.parse.urlsplit()
- # behaves if more than one : is present (which again can be checked
- # using the password attribute of the return value)
- user_pass = auth.split(':', 1)
- else:
- user_pass = auth, None
-
- user_pass = tuple(
- None if x is None else urllib_unquote(x) for x in user_pass
- )
-
- return netloc, user_pass
-
-
-def redact_netloc(netloc):
- # type: (str) -> str
- """
- Replace the password in a netloc with "****", if it exists.
-
- For example, "user:pass@example.com" returns "user:****@example.com".
- """
- netloc, (user, password) = split_auth_from_netloc(netloc)
- if user is None:
- return netloc
- password = '' if password is None else ':****'
- return '{user}{password}@{netloc}'.format(user=urllib_parse.quote(user),
- password=password,
- netloc=netloc)
-
-
-def _transform_url(url, transform_netloc):
- purl = urllib_parse.urlsplit(url)
- netloc = transform_netloc(purl.netloc)
- # stripped url
- url_pieces = (
- purl.scheme, netloc, purl.path, purl.query, purl.fragment
- )
- surl = urllib_parse.urlunsplit(url_pieces)
- return surl
-
-
-def _get_netloc(netloc):
- return split_auth_from_netloc(netloc)[0]
-
-
-def remove_auth_from_url(url):
- # type: (str) -> str
- # Return a copy of url with 'username:password@' removed.
- # username/pass params are passed to subversion through flags
- # and are not recognized in the url.
- return _transform_url(url, _get_netloc)
-
-
-def redact_password_from_url(url):
- # type: (str) -> str
- """Replace the password in a given url with ****."""
- return _transform_url(url, redact_netloc)
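
Taken together, the three URL helpers behave like this (the credentials are made up):

    >>> split_auth_from_netloc('user:secret@example.com')
    ('example.com', ('user', 'secret'))
    >>> redact_password_from_url('https://user:secret@example.com/simple/')
    'https://user:****@example.com/simple/'
    >>> remove_auth_from_url('https://user:secret@example.com/simple/')
    'https://example.com/simple/'
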
-
-
-def protect_pip_from_modification_on_windows(modifying_pip):
- """Protection of pip.exe from modification on Windows
-
- On Windows, any operation modifying pip should be run as:
- python -m pip ...
- """
- pip_names = [
- "pip.exe",
- "pip{}.exe".format(sys.version_info[0]),
- "pip{}.{}.exe".format(*sys.version_info[:2])
- ]
-
- # See https://github.com/pypa/pip/issues/1299 for more discussion
- should_show_use_python_msg = (
- modifying_pip and
- WINDOWS and
- os.path.basename(sys.argv[0]) in pip_names
- )
-
- if should_show_use_python_msg:
- new_command = [
- sys.executable, "-m", "pip"
- ] + sys.argv[1:]
- raise CommandError(
- 'To modify pip, please run the following command:\n{}'
- .format(" ".join(new_command))
- )
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py
deleted file mode 100644
index d5cb80a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Utilities for defining models
-"""
-
-import operator
-
-
-class KeyBasedCompareMixin(object):
- """Provides comparision capabilities that is based on a key
- """
-
- def __init__(self, key, defining_class):
- self._compare_key = key
- self._defining_class = defining_class
-
- def __hash__(self):
- return hash(self._compare_key)
-
- def __lt__(self, other):
- return self._compare(other, operator.__lt__)
-
- def __le__(self, other):
- return self._compare(other, operator.__le__)
-
- def __gt__(self, other):
- return self._compare(other, operator.__gt__)
-
- def __ge__(self, other):
- return self._compare(other, operator.__ge__)
-
- def __eq__(self, other):
- return self._compare(other, operator.__eq__)
-
- def __ne__(self, other):
- return self._compare(other, operator.__ne__)
-
- def _compare(self, other, method):
- if not isinstance(other, self._defining_class):
- return NotImplemented
-
- return method(self._compare_key, other._compare_key)
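
A usage sketch with a hypothetical subclass: comparisons delegate to the key, and any other operand type yields NotImplemented (so == falls back to False):

    class Version(KeyBasedCompareMixin):
        def __init__(self, major, minor):
            super(Version, self).__init__(
                key=(major, minor), defining_class=Version)

    Version(1, 9) < Version(1, 10)   # True: tuple keys compare element-wise
    Version(1, 0) == '1.0'           # False: non-Version operand
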
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py
deleted file mode 100644
index 37c47a4..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py
+++ /dev/null
@@ -1,164 +0,0 @@
-from __future__ import absolute_import
-
-import datetime
-import json
-import logging
-import os.path
-import sys
-
-from pip._vendor import lockfile, pkg_resources
-from pip._vendor.packaging import version as packaging_version
-
-from pip._internal.index import PackageFinder
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import ensure_dir, get_installed_version
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- import optparse # noqa: F401
- from typing import Any, Dict # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
-
-
-SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
-
-
-logger = logging.getLogger(__name__)
-
-
-class SelfCheckState(object):
- def __init__(self, cache_dir):
- # type: (str) -> None
- self.state = {} # type: Dict[str, Any]
- self.statefile_path = None
-
- # Try to load the existing state
- if cache_dir:
- self.statefile_path = os.path.join(cache_dir, "selfcheck.json")
- try:
- with open(self.statefile_path) as statefile:
- self.state = json.load(statefile)[sys.prefix]
- except (IOError, ValueError, KeyError):
- # Explicitly suppressing exceptions, since we don't want to
- # error out if the cache file is invalid.
- pass
-
- def save(self, pypi_version, current_time):
- # type: (str, datetime.datetime) -> None
- # If we do not have a path to cache in, don't bother saving.
- if not self.statefile_path:
- return
-
- # Check to make sure that we own the directory
- if not check_path_owner(os.path.dirname(self.statefile_path)):
- return
-
- # Now that we've ensured the directory is owned by this user, we'll go
- # ahead and make sure that all our directories are created.
- ensure_dir(os.path.dirname(self.statefile_path))
-
- # Attempt to write out our version check file
- with lockfile.LockFile(self.statefile_path):
- if os.path.exists(self.statefile_path):
- with open(self.statefile_path) as statefile:
- state = json.load(statefile)
- else:
- state = {}
-
- state[sys.prefix] = {
- "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
- "pypi_version": pypi_version,
- }
-
- with open(self.statefile_path, "w") as statefile:
- json.dump(state, statefile, sort_keys=True,
- separators=(",", ":"))
-
-
-def was_installed_by_pip(pkg):
- # type: (str) -> bool
- """Checks whether pkg was installed by pip
-
- This is used to avoid displaying the upgrade message when pip was in
- fact installed by a system package manager, such as dnf on Fedora.
- """
- try:
- dist = pkg_resources.get_distribution(pkg)
- return (dist.has_metadata('INSTALLER') and
- 'pip' in dist.get_metadata_lines('INSTALLER'))
- except pkg_resources.DistributionNotFound:
- return False
-
-
-def pip_version_check(session, options):
- # type: (PipSession, optparse.Values) -> None
- """Check for an update for pip.
-
- Limit the frequency of checks to once per week. State is stored either in
- the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
- of the pip script path.
- """
- installed_version = get_installed_version("pip")
- if not installed_version:
- return
-
- pip_version = packaging_version.parse(installed_version)
- pypi_version = None
-
- try:
- state = SelfCheckState(cache_dir=options.cache_dir)
-
- current_time = datetime.datetime.utcnow()
- # Determine if we need to refresh the state
- if "last_check" in state.state and "pypi_version" in state.state:
- last_check = datetime.datetime.strptime(
- state.state["last_check"],
- SELFCHECK_DATE_FMT
- )
- if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
- pypi_version = state.state["pypi_version"]
-
- # Refresh the version if we need to or just see if we need to warn
- if pypi_version is None:
- # Let's use PackageFinder to see what the latest pip version is
- finder = PackageFinder(
- find_links=options.find_links,
- index_urls=[options.index_url] + options.extra_index_urls,
- allow_all_prereleases=False, # Explicitly set to False
- trusted_hosts=options.trusted_hosts,
- session=session,
- )
- all_candidates = finder.find_all_candidates("pip")
- if not all_candidates:
- return
- pypi_version = str(
- max(all_candidates, key=lambda c: c.version).version
- )
-
- # save that we've performed a check
- state.save(pypi_version, current_time)
-
- remote_version = packaging_version.parse(pypi_version)
-
- # Determine if our pypi_version is older
- if (pip_version < remote_version and
- pip_version.base_version != remote_version.base_version and
- was_installed_by_pip('pip')):
- # Advise "python -m pip" on Windows to avoid issues
- # with overwriting pip.exe.
- if WINDOWS:
- pip_cmd = "python -m pip"
- else:
- pip_cmd = "pip"
- logger.warning(
- "You are using pip version %s, however version %s is "
- "available.\nYou should consider upgrading via the "
- "'%s install --upgrade pip' command.",
- pip_version, pypi_version, pip_cmd
- )
- except Exception:
- logger.debug(
- "There was an error checking the latest version of pip",
- exc_info=True,
- )
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py
deleted file mode 100644
index 7aaf7b5..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import sys
-from email.parser import FeedParser
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging import specifiers, version
-
-from pip._internal import exceptions
-from pip._internal.utils.misc import display_path
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional # noqa: F401
- from email.message import Message # noqa: F401
- from pip._vendor.pkg_resources import Distribution # noqa: F401
-
-
-logger = logging.getLogger(__name__)
-
-
-def check_requires_python(requires_python):
- # type: (Optional[str]) -> bool
- """
- Check if the python version in use matches the `requires_python` specifier.
-
- Returns `True` if the version of python in use matches the requirement.
- Returns `False` if the version of python in use does not match the
- requirement.
-
- Raises an InvalidSpecifier if `requires_python` has an invalid format.
- """
- if requires_python is None:
- # The package provides no information
- return True
- requires_python_specifier = specifiers.SpecifierSet(requires_python)
-
- # We only use major.minor.micro
- python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
- return python_version in requires_python_specifier
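
For example, on a Python 3.7 interpreter (results depend on the running interpreter; an invalid specifier string raises InvalidSpecifier rather than returning False):

    >>> check_requires_python('>=3.6')
    True
    >>> check_requires_python('>=4.0')
    False
    >>> check_requires_python(None)   # package declared nothing
    True
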
-
-
-def get_metadata(dist):
- # type: (Distribution) -> Message
- if (isinstance(dist, pkg_resources.DistInfoDistribution) and
- dist.has_metadata('METADATA')):
- metadata = dist.get_metadata('METADATA')
- elif dist.has_metadata('PKG-INFO'):
- metadata = dist.get_metadata('PKG-INFO')
- else:
- logger.warning("No metadata found in %s", display_path(dist.location))
- metadata = ''
-
- feed_parser = FeedParser()
- feed_parser.feed(metadata)
- return feed_parser.close()
-
-
-def check_dist_requires_python(dist):
- pkg_info_dict = get_metadata(dist)
- requires_python = pkg_info_dict.get('Requires-Python')
- try:
- if not check_requires_python(requires_python):
- raise exceptions.UnsupportedPythonVersion(
- "%s requires Python '%s' but the running Python is %s" % (
- dist.project_name,
- requires_python,
- '.'.join(map(str, sys.version_info[:3])),)
- )
- except specifiers.InvalidSpecifier as e:
- logger.warning(
- "Package %s has an invalid Requires-Python entry %s - %s",
- dist.project_name, requires_python, e,
- )
- return
-
-
-def get_installer(dist):
- # type: (Distribution) -> str
- if dist.has_metadata('INSTALLER'):
- for line in dist.get_metadata_lines('INSTALLER'):
- if line.strip():
- return line.strip()
- return ''
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py
deleted file mode 100644
index 03973e9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# Shim to wrap setup.py invocation with setuptools
-SETUPTOOLS_SHIM = (
- "import setuptools, tokenize;__file__=%r;"
- "f=getattr(tokenize, 'open', open)(__file__);"
- "code=f.read().replace('\\r\\n', '\\n');"
- "f.close();"
- "exec(compile(code, __file__, 'exec'))"
-)
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py
deleted file mode 100644
index 2c81ad5..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py
+++ /dev/null
@@ -1,155 +0,0 @@
-from __future__ import absolute_import
-
-import errno
-import itertools
-import logging
-import os.path
-import tempfile
-
-from pip._internal.utils.misc import rmtree
-
-logger = logging.getLogger(__name__)
-
-
-class TempDirectory(object):
- """Helper class that owns and cleans up a temporary directory.
-
- This class can be used as a context manager or as an OO representation of a
- temporary directory.
-
- Attributes:
- path
- Location to the created temporary directory or None
- delete
- Whether the directory should be deleted when exiting
- (when used as a contextmanager)
-
- Methods:
- create()
- Creates a temporary directory and stores its path in the path
- attribute.
- cleanup()
- Deletes the temporary directory and sets path attribute to None
-
- When used as a context manager, a temporary directory is created on
- entering the context and, if the delete attribute is True, on exiting the
- context the created directory is deleted.
- """
-
- def __init__(self, path=None, delete=None, kind="temp"):
- super(TempDirectory, self).__init__()
-
- if path is None and delete is None:
- # If we were not given an explicit directory, and we were not given
- # an explicit delete option, then we'll default to deleting.
- delete = True
-
- self.path = path
- self.delete = delete
- self.kind = kind
-
- def __repr__(self):
- return "<{} {!r}>".format(self.__class__.__name__, self.path)
-
- def __enter__(self):
- self.create()
- return self
-
- def __exit__(self, exc, value, tb):
- if self.delete:
- self.cleanup()
-
- def create(self):
- """Create a temporary directory and store its path in self.path
- """
- if self.path is not None:
- logger.debug(
- "Skipped creation of temporary directory: {}".format(self.path)
- )
- return
- # We realpath here because some systems have their default tmpdir
- # symlinked to another directory. This tends to confuse build
- # scripts, so we canonicalize the path by traversing potential
- # symlinks here.
- self.path = os.path.realpath(
- tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
- )
- logger.debug("Created temporary directory: {}".format(self.path))
-
- def cleanup(self):
- """Remove the temporary directory created and reset state
- """
- if self.path is not None and os.path.exists(self.path):
- rmtree(self.path)
- self.path = None
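
Typical context-manager use, where delete defaults to True because neither path nor delete was given:

    with TempDirectory(kind='unpack') as tmp:
        print(tmp.path)   # e.g. /tmp/pip-unpack-XXXXXX (realpath'd)
    # the directory has been removed on exit
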
-
-
-class AdjacentTempDirectory(TempDirectory):
- """Helper class that creates a temporary directory adjacent to a real one.
-
- Attributes:
- original
- The original directory to create a temp directory for.
- path
- After calling create() or entering, contains the full
- path to the temporary directory.
- delete
- Whether the directory should be deleted when exiting
- (when used as a contextmanager)
-
- """
- # The characters that may be used to name the temp directory
- # We always prepend a ~ and then rotate through these until
- # a usable name is found.
- # pkg_resources raises a different error for .dist-info folder
- # with leading '-' and invalid metadata
- LEADING_CHARS = "-~.=%0123456789"
-
- def __init__(self, original, delete=None):
- super(AdjacentTempDirectory, self).__init__(delete=delete)
- self.original = original.rstrip('/\\')
-
- @classmethod
- def _generate_names(cls, name):
- """Generates a series of temporary names.
-
- The algorithm replaces the leading characters in the name
- with ones that are valid filesystem characters, but are not
- valid package names (for both Python and pip definitions of
- package).
- """
- for i in range(1, len(name)):
- for candidate in itertools.combinations_with_replacement(
- cls.LEADING_CHARS, i - 1):
- new_name = '~' + ''.join(candidate) + name[i:]
- if new_name != name:
- yield new_name
-
- # If we make it this far, we will have to make a longer name
- for i in range(len(cls.LEADING_CHARS)):
- for candidate in itertools.combinations_with_replacement(
- cls.LEADING_CHARS, i):
- new_name = '~' + ''.join(candidate) + name
- if new_name != name:
- yield new_name
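
The first candidates progressively replace the leading characters of the name, always starting with '~'. For example:

    >>> import itertools
    >>> list(itertools.islice(
    ...     AdjacentTempDirectory._generate_names('foo'), 4))
    ['~oo', '~-o', '~~o', '~.o']
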
-
- def create(self):
- root, name = os.path.split(self.original)
- for candidate in self._generate_names(name):
- path = os.path.join(root, candidate)
- try:
- os.mkdir(path)
- except OSError as ex:
- # Continue if the name exists already
- if ex.errno != errno.EEXIST:
- raise
- else:
- self.path = os.path.realpath(path)
- break
-
- if not self.path:
- # Final fallback on the default behavior.
- self.path = os.path.realpath(
- tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
- )
- logger.debug("Created temporary directory: {}".format(self.path))
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py
deleted file mode 100644
index e085cdf..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""For neatly implementing static typing in pip.
-
-`mypy` - the static type analysis tool we use - uses the `typing` module, which
-provides core functionality fundamental to mypy's functioning.
-
-Generally, `typing` would be imported at runtime and used in that fashion -
-it acts as a no-op at runtime and does not have any run-time overhead by
-design.
-
-As it turns out, `typing` is not vendorable - it uses separate sources for
-Python 2/Python 3. Thus, this codebase can not expect it to be present.
-To work around this, mypy allows the typing import to be behind a False-y
-optional to prevent it from running at runtime and type-comments can be used
-to remove the need for the types to be accessible directly during runtime.
-
-This module provides the False-y guard in a nicely named fashion so that a
-curious maintainer can reach here to read this.
-
-In pip, all static-typing related imports should be guarded as follows:
-
- from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
- if MYPY_CHECK_RUNNING:
- from typing import ... # noqa: F401
-
-Ref: https://github.com/python/mypy/issues/3216
-"""
-
-MYPY_CHECK_RUNNING = False
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py
deleted file mode 100644
index 433675d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py
+++ /dev/null
@@ -1,441 +0,0 @@
-from __future__ import absolute_import, division
-
-import contextlib
-import itertools
-import logging
-import sys
-import time
-from signal import SIGINT, default_int_handler, signal
-
-from pip._vendor import six
-from pip._vendor.progress.bar import (
- Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
- ShadyBar,
-)
-from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
-from pip._vendor.progress.spinner import Spinner
-
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.logging import get_indentation
-from pip._internal.utils.misc import format_size
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Iterator, IO # noqa: F401
-
-try:
- from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
- colorama = None
-
-logger = logging.getLogger(__name__)
-
-
-def _select_progress_class(preferred, fallback):
- encoding = getattr(preferred.file, "encoding", None)
-
- # If we don't know what encoding this file is in, then we'll just assume
- # that it doesn't support unicode and use the ASCII bar.
- if not encoding:
- return fallback
-
- # Collect all of the possible characters we want to use with the preferred
- # bar.
- characters = [
- getattr(preferred, "empty_fill", six.text_type()),
- getattr(preferred, "fill", six.text_type()),
- ]
- characters += list(getattr(preferred, "phases", []))
-
- # Try to decode the characters we're using for the bar using the encoding
- # of the given file, if this works then we'll assume that we can use the
- # fancier bar and if not we'll fall back to the plaintext bar.
- try:
- six.text_type().join(characters).encode(encoding)
- except UnicodeEncodeError:
- return fallback
- else:
- return preferred
-
-
-_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
-
-
-class InterruptibleMixin(object):
- """
- Helper to ensure that self.finish() gets called on keyboard interrupt.
-
- This allows downloads to be interrupted without leaving temporary state
- (like hidden cursors) behind.
-
- This class is similar to the progress library's existing SigIntMixin
- helper, but as of version 1.2, that helper has the following problems:
-
- 1. It calls sys.exit().
- 2. It discards the existing SIGINT handler completely.
- 3. It leaves its own handler in place even after an uninterrupted finish,
- which will have unexpected delayed effects if the user triggers an
- unrelated keyboard interrupt some time after a progress-displaying
- download has already completed, for example.
- """
-
- def __init__(self, *args, **kwargs):
- """
- Save the original SIGINT handler for later.
- """
- super(InterruptibleMixin, self).__init__(*args, **kwargs)
-
- self.original_handler = signal(SIGINT, self.handle_sigint)
-
- # If signal() returns None, the previous handler was not installed from
- # Python, and we cannot restore it. This probably should not happen,
- # but if it does, we must restore something sensible instead, at least.
- # The least bad option should be Python's default SIGINT handler, which
- # just raises KeyboardInterrupt.
- if self.original_handler is None:
- self.original_handler = default_int_handler
-
- def finish(self):
- """
- Restore the original SIGINT handler after finishing.
-
- This should happen regardless of whether the progress display finishes
- normally, or gets interrupted.
- """
- super(InterruptibleMixin, self).finish()
- signal(SIGINT, self.original_handler)
-
- def handle_sigint(self, signum, frame):
- """
- Call self.finish() before delegating to the original SIGINT handler.
-
- This handler should only be in place while the progress display is
- active.
- """
- self.finish()
- self.original_handler(signum, frame)
-
-
-class SilentBar(Bar):
-
- def update(self):
- pass
-
-
-class BlueEmojiBar(IncrementalBar):
-
- suffix = "%(percent)d%%"
- bar_prefix = " "
- bar_suffix = " "
- phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
-
-
-class DownloadProgressMixin(object):
-
- def __init__(self, *args, **kwargs):
- super(DownloadProgressMixin, self).__init__(*args, **kwargs)
- self.message = (" " * (get_indentation() + 2)) + self.message
-
- @property
- def downloaded(self):
- return format_size(self.index)
-
- @property
- def download_speed(self):
- # Avoid zero division errors...
- if self.avg == 0.0:
- return "..."
- return format_size(1 / self.avg) + "/s"
-
- @property
- def pretty_eta(self):
- if self.eta:
- return "eta %s" % self.eta_td
- return ""
-
- def iter(self, it, n=1):
- for x in it:
- yield x
- self.next(n)
- self.finish()
-
-
-class WindowsMixin(object):
-
- def __init__(self, *args, **kwargs):
- # The Windows terminal does not support the hide/show cursor ANSI codes
- # even with colorama. So we'll ensure that hide_cursor is False on
- # Windows.
- # This call needs to go before the super() call, so that hide_cursor
- # is set in time. The base progress bar class writes the "hide cursor"
- # code to the terminal in its init, so if we don't set this soon
- # enough, we get a "hide" with no corresponding "show"...
- if WINDOWS and self.hide_cursor:
- self.hide_cursor = False
-
- super(WindowsMixin, self).__init__(*args, **kwargs)
-
- # Check if we are running on Windows and we have the colorama module,
- # if we do then wrap our file with it.
- if WINDOWS and colorama:
- self.file = colorama.AnsiToWin32(self.file)
- # The progress code expects to be able to call self.file.isatty()
- # but the colorama.AnsiToWin32() object doesn't have that, so we'll
- # add it.
- self.file.isatty = lambda: self.file.wrapped.isatty()
- # The progress code expects to be able to call self.file.flush()
- # but the colorama.AnsiToWin32() object doesn't have that, so we'll
- # add it.
- self.file.flush = lambda: self.file.wrapped.flush()
-
-
-class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
- DownloadProgressMixin):
-
- file = sys.stdout
- message = "%(percent)d%%"
- suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
-
-# NOTE: The "type: ignore" comments on the following classes are there to
-# work around https://github.com/python/typing/issues/241
-
-
-class DefaultDownloadProgressBar(BaseDownloadProgressBar,
- _BaseBar):
- pass
-
-
-class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
- pass
-
-
-class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore
- IncrementalBar):
- pass
-
-
-class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore
- ChargingBar):
- pass
-
-
-class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore
- pass
-
-
-class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore
- FillingSquaresBar):
- pass
-
-
-class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
- FillingCirclesBar):
- pass
-
-
-class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
- BlueEmojiBar):
- pass
-
-
-class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
- DownloadProgressMixin, WritelnMixin, Spinner):
-
- file = sys.stdout
- suffix = "%(downloaded)s %(download_speed)s"
-
- def next_phase(self):
- if not hasattr(self, "_phaser"):
- self._phaser = itertools.cycle(self.phases)
- return next(self._phaser)
-
- def update(self):
- message = self.message % self
- phase = self.next_phase()
- suffix = self.suffix % self
- line = ''.join([
- message,
- " " if message else "",
- phase,
- " " if suffix else "",
- suffix,
- ])
-
- self.writeln(line)
-
-
-BAR_TYPES = {
- "off": (DownloadSilentBar, DownloadSilentBar),
- "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
- "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
- "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
- "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
-}
-
-
-def DownloadProgressProvider(progress_bar, max=None):
- if max is None or max == 0:
- return BAR_TYPES[progress_bar][1]().iter
- else:
- return BAR_TYPES[progress_bar][0](max=max).iter
-
-
-################################################################
-# Generic "something is happening" spinners
-#
-# We don't even try using progress.spinner.Spinner here because it's actually
-# simpler to reimplement from scratch than to coerce their code into doing
-# what we need.
-################################################################
-
-@contextlib.contextmanager
-def hidden_cursor(file):
- # type: (IO) -> Iterator[None]
- # The Windows terminal does not support the hide/show cursor ANSI codes,
- # even via colorama. So don't even try.
- if WINDOWS:
- yield
- # We don't want to clutter the output with control characters if we're
- # writing to a file, or if the user is running with --quiet.
- # See https://github.com/pypa/pip/issues/3418
- elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
- yield
- else:
- file.write(HIDE_CURSOR)
- try:
- yield
- finally:
- file.write(SHOW_CURSOR)
-
-
-class RateLimiter(object):
- def __init__(self, min_update_interval_seconds):
- # type: (float) -> None
- self._min_update_interval_seconds = min_update_interval_seconds
- self._last_update = 0 # type: float
-
- def ready(self):
- # type: () -> bool
- now = time.time()
- delta = now - self._last_update
- return delta >= self._min_update_interval_seconds
-
- def reset(self):
- # type: () -> None
- self._last_update = time.time()
-
-
-class SpinnerInterface(object):
- def spin(self):
- # type: () -> None
- raise NotImplementedError()
-
- def finish(self, final_status):
- # type: (str) -> None
- raise NotImplementedError()
-
-
-class InteractiveSpinner(SpinnerInterface):
- def __init__(self, message, file=None, spin_chars="-\\|/",
- # Empirically, 8 updates/second looks nice
- min_update_interval_seconds=0.125):
- self._message = message
- if file is None:
- file = sys.stdout
- self._file = file
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._finished = False
-
- self._spin_cycle = itertools.cycle(spin_chars)
-
- self._file.write(" " * get_indentation() + self._message + " ... ")
- self._width = 0
-
- def _write(self, status):
- assert not self._finished
- # Erase what we wrote before by backspacing to the beginning, writing
- # spaces to overwrite the old text, and then backspacing again
- backup = "\b" * self._width
- self._file.write(backup + " " * self._width + backup)
- # Now we have a blank slate to add our status
- self._file.write(status)
- self._width = len(status)
- self._file.flush()
- self._rate_limiter.reset()
-
- def spin(self):
- # type: () -> None
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._write(next(self._spin_cycle))
-
- def finish(self, final_status):
- # type: (str) -> None
- if self._finished:
- return
- self._write(final_status)
- self._file.write("\n")
- self._file.flush()
- self._finished = True
-
-
-# Used for dumb terminals, non-interactive installs (no tty), etc.
-# We still print updates occasionally (once every 60 seconds by default) to
-# act as a keep-alive for systems like Travis-CI that take lack-of-output as
-# an indication that a task has frozen.
-class NonInteractiveSpinner(SpinnerInterface):
- def __init__(self, message, min_update_interval_seconds=60):
- # type: (str, float) -> None
- self._message = message
- self._finished = False
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._update("started")
-
- def _update(self, status):
- assert not self._finished
- self._rate_limiter.reset()
- logger.info("%s: %s", self._message, status)
-
- def spin(self):
- # type: () -> None
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._update("still running...")
-
- def finish(self, final_status):
- # type: (str) -> None
- if self._finished:
- return
- self._update("finished with status '%s'" % (final_status,))
- self._finished = True
-
-
-@contextlib.contextmanager
-def open_spinner(message):
- # type: (str) -> Iterator[SpinnerInterface]
- # Interactive spinner goes directly to sys.stdout rather than being routed
- # through the logging system, but it acts like it has level INFO,
- # i.e. it's only displayed if we're at level INFO or better.
- # Non-interactive spinner goes through the logging system, so it is always
- # in sync with logging configuration.
- if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
- spinner = InteractiveSpinner(message) # type: SpinnerInterface
- else:
- spinner = NonInteractiveSpinner(message)
- try:
- with hidden_cursor(sys.stdout):
- yield spinner
- except KeyboardInterrupt:
- spinner.finish("canceled")
- raise
- except Exception:
- spinner.finish("error")
- raise
- else:
- spinner.finish("done")
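A sketch of the intended calling pattern (the work loop is hypothetical): the
context manager picks the interactive or logging-based spinner, and the caller
only needs to call spin() periodically:

    import time

    with open_spinner("Doing slow work") as spinner:
        for _ in range(5):        # stand-in for a long-running task
            time.sleep(0.2)
            spinner.spin()        # rate-limited, so it is cheap to call often
    # On normal exit the context manager calls spinner.finish("done");
    # KeyboardInterrupt yields "canceled", any other exception "error".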
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__init__.py
deleted file mode 100644
index 9cba764..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__init__.py
+++ /dev/null
@@ -1,534 +0,0 @@
-"""Handles all VCS (version control) support"""
-from __future__ import absolute_import
-
-import errno
-import logging
-import os
-import shutil
-import sys
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.exceptions import BadCommand
-from pip._internal.utils.misc import (
- display_path, backup_dir, call_subprocess, rmtree, ask_path_exists,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type
- )
- from pip._internal.utils.ui import SpinnerInterface # noqa: F401
-
- AuthInfo = Tuple[Optional[str], Optional[str]]
-
-__all__ = ['vcs']
-
-
-logger = logging.getLogger(__name__)
-
-
-class RemoteNotFoundError(Exception):
- pass
-
-
-class RevOptions(object):
-
- """
- Encapsulates a VCS-specific revision to install, along with any VCS
- install options.
-
- Instances of this class should be treated as if immutable.
- """
-
- def __init__(self, vcs, rev=None, extra_args=None):
- # type: (VersionControl, Optional[str], Optional[List[str]]) -> None
- """
- Args:
- vcs: a VersionControl object.
- rev: the name of the revision to install.
- extra_args: a list of extra options.
- """
- if extra_args is None:
- extra_args = []
-
- self.extra_args = extra_args
- self.rev = rev
- self.vcs = vcs
-
- def __repr__(self):
- return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev)
-
- @property
- def arg_rev(self):
- # type: () -> Optional[str]
- if self.rev is None:
- return self.vcs.default_arg_rev
-
- return self.rev
-
- def to_args(self):
- # type: () -> List[str]
- """
- Return the VCS-specific command arguments.
- """
- args = [] # type: List[str]
- rev = self.arg_rev
- if rev is not None:
- args += self.vcs.get_base_rev_args(rev)
- args += self.extra_args
-
- return args
-
- def to_display(self):
- # type: () -> str
- if not self.rev:
- return ''
-
- return ' (to revision {})'.format(self.rev)
-
- def make_new(self, rev):
- # type: (str) -> RevOptions
- """
- Make a copy of the current instance, but with a new rev.
-
- Args:
- rev: the name of the revision for the new object.
- """
- return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
-
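For reference, a small sketch of RevOptions in use, assuming the Git backend
defined later in this diff:

    from pip._internal.vcs.git import Git

    opts = Git().make_rev_options('v1.0', extra_args=['--depth', '1'])
    print(opts.to_args())      # ['v1.0', '--depth', '1']
    print(opts.to_display())   # ' (to revision v1.0)'
    print(opts.make_new('master').rev)   # 'master'; extra_args carry over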
-
-class VcsSupport(object):
- _registry = {} # type: Dict[str, Type[VersionControl]]
- schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
-
- def __init__(self):
- # type: () -> None
- # Register more schemes with urlparse for various version control
- # systems
- urllib_parse.uses_netloc.extend(self.schemes)
- # Python >= 2.7.4 and >= 3.3 don't have uses_fragment, hence
- # the getattr guard below.
- if getattr(urllib_parse, 'uses_fragment', None):
- urllib_parse.uses_fragment.extend(self.schemes)
- super(VcsSupport, self).__init__()
-
- def __iter__(self):
- return self._registry.__iter__()
-
- @property
- def backends(self):
- # type: () -> List[Type[VersionControl]]
- return list(self._registry.values())
-
- @property
- def dirnames(self):
- # type: () -> List[str]
- return [backend.dirname for backend in self.backends]
-
- @property
- def all_schemes(self):
- # type: () -> List[str]
- schemes = [] # type: List[str]
- for backend in self.backends:
- schemes.extend(backend.schemes)
- return schemes
-
- def register(self, cls):
- # type: (Type[VersionControl]) -> None
- if not hasattr(cls, 'name'):
- logger.warning('Cannot register VCS %s', cls.__name__)
- return
- if cls.name not in self._registry:
- self._registry[cls.name] = cls
- logger.debug('Registered VCS backend: %s', cls.name)
-
- def unregister(self, cls=None, name=None):
- # type: (Optional[Type[VersionControl]], Optional[str]) -> None
- if name in self._registry:
- del self._registry[name]
- elif cls in self._registry.values():
- del self._registry[cls.name]
- else:
- logger.warning('Cannot unregister because no class or name given')
-
- def get_backend_type(self, location):
- # type: (str) -> Optional[Type[VersionControl]]
- """
- Return the type of the version control backend if found at given
- location, e.g. vcs.get_backend_type('/path/to/vcs/checkout')
- """
- for vc_type in self._registry.values():
- if vc_type.controls_location(location):
- logger.debug('Determined that %s uses VCS: %s',
- location, vc_type.name)
- return vc_type
- return None
-
- def get_backend(self, name):
- # type: (str) -> Optional[Type[VersionControl]]
- name = name.lower()
- if name in self._registry:
- return self._registry[name]
- return None
-
-
-vcs = VcsSupport()
-
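The module-level vcs object above acts as a plugin registry. A sketch of
typical queries, assuming the backend modules have been imported (pip does
this at startup, which triggers their vcs.register() calls):

    print(vcs.get_backend('git'))          # the Git class, or None if unknown
    print(sorted(vcs.dirnames))            # ['.bzr', '.git', '.hg', '.svn']
    print('git+https' in vcs.all_schemes)  # True
    backend = vcs.get_backend_type('/path/to/checkout')  # hypothetical path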
-
-class VersionControl(object):
- name = ''
- dirname = ''
- repo_name = ''
- # List of supported schemes for this Version Control
- schemes = () # type: Tuple[str, ...]
- # Iterable of environment variable names to unset when running the VCS
- # (passed to call_subprocess() as unset_environ).
- unset_environ = () # type: Tuple[str, ...]
- default_arg_rev = None # type: Optional[str]
-
- def __init__(self, url=None, *args, **kwargs):
- self.url = url
- super(VersionControl, self).__init__(*args, **kwargs)
-
- def get_base_rev_args(self, rev):
- """
- Return the base revision arguments for a vcs command.
-
- Args:
- rev: the name of a revision to install. Cannot be None.
- """
- raise NotImplementedError
-
- def make_rev_options(self, rev=None, extra_args=None):
- # type: (Optional[str], Optional[List[str]]) -> RevOptions
- """
- Return a RevOptions object.
-
- Args:
- rev: the name of a revision to install.
- extra_args: a list of extra options.
- """
- return RevOptions(self, rev, extra_args=extra_args)
-
- @classmethod
- def _is_local_repository(cls, repo):
- # type: (str) -> bool
- """
- posix absolute paths start with os.path.sep,
- win32 ones start with a drive letter (like c:\\folder)
- """
- drive, tail = os.path.splitdrive(repo)
- return repo.startswith(os.path.sep) or bool(drive)
-
- def export(self, location):
- """
- Export the repository at the url to the destination location,
- i.e. only download the files, without VCS information.
- """
- raise NotImplementedError
-
- def get_netloc_and_auth(self, netloc, scheme):
- """
- Parse the repository URL's netloc, and return the new netloc to use
- along with auth information.
-
- Args:
- netloc: the original repository URL netloc.
- scheme: the repository URL's scheme without the vcs prefix.
-
- This is mainly for the Subversion class to override, so that auth
- information can be provided via the --username and --password options
- instead of through the URL. For other subclasses like Git without
- such an option, auth information must stay in the URL.
-
- Returns: (netloc, (username, password)).
- """
- return netloc, (None, None)
-
- def get_url_rev_and_auth(self, url):
- # type: (str) -> Tuple[str, Optional[str], AuthInfo]
- """
- Parse the repository URL to use, and return the URL, revision,
- and auth info to use.
-
- Returns: (url, rev, (username, password)).
- """
- scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
- if '+' not in scheme:
- raise ValueError(
- "Sorry, {!r} is a malformed VCS url. "
- "The format is <vcs>+<protocol>://<url>, "
- "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
- )
- # Remove the vcs prefix.
- scheme = scheme.split('+', 1)[1]
- netloc, user_pass = self.get_netloc_and_auth(netloc, scheme)
- rev = None
- if '@' in path:
- path, rev = path.rsplit('@', 1)
- url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
- return url, rev, user_pass
-
- def make_rev_args(self, username, password):
- """
- Return the RevOptions "extra arguments" to use in obtain().
- """
- return []
-
- def get_url_rev_options(self, url):
- # type: (str) -> Tuple[str, RevOptions]
- """
- Return the URL and RevOptions object to use in obtain() and in
- some cases export(), as a tuple (url, rev_options).
- """
- url, rev, user_pass = self.get_url_rev_and_auth(url)
- username, password = user_pass
- extra_args = self.make_rev_args(username, password)
- rev_options = self.make_rev_options(rev, extra_args=extra_args)
-
- return url, rev_options
-
- def normalize_url(self, url):
- # type: (str) -> str
- """
- Normalize a URL for comparison by unquoting it and removing any
- trailing slash.
- """
- return urllib_parse.unquote(url).rstrip('/')
-
- def compare_urls(self, url1, url2):
- # type: (str, str) -> bool
- """
- Compare two repo URLs for identity, ignoring incidental differences.
- """
- return (self.normalize_url(url1) == self.normalize_url(url2))
-
- def fetch_new(self, dest, url, rev_options):
- """
- Fetch a revision from a repository, in the case that this is the
- first fetch from the repository.
-
- Args:
- dest: the directory to fetch the repository to.
- rev_options: a RevOptions object.
- """
- raise NotImplementedError
-
- def switch(self, dest, url, rev_options):
- """
- Switch the repo at ``dest`` to point to ``URL``.
-
- Args:
- rev_options: a RevOptions object.
- """
- raise NotImplementedError
-
- def update(self, dest, url, rev_options):
- """
- Update an already-existing repo to the given ``rev_options``.
-
- Args:
- rev_options: a RevOptions object.
- """
- raise NotImplementedError
-
- def is_commit_id_equal(self, dest, name):
- """
- Return whether the id of the current commit equals the given name.
-
- Args:
- dest: the repository directory.
- name: a string name.
- """
- raise NotImplementedError
-
- def obtain(self, dest):
- # type: (str) -> None
- """
- Install or update in editable mode the package represented by this
- VersionControl object.
-
- Args:
- dest: the repository directory in which to install or update.
- """
- url, rev_options = self.get_url_rev_options(self.url)
-
- if not os.path.exists(dest):
- self.fetch_new(dest, url, rev_options)
- return
-
- rev_display = rev_options.to_display()
- if self.is_repository_directory(dest):
- existing_url = self.get_remote_url(dest)
- if self.compare_urls(existing_url, url):
- logger.debug(
- '%s in %s exists, and has correct URL (%s)',
- self.repo_name.title(),
- display_path(dest),
- url,
- )
- if not self.is_commit_id_equal(dest, rev_options.rev):
- logger.info(
- 'Updating %s %s%s',
- display_path(dest),
- self.repo_name,
- rev_display,
- )
- self.update(dest, url, rev_options)
- else:
- logger.info('Skipping because already up-to-date.')
- return
-
- logger.warning(
- '%s %s in %s exists with URL %s',
- self.name,
- self.repo_name,
- display_path(dest),
- existing_url,
- )
- prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
- ('s', 'i', 'w', 'b'))
- else:
- logger.warning(
- 'Directory %s already exists, and is not a %s %s.',
- dest,
- self.name,
- self.repo_name,
- )
- # https://github.com/python/mypy/issues/1174
- prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore
- ('i', 'w', 'b'))
-
- logger.warning(
- 'The plan is to install the %s repository %s',
- self.name,
- url,
- )
- response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])
-
- if response == 'a':
- sys.exit(-1)
-
- if response == 'w':
- logger.warning('Deleting %s', display_path(dest))
- rmtree(dest)
- self.fetch_new(dest, url, rev_options)
- return
-
- if response == 'b':
- dest_dir = backup_dir(dest)
- logger.warning(
- 'Backing up %s to %s', display_path(dest), dest_dir,
- )
- shutil.move(dest, dest_dir)
- self.fetch_new(dest, url, rev_options)
- return
-
- # Do nothing if the response is "i".
- if response == 's':
- logger.info(
- 'Switching %s %s to %s%s',
- self.repo_name,
- display_path(dest),
- url,
- rev_display,
- )
- self.switch(dest, url, rev_options)
-
- def unpack(self, location):
- # type: (str) -> None
- """
- Clean up the current location and download the url repository
- (including VCS metadata) into it.
- """
- if os.path.exists(location):
- rmtree(location)
- self.obtain(location)
-
- @classmethod
- def get_src_requirement(cls, location, project_name):
- """
- Return a string representing the requirement needed to
- redownload the files currently present in location, something
- like:
- {repository_url}@{revision}#egg={project_name}-{version_identifier}
- """
- raise NotImplementedError
-
- @classmethod
- def get_remote_url(cls, location):
- """
- Return the url used at location
-
- Raises RemoteNotFoundError if the repository does not have a remote
- url configured.
- """
- raise NotImplementedError
-
- @classmethod
- def get_revision(cls, location):
- """
- Return the current commit id of the files at the given location.
- """
- raise NotImplementedError
-
- @classmethod
- def run_command(
- cls,
- cmd, # type: List[str]
- show_stdout=True, # type: bool
- cwd=None, # type: Optional[str]
- on_returncode='raise', # type: str
- extra_ok_returncodes=None, # type: Optional[Iterable[int]]
- command_desc=None, # type: Optional[str]
- extra_environ=None, # type: Optional[Mapping[str, Any]]
- spinner=None # type: Optional[SpinnerInterface]
- ):
- # type: (...) -> Optional[Text]
- """
- Run a VCS subcommand.
- This is simply a wrapper around call_subprocess that adds the VCS
- command name and checks that the VCS executable is available.
- """
- cmd = [cls.name] + cmd
- try:
- return call_subprocess(cmd, show_stdout, cwd,
- on_returncode=on_returncode,
- extra_ok_returncodes=extra_ok_returncodes,
- command_desc=command_desc,
- extra_environ=extra_environ,
- unset_environ=cls.unset_environ,
- spinner=spinner)
- except OSError as e:
- # errno.ENOENT = no such file or directory
- # In other words, the VCS executable isn't available
- if e.errno == errno.ENOENT:
- raise BadCommand(
- 'Cannot find command %r - do you have '
- '%r installed and in your '
- 'PATH?' % (cls.name, cls.name))
- else:
- raise # re-raise exception if a different error occurred
-
- @classmethod
- def is_repository_directory(cls, path):
- # type: (str) -> bool
- """
- Return whether a directory path is a repository directory.
- """
- logger.debug('Checking in %s for %s (%s)...',
- path, cls.dirname, cls.name)
- return os.path.exists(os.path.join(path, cls.dirname))
-
- @classmethod
- def controls_location(cls, location):
- # type: (str) -> bool
- """
- Check if a location is controlled by the vcs.
- It is meant to be overridden to implement smarter detection
- mechanisms for specific vcs.
-
- This can do more than is_repository_directory() alone. For example,
- the Git override checks that Git is actually available.
- """
- return cls.is_repository_directory(location)
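The URL contract implemented by get_url_rev_and_auth() is easiest to see with
a concrete value; a sketch using the Git subclass defined later in this diff:

    from pip._internal.vcs.git import Git

    url, rev, user_pass = Git().get_url_rev_and_auth(
        'git+https://github.com/pypa/pip.git@19.0.3')
    print(url)        # 'https://github.com/pypa/pip.git'
    print(rev)        # '19.0.3'
    print(user_pass)  # (None, None): Git keeps auth in the URL itself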
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 96e1d11..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc
deleted file mode 100644
index 1b9c28f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/git.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/git.cpython-37.pyc
deleted file mode 100644
index ce34c91..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/git.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc
deleted file mode 100644
index 929ae09..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc
deleted file mode 100644
index 80e464e..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/bazaar.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/bazaar.py
deleted file mode 100644
index 4c6ac79..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/bazaar.py
+++ /dev/null
@@ -1,114 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.download import path_to_url
-from pip._internal.utils.misc import (
- display_path, make_vcs_requirement_url, rmtree,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.vcs import VersionControl, vcs
-
-logger = logging.getLogger(__name__)
-
-
-class Bazaar(VersionControl):
- name = 'bzr'
- dirname = '.bzr'
- repo_name = 'branch'
- schemes = (
- 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
- 'bzr+lp',
- )
-
- def __init__(self, url=None, *args, **kwargs):
- super(Bazaar, self).__init__(url, *args, **kwargs)
- # This is only needed for Python < 2.7.5: register lp with urlparse
- # (without exposing it as a pip scheme) so that bzr+lp URLs parse.
- if getattr(urllib_parse, 'uses_fragment', None):
- urllib_parse.uses_fragment.extend(['lp'])
-
- def get_base_rev_args(self, rev):
- return ['-r', rev]
-
- def export(self, location):
- """
- Export the Bazaar repository at the url to the destination location
- """
- # Remove the location to make sure Bazaar can export it correctly
- if os.path.exists(location):
- rmtree(location)
-
- with TempDirectory(kind="export") as temp_dir:
- self.unpack(temp_dir.path)
-
- self.run_command(
- ['export', location],
- cwd=temp_dir.path, show_stdout=False,
- )
-
- def fetch_new(self, dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Checking out %s%s to %s',
- url,
- rev_display,
- display_path(dest),
- )
- cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
- self.run_command(cmd_args)
-
- def switch(self, dest, url, rev_options):
- self.run_command(['switch', url], cwd=dest)
-
- def update(self, dest, url, rev_options):
- cmd_args = ['pull', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- def get_url_rev_and_auth(self, url):
- # hotfix: the parent class strips the bzr+ prefix (turning
- # bzr+ssh:// into ssh://), so re-add it here
- url, rev, user_pass = super(Bazaar, self).get_url_rev_and_auth(url)
- if url.startswith('ssh://'):
- url = 'bzr+' + url
- return url, rev, user_pass
-
- @classmethod
- def get_remote_url(cls, location):
- urls = cls.run_command(['info'], show_stdout=False, cwd=location)
- for line in urls.splitlines():
- line = line.strip()
- for x in ('checkout of branch: ',
- 'parent branch: '):
- if line.startswith(x):
- repo = line.split(x)[1]
- if cls._is_local_repository(repo):
- return path_to_url(repo)
- return repo
- return None
-
- @classmethod
- def get_revision(cls, location):
- revision = cls.run_command(
- ['revno'], show_stdout=False, cwd=location,
- )
- return revision.splitlines()[-1]
-
- @classmethod
- def get_src_requirement(cls, location, project_name):
- repo = cls.get_remote_url(location)
- if not repo:
- return None
- if not repo.lower().startswith('bzr:'):
- repo = 'bzr+' + repo
- current_rev = cls.get_revision(location)
- return make_vcs_requirement_url(repo, current_rev, project_name)
-
- def is_commit_id_equal(self, dest, name):
- """Always assume the versions don't match"""
- return False
-
-
-vcs.register(Bazaar)
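The requirement string produced by get_src_requirement() has the shape
documented above; a sketch with hypothetical values (make_vcs_requirement_url
lives in pip._internal.utils.misc):

    from pip._internal.utils.misc import make_vcs_requirement_url

    req = make_vcs_requirement_url(
        'bzr+https://example.com/branch', '123', 'MyProject')
    print(req)  # 'bzr+https://example.com/branch@123#egg=MyProject'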
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/git.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/git.py
deleted file mode 100644
index dd2bd61..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/git.py
+++ /dev/null
@@ -1,369 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os.path
-import re
-
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-
-from pip._internal.exceptions import BadCommand
-from pip._internal.utils.compat import samefile
-from pip._internal.utils.misc import (
- display_path, make_vcs_requirement_url, redact_password_from_url,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.vcs import RemoteNotFoundError, VersionControl, vcs
-
-urlsplit = urllib_parse.urlsplit
-urlunsplit = urllib_parse.urlunsplit
-
-
-logger = logging.getLogger(__name__)
-
-
-HASH_REGEX = re.compile('[a-fA-F0-9]{40}')
-
-
-def looks_like_hash(sha):
- return bool(HASH_REGEX.match(sha))
-
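looks_like_hash() is a purely syntactic check for a full 40-character SHA-1:

    print(looks_like_hash('1f2d3c4b' * 5))  # True: 40 hex characters
    print(looks_like_hash('v19.0.3'))       # False: a tag-like name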
-
-class Git(VersionControl):
- name = 'git'
- dirname = '.git'
- repo_name = 'clone'
- schemes = (
- 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
- )
- # Prevent the user's environment variables from interfering with pip:
- # https://github.com/pypa/pip/issues/1130
- unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
- default_arg_rev = 'HEAD'
-
- def __init__(self, url=None, *args, **kwargs):
-
- # Works around an apparent Git bug
- # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
- if url:
- scheme, netloc, path, query, fragment = urlsplit(url)
- if scheme.endswith('file'):
- initial_slashes = path[:-len(path.lstrip('/'))]
- newpath = (
- initial_slashes +
- urllib_request.url2pathname(path)
- .replace('\\', '/').lstrip('/')
- )
- url = urlunsplit((scheme, netloc, newpath, query, fragment))
- after_plus = scheme.find('+') + 1
- url = scheme[:after_plus] + urlunsplit(
- (scheme[after_plus:], netloc, newpath, query, fragment),
- )
-
- super(Git, self).__init__(url, *args, **kwargs)
-
- def get_base_rev_args(self, rev):
- return [rev]
-
- def get_git_version(self):
- VERSION_PFX = 'git version '
- version = self.run_command(['version'], show_stdout=False)
- if version.startswith(VERSION_PFX):
- version = version[len(VERSION_PFX):].split()[0]
- else:
- version = ''
- # Keep only the first 3 components of the git version, because on
- # Windows it is x.y.z.windows.t, which would parse as a
- # LegacyVersion that always compares smaller than a Version.
- version = '.'.join(version.split('.')[:3])
- return parse_version(version)
-
- def get_current_branch(self, location):
- """
- Return the current branch, or None if HEAD isn't at a branch
- (e.g. detached HEAD).
- """
- # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
- # HEAD rather than a symbolic ref. In addition, the -q causes the
- # command to exit with status code 1 instead of 128 in this case
- # and to suppress the message to stderr.
- args = ['symbolic-ref', '-q', 'HEAD']
- output = self.run_command(
- args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
- )
- ref = output.strip()
-
- if ref.startswith('refs/heads/'):
- return ref[len('refs/heads/'):]
-
- return None
-
- def export(self, location):
- """Export the Git repository at the url to the destination location"""
- if not location.endswith('/'):
- location = location + '/'
-
- with TempDirectory(kind="export") as temp_dir:
- self.unpack(temp_dir.path)
- self.run_command(
- ['checkout-index', '-a', '-f', '--prefix', location],
- show_stdout=False, cwd=temp_dir.path
- )
-
- def get_revision_sha(self, dest, rev):
- """
- Return (sha_or_none, is_branch), where sha_or_none is a commit hash
- if the revision names a remote branch or tag, otherwise None.
-
- Args:
- dest: the repository directory.
- rev: the revision name.
- """
- # Pass rev to pre-filter the list.
- output = self.run_command(['show-ref', rev], cwd=dest,
- show_stdout=False, on_returncode='ignore')
- refs = {}
- for line in output.strip().splitlines():
- try:
- sha, ref = line.split()
- except ValueError:
- # Include the offending line to simplify troubleshooting if
- # this error ever occurs.
- raise ValueError('unexpected show-ref line: {!r}'.format(line))
-
- refs[ref] = sha
-
- branch_ref = 'refs/remotes/origin/{}'.format(rev)
- tag_ref = 'refs/tags/{}'.format(rev)
-
- sha = refs.get(branch_ref)
- if sha is not None:
- return (sha, True)
-
- sha = refs.get(tag_ref)
-
- return (sha, False)
-
- def resolve_revision(self, dest, url, rev_options):
- """
- Resolve a revision to a new RevOptions object with the SHA1 of the
- branch, tag, or ref if found.
-
- Args:
- rev_options: a RevOptions object.
- """
- rev = rev_options.arg_rev
- sha, is_branch = self.get_revision_sha(dest, rev)
-
- if sha is not None:
- rev_options = rev_options.make_new(sha)
- rev_options.branch_name = rev if is_branch else None
-
- return rev_options
-
- # Do not show a warning for the common case of something that has
- # the form of a Git commit hash.
- if not looks_like_hash(rev):
- logger.warning(
- "Did not find branch or tag '%s', assuming revision or ref.",
- rev,
- )
-
- if not rev.startswith('refs/'):
- return rev_options
-
- # If it looks like a ref, we have to fetch it explicitly.
- self.run_command(
- ['fetch', '-q', url] + rev_options.to_args(),
- cwd=dest,
- )
- # Change the revision to the SHA of the ref we fetched
- sha = self.get_revision(dest, rev='FETCH_HEAD')
- rev_options = rev_options.make_new(sha)
-
- return rev_options
-
- def is_commit_id_equal(self, dest, name):
- """
- Return whether the current commit hash equals the given name.
-
- Args:
- dest: the repository directory.
- name: a string name.
- """
- if not name:
- # Then avoid an unnecessary subprocess call.
- return False
-
- return self.get_revision(dest) == name
-
- def fetch_new(self, dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Cloning %s%s to %s', redact_password_from_url(url),
- rev_display, display_path(dest),
- )
- self.run_command(['clone', '-q', url, dest])
-
- if rev_options.rev:
- # Then a specific revision was requested.
- rev_options = self.resolve_revision(dest, url, rev_options)
- branch_name = getattr(rev_options, 'branch_name', None)
- if branch_name is None:
- # Only do a checkout if the current commit id doesn't match
- # the requested revision.
- if not self.is_commit_id_equal(dest, rev_options.rev):
- cmd_args = ['checkout', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
- elif self.get_current_branch(dest) != branch_name:
- # Then a specific branch was requested, and that branch
- # is not yet checked out.
- track_branch = 'origin/{}'.format(branch_name)
- cmd_args = [
- 'checkout', '-b', branch_name, '--track', track_branch,
- ]
- self.run_command(cmd_args, cwd=dest)
-
- # repo may contain submodules
- self.update_submodules(dest)
-
- def switch(self, dest, url, rev_options):
- self.run_command(['config', 'remote.origin.url', url], cwd=dest)
- cmd_args = ['checkout', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- self.update_submodules(dest)
-
- def update(self, dest, url, rev_options):
- # First fetch changes from the default remote
- if self.get_git_version() >= parse_version('1.9.0'):
- # fetch tags in addition to everything else
- self.run_command(['fetch', '-q', '--tags'], cwd=dest)
- else:
- self.run_command(['fetch', '-q'], cwd=dest)
- # Then reset to wanted revision (maybe even origin/master)
- rev_options = self.resolve_revision(dest, url, rev_options)
- cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
- # update submodules
- self.update_submodules(dest)
-
- @classmethod
- def get_remote_url(cls, location):
- """
- Return URL of the first remote encountered.
-
- Raises RemoteNotFoundError if the repository does not have a remote
- url configured.
- """
- # We need to pass 1 for extra_ok_returncodes since the command
- # exits with return code 1 if there are no matching lines.
- stdout = cls.run_command(
- ['config', '--get-regexp', r'remote\..*\.url'],
- extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
- )
- remotes = stdout.splitlines()
- try:
- found_remote = remotes[0]
- except IndexError:
- raise RemoteNotFoundError
-
- for remote in remotes:
- if remote.startswith('remote.origin.url '):
- found_remote = remote
- break
- url = found_remote.split(' ')[1]
- return url.strip()
-
- @classmethod
- def get_revision(cls, location, rev=None):
- if rev is None:
- rev = 'HEAD'
- current_rev = cls.run_command(
- ['rev-parse', rev], show_stdout=False, cwd=location,
- )
- return current_rev.strip()
-
- @classmethod
- def _get_subdirectory(cls, location):
- """Return the relative path of setup.py to the git repo root."""
- # find the repo root
- git_dir = cls.run_command(['rev-parse', '--git-dir'],
- show_stdout=False, cwd=location).strip()
- if not os.path.isabs(git_dir):
- git_dir = os.path.join(location, git_dir)
- root_dir = os.path.join(git_dir, '..')
- # find setup.py
- orig_location = location
- while not os.path.exists(os.path.join(location, 'setup.py')):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
- # We've traversed up to the root of the filesystem without
- # finding setup.py
- logger.warning(
- "Could not find setup.py for directory %s (tried all "
- "parent directories)",
- orig_location,
- )
- return None
- # relative path of setup.py to repo root
- if samefile(root_dir, location):
- return None
- return os.path.relpath(location, root_dir)
-
- @classmethod
- def get_src_requirement(cls, location, project_name):
- repo = cls.get_remote_url(location)
- if not repo.lower().startswith('git:'):
- repo = 'git+' + repo
- current_rev = cls.get_revision(location)
- subdir = cls._get_subdirectory(location)
- req = make_vcs_requirement_url(repo, current_rev, project_name,
- subdir=subdir)
-
- return req
-
- def get_url_rev_and_auth(self, url):
- """
- Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
- That's required because although they use SSH, they sometimes don't
- work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
- parsing, so we remove it again afterwards and return the URL as a stub.
- """
- if '://' not in url:
- assert 'file:' not in url
- url = url.replace('git+', 'git+ssh://')
- url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)
- url = url.replace('ssh://', '')
- else:
- url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)
-
- return url, rev, user_pass
-
- def update_submodules(self, location):
- if not os.path.exists(os.path.join(location, '.gitmodules')):
- return
- self.run_command(
- ['submodule', 'update', '--init', '--recursive', '-q'],
- cwd=location,
- )
-
- @classmethod
- def controls_location(cls, location):
- if super(Git, cls).controls_location(location):
- return True
- try:
- r = cls.run_command(['rev-parse'],
- cwd=location,
- show_stdout=False,
- on_returncode='ignore')
- return not r
- except BadCommand:
- logger.debug("could not determine if %s is under git control "
- "because git is not available", location)
- return False
-
-
-vcs.register(Git)
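The stub-URL handling in get_url_rev_and_auth() above is worth a concrete
trace; a sketch with a hypothetical repository:

    from pip._internal.vcs.git import Git

    url, rev, _ = Git().get_url_rev_and_auth(
        'git+git@github.com:pypa/pip.git@19.0.3')
    # The ssh:// scheme is added only for parsing and stripped off again:
    print(url)  # 'git@github.com:pypa/pip.git'
    print(rev)  # '19.0.3'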
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/mercurial.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/mercurial.py
deleted file mode 100644
index 26e75de..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/mercurial.py
+++ /dev/null
@@ -1,103 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._vendor.six.moves import configparser
-
-from pip._internal.download import path_to_url
-from pip._internal.utils.misc import display_path, make_vcs_requirement_url
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.vcs import VersionControl, vcs
-
-logger = logging.getLogger(__name__)
-
-
-class Mercurial(VersionControl):
- name = 'hg'
- dirname = '.hg'
- repo_name = 'clone'
- schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
-
- def get_base_rev_args(self, rev):
- return [rev]
-
- def export(self, location):
- """Export the Hg repository at the url to the destination location"""
- with TempDirectory(kind="export") as temp_dir:
- self.unpack(temp_dir.path)
-
- self.run_command(
- ['archive', location], show_stdout=False, cwd=temp_dir.path
- )
-
- def fetch_new(self, dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Cloning hg %s%s to %s',
- url,
- rev_display,
- display_path(dest),
- )
- self.run_command(['clone', '--noupdate', '-q', url, dest])
- cmd_args = ['update', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- def switch(self, dest, url, rev_options):
- repo_config = os.path.join(dest, self.dirname, 'hgrc')
- config = configparser.SafeConfigParser()
- try:
- config.read(repo_config)
- config.set('paths', 'default', url)
- with open(repo_config, 'w') as config_file:
- config.write(config_file)
- except (OSError, configparser.NoSectionError) as exc:
- logger.warning(
- 'Could not switch Mercurial repository to %s: %s', url, exc,
- )
- else:
- cmd_args = ['update', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- def update(self, dest, url, rev_options):
- self.run_command(['pull', '-q'], cwd=dest)
- cmd_args = ['update', '-q'] + rev_options.to_args()
- self.run_command(cmd_args, cwd=dest)
-
- @classmethod
- def get_remote_url(cls, location):
- url = cls.run_command(
- ['showconfig', 'paths.default'],
- show_stdout=False, cwd=location).strip()
- if cls._is_local_repository(url):
- url = path_to_url(url)
- return url.strip()
-
- @classmethod
- def get_revision(cls, location):
- current_revision = cls.run_command(
- ['parents', '--template={rev}'],
- show_stdout=False, cwd=location).strip()
- return current_revision
-
- @classmethod
- def get_revision_hash(cls, location):
- current_rev_hash = cls.run_command(
- ['parents', '--template={node}'],
- show_stdout=False, cwd=location).strip()
- return current_rev_hash
-
- @classmethod
- def get_src_requirement(cls, location, project_name):
- repo = cls.get_remote_url(location)
- if not repo.lower().startswith('hg:'):
- repo = 'hg+' + repo
- current_rev_hash = cls.get_revision_hash(location)
- return make_vcs_requirement_url(repo, current_rev_hash, project_name)
-
- def is_commit_id_equal(self, dest, name):
- """Always assume the versions don't match"""
- return False
-
-
-vcs.register(Mercurial)
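A note on the two revision accessors above: {rev} is the repository-local
sequence number, while {node} is the global changeset hash, which is what the
pinned requirement uses. A sketch with hypothetical values (both calls shell
out to hg, so a real checkout is required):

    from pip._internal.utils.misc import make_vcs_requirement_url
    from pip._internal.vcs.mercurial import Mercurial

    rev_num = Mercurial.get_revision('/src/project')        # e.g. '42'
    rev_hash = Mercurial.get_revision_hash('/src/project')  # e.g. '821ae57ff3b7'
    print(make_vcs_requirement_url(
        'hg+https://example.com/project', rev_hash, 'project'))
    # hg+https://example.com/project@821ae57ff3b7#egg=project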
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/subversion.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/subversion.py
deleted file mode 100644
index 42ac5ac..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/vcs/subversion.py
+++ /dev/null
@@ -1,200 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os
-import re
-
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc,
-)
-from pip._internal.vcs import VersionControl, vcs
-
-_svn_xml_url_re = re.compile('url="([^"]+)"')
-_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
-_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
-_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
-
-
-logger = logging.getLogger(__name__)
-
-
-class Subversion(VersionControl):
- name = 'svn'
- dirname = '.svn'
- repo_name = 'checkout'
- schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
-
- def get_base_rev_args(self, rev):
- return ['-r', rev]
-
- def export(self, location):
- """Export the svn repository at the url to the destination location"""
- url, rev_options = self.get_url_rev_options(self.url)
-
- logger.info('Exporting svn repository %s to %s', url, location)
- with indent_log():
- if os.path.exists(location):
- # Subversion doesn't like to check out over an existing directory;
- # --force fixes this, but was only added in svn 1.5
- rmtree(location)
- cmd_args = ['export'] + rev_options.to_args() + [url, location]
- self.run_command(cmd_args, show_stdout=False)
-
- def fetch_new(self, dest, url, rev_options):
- rev_display = rev_options.to_display()
- logger.info(
- 'Checking out %s%s to %s',
- url,
- rev_display,
- display_path(dest),
- )
- cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
- self.run_command(cmd_args)
-
- def switch(self, dest, url, rev_options):
- cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
- self.run_command(cmd_args)
-
- def update(self, dest, url, rev_options):
- cmd_args = ['update'] + rev_options.to_args() + [dest]
- self.run_command(cmd_args)
-
- @classmethod
- def get_revision(cls, location):
- """
- Return the maximum revision for all files under a given location
- """
- # Note: taken from setuptools.command.egg_info
- revision = 0
-
- for base, dirs, files in os.walk(location):
- if cls.dirname not in dirs:
- dirs[:] = []
- continue # no sense walking uncontrolled subdirs
- dirs.remove(cls.dirname)
- entries_fn = os.path.join(base, cls.dirname, 'entries')
- if not os.path.exists(entries_fn):
- # FIXME: should we warn?
- continue
-
- dirurl, localrev = cls._get_svn_url_rev(base)
-
- if base == location:
- base = dirurl + '/' # save the root url
- elif not dirurl or not dirurl.startswith(base):
- dirs[:] = []
- continue # not part of the same svn tree, skip it
- revision = max(revision, localrev)
- return revision
-
- def get_netloc_and_auth(self, netloc, scheme):
- """
- This override allows the auth information to be passed to svn via the
- --username and --password options instead of via the URL.
- """
- if scheme == 'ssh':
- # The --username and --password options can't be used for
- # svn+ssh URLs, so keep the auth information in the URL.
- return super(Subversion, self).get_netloc_and_auth(
- netloc, scheme)
-
- return split_auth_from_netloc(netloc)
-
- def get_url_rev_and_auth(self, url):
- # hotfix: the parent class strips the svn+ prefix (turning
- # svn+ssh:// into ssh://), so re-add it here
- url, rev, user_pass = super(Subversion, self).get_url_rev_and_auth(url)
- if url.startswith('ssh://'):
- url = 'svn+' + url
- return url, rev, user_pass
-
- def make_rev_args(self, username, password):
- extra_args = []
- if username:
- extra_args += ['--username', username]
- if password:
- extra_args += ['--password', password]
-
- return extra_args
-
- @classmethod
- def get_remote_url(cls, location):
- # In cases where the source is in a subdirectory (so setup.py is not
- # at the location itself), walk up from the location until a real
- # setup.py is found.
- orig_location = location
- while not os.path.exists(os.path.join(location, 'setup.py')):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
- # We've traversed up to the root of the filesystem without
- # finding setup.py
- logger.warning(
- "Could not find setup.py for directory %s (tried all "
- "parent directories)",
- orig_location,
- )
- return None
-
- return cls._get_svn_url_rev(location)[0]
-
- @classmethod
- def _get_svn_url_rev(cls, location):
- from pip._internal.exceptions import InstallationError
-
- entries_path = os.path.join(location, cls.dirname, 'entries')
- if os.path.exists(entries_path):
- with open(entries_path) as f:
- data = f.read()
- else: # subversion >= 1.7 does not have the 'entries' file
- data = ''
-
- if (data.startswith('8') or
- data.startswith('9') or
- data.startswith('10')):
- data = list(map(str.splitlines, data.split('\n\x0c\n')))
- del data[0][0] # get rid of the '8'
- url = data[0][3]
- revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
- elif data.startswith('<?xml'):
- match = _svn_xml_url_re.search(data)
- if not match:
- raise ValueError('Badly formatted data: %r' % data)
- url = match.group(1) # get repository URL
- revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
- else:
- try:
- # subversion >= 1.7
- xml = cls.run_command(
- ['info', '--xml', location],
- show_stdout=False,
- )
- url = _svn_info_xml_url_re.search(xml).group(1)
- revs = [
- int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
- ]
- except InstallationError:
- url, revs = None, []
-
- if revs:
- rev = max(revs)
- else:
- rev = 0
-
- return url, rev
-
- @classmethod
- def get_src_requirement(cls, location, project_name):
- repo = cls.get_remote_url(location)
- if repo is None:
- return None
- repo = 'svn+' + repo
- rev = cls.get_revision(location)
- return make_vcs_requirement_url(repo, rev, project_name)
-
- def is_commit_id_equal(self, dest, name):
- """Always assume the versions don't match"""
- return False
-
-
-vcs.register(Subversion)
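make_rev_args() is the piece that moves URL credentials onto the svn command
line; a quick sketch:

    from pip._internal.vcs.subversion import Subversion

    svn = Subversion()
    print(svn.make_rev_args('alice', 's3cret'))
    # ['--username', 'alice', '--password', 's3cret']
    print(svn.make_rev_args('alice', None))  # ['--username', 'alice']
    print(svn.make_rev_args(None, None))     # []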
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/wheel.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/wheel.py
deleted file mode 100644
index 67bcc7f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/wheel.py
+++ /dev/null
@@ -1,1095 +0,0 @@
-"""
-Support for installing and building the "wheel" binary package format.
-"""
-from __future__ import absolute_import
-
-import collections
-import compileall
-import csv
-import hashlib
-import logging
-import os.path
-import re
-import shutil
-import stat
-import sys
-import warnings
-from base64 import urlsafe_b64encode
-from email.parser import Parser
-
-from pip._vendor import pkg_resources
-from pip._vendor.distlib.scripts import ScriptMaker
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.six import StringIO
-
-from pip._internal import pep425tags
-from pip._internal.download import path_to_url, unpack_url
-from pip._internal.exceptions import (
- InstallationError, InvalidWheelFilename, UnsupportedWheel,
-)
-from pip._internal.locations import (
- PIP_DELETE_MARKER_FILENAME, distutils_scheme,
-)
-from pip._internal.models.link import Link
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- call_subprocess, captured_stdout, ensure_dir, read_chunks,
-)
-from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any,
- Union, Iterable
- )
- from pip._vendor.packaging.requirements import Requirement # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
- from pip._internal.index import FormatControl, PackageFinder # noqa: F401
- from pip._internal.operations.prepare import ( # noqa: F401
- RequirementPreparer
- )
- from pip._internal.cache import WheelCache # noqa: F401
- from pip._internal.pep425tags import Pep425Tag # noqa: F401
-
- InstalledCSVRow = Tuple[str, ...]
-
-
-VERSION_COMPATIBLE = (1, 0)
-
-
-logger = logging.getLogger(__name__)
-
-
-def normpath(src, p):
- return os.path.relpath(src, p).replace(os.path.sep, '/')
-
-
-def rehash(path, blocksize=1 << 20):
- # type: (str, int) -> Tuple[str, str]
- """Return (hash, length) for path using hashlib.sha256()"""
- h = hashlib.sha256()
- length = 0
- with open(path, 'rb') as f:
- for block in read_chunks(f, size=blocksize):
- length += len(block)
- h.update(block)
- digest = 'sha256=' + urlsafe_b64encode(
- h.digest()
- ).decode('latin1').rstrip('=')
- # unicode/str python2 issues
- return (digest, str(length)) # type: ignore
-
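rehash() produces exactly the digest format RECORD expects (PEP 376/427):
urlsafe base64 of the SHA-256 digest with '=' padding stripped. A
self-contained sketch of the same transformation:

    import hashlib
    from base64 import urlsafe_b64encode

    payload = b"print('hello')\n"   # stand-in for a file's contents
    digest = 'sha256=' + urlsafe_b64encode(
        hashlib.sha256(payload).digest()).decode('latin1').rstrip('=')
    print(digest, len(payload))     # the (hash, length) pair rehash() returns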
-
-def open_for_csv(name, mode):
- # type: (str, Text) -> IO
- if sys.version_info[0] < 3:
- nl = {} # type: Dict[str, Any]
- bin = 'b'
- else:
- nl = {'newline': ''} # type: Dict[str, Any]
- bin = ''
- return open(name, mode + bin, **nl)
-
-
-def replace_python_tag(wheelname, new_tag):
- # type: (str, str) -> str
- """Replace the Python tag in a wheel file name with a new value.
- """
- parts = wheelname.split('-')
- parts[-3] = new_tag
- return '-'.join(parts)
-
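replace_python_tag() relies on the wheel filename convention
name-version(-build)-pythontag-abitag-platform.whl, in which the Python tag is
always the third field from the end:

    print(replace_python_tag('pip-19.0.3-py2.py3-none-any.whl', 'py37'))
    # 'pip-19.0.3-py37-none-any.whl'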
-
-def fix_script(path):
- # type: (str) -> Optional[bool]
- """Replace #!python with #!/path/to/python
- Return True if file was changed."""
- # XXX RECORD hashes will need to be updated
- if os.path.isfile(path):
- with open(path, 'rb') as script:
- firstline = script.readline()
- if not firstline.startswith(b'#!python'):
- return False
- exename = sys.executable.encode(sys.getfilesystemencoding())
- firstline = b'#!' + exename + os.linesep.encode("ascii")
- rest = script.read()
- with open(path, 'wb') as script:
- script.write(firstline)
- script.write(rest)
- return True
- return None
-
-
-dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
- \.dist-info$""", re.VERBOSE)
-
-
-def root_is_purelib(name, wheeldir):
- # type: (str, str) -> bool
- """
- Return True if the extracted wheel in wheeldir should go into purelib.
- """
- name_folded = name.replace("-", "_")
- for item in os.listdir(wheeldir):
- match = dist_info_re.match(item)
- if match and match.group('name') == name_folded:
- with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
- for line in wheel:
- line = line.lower().rstrip()
- if line == "root-is-purelib: true":
- return True
- return False
-
-
-def get_entrypoints(filename):
- # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
- if not os.path.exists(filename):
- return {}, {}
-
- # Entry points can be declared as arbitrary strings, so the file may or
- # may not be valid INI. Strip leading and trailing whitespace from each
- # line to make it parseable.
- with open(filename) as fp:
- data = StringIO()
- for line in fp:
- data.write(line.strip())
- data.write("\n")
- data.seek(0)
-
- # get the entry points and then the script names
- entry_points = pkg_resources.EntryPoint.parse_map(data)
- console = entry_points.get('console_scripts', {})
- gui = entry_points.get('gui_scripts', {})
-
- def _split_ep(s):
- """get the string representation of EntryPoint, remove space and split
- on '='"""
- return str(s).replace(" ", "").split("=")
-
- # convert the EntryPoint objects into strings with module:function
- console = dict(_split_ep(v) for v in console.values())
- gui = dict(_split_ep(v) for v in gui.values())
- return console, gui
-
-
-def message_about_scripts_not_on_PATH(scripts):
- # type: (Sequence[str]) -> Optional[str]
- """Determine if any scripts are not on PATH and format a warning.
-
- Returns a warning message if one or more scripts are not on PATH,
- otherwise None.
- """
- if not scripts:
- return None
-
- # Group scripts by the path they were installed in
- grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set]
- for destfile in scripts:
- parent_dir = os.path.dirname(destfile)
- script_name = os.path.basename(destfile)
- grouped_by_dir[parent_dir].add(script_name)
-
- # We don't want to warn for directories that are on PATH.
- not_warn_dirs = [
- os.path.normcase(i).rstrip(os.sep) for i in
- os.environ.get("PATH", "").split(os.pathsep)
- ]
- # If an executable sits with sys.executable, we don't warn for it.
- # This covers the case of venv invocations without activating the venv.
- not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
- warn_for = {
- parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
- if os.path.normcase(parent_dir) not in not_warn_dirs
- }
- if not warn_for:
- return None
-
- # Format a message
- msg_lines = []
- for parent_dir, scripts in warn_for.items():
- scripts = sorted(scripts)
- if len(scripts) == 1:
- start_text = "script {} is".format(scripts[0])
- else:
- start_text = "scripts {} are".format(
- ", ".join(scripts[:-1]) + " and " + scripts[-1]
- )
-
- msg_lines.append(
- "The {} installed in '{}' which is not on PATH."
- .format(start_text, parent_dir)
- )
-
- last_line_fmt = (
- "Consider adding {} to PATH or, if you prefer "
- "to suppress this warning, use --no-warn-script-location."
- )
- if len(msg_lines) == 1:
- msg_lines.append(last_line_fmt.format("this directory"))
- else:
- msg_lines.append(last_line_fmt.format("these directories"))
-
- # Returns the formatted multiline message
- return "\n".join(msg_lines)
-
-
-def sorted_outrows(outrows):
- # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
- """
- Return the given rows of a RECORD file in sorted order.
-
- Each row is a 3-tuple (path, hash, size) and corresponds to a record of
- a RECORD file (see PEP 376 and PEP 427 for details). For the rows
- passed to this function, the size can be an integer as an int or string,
- or the empty string.
- """
- # Normally, there should only be one row per path, in which case the
- # second and third elements don't come into play when sorting.
- # However, in cases in the wild where a path might happen to occur twice,
- # we don't want the sort operation to trigger an error (but still want
- # determinism). Since the third element can be an int or string, we
- # coerce each element to a string to avoid a TypeError in this case.
- # For additional background, see--
- # https://github.com/pypa/pip/issues/5868
- return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
-
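The str() coercion matters precisely when a path occurs twice with
differently-typed sizes; a sketch:

    rows = [('pkg/a.py', 'sha256=abc', 10), ('pkg/a.py', 'sha256=abc', '10')]
    print(sorted_outrows(rows))
    # Sorts deterministically instead of raising TypeError (Python 3 cannot
    # compare int and str in the third column).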
-
-def get_csv_rows_for_installed(
- old_csv_rows, # type: Iterable[List[str]]
- installed, # type: Dict[str, str]
- changed, # type: set
- generated, # type: List[str]
- lib_dir, # type: str
-):
- # type: (...) -> List[InstalledCSVRow]
- """
- :param installed: A map from archive RECORD path to installation RECORD
- path.
- """
- installed_rows = [] # type: List[InstalledCSVRow]
- for row in old_csv_rows:
- if len(row) > 3:
- logger.warning(
- 'RECORD line has more than three elements: {}'.format(row)
- )
- # Make a copy because we are mutating the row.
- row = list(row)
- old_path = row[0]
- new_path = installed.pop(old_path, old_path)
- row[0] = new_path
- if new_path in changed:
- digest, length = rehash(new_path)
- row[1] = digest
- row[2] = length
- installed_rows.append(tuple(row))
- for f in generated:
- digest, length = rehash(f)
- installed_rows.append((normpath(f, lib_dir), digest, str(length)))
- for f in installed:
- installed_rows.append((installed[f], '', ''))
- return installed_rows
-
-
-def move_wheel_files(
- name, # type: str
- req, # type: Requirement
- wheeldir, # type: str
- user=False, # type: bool
- home=None, # type: Optional[str]
- root=None, # type: Optional[str]
- pycompile=True, # type: bool
- scheme=None, # type: Optional[Mapping[str, str]]
- isolated=False, # type: bool
- prefix=None, # type: Optional[str]
- warn_script_location=True # type: bool
-):
- # type: (...) -> None
- """Install a wheel"""
- # TODO: Investigate and break this up.
- # TODO: Look into moving this into a dedicated class for representing an
- # installation.
-
- if not scheme:
- scheme = distutils_scheme(
- name, user=user, home=home, root=root, isolated=isolated,
- prefix=prefix,
- )
-
- if root_is_purelib(name, wheeldir):
- lib_dir = scheme['purelib']
- else:
- lib_dir = scheme['platlib']
-
- info_dir = [] # type: List[str]
- data_dirs = []
- source = wheeldir.rstrip(os.path.sep) + os.path.sep
-
- # Record details of the files moved
- # installed = files copied from the wheel to the destination
- # changed = files changed while installing (scripts #! line typically)
- # generated = files newly generated during the install (script wrappers)
- installed = {} # type: Dict[str, str]
- changed = set()
- generated = [] # type: List[str]
-
- # Compile all of the pyc files that we're going to be installing
- if pycompile:
- with captured_stdout() as stdout:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore')
- compileall.compile_dir(source, force=True, quiet=True)
- logger.debug(stdout.getvalue())
-
- def record_installed(srcfile, destfile, modified=False):
- """Map archive RECORD paths to installation RECORD paths."""
- oldpath = normpath(srcfile, wheeldir)
- newpath = normpath(destfile, lib_dir)
- installed[oldpath] = newpath
- if modified:
- changed.add(destfile)
-
- def clobber(source, dest, is_base, fixer=None, filter=None):
- ensure_dir(dest) # common for the 'include' path
-
- for dir, subdirs, files in os.walk(source):
- basedir = dir[len(source):].lstrip(os.path.sep)
- destdir = os.path.join(dest, basedir)
- if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
- continue
- for s in subdirs:
- destsubdir = os.path.join(dest, basedir, s)
- if is_base and basedir == '' and destsubdir.endswith('.data'):
- data_dirs.append(s)
- continue
- elif (is_base and
- s.endswith('.dist-info') and
- canonicalize_name(s).startswith(
- canonicalize_name(req.name))):
- assert not info_dir, ('Multiple .dist-info directories: ' +
- destsubdir + ', ' +
- ', '.join(info_dir))
- info_dir.append(destsubdir)
- for f in files:
- # Skip unwanted files
- if filter and filter(f):
- continue
- srcfile = os.path.join(dir, f)
- destfile = os.path.join(dest, basedir, f)
- # Directory creation is deferred until after the file filtering
- # above, so that we don't install empty directories; empty
- # directories can't be uninstalled.
- ensure_dir(destdir)
-
- # copyfile (called below) truncates the destination if it
- # exists and then writes the new contents. This is fine in most
- # cases, but can cause a segfault if pip has loaded a shared
- # object (e.g. from pyopenssl through its vendored urllib3).
- # Since the shared object is mmap'd, an attempt to call a
- # symbol in it will then cause a segfault. Unlinking the file
- # allows writing of new contents while allowing the process to
- # continue to use the old copy.
- if os.path.exists(destfile):
- os.unlink(destfile)
-
- # We use copyfile (not move, copy, or copy2) to be extra sure
- # that we are not moving directories over (copyfile fails for
- # directories) as well as to ensure that we are not copying
- # over any metadata because we want more control over what
- # metadata we actually copy over.
- shutil.copyfile(srcfile, destfile)
-
- # Copy over the metadata for the file, currently this only
- # includes the atime and mtime.
- st = os.stat(srcfile)
- if hasattr(os, "utime"):
- os.utime(destfile, (st.st_atime, st.st_mtime))
-
- # If our file is executable, then make our destination file
- # executable.
- if os.access(srcfile, os.X_OK):
- st = os.stat(srcfile)
- permissions = (
- st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
- )
- os.chmod(destfile, permissions)
-
- changed = False
- if fixer:
- changed = fixer(destfile)
- record_installed(srcfile, destfile, changed)
-
- clobber(source, lib_dir, True)
-
- assert info_dir, "%s .dist-info directory not found" % req
-
- # Get the defined entry points
- ep_file = os.path.join(info_dir[0], 'entry_points.txt')
- console, gui = get_entrypoints(ep_file)
-
- def is_entrypoint_wrapper(name):
- # EP, EP.exe and EP-script.py are scripts generated for
- # entry point EP by setuptools
- if name.lower().endswith('.exe'):
- matchname = name[:-4]
- elif name.lower().endswith('-script.py'):
- matchname = name[:-10]
- elif name.lower().endswith(".pya"):
- matchname = name[:-4]
- else:
- matchname = name
- # Ignore setuptools-generated scripts
- return (matchname in console or matchname in gui)
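- 
-     # For reference, entry_points.txt (parsed by get_entrypoints above) is
-     # INI-style metadata; a hypothetical example:
-     #
-     #     [console_scripts]
-     #     foo = foo.cli:main
-     #
-     #     [gui_scripts]
-     #     foo-gui = foo.gui:main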
-
- for datadir in data_dirs:
- fixer = None
- filter = None
- for subdir in os.listdir(os.path.join(wheeldir, datadir)):
- fixer = None
- if subdir == 'scripts':
- fixer = fix_script
- filter = is_entrypoint_wrapper
- source = os.path.join(wheeldir, datadir, subdir)
- dest = scheme[subdir]
- clobber(source, dest, False, fixer=fixer, filter=filter)
-
- maker = ScriptMaker(None, scheme['scripts'])
-
- # Ensure old scripts are overwritten.
- # See https://github.com/pypa/pip/issues/1800
- maker.clobber = True
-
- # Ensure we don't generate any variants for scripts because this is almost
- # never what somebody wants.
- # See https://bitbucket.org/pypa/distlib/issue/35/
- maker.variants = {''}
-
- # This is required because otherwise distlib creates scripts that are not
- # executable.
- # See https://bitbucket.org/pypa/distlib/issue/32/
- maker.set_mode = True
-
- # Simplify the script and fix the fact that the default script swallows
- # every single stack trace.
- # See https://bitbucket.org/pypa/distlib/issue/34/
- # See https://bitbucket.org/pypa/distlib/issue/33/
- def _get_script_text(entry):
- if entry.suffix is None:
- raise InstallationError(
- "Invalid script entry point: %s for req: %s - A callable "
- "suffix is required. Cf https://packaging.python.org/en/"
- "latest/distributing.html#console-scripts for more "
- "information." % (entry, req)
- )
- return maker.script_template % {
- "module": entry.prefix,
- "import_name": entry.suffix.split(".")[0],
- "func": entry.suffix,
- }
- # ignore type, because mypy disallows assigning to a method,
- # see https://github.com/python/mypy/issues/2427
- maker._get_script_text = _get_script_text # type: ignore
- maker.script_template = r"""# -*- coding: utf-8 -*-
-import re
-import sys
-
-from %(module)s import %(import_name)s
-
-if __name__ == '__main__':
- sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
- sys.exit(%(func)s())
-"""
-
- # Special case pip and setuptools to generate versioned wrappers
- #
- # The issue is that some projects (specifically, pip and setuptools) use
- # code in setup.py to create "versioned" entry points - pip2.7 on Python
- # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
- # the wheel metadata at build time, and so if the wheel is installed with
- # a *different* version of Python the entry points will be wrong. The
- # correct fix for this is to enhance the metadata to be able to describe
- # such versioned entry points, but that won't happen till Metadata 2.0 is
- # available.
- # In the meantime, projects using versioned entry points will either have
- # incorrect versioned entry points, or they will not be able to distribute
- # "universal" wheels (i.e., they will need a wheel per Python version).
- #
- # Because setuptools and pip are bundled with _ensurepip and virtualenv,
- # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
- # override the versioned entry points in the wheel and generate the
- # correct ones. This code is purely a short-term measure until Metadata 2.0
- # is available.
- #
- # To add to the level of hack in this section of code: in order to support
- # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
- # variable, which controls which versioned scripts get installed.
- #
- # ENSUREPIP_OPTIONS=altinstall
- # - Only pipX.Y and easy_install-X.Y will be generated and installed
- # ENSUREPIP_OPTIONS=install
- #     - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
- #       that this behavior applies whenever ENSUREPIP_OPTIONS is set to any
- #       value other than altinstall.
- # DEFAULT
- #     - The default behavior is to install pip, pipX, pipX.Y, easy_install
- #       and easy_install-X.Y (on Python 3.7: pip, pip3, pip3.7,
- #       easy_install and easy_install-3.7).
- pip_script = console.pop('pip', None)
- if pip_script:
- if "ENSUREPIP_OPTIONS" not in os.environ:
- spec = 'pip = ' + pip_script
- generated.extend(maker.make(spec))
-
- if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
- spec = 'pip%s = %s' % (sys.version[:1], pip_script)
- generated.extend(maker.make(spec))
-
- spec = 'pip%s = %s' % (sys.version[:3], pip_script)
- generated.extend(maker.make(spec))
- # Delete any other versioned pip entry points
- pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
- for k in pip_ep:
- del console[k]
- easy_install_script = console.pop('easy_install', None)
- if easy_install_script:
- if "ENSUREPIP_OPTIONS" not in os.environ:
- spec = 'easy_install = ' + easy_install_script
- generated.extend(maker.make(spec))
-
- spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
- generated.extend(maker.make(spec))
- # Delete any other versioned easy_install entry points
- easy_install_ep = [
- k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
- ]
- for k in easy_install_ep:
- del console[k]
-
- # Generate the console and GUI entry points specified in the wheel
- if len(console) > 0:
- generated_console_scripts = maker.make_multiple(
- ['%s = %s' % kv for kv in console.items()]
- )
- generated.extend(generated_console_scripts)
-
- if warn_script_location:
- msg = message_about_scripts_not_on_PATH(generated_console_scripts)
- if msg is not None:
- logger.warning(msg)
-
- if len(gui) > 0:
- generated.extend(
- maker.make_multiple(
- ['%s = %s' % kv for kv in gui.items()],
- {'gui': True}
- )
- )
-
- # Record pip as the installer
- installer = os.path.join(info_dir[0], 'INSTALLER')
- temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
- with open(temp_installer, 'wb') as installer_file:
- installer_file.write(b'pip\n')
- shutil.move(temp_installer, installer)
- generated.append(installer)
-
- # Record details of all files installed
- record = os.path.join(info_dir[0], 'RECORD')
- temp_record = os.path.join(info_dir[0], 'RECORD.pip')
- with open_for_csv(record, 'r') as record_in:
- with open_for_csv(temp_record, 'w+') as record_out:
- reader = csv.reader(record_in)
- outrows = get_csv_rows_for_installed(
- reader, installed=installed, changed=changed,
- generated=generated, lib_dir=lib_dir,
- )
- writer = csv.writer(record_out)
- # Sort to simplify testing.
- for row in sorted_outrows(outrows):
- writer.writerow(row)
- shutil.move(temp_record, record)
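- 
- 
- # After move_wheel_files() completes, the .dist-info directory for a
- # hypothetical package ``foo`` 1.0 contains (paths illustrative):
- #
- #     foo-1.0.dist-info/INSTALLER   -> the single line "pip"
- #     foo-1.0.dist-info/RECORD      -> one CSV row per installed file, e.g.
- #         foo/__init__.py,sha256=<digest>,<size>
- #         foo-1.0.dist-info/RECORD,,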
-
-
-def wheel_version(source_dir):
- # type: (Optional[str]) -> Optional[Tuple[int, ...]]
- """
- Return the Wheel-Version of an extracted wheel, if possible.
-
- Return None if it could not be parsed or extracted.
- """
- try:
- dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
-
- wheel_data = dist.get_metadata('WHEEL')
- wheel_data = Parser().parsestr(wheel_data)
-
- version = wheel_data['Wheel-Version'].strip()
- version = tuple(map(int, version.split('.')))
- return version
- except Exception:
- return None
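- 
- 
- # For illustration only (not part of pip): the WHEEL file read above is
- # RFC 822-style key/value metadata, for example:
- #
- #     Wheel-Version: 1.0
- #     Generator: bdist_wheel (0.32.3)
- #     Root-Is-Purelib: true
- #     Tag: py2-none-any
- #     Tag: py3-none-any
- #
- # A minimal standalone sketch of the parsing (Parser is the email.parser
- # class imported earlier in this module):
- def _example_parse_wheel_version(wheel_text):
-     msg = Parser().parsestr(wheel_text)
-     # 'Wheel-Version: 1.0' -> (1, 0)
-     return tuple(map(int, msg['Wheel-Version'].strip().split('.')))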
-
-
-def check_compatibility(version, name):
- # type: (Optional[Tuple[int, ...]], str) -> None
- """
- Raises errors or warns if called with an incompatible Wheel-Version.
-
- Pip should refuse to install a Wheel-Version that's a major series
- ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
- installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
-
- version: a 2-tuple representing a Wheel-Version (Major, Minor)
- name: name of wheel or package to raise exception about
-
- :raises UnsupportedWheel: when an incompatible Wheel-Version is given
- """
- if not version:
- raise UnsupportedWheel(
- "%s is in an unsupported or invalid wheel" % name
- )
- if version[0] > VERSION_COMPATIBLE[0]:
- raise UnsupportedWheel(
- "%s's Wheel-Version (%s) is not compatible with this version "
- "of pip" % (name, '.'.join(map(str, version)))
- )
- elif version > VERSION_COMPATIBLE:
- logger.warning(
- 'Installing from a newer Wheel-Version (%s)',
- '.'.join(map(str, version)),
- )
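- 
- 
- # Assuming VERSION_COMPATIBLE == (1, 0) (its value earlier in this module),
- # check_compatibility behaves as follows:
- #
- #     version=None    -> raises UnsupportedWheel (unparseable wheel)
- #     version=(2, 0)  -> raises UnsupportedWheel (major series ahead)
- #     version=(1, 2)  -> logs a warning, install proceeds
- #     version=(1, 0)  -> silently compatible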
-
-
-class Wheel(object):
- """A wheel file"""
-
- # TODO: Maybe move the class into the models sub-package
- # TODO: Maybe move the install code into this class
-
- wheel_file_re = re.compile(
- r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
- ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
- \.whl|\.dist-info)$""",
- re.VERBOSE
- )
-
- def __init__(self, filename):
- # type: (str) -> None
- """
- :raises InvalidWheelFilename: when the filename is invalid for a wheel
- """
- wheel_info = self.wheel_file_re.match(filename)
- if not wheel_info:
- raise InvalidWheelFilename(
- "%s is not a valid wheel filename." % filename
- )
- self.filename = filename
- self.name = wheel_info.group('name').replace('_', '-')
- # we'll assume "_" means "-" due to wheel naming scheme
- # (https://github.com/pypa/pip/issues/1150)
- self.version = wheel_info.group('ver').replace('_', '-')
- self.build_tag = wheel_info.group('build')
- self.pyversions = wheel_info.group('pyver').split('.')
- self.abis = wheel_info.group('abi').split('.')
- self.plats = wheel_info.group('plat').split('.')
-
- # All the tag combinations from this file
- self.file_tags = {
- (x, y, z) for x in self.pyversions
- for y in self.abis for z in self.plats
- }
-
- def support_index_min(self, tags=None):
- # type: (Optional[List[Pep425Tag]]) -> Optional[int]
- """
- Return the lowest index that one of the wheel's file_tag combinations
- achieves in the supported_tags list, e.g. if there are 8 supported tags
- and one of the file tags is first in the list, then return 0. Return
- None if the wheel is not supported.
- """
- if tags is None: # for mock
- tags = pep425tags.get_supported()
- indexes = [tags.index(c) for c in self.file_tags if c in tags]
- return min(indexes) if indexes else None
-
- def supported(self, tags=None):
- # type: (Optional[List[Pep425Tag]]) -> bool
- """Is this wheel supported on this system?"""
- if tags is None: # for mock
- tags = pep425tags.get_supported()
- return bool(set(tags).intersection(self.file_tags))
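- 
- 
- # An illustrative use of the Wheel model above, using pip's own filename:
- #
- #     w = Wheel('pip-19.0.3-py2.py3-none-any.whl')
- #     w.name       -> 'pip'
- #     w.version    -> '19.0.3'
- #     w.file_tags  -> {('py2', 'none', 'any'), ('py3', 'none', 'any')}
- #     w.supported([('py3', 'none', 'any')])  -> True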
-
-
-def _contains_egg_info(
- s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
- """Determine whether the string looks like an egg_info.
-
- :param s: The string to parse. E.g. foo-2.1
- """
- return bool(_egg_info_re.search(s))
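- 
- 
- # Illustrative results for the helper above:
- #
- #     _contains_egg_info('foo-2.1')  -> True   (name-version)
- #     _contains_egg_info('foo')      -> False  (no version separator)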
-
-
-def should_use_ephemeral_cache(
- req, # type: InstallRequirement
- format_control, # type: FormatControl
- autobuilding, # type: bool
- cache_available # type: bool
-):
- # type: (...) -> Optional[bool]
- """
- Return whether to build an InstallRequirement object using the
- ephemeral cache.
-
- :param cache_available: whether a cache directory is available for the
- autobuilding=True case.
-
- :return: True or False to build the requirement with ephem_cache=True
- or False, respectively; or None to skip building the requirement.
- """
- if req.constraint:
- return None
- if req.is_wheel:
- if not autobuilding:
- logger.info(
- 'Skipping %s, due to already being a wheel.', req.name,
- )
- return None
- if not autobuilding:
- return False
-
- if req.editable or not req.source_dir:
- return None
-
- if req.link and not req.link.is_artifact:
- # VCS checkout. Build wheel just for this run.
- return True
-
- if "binary" not in format_control.get_allowed_formats(
- canonicalize_name(req.name)):
- logger.info(
- "Skipping bdist_wheel for %s, due to binaries "
- "being disabled for it.", req.name,
- )
- return None
-
- link = req.link
- base, ext = link.splitext()
- if cache_available and _contains_egg_info(base):
- return False
-
- # Otherwise, build the wheel just for this run using the ephemeral
- # cache since we are either in the case of e.g. a local directory, or
- # no cache directory is available to use.
- return True
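- 
- 
- # A summary of the decision above, in evaluation order (illustrative):
- #
- #     req.constraint                        -> None  (never build)
- #     req.is_wheel, not autobuilding        -> None  (skip, logged)
- #     not autobuilding                      -> False (build into wheel dir)
- #     editable, or no source_dir            -> None
- #     VCS link (not a static artifact)      -> True  (ephemeral cache)
- #     "binary" format disallowed            -> None  (skip, logged)
- #     cache available, egg-info-style name  -> False (persistent cache)
- #     otherwise                             -> True  (ephemeral cache)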
-
-
-def format_command(
- command_args, # type: List[str]
- command_output, # type: str
-):
- # type: (...) -> str
- """
- Format command information for logging.
- """
- text = 'Command arguments: {}\n'.format(command_args)
-
- if not command_output:
- text += 'Command output: None'
- elif logger.getEffectiveLevel() > logging.DEBUG:
- text += 'Command output: [use --verbose to show]'
- else:
- if not command_output.endswith('\n'):
- command_output += '\n'
- text += (
- 'Command output:\n{}'
- '-----------------------------------------'
- ).format(command_output)
-
- return text
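- 
- 
- # Illustrative output of format_command at DEBUG verbosity (arguments and
- # output are hypothetical):
- #
- #     Command arguments: ['python', 'setup.py', 'bdist_wheel']
- #     Command output:
- #     <captured output>
- #     -----------------------------------------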
-
-
-def get_legacy_build_wheel_path(
- names, # type: List[str]
- temp_dir, # type: str
- req, # type: InstallRequirement
- command_args, # type: List[str]
- command_output, # type: str
-):
- # type: (...) -> Optional[str]
- """
- Return the path to the wheel in the temporary build directory, or None
- if the build created no files.
- """
- # Sort for determinism.
- names = sorted(names)
- if not names:
- msg = (
- 'Legacy build of wheel for {!r} created no files.\n'
- ).format(req.name)
- msg += format_command(command_args, command_output)
- logger.warning(msg)
- return None
-
- if len(names) > 1:
- msg = (
- 'Legacy build of wheel for {!r} created more than one file.\n'
- 'Filenames (choosing first): {}\n'
- ).format(req.name, names)
- msg += format_command(command_args, command_output)
- logger.warning(msg)
-
- return os.path.join(temp_dir, names[0])
-
-
-class WheelBuilder(object):
- """Build wheels from a RequirementSet."""
-
- def __init__(
- self,
- finder, # type: PackageFinder
- preparer, # type: RequirementPreparer
- wheel_cache, # type: WheelCache
- build_options=None, # type: Optional[List[str]]
- global_options=None, # type: Optional[List[str]]
- no_clean=False # type: bool
- ):
- # type: (...) -> None
- self.finder = finder
- self.preparer = preparer
- self.wheel_cache = wheel_cache
-
- self._wheel_dir = preparer.wheel_download_dir
-
- self.build_options = build_options or []
- self.global_options = global_options or []
- self.no_clean = no_clean
-
- def _build_one(self, req, output_dir, python_tag=None):
- """Build one wheel.
-
- :return: The filename of the built wheel, or None if the build failed.
- """
- # Install build deps into temporary directory (PEP 518)
- with req.build_env:
- return self._build_one_inside_env(req, output_dir,
- python_tag=python_tag)
-
- def _build_one_inside_env(self, req, output_dir, python_tag=None):
- with TempDirectory(kind="wheel") as temp_dir:
- if req.use_pep517:
- builder = self._build_one_pep517
- else:
- builder = self._build_one_legacy
- wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
- if wheel_path is not None:
- wheel_name = os.path.basename(wheel_path)
- dest_path = os.path.join(output_dir, wheel_name)
- try:
- shutil.move(wheel_path, dest_path)
- logger.info('Stored in directory: %s', output_dir)
- return dest_path
- except Exception:
- pass
- # Ignore return, we can't do anything else useful.
- self._clean_one(req)
- return None
-
- def _base_setup_args(self, req):
- # NOTE: Eventually, we'd want to also pass -S in the flags here, when
- # we're isolating. Currently, it breaks Python in virtualenvs, because
- # it relies on site.py to find parts of the standard library outside
- # the virtualenv.
- return [
- sys.executable, '-u', '-c',
- SETUPTOOLS_SHIM % req.setup_py
- ] + list(self.global_options)
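- 
-     # For a hypothetical requirement whose setup.py lives at
-     # /src/foo/setup.py, and with no global options, the list above is:
-     #
-     #     [sys.executable, '-u', '-c', SETUPTOOLS_SHIM % '/src/foo/setup.py']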
-
- def _build_one_pep517(self, req, tempd, python_tag=None):
- """Build one InstallRequirement using the PEP 517 build process.
-
- Returns path to wheel if successfully built. Otherwise, returns None.
- """
- assert req.metadata_directory is not None
- try:
- req.spin_message = 'Building wheel for %s (PEP 517)' % (req.name,)
- logger.debug('Destination directory: %s', tempd)
- wheel_name = req.pep517_backend.build_wheel(
- tempd,
- metadata_directory=req.metadata_directory
- )
- if python_tag:
- # General PEP 517 backends don't necessarily support
- # a "--python-tag" option, so we rename the wheel
- # file directly.
- new_name = replace_python_tag(wheel_name, python_tag)
- os.rename(
- os.path.join(tempd, wheel_name),
- os.path.join(tempd, new_name)
- )
- # Reassign to simplify the return at the end of the function.
- wheel_name = new_name
- except Exception:
- logger.error('Failed building wheel for %s', req.name)
- return None
- return os.path.join(tempd, wheel_name)
-
- def _build_one_legacy(self, req, tempd, python_tag=None):
- """Build one InstallRequirement using the "legacy" build process.
-
- Returns path to wheel if successfully built. Otherwise, returns None.
- """
- base_args = self._base_setup_args(req)
-
- spin_message = 'Building wheel for %s (setup.py)' % (req.name,)
- with open_spinner(spin_message) as spinner:
- logger.debug('Destination directory: %s', tempd)
- wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
- + self.build_options
-
- if python_tag is not None:
- wheel_args += ["--python-tag", python_tag]
-
- try:
- output = call_subprocess(wheel_args, cwd=req.setup_py_dir,
- show_stdout=False, spinner=spinner)
- except Exception:
- spinner.finish("error")
- logger.error('Failed building wheel for %s', req.name)
- return None
- names = os.listdir(tempd)
- wheel_path = get_legacy_build_wheel_path(
- names=names,
- temp_dir=tempd,
- req=req,
- command_args=wheel_args,
- command_output=output,
- )
- return wheel_path
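- 
-     # The legacy command assembled above is effectively (illustrative,
-     # with the setuptools shim elided):
-     #
-     #     python -u -c <shim> [global options] bdist_wheel -d <tempd>
-     #         [build options] [--python-tag <tag>]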
-
- def _clean_one(self, req):
- base_args = self._base_setup_args(req)
-
- logger.info('Running setup.py clean for %s', req.name)
- clean_args = base_args + ['clean', '--all']
- try:
- call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
- return True
- except Exception:
- logger.error('Failed cleaning build dir for %s', req.name)
- return False
-
- def build(
- self,
- requirements, # type: Iterable[InstallRequirement]
- session, # type: PipSession
- autobuilding=False # type: bool
- ):
- # type: (...) -> List[InstallRequirement]
- """Build wheels.
-
- :param unpack: If True, replace the sdist we built from with the
- newly built wheel, in preparation for installation.
- :return: True if all the wheels built correctly.
- """
- buildset = []
- format_control = self.finder.format_control
- # Whether a cache directory is available for autobuilding=True.
- cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)
-
- for req in requirements:
- ephem_cache = should_use_ephemeral_cache(
- req, format_control=format_control, autobuilding=autobuilding,
- cache_available=cache_available,
- )
- if ephem_cache is None:
- continue
-
- buildset.append((req, ephem_cache))
-
- if not buildset:
- return []
-
- # Is any wheel build not using the ephemeral cache?
- if any(not ephem_cache for _, ephem_cache in buildset):
- have_directory_for_build = self._wheel_dir or (
- autobuilding and self.wheel_cache.cache_dir
- )
- assert have_directory_for_build
-
- # TODO by @pradyunsg
- # Should break up this method into 2 separate methods.
-
- # Build the wheels.
- logger.info(
- 'Building wheels for collected packages: %s',
- ', '.join([req.name for (req, _) in buildset]),
- )
- _cache = self.wheel_cache # shorter name
- with indent_log():
- build_success, build_failure = [], []
- for req, ephem in buildset:
- python_tag = None
- if autobuilding:
- python_tag = pep425tags.implementation_tag
- if ephem:
- output_dir = _cache.get_ephem_path_for_link(req.link)
- else:
- output_dir = _cache.get_path_for_link(req.link)
- try:
- ensure_dir(output_dir)
- except OSError as e:
- logger.warning("Building wheel for %s failed: %s",
- req.name, e)
- build_failure.append(req)
- continue
- else:
- output_dir = self._wheel_dir
- wheel_file = self._build_one(
- req, output_dir,
- python_tag=python_tag,
- )
- if wheel_file:
- build_success.append(req)
- if autobuilding:
- # XXX: This is mildly duplicative with prepare_files,
- # but not close enough to pull out to a single common
- # method.
- # The code below assumes temporary source dirs -
- # prevent it from doing bad things.
- if req.source_dir and not os.path.exists(os.path.join(
- req.source_dir, PIP_DELETE_MARKER_FILENAME)):
- raise AssertionError(
- "bad source dir - missing marker")
- # Delete the source we built the wheel from
- req.remove_temporary_source()
- # Set the build directory again - the name is known from
- # the work prepare_files did.
- req.source_dir = req.build_location(
- self.preparer.build_dir
- )
- # Update the link for this.
- req.link = Link(path_to_url(wheel_file))
- assert req.link.is_wheel
- # extract the wheel into the dir
- unpack_url(
- req.link, req.source_dir, None, False,
- session=session,
- )
- else:
- build_failure.append(req)
-
- # notify success/failure
- if build_success:
- logger.info(
- 'Successfully built %s',
- ' '.join([req.name for req in build_success]),
- )
- if build_failure:
- logger.info(
- 'Failed to build %s',
- ' '.join([req.name for req in build_failure]),
- )
- # Return a list of requirements that failed to build
- return build_failure
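- 
- 
- # A hedged sketch of how pip's "wheel" command drives this builder (names
- # simplified from pip 19.0.3's commands/wheel.py; arguments illustrative):
- #
- #     wb = WheelBuilder(finder, preparer, wheel_cache,
- #                       build_options=[], global_options=[])
- #     build_failures = wb.build(requirements, session=session)
- #     if build_failures:
- #         raise CommandError("Failed to build one or more wheels")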