author     pravindalve  2023-05-30 04:20:14 +0530
committer  GitHub       2023-05-30 04:20:14 +0530
commit     cbdd7ca21f1f673a3a739065098f7cc6c9c4b881 (patch)
tree       595e888c38f00a314e751096b6bf636a544a5efe /venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils
parent     7740d1ca0c2e6bf34900460b0c58fa4d528577fb (diff)
parent     280c6aa89a15331fb76b7014957953dc72af6093 (diff)
download   Chemical-Simulator-GUI-cbdd7ca21f1f673a3a739065098f7cc6c9c4b881.tar.gz
           Chemical-Simulator-GUI-cbdd7ca21f1f673a3a739065098f7cc6c9c4b881.tar.bz2
           Chemical-Simulator-GUI-cbdd7ca21f1f673a3a739065098f7cc6c9c4b881.zip
Merge pull request #63 from brenda-br/Fix-35 (HEAD, master)
Restructure Project and Deployment
Diffstat (limited to 'venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils')
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py  0
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc  bin 209 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc  bin 8056 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc  bin 6157 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc  bin 2583 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc  bin 1286 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc  bin 678 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc  bin 1697 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc  bin 3616 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc  bin 7843 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc  bin 25748 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc  bin 1958 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc  bin 4094 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc  bin 2630 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc  bin 404 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc  bin 4931 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc  bin 1353 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc  bin 12315 -> 0 bytes
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py  270
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py  264
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py  90
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py  39
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py  30
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py  93
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py  115
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py  318
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py  1040
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py  40
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py  164
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py  85
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py  8
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py  155
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py  29
-rw-r--r--  venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py  441
34 files changed, 0 insertions, 3181 deletions
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py
+++ /dev/null
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 964b1b1..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
deleted file mode 100644
index 7357554..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
deleted file mode 100644
index 8b19861..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/compat.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
deleted file mode 100644
index 80d7f26..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
deleted file mode 100644
index d09bef0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/encoding.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
deleted file mode 100644
index 5fc3e1d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
deleted file mode 100644
index 678cffb..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/glibc.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
deleted file mode 100644
index e6fc295..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/hashes.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
deleted file mode 100644
index 20fa454..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/logging.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
deleted file mode 100644
index 6000d5f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/misc.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc
deleted file mode 100644
index 8fb6ad9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/models.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
deleted file mode 100644
index 43fdcf0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/outdated.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
deleted file mode 100644
index af6c672..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/packaging.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
deleted file mode 100644
index 96efb71..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
deleted file mode 100644
index f2795a2..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
deleted file mode 100644
index c512b9f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/typing.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
deleted file mode 100644
index d31671f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__pycache__/ui.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py
deleted file mode 100644
index 9af9fa7..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/appdirs.py
+++ /dev/null
@@ -1,270 +0,0 @@
-"""
-This code was taken from https://github.com/ActiveState/appdirs and modified
-to suit our purposes.
-"""
-from __future__ import absolute_import
-
-import os
-import sys
-
-from pip._vendor.six import PY2, text_type
-
-from pip._internal.utils.compat import WINDOWS, expanduser
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- List, Union
- )
-
-
-def user_cache_dir(appname):
- # type: (str) -> str
- r"""
- Return full path to the user-specific cache dir for this application.
-
- "appname" is the name of application.
-
- Typical user cache directories are:
- macOS: ~/Library/Caches/<AppName>
- Unix: ~/.cache/<AppName> (XDG default)
- Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
-
- On Windows the only suggestion in the MSDN docs is that local settings go
- in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
- non-roaming app data dir (the default returned by `user_data_dir`). Apps
- typically put cache data somewhere *under* the given dir here. Some
- examples:
- ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
- ...\Acme\SuperApp\Cache\1.0
-
- OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
- """
- if WINDOWS:
- # Get the base path
- path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
-
- # When using Python 2, return paths as bytes on Windows like we do on
- # other operating systems. See helper function docs for more details.
- if PY2 and isinstance(path, text_type):
- path = _win_path_to_bytes(path)
-
- # Add our app name and Cache directory to it
- path = os.path.join(path, appname, "Cache")
- elif sys.platform == "darwin":
- # Get the base path
- path = expanduser("~/Library/Caches")
-
- # Add our app name to it
- path = os.path.join(path, appname)
- else:
- # Get the base path
- path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
-
- # Add our app name to it
- path = os.path.join(path, appname)
-
- return path
-
-
-def user_data_dir(appname, roaming=False):
- # type: (str, bool) -> str
- r"""
- Return full path to the user-specific data dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default False) can be set True to use the Windows
- roaming appdata directory. That means that for users on a Windows
- network setup for roaming profiles, this user data will be
- sync'd on login. See
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
- for a discussion of issues.
-
- Typical user data directories are:
- macOS: ~/Library/Application Support/<AppName>
- if it exists, else ~/.config/<AppName>
- Unix: ~/.local/share/<AppName> # or in
- $XDG_DATA_HOME, if defined
- Win XP (not roaming): C:\Documents and Settings\<username>\ ...
- ...Application Data\<AppName>
- Win XP (roaming): C:\Documents and Settings\<username>\Local ...
- ...Settings\Application Data\<AppName>
- Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName>
- Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName>
-
- For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
- That means, by default "~/.local/share/<AppName>".
- """
- if WINDOWS:
- const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
- path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
- elif sys.platform == "darwin":
- path = os.path.join(
- expanduser('~/Library/Application Support/'),
- appname,
- ) if os.path.isdir(os.path.join(
- expanduser('~/Library/Application Support/'),
- appname,
- )
- ) else os.path.join(
- expanduser('~/.config/'),
- appname,
- )
- else:
- path = os.path.join(
- os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
- appname,
- )
-
- return path
-
-
-def user_config_dir(appname, roaming=True):
- # type: (str, bool) -> str
- """Return full path to the user-specific config dir for this application.
-
- "appname" is the name of application.
- If None, just the system directory is returned.
- "roaming" (boolean, default True) can be set False to not use the
- Windows roaming appdata directory. That means that for users on a
- Windows network setup for roaming profiles, this user data will be
- sync'd on login. See
- <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
- for a discussion of issues.
-
- Typical user data directories are:
- macOS: same as user_data_dir
- Unix: ~/.config/<AppName>
- Win *: same as user_data_dir
-
- For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
- That means, by default "~/.config/<AppName>".
- """
- if WINDOWS:
- path = user_data_dir(appname, roaming=roaming)
- elif sys.platform == "darwin":
- path = user_data_dir(appname)
- else:
- path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
- path = os.path.join(path, appname)
-
- return path
-
-
-# for the discussion regarding site_config_dirs locations
-# see <https://github.com/pypa/pip/issues/1733>
-def site_config_dirs(appname):
- # type: (str) -> List[str]
- r"""Return a list of potential user-shared config dirs for this application.
-
- "appname" is the name of application.
-
- Typical user config directories are:
- macOS: /Library/Application Support/<AppName>/
- Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
- $XDG_CONFIG_DIRS
- Win XP: C:\Documents and Settings\All Users\Application ...
- ...Data\<AppName>\
- Vista: (Fail! "C:\ProgramData" is a hidden *system* directory
- on Vista.)
- Win 7: Hidden, but writeable on Win 7:
- C:\ProgramData\<AppName>\
- """
- if WINDOWS:
- path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
- pathlist = [os.path.join(path, appname)]
- elif sys.platform == 'darwin':
- pathlist = [os.path.join('/Library/Application Support', appname)]
- else:
- # try looking in $XDG_CONFIG_DIRS
- xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
- if xdg_config_dirs:
- pathlist = [
- os.path.join(expanduser(x), appname)
- for x in xdg_config_dirs.split(os.pathsep)
- ]
- else:
- pathlist = []
-
- # always look in /etc directly as well
- pathlist.append('/etc')
-
- return pathlist
-
-
-# -- Windows support functions --
-
-def _get_win_folder_from_registry(csidl_name):
- # type: (str) -> str
- """
- This is a fallback technique at best. I'm not sure if using the
- registry for this guarantees us the correct answer for all CSIDL_*
- names.
- """
- import _winreg
-
- shell_folder_name = {
- "CSIDL_APPDATA": "AppData",
- "CSIDL_COMMON_APPDATA": "Common AppData",
- "CSIDL_LOCAL_APPDATA": "Local AppData",
- }[csidl_name]
-
- key = _winreg.OpenKey(
- _winreg.HKEY_CURRENT_USER,
- r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
- )
- directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
- return directory
-
-
-def _get_win_folder_with_ctypes(csidl_name):
- # type: (str) -> str
- csidl_const = {
- "CSIDL_APPDATA": 26,
- "CSIDL_COMMON_APPDATA": 35,
- "CSIDL_LOCAL_APPDATA": 28,
- }[csidl_name]
-
- buf = ctypes.create_unicode_buffer(1024)
- ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
- # Downgrade to short path name if have highbit chars. See
- # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
- has_high_char = False
- for c in buf:
- if ord(c) > 255:
- has_high_char = True
- break
- if has_high_char:
- buf2 = ctypes.create_unicode_buffer(1024)
- if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
- buf = buf2
-
- return buf.value
-
-
-if WINDOWS:
- try:
- import ctypes
- _get_win_folder = _get_win_folder_with_ctypes
- except ImportError:
- _get_win_folder = _get_win_folder_from_registry
-
-
-def _win_path_to_bytes(path):
- """Encode Windows paths to bytes. Only used on Python 2.
-
- Motivation is to be consistent with other operating systems where paths
- are also returned as bytes. This avoids problems mixing bytes and Unicode
- elsewhere in the codebase. For more details and discussion see
- <https://github.com/pypa/pip/issues/3463>.
-
- If encoding using ASCII and MBCS fails, return the original Unicode path.
- """
- for encoding in ('ASCII', 'MBCS'):
- try:
- return path.encode(encoding)
- except (UnicodeEncodeError, LookupError):
- pass
- return path
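For reference, a minimal sketch of how the appdirs helpers deleted above resolve per-platform directories (assuming pip 19.0.3's vendored module is importable; the "pip" app name and the example paths in the comments are illustrative):

from pip._internal.utils.appdirs import (
    site_config_dirs,
    user_cache_dir,
    user_config_dir,
)

# On Linux these follow the XDG spec; on Windows they resolve CSIDL_* folders
# and, for the cache, append a "Cache" subdirectory.
print(user_cache_dir("pip"))     # e.g. /home/user/.cache/pip
print(user_config_dir("pip"))    # e.g. /home/user/.config/pip
print(site_config_dirs("pip"))   # e.g. ['/etc/xdg/pip', '/etc']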
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py
deleted file mode 100644
index 2d8b3bf..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/compat.py
+++ /dev/null
@@ -1,264 +0,0 @@
-"""Stuff that differs in different Python versions and platform
-distributions."""
-from __future__ import absolute_import, division
-
-import codecs
-import locale
-import logging
-import os
-import shutil
-import sys
-
-from pip._vendor.six import text_type
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Tuple, Text # noqa: F401
-
-try:
- import ipaddress
-except ImportError:
- try:
- from pip._vendor import ipaddress # type: ignore
- except ImportError:
- import ipaddr as ipaddress # type: ignore
- ipaddress.ip_address = ipaddress.IPAddress # type: ignore
- ipaddress.ip_network = ipaddress.IPNetwork # type: ignore
-
-
-__all__ = [
- "ipaddress", "uses_pycache", "console_to_str", "native_str",
- "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
- "get_extension_suffixes",
-]
-
-
-logger = logging.getLogger(__name__)
-
-if sys.version_info >= (3, 4):
- uses_pycache = True
- from importlib.util import cache_from_source
-else:
- import imp
-
- try:
- cache_from_source = imp.cache_from_source # type: ignore
- except AttributeError:
- # does not use __pycache__
- cache_from_source = None
-
- uses_pycache = cache_from_source is not None
-
-
-if sys.version_info >= (3, 5):
- backslashreplace_decode = "backslashreplace"
-else:
- # In version 3.4 and older, backslashreplace exists
- # but does not support use for decoding.
- # We implement our own replace handler for this
- # situation, so that we can consistently use
- # backslash replacement for all versions.
- def backslashreplace_decode_fn(err):
- raw_bytes = (err.object[i] for i in range(err.start, err.end))
- if sys.version_info[0] == 2:
- # Python 2 gave us characters - convert to numeric bytes
- raw_bytes = (ord(b) for b in raw_bytes)
- return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
- codecs.register_error(
- "backslashreplace_decode",
- backslashreplace_decode_fn,
- )
- backslashreplace_decode = "backslashreplace_decode"
-
-
-def console_to_str(data):
- # type: (bytes) -> Text
- """Return a string, safe for output, of subprocess output.
-
- We assume the data is in the locale preferred encoding.
- If it won't decode properly, we warn the user but decode as
- best we can.
-
- We also ensure that the output can be safely written to
- standard output without encoding errors.
- """
-
- # First, get the encoding we assume. This is the preferred
- # encoding for the locale, unless that is not found, or
- # it is ASCII, in which case assume UTF-8
- encoding = locale.getpreferredencoding()
- if (not encoding) or codecs.lookup(encoding).name == "ascii":
- encoding = "utf-8"
-
- # Now try to decode the data - if we fail, warn the user and
- # decode with replacement.
- try:
- decoded_data = data.decode(encoding)
- except UnicodeDecodeError:
- logger.warning(
- "Subprocess output does not appear to be encoded as %s",
- encoding,
- )
- decoded_data = data.decode(encoding, errors=backslashreplace_decode)
-
- # Make sure we can print the output, by encoding it to the output
- # encoding with replacement of unencodable characters, and then
- # decoding again.
- # We use stderr's encoding because it's less likely to be
- # redirected and if we don't find an encoding we skip this
- # step (on the assumption that output is wrapped by something
- # that won't fail).
- # The double getattr is to deal with the possibility that we're
- # being called in a situation where sys.__stderr__ doesn't exist,
- # or doesn't have an encoding attribute. Neither of these cases
- # should occur in normal pip use, but there's no harm in checking
- # in case people use pip in (unsupported) unusual situations.
- output_encoding = getattr(getattr(sys, "__stderr__", None),
- "encoding", None)
-
- if output_encoding:
- output_encoded = decoded_data.encode(
- output_encoding,
- errors="backslashreplace"
- )
- decoded_data = output_encoded.decode(output_encoding)
-
- return decoded_data
-
-
-if sys.version_info >= (3,):
- def native_str(s, replace=False):
- # type: (str, bool) -> str
- if isinstance(s, bytes):
- return s.decode('utf-8', 'replace' if replace else 'strict')
- return s
-
-else:
- def native_str(s, replace=False):
- # type: (str, bool) -> str
- # Replace is ignored -- unicode to UTF-8 can't fail
- if isinstance(s, text_type):
- return s.encode('utf-8')
- return s
-
-
-def get_path_uid(path):
- # type: (str) -> int
- """
- Return path's uid.
-
- Does not follow symlinks:
- https://github.com/pypa/pip/pull/935#discussion_r5307003
-
- Placed this function in compat due to differences on AIX and
- Jython, that should eventually go away.
-
- :raises OSError: When path is a symlink or can't be read.
- """
- if hasattr(os, 'O_NOFOLLOW'):
- fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
- file_uid = os.fstat(fd).st_uid
- os.close(fd)
- else: # AIX and Jython
- # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
- if not os.path.islink(path):
- # older versions of Jython don't have `os.fstat`
- file_uid = os.stat(path).st_uid
- else:
- # raise OSError for parity with os.O_NOFOLLOW above
- raise OSError(
- "%s is a symlink; Will not return uid for symlinks" % path
- )
- return file_uid
-
-
-if sys.version_info >= (3, 4):
- from importlib.machinery import EXTENSION_SUFFIXES
-
- def get_extension_suffixes():
- return EXTENSION_SUFFIXES
-else:
- from imp import get_suffixes
-
- def get_extension_suffixes():
- return [suffix[0] for suffix in get_suffixes()]
-
-
-def expanduser(path):
- # type: (str) -> str
- """
- Expand ~ and ~user constructions.
-
- Includes a workaround for https://bugs.python.org/issue14768
- """
- expanded = os.path.expanduser(path)
- if path.startswith('~/') and expanded.startswith('//'):
- expanded = expanded[1:]
- return expanded
-
-
-# packages in the stdlib that may have installation metadata, but should not be
-# considered 'installed'. this theoretically could be determined based on
-# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
-# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
-# make this ineffective, so hard-coding
-stdlib_pkgs = {"python", "wsgiref", "argparse"}
-
-
-# windows detection, covers cpython and ironpython
-WINDOWS = (sys.platform.startswith("win") or
- (sys.platform == 'cli' and os.name == 'nt'))
-
-
-def samefile(file1, file2):
- # type: (str, str) -> bool
- """Provide an alternative for os.path.samefile on Windows/Python2"""
- if hasattr(os.path, 'samefile'):
- return os.path.samefile(file1, file2)
- else:
- path1 = os.path.normcase(os.path.abspath(file1))
- path2 = os.path.normcase(os.path.abspath(file2))
- return path1 == path2
-
-
-if hasattr(shutil, 'get_terminal_size'):
- def get_terminal_size():
- # type: () -> Tuple[int, int]
- """
- Returns a tuple (x, y) representing the width(x) and the height(y)
- in characters of the terminal window.
- """
- return tuple(shutil.get_terminal_size()) # type: ignore
-else:
- def get_terminal_size():
- # type: () -> Tuple[int, int]
- """
- Returns a tuple (x, y) representing the width(x) and the height(y)
- in characters of the terminal window.
- """
- def ioctl_GWINSZ(fd):
- try:
- import fcntl
- import termios
- import struct
- cr = struct.unpack_from(
- 'hh',
- fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
- )
- except Exception:
- return None
- if cr == (0, 0):
- return None
- return cr
- cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
- if not cr:
- try:
- fd = os.open(os.ctermid(), os.O_RDONLY)
- cr = ioctl_GWINSZ(fd)
- os.close(fd)
- except Exception:
- pass
- if not cr:
- cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
- return int(cr[1]), int(cr[0])
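A minimal sketch of the two compat helpers used most widely in pip, decoding subprocess output and measuring the terminal (assuming the vendored module above is importable; the command run is illustrative):

import subprocess
import sys

from pip._internal.utils.compat import console_to_str, get_terminal_size

# Decode raw subprocess bytes using the locale's preferred encoding, falling
# back to a warning plus backslash-replacement if the bytes do not decode.
raw = subprocess.check_output([sys.executable, "--version"], stderr=subprocess.STDOUT)
print(console_to_str(raw))

# Best-effort (columns, lines) of the attached terminal.
width, height = get_terminal_size()
print(width, height)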
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py
deleted file mode 100644
index 0beaf74..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py
+++ /dev/null
@@ -1,90 +0,0 @@
-"""
-A module that implements tooling to enable easy warnings about deprecations.
-"""
-from __future__ import absolute_import
-
-import logging
-import warnings
-
-from pip._vendor.packaging.version import parse
-
-from pip import __version__ as current_version
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Optional # noqa: F401
-
-
-class PipDeprecationWarning(Warning):
- pass
-
-
-_original_showwarning = None # type: Any
-
-
-# Warnings <-> Logging Integration
-def _showwarning(message, category, filename, lineno, file=None, line=None):
- if file is not None:
- if _original_showwarning is not None:
- _original_showwarning(
- message, category, filename, lineno, file, line,
- )
- elif issubclass(category, PipDeprecationWarning):
- # We use a specially named logger which will handle all of the
- # deprecation messages for pip.
- logger = logging.getLogger("pip._internal.deprecations")
- logger.warning(message)
- else:
- _original_showwarning(
- message, category, filename, lineno, file, line,
- )
-
-
-def install_warning_logger():
- # type: () -> None
- # Enable our Deprecation Warnings
- warnings.simplefilter("default", PipDeprecationWarning, append=True)
-
- global _original_showwarning
-
- if _original_showwarning is None:
- _original_showwarning = warnings.showwarning
- warnings.showwarning = _showwarning
-
-
-def deprecated(reason, replacement, gone_in, issue=None):
- # type: (str, Optional[str], Optional[str], Optional[int]) -> None
- """Helper to deprecate existing functionality.
-
- reason:
- Textual reason shown to the user about why this functionality has
- been deprecated.
- replacement:
- Textual suggestion shown to the user about what alternative
- functionality they can use.
- gone_in:
- The version of pip does this functionality should get removed in.
- Raises errors if pip's current version is greater than or equal to
- this.
- issue:
- Issue number on the tracker that would serve as a useful place for
- users to find related discussion and provide feedback.
-
- Always pass replacement, gone_in and issue as keyword arguments for clarity
- at the call site.
- """
-
- # Construct a nice message.
- # This is purposely eagerly formatted as we want it to appear as if someone
- # typed this entire message out.
- message = "DEPRECATION: " + reason
- if replacement is not None:
- message += " A possible replacement is {}.".format(replacement)
- if issue is not None:
- url = "https://github.com/pypa/pip/issues/" + str(issue)
- message += " You can find discussion regarding this at {}.".format(url)
-
- # Raise as an error if it has to be removed.
- if gone_in is not None and parse(current_version) >= parse(gone_in):
- raise PipDeprecationWarning(message)
- warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
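A minimal sketch of the deprecation helper deleted above (assuming the vendored module is importable; the option name, replacement, issue number, and gone_in version are hypothetical, chosen so the call warns rather than raises):

from pip._internal.utils.deprecation import deprecated, install_warning_logger

# Route PipDeprecationWarning through the "pip._internal.deprecations" logger.
install_warning_logger()

deprecated(
    "The --frobnicate option is deprecated.",  # hypothetical option, for illustration
    replacement="--frob",                      # hypothetical replacement
    gone_in="99.0",                            # far above the running pip version, so this warns
    issue=12345,                               # hypothetical tracker issue
)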
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py
deleted file mode 100644
index d36defa..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import codecs
-import locale
-import re
-import sys
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import List, Tuple, Text # noqa: F401
-
-BOMS = [
- (codecs.BOM_UTF8, 'utf8'),
- (codecs.BOM_UTF16, 'utf16'),
- (codecs.BOM_UTF16_BE, 'utf16-be'),
- (codecs.BOM_UTF16_LE, 'utf16-le'),
- (codecs.BOM_UTF32, 'utf32'),
- (codecs.BOM_UTF32_BE, 'utf32-be'),
- (codecs.BOM_UTF32_LE, 'utf32-le'),
-] # type: List[Tuple[bytes, Text]]
-
-ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
-
-
-def auto_decode(data):
- # type: (bytes) -> Text
- """Check a bytes string for a BOM to correctly detect the encoding
-
- Fallback to locale.getpreferredencoding(False) like open() on Python3"""
- for bom, encoding in BOMS:
- if data.startswith(bom):
- return data[len(bom):].decode(encoding)
- # Lets check the first two lines as in PEP263
- for line in data.split(b'\n')[:2]:
- if line[0:1] == b'#' and ENCODING_RE.search(line):
- encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
- return data.decode(encoding)
- return data.decode(
- locale.getpreferredencoding(False) or sys.getdefaultencoding(),
- )
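A minimal sketch of auto_decode(), showing the BOM and PEP 263 cookie paths described in its docstring (assuming the vendored module is importable; the sample bytes are illustrative):

import codecs

from pip._internal.utils.encoding import auto_decode

# A UTF-8 BOM selects utf-8 and is stripped from the decoded result.
assert auto_decode(codecs.BOM_UTF8 + "héllo".encode("utf-8")) == "héllo"

# A PEP 263 coding cookie in the first two lines selects the declared codec.
source = b"# -*- coding: latin-1 -*-\nname = '\xe9'\n"
assert auto_decode(source).endswith("name = 'é'\n")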
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py
deleted file mode 100644
index 1e6b033..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import os
-import os.path
-
-from pip._internal.utils.compat import get_path_uid
-
-
-def check_path_owner(path):
- # type: (str) -> bool
- # If we don't have a way to check the effective uid of this process, then
- # we'll just assume that we own the directory.
- if not hasattr(os, "geteuid"):
- return True
-
- previous = None
- while path != previous:
- if os.path.lexists(path):
- # Check if path is writable by current user.
- if os.geteuid() == 0:
- # Special handling for root user in order to handle properly
- # cases where users use sudo without -H flag.
- try:
- path_uid = get_path_uid(path)
- except OSError:
- return False
- return path_uid == 0
- else:
- return os.access(path, os.W_OK)
- else:
- previous, path = path, os.path.dirname(path)
- return False # assume we don't own the path
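A minimal sketch of check_path_owner(), which walks up to the nearest existing ancestor and asks whether the current user may write there (assuming the vendored module is importable; the paths are illustrative):

import tempfile

from pip._internal.utils.filesystem import check_path_owner

print(check_path_owner(tempfile.gettempdir()))  # normally True
print(check_path_owner("/root/new/cache/dir"))  # usually False for an unprivileged POSIX user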
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py
deleted file mode 100644
index 8a51f69..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/glibc.py
+++ /dev/null
@@ -1,93 +0,0 @@
-from __future__ import absolute_import
-
-import ctypes
-import re
-import warnings
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, Tuple # noqa: F401
-
-
-def glibc_version_string():
- # type: () -> Optional[str]
- "Returns glibc version string, or None if not using glibc."
-
- # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
- # manpage says, "If filename is NULL, then the returned handle is for the
- # main program". This way we can let the linker do the work to figure out
- # which libc our process is actually using.
- process_namespace = ctypes.CDLL(None)
- try:
- gnu_get_libc_version = process_namespace.gnu_get_libc_version
- except AttributeError:
- # Symbol doesn't exist -> therefore, we are not linked to
- # glibc.
- return None
-
- # Call gnu_get_libc_version, which returns a string like "2.5"
- gnu_get_libc_version.restype = ctypes.c_char_p
- version_str = gnu_get_libc_version()
- # py2 / py3 compatibility:
- if not isinstance(version_str, str):
- version_str = version_str.decode("ascii")
-
- return version_str
-
-
-# Separated out from have_compatible_glibc for easier unit testing
-def check_glibc_version(version_str, required_major, minimum_minor):
- # type: (str, int, int) -> bool
- # Parse string and check against requested version.
- #
- # We use a regexp instead of str.split because we want to discard any
- # random junk that might come after the minor version -- this might happen
- # in patched/forked versions of glibc (e.g. Linaro's version of glibc
- # uses version strings like "2.20-2014.11"). See gh-3588.
- m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
- if not m:
- warnings.warn("Expected glibc version with 2 components major.minor,"
- " got: %s" % version_str, RuntimeWarning)
- return False
- return (int(m.group("major")) == required_major and
- int(m.group("minor")) >= minimum_minor)
-
-
-def have_compatible_glibc(required_major, minimum_minor):
- # type: (int, int) -> bool
- version_str = glibc_version_string() # type: Optional[str]
- if version_str is None:
- return False
- return check_glibc_version(version_str, required_major, minimum_minor)
-
-
-# platform.libc_ver regularly returns completely nonsensical glibc
-# versions. E.g. on my computer, platform says:
-#
-# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.7')
-# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.9')
-#
-# But the truth is:
-#
-# ~$ ldd --version
-# ldd (Debian GLIBC 2.22-11) 2.22
-#
-# This is unfortunate, because it means that the linehaul data on libc
-# versions that was generated by pip 8.1.2 and earlier is useless and
-# misleading. Solution: instead of using platform, use our code that actually
-# works.
-def libc_ver():
- # type: () -> Tuple[str, str]
- """Try to determine the glibc version
-
- Returns a tuple of strings (lib, version) which default to empty strings
- in case the lookup fails.
- """
- glibc_version = glibc_version_string()
- if glibc_version is None:
- return ("", "")
- else:
- return ("glibc", glibc_version)
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py
deleted file mode 100644
index c6df7a1..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/hashes.py
+++ /dev/null
@@ -1,115 +0,0 @@
-from __future__ import absolute_import
-
-import hashlib
-
-from pip._vendor.six import iteritems, iterkeys, itervalues
-
-from pip._internal.exceptions import (
- HashMismatch, HashMissing, InstallationError,
-)
-from pip._internal.utils.misc import read_chunks
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Dict, List, BinaryIO, NoReturn, Iterator
- )
- from pip._vendor.six import PY3
- if PY3:
- from hashlib import _Hash # noqa: F401
- else:
- from hashlib import _hash as _Hash # noqa: F401
-
-
-# The recommended hash algo of the moment. Change this whenever the state of
-# the art changes; it won't hurt backward compatibility.
-FAVORITE_HASH = 'sha256'
-
-
-# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
-# Currently, those are the ones at least as collision-resistant as sha256.
-STRONG_HASHES = ['sha256', 'sha384', 'sha512']
-
-
-class Hashes(object):
- """A wrapper that builds multiple hashes at once and checks them against
- known-good values
-
- """
- def __init__(self, hashes=None):
- # type: (Dict[str, List[str]]) -> None
- """
- :param hashes: A dict of algorithm names pointing to lists of allowed
- hex digests
- """
- self._allowed = {} if hashes is None else hashes
-
- def check_against_chunks(self, chunks):
- # type: (Iterator[bytes]) -> None
- """Check good hashes against ones built from iterable of chunks of
- data.
-
- Raise HashMismatch if none match.
-
- """
- gots = {}
- for hash_name in iterkeys(self._allowed):
- try:
- gots[hash_name] = hashlib.new(hash_name)
- except (ValueError, TypeError):
- raise InstallationError('Unknown hash name: %s' % hash_name)
-
- for chunk in chunks:
- for hash in itervalues(gots):
- hash.update(chunk)
-
- for hash_name, got in iteritems(gots):
- if got.hexdigest() in self._allowed[hash_name]:
- return
- self._raise(gots)
-
- def _raise(self, gots):
- # type: (Dict[str, _Hash]) -> NoReturn
- raise HashMismatch(self._allowed, gots)
-
- def check_against_file(self, file):
- # type: (BinaryIO) -> None
- """Check good hashes against a file-like object
-
- Raise HashMismatch if none match.
-
- """
- return self.check_against_chunks(read_chunks(file))
-
- def check_against_path(self, path):
- # type: (str) -> None
- with open(path, 'rb') as file:
- return self.check_against_file(file)
-
- def __nonzero__(self):
- # type: () -> bool
- """Return whether I know any known-good hashes."""
- return bool(self._allowed)
-
- def __bool__(self):
- # type: () -> bool
- return self.__nonzero__()
-
-
-class MissingHashes(Hashes):
- """A workalike for Hashes used when we're missing a hash for a requirement
-
- It computes the actual hash of the requirement and raises a HashMissing
- exception showing it to the user.
-
- """
- def __init__(self):
- # type: () -> None
- """Don't offer the ``hashes`` kwarg."""
- # Pass our favorite hash in to generate a "gotten hash". With the
- # empty list, it will never match, so an error will always raise.
- super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
-
- def _raise(self, gots):
- # type: (Dict[str, _Hash]) -> NoReturn
- raise HashMissing(gots[FAVORITE_HASH].hexdigest())
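A minimal sketch of the Hashes wrapper deleted above: compute the expected sha256 digest, let check_against_path() verify it, and show that a wrong digest raises HashMismatch (assuming the vendored modules are importable; the file checked is this script itself):

import hashlib

from pip._internal.exceptions import HashMismatch
from pip._internal.utils.hashes import Hashes

path = __file__  # any readable file works
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

Hashes({"sha256": [digest]}).check_against_path(path)  # matching digest: returns silently

try:
    Hashes({"sha256": ["0" * 64]}).check_against_path(path)
except HashMismatch:
    print("mismatch detected")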
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py
deleted file mode 100644
index 579d696..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/logging.py
+++ /dev/null
@@ -1,318 +0,0 @@
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import logging
-import logging.handlers
-import os
-import sys
-
-from pip._vendor.six import PY2
-
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.misc import ensure_dir
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading # type: ignore
-
-
-try:
- from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
- colorama = None
-
-
-_log_state = threading.local()
-_log_state.indentation = 0
-
-
-class BrokenStdoutLoggingError(Exception):
- """
- Raised if BrokenPipeError occurs for the stdout stream while logging.
- """
- pass
-
-
-# BrokenPipeError does not exist in Python 2 and, in addition, manifests
-# differently in Windows and non-Windows.
-if WINDOWS:
- # In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
- # https://bugs.python.org/issue19612
- # https://bugs.python.org/issue30418
- if PY2:
- def _is_broken_pipe_error(exc_class, exc):
- """See the docstring for non-Windows Python 3 below."""
- return (exc_class is IOError and
- exc.errno in (errno.EINVAL, errno.EPIPE))
- else:
- # In Windows, a broken pipe IOError became OSError in Python 3.
- def _is_broken_pipe_error(exc_class, exc):
- """See the docstring for non-Windows Python 3 below."""
- return ((exc_class is BrokenPipeError) or # noqa: F821
- (exc_class is OSError and
- exc.errno in (errno.EINVAL, errno.EPIPE)))
-elif PY2:
- def _is_broken_pipe_error(exc_class, exc):
- """See the docstring for non-Windows Python 3 below."""
- return (exc_class is IOError and exc.errno == errno.EPIPE)
-else:
- # Then we are in the non-Windows Python 3 case.
- def _is_broken_pipe_error(exc_class, exc):
- """
- Return whether an exception is a broken pipe error.
-
- Args:
- exc_class: an exception class.
- exc: an exception instance.
- """
- return (exc_class is BrokenPipeError) # noqa: F821
-
-
-@contextlib.contextmanager
-def indent_log(num=2):
- """
- A context manager which will cause the log output to be indented for any
- log messages emitted inside it.
- """
- _log_state.indentation += num
- try:
- yield
- finally:
- _log_state.indentation -= num
-
-
-def get_indentation():
- return getattr(_log_state, 'indentation', 0)
-
-
-class IndentingFormatter(logging.Formatter):
- def __init__(self, *args, **kwargs):
- """
- A logging.Formatter obeying containing indent_log contexts.
-
- :param add_timestamp: A bool indicating output lines should be prefixed
- with their record's timestamp.
- """
- self.add_timestamp = kwargs.pop("add_timestamp", False)
- super(IndentingFormatter, self).__init__(*args, **kwargs)
-
- def format(self, record):
- """
- Calls the standard formatter, but will indent all of the log messages
- by our current indentation level.
- """
- formatted = super(IndentingFormatter, self).format(record)
- prefix = ''
- if self.add_timestamp:
- prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ")
- prefix += " " * get_indentation()
- formatted = "".join([
- prefix + line
- for line in formatted.splitlines(True)
- ])
- return formatted
-
-
-def _color_wrap(*colors):
- def wrapped(inp):
- return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
- return wrapped
-
-
-class ColorizedStreamHandler(logging.StreamHandler):
-
- # Don't build up a list of colors if we don't have colorama
- if colorama:
- COLORS = [
- # This needs to be in order from highest logging level to lowest.
- (logging.ERROR, _color_wrap(colorama.Fore.RED)),
- (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
- ]
- else:
- COLORS = []
-
- def __init__(self, stream=None, no_color=None):
- logging.StreamHandler.__init__(self, stream)
- self._no_color = no_color
-
- if WINDOWS and colorama:
- self.stream = colorama.AnsiToWin32(self.stream)
-
- def _using_stdout(self):
- """
- Return whether the handler is using sys.stdout.
- """
- if WINDOWS and colorama:
- # Then self.stream is an AnsiToWin32 object.
- return self.stream.wrapped is sys.stdout
-
- return self.stream is sys.stdout
-
- def should_color(self):
- # Don't colorize things if we do not have colorama or if told not to
- if not colorama or self._no_color:
- return False
-
- real_stream = (
- self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
- else self.stream.wrapped
- )
-
- # If the stream is a tty we should color it
- if hasattr(real_stream, "isatty") and real_stream.isatty():
- return True
-
- # If we have an ANSI term we should color it
- if os.environ.get("TERM") == "ANSI":
- return True
-
- # If anything else we should not color it
- return False
-
- def format(self, record):
- msg = logging.StreamHandler.format(self, record)
-
- if self.should_color():
- for level, color in self.COLORS:
- if record.levelno >= level:
- msg = color(msg)
- break
-
- return msg
-
- # The logging module says handleError() can be customized.
- def handleError(self, record):
- exc_class, exc = sys.exc_info()[:2]
- # If a broken pipe occurred while calling write() or flush() on the
- # stdout stream in logging's Handler.emit(), then raise our special
- # exception so we can handle it in main() instead of logging the
- # broken pipe error and continuing.
- if (exc_class and self._using_stdout() and
- _is_broken_pipe_error(exc_class, exc)):
- raise BrokenStdoutLoggingError()
-
- return super(ColorizedStreamHandler, self).handleError(record)
-
-
-class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
-
- def _open(self):
- ensure_dir(os.path.dirname(self.baseFilename))
- return logging.handlers.RotatingFileHandler._open(self)
-
-
-class MaxLevelFilter(logging.Filter):
-
- def __init__(self, level):
- self.level = level
-
- def filter(self, record):
- return record.levelno < self.level
-
-
-def setup_logging(verbosity, no_color, user_log_file):
- """Configures and sets up all of the logging
-
- Returns the requested logging level, as its integer value.
- """
-
- # Determine the level to be logging at.
- if verbosity >= 1:
- level = "DEBUG"
- elif verbosity == -1:
- level = "WARNING"
- elif verbosity == -2:
- level = "ERROR"
- elif verbosity <= -3:
- level = "CRITICAL"
- else:
- level = "INFO"
-
- level_number = getattr(logging, level)
-
- # The "root" logger should match the "console" level *unless* we also need
- # to log to a user log file.
- include_user_log = user_log_file is not None
- if include_user_log:
- additional_log_file = user_log_file
- root_level = "DEBUG"
- else:
- additional_log_file = "/dev/null"
- root_level = level
-
- # Disable any logging besides WARNING unless we have DEBUG level logging
- # enabled for vendored libraries.
- vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
-
- # Shorthands for clarity
- log_streams = {
- "stdout": "ext://sys.stdout",
- "stderr": "ext://sys.stderr",
- }
- handler_classes = {
- "stream": "pip._internal.utils.logging.ColorizedStreamHandler",
- "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
- }
-
- logging.config.dictConfig({
- "version": 1,
- "disable_existing_loggers": False,
- "filters": {
- "exclude_warnings": {
- "()": "pip._internal.utils.logging.MaxLevelFilter",
- "level": logging.WARNING,
- },
- },
- "formatters": {
- "indent": {
- "()": IndentingFormatter,
- "format": "%(message)s",
- },
- "indent_with_timestamp": {
- "()": IndentingFormatter,
- "format": "%(message)s",
- "add_timestamp": True,
- },
- },
- "handlers": {
- "console": {
- "level": level,
- "class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stdout"],
- "filters": ["exclude_warnings"],
- "formatter": "indent",
- },
- "console_errors": {
- "level": "WARNING",
- "class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stderr"],
- "formatter": "indent",
- },
- "user_log": {
- "level": "DEBUG",
- "class": handler_classes["file"],
- "filename": additional_log_file,
- "delay": True,
- "formatter": "indent_with_timestamp",
- },
- },
- "root": {
- "level": root_level,
- "handlers": ["console", "console_errors"] + (
- ["user_log"] if include_user_log else []
- ),
- },
- "loggers": {
- "pip._vendor": {
- "level": vendored_log_level
- }
- },
- })
-
- return level_number
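A minimal sketch of indent_log() working together with IndentingFormatter (assuming the vendored module is importable; the logger name and messages are illustrative):

import logging

from pip._internal.utils.logging import IndentingFormatter, indent_log

handler = logging.StreamHandler()
handler.setFormatter(IndentingFormatter("%(message)s"))
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("Collecting example-package")
with indent_log():
    # Records emitted inside the context are prefixed with two extra spaces.
    logger.info("Downloading example-package-1.0.tar.gz")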
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py
deleted file mode 100644
index 84605ee..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/misc.py
+++ /dev/null
@@ -1,1040 +0,0 @@
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import io
-import locale
-# we have a submodule named 'logging' which would shadow this if we used the
-# regular name:
-import logging as std_logging
-import os
-import posixpath
-import re
-import shutil
-import stat
-import subprocess
-import sys
-import tarfile
-import zipfile
-from collections import deque
-
-from pip._vendor import pkg_resources
-# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
-# why we ignore the type on this import.
-from pip._vendor.retrying import retry # type: ignore
-from pip._vendor.six import PY2
-from pip._vendor.six.moves import input
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
-
-from pip._internal.exceptions import CommandError, InstallationError
-from pip._internal.locations import (
- running_under_virtualenv, site_packages, user_site, virtualenv_no_global,
- write_delete_marker_file,
-)
-from pip._internal.utils.compat import (
- WINDOWS, console_to_str, expanduser, stdlib_pkgs,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if PY2:
- from io import BytesIO as StringIO
-else:
- from io import StringIO
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Optional, Tuple, Iterable, List, Match, Union, Any, Mapping, Text,
- AnyStr, Container
- )
- from pip._vendor.pkg_resources import Distribution # noqa: F401
- from pip._internal.models.link import Link # noqa: F401
- from pip._internal.utils.ui import SpinnerInterface # noqa: F401
-
-
-__all__ = ['rmtree', 'display_path', 'backup_dir',
- 'ask', 'splitext',
- 'format_size', 'is_installable_dir',
- 'is_svn_page', 'file_contents',
- 'split_leading_dir', 'has_leading_dir',
- 'normalize_path',
- 'renames', 'get_prog',
- 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
- 'captured_stdout', 'ensure_dir',
- 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', 'WHEEL_EXTENSION',
- 'get_installed_version', 'remove_auth_from_url']
-
-
-logger = std_logging.getLogger(__name__)
-
-WHEEL_EXTENSION = '.whl'
-BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
-XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
-ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION)
-TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
-ARCHIVE_EXTENSIONS = (
- ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
-SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
-
-try:
- import bz2 # noqa
- SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
-except ImportError:
- logger.debug('bz2 module is not available')
-
-try:
- # Only for Python 3.3+
- import lzma # noqa
- SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
-except ImportError:
- logger.debug('lzma module is not available')
-
-
-def ensure_dir(path):
- # type: (AnyStr) -> None
- """os.path.makedirs without EEXIST."""
- try:
- os.makedirs(path)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise
-
-
-def get_prog():
- # type: () -> str
- try:
- prog = os.path.basename(sys.argv[0])
- if prog in ('__main__.py', '-c'):
- return "%s -m pip" % sys.executable
- else:
- return prog
- except (AttributeError, TypeError, IndexError):
- pass
- return 'pip'
-
-
-# Retry every half second for up to 3 seconds
-@retry(stop_max_delay=3000, wait_fixed=500)
-def rmtree(dir, ignore_errors=False):
- # type: (str, bool) -> None
- shutil.rmtree(dir, ignore_errors=ignore_errors,
- onerror=rmtree_errorhandler)
-
-
-def rmtree_errorhandler(func, path, exc_info):
- """On Windows, the files in .svn are read-only, so when rmtree() tries to
- remove them, an exception is thrown. We catch that here, remove the
- read-only attribute, and hopefully continue without problems."""
- # if file type currently read only
- if os.stat(path).st_mode & stat.S_IREAD:
- # convert to read/write
- os.chmod(path, stat.S_IWRITE)
- # use the original function to repeat the operation
- func(path)
- return
- else:
- raise
-
-
-def display_path(path):
- # type: (Union[str, Text]) -> str
- """Gives the display value for a given path, making it relative to cwd
- if possible."""
- path = os.path.normcase(os.path.abspath(path))
- if sys.version_info[0] == 2:
- path = path.decode(sys.getfilesystemencoding(), 'replace')
- path = path.encode(sys.getdefaultencoding(), 'replace')
- if path.startswith(os.getcwd() + os.path.sep):
- path = '.' + path[len(os.getcwd()):]
- return path
-
-
-def backup_dir(dir, ext='.bak'):
- # type: (str, str) -> str
- """Figure out the name of a directory to back up the given dir to
- (adding .bak, .bak2, etc)"""
- n = 1
- extension = ext
- while os.path.exists(dir + extension):
- n += 1
- extension = ext + str(n)
- return dir + extension
-
-
-def ask_path_exists(message, options):
- # type: (str, Iterable[str]) -> str
- for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
- if action in options:
- return action
- return ask(message, options)
-
-
-def ask(message, options):
- # type: (str, Iterable[str]) -> str
- """Ask the message interactively, with the given possible responses"""
- while 1:
- if os.environ.get('PIP_NO_INPUT'):
- raise Exception(
- 'No input was expected ($PIP_NO_INPUT set); question: %s' %
- message
- )
- response = input(message)
- response = response.strip().lower()
- if response not in options:
- print(
- 'Your response (%r) was not one of the expected responses: '
- '%s' % (response, ', '.join(options))
- )
- else:
- return response
-
-
-def format_size(bytes):
- # type: (float) -> str
- if bytes > 1000 * 1000:
- return '%.1fMB' % (bytes / 1000.0 / 1000)
- elif bytes > 10 * 1000:
- return '%ikB' % (bytes / 1000)
- elif bytes > 1000:
- return '%.1fkB' % (bytes / 1000.0)
- else:
- return '%ibytes' % bytes
-
-
-def is_installable_dir(path):
- # type: (str) -> bool
- """Is path is a directory containing setup.py or pyproject.toml?
- """
- if not os.path.isdir(path):
- return False
- setup_py = os.path.join(path, 'setup.py')
- if os.path.isfile(setup_py):
- return True
- pyproject_toml = os.path.join(path, 'pyproject.toml')
- if os.path.isfile(pyproject_toml):
- return True
- return False
-
-
-def is_svn_page(html):
- # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]]
- """
- Returns true if the page appears to be the index page of an svn repository
- """
- return (re.search(r'<title>[^<]*Revision \d+:', html) and
- re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
-
-
-def file_contents(filename):
- # type: (str) -> Text
- with open(filename, 'rb') as fp:
- return fp.read().decode('utf-8')
-
-
-def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
- """Yield pieces of data from a file-like object until EOF."""
- while True:
- chunk = file.read(size)
- if not chunk:
- break
- yield chunk
-
-
-def split_leading_dir(path):
- # type: (Union[str, Text]) -> List[Union[str, Text]]
- path = path.lstrip('/').lstrip('\\')
- if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or
- '\\' not in path):
- return path.split('/', 1)
- elif '\\' in path:
- return path.split('\\', 1)
- else:
- return [path, '']
-
-
-def has_leading_dir(paths):
- # type: (Iterable[Union[str, Text]]) -> bool
- """Returns true if all the paths have the same leading path name
- (i.e., everything is in one subdirectory in an archive)"""
- common_prefix = None
- for path in paths:
- prefix, rest = split_leading_dir(path)
- if not prefix:
- return False
- elif common_prefix is None:
- common_prefix = prefix
- elif prefix != common_prefix:
- return False
- return True
-
-
-def normalize_path(path, resolve_symlinks=True):
- # type: (str, bool) -> str
- """
- Convert a path to its canonical, case-normalized, absolute version.
-
- """
- path = expanduser(path)
- if resolve_symlinks:
- path = os.path.realpath(path)
- else:
- path = os.path.abspath(path)
- return os.path.normcase(path)
-
-
-def splitext(path):
- # type: (str) -> Tuple[str, str]
- """Like os.path.splitext, but take off .tar too"""
- base, ext = posixpath.splitext(path)
- if base.lower().endswith('.tar'):
- ext = base[-4:] + ext
- base = base[:-4]
- return base, ext
-
-
-def renames(old, new):
- # type: (str, str) -> None
- """Like os.renames(), but handles renaming across devices."""
- # Implementation borrowed from os.renames().
- head, tail = os.path.split(new)
- if head and tail and not os.path.exists(head):
- os.makedirs(head)
-
- shutil.move(old, new)
-
- head, tail = os.path.split(old)
- if head and tail:
- try:
- os.removedirs(head)
- except OSError:
- pass
-
-
-def is_local(path):
- # type: (str) -> bool
- """
- Return True if path is within sys.prefix, if we're running in a virtualenv.
-
- If we're not in a virtualenv, all paths are considered "local."
-
- """
- if not running_under_virtualenv():
- return True
- return normalize_path(path).startswith(normalize_path(sys.prefix))
-
-
-def dist_is_local(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution object is installed locally
- (i.e. within current virtualenv).
-
- Always True if we're not in a virtualenv.
-
- """
- return is_local(dist_location(dist))
-
-
-def dist_in_usersite(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution is installed in user site.
- """
- norm_path = normalize_path(dist_location(dist))
- return norm_path.startswith(normalize_path(user_site))
-
-
-def dist_in_site_packages(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution is installed in
- sysconfig.get_python_lib().
- """
- return normalize_path(
- dist_location(dist)
- ).startswith(normalize_path(site_packages))
-
-
-def dist_is_editable(dist):
- # type: (Distribution) -> bool
- """
- Return True if given Distribution is an editable install.
- """
- for path_item in sys.path:
- egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
- if os.path.isfile(egg_link):
- return True
- return False
-
-
-def get_installed_distributions(local_only=True,
- skip=stdlib_pkgs,
- include_editables=True,
- editables_only=False,
- user_only=False):
- # type: (bool, Container[str], bool, bool, bool) -> List[Distribution]
- """
- Return a list of installed Distribution objects.
-
- If ``local_only`` is True (default), only return installations
- local to the current virtualenv, if in a virtualenv.
-
- The ``skip`` argument is an iterable of lower-case project names to
- ignore; it defaults to stdlib_pkgs.
-
- If ``include_editables`` is False, don't report editables.
-
- If ``editables_only`` is True, only report editables.
-
- If ``user_only`` is True, only report installations in the user
- site directory.
-
- """
- if local_only:
- local_test = dist_is_local
- else:
- def local_test(d):
- return True
-
- if include_editables:
- def editable_test(d):
- return True
- else:
- def editable_test(d):
- return not dist_is_editable(d)
-
- if editables_only:
- def editables_only_test(d):
- return dist_is_editable(d)
- else:
- def editables_only_test(d):
- return True
-
- if user_only:
- user_test = dist_in_usersite
- else:
- def user_test(d):
- return True
-
- # because of pkg_resources vendoring, mypy cannot find stub in typeshed
- return [d for d in pkg_resources.working_set # type: ignore
- if local_test(d) and
- d.key not in skip and
- editable_test(d) and
- editables_only_test(d) and
- user_test(d)
- ]
-
-
-def egg_link_path(dist):
- # type: (Distribution) -> Optional[str]
- """
- Return the path for the .egg-link file if it exists, otherwise, None.
-
- There are 3 scenarios:
- 1) not in a virtualenv
- try to find in site.USER_SITE, then site_packages
- 2) in a no-global virtualenv
- try to find in site_packages
- 3) in a yes-global virtualenv
- try to find in site_packages, then site.USER_SITE
- (don't look in global location)
-
- For #1 and #3, there could be odd cases where there's an egg-link in 2
- locations.
-
- This method will just return the first one found.
- """
- sites = []
- if running_under_virtualenv():
- if virtualenv_no_global():
- sites.append(site_packages)
- else:
- sites.append(site_packages)
- if user_site:
- sites.append(user_site)
- else:
- if user_site:
- sites.append(user_site)
- sites.append(site_packages)
-
- for site in sites:
- egglink = os.path.join(site, dist.project_name) + '.egg-link'
- if os.path.isfile(egglink):
- return egglink
- return None
-
-
-def dist_location(dist):
- # type: (Distribution) -> str
- """
- Get the site-packages location of this distribution. Generally
- this is dist.location, except in the case of develop-installed
- packages, where dist.location is the source code location, and we
- want to know where the egg-link file is.
-
- """
- egg_link = egg_link_path(dist)
- if egg_link:
- return egg_link
- return dist.location
-
-
-def current_umask():
- """Get the current umask which involves having to set it temporarily."""
- mask = os.umask(0)
- os.umask(mask)
- return mask
-
-
-def unzip_file(filename, location, flatten=True):
- # type: (str, str, bool) -> None
- """
- Unzip the file (with path `filename`) to the destination `location`. All
- files are written based on system defaults and umask (i.e. permissions are
- not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that for windows, any execute changes using os.chmod are
- no-ops per the python docs.
- """
- ensure_dir(location)
- zipfp = open(filename, 'rb')
- try:
- zip = zipfile.ZipFile(zipfp, allowZip64=True)
- leading = has_leading_dir(zip.namelist()) and flatten
- for info in zip.infolist():
- name = info.filename
- fn = name
- if leading:
- fn = split_leading_dir(name)[1]
- fn = os.path.join(location, fn)
- dir = os.path.dirname(fn)
- if fn.endswith('/') or fn.endswith('\\'):
- # A directory
- ensure_dir(fn)
- else:
- ensure_dir(dir)
- # Don't use read() to avoid allocating an arbitrarily large
- # chunk of memory for the file's content
- fp = zip.open(name)
- try:
- with open(fn, 'wb') as destfp:
- shutil.copyfileobj(fp, destfp)
- finally:
- fp.close()
- mode = info.external_attr >> 16
- # if mode and regular file and any execute permissions for
- # user/group/world?
- if mode and stat.S_ISREG(mode) and mode & 0o111:
- # make dest file have execute for user/group/world
- # (chmod +x) no-op on windows per python docs
- os.chmod(fn, (0o777 - current_umask() | 0o111))
- finally:
- zipfp.close()
-
-
-def untar_file(filename, location):
- # type: (str, str) -> None
- """
- Untar the file (with path `filename`) to the destination `location`.
- All files are written based on system defaults and umask (i.e. permissions
- are not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that for windows, any execute changes using os.chmod are
- no-ops per the python docs.
- """
- ensure_dir(location)
- if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
- mode = 'r:gz'
- elif filename.lower().endswith(BZ2_EXTENSIONS):
- mode = 'r:bz2'
- elif filename.lower().endswith(XZ_EXTENSIONS):
- mode = 'r:xz'
- elif filename.lower().endswith('.tar'):
- mode = 'r'
- else:
- logger.warning(
- 'Cannot determine compression type for file %s', filename,
- )
- mode = 'r:*'
- tar = tarfile.open(filename, mode)
- try:
- leading = has_leading_dir([
- member.name for member in tar.getmembers()
- ])
- for member in tar.getmembers():
- fn = member.name
- if leading:
- # https://github.com/python/mypy/issues/1174
- fn = split_leading_dir(fn)[1] # type: ignore
- path = os.path.join(location, fn)
- if member.isdir():
- ensure_dir(path)
- elif member.issym():
- try:
- # https://github.com/python/typeshed/issues/2673
- tar._extract_member(member, path) # type: ignore
- except Exception as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- 'In the tar file %s the member %s is invalid: %s',
- filename, member.name, exc,
- )
- continue
- else:
- try:
- fp = tar.extractfile(member)
- except (KeyError, AttributeError) as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- 'In the tar file %s the member %s is invalid: %s',
- filename, member.name, exc,
- )
- continue
- ensure_dir(os.path.dirname(path))
- with open(path, 'wb') as destfp:
- shutil.copyfileobj(fp, destfp)
- fp.close()
- # Update the timestamp (useful for cython compiled files)
- # https://github.com/python/typeshed/issues/2673
- tar.utime(member, path) # type: ignore
- # does the member have any execute permissions for user/group/world?
- if member.mode & 0o111:
- # make dest file have execute for user/group/world
- # no-op on windows per python docs
- os.chmod(path, (0o777 - current_umask() | 0o111))
- finally:
- tar.close()
-
-
-def unpack_file(
- filename, # type: str
- location, # type: str
- content_type, # type: Optional[str]
- link # type: Optional[Link]
-):
- # type: (...) -> None
- filename = os.path.realpath(filename)
- if (content_type == 'application/zip' or
- filename.lower().endswith(ZIP_EXTENSIONS) or
- zipfile.is_zipfile(filename)):
- unzip_file(
- filename,
- location,
- flatten=not filename.endswith('.whl')
- )
- elif (content_type == 'application/x-gzip' or
- tarfile.is_tarfile(filename) or
- filename.lower().endswith(
- TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
- untar_file(filename, location)
- elif (content_type and content_type.startswith('text/html') and
- is_svn_page(file_contents(filename))):
- # We don't really care about this
- from pip._internal.vcs.subversion import Subversion
- Subversion('svn+' + link.url).unpack(location)
- else:
- # FIXME: handle?
- # FIXME: magic signatures?
- logger.critical(
- 'Cannot unpack file %s (downloaded from %s, content-type: %s); '
- 'cannot detect archive format',
- filename, location, content_type,
- )
- raise InstallationError(
- 'Cannot determine archive format of %s' % location
- )
-
-
-def call_subprocess(
- cmd, # type: List[str]
- show_stdout=True, # type: bool
- cwd=None, # type: Optional[str]
- on_returncode='raise', # type: str
- extra_ok_returncodes=None, # type: Optional[Iterable[int]]
- command_desc=None, # type: Optional[str]
- extra_environ=None, # type: Optional[Mapping[str, Any]]
- unset_environ=None, # type: Optional[Iterable[str]]
- spinner=None # type: Optional[SpinnerInterface]
-):
- # type: (...) -> Optional[Text]
- """
- Args:
- extra_ok_returncodes: an iterable of integer return codes that are
- acceptable, in addition to 0. Defaults to None, which means [].
- unset_environ: an iterable of environment variable names to unset
- prior to calling subprocess.Popen().
- """
- if extra_ok_returncodes is None:
- extra_ok_returncodes = []
- if unset_environ is None:
- unset_environ = []
- # This function's handling of subprocess output is confusing and I
- # previously broke it terribly, so as penance I will write a long comment
- # explaining things.
- #
- # The obvious thing that affects output is the show_stdout=
- # kwarg. show_stdout=True means, let the subprocess write directly to our
- # stdout. Even though it is nominally the default, it is almost never used
- # inside pip (and should not be used in new code without a very good
- # reason); as of 2016-02-22 it is only used in a few places inside the VCS
- # wrapper code. Ideally we should get rid of it entirely, because it
- # creates a lot of complexity here for a rarely used feature.
- #
- # Most places in pip set show_stdout=False. What this means is:
- # - We connect the child stdout to a pipe, which we read.
- # - By default, we hide the output but show a spinner -- unless the
- # subprocess exits with an error, in which case we show the output.
- # - If the --verbose option was passed (= loglevel is DEBUG), then we show
- # the output unconditionally. (But in this case we don't want to show
- # the output a second time if it turns out that there was an error.)
- #
- # stderr is always merged with stdout (even if show_stdout=True).
- if show_stdout:
- stdout = None
- else:
- stdout = subprocess.PIPE
- if command_desc is None:
- cmd_parts = []
- for part in cmd:
- if ' ' in part or '\n' in part or '"' in part or "'" in part:
- part = '"%s"' % part.replace('"', '\\"')
- cmd_parts.append(part)
- command_desc = ' '.join(cmd_parts)
- logger.debug("Running command %s", command_desc)
- env = os.environ.copy()
- if extra_environ:
- env.update(extra_environ)
- for name in unset_environ:
- env.pop(name, None)
- try:
- proc = subprocess.Popen(
- cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
- stdout=stdout, cwd=cwd, env=env,
- )
- proc.stdin.close()
- except Exception as exc:
- logger.critical(
- "Error %s while executing command %s", exc, command_desc,
- )
- raise
- all_output = []
- if stdout is not None:
- while True:
- line = console_to_str(proc.stdout.readline())
- if not line:
- break
- line = line.rstrip()
- all_output.append(line + '\n')
- if logger.getEffectiveLevel() <= std_logging.DEBUG:
- # Show the line immediately
- logger.debug(line)
- else:
- # Update the spinner
- if spinner is not None:
- spinner.spin()
- try:
- proc.wait()
- finally:
- if proc.stdout:
- proc.stdout.close()
- if spinner is not None:
- if proc.returncode:
- spinner.finish("error")
- else:
- spinner.finish("done")
- if proc.returncode and proc.returncode not in extra_ok_returncodes:
- if on_returncode == 'raise':
- if (logger.getEffectiveLevel() > std_logging.DEBUG and
- not show_stdout):
- logger.info(
- 'Complete output from command %s:', command_desc,
- )
- logger.info(
- ''.join(all_output) +
- '\n----------------------------------------'
- )
- raise InstallationError(
- 'Command "%s" failed with error code %s in %s'
- % (command_desc, proc.returncode, cwd))
- elif on_returncode == 'warn':
- logger.warning(
- 'Command "%s" had error code %s in %s',
- command_desc, proc.returncode, cwd,
- )
- elif on_returncode == 'ignore':
- pass
- else:
- raise ValueError('Invalid value: on_returncode=%s' %
- repr(on_returncode))
- if not show_stdout:
- return ''.join(all_output)
- return None
-
-
-def read_text_file(filename):
- # type: (str) -> str
- """Return the contents of *filename*.
-
- Try to decode the file contents with utf-8, the preferred system encoding
- (e.g., cp1252 on some Windows machines), and latin1, in that order.
- Decoding a byte string with latin1 will never raise an error. In the worst
- case, the returned string will contain some garbage characters.
-
- """
- with open(filename, 'rb') as fp:
- data = fp.read()
-
- encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1']
- for enc in encodings:
- try:
- # https://github.com/python/mypy/issues/1174
- data = data.decode(enc) # type: ignore
- except UnicodeDecodeError:
- continue
- break
-
- assert not isinstance(data, bytes) # Latin1 should have worked.
- return data
-
-
-def _make_build_dir(build_dir):
- os.makedirs(build_dir)
- write_delete_marker_file(build_dir)
-
-
-class FakeFile(object):
- """Wrap a list of lines in an object with readline() to make
- ConfigParser happy."""
- def __init__(self, lines):
- self._gen = (l for l in lines)
-
- def readline(self):
- try:
- try:
- return next(self._gen)
- except NameError:
- return self._gen.next()
- except StopIteration:
- return ''
-
- def __iter__(self):
- return self._gen
-
-
-class StreamWrapper(StringIO):
-
- @classmethod
- def from_stream(cls, orig_stream):
- cls.orig_stream = orig_stream
- return cls()
-
- # compileall.compile_dir() needs stdout.encoding to print to stdout
- @property
- def encoding(self):
- return self.orig_stream.encoding
-
-
-@contextlib.contextmanager
-def captured_output(stream_name):
- """Return a context manager used by captured_stdout/stdin/stderr
- that temporarily replaces the sys stream *stream_name* with a StringIO.
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- orig_stdout = getattr(sys, stream_name)
- setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
- try:
- yield getattr(sys, stream_name)
- finally:
- setattr(sys, stream_name, orig_stdout)
-
-
-def captured_stdout():
- """Capture the output of sys.stdout:
-
- with captured_stdout() as stdout:
- print('hello')
- self.assertEqual(stdout.getvalue(), 'hello\n')
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- return captured_output('stdout')
-
-
-def captured_stderr():
- """
- See captured_stdout().
- """
- return captured_output('stderr')
-
-
-class cached_property(object):
- """A property that is only computed once per instance and then replaces
- itself with an ordinary attribute. Deleting the attribute resets the
- property.
-
- Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
- """
-
- def __init__(self, func):
- self.__doc__ = getattr(func, '__doc__')
- self.func = func
-
- def __get__(self, obj, cls):
- if obj is None:
- # We're being accessed from the class itself, not from an object
- return self
- value = obj.__dict__[self.func.__name__] = self.func(obj)
- return value
-
-
-def get_installed_version(dist_name, working_set=None):
- """Get the installed version of dist_name avoiding pkg_resources cache"""
- # Create a requirement that we'll look for inside of setuptools.
- req = pkg_resources.Requirement.parse(dist_name)
-
- if working_set is None:
- # We want to avoid having this cached, so we need to construct a new
- # working set each time.
- working_set = pkg_resources.WorkingSet()
-
- # Get the installed distribution from our working set
- dist = working_set.find(req)
-
- # Check to see if we got an installed distribution or not; if we did,
- # we want to return its version.
- return dist.version if dist else None
-
-
-def consume(iterator):
- """Consume an iterable at C speed."""
- deque(iterator, maxlen=0)
-
-
-# Simulates an enum
-def enum(*sequential, **named):
- enums = dict(zip(sequential, range(len(sequential))), **named)
- reverse = {value: key for key, value in enums.items()}
- enums['reverse_mapping'] = reverse
- return type('Enum', (), enums)
-
-
-def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
- """
- Return the URL for a VCS requirement.
-
- Args:
- repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
- project_name: the (unescaped) project name.
- """
- egg_project_name = pkg_resources.to_filename(project_name)
- req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name)
- if subdir:
- req += '&subdirectory={}'.format(subdir)
-
- return req
-
-
-def split_auth_from_netloc(netloc):
- """
- Parse out and remove the auth information from a netloc.
-
- Returns: (netloc, (username, password)).
- """
- if '@' not in netloc:
- return netloc, (None, None)
-
- # Split from the right because that's how urllib.parse.urlsplit()
- # behaves if more than one @ is present (which can be checked using
- # the password attribute of urlsplit()'s return value).
- auth, netloc = netloc.rsplit('@', 1)
- if ':' in auth:
- # Split from the left because that's how urllib.parse.urlsplit()
- # behaves if more than one : is present (which again can be checked
- # using the password attribute of the return value)
- user_pass = auth.split(':', 1)
- else:
- user_pass = auth, None
-
- user_pass = tuple(
- None if x is None else urllib_unquote(x) for x in user_pass
- )
-
- return netloc, user_pass
-
-
-def redact_netloc(netloc):
- # type: (str) -> str
- """
- Replace the password in a netloc with "****", if it exists.
-
- For example, "user:pass@example.com" returns "user:****@example.com".
- """
- netloc, (user, password) = split_auth_from_netloc(netloc)
- if user is None:
- return netloc
- password = '' if password is None else ':****'
- return '{user}{password}@{netloc}'.format(user=urllib_parse.quote(user),
- password=password,
- netloc=netloc)
-
-
-def _transform_url(url, transform_netloc):
- purl = urllib_parse.urlsplit(url)
- netloc = transform_netloc(purl.netloc)
- # stripped url
- url_pieces = (
- purl.scheme, netloc, purl.path, purl.query, purl.fragment
- )
- surl = urllib_parse.urlunsplit(url_pieces)
- return surl
-
-
-def _get_netloc(netloc):
- return split_auth_from_netloc(netloc)[0]
-
-
-def remove_auth_from_url(url):
- # type: (str) -> str
- # Return a copy of url with 'username:password@' removed.
- # username/pass params are passed to subversion through flags
- # and are not recognized in the url.
- return _transform_url(url, _get_netloc)
-
-
-def redact_password_from_url(url):
- # type: (str) -> str
- """Replace the password in a given url with ****."""
- return _transform_url(url, redact_netloc)
-
-
-def protect_pip_from_modification_on_windows(modifying_pip):
- """Protection of pip.exe from modification on Windows
-
- On Windows, any operation modifying pip should be run as:
- python -m pip ...
- """
- pip_names = [
- "pip.exe",
- "pip{}.exe".format(sys.version_info[0]),
- "pip{}.{}.exe".format(*sys.version_info[:2])
- ]
-
- # See https://github.com/pypa/pip/issues/1299 for more discussion
- should_show_use_python_msg = (
- modifying_pip and
- WINDOWS and
- os.path.basename(sys.argv[0]) in pip_names
- )
-
- if should_show_use_python_msg:
- new_command = [
- sys.executable, "-m", "pip"
- ] + sys.argv[1:]
- raise CommandError(
- 'To modify pip, please run the following command:\n{}'
- .format(" ".join(new_command))
- )
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py
deleted file mode 100644
index d5cb80a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Utilities for defining models
-"""
-
-import operator
-
-
-class KeyBasedCompareMixin(object):
- """Provides comparision capabilities that is based on a key
- """
-
- def __init__(self, key, defining_class):
- self._compare_key = key
- self._defining_class = defining_class
-
- def __hash__(self):
- return hash(self._compare_key)
-
- def __lt__(self, other):
- return self._compare(other, operator.__lt__)
-
- def __le__(self, other):
- return self._compare(other, operator.__le__)
-
- def __gt__(self, other):
- return self._compare(other, operator.__gt__)
-
- def __ge__(self, other):
- return self._compare(other, operator.__ge__)
-
- def __eq__(self, other):
- return self._compare(other, operator.__eq__)
-
- def __ne__(self, other):
- return self._compare(other, operator.__ne__)
-
- def _compare(self, other, method):
- if not isinstance(other, self._defining_class):
- return NotImplemented
-
- return method(self._compare_key, other._compare_key)
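For context, here is a short re-implementation sketch of the key-based comparison pattern this module provided, together with a toy subclass. It is illustrative code only; the Candidate class is hypothetical and not part of pip.

# Illustrative re-implementation of the deleted mixin, plus a toy user.
import operator

class KeyBasedCompareMixin(object):
    """Delegate rich comparisons and hashing to a single compare key."""

    def __init__(self, key, defining_class):
        self._compare_key = key
        self._defining_class = defining_class

    def __hash__(self):
        return hash(self._compare_key)

    def __eq__(self, other):
        return self._compare(other, operator.__eq__)

    def __lt__(self, other):
        return self._compare(other, operator.__lt__)

    def _compare(self, other, method):
        if not isinstance(other, self._defining_class):
            return NotImplemented
        return method(self._compare_key, other._compare_key)

class Candidate(KeyBasedCompareMixin):
    """Hypothetical value object ordered by its (name, version) key."""

    def __init__(self, name, version):
        super(Candidate, self).__init__(key=(name, version),
                                        defining_class=Candidate)

print(Candidate('pip', (19, 0, 3)) < Candidate('pip', (19, 1)))  # True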
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py
deleted file mode 100644
index 37c47a4..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/outdated.py
+++ /dev/null
@@ -1,164 +0,0 @@
-from __future__ import absolute_import
-
-import datetime
-import json
-import logging
-import os.path
-import sys
-
-from pip._vendor import lockfile, pkg_resources
-from pip._vendor.packaging import version as packaging_version
-
-from pip._internal.index import PackageFinder
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import ensure_dir, get_installed_version
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- import optparse # noqa: F401
- from typing import Any, Dict # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
-
-
-SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
-
-
-logger = logging.getLogger(__name__)
-
-
-class SelfCheckState(object):
- def __init__(self, cache_dir):
- # type: (str) -> None
- self.state = {} # type: Dict[str, Any]
- self.statefile_path = None
-
- # Try to load the existing state
- if cache_dir:
- self.statefile_path = os.path.join(cache_dir, "selfcheck.json")
- try:
- with open(self.statefile_path) as statefile:
- self.state = json.load(statefile)[sys.prefix]
- except (IOError, ValueError, KeyError):
- # Explicitly suppressing exceptions, since we don't want to
- # error out if the cache file is invalid.
- pass
-
- def save(self, pypi_version, current_time):
- # type: (str, datetime.datetime) -> None
- # If we do not have a path to cache in, don't bother saving.
- if not self.statefile_path:
- return
-
- # Check to make sure that we own the directory
- if not check_path_owner(os.path.dirname(self.statefile_path)):
- return
-
- # Now that we've ensured the directory is owned by this user, we'll go
- # ahead and make sure that all our directories are created.
- ensure_dir(os.path.dirname(self.statefile_path))
-
- # Attempt to write out our version check file
- with lockfile.LockFile(self.statefile_path):
- if os.path.exists(self.statefile_path):
- with open(self.statefile_path) as statefile:
- state = json.load(statefile)
- else:
- state = {}
-
- state[sys.prefix] = {
- "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
- "pypi_version": pypi_version,
- }
-
- with open(self.statefile_path, "w") as statefile:
- json.dump(state, statefile, sort_keys=True,
- separators=(",", ":"))
-
-
-def was_installed_by_pip(pkg):
- # type: (str) -> bool
- """Checks whether pkg was installed by pip
-
- This is used to avoid displaying the upgrade message when pip was in
- fact installed by the system package manager, such as dnf on Fedora.
- """
- try:
- dist = pkg_resources.get_distribution(pkg)
- return (dist.has_metadata('INSTALLER') and
- 'pip' in dist.get_metadata_lines('INSTALLER'))
- except pkg_resources.DistributionNotFound:
- return False
-
-
-def pip_version_check(session, options):
- # type: (PipSession, optparse.Values) -> None
- """Check for an update for pip.
-
- Limit the frequency of checks to once per week. State is stored either in
- the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
- of the pip script path.
- """
- installed_version = get_installed_version("pip")
- if not installed_version:
- return
-
- pip_version = packaging_version.parse(installed_version)
- pypi_version = None
-
- try:
- state = SelfCheckState(cache_dir=options.cache_dir)
-
- current_time = datetime.datetime.utcnow()
- # Determine if we need to refresh the state
- if "last_check" in state.state and "pypi_version" in state.state:
- last_check = datetime.datetime.strptime(
- state.state["last_check"],
- SELFCHECK_DATE_FMT
- )
- if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
- pypi_version = state.state["pypi_version"]
-
- # Refresh the version if we need to or just see if we need to warn
- if pypi_version is None:
- # Let's use PackageFinder to see what the latest pip version is
- finder = PackageFinder(
- find_links=options.find_links,
- index_urls=[options.index_url] + options.extra_index_urls,
- allow_all_prereleases=False, # Explicitly set to False
- trusted_hosts=options.trusted_hosts,
- session=session,
- )
- all_candidates = finder.find_all_candidates("pip")
- if not all_candidates:
- return
- pypi_version = str(
- max(all_candidates, key=lambda c: c.version).version
- )
-
- # save that we've performed a check
- state.save(pypi_version, current_time)
-
- remote_version = packaging_version.parse(pypi_version)
-
- # Determine if the installed pip is older than the latest on PyPI
- if (pip_version < remote_version and
- pip_version.base_version != remote_version.base_version and
- was_installed_by_pip('pip')):
- # Advise "python -m pip" on Windows to avoid issues
- # with overwriting pip.exe.
- if WINDOWS:
- pip_cmd = "python -m pip"
- else:
- pip_cmd = "pip"
- logger.warning(
- "You are using pip version %s, however version %s is "
- "available.\nYou should consider upgrading via the "
- "'%s install --upgrade pip' command.",
- pip_version, pypi_version, pip_cmd
- )
- except Exception:
- logger.debug(
- "There was an error checking the latest version of pip",
- exc_info=True,
- )
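The upgrade hint above fires only when the installed pip is strictly older and the newer release has a different base version. A small sketch of that check, using the standalone packaging distribution from PyPI in place of pip's vendored pip._vendor.packaging (that substitution is an assumption):

# Sketch of the version comparison behind pip_version_check above.
from packaging import version as packaging_version

def should_warn(installed, latest):
    """True when an upgrade exists and it is not just a rebuild of the
    same base version, mirroring the condition in the removed module."""
    pip_version = packaging_version.parse(installed)
    remote_version = packaging_version.parse(latest)
    return (pip_version < remote_version and
            pip_version.base_version != remote_version.base_version)

print(should_warn('19.0.3', '19.1.1'))  # True: show the upgrade message
print(should_warn('19.1.1', '19.1.1'))  # False: already current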
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py
deleted file mode 100644
index 7aaf7b5..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import sys
-from email.parser import FeedParser
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging import specifiers, version
-
-from pip._internal import exceptions
-from pip._internal.utils.misc import display_path
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional # noqa: F401
- from email.message import Message # noqa: F401
- from pip._vendor.pkg_resources import Distribution # noqa: F401
-
-
-logger = logging.getLogger(__name__)
-
-
-def check_requires_python(requires_python):
- # type: (Optional[str]) -> bool
- """
- Check if the python version in use matches the `requires_python` specifier.
-
- Returns `True` if the version of python in use matches the requirement.
- Returns `False` if the version of python in use does not match the
- requirement.
-
- Raises an InvalidSpecifier if `requires_python` has an invalid format.
- """
- if requires_python is None:
- # The package provides no information
- return True
- requires_python_specifier = specifiers.SpecifierSet(requires_python)
-
- # We only use major.minor.micro
- python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
- return python_version in requires_python_specifier
-
-
-def get_metadata(dist):
- # type: (Distribution) -> Message
- if (isinstance(dist, pkg_resources.DistInfoDistribution) and
- dist.has_metadata('METADATA')):
- metadata = dist.get_metadata('METADATA')
- elif dist.has_metadata('PKG-INFO'):
- metadata = dist.get_metadata('PKG-INFO')
- else:
- logger.warning("No metadata found in %s", display_path(dist.location))
- metadata = ''
-
- feed_parser = FeedParser()
- feed_parser.feed(metadata)
- return feed_parser.close()
-
-
-def check_dist_requires_python(dist):
- pkg_info_dict = get_metadata(dist)
- requires_python = pkg_info_dict.get('Requires-Python')
- try:
- if not check_requires_python(requires_python):
- raise exceptions.UnsupportedPythonVersion(
- "%s requires Python '%s' but the running Python is %s" % (
- dist.project_name,
- requires_python,
- '.'.join(map(str, sys.version_info[:3])),)
- )
- except specifiers.InvalidSpecifier as e:
- logger.warning(
- "Package %s has an invalid Requires-Python entry %s - %s",
- dist.project_name, requires_python, e,
- )
- return
-
-
-def get_installer(dist):
- # type: (Distribution) -> str
- if dist.has_metadata('INSTALLER'):
- for line in dist.get_metadata_lines('INSTALLER'):
- if line.strip():
- return line.strip()
- return ''
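check_requires_python above reduces to a SpecifierSet containment test against the running interpreter. A standalone sketch under the same assumption as the previous example (the PyPI packaging distribution stands in for pip's vendored copy):

# Sketch of the Requires-Python check implemented above.
import sys

from packaging import specifiers, version

def check_requires_python(requires_python):
    """Return True when the running interpreter satisfies the specifier."""
    if requires_python is None:
        return True  # the package declares no constraint
    spec = specifiers.SpecifierSet(requires_python)
    python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
    return python_version in spec

print(check_requires_python('>=3.4'))  # True on any current CPython 3
print(check_requires_python('>=4.0'))  # False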
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py
deleted file mode 100644
index 03973e9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# Shim to wrap setup.py invocation with setuptools
-SETUPTOOLS_SHIM = (
- "import setuptools, tokenize;__file__=%r;"
- "f=getattr(tokenize, 'open', open)(__file__);"
- "code=f.read().replace('\\r\\n', '\\n');"
- "f.close();"
- "exec(compile(code, __file__, 'exec'))"
-)
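The shim is meant to be %-interpolated with the path to a setup.py and handed to a child interpreter via -c. A sketch of how a caller might assemble such a command line; the path and the egg_info argument are illustrative, not taken from this repository:

# How the shim above is typically turned into a subprocess command.
import sys

SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "f=getattr(tokenize, 'open', open)(__file__);"
    "code=f.read().replace('\\r\\n', '\\n');"
    "f.close();"
    "exec(compile(code, __file__, 'exec'))"
)

setup_py = '/tmp/example-project/setup.py'  # hypothetical path
cmd = [sys.executable, '-c', SETUPTOOLS_SHIM % setup_py, 'egg_info']
print(cmd)  # suitable for subprocess.Popen / call_subprocess from misc.py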
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py
deleted file mode 100644
index 2c81ad5..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/temp_dir.py
+++ /dev/null
@@ -1,155 +0,0 @@
-from __future__ import absolute_import
-
-import errno
-import itertools
-import logging
-import os.path
-import tempfile
-
-from pip._internal.utils.misc import rmtree
-
-logger = logging.getLogger(__name__)
-
-
-class TempDirectory(object):
- """Helper class that owns and cleans up a temporary directory.
-
- This class can be used as a context manager or as an OO representation of a
- temporary directory.
-
- Attributes:
- path
- Location of the created temporary directory, or None
- delete
- Whether the directory should be deleted when exiting
- (when used as a contextmanager)
-
- Methods:
- create()
- Creates a temporary directory and stores its path in the path
- attribute.
- cleanup()
- Deletes the temporary directory and sets the path attribute to None
-
- When used as a context manager, a temporary directory is created on
- entering the context and, if the delete attribute is True, on exiting the
- context the created directory is deleted.
- """
-
- def __init__(self, path=None, delete=None, kind="temp"):
- super(TempDirectory, self).__init__()
-
- if path is None and delete is None:
- # If we were not given an explicit directory, and we were not given
- # an explicit delete option, then we'll default to deleting.
- delete = True
-
- self.path = path
- self.delete = delete
- self.kind = kind
-
- def __repr__(self):
- return "<{} {!r}>".format(self.__class__.__name__, self.path)
-
- def __enter__(self):
- self.create()
- return self
-
- def __exit__(self, exc, value, tb):
- if self.delete:
- self.cleanup()
-
- def create(self):
- """Create a temporary directory and store its path in self.path
- """
- if self.path is not None:
- logger.debug(
- "Skipped creation of temporary directory: {}".format(self.path)
- )
- return
- # We realpath here because some systems have their default tmpdir
- # symlinked to another directory. This tends to confuse build
- # scripts, so we canonicalize the path by traversing potential
- # symlinks here.
- self.path = os.path.realpath(
- tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
- )
- logger.debug("Created temporary directory: {}".format(self.path))
-
- def cleanup(self):
- """Remove the temporary directory created and reset state
- """
- if self.path is not None and os.path.exists(self.path):
- rmtree(self.path)
- self.path = None
-
-
-class AdjacentTempDirectory(TempDirectory):
- """Helper class that creates a temporary directory adjacent to a real one.
-
- Attributes:
- original
- The original directory to create a temp directory for.
- path
- After calling create() or entering, contains the full
- path to the temporary directory.
- delete
- Whether the directory should be deleted when exiting
- (when used as a contextmanager)
-
- """
- # The characters that may be used to name the temp directory
- # We always prepend a ~ and then rotate through these until
- # a usable name is found.
- # pkg_resources raises a different error for .dist-info folder
- # with leading '-' and invalid metadata
- LEADING_CHARS = "-~.=%0123456789"
-
- def __init__(self, original, delete=None):
- super(AdjacentTempDirectory, self).__init__(delete=delete)
- self.original = original.rstrip('/\\')
-
- @classmethod
- def _generate_names(cls, name):
- """Generates a series of temporary names.
-
- The algorithm replaces the leading characters in the name
- with ones that are valid filesystem characters, but are not
- valid package names (for both Python and pip definitions of
- package).
- """
- for i in range(1, len(name)):
- for candidate in itertools.combinations_with_replacement(
- cls.LEADING_CHARS, i - 1):
- new_name = '~' + ''.join(candidate) + name[i:]
- if new_name != name:
- yield new_name
-
- # If we make it this far, we will have to make a longer name
- for i in range(len(cls.LEADING_CHARS)):
- for candidate in itertools.combinations_with_replacement(
- cls.LEADING_CHARS, i):
- new_name = '~' + ''.join(candidate) + name
- if new_name != name:
- yield new_name
-
- def create(self):
- root, name = os.path.split(self.original)
- for candidate in self._generate_names(name):
- path = os.path.join(root, candidate)
- try:
- os.mkdir(path)
- except OSError as ex:
- # Continue if the name exists already
- if ex.errno != errno.EEXIST:
- raise
- else:
- self.path = os.path.realpath(path)
- break
-
- if not self.path:
- # Final fallback on the default behavior.
- self.path = os.path.realpath(
- tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
- )
- logger.debug("Created temporary directory: {}".format(self.path))
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py
deleted file mode 100644
index e085cdf..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""For neatly implementing static typing in pip.
-
-`mypy` - the static type analysis tool we use - uses the `typing` module, which
-provides core functionality fundamental to mypy's functioning.
-
-Generally, `typing` would be imported at runtime and used in that fashion -
-it acts as a no-op at runtime and does not have any run-time overhead by
-design.
-
-As it turns out, `typing` is not vendorable - it uses separate sources for
-Python 2/Python 3. Thus, this codebase can not expect it to be present.
-To work around this, mypy allows the typing import to be behind a False-y
-optional to prevent it from running at runtime and type-comments can be used
-to remove the need for the types to be accessible directly during runtime.
-
-This module provides the False-y guard in a nicely named fashion so that a
-curious maintainer can reach here to read this.
-
-In pip, all static-typing related imports should be guarded as follows:
-
- from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
- if MYPY_CHECK_RUNNING:
- from typing import ... # noqa: F401
-
-Ref: https://github.com/python/mypy/issues/3216
-"""
-
-MYPY_CHECK_RUNNING = False
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py
deleted file mode 100644
index 433675d..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/ui.py
+++ /dev/null
@@ -1,441 +0,0 @@
-from __future__ import absolute_import, division
-
-import contextlib
-import itertools
-import logging
-import sys
-import time
-from signal import SIGINT, default_int_handler, signal
-
-from pip._vendor import six
-from pip._vendor.progress.bar import (
- Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
- ShadyBar,
-)
-from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
-from pip._vendor.progress.spinner import Spinner
-
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.logging import get_indentation
-from pip._internal.utils.misc import format_size
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Any, Iterator, IO # noqa: F401
-
-try:
- from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
- colorama = None
-
-logger = logging.getLogger(__name__)
-
-
-def _select_progress_class(preferred, fallback):
- encoding = getattr(preferred.file, "encoding", None)
-
- # If we don't know what encoding this file is in, then we'll just assume
- # that it doesn't support unicode and use the ASCII bar.
- if not encoding:
- return fallback
-
- # Collect all of the possible characters we want to use with the preferred
- # bar.
- characters = [
- getattr(preferred, "empty_fill", six.text_type()),
- getattr(preferred, "fill", six.text_type()),
- ]
- characters += list(getattr(preferred, "phases", []))
-
- # Try to decode the characters we're using for the bar using the encoding
- # of the given file, if this works then we'll assume that we can use the
- # fancier bar and if not we'll fall back to the plaintext bar.
- try:
- six.text_type().join(characters).encode(encoding)
- except UnicodeEncodeError:
- return fallback
- else:
- return preferred
-
-
-_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
-
-
-class InterruptibleMixin(object):
- """
- Helper to ensure that self.finish() gets called on keyboard interrupt.
-
- This allows downloads to be interrupted without leaving temporary state
- (like hidden cursors) behind.
-
- This class is similar to the progress library's existing SigIntMixin
- helper, but as of version 1.2, that helper has the following problems:
-
- 1. It calls sys.exit().
- 2. It discards the existing SIGINT handler completely.
- 3. It leaves its own handler in place even after an uninterrupted finish,
- which will have unexpected delayed effects if the user triggers an
- unrelated keyboard interrupt some time after a progress-displaying
- download has already completed, for example.
- """
-
- def __init__(self, *args, **kwargs):
- """
- Save the original SIGINT handler for later.
- """
- super(InterruptibleMixin, self).__init__(*args, **kwargs)
-
- self.original_handler = signal(SIGINT, self.handle_sigint)
-
- # If signal() returns None, the previous handler was not installed from
- # Python, and we cannot restore it. This probably should not happen,
- # but if it does, we must restore something sensible instead, at least.
- # The least bad option should be Python's default SIGINT handler, which
- # just raises KeyboardInterrupt.
- if self.original_handler is None:
- self.original_handler = default_int_handler
-
- def finish(self):
- """
- Restore the original SIGINT handler after finishing.
-
- This should happen regardless of whether the progress display finishes
- normally, or gets interrupted.
- """
- super(InterruptibleMixin, self).finish()
- signal(SIGINT, self.original_handler)
-
- def handle_sigint(self, signum, frame):
- """
- Call self.finish() before delegating to the original SIGINT handler.
-
- This handler should only be in place while the progress display is
- active.
- """
- self.finish()
- self.original_handler(signum, frame)
-
-
-class SilentBar(Bar):
-
- def update(self):
- pass
-
-
-class BlueEmojiBar(IncrementalBar):
-
- suffix = "%(percent)d%%"
- bar_prefix = " "
- bar_suffix = " "
- phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
-
-
-class DownloadProgressMixin(object):
-
- def __init__(self, *args, **kwargs):
- super(DownloadProgressMixin, self).__init__(*args, **kwargs)
- self.message = (" " * (get_indentation() + 2)) + self.message
-
- @property
- def downloaded(self):
- return format_size(self.index)
-
- @property
- def download_speed(self):
- # Avoid zero division errors...
- if self.avg == 0.0:
- return "..."
- return format_size(1 / self.avg) + "/s"
-
- @property
- def pretty_eta(self):
- if self.eta:
- return "eta %s" % self.eta_td
- return ""
-
- def iter(self, it, n=1):
- for x in it:
- yield x
- self.next(n)
- self.finish()
-
-
-class WindowsMixin(object):
-
- def __init__(self, *args, **kwargs):
- # The Windows terminal does not support the hide/show cursor ANSI codes
- # even with colorama. So we'll ensure that hide_cursor is False on
- # Windows.
- # This call needs to go before the super() call, so that hide_cursor
- # is set in time. The base progress bar class writes the "hide cursor"
- # code to the terminal in its init, so if we don't set this soon
- # enough, we get a "hide" with no corresponding "show"...
- if WINDOWS and self.hide_cursor:
- self.hide_cursor = False
-
- super(WindowsMixin, self).__init__(*args, **kwargs)
-
- # Check if we are running on Windows and we have the colorama module,
- # if we do then wrap our file with it.
- if WINDOWS and colorama:
- self.file = colorama.AnsiToWin32(self.file)
- # The progress code expects to be able to call self.file.isatty()
- # but the colorama.AnsiToWin32() object doesn't have that, so we'll
- # add it.
- self.file.isatty = lambda: self.file.wrapped.isatty()
- # The progress code expects to be able to call self.file.flush()
- # but the colorama.AnsiToWin32() object doesn't have that, so we'll
- # add it.
- self.file.flush = lambda: self.file.wrapped.flush()
-
-
-class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
- DownloadProgressMixin):
-
- file = sys.stdout
- message = "%(percent)d%%"
- suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
-
-# NOTE: The "type: ignore" comments on the following classes are there to
-# work around https://github.com/python/typing/issues/241
-
-
-class DefaultDownloadProgressBar(BaseDownloadProgressBar,
- _BaseBar):
- pass
-
-
-class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
- pass
-
-
-class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore
- IncrementalBar):
- pass
-
-
-class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore
- ChargingBar):
- pass
-
-
-class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore
- pass
-
-
-class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore
- FillingSquaresBar):
- pass
-
-
-class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
- FillingCirclesBar):
- pass
-
-
-class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
- BlueEmojiBar):
- pass
-
-
-class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
- DownloadProgressMixin, WritelnMixin, Spinner):
-
- file = sys.stdout
- suffix = "%(downloaded)s %(download_speed)s"
-
- def next_phase(self):
- if not hasattr(self, "_phaser"):
- self._phaser = itertools.cycle(self.phases)
- return next(self._phaser)
-
- def update(self):
- message = self.message % self
- phase = self.next_phase()
- suffix = self.suffix % self
- line = ''.join([
- message,
- " " if message else "",
- phase,
- " " if suffix else "",
- suffix,
- ])
-
- self.writeln(line)
-
-
-BAR_TYPES = {
- "off": (DownloadSilentBar, DownloadSilentBar),
- "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
- "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
- "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
- "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
-}
-
-
-def DownloadProgressProvider(progress_bar, max=None):
- if max is None or max == 0:
- return BAR_TYPES[progress_bar][1]().iter
- else:
- return BAR_TYPES[progress_bar][0](max=max).iter
-
-
-################################################################
-# Generic "something is happening" spinners
-#
-# We don't even try using progress.spinner.Spinner here because it's actually
-# simpler to reimplement from scratch than to coerce their code into doing
-# what we need.
-################################################################
-
-@contextlib.contextmanager
-def hidden_cursor(file):
- # type: (IO) -> Iterator[None]
- # The Windows terminal does not support the hide/show cursor ANSI codes,
- # even via colorama. So don't even try.
- if WINDOWS:
- yield
- # We don't want to clutter the output with control characters if we're
- # writing to a file, or if the user is running with --quiet.
- # See https://github.com/pypa/pip/issues/3418
- elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
- yield
- else:
- file.write(HIDE_CURSOR)
- try:
- yield
- finally:
- file.write(SHOW_CURSOR)
-
-
-class RateLimiter(object):
- def __init__(self, min_update_interval_seconds):
- # type: (float) -> None
- self._min_update_interval_seconds = min_update_interval_seconds
- self._last_update = 0 # type: float
-
- def ready(self):
- # type: () -> bool
- now = time.time()
- delta = now - self._last_update
- return delta >= self._min_update_interval_seconds
-
- def reset(self):
- # type: () -> None
- self._last_update = time.time()
-
-
-class SpinnerInterface(object):
- def spin(self):
- # type: () -> None
- raise NotImplementedError()
-
- def finish(self, final_status):
- # type: (str) -> None
- raise NotImplementedError()
-
-
-class InteractiveSpinner(SpinnerInterface):
- def __init__(self, message, file=None, spin_chars="-\\|/",
- # Empirically, 8 updates/second looks nice
- min_update_interval_seconds=0.125):
- self._message = message
- if file is None:
- file = sys.stdout
- self._file = file
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._finished = False
-
- self._spin_cycle = itertools.cycle(spin_chars)
-
- self._file.write(" " * get_indentation() + self._message + " ... ")
- self._width = 0
-
- def _write(self, status):
- assert not self._finished
- # Erase what we wrote before by backspacing to the beginning, writing
- # spaces to overwrite the old text, and then backspacing again
- backup = "\b" * self._width
- self._file.write(backup + " " * self._width + backup)
- # Now we have a blank slate to add our status
- self._file.write(status)
- self._width = len(status)
- self._file.flush()
- self._rate_limiter.reset()
-
- def spin(self):
- # type: () -> None
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._write(next(self._spin_cycle))
-
- def finish(self, final_status):
- # type: (str) -> None
- if self._finished:
- return
- self._write(final_status)
- self._file.write("\n")
- self._file.flush()
- self._finished = True
-
-
-# Used for dumb terminals, non-interactive installs (no tty), etc.
-# We still print updates occasionally (once every 60 seconds by default) to
-# act as a keep-alive for systems like Travis-CI that take lack-of-output as
-# an indication that a task has frozen.
-class NonInteractiveSpinner(SpinnerInterface):
- def __init__(self, message, min_update_interval_seconds=60):
- # type: (str, float) -> None
- self._message = message
- self._finished = False
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._update("started")
-
- def _update(self, status):
- assert not self._finished
- self._rate_limiter.reset()
- logger.info("%s: %s", self._message, status)
-
- def spin(self):
- # type: () -> None
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._update("still running...")
-
- def finish(self, final_status):
- # type: (str) -> None
- if self._finished:
- return
- self._update("finished with status '%s'" % (final_status,))
- self._finished = True
-
-
-@contextlib.contextmanager
-def open_spinner(message):
- # type: (str) -> Iterator[SpinnerInterface]
- # Interactive spinner goes directly to sys.stdout rather than being routed
- # through the logging system, but it acts like it has level INFO,
- # i.e. it's only displayed if we're at level INFO or better.
- # Non-interactive spinner goes through the logging system, so it is always
- # in sync with logging configuration.
- if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
- spinner = InteractiveSpinner(message) # type: SpinnerInterface
- else:
- spinner = NonInteractiveSpinner(message)
- try:
- with hidden_cursor(sys.stdout):
- yield spinner
- except KeyboardInterrupt:
- spinner.finish("canceled")
- raise
- except Exception:
- spinner.finish("error")
- raise
- else:
- spinner.finish("done")