summaryrefslogtreecommitdiff
path: root/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req
diff options
context:
space:
mode:
Diffstat (limited to 'venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req')
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py77
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pycbin1711 -> 0 bytes
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pycbin7631 -> 0 bytes
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pycbin9209 -> 0 bytes
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pycbin25046 -> 0 bytes
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pycbin6046 -> 0 bytes
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pycbin3160 -> 0 bytes
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pycbin17003 -> 0 bytes
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py339
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py382
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py1021
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py197
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py88
-rw-r--r--venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py596
14 files changed, 0 insertions, 2700 deletions
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py
deleted file mode 100644
index 5e4eb92..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py
+++ /dev/null
@@ -1,77 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-
-from .req_install import InstallRequirement
-from .req_set import RequirementSet
-from .req_file import parse_requirements
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import List, Sequence # noqa: F401
-
-__all__ = [
- "RequirementSet", "InstallRequirement",
- "parse_requirements", "install_given_reqs",
-]
-
-logger = logging.getLogger(__name__)
-
-
-def install_given_reqs(
- to_install, # type: List[InstallRequirement]
- install_options, # type: List[str]
- global_options=(), # type: Sequence[str]
- *args, **kwargs
-):
- # type: (...) -> List[InstallRequirement]
- """
- Install everything in the given list.
-
- (to be called after having downloaded and unpacked the packages)
- """
-
- if to_install:
- logger.info(
- 'Installing collected packages: %s',
- ', '.join([req.name for req in to_install]),
- )
-
- with indent_log():
- for requirement in to_install:
- if requirement.conflicts_with:
- logger.info(
- 'Found existing installation: %s',
- requirement.conflicts_with,
- )
- with indent_log():
- uninstalled_pathset = requirement.uninstall(
- auto_confirm=True
- )
- try:
- requirement.install(
- install_options,
- global_options,
- *args,
- **kwargs
- )
- except Exception:
- should_rollback = (
- requirement.conflicts_with and
- not requirement.install_succeeded
- )
- # if install did not succeed, rollback previous uninstall
- if should_rollback:
- uninstalled_pathset.rollback()
- raise
- else:
- should_commit = (
- requirement.conflicts_with and
- requirement.install_succeeded
- )
- if should_commit:
- uninstalled_pathset.commit()
- requirement.remove_temporary_source()
-
- return to_install
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
deleted file mode 100644
index 3d591d3..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/__init__.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
deleted file mode 100644
index b60d743..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/constructors.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
deleted file mode 100644
index d5eb5fc..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_file.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
deleted file mode 100644
index ef28d33..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_install.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
deleted file mode 100644
index c6e6d28..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_set.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
deleted file mode 100644
index f054d9f..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
deleted file mode 100644
index 9aca886..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py
deleted file mode 100644
index 1eed1dd..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/constructors.py
+++ /dev/null
@@ -1,339 +0,0 @@
-"""Backing implementation for InstallRequirement's various constructors
-
-The idea here is that these formed a major chunk of InstallRequirement's size
-so, moving them and support code dedicated to them outside of that class
-helps creates for better understandability for the rest of the code.
-
-These are meant to be used elsewhere within pip to create instances of
-InstallRequirement.
-"""
-
-import logging
-import os
-import re
-
-from pip._vendor.packaging.markers import Marker
-from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
-from pip._vendor.packaging.specifiers import Specifier
-from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
-
-from pip._internal.download import (
- is_archive_file, is_url, path_to_url, url_to_path,
-)
-from pip._internal.exceptions import InstallationError
-from pip._internal.models.index import PyPI, TestPyPI
-from pip._internal.models.link import Link
-from pip._internal.pyproject import make_pyproject_path
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.misc import is_installable_dir
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.vcs import vcs
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Optional, Tuple, Set, Any, Union, Text, Dict,
- )
- from pip._internal.cache import WheelCache # noqa: F401
-
-
-__all__ = [
- "install_req_from_editable", "install_req_from_line",
- "parse_editable"
-]
-
-logger = logging.getLogger(__name__)
-operators = Specifier._operators.keys()
-
-
-def _strip_extras(path):
- # type: (str) -> Tuple[str, Optional[str]]
- m = re.match(r'^(.+)(\[[^\]]+\])$', path)
- extras = None
- if m:
- path_no_extras = m.group(1)
- extras = m.group(2)
- else:
- path_no_extras = path
-
- return path_no_extras, extras
-
-
-def parse_editable(editable_req):
- # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
- """Parses an editable requirement into:
- - a requirement name
- - an URL
- - extras
- - editable options
- Accepted requirements:
- svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
- .[some_extra]
- """
-
- url = editable_req
-
- # If a file path is specified with extras, strip off the extras.
- url_no_extras, extras = _strip_extras(url)
-
- if os.path.isdir(url_no_extras):
- if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
- msg = (
- 'File "setup.py" not found. Directory cannot be installed '
- 'in editable mode: {}'.format(os.path.abspath(url_no_extras))
- )
- pyproject_path = make_pyproject_path(url_no_extras)
- if os.path.isfile(pyproject_path):
- msg += (
- '\n(A "pyproject.toml" file was found, but editable '
- 'mode currently requires a setup.py based build.)'
- )
- raise InstallationError(msg)
-
- # Treating it as code that has already been checked out
- url_no_extras = path_to_url(url_no_extras)
-
- if url_no_extras.lower().startswith('file:'):
- package_name = Link(url_no_extras).egg_fragment
- if extras:
- return (
- package_name,
- url_no_extras,
- Requirement("placeholder" + extras.lower()).extras,
- )
- else:
- return package_name, url_no_extras, None
-
- for version_control in vcs:
- if url.lower().startswith('%s:' % version_control):
- url = '%s+%s' % (version_control, url)
- break
-
- if '+' not in url:
- raise InstallationError(
- '%s should either be a path to a local project or a VCS url '
- 'beginning with svn+, git+, hg+, or bzr+' %
- editable_req
- )
-
- vc_type = url.split('+', 1)[0].lower()
-
- if not vcs.get_backend(vc_type):
- error_message = 'For --editable=%s only ' % editable_req + \
- ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
- ' is currently supported'
- raise InstallationError(error_message)
-
- package_name = Link(url).egg_fragment
- if not package_name:
- raise InstallationError(
- "Could not detect requirement name for '%s', please specify one "
- "with #egg=your_package_name" % editable_req
- )
- return package_name, url, None
-
-
-def deduce_helpful_msg(req):
- # type: (str) -> str
- """Returns helpful msg in case requirements file does not exist,
- or cannot be parsed.
-
- :params req: Requirements file path
- """
- msg = ""
- if os.path.exists(req):
- msg = " It does exist."
- # Try to parse and check if it is a requirements file.
- try:
- with open(req, 'r') as fp:
- # parse first line only
- next(parse_requirements(fp.read()))
- msg += " The argument you provided " + \
- "(%s) appears to be a" % (req) + \
- " requirements file. If that is the" + \
- " case, use the '-r' flag to install" + \
- " the packages specified within it."
- except RequirementParseError:
- logger.debug("Cannot parse '%s' as requirements \
- file" % (req), exc_info=True)
- else:
- msg += " File '%s' does not exist." % (req)
- return msg
-
-
-# ---- The actual constructors follow ----
-
-
-def install_req_from_editable(
- editable_req, # type: str
- comes_from=None, # type: Optional[str]
- use_pep517=None, # type: Optional[bool]
- isolated=False, # type: bool
- options=None, # type: Optional[Dict[str, Any]]
- wheel_cache=None, # type: Optional[WheelCache]
- constraint=False # type: bool
-):
- # type: (...) -> InstallRequirement
- name, url, extras_override = parse_editable(editable_req)
- if url.startswith('file:'):
- source_dir = url_to_path(url)
- else:
- source_dir = None
-
- if name is not None:
- try:
- req = Requirement(name)
- except InvalidRequirement:
- raise InstallationError("Invalid requirement: '%s'" % name)
- else:
- req = None
- return InstallRequirement(
- req, comes_from, source_dir=source_dir,
- editable=True,
- link=Link(url),
- constraint=constraint,
- use_pep517=use_pep517,
- isolated=isolated,
- options=options if options else {},
- wheel_cache=wheel_cache,
- extras=extras_override or (),
- )
-
-
-def install_req_from_line(
- name, # type: str
- comes_from=None, # type: Optional[Union[str, InstallRequirement]]
- use_pep517=None, # type: Optional[bool]
- isolated=False, # type: bool
- options=None, # type: Optional[Dict[str, Any]]
- wheel_cache=None, # type: Optional[WheelCache]
- constraint=False # type: bool
-):
- # type: (...) -> InstallRequirement
- """Creates an InstallRequirement from a name, which might be a
- requirement, directory containing 'setup.py', filename, or URL.
- """
- if is_url(name):
- marker_sep = '; '
- else:
- marker_sep = ';'
- if marker_sep in name:
- name, markers_as_string = name.split(marker_sep, 1)
- markers_as_string = markers_as_string.strip()
- if not markers_as_string:
- markers = None
- else:
- markers = Marker(markers_as_string)
- else:
- markers = None
- name = name.strip()
- req_as_string = None
- path = os.path.normpath(os.path.abspath(name))
- link = None
- extras_as_string = None
-
- if is_url(name):
- link = Link(name)
- else:
- p, extras_as_string = _strip_extras(path)
- looks_like_dir = os.path.isdir(p) and (
- os.path.sep in name or
- (os.path.altsep is not None and os.path.altsep in name) or
- name.startswith('.')
- )
- if looks_like_dir:
- if not is_installable_dir(p):
- raise InstallationError(
- "Directory %r is not installable. Neither 'setup.py' "
- "nor 'pyproject.toml' found." % name
- )
- link = Link(path_to_url(p))
- elif is_archive_file(p):
- if not os.path.isfile(p):
- logger.warning(
- 'Requirement %r looks like a filename, but the '
- 'file does not exist',
- name
- )
- link = Link(path_to_url(p))
-
- # it's a local file, dir, or url
- if link:
- # Handle relative file URLs
- if link.scheme == 'file' and re.search(r'\.\./', link.url):
- link = Link(
- path_to_url(os.path.normpath(os.path.abspath(link.path))))
- # wheel file
- if link.is_wheel:
- wheel = Wheel(link.filename) # can raise InvalidWheelFilename
- req_as_string = "%s==%s" % (wheel.name, wheel.version)
- else:
- # set the req to the egg fragment. when it's not there, this
- # will become an 'unnamed' requirement
- req_as_string = link.egg_fragment
-
- # a requirement specifier
- else:
- req_as_string = name
-
- if extras_as_string:
- extras = Requirement("placeholder" + extras_as_string.lower()).extras
- else:
- extras = ()
- if req_as_string is not None:
- try:
- req = Requirement(req_as_string)
- except InvalidRequirement:
- if os.path.sep in req_as_string:
- add_msg = "It looks like a path."
- add_msg += deduce_helpful_msg(req_as_string)
- elif ('=' in req_as_string and
- not any(op in req_as_string for op in operators)):
- add_msg = "= is not a valid operator. Did you mean == ?"
- else:
- add_msg = ""
- raise InstallationError(
- "Invalid requirement: '%s'\n%s" % (req_as_string, add_msg)
- )
- else:
- req = None
-
- return InstallRequirement(
- req, comes_from, link=link, markers=markers,
- use_pep517=use_pep517, isolated=isolated,
- options=options if options else {},
- wheel_cache=wheel_cache,
- constraint=constraint,
- extras=extras,
- )
-
-
-def install_req_from_req_string(
- req_string, # type: str
- comes_from=None, # type: Optional[InstallRequirement]
- isolated=False, # type: bool
- wheel_cache=None, # type: Optional[WheelCache]
- use_pep517=None # type: Optional[bool]
-):
- # type: (...) -> InstallRequirement
- try:
- req = Requirement(req_string)
- except InvalidRequirement:
- raise InstallationError("Invalid requirement: '%s'" % req)
-
- domains_not_allowed = [
- PyPI.file_storage_domain,
- TestPyPI.file_storage_domain,
- ]
- if req.url and comes_from.link.netloc in domains_not_allowed:
- # Explicitly disallow pypi packages that depend on external urls
- raise InstallationError(
- "Packages installed from PyPI cannot depend on packages "
- "which are not also hosted on PyPI.\n"
- "%s depends on %s " % (comes_from.name, req)
- )
-
- return InstallRequirement(
- req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
- use_pep517=use_pep517
- )
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py
deleted file mode 100644
index 726f2f6..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_file.py
+++ /dev/null
@@ -1,382 +0,0 @@
-"""
-Requirements file parsing
-"""
-
-from __future__ import absolute_import
-
-import optparse
-import os
-import re
-import shlex
-import sys
-
-from pip._vendor.six.moves import filterfalse
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.cli import cmdoptions
-from pip._internal.download import get_file_content
-from pip._internal.exceptions import RequirementsFileParseError
-from pip._internal.req.constructors import (
- install_req_from_editable, install_req_from_line,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Iterator, Tuple, Optional, List, Callable, Text
- )
- from pip._internal.req import InstallRequirement # noqa: F401
- from pip._internal.cache import WheelCache # noqa: F401
- from pip._internal.index import PackageFinder # noqa: F401
- from pip._internal.download import PipSession # noqa: F401
-
- ReqFileLines = Iterator[Tuple[int, Text]]
-
-__all__ = ['parse_requirements']
-
-SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
-COMMENT_RE = re.compile(r'(^|\s)+#.*$')
-
-# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
-# variable name consisting of only uppercase letters, digits or the '_'
-# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
-# 2013 Edition.
-ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
-
-SUPPORTED_OPTIONS = [
- cmdoptions.constraints,
- cmdoptions.editable,
- cmdoptions.requirements,
- cmdoptions.no_index,
- cmdoptions.index_url,
- cmdoptions.find_links,
- cmdoptions.extra_index_url,
- cmdoptions.always_unzip,
- cmdoptions.no_binary,
- cmdoptions.only_binary,
- cmdoptions.pre,
- cmdoptions.trusted_host,
- cmdoptions.require_hashes,
-] # type: List[Callable[..., optparse.Option]]
-
-# options to be passed to requirements
-SUPPORTED_OPTIONS_REQ = [
- cmdoptions.install_options,
- cmdoptions.global_options,
- cmdoptions.hash,
-] # type: List[Callable[..., optparse.Option]]
-
-# the 'dest' string values
-SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
-
-
-def parse_requirements(
- filename, # type: str
- finder=None, # type: Optional[PackageFinder]
- comes_from=None, # type: Optional[str]
- options=None, # type: Optional[optparse.Values]
- session=None, # type: Optional[PipSession]
- constraint=False, # type: bool
- wheel_cache=None, # type: Optional[WheelCache]
- use_pep517=None # type: Optional[bool]
-):
- # type: (...) -> Iterator[InstallRequirement]
- """Parse a requirements file and yield InstallRequirement instances.
-
- :param filename: Path or url of requirements file.
- :param finder: Instance of pip.index.PackageFinder.
- :param comes_from: Origin description of requirements.
- :param options: cli options.
- :param session: Instance of pip.download.PipSession.
- :param constraint: If true, parsing a constraint file rather than
- requirements file.
- :param wheel_cache: Instance of pip.wheel.WheelCache
- :param use_pep517: Value of the --use-pep517 option.
- """
- if session is None:
- raise TypeError(
- "parse_requirements() missing 1 required keyword argument: "
- "'session'"
- )
-
- _, content = get_file_content(
- filename, comes_from=comes_from, session=session
- )
-
- lines_enum = preprocess(content, options)
-
- for line_number, line in lines_enum:
- req_iter = process_line(line, filename, line_number, finder,
- comes_from, options, session, wheel_cache,
- use_pep517=use_pep517, constraint=constraint)
- for req in req_iter:
- yield req
-
-
-def preprocess(content, options):
- # type: (Text, Optional[optparse.Values]) -> ReqFileLines
- """Split, filter, and join lines, and return a line iterator
-
- :param content: the content of the requirements file
- :param options: cli options
- """
- lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
- lines_enum = join_lines(lines_enum)
- lines_enum = ignore_comments(lines_enum)
- lines_enum = skip_regex(lines_enum, options)
- lines_enum = expand_env_variables(lines_enum)
- return lines_enum
-
-
-def process_line(
- line, # type: Text
- filename, # type: str
- line_number, # type: int
- finder=None, # type: Optional[PackageFinder]
- comes_from=None, # type: Optional[str]
- options=None, # type: Optional[optparse.Values]
- session=None, # type: Optional[PipSession]
- wheel_cache=None, # type: Optional[WheelCache]
- use_pep517=None, # type: Optional[bool]
- constraint=False # type: bool
-):
- # type: (...) -> Iterator[InstallRequirement]
- """Process a single requirements line; This can result in creating/yielding
- requirements, or updating the finder.
-
- For lines that contain requirements, the only options that have an effect
- are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
- requirement. Other options from SUPPORTED_OPTIONS may be present, but are
- ignored.
-
- For lines that do not contain requirements, the only options that have an
- effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
- be present, but are ignored. These lines may contain multiple options
- (although our docs imply only one is supported), and all our parsed and
- affect the finder.
-
- :param constraint: If True, parsing a constraints file.
- :param options: OptionParser options that we may update
- """
- parser = build_parser(line)
- defaults = parser.get_default_values()
- defaults.index_url = None
- if finder:
- defaults.format_control = finder.format_control
- args_str, options_str = break_args_options(line)
- # Prior to 2.7.3, shlex cannot deal with unicode entries
- if sys.version_info < (2, 7, 3):
- # https://github.com/python/mypy/issues/1174
- options_str = options_str.encode('utf8') # type: ignore
- # https://github.com/python/mypy/issues/1174
- opts, _ = parser.parse_args(
- shlex.split(options_str), defaults) # type: ignore
-
- # preserve for the nested code path
- line_comes_from = '%s %s (line %s)' % (
- '-c' if constraint else '-r', filename, line_number,
- )
-
- # yield a line requirement
- if args_str:
- isolated = options.isolated_mode if options else False
- if options:
- cmdoptions.check_install_build_global(options, opts)
- # get the options that apply to requirements
- req_options = {}
- for dest in SUPPORTED_OPTIONS_REQ_DEST:
- if dest in opts.__dict__ and opts.__dict__[dest]:
- req_options[dest] = opts.__dict__[dest]
- yield install_req_from_line(
- args_str, line_comes_from, constraint=constraint,
- use_pep517=use_pep517,
- isolated=isolated, options=req_options, wheel_cache=wheel_cache
- )
-
- # yield an editable requirement
- elif opts.editables:
- isolated = options.isolated_mode if options else False
- yield install_req_from_editable(
- opts.editables[0], comes_from=line_comes_from,
- use_pep517=use_pep517,
- constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
- )
-
- # parse a nested requirements file
- elif opts.requirements or opts.constraints:
- if opts.requirements:
- req_path = opts.requirements[0]
- nested_constraint = False
- else:
- req_path = opts.constraints[0]
- nested_constraint = True
- # original file is over http
- if SCHEME_RE.search(filename):
- # do a url join so relative paths work
- req_path = urllib_parse.urljoin(filename, req_path)
- # original file and nested file are paths
- elif not SCHEME_RE.search(req_path):
- # do a join so relative paths work
- req_path = os.path.join(os.path.dirname(filename), req_path)
- # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
- parsed_reqs = parse_requirements(
- req_path, finder, comes_from, options, session,
- constraint=nested_constraint, wheel_cache=wheel_cache
- )
- for req in parsed_reqs:
- yield req
-
- # percolate hash-checking option upward
- elif opts.require_hashes:
- options.require_hashes = opts.require_hashes
-
- # set finder options
- elif finder:
- if opts.index_url:
- finder.index_urls = [opts.index_url]
- if opts.no_index is True:
- finder.index_urls = []
- if opts.extra_index_urls:
- finder.index_urls.extend(opts.extra_index_urls)
- if opts.find_links:
- # FIXME: it would be nice to keep track of the source
- # of the find_links: support a find-links local path
- # relative to a requirements file.
- value = opts.find_links[0]
- req_dir = os.path.dirname(os.path.abspath(filename))
- relative_to_reqs_file = os.path.join(req_dir, value)
- if os.path.exists(relative_to_reqs_file):
- value = relative_to_reqs_file
- finder.find_links.append(value)
- if opts.pre:
- finder.allow_all_prereleases = True
- if opts.trusted_hosts:
- finder.secure_origins.extend(
- ("*", host, "*") for host in opts.trusted_hosts)
-
-
-def break_args_options(line):
- # type: (Text) -> Tuple[str, Text]
- """Break up the line into an args and options string. We only want to shlex
- (and then optparse) the options, not the args. args can contain markers
- which are corrupted by shlex.
- """
- tokens = line.split(' ')
- args = []
- options = tokens[:]
- for token in tokens:
- if token.startswith('-') or token.startswith('--'):
- break
- else:
- args.append(token)
- options.pop(0)
- return ' '.join(args), ' '.join(options) # type: ignore
-
-
-def build_parser(line):
- # type: (Text) -> optparse.OptionParser
- """
- Return a parser for parsing requirement lines
- """
- parser = optparse.OptionParser(add_help_option=False)
-
- option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
- for option_factory in option_factories:
- option = option_factory()
- parser.add_option(option)
-
- # By default optparse sys.exits on parsing errors. We want to wrap
- # that in our own exception.
- def parser_exit(self, msg):
- # add offending line
- msg = 'Invalid requirement: %s\n%s' % (line, msg)
- raise RequirementsFileParseError(msg)
- # NOTE: mypy disallows assigning to a method
- # https://github.com/python/mypy/issues/2427
- parser.exit = parser_exit # type: ignore
-
- return parser
-
-
-def join_lines(lines_enum):
- # type: (ReqFileLines) -> ReqFileLines
- """Joins a line ending in '\' with the previous line (except when following
- comments). The joined line takes on the index of the first line.
- """
- primary_line_number = None
- new_line = [] # type: List[Text]
- for line_number, line in lines_enum:
- if not line.endswith('\\') or COMMENT_RE.match(line):
- if COMMENT_RE.match(line):
- # this ensures comments are always matched later
- line = ' ' + line
- if new_line:
- new_line.append(line)
- yield primary_line_number, ''.join(new_line)
- new_line = []
- else:
- yield line_number, line
- else:
- if not new_line:
- primary_line_number = line_number
- new_line.append(line.strip('\\'))
-
- # last line contains \
- if new_line:
- yield primary_line_number, ''.join(new_line)
-
- # TODO: handle space after '\'.
-
-
-def ignore_comments(lines_enum):
- # type: (ReqFileLines) -> ReqFileLines
- """
- Strips comments and filter empty lines.
- """
- for line_number, line in lines_enum:
- line = COMMENT_RE.sub('', line)
- line = line.strip()
- if line:
- yield line_number, line
-
-
-def skip_regex(lines_enum, options):
- # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines
- """
- Skip lines that match '--skip-requirements-regex' pattern
-
- Note: the regex pattern is only built once
- """
- skip_regex = options.skip_requirements_regex if options else None
- if skip_regex:
- pattern = re.compile(skip_regex)
- lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
- return lines_enum
-
-
-def expand_env_variables(lines_enum):
- # type: (ReqFileLines) -> ReqFileLines
- """Replace all environment variables that can be retrieved via `os.getenv`.
-
- The only allowed format for environment variables defined in the
- requirement file is `${MY_VARIABLE_1}` to ensure two things:
-
- 1. Strings that contain a `$` aren't accidentally (partially) expanded.
- 2. Ensure consistency across platforms for requirement files.
-
- These points are the result of a discusssion on the `github pull
- request #3514 <https://github.com/pypa/pip/pull/3514>`_.
-
- Valid characters in variable names follow the `POSIX standard
- <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
- to uppercase letter, digits and the `_` (underscore).
- """
- for line_number, line in lines_enum:
- for env_var, var_name in ENV_VAR_RE.findall(line):
- value = os.getenv(var_name)
- if not value:
- continue
-
- line = line.replace(env_var, value)
-
- yield line_number, line
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py
deleted file mode 100644
index a4834b0..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_install.py
+++ /dev/null
@@ -1,1021 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-import os
-import shutil
-import sys
-import sysconfig
-import zipfile
-from distutils.util import change_root
-
-from pip._vendor import pkg_resources, six
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import Version
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.pep517.wrappers import Pep517HookCaller
-
-from pip._internal import wheel
-from pip._internal.build_env import NoOpBuildEnvironment
-from pip._internal.exceptions import InstallationError
-from pip._internal.locations import (
- PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
-)
-from pip._internal.models.link import Link
-from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
-from pip._internal.req.req_uninstall import UninstallPathSet
-from pip._internal.utils.compat import native_str
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
- display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
- get_installed_version, redact_password_from_url, rmtree,
-)
-from pip._internal.utils.packaging import get_metadata
-from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-from pip._internal.vcs import vcs
-from pip._internal.wheel import move_wheel_files
-
-if MYPY_CHECK_RUNNING:
- from typing import ( # noqa: F401
- Optional, Iterable, List, Union, Any, Text, Sequence, Dict
- )
- from pip._internal.build_env import BuildEnvironment # noqa: F401
- from pip._internal.cache import WheelCache # noqa: F401
- from pip._internal.index import PackageFinder # noqa: F401
- from pip._vendor.pkg_resources import Distribution # noqa: F401
- from pip._vendor.packaging.specifiers import SpecifierSet # noqa: F401
- from pip._vendor.packaging.markers import Marker # noqa: F401
-
-
-logger = logging.getLogger(__name__)
-
-
-class InstallRequirement(object):
- """
- Represents something that may be installed later on, may have information
- about where to fetch the relavant requirement and also contains logic for
- installing the said requirement.
- """
-
- def __init__(
- self,
- req, # type: Optional[Requirement]
- comes_from, # type: Optional[Union[str, InstallRequirement]]
- source_dir=None, # type: Optional[str]
- editable=False, # type: bool
- link=None, # type: Optional[Link]
- update=True, # type: bool
- markers=None, # type: Optional[Marker]
- use_pep517=None, # type: Optional[bool]
- isolated=False, # type: bool
- options=None, # type: Optional[Dict[str, Any]]
- wheel_cache=None, # type: Optional[WheelCache]
- constraint=False, # type: bool
- extras=() # type: Iterable[str]
- ):
- # type: (...) -> None
- assert req is None or isinstance(req, Requirement), req
- self.req = req
- self.comes_from = comes_from
- self.constraint = constraint
- if source_dir is not None:
- self.source_dir = os.path.normpath(os.path.abspath(source_dir))
- else:
- self.source_dir = None
- self.editable = editable
-
- self._wheel_cache = wheel_cache
- if link is None and req and req.url:
- # PEP 508 URL requirement
- link = Link(req.url)
- self.link = self.original_link = link
-
- if extras:
- self.extras = extras
- elif req:
- self.extras = {
- pkg_resources.safe_extra(extra) for extra in req.extras
- }
- else:
- self.extras = set()
- if markers is None and req:
- markers = req.marker
- self.markers = markers
-
- self._egg_info_path = None # type: Optional[str]
- # This holds the pkg_resources.Distribution object if this requirement
- # is already available:
- self.satisfied_by = None
- # This hold the pkg_resources.Distribution object if this requirement
- # conflicts with another installed distribution:
- self.conflicts_with = None
- # Temporary build location
- self._temp_build_dir = TempDirectory(kind="req-build")
- # Used to store the global directory where the _temp_build_dir should
- # have been created. Cf _correct_build_location method.
- self._ideal_build_dir = None # type: Optional[str]
- # True if the editable should be updated:
- self.update = update
- # Set to True after successful installation
- self.install_succeeded = None # type: Optional[bool]
- # UninstallPathSet of uninstalled distribution (for possible rollback)
- self.uninstalled_pathset = None
- self.options = options if options else {}
- # Set to True after successful preparation of this requirement
- self.prepared = False
- self.is_direct = False
-
- self.isolated = isolated
- self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment
-
- # For PEP 517, the directory where we request the project metadata
- # gets stored. We need this to pass to build_wheel, so the backend
- # can ensure that the wheel matches the metadata (see the PEP for
- # details).
- self.metadata_directory = None # type: Optional[str]
-
- # The static build requirements (from pyproject.toml)
- self.pyproject_requires = None # type: Optional[List[str]]
-
- # Build requirements that we will check are available
- self.requirements_to_check = [] # type: List[str]
-
- # The PEP 517 backend we should use to build the project
- self.pep517_backend = None # type: Optional[Pep517HookCaller]
-
- # Are we using PEP 517 for this requirement?
- # After pyproject.toml has been loaded, the only valid values are True
- # and False. Before loading, None is valid (meaning "use the default").
- # Setting an explicit value before loading pyproject.toml is supported,
- # but after loading this flag should be treated as read only.
- self.use_pep517 = use_pep517
-
- def __str__(self):
- if self.req:
- s = str(self.req)
- if self.link:
- s += ' from %s' % redact_password_from_url(self.link.url)
- elif self.link:
- s = redact_password_from_url(self.link.url)
- else:
- s = '<InstallRequirement>'
- if self.satisfied_by is not None:
- s += ' in %s' % display_path(self.satisfied_by.location)
- if self.comes_from:
- if isinstance(self.comes_from, six.string_types):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += ' (from %s)' % comes_from
- return s
-
- def __repr__(self):
- return '<%s object: %s editable=%r>' % (
- self.__class__.__name__, str(self), self.editable)
-
- def populate_link(self, finder, upgrade, require_hashes):
- # type: (PackageFinder, bool, bool) -> None
- """Ensure that if a link can be found for this, that it is found.
-
- Note that self.link may still be None - if Upgrade is False and the
- requirement is already installed.
-
- If require_hashes is True, don't use the wheel cache, because cached
- wheels, always built locally, have different hashes than the files
- downloaded from the index server and thus throw false hash mismatches.
- Furthermore, cached wheels at present have undeterministic contents due
- to file modification times.
- """
- if self.link is None:
- self.link = finder.find_requirement(self, upgrade)
- if self._wheel_cache is not None and not require_hashes:
- old_link = self.link
- self.link = self._wheel_cache.get(self.link, self.name)
- if old_link != self.link:
- logger.debug('Using cached wheel link: %s', self.link)
-
- # Things that are valid for all kinds of requirements?
- @property
- def name(self):
- # type: () -> Optional[str]
- if self.req is None:
- return None
- return native_str(pkg_resources.safe_name(self.req.name))
-
- @property
- def specifier(self):
- # type: () -> SpecifierSet
- return self.req.specifier
-
- @property
- def is_pinned(self):
- # type: () -> bool
- """Return whether I am pinned to an exact version.
-
- For example, some-package==1.2 is pinned; some-package>1.2 is not.
- """
- specifiers = self.specifier
- return (len(specifiers) == 1 and
- next(iter(specifiers)).operator in {'==', '==='})
-
- @property
- def installed_version(self):
- return get_installed_version(self.name)
-
- def match_markers(self, extras_requested=None):
- # type: (Optional[Iterable[str]]) -> bool
- if not extras_requested:
- # Provide an extra to safely evaluate the markers
- # without matching any extra
- extras_requested = ('',)
- if self.markers is not None:
- return any(
- self.markers.evaluate({'extra': extra})
- for extra in extras_requested)
- else:
- return True
-
- @property
- def has_hash_options(self):
- # type: () -> bool
- """Return whether any known-good hashes are specified as options.
-
- These activate --require-hashes mode; hashes specified as part of a
- URL do not.
-
- """
- return bool(self.options.get('hashes', {}))
-
- def hashes(self, trust_internet=True):
- # type: (bool) -> Hashes
- """Return a hash-comparer that considers my option- and URL-based
- hashes to be known-good.
-
- Hashes in URLs--ones embedded in the requirements file, not ones
- downloaded from an index server--are almost peers with ones from
- flags. They satisfy --require-hashes (whether it was implicitly or
- explicitly activated) but do not activate it. md5 and sha224 are not
- allowed in flags, which should nudge people toward good algos. We
- always OR all hashes together, even ones from URLs.
-
- :param trust_internet: Whether to trust URL-based (#md5=...) hashes
- downloaded from the internet, as by populate_link()
-
- """
- good_hashes = self.options.get('hashes', {}).copy()
- link = self.link if trust_internet else self.original_link
- if link and link.hash:
- good_hashes.setdefault(link.hash_name, []).append(link.hash)
- return Hashes(good_hashes)
-
- def from_path(self):
- # type: () -> Optional[str]
- """Format a nice indicator to show where this "comes from"
- """
- if self.req is None:
- return None
- s = str(self.req)
- if self.comes_from:
- if isinstance(self.comes_from, six.string_types):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += '->' + comes_from
- return s
-
- def build_location(self, build_dir):
- # type: (str) -> Optional[str]
- assert build_dir is not None
- if self._temp_build_dir.path is not None:
- return self._temp_build_dir.path
- if self.req is None:
- # for requirement via a path to a directory: the name of the
- # package is not available yet so we create a temp directory
- # Once run_egg_info will have run, we'll be able
- # to fix it via _correct_build_location
- # Some systems have /tmp as a symlink which confuses custom
- # builds (such as numpy). Thus, we ensure that the real path
- # is returned.
- self._temp_build_dir.create()
- self._ideal_build_dir = build_dir
-
- return self._temp_build_dir.path
- if self.editable:
- name = self.name.lower()
- else:
- name = self.name
- # FIXME: Is there a better place to create the build_dir? (hg and bzr
- # need this)
- if not os.path.exists(build_dir):
- logger.debug('Creating directory %s', build_dir)
- _make_build_dir(build_dir)
- return os.path.join(build_dir, name)
-
- def _correct_build_location(self):
- # type: () -> None
- """Move self._temp_build_dir to self._ideal_build_dir/self.req.name
-
- For some requirements (e.g. a path to a directory), the name of the
- package is not available until we run egg_info, so the build_location
- will return a temporary directory and store the _ideal_build_dir.
-
- This is only called by self.run_egg_info to fix the temporary build
- directory.
- """
- if self.source_dir is not None:
- return
- assert self.req is not None
- assert self._temp_build_dir.path
- assert (self._ideal_build_dir is not None and
- self._ideal_build_dir.path) # type: ignore
- old_location = self._temp_build_dir.path
- self._temp_build_dir.path = None
-
- new_location = self.build_location(self._ideal_build_dir)
- if os.path.exists(new_location):
- raise InstallationError(
- 'A package already exists in %s; please remove it to continue'
- % display_path(new_location))
- logger.debug(
- 'Moving package %s from %s to new location %s',
- self, display_path(old_location), display_path(new_location),
- )
- shutil.move(old_location, new_location)
- self._temp_build_dir.path = new_location
- self._ideal_build_dir = None
- self.source_dir = os.path.normpath(os.path.abspath(new_location))
- self._egg_info_path = None
-
- # Correct the metadata directory, if it exists
- if self.metadata_directory:
- old_meta = self.metadata_directory
- rel = os.path.relpath(old_meta, start=old_location)
- new_meta = os.path.join(new_location, rel)
- new_meta = os.path.normpath(os.path.abspath(new_meta))
- self.metadata_directory = new_meta
-
- def remove_temporary_source(self):
- # type: () -> None
- """Remove the source files from this requirement, if they are marked
- for deletion"""
- if self.source_dir and os.path.exists(
- os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
- logger.debug('Removing source in %s', self.source_dir)
- rmtree(self.source_dir)
- self.source_dir = None
- self._temp_build_dir.cleanup()
- self.build_env.cleanup()
-
- def check_if_exists(self, use_user_site):
- # type: (bool) -> bool
- """Find an installed distribution that satisfies or conflicts
- with this requirement, and set self.satisfied_by or
- self.conflicts_with appropriately.
- """
- if self.req is None:
- return False
- try:
- # get_distribution() will resolve the entire list of requirements
- # anyway, and we've already determined that we need the requirement
- # in question, so strip the marker so that we don't try to
- # evaluate it.
- no_marker = Requirement(str(self.req))
- no_marker.marker = None
- self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
- if self.editable and self.satisfied_by:
- self.conflicts_with = self.satisfied_by
- # when installing editables, nothing pre-existing should ever
- # satisfy
- self.satisfied_by = None
- return True
- except pkg_resources.DistributionNotFound:
- return False
- except pkg_resources.VersionConflict:
- existing_dist = pkg_resources.get_distribution(
- self.req.name
- )
- if use_user_site:
- if dist_in_usersite(existing_dist):
- self.conflicts_with = existing_dist
- elif (running_under_virtualenv() and
- dist_in_site_packages(existing_dist)):
- raise InstallationError(
- "Will not install to the user site because it will "
- "lack sys.path precedence to %s in %s" %
- (existing_dist.project_name, existing_dist.location)
- )
- else:
- self.conflicts_with = existing_dist
- return True
-
- # Things valid for wheels
- @property
- def is_wheel(self):
- # type: () -> bool
- if not self.link:
- return False
- return self.link.is_wheel
-
- def move_wheel_files(
- self,
- wheeldir, # type: str
- root=None, # type: Optional[str]
- home=None, # type: Optional[str]
- prefix=None, # type: Optional[str]
- warn_script_location=True, # type: bool
- use_user_site=False, # type: bool
- pycompile=True # type: bool
- ):
- # type: (...) -> None
- move_wheel_files(
- self.name, self.req, wheeldir,
- user=use_user_site,
- home=home,
- root=root,
- prefix=prefix,
- pycompile=pycompile,
- isolated=self.isolated,
- warn_script_location=warn_script_location,
- )
-
- # Things valid for sdists
- @property
- def setup_py_dir(self):
- # type: () -> str
- return os.path.join(
- self.source_dir,
- self.link and self.link.subdirectory_fragment or '')
-
- @property
- def setup_py(self):
- # type: () -> str
- assert self.source_dir, "No source dir for %s" % self
-
- setup_py = os.path.join(self.setup_py_dir, 'setup.py')
-
- # Python2 __file__ should not be unicode
- if six.PY2 and isinstance(setup_py, six.text_type):
- setup_py = setup_py.encode(sys.getfilesystemencoding())
-
- return setup_py
-
- @property
- def pyproject_toml(self):
- # type: () -> str
- assert self.source_dir, "No source dir for %s" % self
-
- return make_pyproject_path(self.setup_py_dir)
-
- def load_pyproject_toml(self):
- # type: () -> None
- """Load the pyproject.toml file.
-
- After calling this routine, all of the attributes related to PEP 517
- processing for this requirement have been set. In particular, the
- use_pep517 attribute can be used to determine whether we should
- follow the PEP 517 or legacy (setup.py) code path.
- """
- pep517_data = load_pyproject_toml(
- self.use_pep517,
- self.pyproject_toml,
- self.setup_py,
- str(self)
- )
-
- if pep517_data is None:
- self.use_pep517 = False
- else:
- self.use_pep517 = True
- requires, backend, check = pep517_data
- self.requirements_to_check = check
- self.pyproject_requires = requires
- self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend)
-
- # Use a custom function to call subprocesses
- self.spin_message = ""
-
- def runner(cmd, cwd=None, extra_environ=None):
- with open_spinner(self.spin_message) as spinner:
- call_subprocess(
- cmd,
- cwd=cwd,
- extra_environ=extra_environ,
- show_stdout=False,
- spinner=spinner
- )
- self.spin_message = ""
-
- self.pep517_backend._subprocess_runner = runner
-
- def prepare_metadata(self):
- # type: () -> None
- """Ensure that project metadata is available.
-
- Under PEP 517, call the backend hook to prepare the metadata.
- Under legacy processing, call setup.py egg-info.
- """
- assert self.source_dir
-
- with indent_log():
- if self.use_pep517:
- self.prepare_pep517_metadata()
- else:
- self.run_egg_info()
-
- if not self.req:
- if isinstance(parse_version(self.metadata["Version"]), Version):
- op = "=="
- else:
- op = "==="
- self.req = Requirement(
- "".join([
- self.metadata["Name"],
- op,
- self.metadata["Version"],
- ])
- )
- self._correct_build_location()
- else:
- metadata_name = canonicalize_name(self.metadata["Name"])
- if canonicalize_name(self.req.name) != metadata_name:
- logger.warning(
- 'Generating metadata for package %s '
- 'produced metadata for project name %s. Fix your '
- '#egg=%s fragments.',
- self.name, metadata_name, self.name
- )
- self.req = Requirement(metadata_name)
-
- def prepare_pep517_metadata(self):
- # type: () -> None
- assert self.pep517_backend is not None
-
- metadata_dir = os.path.join(
- self.setup_py_dir,
- 'pip-wheel-metadata'
- )
- ensure_dir(metadata_dir)
-
- with self.build_env:
- # Note that Pep517HookCaller implements a fallback for
- # prepare_metadata_for_build_wheel, so we don't have to
- # consider the possibility that this hook doesn't exist.
- backend = self.pep517_backend
- self.spin_message = "Preparing wheel metadata"
- distinfo_dir = backend.prepare_metadata_for_build_wheel(
- metadata_dir
- )
-
- self.metadata_directory = os.path.join(metadata_dir, distinfo_dir)
-
- def run_egg_info(self):
- # type: () -> None
- if self.name:
- logger.debug(
- 'Running setup.py (path:%s) egg_info for package %s',
- self.setup_py, self.name,
- )
- else:
- logger.debug(
- 'Running setup.py (path:%s) egg_info for package from %s',
- self.setup_py, self.link,
- )
- script = SETUPTOOLS_SHIM % self.setup_py
- base_cmd = [sys.executable, '-c', script]
- if self.isolated:
- base_cmd += ["--no-user-cfg"]
- egg_info_cmd = base_cmd + ['egg_info']
- # We can't put the .egg-info files at the root, because then the
- # source code will be mistaken for an installed egg, causing
- # problems
- if self.editable:
- egg_base_option = [] # type: List[str]
- else:
- egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
- ensure_dir(egg_info_dir)
- egg_base_option = ['--egg-base', 'pip-egg-info']
- with self.build_env:
- call_subprocess(
- egg_info_cmd + egg_base_option,
- cwd=self.setup_py_dir,
- show_stdout=False,
- command_desc='python setup.py egg_info')
-
- @property
- def egg_info_path(self):
- # type: () -> str
- if self._egg_info_path is None:
- if self.editable:
- base = self.source_dir
- else:
- base = os.path.join(self.setup_py_dir, 'pip-egg-info')
- filenames = os.listdir(base)
- if self.editable:
- filenames = []
- for root, dirs, files in os.walk(base):
- for dir in vcs.dirnames:
- if dir in dirs:
- dirs.remove(dir)
- # Iterate over a copy of ``dirs``, since mutating
- # a list while iterating over it can cause trouble.
- # (See https://github.com/pypa/pip/pull/462.)
- for dir in list(dirs):
- # Don't search in anything that looks like a virtualenv
- # environment
- if (
- os.path.lexists(
- os.path.join(root, dir, 'bin', 'python')
- ) or
- os.path.exists(
- os.path.join(
- root, dir, 'Scripts', 'Python.exe'
- )
- )):
- dirs.remove(dir)
- # Also don't search through tests
- elif dir == 'test' or dir == 'tests':
- dirs.remove(dir)
- filenames.extend([os.path.join(root, dir)
- for dir in dirs])
- filenames = [f for f in filenames if f.endswith('.egg-info')]
-
- if not filenames:
- raise InstallationError(
- "Files/directories not found in %s" % base
- )
- # if we have more than one match, we pick the toplevel one. This
- # can easily be the case if there is a dist folder which contains
- # an extracted tarball for testing purposes.
- if len(filenames) > 1:
- filenames.sort(
- key=lambda x: x.count(os.path.sep) +
- (os.path.altsep and x.count(os.path.altsep) or 0)
- )
- self._egg_info_path = os.path.join(base, filenames[0])
- return self._egg_info_path
-
- @property
- def metadata(self):
- if not hasattr(self, '_metadata'):
- self._metadata = get_metadata(self.get_dist())
-
- return self._metadata
-
- def get_dist(self):
- # type: () -> Distribution
- """Return a pkg_resources.Distribution for this requirement"""
- if self.metadata_directory:
- base_dir, distinfo = os.path.split(self.metadata_directory)
- metadata = pkg_resources.PathMetadata(
- base_dir, self.metadata_directory
- )
- dist_name = os.path.splitext(distinfo)[0]
- typ = pkg_resources.DistInfoDistribution
- else:
- egg_info = self.egg_info_path.rstrip(os.path.sep)
- base_dir = os.path.dirname(egg_info)
- metadata = pkg_resources.PathMetadata(base_dir, egg_info)
- dist_name = os.path.splitext(os.path.basename(egg_info))[0]
- # https://github.com/python/mypy/issues/1174
- typ = pkg_resources.Distribution # type: ignore
-
- return typ(
- base_dir,
- project_name=dist_name,
- metadata=metadata,
- )
-
- def assert_source_matches_version(self):
- # type: () -> None
- assert self.source_dir
- version = self.metadata['version']
- if self.req.specifier and version not in self.req.specifier:
- logger.warning(
- 'Requested %s, but installing version %s',
- self,
- version,
- )
- else:
- logger.debug(
- 'Source in %s has version %s, which satisfies requirement %s',
- display_path(self.source_dir),
- version,
- self,
- )
-
- # For both source distributions and editables
- def ensure_has_source_dir(self, parent_dir):
- # type: (str) -> str
- """Ensure that a source_dir is set.
-
- This will create a temporary build dir if the name of the requirement
- isn't known yet.
-
- :param parent_dir: The ideal pip parent_dir for the source_dir.
- Generally src_dir for editables and build_dir for sdists.
- :return: self.source_dir
- """
- if self.source_dir is None:
- self.source_dir = self.build_location(parent_dir)
- return self.source_dir
-
- # For editable installations
- def install_editable(
- self,
- install_options, # type: List[str]
- global_options=(), # type: Sequence[str]
- prefix=None # type: Optional[str]
- ):
- # type: (...) -> None
- logger.info('Running setup.py develop for %s', self.name)
-
- if self.isolated:
- global_options = list(global_options) + ["--no-user-cfg"]
-
- if prefix:
- prefix_param = ['--prefix={}'.format(prefix)]
- install_options = list(install_options) + prefix_param
-
- with indent_log():
- # FIXME: should we do --install-headers here too?
- with self.build_env:
- call_subprocess(
- [
- sys.executable,
- '-c',
- SETUPTOOLS_SHIM % self.setup_py
- ] +
- list(global_options) +
- ['develop', '--no-deps'] +
- list(install_options),
-
- cwd=self.setup_py_dir,
- show_stdout=False,
- )
-
- self.install_succeeded = True
-
- def update_editable(self, obtain=True):
- # type: (bool) -> None
- if not self.link:
- logger.debug(
- "Cannot update repository at %s; repository location is "
- "unknown",
- self.source_dir,
- )
- return
- assert self.editable
- assert self.source_dir
- if self.link.scheme == 'file':
- # Static paths don't get updated
- return
- assert '+' in self.link.url, "bad url: %r" % self.link.url
- if not self.update:
- return
- vc_type, url = self.link.url.split('+', 1)
- backend = vcs.get_backend(vc_type)
- if backend:
- vcs_backend = backend(self.link.url)
- if obtain:
- vcs_backend.obtain(self.source_dir)
- else:
- vcs_backend.export(self.source_dir)
- else:
- assert 0, (
- 'Unexpected version control type (in %s): %s'
- % (self.link, vc_type))
-
- # Top-level Actions
- def uninstall(self, auto_confirm=False, verbose=False,
- use_user_site=False):
- # type: (bool, bool, bool) -> Optional[UninstallPathSet]
- """
- Uninstall the distribution currently satisfying this requirement.
-
- Prompts before removing or modifying files unless
- ``auto_confirm`` is True.
-
- Refuses to delete or modify files outside of ``sys.prefix`` -
- thus uninstallation within a virtual environment can only
- modify that virtual environment, even if the virtualenv is
- linked to global site-packages.
-
- """
- if not self.check_if_exists(use_user_site):
- logger.warning("Skipping %s as it is not installed.", self.name)
- return None
- dist = self.satisfied_by or self.conflicts_with
-
- uninstalled_pathset = UninstallPathSet.from_dist(dist)
- uninstalled_pathset.remove(auto_confirm, verbose)
- return uninstalled_pathset
-
- def _clean_zip_name(self, name, prefix): # only used by archive.
- assert name.startswith(prefix + os.path.sep), (
- "name %r doesn't start with prefix %r" % (name, prefix)
- )
- name = name[len(prefix) + 1:]
- name = name.replace(os.path.sep, '/')
- return name
-
- def _get_archive_name(self, path, parentdir, rootdir):
- # type: (str, str, str) -> str
- path = os.path.join(parentdir, path)
- name = self._clean_zip_name(path, rootdir)
- return self.name + '/' + name
-
- # TODO: Investigate if this should be kept in InstallRequirement
- # Seems to be used only when VCS + downloads
- def archive(self, build_dir):
- # type: (str) -> None
- assert self.source_dir
- create_archive = True
- archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
- archive_path = os.path.join(build_dir, archive_name)
- if os.path.exists(archive_path):
- response = ask_path_exists(
- 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
- display_path(archive_path), ('i', 'w', 'b', 'a'))
- if response == 'i':
- create_archive = False
- elif response == 'w':
- logger.warning('Deleting %s', display_path(archive_path))
- os.remove(archive_path)
- elif response == 'b':
- dest_file = backup_dir(archive_path)
- logger.warning(
- 'Backing up %s to %s',
- display_path(archive_path),
- display_path(dest_file),
- )
- shutil.move(archive_path, dest_file)
- elif response == 'a':
- sys.exit(-1)
- if create_archive:
- zip = zipfile.ZipFile(
- archive_path, 'w', zipfile.ZIP_DEFLATED,
- allowZip64=True
- )
- dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
- for dirpath, dirnames, filenames in os.walk(dir):
- if 'pip-egg-info' in dirnames:
- dirnames.remove('pip-egg-info')
- for dirname in dirnames:
- dir_arcname = self._get_archive_name(dirname,
- parentdir=dirpath,
- rootdir=dir)
- zipdir = zipfile.ZipInfo(dir_arcname + '/')
- zipdir.external_attr = 0x1ED << 16 # 0o755
- zip.writestr(zipdir, '')
- for filename in filenames:
- if filename == PIP_DELETE_MARKER_FILENAME:
- continue
- file_arcname = self._get_archive_name(filename,
- parentdir=dirpath,
- rootdir=dir)
- filename = os.path.join(dirpath, filename)
- zip.write(filename, file_arcname)
- zip.close()
- logger.info('Saved %s', display_path(archive_path))
-
- def install(
- self,
- install_options, # type: List[str]
- global_options=None, # type: Optional[Sequence[str]]
- root=None, # type: Optional[str]
- home=None, # type: Optional[str]
- prefix=None, # type: Optional[str]
- warn_script_location=True, # type: bool
- use_user_site=False, # type: bool
- pycompile=True # type: bool
- ):
- # type: (...) -> None
- global_options = global_options if global_options is not None else []
- if self.editable:
- self.install_editable(
- install_options, global_options, prefix=prefix,
- )
- return
- if self.is_wheel:
- version = wheel.wheel_version(self.source_dir)
- wheel.check_compatibility(version, self.name)
-
- self.move_wheel_files(
- self.source_dir, root=root, prefix=prefix, home=home,
- warn_script_location=warn_script_location,
- use_user_site=use_user_site, pycompile=pycompile,
- )
- self.install_succeeded = True
- return
-
- # Extend the list of global and install options passed on to
- # the setup.py call with the ones from the requirements file.
- # Options specified in requirements file override those
- # specified on the command line, since the last option given
- # to setup.py is the one that is used.
- global_options = list(global_options) + \
- self.options.get('global_options', [])
- install_options = list(install_options) + \
- self.options.get('install_options', [])
-
- if self.isolated:
- # https://github.com/python/mypy/issues/1174
- global_options = global_options + ["--no-user-cfg"] # type: ignore
-
- with TempDirectory(kind="record") as temp_dir:
- record_filename = os.path.join(temp_dir.path, 'install-record.txt')
- install_args = self.get_install_args(
- global_options, record_filename, root, prefix, pycompile,
- )
- msg = 'Running setup.py install for %s' % (self.name,)
- with open_spinner(msg) as spinner:
- with indent_log():
- with self.build_env:
- call_subprocess(
- install_args + install_options,
- cwd=self.setup_py_dir,
- show_stdout=False,
- spinner=spinner,
- )
-
- if not os.path.exists(record_filename):
- logger.debug('Record file %s not found', record_filename)
- return
- self.install_succeeded = True
-
- def prepend_root(path):
- if root is None or not os.path.isabs(path):
- return path
- else:
- return change_root(root, path)
-
- with open(record_filename) as f:
- for line in f:
- directory = os.path.dirname(line)
- if directory.endswith('.egg-info'):
- egg_info_dir = prepend_root(directory)
- break
- else:
- logger.warning(
- 'Could not find .egg-info directory in install record'
- ' for %s',
- self,
- )
- # FIXME: put the record somewhere
- # FIXME: should this be an error?
- return
- new_lines = []
- with open(record_filename) as f:
- for line in f:
- filename = line.strip()
- if os.path.isdir(filename):
- filename += os.path.sep
- new_lines.append(
- os.path.relpath(prepend_root(filename), egg_info_dir)
- )
- new_lines.sort()
- ensure_dir(egg_info_dir)
- inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
- with open(inst_files_path, 'w') as f:
- f.write('\n'.join(new_lines) + '\n')
-
- def get_install_args(
- self,
- global_options, # type: Sequence[str]
- record_filename, # type: str
- root, # type: Optional[str]
- prefix, # type: Optional[str]
- pycompile # type: bool
- ):
- # type: (...) -> List[str]
- install_args = [sys.executable, "-u"]
- install_args.append('-c')
- install_args.append(SETUPTOOLS_SHIM % self.setup_py)
- install_args += list(global_options) + \
- ['install', '--record', record_filename]
- install_args += ['--single-version-externally-managed']
-
- if root is not None:
- install_args += ['--root', root]
- if prefix is not None:
- install_args += ['--prefix', prefix]
-
- if pycompile:
- install_args += ["--compile"]
- else:
- install_args += ["--no-compile"]
-
- if running_under_virtualenv():
- py_ver_str = 'python' + sysconfig.get_python_version()
- install_args += ['--install-headers',
- os.path.join(sys.prefix, 'include', 'site',
- py_ver_str, self.name)]
-
- return install_args
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py
deleted file mode 100644
index d1410e9..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_set.py
+++ /dev/null
@@ -1,197 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-from collections import OrderedDict
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
- from typing import Optional, List, Tuple, Dict, Iterable # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
-
-
-logger = logging.getLogger(__name__)
-
-
class RequirementSet(object):
    """An ordered collection of InstallRequirements to resolve and install.

    Named requirements live in ``self.requirements`` (insertion-ordered);
    URL/path requirements without a name yet are parked in
    ``self.unnamed_requirements`` until they can be named; constraint-only
    entries are reconciled with real requirements as they arrive.
    """

    def __init__(self, require_hashes=False, check_supported_wheels=True):
        # type: (bool, bool) -> None
        """Create a RequirementSet.
        """

        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]  # noqa: E501
        self.require_hashes = require_hashes
        self.check_supported_wheels = check_supported_wheels

        # Mapping of alias: real_name
        self.requirement_aliases = {}  # type: Dict[str, str]
        self.unnamed_requirements = []  # type: List[InstallRequirement]
        self.successfully_downloaded = []  # type: List[InstallRequirement]
        self.reqs_to_cleanup = []  # type: List[InstallRequirement]

    def __str__(self):
        """Space-separated list of user-supplied (root) requirements only."""
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        """Debug representation listing every named requirement."""
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(
        self,
        install_req,  # type: InstallRequirement
        parent_req_name=None,  # type: Optional[str]
        extras_requested=None  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]  # noqa: E501
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name

        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                name, install_req.markers,
            )
            return [], None

        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if self.check_supported_wheels and not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None

        try:
            existing_req = self.get_requirement(name)
        except KeyError:
            existing_req = None

        # Two user-supplied requirements for the same name with the same
        # extras but different specifiers are an unresolvable conflict.
        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: %s (already in %s, name=%r)"
                % (install_req, existing_req, name)
            )

        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.requirements[name] = install_req
            # FIXME: what about other normalizations? E.g., _ vs. -?
            if name.lower() != name:
                self.requirement_aliases[name.lower()] = name
            # We'd want to rescan this requirements later
            return [install_req], install_req

        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req

        # From here on, existing_req is a constraint that install_req is
        # turning into a real requirement.
        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            self.reqs_to_cleanup.append(install_req)
            raise InstallationError(
                "Could not satisfy constraints for '%s': "
                "installation from path or url cannot be "
                "constrained to a version" % name,
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req

    def has_requirement(self, project_name):
        # type: (str) -> bool
        """True if a non-constraint requirement with this (case-insensitive)
        name — directly or via an alias — has been added."""
        name = project_name.lower()
        # NOTE: relies on ``and`` binding tighter than ``or``; each arm is
        # "present AND not merely a constraint".
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        # type: () -> List[InstallRequirement]
        """All non-constraint requirements (named or unnamed).

        Despite the boolean-sounding name this returns a list; callers use
        its truthiness.
        """
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    def get_requirement(self, project_name):
        # type: (str) -> InstallRequirement
        """Look up a requirement by exact name, then lower-cased name,
        checking aliases as well.

        :raises KeyError: when no requirement with that name exists.
        """
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        # type: () -> None
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py
deleted file mode 100644
index 82e084a..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import hashlib
-import logging
-import os
-
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
- from typing import Set, Iterator # noqa: F401
- from pip._internal.req.req_install import InstallRequirement # noqa: F401
- from pip._internal.models.link import Link # noqa: F401
-
-logger = logging.getLogger(__name__)
-
-
class RequirementTracker(object):
    """Tracks requirements whose builds are currently in progress.

    A directory (published to child pip processes via the
    ``PIP_REQ_TRACKER`` environment variable) holds one marker file per
    in-progress build, keyed by a hash of the requirement's link, so that
    a recursive build of the same requirement can be detected and refused.
    """

    def __init__(self):
        # type: () -> None
        self._root = os.environ.get('PIP_REQ_TRACKER')
        if self._root is None:
            # No tracker inherited from a parent process: create one and
            # export it so sub-processes share it.
            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
            self._temp_dir.create()
            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
            logger.debug('Created requirements tracker %r', self._root)
        else:
            # Re-using a tracker owned by a parent process; _temp_dir stays
            # None so cleanup() won't delete a directory we don't own.
            self._temp_dir = None
            logger.debug('Re-using requirements tracker %r', self._root)
        self._entries = set()  # type: Set[InstallRequirement]

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.cleanup()

    def _entry_path(self, link):
        # type: (Link) -> str
        """Return the marker-file path for ``link`` (sha224 of its URL)."""
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req):
        # type: (InstallRequirement) -> None
        """Mark ``req`` as being built.

        :raises LookupError: if a build of the same link is already
            tracked (its marker file exists).
        """
        link = req.link
        info = str(req)
        entry_path = self._entry_path(link)
        try:
            with open(entry_path) as fp:
                # Error: there's already a build in progress.
                raise LookupError('%s is already being built: %s'
                                  % (link, fp.read()))
        except IOError as e:
            # ENOENT just means no marker file exists — no build running.
            if e.errno != errno.ENOENT:
                raise
        assert req not in self._entries
        with open(entry_path, 'w') as fp:
            fp.write(info)
        self._entries.add(req)
        logger.debug('Added %s to build tracker %r', req, self._root)

    def remove(self, req):
        # type: (InstallRequirement) -> None
        """Forget ``req`` and delete its marker file; must have been add()ed."""
        link = req.link
        self._entries.remove(req)
        os.unlink(self._entry_path(link))
        logger.debug('Removed %s from build tracker %r', req, self._root)

    def cleanup(self):
        # type: () -> None
        """Remove all of our marker files, plus the tracker directory
        itself if this instance created it."""
        for req in set(self._entries):
            self.remove(req)
        remove = self._temp_dir is not None
        if remove:
            self._temp_dir.cleanup()
        logger.debug('%s build tracker %r',
                     'Removed' if remove else 'Cleaned',
                     self._root)

    @contextlib.contextmanager
    def track(self, req):
        # type: (InstallRequirement) -> Iterator[None]
        """Context manager tracking ``req`` as building for its duration.

        NOTE(review): there is no try/finally here — if the managed body
        raises, remove() is skipped and the stale entry is only dropped by
        cleanup() on exit.
        """
        self.add(req)
        yield
        self.remove(req)
diff --git a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py b/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py
deleted file mode 100644
index c80959e..0000000
--- a/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_uninstall.py
+++ /dev/null
@@ -1,596 +0,0 @@
-from __future__ import absolute_import
-
-import csv
-import functools
-import logging
-import os
-import sys
-import sysconfig
-
-from pip._vendor import pkg_resources
-
-from pip._internal.exceptions import UninstallationError
-from pip._internal.locations import bin_py, bin_user
-from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
- normalize_path, renames, rmtree,
-)
-from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
def _script_names(dist, script_name, is_gui):
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    bin_dir = bin_user if dist_in_usersite(dist) else bin_py
    exe_name = os.path.join(bin_dir, script_name)
    names = [exe_name]
    if WINDOWS:
        # Windows launchers come with an .exe, a manifest, and a
        # -script.py[w] companion (pyw for GUI scripts).
        names.append(exe_name + '.exe')
        names.append(exe_name + '.exe.manifest')
        script_suffix = '-script.pyw' if is_gui else '-script.py'
        names.append(exe_name + script_suffix)
    return names
-
-
-def _unique(fn):
- @functools.wraps(fn)
- def unique(*args, **kw):
- seen = set()
- for item in fn(*args, **kw):
- if item not in seen:
- seen.add(item)
- yield item
- return unique
-
-
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc and .pyo in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .py[co].
    """
    for row in csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))):
        path = os.path.join(dist.location, row[0])
        yield path
        if not path.endswith('.py'):
            continue
        # Also yield the byte-compiled companions of each .py source.
        dn, fn = os.path.split(path)
        base = fn[:-3]
        yield os.path.join(dn, base + '.pyc')
        yield os.path.join(dn, base + '.pyo')
-
-
def compact(paths):
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep

    def is_covered(candidate, shortpath):
        # True when ``candidate`` lies under an already-kept (possibly
        # '*'-suffixed) shorter path, with a separator at the boundary.
        prefix = shortpath.rstrip("*")
        return (candidate.startswith(prefix) and
                candidate[len(prefix.rstrip(sep))] == sep)

    short_paths = set()
    # Shortest first, so parents are kept before their children arrive.
    for path in sorted(paths, key=len):
        if not any(is_covered(path, kept) for kept in short_paths):
            short_paths.add(path)
    return short_paths
-
-
def compress_for_rename(paths):
    """Returns a set containing the paths that need to be renamed.

    This set may include directories when the original sequence of paths
    included every file on disk.
    """
    case_map = {os.path.normcase(p): p for p in paths}
    remaining = set(case_map)
    unchecked = sorted({os.path.split(p)[0] for p in case_map.values()},
                       key=len)
    wildcards = set()

    def norm_join(*parts):
        return os.path.normcase(os.path.join(*parts))

    for root in unchecked:
        if any(os.path.normcase(root).startswith(w) for w in wildcards):
            # This directory has already been handled.
            continue

        all_files = set()
        all_subdirs = set()
        for dirname, subdirs, files in os.walk(root):
            all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
            all_files.update(norm_join(root, dirname, f) for f in files)

        # If all the files we found are in our remaining set of files to
        # remove, then remove them from the latter set and add a wildcard
        # for the directory.
        if not (all_files - remaining):
            remaining.difference_update(all_files)
            wildcards.add(root + os.sep)

    return {case_map[p] for p in remaining} | wildcards
-
-
def compress_for_output_listing(paths):
    """Returns a tuple of 2 sets of which paths to display to user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all it's contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """
    # Partition input into package folders and plain files, ignoring .pyc.
    folders = set()
    files = set()
    for path in paths:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    _normcased_files = {os.path.normcase(f) for f in files}

    folders = compact(folders)

    # Walk each folder with os.walk so files that appeared after install
    # (and are therefore not in ``paths``) are reported as skipped.
    will_skip = set()
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue

                file_ = os.path.join(dirpath, fname)
                if (os.path.isfile(file_) and
                        os.path.normcase(file_) not in _normcased_files):
                    will_skip.add(file_)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }

    return will_remove, will_skip
-
-
class StashedUninstallPathSet(object):
    """A set of file rename operations to stash files while
    tentatively uninstalling them."""
    def __init__(self):
        # Mapping from source file root to [Adjacent]TempDirectory
        # for files under that directory.
        self._save_dirs = {}
        # (old path, new path) tuples for each move that may need
        # to be undone.
        self._moves = []

    def _get_directory_stash(self, path):
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            save_dir = AdjacentTempDirectory(path)
            save_dir.create()
        except OSError:
            save_dir = TempDirectory(kind="uninstall")
            save_dir.create()
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path

    def _get_file_stash(self, path):
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        # Walk up the directory tree looking for an already-stashed root
        # that contains this file.
        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind='uninstall')
            save_dir.create()
            self._save_dirs[head] = save_dir

        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path

    def stash(self, path):
        """Stashes the directory or file and returns its new location.
        """
        if os.path.isdir(path):
            new_path = self._get_directory_stash(path)
        else:
            new_path = self._get_file_stash(path)

        self._moves.append((path, new_path))
        if os.path.isdir(path) and os.path.isdir(new_path):
            # If we're moving a directory, we need to
            # remove the destination first or else it will be
            # moved to inside the existing directory.
            # We just created new_path ourselves, so it will
            # be removable.
            os.rmdir(new_path)
        renames(path, new_path)
        return new_path

    def commit(self):
        """Commits the uninstall by removing stashed files."""
        for _, save_dir in self._save_dirs.items():
            save_dir.cleanup()
        self._moves = []
        self._save_dirs = {}

    def rollback(self):
        """Undoes the uninstall by moving stashed files back."""
        for p in self._moves:
            # BUGFIX: use the module-level ``logger`` (as every other call
            # in this module does) rather than the root ``logging`` module,
            # so the message honors pip's logging configuration.
            logger.info("Moving to %s\n from %s", *p)

        for new_path, path in self._moves:
            try:
                logger.debug('Replacing %s from %s', new_path, path)
                if os.path.isfile(new_path):
                    os.unlink(new_path)
                elif os.path.isdir(new_path):
                    rmtree(new_path)
                renames(path, new_path)
            except OSError as ex:
                # Best-effort restore: report and keep going so the
                # remaining moves are still attempted.
                logger.error("Failed to restore %s", new_path)
                logger.debug("Exception: %s", ex)

        self.commit()

    @property
    def can_rollback(self):
        """True while at least one stash move could still be undone."""
        return bool(self._moves)
-
-
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # Paths accepted for removal.
        self.paths = set()
        # Paths refused because they fail the is_local() check.
        self._refuse = set()
        # Mapping of .pth file path -> UninstallPthEntries to edit.
        self.pth = {}
        self.dist = dist
        self._moved_paths = StashedUninstallPathSet()

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        """Schedule ``path`` for removal if it exists and is permitted."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        """Schedule removal of ``entry`` from the given .pth file."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                moved = self._moved_paths

                # Collapse per-file entries into whole-directory renames
                # where possible, then stash (rename away) each path.
                for_rename = compress_for_rename(self.paths)

                for path in sorted(compact(for_rename)):
                    moved.stash(path)
                    logger.debug('Removing file or directory %s', path)

                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            # Skip the header entirely when there is nothing to list.
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)
        if verbose:
            _display('Will actually move:', compress_for_rename(self.paths))

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if not self._moved_paths.can_rollback:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        self._moved_paths.rollback()
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self._moved_paths.commit()

    @classmethod
    def from_dist(cls, dist):
        """Build an UninstallPathSet covering everything ``dist`` installed.

        Handles each install layout pip knows about (.egg-info with or
        without installed-files.txt, .dist-info RECORD, .egg directories,
        develop egg-links, distutils records) plus distutils scripts and
        console/gui entry points.
        """
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
        # FIXME: need a test for this elif block
        # occurs with --single-version-externally-managed/--record outside
        # of pip
        elif dist.has_metadata('top_level.txt'):
            if dist.has_metadata('namespace_packages.txt'):
                namespaces = dist.get_metadata('namespace_packages.txt')
            else:
                namespaces = []
            for top_level_pkg in [
                    p for p
                    in dist.get_metadata('top_level.txt').splitlines()
                    if p and p not in namespaces]:
                path = os.path.join(dist.location, top_level_pkg)
                paths_to_remove.add(path)
                paths_to_remove.add(path + '.py')
                paths_to_remove.add(path + '.pyc')
                paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
-
-
class UninstallPthEntries(object):
    """Removes (and can restore) a set of entries from a .pth file."""

    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        # Entries scheduled for removal from the file.
        self.entries = set()
        # Original raw lines, kept so rollback() can restore them.
        self._saved_lines = None

    def add(self, entry):
        """Schedule ``entry`` for removal from the .pth file."""
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        """Rewrite the .pth file with the scheduled entries removed,
        saving the original contents for rollback()."""
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        # Sniff the file's own newline convention so rewritten lines match.
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry wasn't present in the file; nothing to remove.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        """Restore the file's pre-remove() contents.

        Returns True on success, False when remove() was never called.
        """
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True