summaryrefslogtreecommitdiff
path: root/lib/python2.7/site-packages/pip/req
diff options
context:
space:
mode:
author	rahulp13	2020-03-17 14:55:41 +0530
committer	rahulp13	2020-03-17 14:55:41 +0530
commit296443137f4288cb030e92859ccfbe3204bc1088 (patch)
treeca4798c2da1e7244edc3bc108d81b462b537aea2 /lib/python2.7/site-packages/pip/req
parent0db48f6533517ecebfd9f0693f89deca28408b76 (diff)
downloadKiCad-eSim-296443137f4288cb030e92859ccfbe3204bc1088.tar.gz
KiCad-eSim-296443137f4288cb030e92859ccfbe3204bc1088.tar.bz2
KiCad-eSim-296443137f4288cb030e92859ccfbe3204bc1088.zip
initial commit
Diffstat (limited to 'lib/python2.7/site-packages/pip/req')
-rw-r--r--lib/python2.7/site-packages/pip/req/__init__.py10
-rw-r--r--lib/python2.7/site-packages/pip/req/req_file.py342
-rw-r--r--lib/python2.7/site-packages/pip/req/req_install.py1204
-rw-r--r--lib/python2.7/site-packages/pip/req/req_set.py798
-rw-r--r--lib/python2.7/site-packages/pip/req/req_uninstall.py195
5 files changed, 2549 insertions, 0 deletions
diff --git a/lib/python2.7/site-packages/pip/req/__init__.py b/lib/python2.7/site-packages/pip/req/__init__.py
new file mode 100644
index 0000000..00185a4
--- /dev/null
+++ b/lib/python2.7/site-packages/pip/req/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+
+from .req_install import InstallRequirement
+from .req_set import RequirementSet, Requirements
+from .req_file import parse_requirements
+
+__all__ = [
+ "RequirementSet", "Requirements", "InstallRequirement",
+ "parse_requirements",
+]
diff --git a/lib/python2.7/site-packages/pip/req/req_file.py b/lib/python2.7/site-packages/pip/req/req_file.py
new file mode 100644
index 0000000..821df22
--- /dev/null
+++ b/lib/python2.7/site-packages/pip/req/req_file.py
@@ -0,0 +1,342 @@
+"""
+Requirements file parsing
+"""
+
+from __future__ import absolute_import
+
+import os
+import re
+import shlex
+import sys
+import optparse
+import warnings
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves import filterfalse
+
+import pip
+from pip.download import get_file_content
+from pip.req.req_install import InstallRequirement
+from pip.exceptions import (RequirementsFileParseError)
+from pip.utils.deprecation import RemovedInPip10Warning
+from pip import cmdoptions
+
+__all__ = ['parse_requirements']
+
+SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
+COMMENT_RE = re.compile(r'(^|\s)+#.*$')
+
+SUPPORTED_OPTIONS = [
+ cmdoptions.constraints,
+ cmdoptions.editable,
+ cmdoptions.requirements,
+ cmdoptions.no_index,
+ cmdoptions.index_url,
+ cmdoptions.find_links,
+ cmdoptions.extra_index_url,
+ cmdoptions.allow_external,
+ cmdoptions.allow_all_external,
+ cmdoptions.no_allow_external,
+ cmdoptions.allow_unsafe,
+ cmdoptions.no_allow_unsafe,
+ cmdoptions.use_wheel,
+ cmdoptions.no_use_wheel,
+ cmdoptions.always_unzip,
+ cmdoptions.no_binary,
+ cmdoptions.only_binary,
+ cmdoptions.pre,
+ cmdoptions.process_dependency_links,
+ cmdoptions.trusted_host,
+ cmdoptions.require_hashes,
+]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ = [
+ cmdoptions.install_options,
+ cmdoptions.global_options,
+ cmdoptions.hash,
+]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
+
+
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    # A session is mandatory: fail loudly instead of silently creating one.
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    # Walk the cleaned-up (joined, comment-stripped, filtered) lines and
    # delegate each logical line to process_line, flattening its results.
    for lineno, text in preprocess(content, options):
        for requirement in process_line(text, filename, lineno, finder,
                                        comes_from, options, session,
                                        wheel_cache, constraint=constraint):
            yield requirement
+
+
def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    # Build the lazy pipeline inside-out: number raw lines (1-based), glue
    # backslash continuations, drop comments/blanks, then apply the optional
    # --skip-requirements-regex filter.
    numbered = enumerate(content.splitlines(), start=1)
    return skip_regex(ignore_comments(join_lines(numbered)), options)
+
+
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param line: one logical (joined, comment-free) requirements-file line.
    :param filename: path or URL of the file the line came from; used both
        for error/origin messages and to resolve nested relative paths.
    :param line_number: 1-based index of the line, for origin messages.
    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    # A fresh parser per line keeps per-line options from leaking between
    # lines; index_url is cleared so we can tell if this line set it.
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    # Only the option part of the line goes through shlex; the args part may
    # contain environment markers that shlex would corrupt.
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
            wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        # NOTE: recursion — a nested file may itself nest further files.
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        # The --allow-* flags are accepted but inert; warn so users know.
        if opts.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        # --index-url replaces the index list; --no-index clears it.
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)
+
+
def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :param line: one logical requirements-file line.
    :return: ``(args, options)`` tuple of space-joined strings; either part
        may be empty.
    """
    tokens = line.split(' ')
    args = []
    for i, token in enumerate(tokens):
        # Everything before the first option-looking token belongs to args.
        # A single '-' prefix test suffices: '--foo' also starts with '-'
        # (the original `or token.startswith('--')` was redundant).
        if token.startswith('-'):
            return ' '.join(args), ' '.join(tokens[i:])
        args.append(token)
    return ' '.join(args), ''
+
+
def build_parser():
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    # Register every supported option; each entry is a factory producing a
    # fresh optparse Option instance.
    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser
+
+
def join_lines(lines_enum):
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    first_lineno = None
    buffered = []
    for lineno, text in lines_enum:
        is_comment = bool(COMMENT_RE.match(text))
        if text.endswith('\\') and not is_comment:
            # Continuation: remember where the logical line started and
            # accumulate the fragment without its trailing backslash.
            if not buffered:
                first_lineno = lineno
            buffered.append(text.strip('\\'))
            continue
        if is_comment:
            # this ensures comments are always matched later
            text = ' ' + text
        if buffered:
            buffered.append(text)
            yield first_lineno, ''.join(buffered)
            buffered = []
        else:
            yield lineno, text

    # last line contains \
    if buffered:
        yield first_lineno, ''.join(buffered)

    # TODO: handle space after '\'.
+
+
def ignore_comments(lines_enum):
    """
    Strips comments and filter empty lines.
    """
    for lineno, raw in lines_enum:
        # Drop the comment portion, then skip lines that end up blank.
        stripped = COMMENT_RE.sub('', raw).strip()
        if stripped:
            yield lineno, stripped
+
+
def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    pattern_text = options.skip_requirements_regex if options else None
    if not pattern_text:
        # Nothing to filter: hand back the iterator untouched.
        return lines_enum
    matcher = re.compile(pattern_text)
    return filterfalse(lambda item: matcher.search(item[1]), lines_enum)
diff --git a/lib/python2.7/site-packages/pip/req/req_install.py b/lib/python2.7/site-packages/pip/req/req_install.py
new file mode 100644
index 0000000..1a98f37
--- /dev/null
+++ b/lib/python2.7/site-packages/pip/req/req_install.py
@@ -0,0 +1,1204 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+import traceback
+import warnings
+import zipfile
+
+from distutils import sysconfig
+from distutils.util import change_root
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources, six
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version, parse as parse_version
+from pip._vendor.six.moves import configparser
+
+import pip.wheel
+
+from pip.compat import native_str, get_stdlib, WINDOWS
+from pip.download import is_url, url_to_path, path_to_url, is_archive_file
+from pip.exceptions import (
+ InstallationError, UninstallationError,
+)
+from pip.locations import (
+ bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
+)
+from pip.utils import (
+ display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
+ dist_in_usersite, dist_in_site_packages, egg_link_path,
+ call_subprocess, read_text_file, FakeFile, _make_build_dir, ensure_dir,
+ get_installed_version, normalize_path, dist_is_local,
+)
+
+from pip.utils.hashes import Hashes
+from pip.utils.deprecation import RemovedInPip10Warning
+from pip.utils.logging import indent_log
+from pip.utils.setuptools_build import SETUPTOOLS_SHIM
+from pip.utils.ui import open_spinner
+from pip.req.req_uninstall import UninstallPathSet
+from pip.vcs import vcs
+from pip.wheel import move_wheel_files, Wheel
+
+
+logger = logging.getLogger(__name__)
+
+operators = specifiers.Specifier._operators.keys()
+
+
+def _strip_extras(path):
+ m = re.match(r'^(.+)(\[[^\]]+\])$', path)
+ extras = None
+ if m:
+ path_no_extras = m.group(1)
+ extras = m.group(2)
+ else:
+ path_no_extras = path
+
+ return path_no_extras, extras
+
+
def _safe_extras(extras):
    # Canonicalize each extra name via setuptools and return them as a set.
    return {pkg_resources.safe_extra(extra) for extra in extras}
+
+
+class InstallRequirement(object):
+
+ def __init__(self, req, comes_from, source_dir=None, editable=False,
+ link=None, as_egg=False, update=True,
+ pycompile=True, markers=None, isolated=False, options=None,
+ wheel_cache=None, constraint=False):
+ self.extras = ()
+ if isinstance(req, six.string_types):
+ try:
+ req = Requirement(req)
+ except InvalidRequirement:
+ if os.path.sep in req:
+ add_msg = "It looks like a path. Does it exist ?"
+ elif '=' in req and not any(op in req for op in operators):
+ add_msg = "= is not a valid operator. Did you mean == ?"
+ else:
+ add_msg = traceback.format_exc()
+ raise InstallationError(
+ "Invalid requirement: '%s'\n%s" % (req, add_msg))
+ self.extras = _safe_extras(req.extras)
+
+ self.req = req
+ self.comes_from = comes_from
+ self.constraint = constraint
+ self.source_dir = source_dir
+ self.editable = editable
+
+ self._wheel_cache = wheel_cache
+ self.link = self.original_link = link
+ self.as_egg = as_egg
+ if markers is not None:
+ self.markers = markers
+ else:
+ self.markers = req and req.marker
+ self._egg_info_path = None
+ # This holds the pkg_resources.Distribution object if this requirement
+ # is already available:
+ self.satisfied_by = None
+ # This hold the pkg_resources.Distribution object if this requirement
+ # conflicts with another installed distribution:
+ self.conflicts_with = None
+ # Temporary build location
+ self._temp_build_dir = None
+ # Used to store the global directory where the _temp_build_dir should
+ # have been created. Cf _correct_build_location method.
+ self._ideal_build_dir = None
+ # True if the editable should be updated:
+ self.update = update
+ # Set to True after successful installation
+ self.install_succeeded = None
+ # UninstallPathSet of uninstalled distribution (for possible rollback)
+ self.uninstalled = None
+ # Set True if a legitimate do-nothing-on-uninstall has happened - e.g.
+ # system site packages, stdlib packages.
+ self.nothing_to_uninstall = False
+ self.use_user_site = False
+ self.target_dir = None
+ self.options = options if options else {}
+ self.pycompile = pycompile
+ # Set to True after successful preparation of this requirement
+ self.prepared = False
+
+ self.isolated = isolated
+
+ @classmethod
+ def from_editable(cls, editable_req, comes_from=None, default_vcs=None,
+ isolated=False, options=None, wheel_cache=None,
+ constraint=False):
+ from pip.index import Link
+
+ name, url, extras_override = parse_editable(
+ editable_req, default_vcs)
+ if url.startswith('file:'):
+ source_dir = url_to_path(url)
+ else:
+ source_dir = None
+
+ res = cls(name, comes_from, source_dir=source_dir,
+ editable=True,
+ link=Link(url),
+ constraint=constraint,
+ isolated=isolated,
+ options=options if options else {},
+ wheel_cache=wheel_cache)
+
+ if extras_override is not None:
+ res.extras = _safe_extras(extras_override)
+
+ return res
+
+ @classmethod
+ def from_line(
+ cls, name, comes_from=None, isolated=False, options=None,
+ wheel_cache=None, constraint=False):
+ """Creates an InstallRequirement from a name, which might be a
+ requirement, directory containing 'setup.py', filename, or URL.
+ """
+ from pip.index import Link
+
+ if is_url(name):
+ marker_sep = '; '
+ else:
+ marker_sep = ';'
+ if marker_sep in name:
+ name, markers = name.split(marker_sep, 1)
+ markers = markers.strip()
+ if not markers:
+ markers = None
+ else:
+ markers = Marker(markers)
+ else:
+ markers = None
+ name = name.strip()
+ req = None
+ path = os.path.normpath(os.path.abspath(name))
+ link = None
+ extras = None
+
+ if is_url(name):
+ link = Link(name)
+ else:
+ p, extras = _strip_extras(path)
+ if (os.path.isdir(p) and
+ (os.path.sep in name or name.startswith('.'))):
+
+ if not is_installable_dir(p):
+ raise InstallationError(
+ "Directory %r is not installable. File 'setup.py' "
+ "not found." % name
+ )
+ link = Link(path_to_url(p))
+ elif is_archive_file(p):
+ if not os.path.isfile(p):
+ logger.warning(
+ 'Requirement %r looks like a filename, but the '
+ 'file does not exist',
+ name
+ )
+ link = Link(path_to_url(p))
+
+ # it's a local file, dir, or url
+ if link:
+ # Handle relative file URLs
+ if link.scheme == 'file' and re.search(r'\.\./', link.url):
+ link = Link(
+ path_to_url(os.path.normpath(os.path.abspath(link.path))))
+ # wheel file
+ if link.is_wheel:
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
+ req = "%s==%s" % (wheel.name, wheel.version)
+ else:
+ # set the req to the egg fragment. when it's not there, this
+ # will become an 'unnamed' requirement
+ req = link.egg_fragment
+
+ # a requirement specifier
+ else:
+ req = name
+
+ options = options if options else {}
+ res = cls(req, comes_from, link=link, markers=markers,
+ isolated=isolated, options=options,
+ wheel_cache=wheel_cache, constraint=constraint)
+
+ if extras:
+ res.extras = _safe_extras(
+ Requirement('placeholder' + extras).extras)
+
+ return res
+
+ def __str__(self):
+ if self.req:
+ s = str(self.req)
+ if self.link:
+ s += ' from %s' % self.link.url
+ else:
+ s = self.link.url if self.link else None
+ if self.satisfied_by is not None:
+ s += ' in %s' % display_path(self.satisfied_by.location)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += ' (from %s)' % comes_from
+ return s
+
+ def __repr__(self):
+ return '<%s object: %s editable=%r>' % (
+ self.__class__.__name__, str(self), self.editable)
+
+ def populate_link(self, finder, upgrade, require_hashes):
+ """Ensure that if a link can be found for this, that it is found.
+
+ Note that self.link may still be None - if Upgrade is False and the
+ requirement is already installed.
+
+ If require_hashes is True, don't use the wheel cache, because cached
+ wheels, always built locally, have different hashes than the files
+ downloaded from the index server and thus throw false hash mismatches.
+ Furthermore, cached wheels at present have undeterministic contents due
+ to file modification times.
+ """
+ if self.link is None:
+ self.link = finder.find_requirement(self, upgrade)
+ if self._wheel_cache is not None and not require_hashes:
+ old_link = self.link
+ self.link = self._wheel_cache.cached_wheel(self.link, self.name)
+ if old_link != self.link:
+ logger.debug('Using cached wheel link: %s', self.link)
+
+ @property
+ def specifier(self):
+ return self.req.specifier
+
+ @property
+ def is_pinned(self):
+ """Return whether I am pinned to an exact version.
+
+ For example, some-package==1.2 is pinned; some-package>1.2 is not.
+ """
+ specifiers = self.specifier
+ return (len(specifiers) == 1 and
+ next(iter(specifiers)).operator in ('==', '==='))
+
+ def from_path(self):
+ if self.req is None:
+ return None
+ s = str(self.req)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += '->' + comes_from
+ return s
+
+ def build_location(self, build_dir):
+ if self._temp_build_dir is not None:
+ return self._temp_build_dir
+ if self.req is None:
+ # for requirement via a path to a directory: the name of the
+ # package is not available yet so we create a temp directory
+ # Once run_egg_info will have run, we'll be able
+ # to fix it via _correct_build_location
+ # Some systems have /tmp as a symlink which confuses custom
+ # builds (such as numpy). Thus, we ensure that the real path
+ # is returned.
+ self._temp_build_dir = os.path.realpath(
+ tempfile.mkdtemp('-build', 'pip-')
+ )
+ self._ideal_build_dir = build_dir
+ return self._temp_build_dir
+ if self.editable:
+ name = self.name.lower()
+ else:
+ name = self.name
+ # FIXME: Is there a better place to create the build_dir? (hg and bzr
+ # need this)
+ if not os.path.exists(build_dir):
+ logger.debug('Creating directory %s', build_dir)
+ _make_build_dir(build_dir)
+ return os.path.join(build_dir, name)
+
+ def _correct_build_location(self):
+ """Move self._temp_build_dir to self._ideal_build_dir/self.req.name
+
+ For some requirements (e.g. a path to a directory), the name of the
+ package is not available until we run egg_info, so the build_location
+ will return a temporary directory and store the _ideal_build_dir.
+
+ This is only called by self.egg_info_path to fix the temporary build
+ directory.
+ """
+ if self.source_dir is not None:
+ return
+ assert self.req is not None
+ assert self._temp_build_dir
+ assert self._ideal_build_dir
+ old_location = self._temp_build_dir
+ self._temp_build_dir = None
+ new_location = self.build_location(self._ideal_build_dir)
+ if os.path.exists(new_location):
+ raise InstallationError(
+ 'A package already exists in %s; please remove it to continue'
+ % display_path(new_location))
+ logger.debug(
+ 'Moving package %s from %s to new location %s',
+ self, display_path(old_location), display_path(new_location),
+ )
+ shutil.move(old_location, new_location)
+ self._temp_build_dir = new_location
+ self._ideal_build_dir = None
+ self.source_dir = new_location
+ self._egg_info_path = None
+
+ @property
+ def name(self):
+ if self.req is None:
+ return None
+ return native_str(pkg_resources.safe_name(self.req.name))
+
+ @property
+ def setup_py_dir(self):
+ return os.path.join(
+ self.source_dir,
+ self.link and self.link.subdirectory_fragment or '')
+
+ @property
+ def setup_py(self):
+ assert self.source_dir, "No source dir for %s" % self
+ try:
+ import setuptools # noqa
+ except ImportError:
+ if get_installed_version('setuptools') is None:
+ add_msg = "Please install setuptools."
+ else:
+ add_msg = traceback.format_exc()
+ # Setuptools is not available
+ raise InstallationError(
+ "Could not import setuptools which is required to "
+ "install from a source distribution.\n%s" % add_msg
+ )
+
+ setup_py = os.path.join(self.setup_py_dir, 'setup.py')
+
+ # Python2 __file__ should not be unicode
+ if six.PY2 and isinstance(setup_py, six.text_type):
+ setup_py = setup_py.encode(sys.getfilesystemencoding())
+
+ return setup_py
+
+ def run_egg_info(self):
+ assert self.source_dir
+ if self.name:
+ logger.debug(
+ 'Running setup.py (path:%s) egg_info for package %s',
+ self.setup_py, self.name,
+ )
+ else:
+ logger.debug(
+ 'Running setup.py (path:%s) egg_info for package from %s',
+ self.setup_py, self.link,
+ )
+
+ with indent_log():
+ script = SETUPTOOLS_SHIM % self.setup_py
+ base_cmd = [sys.executable, '-c', script]
+ if self.isolated:
+ base_cmd += ["--no-user-cfg"]
+ egg_info_cmd = base_cmd + ['egg_info']
+ # We can't put the .egg-info files at the root, because then the
+ # source code will be mistaken for an installed egg, causing
+ # problems
+ if self.editable:
+ egg_base_option = []
+ else:
+ egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
+ ensure_dir(egg_info_dir)
+ egg_base_option = ['--egg-base', 'pip-egg-info']
+ call_subprocess(
+ egg_info_cmd + egg_base_option,
+ cwd=self.setup_py_dir,
+ show_stdout=False,
+ command_desc='python setup.py egg_info')
+
+ if not self.req:
+ if isinstance(parse_version(self.pkg_info()["Version"]), Version):
+ op = "=="
+ else:
+ op = "==="
+ self.req = Requirement(
+ "".join([
+ self.pkg_info()["Name"],
+ op,
+ self.pkg_info()["Version"],
+ ])
+ )
+ self._correct_build_location()
+ else:
+ metadata_name = canonicalize_name(self.pkg_info()["Name"])
+ if canonicalize_name(self.req.name) != metadata_name:
+ logger.warning(
+ 'Running setup.py (path:%s) egg_info for package %s '
+ 'produced metadata for project name %s. Fix your '
+ '#egg=%s fragments.',
+ self.setup_py, self.name, metadata_name, self.name
+ )
+ self.req = Requirement(metadata_name)
+
+ def egg_info_data(self, filename):
+ if self.satisfied_by is not None:
+ if not self.satisfied_by.has_metadata(filename):
+ return None
+ return self.satisfied_by.get_metadata(filename)
+ assert self.source_dir
+ filename = self.egg_info_path(filename)
+ if not os.path.exists(filename):
+ return None
+ data = read_text_file(filename)
+ return data
+
+ def egg_info_path(self, filename):
+ if self._egg_info_path is None:
+ if self.editable:
+ base = self.source_dir
+ else:
+ base = os.path.join(self.setup_py_dir, 'pip-egg-info')
+ filenames = os.listdir(base)
+ if self.editable:
+ filenames = []
+ for root, dirs, files in os.walk(base):
+ for dir in vcs.dirnames:
+ if dir in dirs:
+ dirs.remove(dir)
+ # Iterate over a copy of ``dirs``, since mutating
+ # a list while iterating over it can cause trouble.
+ # (See https://github.com/pypa/pip/pull/462.)
+ for dir in list(dirs):
+ # Don't search in anything that looks like a virtualenv
+ # environment
+ if (
+ os.path.lexists(
+ os.path.join(root, dir, 'bin', 'python')
+ ) or
+ os.path.exists(
+ os.path.join(
+ root, dir, 'Scripts', 'Python.exe'
+ )
+ )):
+ dirs.remove(dir)
+ # Also don't search through tests
+ elif dir == 'test' or dir == 'tests':
+ dirs.remove(dir)
+ filenames.extend([os.path.join(root, dir)
+ for dir in dirs])
+ filenames = [f for f in filenames if f.endswith('.egg-info')]
+
+ if not filenames:
+ raise InstallationError(
+ 'No files/directories in %s (from %s)' % (base, filename)
+ )
+ assert filenames, \
+ "No files/directories in %s (from %s)" % (base, filename)
+
+ # if we have more than one match, we pick the toplevel one. This
+ # can easily be the case if there is a dist folder which contains
+ # an extracted tarball for testing purposes.
+ if len(filenames) > 1:
+ filenames.sort(
+ key=lambda x: x.count(os.path.sep) +
+ (os.path.altsep and x.count(os.path.altsep) or 0)
+ )
+ self._egg_info_path = os.path.join(base, filenames[0])
+ return os.path.join(self._egg_info_path, filename)
+
+ def pkg_info(self):
+ p = FeedParser()
+ data = self.egg_info_data('PKG-INFO')
+ if not data:
+ logger.warning(
+ 'No PKG-INFO file found in %s',
+ display_path(self.egg_info_path('PKG-INFO')),
+ )
+ p.feed(data or '')
+ return p.close()
+
+ _requirements_section_re = re.compile(r'\[(.*?)\]')
+
+ @property
+ def installed_version(self):
+ return get_installed_version(self.name)
+
+ def assert_source_matches_version(self):
+ assert self.source_dir
+ version = self.pkg_info()['version']
+ if self.req.specifier and version not in self.req.specifier:
+ logger.warning(
+ 'Requested %s, but installing version %s',
+ self,
+ self.installed_version,
+ )
+ else:
+ logger.debug(
+ 'Source in %s has version %s, which satisfies requirement %s',
+ display_path(self.source_dir),
+ version,
+ self,
+ )
+
+ def update_editable(self, obtain=True):
+ if not self.link:
+ logger.debug(
+ "Cannot update repository at %s; repository location is "
+ "unknown",
+ self.source_dir,
+ )
+ return
+ assert self.editable
+ assert self.source_dir
+ if self.link.scheme == 'file':
+ # Static paths don't get updated
+ return
+ assert '+' in self.link.url, "bad url: %r" % self.link.url
+ if not self.update:
+ return
+ vc_type, url = self.link.url.split('+', 1)
+ backend = vcs.get_backend(vc_type)
+ if backend:
+ vcs_backend = backend(self.link.url)
+ if obtain:
+ vcs_backend.obtain(self.source_dir)
+ else:
+ vcs_backend.export(self.source_dir)
+ else:
+ assert 0, (
+ 'Unexpected version control type (in %s): %s'
+ % (self.link, vc_type))
+
+ def uninstall(self, auto_confirm=False):
+ """
+ Uninstall the distribution currently satisfying this requirement.
+
+ Prompts before removing or modifying files unless
+ ``auto_confirm`` is True.
+
+ Refuses to delete or modify files outside of ``sys.prefix`` -
+ thus uninstallation within a virtual environment can only
+ modify that virtual environment, even if the virtualenv is
+ linked to global site-packages.
+
+ """
+ if not self.check_if_exists():
+ raise UninstallationError(
+ "Cannot uninstall requirement %s, not installed" % (self.name,)
+ )
+ dist = self.satisfied_by or self.conflicts_with
+
+ dist_path = normalize_path(dist.location)
+ if not dist_is_local(dist):
+ logger.info(
+ "Not uninstalling %s at %s, outside environment %s",
+ dist.key,
+ dist_path,
+ sys.prefix,
+ )
+ self.nothing_to_uninstall = True
+ return
+
+ if dist_path in get_stdlib():
+ logger.info(
+ "Not uninstalling %s at %s, as it is in the standard library.",
+ dist.key,
+ dist_path,
+ )
+ self.nothing_to_uninstall = True
+ return
+
+ paths_to_remove = UninstallPathSet(dist)
+ develop_egg_link = egg_link_path(dist)
+ develop_egg_link_egg_info = '{0}.egg-info'.format(
+ pkg_resources.to_filename(dist.project_name))
+ egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
+ # Special case for distutils installed package
+ distutils_egg_info = getattr(dist._provider, 'path', None)
+
+ # Uninstall cases order do matter as in the case of 2 installs of the
+ # same package, pip needs to uninstall the currently detected version
+ if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
+ not dist.egg_info.endswith(develop_egg_link_egg_info)):
+ # if dist.egg_info.endswith(develop_egg_link_egg_info), we
+ # are in fact in the develop_egg_link case
+ paths_to_remove.add(dist.egg_info)
+ if dist.has_metadata('installed-files.txt'):
+ for installed_file in dist.get_metadata(
+ 'installed-files.txt').splitlines():
+ path = os.path.normpath(
+ os.path.join(dist.egg_info, installed_file)
+ )
+ paths_to_remove.add(path)
+ # FIXME: need a test for this elif block
+ # occurs with --single-version-externally-managed/--record outside
+ # of pip
+ elif dist.has_metadata('top_level.txt'):
+ if dist.has_metadata('namespace_packages.txt'):
+ namespaces = dist.get_metadata('namespace_packages.txt')
+ else:
+ namespaces = []
+ for top_level_pkg in [
+ p for p
+ in dist.get_metadata('top_level.txt').splitlines()
+ if p and p not in namespaces]:
+ path = os.path.join(dist.location, top_level_pkg)
+ paths_to_remove.add(path)
+ paths_to_remove.add(path + '.py')
+ paths_to_remove.add(path + '.pyc')
+ paths_to_remove.add(path + '.pyo')
+
+ elif distutils_egg_info:
+ warnings.warn(
+ "Uninstalling a distutils installed project ({0}) has been "
+ "deprecated and will be removed in a future version. This is "
+ "due to the fact that uninstalling a distutils project will "
+ "only partially uninstall the project.".format(self.name),
+ RemovedInPip10Warning,
+ )
+ paths_to_remove.add(distutils_egg_info)
+
+ elif dist.location.endswith('.egg'):
+ # package installed by easy_install
+ # We cannot match on dist.egg_name because it can slightly vary
+ # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+ paths_to_remove.add(dist.location)
+ easy_install_egg = os.path.split(dist.location)[1]
+ easy_install_pth = os.path.join(os.path.dirname(dist.location),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
+
+ elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
+ for path in pip.wheel.uninstallation_paths(dist):
+ paths_to_remove.add(path)
+
+ elif develop_egg_link:
+ # develop egg
+ with open(develop_egg_link, 'r') as fh:
+ link_pointer = os.path.normcase(fh.readline().strip())
+ assert (link_pointer == dist.location), (
+ 'Egg-link %s does not match installed location of %s '
+ '(at %s)' % (link_pointer, self.name, dist.location)
+ )
+ paths_to_remove.add(develop_egg_link)
+ easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, dist.location)
+
+ else:
+ logger.debug(
+ 'Not sure how to uninstall: %s - Check: %s',
+ dist, dist.location)
+
+ # find distutils scripts= scripts
+ if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
+ for script in dist.metadata_listdir('scripts'):
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ paths_to_remove.add(os.path.join(bin_dir, script))
+ if WINDOWS:
+ paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
+
+ # find console_scripts
+ if dist.has_metadata('entry_points.txt'):
+ if six.PY2:
+ options = {}
+ else:
+ options = {"delimiters": ('=', )}
+ config = configparser.SafeConfigParser(**options)
+ config.readfp(
+ FakeFile(dist.get_metadata_lines('entry_points.txt'))
+ )
+ if config.has_section('console_scripts'):
+ for name, value in config.items('console_scripts'):
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ paths_to_remove.add(os.path.join(bin_dir, name))
+ if WINDOWS:
+ paths_to_remove.add(
+ os.path.join(bin_dir, name) + '.exe'
+ )
+ paths_to_remove.add(
+ os.path.join(bin_dir, name) + '.exe.manifest'
+ )
+ paths_to_remove.add(
+ os.path.join(bin_dir, name) + '-script.py'
+ )
+
+ paths_to_remove.remove(auto_confirm)
+ self.uninstalled = paths_to_remove
+
def rollback_uninstall(self):
    """Undo a previously performed uninstall by restoring the files
    recorded in ``self.uninstalled``.

    Logs an error when no uninstall has been recorded.
    """
    if not self.uninstalled:
        logger.error(
            "Can't rollback %s, nothing uninstalled.", self.name,
        )
        return
    self.uninstalled.rollback()
+
def commit_uninstall(self):
    """Finalize an uninstall by permanently removing the saved files."""
    if self.uninstalled:
        self.uninstalled.commit()
        return
    # A requirement that was never installed has nothing to commit and
    # is not an error; anything else is.
    if not self.nothing_to_uninstall:
        logger.error(
            "Can't commit %s, nothing uninstalled.", self.name,
        )
+
def archive(self, build_dir):
    """Create a zip archive of this requirement's source tree in
    *build_dir*.

    If an archive with the target name already exists the user is asked
    whether to (i)gnore, (w)ipe, (b)ackup or (a)bort.

    :param build_dir: directory in which the archive is created.
    """
    assert self.source_dir
    create_archive = True
    archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
    archive_path = os.path.join(build_dir, archive_name)
    if os.path.exists(archive_path):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
            display_path(archive_path), ('i', 'w', 'b', 'a'))
        if response == 'i':
            create_archive = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(archive_path))
            os.remove(archive_path)
        elif response == 'b':
            dest_file = backup_dir(archive_path)
            logger.warning(
                'Backing up %s to %s',
                display_path(archive_path),
                display_path(dest_file),
            )
            shutil.move(archive_path, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if create_archive:
        # Renamed locals: the originals shadowed the ``zip`` and ``dir``
        # builtins; try/finally ensures the archive is closed (and not
        # left truncated/locked) even if writing a member raises.
        zip_file = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED,
            allowZip64=True
        )
        try:
            base_dir = os.path.normcase(
                os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(base_dir):
                if 'pip-egg-info' in dirnames:
                    # metadata generated by pip itself is not part of
                    # the project's own sources
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, base_dir)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_file.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, base_dir)
                    zip_file.write(filename, self.name + '/' + name)
        finally:
            zip_file.close()
        logger.info('Saved %s', display_path(archive_path))
+
def _clean_zip_name(self, name, prefix):
    """Return *name* relative to *prefix*, using '/' separators as
    required for member names inside zip archives.
    """
    expected_head = prefix + os.path.sep
    assert name.startswith(expected_head), (
        "name %r doesn't start with prefix %r" % (name, prefix)
    )
    relative = name[len(prefix) + 1:]
    return relative.replace(os.path.sep, '/')
+
def match_markers(self, extras_requested=None):
    """Evaluate this requirement's environment markers.

    :param extras_requested: extras requested for the parent
        requirement; each is tried in turn as the value of ``extra``.
    :return: True when there are no markers, or when the markers
        evaluate true for at least one requested extra.
    """
    # Provide an (empty) extra so the markers can be evaluated safely
    # without matching any real extra.
    extras = extras_requested or ('',)
    if self.markers is None:
        return True
    for extra in extras:
        if self.markers.evaluate({'extra': extra}):
            return True
    return False
+
def install(self, install_options, global_options=[], root=None,
            prefix=None):
    """Install this requirement.

    Editable requirements are installed via ``setup.py develop``,
    wheels by moving their files into place, everything else via
    ``setup.py install`` (recording installed files afterwards).

    :param install_options: extra options for the ``install`` command.
    :param global_options: options placed before the ``install``
        command.  The shared ``[]`` default is kept for interface
        compatibility but is deliberately never mutated here.
    :param root: optional alternative root directory.
    :param prefix: optional installation prefix.
    """
    if self.editable:
        self.install_editable(
            install_options, global_options, prefix=prefix)
        return
    if self.is_wheel:
        version = pip.wheel.wheel_version(self.source_dir)
        pip.wheel.check_compatibility(version, self.name)

        self.move_wheel_files(self.source_dir, root=root, prefix=prefix)
        self.install_succeeded = True
        return

    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    # Bug fix: the original used ``+=``, which extends the caller's
    # list in place and - worse - the shared ``[]`` default, so options
    # leaked across calls.  Build fresh lists instead.
    global_options = list(global_options) + \
        self.options.get('global_options', [])
    install_options = list(install_options) + \
        self.options.get('install_options', [])

    if self.isolated:
        global_options = list(global_options) + ["--no-user-cfg"]

    temp_location = tempfile.mkdtemp('-record', 'pip-')
    record_filename = os.path.join(temp_location, 'install-record.txt')
    try:
        install_args = self.get_install_args(
            global_options, record_filename, root, prefix)
        msg = 'Running setup.py install for %s' % (self.name,)
        with open_spinner(msg) as spinner:
            with indent_log():
                call_subprocess(
                    install_args + install_options,
                    cwd=self.setup_py_dir,
                    show_stdout=False,
                    spinner=spinner,
                )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        self.install_succeeded = True
        if self.as_egg:
            # there's no --always-unzip option we can pass to install
            # command so we unable to save the installed-files.txt
            return

        def prepend_root(path):
            # Rewrite absolute record paths to point inside the --root
            # tree when one was given.
            if root is None or not os.path.isabs(path):
                return path
            else:
                return change_root(root, path)

        with open(record_filename) as f:
            for line in f:
                directory = os.path.dirname(line)
                if directory.endswith('.egg-info'):
                    egg_info_dir = prepend_root(directory)
                    break
            else:
                logger.warning(
                    'Could not find .egg-info directory in install record'
                    ' for %s',
                    self,
                )
                # FIXME: put the record somewhere
                # FIXME: should this be an error?
                return
        new_lines = []
        with open(record_filename) as f:
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    os.path.relpath(
                        prepend_root(filename), egg_info_dir)
                )
        inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
        with open(inst_files_path, 'w') as f:
            f.write('\n'.join(new_lines) + '\n')
    finally:
        if os.path.exists(record_filename):
            os.remove(record_filename)
        rmtree(temp_location)
+
def ensure_has_source_dir(self, parent_dir):
    """Ensure that a source_dir is set.

    This will create a temporary build dir if the name of the requirement
    isn't known yet.

    :param parent_dir: The ideal pip parent_dir for the source_dir.
        Generally src_dir for editables and build_dir for sdists.
    :return: self.source_dir
    """
    if self.source_dir is not None:
        return self.source_dir
    self.source_dir = self.build_location(parent_dir)
    return self.source_dir
+
def get_install_args(self, global_options, record_filename, root, prefix):
    """Build the argv list used to run ``setup.py install``.

    :param global_options: options inserted before the ``install``
        command.
    :param record_filename: file setuptools fills with the list of
        installed files (passed via ``--record``).
    :param root: optional alternative root directory (``--root``).
    :param prefix: optional installation prefix (``--prefix``).
    :return: full argv list, starting with ``sys.executable``.
    """
    # -u: unbuffered output so logging sees it promptly;
    # -c: run the setuptools shim rather than setup.py directly.
    install_args = [sys.executable, "-u"]
    install_args.append('-c')
    install_args.append(SETUPTOOLS_SHIM % self.setup_py)
    install_args += list(global_options) + \
        ['install', '--record', record_filename]

    if not self.as_egg:
        install_args += ['--single-version-externally-managed']

    if root is not None:
        install_args += ['--root', root]
    if prefix is not None:
        install_args += ['--prefix', prefix]

    if self.pycompile:
        install_args += ["--compile"]
    else:
        install_args += ["--no-compile"]

    if running_under_virtualenv():
        # Keep header files inside the virtualenv instead of the
        # interpreter's global include directory.
        py_ver_str = 'python' + sysconfig.get_python_version()
        install_args += ['--install-headers',
                         os.path.join(sys.prefix, 'include', 'site',
                                      py_ver_str, self.name)]

    return install_args
+
def remove_temporary_source(self):
    """Remove the source files from this requirement, if they are marked
    for deletion.
    """
    delete_marker = None
    if self.source_dir:
        delete_marker = os.path.join(
            self.source_dir, PIP_DELETE_MARKER_FILENAME)
    if delete_marker and os.path.exists(delete_marker):
        logger.debug('Removing source in %s', self.source_dir)
        rmtree(self.source_dir)
        self.source_dir = None
    if self._temp_build_dir and os.path.exists(self._temp_build_dir):
        rmtree(self._temp_build_dir)
        self._temp_build_dir = None
+
def install_editable(self, install_options,
                     global_options=(), prefix=None):
    """Install this requirement in editable mode via
    ``setup.py develop --no-deps``.

    :param install_options: options appended after ``develop``.
    :param global_options: options inserted before ``develop``.
    :param prefix: optional installation prefix, forwarded as
        ``--prefix=...``.
    """
    logger.info('Running setup.py develop for %s', self.name)

    if self.isolated:
        # Ignore the user's distutils configuration in isolated mode.
        global_options = list(global_options) + ["--no-user-cfg"]

    if prefix:
        prefix_param = ['--prefix={0}'.format(prefix)]
        install_options = list(install_options) + prefix_param

    with indent_log():
        # FIXME: should we do --install-headers here too?
        call_subprocess(
            [
                sys.executable,
                '-c',
                SETUPTOOLS_SHIM % self.setup_py
            ] +
            list(global_options) +
            ['develop', '--no-deps'] +
            list(install_options),

            cwd=self.setup_py_dir,
            show_stdout=False)

    self.install_succeeded = True
+
def check_if_exists(self):
    """Find an installed distribution that satisfies or conflicts
    with this requirement, and set self.satisfied_by or
    self.conflicts_with appropriately.

    :return: True when an installed distribution was found (either
        satisfying or conflicting), False otherwise.
    :raises InstallationError: when a user-site install would lack
        sys.path precedence over a conflicting virtualenv install.
    """
    if self.req is None:
        return False
    try:
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None
        self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        if self.editable and self.satisfied_by:
            self.conflicts_with = self.satisfied_by
            # when installing editables, nothing pre-existing should ever
            # satisfy
            self.satisfied_by = None
        return True
    except pkg_resources.DistributionNotFound:
        return False
    except pkg_resources.VersionConflict:
        # Something is installed but at the wrong version: record it as
        # a conflict (subject to the user-site rules below).
        existing_dist = pkg_resources.get_distribution(
            self.req.name
        )
        if self.use_user_site:
            if dist_in_usersite(existing_dist):
                self.conflicts_with = existing_dist
            elif (running_under_virtualenv() and
                    dist_in_site_packages(existing_dist)):
                raise InstallationError(
                    "Will not install to the user site because it will "
                    "lack sys.path precedence to %s in %s" %
                    (existing_dist.project_name, existing_dist.location)
                )
        else:
            self.conflicts_with = existing_dist
    return True
+
@property
def is_wheel(self):
    """Truthy when this requirement's link points at a wheel; falsy
    (preserving the original falsy value) when there is no link.
    """
    if not self.link:
        return self.link
    return self.link.is_wheel
+
def move_wheel_files(self, wheeldir, root=None, prefix=None):
    """Install the unpacked wheel at *wheeldir* for this requirement.

    Delegates to the module-level ``move_wheel_files`` helper of the
    same name (imported elsewhere in this file - presumably from
    pip.wheel; confirm against the file header), forwarding the
    per-requirement options.
    """
    move_wheel_files(
        self.name, self.req, wheeldir,
        user=self.use_user_site,
        home=self.target_dir,
        root=root,
        prefix=prefix,
        pycompile=self.pycompile,
        isolated=self.isolated,
    )
+
def get_dist(self):
    """Return a pkg_resources.Distribution built from self.egg_info_path"""
    egg_info = self.egg_info_path('').rstrip('/')
    base_dir = os.path.dirname(egg_info)
    dist_name = os.path.splitext(os.path.basename(egg_info))[0]
    # The distribution lives in the directory containing the egg-info;
    # its metadata is read straight from that egg-info directory.
    return pkg_resources.Distribution(
        base_dir,
        project_name=dist_name,
        metadata=pkg_resources.PathMetadata(base_dir, egg_info),
    )
+
@property
def has_hash_options(self):
    """Return whether any known-good hashes are specified as options.

    These activate --require-hashes mode; hashes specified as part of a
    URL do not.

    """
    hash_options = self.options.get('hashes', {})
    return len(hash_options) > 0
+
def hashes(self, trust_internet=True):
    """Return a hash-comparer that considers my option- and URL-based
    hashes to be known-good.

    Hashes in URLs--ones embedded in the requirements file, not ones
    downloaded from an index server--are almost peers with ones from
    flags. They satisfy --require-hashes (whether it was implicitly or
    explicitly activated) but do not activate it. md5 and sha224 are not
    allowed in flags, which should nudge people toward good algos. We
    always OR all hashes together, even ones from URLs.

    :param trust_internet: Whether to trust URL-based (#md5=...) hashes
        downloaded from the internet, as by populate_link()
    :return: a ``Hashes`` object over all known-good digests.
    """
    # Bug fix: a plain .copy() is shallow, so appending the link hash
    # below mutated the lists inside self.options['hashes'], letting
    # URL hashes accumulate there across repeated calls.  Copy the
    # inner lists as well.
    good_hashes = dict(
        (name, list(digests))
        for name, digests in self.options.get('hashes', {}).items()
    )
    link = self.link if trust_internet else self.original_link
    if link and link.hash:
        good_hashes.setdefault(link.hash_name, []).append(link.hash)
    return Hashes(good_hashes)
+
+
def _strip_postfix(req):
    """
    Strip req postfix ( -dev, 0.2, etc )
    """
    # FIXME: use package_to_requirement?
    # Anchored substitution: either strips exactly one trailing
    # "-dev" / "-<digit>..." postfix or leaves the string untouched.
    return re.sub(r'^(.*?)(?:-dev|-\d.*)$', r'\1', req)
+
+
def parse_editable(editable_req, default_vcs=None):
    """Parses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]

    :raises InstallationError: when the path is not installable, the
        URL has no recognised VCS scheme, or no requirement name can be
        detected.
    """

    from pip.index import Link

    url = editable_req
    extras = None

    # If a file path is specified with extras, strip off the extras.
    m = re.match(r'^(.+)(\[[^\]]+\])$', url)
    if m:
        url_no_extras = m.group(1)
        extras = m.group(2)
    else:
        url_no_extras = url

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    # A bare "vcs:..." URL is normalised to "vcs+vcs:...".
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        if default_vcs:
            warnings.warn(
                "--default-vcs has been deprecated and will be removed in "
                "the future.",
                RemovedInPip10Warning,
            )
            url = default_vcs + '+' + url
        else:
            raise InstallationError(
                '%s should either be a path to a local project or a VCS url '
                'beginning with svn+, git+, hg+, or bzr+' %
                editable_req
            )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        # Note: a second, identical "if not package_name" raise used to
        # follow this block; it was unreachable dead code and has been
        # removed.
        raise InstallationError(
            "Could not detect requirement name, please specify one with #egg="
        )
    return _strip_postfix(package_name), url, None
diff --git a/lib/python2.7/site-packages/pip/req/req_set.py b/lib/python2.7/site-packages/pip/req/req_set.py
new file mode 100644
index 0000000..76aec06
--- /dev/null
+++ b/lib/python2.7/site-packages/pip/req/req_set.py
@@ -0,0 +1,798 @@
+from __future__ import absolute_import
+
+from collections import defaultdict
+from itertools import chain
+import logging
+import os
+
+from pip._vendor import pkg_resources
+from pip._vendor import requests
+
+from pip.compat import expanduser
+from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,
+ unpack_url)
+from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
+ DistributionNotFound, PreviousBuildDirError,
+ HashError, HashErrors, HashUnpinned,
+ DirectoryUrlHashUnsupported, VcsHashUnsupported,
+ UnsupportedPythonVersion)
+from pip.req.req_install import InstallRequirement
+from pip.utils import (
+ display_path, dist_in_usersite, ensure_dir, normalize_path)
+from pip.utils.hashes import MissingHashes
+from pip.utils.logging import indent_log
+from pip.utils.packaging import check_dist_requires_python
+from pip.vcs import vcs
+from pip.wheel import Wheel
+
+logger = logging.getLogger(__name__)
+
+
class Requirements(object):
    """A minimal insertion-ordered mapping used to track requirements
    by name while remembering the order in which they were added.
    """

    def __init__(self):
        self._keys = []
        self._dict = {}

    def keys(self):
        """Return the keys in insertion order."""
        return self._keys

    def values(self):
        """Return the stored values in insertion order."""
        return [self._dict[key] for key in self._keys]

    def __contains__(self, item):
        return item in self._keys

    def __setitem__(self, key, value):
        # Order is fixed by the first insertion; reassignment only
        # replaces the value, not the position.
        if key not in self._keys:
            self._keys.append(key)
        self._dict[key] = value

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        pairs = ('%r: %r' % (key, self._dict[key]) for key in self._keys)
        return 'Requirements({%s})' % ', '.join(pairs)
+
+
class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        # Bug fix: this previously raised NotImplementedError(self.dist),
        # a copy-paste error that made the exception point at the wrong
        # method.
        raise NotImplementedError(self.prep_for_dist)
+
+
def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    # Only a non-editable requirement backed by a wheel link gets the
    # wheel treatment; editables and everything else go the sdist path.
    if not req_to_install.editable and (
            req_to_install.link and req_to_install.link.is_wheel):
        return IsWheel(req_to_install)
    return IsSDist(req_to_install)
+
+
class IsWheel(DistAbstraction):
    """DistAbstraction for a requirement backed by an unpacked wheel."""

    def dist(self, finder):
        # The unpacked wheel directory holds the distribution's
        # metadata, so the first distribution found there is the one we
        # want.  *finder* is unused but required by the interface.
        return list(pkg_resources.find_distributions(
            self.req_to_install.source_dir))[0]

    def prep_for_dist(self):
        # Wheels need no build / egg-info step before their metadata
        # can be read.
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass
+
+
class IsSDist(DistAbstraction):
    """DistAbstraction for a source distribution (and editables)."""

    def dist(self, finder):
        dist = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self):
        # Generate egg-info metadata, then verify the unpacked source
        # matches the version the requirement asked for.
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()
+
+
class Installed(DistAbstraction):
    """DistAbstraction for a requirement that is already installed."""

    def dist(self, finder):
        # The distribution was located while checking for an existing
        # install; *finder* is unused but kept for interface parity.
        existing = self.req_to_install.satisfied_by
        return existing

    def prep_for_dist(self):
        # Nothing to prepare - the distribution already exists.
        pass
+
+
+class RequirementSet(object):
+
def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
             upgrade_strategy=None, ignore_installed=False, as_egg=False,
             target_dir=None, ignore_dependencies=False,
             force_reinstall=False, use_user_site=False, session=None,
             pycompile=True, isolated=False, wheel_download_dir=None,
             wheel_cache=None, require_hashes=False,
             ignore_requires_python=False):
    """Create a RequirementSet.

    :param wheel_download_dir: Where still-packed .whl files should be
        written to. If None they are written to the download_dir parameter.
        Separate to download_dir to permit only keeping wheel archives for
        pip wheel.
    :param download_dir: Where still packed archives should be written to.
        If None they are not saved, and are deleted immediately after
        unpacking.
    :param wheel_cache: The pip wheel cache, for passing to
        InstallRequirement.
    :raises TypeError: when no ``session`` keyword argument is given.
    """
    if session is None:
        # Fail loudly here rather than at the first network access.
        raise TypeError(
            "RequirementSet() missing 1 required keyword argument: "
            "'session'"
        )

    self.build_dir = build_dir
    self.src_dir = src_dir
    # XXX: download_dir and wheel_download_dir overlap semantically and may
    # be combined if we're willing to have non-wheel archives present in
    # the wheelhouse output by 'pip wheel'.
    self.download_dir = download_dir
    self.upgrade = upgrade
    self.upgrade_strategy = upgrade_strategy
    self.ignore_installed = ignore_installed
    self.force_reinstall = force_reinstall
    # Named requirements, in insertion order.
    self.requirements = Requirements()
    # Mapping of alias: real_name
    self.requirement_aliases = {}
    # Requirements with no name yet (url/path without #egg=).
    self.unnamed_requirements = []
    self.ignore_dependencies = ignore_dependencies
    self.ignore_requires_python = ignore_requires_python
    self.successfully_downloaded = []
    self.successfully_installed = []
    self.reqs_to_cleanup = []
    self.as_egg = as_egg
    self.use_user_site = use_user_site
    self.target_dir = target_dir  # set from --target option
    self.session = session
    self.pycompile = pycompile
    self.isolated = isolated
    if wheel_download_dir:
        wheel_download_dir = normalize_path(wheel_download_dir)
    self.wheel_download_dir = wheel_download_dir
    self._wheel_cache = wheel_cache
    self.require_hashes = require_hashes
    # Maps from install_req -> dependencies_of_install_req
    self._dependencies = defaultdict(list)
+
def __str__(self):
    """Space-separated top-level (user-supplied) requirements, sorted
    by name.
    """
    top_level = [
        req for req in self.requirements.values() if not req.comes_from
    ]
    top_level.sort(key=lambda req: req.name.lower())
    return ' '.join(str(req.req) for req in top_level)
+
def __repr__(self):
    """Debug representation listing every tracked requirement."""
    all_reqs = sorted(
        self.requirements.values(), key=lambda req: req.name.lower())
    formatted = ', '.join(str(req.req) for req in all_reqs)
    return ('<%s object; %d requirement(s): %s>'
            % (self.__class__.__name__, len(all_reqs), formatted))
+
def add_requirement(self, install_req, parent_req_name=None,
                    extras_requested=None):
    """Add install_req as a requirement to install.

    :param parent_req_name: The name of the requirement that needed this
        added. The name is used because when multiple unnamed requirements
        resolve to the same name, we could otherwise end up with dependency
        links that point outside the Requirements set. parent_req must
        already be added. Note that None implies that this is a user
        supplied requirement, vs an inferred one.
    :param extras_requested: an iterable of extras used to evaluate the
        environment markers.
    :return: Additional requirements to scan. That is either [] if
        the requirement is not applicable, or [install_req] if the
        requirement is applicable and has just been added.
    """
    name = install_req.name
    if not install_req.match_markers(extras_requested):
        logger.warning("Ignoring %s: markers '%s' don't match your "
                       "environment", install_req.name,
                       install_req.markers)
        return []

    # This check has to come after we filter requirements with the
    # environment markers.
    if install_req.link and install_req.link.is_wheel:
        wheel = Wheel(install_req.link.filename)
        if not wheel.supported():
            raise InstallationError(
                "%s is not a supported wheel on this platform." %
                wheel.filename
            )

    # Propagate the set-wide installation options onto the requirement.
    install_req.as_egg = self.as_egg
    install_req.use_user_site = self.use_user_site
    install_req.target_dir = self.target_dir
    install_req.pycompile = self.pycompile
    install_req.is_direct = (parent_req_name is None)

    if not name:
        # url or path requirement w/o an egg fragment
        self.unnamed_requirements.append(install_req)
        return [install_req]
    else:
        try:
            existing_req = self.get_requirement(name)
        except KeyError:
            existing_req = None
        # Two user-supplied requirements for the same name with
        # differing specifiers is an error.
        if (parent_req_name is None and existing_req and not
                existing_req.constraint and
                existing_req.extras == install_req.extras and not
                existing_req.req.specifier == install_req.req.specifier):
            raise InstallationError(
                'Double requirement given: %s (already in %s, name=%r)'
                % (install_req, existing_req, name))
        if not existing_req:
            # Add requirement
            self.requirements[name] = install_req
            # FIXME: what about other normalizations? E.g., _ vs. -?
            if name.lower() != name:
                self.requirement_aliases[name.lower()] = name
            result = [install_req]
        else:
            # Assume there's no need to scan, and that we've already
            # encountered this for scanning.
            result = []
            if not install_req.constraint and existing_req.constraint:
                if (install_req.link and not (existing_req.link and
                   install_req.link.path == existing_req.link.path)):
                    self.reqs_to_cleanup.append(install_req)
                    raise InstallationError(
                        "Could not satisfy constraints for '%s': "
                        "installation from path or url cannot be "
                        "constrained to a version" % name)
                # If we're now installing a constraint, mark the existing
                # object for real installation.
                existing_req.constraint = False
                existing_req.extras = tuple(
                    sorted(set(existing_req.extras).union(
                        set(install_req.extras))))
                logger.debug("Setting %s extras to: %s",
                             existing_req, existing_req.extras)
                # And now we need to scan this.
                result = [existing_req]
            # Canonicalise to the already-added object for the backref
            # check below.
            install_req = existing_req
        if parent_req_name:
            parent_req = self.get_requirement(parent_req_name)
            self._dependencies[parent_req].append(install_req)
        return result
+
def has_requirement(self, project_name):
    """Return True when *project_name* is tracked as a real
    (non-constraint) requirement, directly or through an alias.
    """
    name = project_name.lower()
    if name in self.requirements:
        if not self.requirements[name].constraint:
            return True
    if name in self.requirement_aliases:
        real_name = self.requirement_aliases[name]
        if not self.requirements[real_name].constraint:
            return True
    return False
+
@property
def has_requirements(self):
    """Truthy when at least one real (non-constraint) or unnamed
    requirement is present.  Note: returns the matching list itself,
    not a bool, preserving the original truthiness contract.
    """
    real_reqs = [
        req for req in self.requirements.values() if not req.constraint
    ]
    return real_reqs or self.unnamed_requirements
+
@property
def is_download(self):
    """Whether downloaded archives should be kept.

    NOTE(review): this property has side effects - it expands ``~`` in
    ``self.download_dir`` in place and validates that the directory
    exists.
    :raises InstallationError: when a download directory is configured
        but does not exist.
    """
    if self.download_dir:
        self.download_dir = expanduser(self.download_dir)
        if os.path.exists(self.download_dir):
            return True
        else:
            logger.critical('Could not find download directory')
            raise InstallationError(
                "Could not find or access download directory '%s'"
                % display_path(self.download_dir))
    return False
+
def get_requirement(self, project_name):
    """Look up a requirement by name, falling back to the lowercased
    name and to registered aliases.

    :raises KeyError: when no requirement matches.
    """
    for candidate in (project_name, project_name.lower()):
        if candidate in self.requirements:
            return self.requirements[candidate]
        if candidate in self.requirement_aliases:
            real_name = self.requirement_aliases[candidate]
            return self.requirements[real_name]
    raise KeyError("No project with the name %r" % project_name)
+
def uninstall(self, auto_confirm=False):
    """Uninstall every real (non-constraint) requirement in the set,
    committing each removal immediately.
    """
    for req in self.requirements.values():
        # Constraints only pin versions; they are never installed.
        if not req.constraint:
            req.uninstall(auto_confirm=auto_confirm)
            req.commit_uninstall()
+
def prepare_files(self, finder):
    """
    Prepare process. Create temp directories, download and/or unpack files.

    :param finder: the package finder used to locate distributions.
    :raises HashErrors: all collected hash failures, raised only after
        every requirement has been processed.
    """
    # make the wheelhouse
    if self.wheel_download_dir:
        ensure_dir(self.wheel_download_dir)

    # If any top-level requirement has a hash specified, enter
    # hash-checking mode, which requires hashes from all.
    # (Requirements.values() returns a plain list, so ``+`` works.)
    root_reqs = self.unnamed_requirements + self.requirements.values()
    require_hashes = (self.require_hashes or
                      any(req.has_hash_options for req in root_reqs))
    if require_hashes and self.as_egg:
        raise InstallationError(
            '--egg is not allowed with --require-hashes mode, since it '
            'delegates dependency resolution to setuptools and could thus '
            'result in installation of unhashed packages.')

    # Actually prepare the files, and collect any exceptions. Most hash
    # exceptions cannot be checked ahead of time, because
    # req.populate_link() needs to be called before we can make decisions
    # based on link type.
    # NOTE: _prepare_file appends to discovered_reqs while we iterate;
    # chain() keeps yielding the newly appended entries, so dependencies
    # discovered along the way are processed too.
    discovered_reqs = []
    hash_errors = HashErrors()
    for req in chain(root_reqs, discovered_reqs):
        try:
            discovered_reqs.extend(self._prepare_file(
                finder,
                req,
                require_hashes=require_hashes,
                ignore_dependencies=self.ignore_dependencies))
        except HashError as exc:
            exc.req = req
            hash_errors.append(exc)

    if hash_errors:
        raise hash_errors
+
def _is_upgrade_allowed(self, req):
    """Whether *req* may be upgraded under the active upgrade strategy."""
    if not self.upgrade:
        # Preserve the original falsy value (possibly None).
        return self.upgrade
    if self.upgrade_strategy == "eager":
        return True
    # "only-if-needed" only upgrades requirements the user asked for
    # directly.
    return self.upgrade_strategy == "only-if-needed" and req.is_direct
+
def _check_skip_installed(self, req_to_install, finder):
    """Check if req_to_install should be skipped.

    This will check if the req is installed, and whether we should upgrade
    or reinstall it, taking into account all the relevant user options.

    After calling this req_to_install will only have satisfied_by set to
    None if the req_to_install is to be upgraded/reinstalled etc. Any
    other value will be a dist recording the current thing installed that
    satisfies the requirement.

    Note that for vcs urls and the like we can't assess skipping in this
    routine - we simply identify that we need to pull the thing down,
    then later on it is pulled down and introspected to assess upgrade/
    reinstalls etc.

    :return: A text reason for why it was skipped, or None.
    """
    # Check whether to upgrade/reinstall this req or not.
    req_to_install.check_if_exists()
    if req_to_install.satisfied_by:
        upgrade_allowed = self._is_upgrade_allowed(req_to_install)

        # Whether the best available version is already installed.
        best_installed = False

        if upgrade_allowed:
            # For link based requirements we have to pull the
            # tree down and inspect to assess the version #, so
            # its handled way down.
            if not (self.force_reinstall or req_to_install.link):
                try:
                    finder.find_requirement(
                        req_to_install, upgrade_allowed)
                except BestVersionAlreadyInstalled:
                    best_installed = True
                except DistributionNotFound:
                    # No distribution found, so we squash the
                    # error - it will be raised later when we
                    # re-try later to do the install.
                    # Why don't we just raise here?
                    pass

            if not best_installed:
                # don't uninstall conflict if user install and
                # conflict is not user install
                if not (self.use_user_site and not
                        dist_in_usersite(req_to_install.satisfied_by)):
                    req_to_install.conflicts_with = \
                        req_to_install.satisfied_by
                # Clearing satisfied_by signals that the requirement
                # must be (re)installed.
                req_to_install.satisfied_by = None

        # Figure out a nice message to say why we're skipping this.
        if best_installed:
            skip_reason = 'already up-to-date'
        elif self.upgrade_strategy == "only-if-needed":
            skip_reason = 'not upgraded as not directly required'
        else:
            skip_reason = 'already satisfied'

        return skip_reason
    else:
        return None
+
def _prepare_file(self,
                  finder,
                  req_to_install,
                  require_hashes=False,
                  ignore_dependencies=False):
    """Prepare a single requirements file.

    Obtains/unpacks the requirement's source (editable checkout, already
    installed dist, or downloaded archive), enforces hash-checking rules
    when requested, and registers the requirement's dependencies.

    :param finder: used to locate candidate distributions
        (``find_requirement`` / ``populate_link``).
    :param req_to_install: the InstallRequirement being prepared.
    :param require_hashes: when True, refuse anything that cannot be
        hash-verified (editables, VCS urls, local dirs, unpinned reqs).
    :param ignore_dependencies: when True, do not collect sub-requirements.
    :return: A list of additional InstallRequirements to also install.
    """
    # Tell user what we are doing for this requirement:
    # obtain (editable), skipping, processing (local url), collecting
    # (remote url or package name)
    # Constraints are never prepared on their own, and each requirement
    # is prepared at most once (idempotence guard).
    if req_to_install.constraint or req_to_install.prepared:
        return []

    req_to_install.prepared = True

    # ###################### #
    # # print log messages # #
    # ###################### #
    if req_to_install.editable:
        logger.info('Obtaining %s', req_to_install)
    else:
        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req_to_install.satisfied_by is None
        if not self.ignore_installed:
            skip_reason = self._check_skip_installed(
                req_to_install, finder)

        if req_to_install.satisfied_by:
            # satisfied_by can only have been set by the call above, which
            # always returns a reason in that case.
            assert skip_reason is not None, (
                '_check_skip_installed returned None but '
                'req_to_install.satisfied_by is set to %r'
                % (req_to_install.satisfied_by,))
            logger.info(
                'Requirement %s: %s', skip_reason,
                req_to_install)
        else:
            if (req_to_install.link and
                    req_to_install.link.scheme == 'file'):
                path = url_to_path(req_to_install.link.url)
                logger.info('Processing %s', display_path(path))
            else:
                logger.info('Collecting %s', req_to_install)

    with indent_log():
        # ################################ #
        # # vcs update or unpack archive # #
        # ################################ #
        if req_to_install.editable:
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req_to_install)
            req_to_install.ensure_has_source_dir(self.src_dir)
            req_to_install.update_editable(not self.is_download)
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                req_to_install.archive(self.download_dir)
            req_to_install.check_if_exists()
        elif req_to_install.satisfied_by:
            if require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.')
            abstract_dist = Installed(req_to_install)
        else:
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req_to_install.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req_to_install.source_dir`
            if os.path.exists(
                    os.path.join(req_to_install.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req_to_install, req_to_install.source_dir)
                )
            req_to_install.populate_link(
                finder,
                self._is_upgrade_allowed(req_to_install),
                require_hashes
            )
            # We can't hit this spot and have populate_link return None.
            # req_to_install.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req_to_install.link
            link = req_to_install.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if (not req_to_install.original_link and
                        not req_to_install.is_pinned):
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()
            hashes = req_to_install.hashes(
                trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req_to_install.link.is_wheel \
                        and self.wheel_download_dir:
                    # when doing 'pip wheel` we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req_to_install.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req_to_install.link, req_to_install.source_dir,
                    download_dir, autodelete_unpacked,
                    session=self.session, hashes=hashes)
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because '
                    'of error %s',
                    req_to_install,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because '
                    'of HTTP error %s for URL %s' %
                    (req_to_install, exc, req_to_install.link)
                )
            abstract_dist = make_abstract_dist(req_to_install)
            abstract_dist.prep_for_dist()
            if self.is_download:
                # Make a .zip of the source_dir we already created.
                if req_to_install.link.scheme in vcs.all_schemes:
                    req_to_install.archive(self.download_dir)
            # req_to_install.req is only avail after unpack for URL
            # pkgs repeat check_if_exists to uninstall-on-upgrade
            # (#14)
            if not self.ignore_installed:
                req_to_install.check_if_exists()
            if req_to_install.satisfied_by:
                if self.upgrade or self.ignore_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(
                                req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
                else:
                    logger.info(
                        'Requirement already satisfied (use '
                        '--upgrade to upgrade): %s',
                        req_to_install,
                    )

        # ###################### #
        # # parse dependencies # #
        # ###################### #
        dist = abstract_dist.dist(finder)
        try:
            check_dist_requires_python(dist)
        except UnsupportedPythonVersion as e:
            if self.ignore_requires_python:
                logger.warning(e.args[0])
            else:
                req_to_install.remove_temporary_source()
                raise
        more_reqs = []

        def add_req(subreq, extras_requested):
            # Wrap each dependency in its own InstallRequirement (with
            # req_to_install as comes_from) and let add_requirement decide
            # whether it is new, merged, or discarded.
            sub_install_req = InstallRequirement(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self._wheel_cache,
            )
            more_reqs.extend(self.add_requirement(
                sub_install_req, req_to_install.name,
                extras_requested=extras_requested))

        # We add req_to_install before its dependencies, so that we
        # can refer to it when adding dependencies.
        if not self.has_requirement(req_to_install.name):
            # 'unnamed' requirements will get added here
            self.add_requirement(req_to_install, None)

        if not ignore_dependencies:
            if (req_to_install.extras):
                logger.debug(
                    "Installing extra requirements: %r",
                    ','.join(req_to_install.extras),
                )
            # Warn about requested extras the dist does not actually provide.
            missing_requested = sorted(
                set(req_to_install.extras) - set(dist.extras)
            )
            for missing in missing_requested:
                logger.warning(
                    '%s does not provide the extra \'%s\'',
                    dist, missing
                )

            available_requested = sorted(
                set(dist.extras) & set(req_to_install.extras)
            )
            for subreq in dist.requires(available_requested):
                add_req(subreq, extras_requested=available_requested)

        # cleanup tmp src
        self.reqs_to_cleanup.append(req_to_install)

        if not req_to_install.editable and not req_to_install.satisfied_by:
            # XXX: --no-install leads this to report 'Successfully
            # downloaded' for only non-editable reqs, even though we took
            # action on them.
            self.successfully_downloaded.append(req_to_install)

    return more_reqs
+
def cleanup_files(self):
    """Delete the temporary source tree of every requirement that was
    registered for cleanup during preparation."""
    logger.debug('Cleaning up...')
    with indent_log():
        for requirement in self.reqs_to_cleanup:
            requirement.remove_temporary_source()
+
def _to_install(self):
    """Compute the installation order.

    The order is topological: every dependency is scheduled before the
    requirement that needs it. Cycles are broken at an arbitrary point
    and no other ordering guarantee is made.
    """
    # User-specified requirements keep their given order except where a
    # dependency has to be pulled ahead to preserve topological order.
    install_order = []
    visited = set()

    def visit(requirement):
        # Already-installed or already-scheduled reqs need no slot, and
        # pure constraints are never installed at all.
        if requirement.satisfied_by or requirement in visited:
            return
        if requirement.constraint:
            return
        visited.add(requirement)
        # Post-order traversal: dependencies first, then the req itself.
        for dependency in self._dependencies[requirement]:
            visit(dependency)
        install_order.append(requirement)

    for requirement in self.requirements.values():
        visit(requirement)
    return install_order
+
def install(self, install_options, global_options=(), *args, **kwargs):
    """
    Install everything in this set (after having downloaded and unpacked
    the packages)

    Extra positional/keyword arguments are forwarded verbatim to each
    ``requirement.install(...)`` call. On completion the installed
    requirements are recorded in ``self.successfully_installed``.
    """
    to_install = self._to_install()

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            # An existing installation that conflicts is uninstalled
            # first, but kept stashed so it can be restored on failure.
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    requirement.uninstall(auto_confirm=True)
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except:
                # if install did not succeed, rollback previous uninstall
                # NOTE: the bare except is deliberate — even on
                # KeyboardInterrupt/SystemExit the stashed uninstall must
                # be restored before the exception is re-raised.
                if (requirement.conflicts_with and not
                        requirement.install_succeeded):
                    requirement.rollback_uninstall()
                raise
            else:
                # Success: discard the stashed copy of the old
                # installation and this requirement's temporary source.
                if (requirement.conflicts_with and
                        requirement.install_succeeded):
                    requirement.commit_uninstall()
                requirement.remove_temporary_source()

    self.successfully_installed = to_install
diff --git a/lib/python2.7/site-packages/pip/req/req_uninstall.py b/lib/python2.7/site-packages/pip/req/req_uninstall.py
new file mode 100644
index 0000000..5248430
--- /dev/null
+++ b/lib/python2.7/site-packages/pip/req/req_uninstall.py
@@ -0,0 +1,195 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import tempfile
+
+from pip.compat import uses_pycache, WINDOWS, cache_from_source
+from pip.exceptions import UninstallationError
+from pip.utils import rmtree, ask, is_local, renames, normalize_path
+from pip.utils.logging import indent_log
+
+
+logger = logging.getLogger(__name__)
+
+
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement.

    Paths are collected via :meth:`add` / :meth:`add_pth`, removed (moved
    to a temporary stash) by :meth:`remove`, and the operation can then be
    finalized with :meth:`commit` or undone with :meth:`rollback`.
    """

    def __init__(self, dist):
        # Files/directories scheduled for removal.
        self.paths = set()
        # Paths we refuse to touch because they are outside our prefix.
        self._refuse = set()
        # Mapping of .pth file path -> UninstallPthEntries editor.
        self.pth = {}
        self.dist = dist
        # Stash directory; created lazily by remove(), freed by commit().
        self.save_dir = None
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        """Schedule *path* for removal if it exists and is permitted;
        otherwise record it in the refused set."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        """Schedule removal of *entry* from the .pth file *pth_file*."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        # Shortest-first, so any covering ancestor is seen before its
        # descendants; a path is kept only if no kept path contains it.
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
        # Mirror *path* (minus any drive letter) under the stash dir.
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )

        with indent_log():
            paths = sorted(self.compact(self.paths))

            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            if response == 'y':
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                # Files are moved (not deleted) so rollback() can restore
                # them until commit() is called.
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove().

        :return: True on success, False if there was nothing to roll back.
        """
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()
        # Signal success explicitly, consistent with the False failure
        # return above and with UninstallPthEntries.rollback() (the
        # original fell off the end and returned None).
        return True

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []
+
+
class UninstallPthEntries(object):
    """Edits a single ``.pth`` file: removes a set of entries from it
    while keeping the original contents so the edit can be rolled back."""

    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as stream:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            contents = stream.readlines()
        self._saved_lines = contents
        endline = '\r\n' if any(b'\r\n' in line for line in contents) else '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                contents.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry not present in the file; nothing to strip.
                pass
        with open(self.file, 'wb') as stream:
            stream.writelines(contents)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as stream:
            stream.writelines(self._saved_lines)
        return True