Added a Flask venv
@@ -0,0 +1,69 @@
from __future__ import absolute_import

import logging

from .req_install import InstallRequirement
from .req_set import RequirementSet
from .req_file import parse_requirements
from pip._internal.utils.logging import indent_log


__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)


def install_given_reqs(to_install, install_options, global_options=(),
                       *args, **kwargs):
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except Exception:
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()
                requirement.remove_temporary_source()

    return to_install
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
@@ -0,0 +1,298 @@
"""Backing implementation for InstallRequirement's various constructors

The idea here is that these formed a major chunk of InstallRequirement's size
so, moving them and support code dedicated to them outside of that class
helps create better understandability for the rest of the code.

These are meant to be used elsewhere within pip to create instances of
InstallRequirement.
"""

import logging
import os
import re
import traceback

from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.specifiers import Specifier
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements

from pip._internal.download import (
    is_archive_file, is_url, path_to_url, url_to_path,
)
from pip._internal.exceptions import InstallationError
from pip._internal.models.index import PyPI, TestPyPI
from pip._internal.models.link import Link
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.misc import is_installable_dir
from pip._internal.vcs import vcs
from pip._internal.wheel import Wheel

__all__ = [
    "install_req_from_editable", "install_req_from_line",
    "parse_editable"
]

logger = logging.getLogger(__name__)
operators = Specifier._operators.keys()


def _strip_extras(path):
    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
    extras = None
    if m:
        path_no_extras = m.group(1)
        extras = m.group(2)
    else:
        path_no_extras = path

    return path_no_extras, extras


def parse_editable(editable_req):
    """Parses an editable requirement into:
        - a requirement name
        - a URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    """

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        raise InstallationError(
            '%s should either be a path to a local project or a VCS url '
            'beginning with svn+, git+, hg+, or bzr+' %
            editable_req
        )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
    return package_name, url, None


def deduce_helpful_msg(req):
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    """
    msg = ""
    if os.path.exists(req):
        msg = " It does exist."
        # Try to parse and check if it is a requirements file.
        try:
            with open(req, 'r') as fp:
                # parse first line only
                next(parse_requirements(fp.read()))
                msg += " The argument you provided " + \
                    "(%s) appears to be a" % (req) + \
                    " requirements file. If that is the" + \
                    " case, use the '-r' flag to install" + \
                    " the packages specified within it."
        except RequirementParseError:
            logger.debug("Cannot parse '%s' as requirements \
            file" % (req), exc_info=1)
    else:
        msg += " File '%s' does not exist." % (req)
    return msg


# ---- The actual constructors follow ----


def install_req_from_editable(
    editable_req, comes_from=None, isolated=False, options=None,
    wheel_cache=None, constraint=False
):
    name, url, extras_override = parse_editable(editable_req)
    if url.startswith('file:'):
        source_dir = url_to_path(url)
    else:
        source_dir = None

    if name is not None:
        try:
            req = Requirement(name)
        except InvalidRequirement:
            raise InstallationError("Invalid requirement: '%s'" % name)
    else:
        req = None
    return InstallRequirement(
        req, comes_from, source_dir=source_dir,
        editable=True,
        link=Link(url),
        constraint=constraint,
        isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        extras=extras_override or (),
    )


def install_req_from_line(
    name, comes_from=None, isolated=False, options=None, wheel_cache=None,
    constraint=False
):
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers = name.split(marker_sep, 1)
        markers = markers.strip()
        if not markers:
            markers = None
        else:
            markers = Marker(markers)
    else:
        markers = None
    name = name.strip()
    req = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras = _strip_extras(path)
        looks_like_dir = os.path.isdir(p) and (
            os.path.sep in name or
            (os.path.altsep is not None and os.path.altsep in name) or
            name.startswith('.')
        )
        if looks_like_dir:
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. Neither 'setup.py' "
                    "nor 'pyproject.toml' found." % name
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist',
                    name
                )
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req = link.egg_fragment

    # a requirement specifier
    else:
        req = name

    if extras:
        extras = Requirement("placeholder" + extras.lower()).extras
    else:
        extras = ()
    if req is not None:
        try:
            req = Requirement(req)
        except InvalidRequirement:
            if os.path.sep in req:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req)
            elif '=' in req and not any(op in req for op in operators):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = traceback.format_exc()
            raise InstallationError(
                "Invalid requirement: '%s'\n%s" % (req, add_msg)
            )

    return InstallRequirement(
        req, comes_from, link=link, markers=markers,
        isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        constraint=constraint,
        extras=extras,
    )


def install_req_from_req(
    req, comes_from=None, isolated=False, wheel_cache=None
):
    try:
        req = Requirement(req)
    except InvalidRequirement:
        raise InstallationError("Invalid requirement: '%s'" % req)

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if req.url and comes_from.link.netloc in domains_not_allowed:
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            "%s depends on %s " % (comes_from.name, req)
        )

    return InstallRequirement(
        req, comes_from, isolated=isolated, wheel_cache=wheel_cache
    )
@@ -0,0 +1,340 @@
"""
Requirements file parsing
"""

from __future__ import absolute_import

import optparse
import os
import re
import shlex
import sys

from pip._vendor.six.moves import filterfalse
from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.cli import cmdoptions
from pip._internal.download import get_file_content
from pip._internal.exceptions import RequirementsFileParseError
from pip._internal.req.constructors import (
    install_req_from_editable, install_req_from_line,
)

__all__ = ['parse_requirements']

SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]


def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    lines_enum = preprocess(content, options)

    for line_number, line in lines_enum:
        req_iter = process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint)
        for req in req_iter:
            yield req


def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    lines_enum = enumerate(content.splitlines(), start=1)
    lines_enum = join_lines(lines_enum)
    lines_enum = ignore_comments(lines_enum)
    lines_enum = skip_regex(lines_enum, options)
    lines_enum = expand_env_variables(lines_enum)
    return lines_enum


def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; this can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield install_req_from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield install_req_from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)


def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)


def build_parser(line):
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        # add offending line
        msg = 'Invalid requirement: %s\n%s' % (line, msg)
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser


def join_lines(lines_enum):
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    primary_line_number = None
    new_line = []
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.


def ignore_comments(lines_enum):
    """
    Strips comments and filters empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = line.strip()
        if line:
            yield line_number, line


def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        pattern = re.compile(skip_regex)
        lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
    return lines_enum


def expand_env_variables(lines_enum):
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letters, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            if not value:
                continue

            line = line.replace(env_var, value)

        yield line_number, line
@@ -0,0 +1,860 @@
from __future__ import absolute_import

import logging
import os
import shutil
import sys
import sysconfig
import zipfile
from distutils.util import change_root

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal import wheel
from pip._internal.build_env import NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.locations import (
    PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
)
from pip._internal.models.link import Link
from pip._internal.pyproject import load_pyproject_toml
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.compat import native_str
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    _make_build_dir, ask_path_exists, backup_dir, call_subprocess,
    display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
    get_installed_version, rmtree,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.ui import open_spinner
from pip._internal.vcs import vcs
from pip._internal.wheel import move_wheel_files

logger = logging.getLogger(__name__)


class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, update=True, markers=None,
                 isolated=False, options=None, wheel_cache=None,
                 constraint=False, extras=()):
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is not None:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        else:
            self.source_dir = None
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is not None:
            self.link = self.original_link = link
        else:
            self.link = self.original_link = req and req.url and Link(req.url)

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This holds the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = TempDirectory(kind="req-build")
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled_pathset = None
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        self.is_direct = False

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None

        # Build requirements that we will check are available
        # TODO: We don't do this for --no-build-isolation. Should we?
        self.requirements_to_check = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = None

    def __str__(self):
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from %s' % self.link.url
        elif self.link:
            s = self.link.url
        else:
            s = '<InstallRequirement>'
        if self.satisfied_by is not None:
            s += ' in %s' % display_path(self.satisfied_by.location)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += ' (from %s)' % comes_from
        return s

    def __repr__(self):
        return '<%s object: %s editable=%r>' % (
            self.__class__.__name__, str(self), self.editable)

    def populate_link(self, finder, upgrade, require_hashes):
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have nondeterministic contents
        due to file modification times.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        if self._wheel_cache is not None and not require_hashes:
            old_link = self.link
            self.link = self._wheel_cache.get(self.link, self.name)
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)

    # Things that are valid for all kinds of requirements?
    @property
    def name(self):
        if self.req is None:
            return None
        return native_str(pkg_resources.safe_name(self.req.name))

    @property
    def specifier(self):
        return self.req.specifier

    @property
    def is_pinned(self):
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in {'==', '==='})

    @property
    def installed_version(self):
        return get_installed_version(self.name)

    def match_markers(self, extras_requested=None):
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
        else:
            return True

    @property
    def has_hash_options(self):
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.options.get('hashes', {}))

    def hashes(self, trust_internet=True):
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.options.get('hashes', {}).copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

    def from_path(self):
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def build_location(self, build_dir):
        assert build_dir is not None
        if self._temp_build_dir.path is not None:
            return self._temp_build_dir.path
        if self.req is None:
            # for requirement via a path to a directory: the name of the
            # package is not available yet so we create a temp directory
            # Once run_egg_info will have run, we'll be able
            # to fix it via _correct_build_location
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir.create()
            self._ideal_build_dir = build_dir

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            _make_build_dir(build_dir)
        return os.path.join(build_dir, name)

    def _correct_build_location(self):
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.run_egg_info to fix the temporary build
        directory.
        """
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir.path
        assert self._ideal_build_dir.path
        old_location = self._temp_build_dir.path
        self._temp_build_dir.path = None

        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir.path = new_location
        self._ideal_build_dir = None
        self.source_dir = os.path.normpath(os.path.abspath(new_location))
        self._egg_info_path = None

    def remove_temporary_source(self):
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        if self.source_dir and os.path.exists(
                os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
            logger.debug('Removing source in %s', self.source_dir)
            rmtree(self.source_dir)
        self.source_dir = None
        self._temp_build_dir.cleanup()
        self.build_env.cleanup()

    def check_if_exists(self, use_user_site):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.
        """
        if self.req is None:
            return False
        try:
            # get_distribution() will resolve the entire list of requirements
            # anyway, and we've already determined that we need the requirement
            # in question, so strip the marker so that we don't try to
            # evaluate it.
            no_marker = Requirement(str(self.req))
            no_marker.marker = None
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
            if self.editable and self.satisfied_by:
                self.conflicts_with = self.satisfied_by
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
                return True
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.conflicts_with = existing_dist
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.conflicts_with = existing_dist
        return True

    # Things valid for wheels
    @property
    def is_wheel(self):
        return self.link and self.link.is_wheel

    def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None,
                         warn_script_location=True, use_user_site=False,
                         pycompile=True):
        move_wheel_files(
            self.name, self.req, wheeldir,
            user=use_user_site,
            home=home,
            root=root,
            prefix=prefix,
            pycompile=pycompile,
            isolated=self.isolated,
            warn_script_location=warn_script_location,
        )

    # Things valid for sdists
    @property
    def setup_py_dir(self):
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py(self):
        assert self.source_dir, "No source dir for %s" % self

        setup_py = os.path.join(self.setup_py_dir, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py

    @property
    def pyproject_toml(self):
        assert self.source_dir, "No source dir for %s" % self

        pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(pp_toml, six.text_type):
            pp_toml = pp_toml.encode(sys.getfilesystemencoding())

        return pp_toml

    def load_pyproject_toml(self):
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pep517_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml,
            self.setup_py,
            str(self)
        )

        if pep517_data is None:
            self.use_pep517 = False
        else:
            self.use_pep517 = True
            requires, backend, check = pep517_data
            self.requirements_to_check = check
            self.pyproject_requires = requires
            self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend)

    def run_egg_info(self):
        assert self.source_dir
        if self.name:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package %s',
                self.setup_py, self.name,
            )
        else:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package from %s',
                self.setup_py, self.link,
            )

        with indent_log():
            script = SETUPTOOLS_SHIM % self.setup_py
            base_cmd = [sys.executable, '-c', script]
            if self.isolated:
                base_cmd += ["--no-user-cfg"]
            egg_info_cmd = base_cmd + ['egg_info']
            # We can't put the .egg-info files at the root, because then the
            # source code will be mistaken for an installed egg, causing
            # problems
            if self.editable:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
                ensure_dir(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            with self.build_env:
                call_subprocess(
                    egg_info_cmd + egg_base_option,
                    cwd=self.setup_py_dir,
                    show_stdout=False,
                    command_desc='python setup.py egg_info')

        if not self.req:
            if isinstance(parse_version(self.metadata["Version"]), Version):
                op = "=="
            else:
                op = "==="
            self.req = Requirement(
                "".join([
                    self.metadata["Name"],
                    op,
                    self.metadata["Version"],
                ])
            )
            self._correct_build_location()
        else:
            metadata_name = canonicalize_name(self.metadata["Name"])
            if canonicalize_name(self.req.name) != metadata_name:
                logger.warning(
                    'Running setup.py (path:%s) egg_info for package %s '
                    'produced metadata for project name %s. Fix your '
                    '#egg=%s fragments.',
                    self.setup_py, self.name, metadata_name, self.name
                )
                self.req = Requirement(metadata_name)

    @property
    def egg_info_path(self):
        if self._egg_info_path is None:
            if self.editable:
                base = self.source_dir
            else:
                base = os.path.join(self.setup_py_dir, 'pip-egg-info')
            filenames = os.listdir(base)
            if self.editable:
                filenames = []
                for root, dirs, files in os.walk(base):
                    for dir in vcs.dirnames:
                        if dir in dirs:
                            dirs.remove(dir)
                    # Iterate over a copy of ``dirs``, since mutating
                    # a list while iterating over it can cause trouble.
                    # (See https://github.com/pypa/pip/pull/462.)
                    for dir in list(dirs):
                        # Don't search in anything that looks like a virtualenv
                        # environment
                        if (
                                os.path.lexists(
                                    os.path.join(root, dir, 'bin', 'python')
                                ) or
                                os.path.exists(
                                    os.path.join(
                                        root, dir, 'Scripts', 'Python.exe'
                                    )
                                )):
                            dirs.remove(dir)
                        # Also don't search through tests
                        elif dir == 'test' or dir == 'tests':
                            dirs.remove(dir)
                    filenames.extend([os.path.join(root, dir)
                                      for dir in dirs])
                filenames = [f for f in filenames if f.endswith('.egg-info')]

            if not filenames:
                raise InstallationError(
                    "Files/directories not found in %s" % base
                )
            # if we have more than one match, we pick the toplevel one. This
            # can easily be the case if there is a dist folder which contains
            # an extracted tarball for testing purposes.
            if len(filenames) > 1:
                filenames.sort(
                    key=lambda x: x.count(os.path.sep) +
                    (os.path.altsep and x.count(os.path.altsep) or 0)
                )
            self._egg_info_path = os.path.join(base, filenames[0])
        return self._egg_info_path

    @property
    def metadata(self):
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata

    def get_dist(self):
        """Return a pkg_resources.Distribution built from self.egg_info_path"""
        egg_info = self.egg_info_path.rstrip(os.path.sep)
        base_dir = os.path.dirname(egg_info)
        metadata = pkg_resources.PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        return pkg_resources.Distribution(
            os.path.dirname(egg_info),
            project_name=dist_name,
            metadata=metadata,
        )

    def assert_source_matches_version(self):
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(self, parent_dir):
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.build_location(parent_dir)
        return self.source_dir

    # For editable installations
    def install_editable(self, install_options,
                         global_options=(), prefix=None):
        logger.info('Running setup.py develop for %s', self.name)

        if self.isolated:
            global_options = list(global_options) + ["--no-user-cfg"]

        if prefix:
            prefix_param = ['--prefix={}'.format(prefix)]
            install_options = list(install_options) + prefix_param

        with indent_log():
            # FIXME: should we do --install-headers here too?
            with self.build_env:
                call_subprocess(
                    [
                        sys.executable,
                        '-c',
                        SETUPTOOLS_SHIM % self.setup_py
                    ] +
                    list(global_options) +
                    ['develop', '--no-deps'] +
                    list(install_options),

                    cwd=self.setup_py_dir,
                    show_stdout=False,
                )

        self.install_succeeded = True

    def update_editable(self, obtain=True):
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        if not self.update:
            return
        vc_type, url = self.link.url.split('+', 1)
        backend = vcs.get_backend(vc_type)
        if backend:
            vcs_backend = backend(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir)
            else:
                vcs_backend.export(self.source_dir)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))

    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False,
                  use_user_site=False):
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        if not self.check_if_exists(use_user_site):
            logger.warning("Skipping %s as it is not installed.", self.name)
            return
        dist = self.satisfied_by or self.conflicts_with

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _clean_zip_name(self, name, prefix):  # only used by archive.
        assert name.startswith(prefix + os.path.sep), (
            "name %r doesn't start with prefix %r" % (name, prefix)
        )
        name = name[len(prefix) + 1:]
        name = name.replace(os.path.sep, '/')
        return name

    # TODO: Investigate if this should be kept in InstallRequirement
    # Seems to be used only when VCS + downloads
    def archive(self, build_dir):
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
                display_path(archive_path), ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)
        if create_archive:
            zip = zipfile.ZipFile(
                archive_path, 'w', zipfile.ZIP_DEFLATED,
                allowZip64=True
            )
            dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, self.name + '/' + name)
            zip.close()
            logger.info('Saved %s', display_path(archive_path))

    def install(self, install_options, global_options=None, root=None,
                home=None, prefix=None, warn_script_location=True,
                use_user_site=False, pycompile=True):
        global_options = global_options if global_options is not None else []
        if self.editable:
            self.install_editable(
                install_options, global_options, prefix=prefix,
            )
            return
        if self.is_wheel:
            version = wheel.wheel_version(self.source_dir)
            wheel.check_compatibility(version, self.name)

            self.move_wheel_files(
                self.source_dir, root=root, prefix=prefix, home=home,
                warn_script_location=warn_script_location,
                use_user_site=use_user_site, pycompile=pycompile,
            )
            self.install_succeeded = True
            return

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + \
            self.options.get('global_options', [])
        install_options = list(install_options) + \
            self.options.get('install_options', [])

        if self.isolated:
            global_options = global_options + ["--no-user-cfg"]

        with TempDirectory(kind="record") as temp_dir:
            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
            install_args = self.get_install_args(
                global_options, record_filename, root, prefix, pycompile,
            )
            msg = 'Running setup.py install for %s' % (self.name,)
            with open_spinner(msg) as spinner:
                with indent_log():
                    with self.build_env:
                        call_subprocess(
                            install_args + install_options,
                            cwd=self.setup_py_dir,
                            show_stdout=False,
                            spinner=spinner,
                        )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                return
            self.install_succeeded = True

            def prepend_root(path):
                if root is None or not os.path.isabs(path):
                    return path
                else:
                    return change_root(root, path)

            with open(record_filename) as f:
                for line in f:
                    directory = os.path.dirname(line)
                    if directory.endswith('.egg-info'):
                        egg_info_dir = prepend_root(directory)
                        break
                else:
                    logger.warning(
                        'Could not find .egg-info directory in install record'
                        ' for %s',
                        self,
                    )
                    # FIXME: put the record somewhere
                    # FIXME: should this be an error?
                    return
            new_lines = []
            with open(record_filename) as f:
                for line in f:
                    filename = line.strip()
                    if os.path.isdir(filename):
                        filename += os.path.sep
                    new_lines.append(
                        os.path.relpath(prepend_root(filename), egg_info_dir)
                    )
            new_lines.sort()
            ensure_dir(egg_info_dir)
            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
            with open(inst_files_path, 'w') as f:
                f.write('\n'.join(new_lines) + '\n')

    def get_install_args(self, global_options, record_filename, root, prefix,
                         pycompile):
        install_args = [sys.executable, "-u"]
        install_args.append('-c')
        install_args.append(SETUPTOOLS_SHIM % self.setup_py)
        install_args += list(global_options) + \
            ['install', '--record', record_filename]
        install_args += ['--single-version-externally-managed']

        if root is not None:
            install_args += ['--root', root]
        if prefix is not None:
            install_args += ['--prefix', prefix]

        if pycompile:
            install_args += ["--compile"]
        else:
            install_args += ["--no-compile"]

        if running_under_virtualenv():
            py_ver_str = 'python' + sysconfig.get_python_version()
            install_args += ['--install-headers',
                             os.path.join(sys.prefix, 'include', 'site',
                                          py_ver_str, self.name)]

        return install_args
@@ -0,0 +1,181 @@
from __future__ import absolute_import

import logging
from collections import OrderedDict

from pip._internal.exceptions import InstallationError
from pip._internal.utils.logging import indent_log
from pip._internal.wheel import Wheel

logger = logging.getLogger(__name__)


class RequirementSet(object):

    def __init__(self, require_hashes=False, check_supported_wheels=True):
        """Create a RequirementSet.
        """

        self.requirements = OrderedDict()
        self.require_hashes = require_hashes
        self.check_supported_wheels = check_supported_wheels

        # Mapping of alias: real_name
        self.requirement_aliases = {}
        self.unnamed_requirements = []
        self.successfully_downloaded = []
        self.reqs_to_cleanup = []

    def __str__(self):
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: A pair (requirements_to_scan, added_or_existing_req).
            requirements_to_scan is either [] if the requirement is not
            applicable, or [install_req] if the requirement is applicable
            and has just been added.
        """
        name = install_req.name

        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                name, install_req.markers,
            )
            return [], None

        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if self.check_supported_wheels and not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None

        try:
            existing_req = self.get_requirement(name)
        except KeyError:
            existing_req = None

        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: %s (already in %s, name=%r)"
                % (install_req, existing_req, name)
            )

        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.requirements[name] = install_req
            # FIXME: what about other normalizations? E.g., _ vs. -?
            if name.lower() != name:
                self.requirement_aliases[name.lower()] = name
            # We'd want to rescan this requirement later
            return [install_req], install_req

        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req

        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            self.reqs_to_cleanup.append(install_req)
            raise InstallationError(
                "Could not satisfy constraints for '%s': "
                "installation from path or url cannot be "
                "constrained to a version" % name,
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req

    def has_requirement(self, project_name):
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    def get_requirement(self, project_name):
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
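
# Illustrative sketch (not part of the diff): the add_requirement() contract and
# the case-insensitive lookups that requirement_aliases enables. Assumes
# install_req_from_line from pip._internal.req.constructors (added in this same
# commit) is importable, and that callers mark top-level requirements with
# is_direct = True, as the assertion in add_requirement() expects.
from pip._internal.req.constructors import install_req_from_line

req_set = RequirementSet()

req = install_req_from_line('Flask==1.0')
req.is_direct = True
to_scan, added = req_set.add_requirement(req)    # -> ([req], req)

ignored = install_req_from_line('pywin32; sys_platform == "win32"')
ignored.is_direct = True
print(req_set.add_requirement(ignored))          # -> ([], None) except on Windows

print(req_set.has_requirement('flask'))          # True, via the lower-cased alias
print(req_set.get_requirement('FLASK').name)     # 'Flask'
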
@@ -0,0 +1,76 @@
from __future__ import absolute_import

import contextlib
import errno
import hashlib
import logging
import os

from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class RequirementTracker(object):

    def __init__(self):
        self._root = os.environ.get('PIP_REQ_TRACKER')
        if self._root is None:
            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
            self._temp_dir.create()
            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
            logger.debug('Created requirements tracker %r', self._root)
        else:
            self._temp_dir = None
            logger.debug('Re-using requirements tracker %r', self._root)
        self._entries = set()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.cleanup()

    def _entry_path(self, link):
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req):
        link = req.link
        info = str(req)
        entry_path = self._entry_path(link)
        try:
            with open(entry_path) as fp:
                # Error, there's already a build in progress.
                raise LookupError('%s is already being built: %s'
                                  % (link, fp.read()))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
        assert req not in self._entries
        with open(entry_path, 'w') as fp:
            fp.write(info)
        self._entries.add(req)
        logger.debug('Added %s to build tracker %r', req, self._root)

    def remove(self, req):
        link = req.link
        self._entries.remove(req)
        os.unlink(self._entry_path(link))
        logger.debug('Removed %s from build tracker %r', req, self._root)

    def cleanup(self):
        for req in set(self._entries):
            self.remove(req)
        remove = self._temp_dir is not None
        if remove:
            self._temp_dir.cleanup()
        logger.debug('%s build tracker %r',
                     'Removed' if remove else 'Cleaned',
                     self._root)

    @contextlib.contextmanager
    def track(self, req):
        self.add(req)
        yield
        self.remove(req)
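
# Illustrative sketch (not part of the diff): the tracker is a context manager,
# and individual builds are wrapped with track(). The requirement below is a
# hypothetical stand-in built with install_req_from_line; the tracker only needs
# its .link, so a direct URL requirement is used here.
from pip._internal.req.constructors import install_req_from_line

req = install_req_from_line(
    'https://files.pythonhosted.org/packages/example/flask-1.0.tar.gz'
)
with RequirementTracker() as tracker:
    with tracker.track(req):
        pass  # a real caller would build the sdist/wheel here
# On exit, cleanup() removes any leftover entries and the temp dir it created.
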
@@ -0,0 +1,460 @@
from __future__ import absolute_import

import csv
import functools
import logging
import os
import sys
import sysconfig

from pip._vendor import pkg_resources

from pip._internal.exceptions import UninstallationError
from pip._internal.locations import bin_py, bin_user
from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
    normalize_path, renames,
)
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


def _script_names(dist, script_name, is_gui):
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    if dist_in_usersite(dist):
        bin_dir = bin_user
    else:
        bin_dir = bin_py
    exe_name = os.path.join(bin_dir, script_name)
    paths_to_remove = [exe_name]
    if WINDOWS:
        paths_to_remove.append(exe_name + '.exe')
        paths_to_remove.append(exe_name + '.exe.manifest')
        if is_gui:
            paths_to_remove.append(exe_name + '-script.pyw')
        else:
            paths_to_remove.append(exe_name + '-script.py')
    return paths_to_remove
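
# Illustrative sketch (not part of the diff): the file names generated for a
# console script named 'flask'. The local 'pip' distribution is used purely as
# a stand-in dist object; output depends on platform and install layout.
import pkg_resources
dist = pkg_resources.get_distribution('pip')
for name in _script_names(dist, 'flask', is_gui=False):
    print(name)
# POSIX: <bin_dir>/flask
# Windows adds flask.exe, flask.exe.manifest and flask-script.py
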
def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc and .pyo in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .py[co].
    """
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
            path = os.path.join(dn, base + '.pyo')
            yield path
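
# Illustrative sketch (not part of the diff): _unique makes a generator skip
# values it has already produced, which is what keeps uninstallation_paths()
# from yielding the same file twice.
@_unique
def _demo():
    for item in ['a.py', 'a.py', 'b.py']:
        yield item

print(list(_demo()))   # ['a.py', 'b.py']
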
def compact(paths):
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    short_paths = set()
    for path in sorted(paths, key=len):
        should_skip = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_skip:
            short_paths.add(path)
    return short_paths
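
# Illustrative sketch (not part of the diff): POSIX-style paths assumed, so the
# nested file is dropped in favour of the directory that already covers it.
print(compact({
    '/srv/venv/lib/site-packages/flask',
    '/srv/venv/lib/site-packages/flask/app.py',
}))
# {'/srv/venv/lib/site-packages/flask'}
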
def compress_for_output_listing(paths):
    """Returns a tuple of 2 sets of which paths to display to user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all its contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """

    will_remove = list(paths)
    will_skip = set()

    # Determine folders and files
    folders = set()
    files = set()
    for path in will_remove:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    _normcased_files = set(map(os.path.normcase, files))

    folders = compact(folders)

    # This walks the tree using os.walk to not miss extra folders
    # that might get added.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue

                file_ = os.path.join(dirpath, fname)
                if (os.path.isfile(file_) and
                        os.path.normcase(file_) not in _normcased_files):
                    # We are skipping this file. Add it to the set.
                    will_skip.add(file_)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }

    return will_remove, will_skip
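
# Illustrative sketch (not part of the diff): what the listing helper returns
# for a tiny fake package layout created on the fly. A file present on disk but
# not in the recorded paths ends up in the "skipped" set.
import os
import shutil
import tempfile

root = tempfile.mkdtemp()
pkg = os.path.join(root, 'demo')
os.mkdir(pkg)
for fname in ('__init__.py', 'app.py', 'local_patch.py'):
    open(os.path.join(pkg, fname), 'w').close()

paths = {os.path.join(pkg, '__init__.py'), os.path.join(pkg, 'app.py')}
will_remove, will_skip = compress_for_output_listing(paths)
print(will_remove)   # both recorded files plus '<root>/demo/*'
print(will_skip)     # {'<root>/demo/local_patch.py'}
shutil.rmtree(root)
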
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = TempDirectory(kind="uninstall")
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def _stash(self, path):
        return os.path.join(
            self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
        )
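
# Illustrative sketch (not part of the diff): the path arithmetic _stash()
# performs, with a hypothetical save_dir. The drive letter (empty on POSIX) is
# stripped so the stashed copy mirrors the original layout under save_dir.
import os
save_dir = '/tmp/pip-uninstall-abc123'                       # hypothetical
path = '/srv/venv/lib/site-packages/flask/app.py'
stashed = os.path.join(save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
print(stashed)  # /tmp/pip-uninstall-abc123/srv/venv/lib/site-packages/flask/app.py
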
    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                self.save_dir.create()

                for path in sorted(compact(self.paths)):
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir.path is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self.save_dir.cleanup()
        self._moved_paths = []
    @classmethod
    def from_dist(cls, dist):
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of the uninstall cases matters: when there are two installs
        # of the same package, pip needs to uninstall the currently detected
        # version.
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
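
# Illustrative sketch (not part of the diff): how a caller drives the class
# above. from_dist() only collects paths; nothing is touched until remove() is
# called, and remove() can still be undone via rollback() until commit().
# The local 'pip' distribution is used as a stand-in; remove() is intentionally
# not called here so the example has no side effects.
import pkg_resources
dist = pkg_resources.get_distribution('pip')
pathset = UninstallPathSet.from_dist(dist)
print('%d paths would be removed' % len(pathset.paths))
# A real caller would then do:
#   pathset.remove(auto_confirm=True)
#   pathset.commit()        # or pathset.rollback() on failure
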
class UninstallPthEntries(object):
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
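
# Illustrative sketch (not part of the diff): exercising UninstallPthEntries on
# a throwaway .pth file so remove() and rollback() can be seen end to end. The
# entry path is hypothetical.
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.pth', delete=False) as tmp:
    tmp.write('/srv/venv/src/flask\n')
    pth_path = tmp.name

pth = UninstallPthEntries(pth_path)
pth.add('/srv/venv/src/flask')
pth.remove()                     # strips the entry from the file
pth.rollback()                   # restores the saved lines
os.unlink(pth_path)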