demo + utils venv
This commit is contained in:
@@ -0,0 +1,114 @@
|
||||
"""
|
||||
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
|
||||
depend on something external.
|
||||
|
||||
Files inside of pip._vendor should be considered immutable and should only be
|
||||
updated to versions from upstream.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import glob
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
# Downstream redistributors which have debundled our dependencies should also
# patch this value to be true. This will trigger the additional patching
# to cause things like "six" to be available as pip.
# NOTE: set to True here, so this copy behaves as a debundled installation.
DEBUNDLED = True

# By default, look in this directory for a bunch of .whl files which we will
# add to the beginning of sys.path before attempting to import anything. This
# is done to support downstream re-distributors like Debian and Fedora who
# wish to create their own Wheels for our dependencies to aid in debundling.
WHEEL_DIR = os.path.abspath(os.path.join(sys.prefix, 'share', 'python-wheels'))
|
||||
|
||||
|
||||
# Define a small helper function to alias our vendored modules to the real ones
|
||||
# if the vendored ones do not exist. This idea of this was taken from
|
||||
# https://github.com/kennethreitz/requests/pull/2567.
|
||||
def vendored(modulename):
    """Alias the real top-level *modulename* under this package's name.

    First tries to import the vendored copy (``<package>.<modulename>``).
    If that fails, imports the real top-level module instead and registers
    it in ``sys.modules`` under the vendored name, also attaching it as an
    attribute of the parent package.  If both imports fail, the failure is
    ignored so the usual import error surfaces later, when the module is
    actually used -- that gives a clearer message than failing here.
    """
    alias = "{0}.{1}".format(__name__, modulename)
    try:
        __import__(alias, globals(), locals(), level=0)
        return
    except ImportError:
        pass
    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Silently allow the failure; see docstring.
        return
    real = sys.modules[modulename]
    sys.modules[alias] = real
    parent, attr = alias.rsplit(".", 1)
    setattr(sys.modules[parent], attr, real)
|
||||
|
||||
|
||||
# If we're operating in a debundled setup, then we want to go ahead and trigger
# the aliasing of our vendored libraries as well as looking for wheels to add
# to our sys.path. This will cause all of this code to be a no-op typically
# however downstream redistributors can enable it in a consistent way across
# all platforms.
if DEBUNDLED:
    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
    # front of our sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Actually alias all of our vendored dependencies.
    vendored("cachecontrol")
    vendored("colorama")
    vendored("distlib")
    vendored("distro")
    vendored("html5lib")
    vendored("lockfile")
    vendored("six")
    vendored("six.moves")
    vendored("six.moves.urllib")
    vendored("six.moves.urllib.parse")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pkg_resources")
    vendored("progress")
    vendored("pytoml")
    vendored("retrying")
    vendored("requests")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    try:
        vendored("requests.packages.urllib3.packages.ordered_dict")
        vendored("requests.packages.urllib3.packages.six")
    except ImportError:
        # Debian already unbundles these from requests.
        pass
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
    vendored("urllib3")
|
||||
BIN
Binary file not shown.
@@ -0,0 +1,4 @@
|
||||
"""Wrappers to build Python packages using PEP 517 hooks
"""

# Package version, exposed per convention.
__version__ = '0.2'
|
||||
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
BIN
Binary file not shown.
@@ -0,0 +1,182 @@
|
||||
"""This is invoked in a subprocess to call the build backend hooks.
|
||||
|
||||
It expects:
|
||||
- Command line args: hook_name, control_dir
|
||||
- Environment variable: PEP517_BUILD_BACKEND=entry.point:spec
|
||||
- control_dir/input.json:
|
||||
- {"kwargs": {...}}
|
||||
|
||||
Results:
|
||||
- control_dir/output.json
|
||||
- {"return_val": ...}
|
||||
"""
|
||||
from glob import glob
|
||||
from importlib import import_module
|
||||
import os
|
||||
from os.path import join as pjoin
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
# This is run as a script, not a module, so it can't do a relative import
|
||||
import compat
|
||||
|
||||
def _build_backend():
|
||||
"""Find and load the build backend"""
|
||||
ep = os.environ['PEP517_BUILD_BACKEND']
|
||||
mod_path, _, obj_path = ep.partition(':')
|
||||
obj = import_module(mod_path)
|
||||
if obj_path:
|
||||
for path_part in obj_path.split('.'):
|
||||
obj = getattr(obj, path_part)
|
||||
return obj
|
||||
|
||||
def get_requires_for_build_wheel(config_settings):
    """Invoke the optional get_requires_for_build_wheel hook

    Returns [] if the hook is not defined.
    """
    backend = _build_backend()
    hook = getattr(backend, 'get_requires_for_build_wheel', None)
    if hook is None:
        # Optional hook is absent: no extra requirements.
        return []
    return hook(config_settings)
|
||||
|
||||
def prepare_metadata_for_build_wheel(metadata_directory, config_settings):
    """Invoke optional prepare_metadata_for_build_wheel

    Implements a fallback by building a wheel if the hook isn't defined.
    """
    backend = _build_backend()
    hook = getattr(backend, 'prepare_metadata_for_build_wheel', None)
    if hook is None:
        # Hook not provided: build a full wheel and extract its metadata.
        return _get_wheel_metadata_from_wheel(backend, metadata_directory,
                                              config_settings)
    return hook(metadata_directory, config_settings)
|
||||
|
||||
# Marker file name recording that a wheel was already built by the
# prepare_metadata_for_build_wheel fallback, so build_wheel can reuse it.
WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL'
|
||||
|
||||
def _dist_info_files(whl_zip):
|
||||
"""Identify the .dist-info folder inside a wheel ZipFile."""
|
||||
res = []
|
||||
for path in whl_zip.namelist():
|
||||
m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
|
||||
if m:
|
||||
res.append(path)
|
||||
if res:
|
||||
return res
|
||||
raise Exception("No .dist-info folder found in wheel")
|
||||
|
||||
def _get_wheel_metadata_from_wheel(backend, metadata_directory, config_settings):
    """Build a wheel and extract the metadata from it.

    Fallback for when the build backend does not define the 'get_wheel_metadata'
    hook.

    Returns the name of the extracted ``.dist-info`` directory.
    """
    from zipfile import ZipFile
    whl_basename = backend.build_wheel(metadata_directory, config_settings)
    # Record that a wheel was built so build_wheel() can reuse it instead of
    # building again.
    with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'):
        pass  # Touch marker file

    whl_file = os.path.join(metadata_directory, whl_basename)
    with ZipFile(whl_file) as zipf:
        # Extract only the .dist-info members into metadata_directory.
        dist_info = _dist_info_files(zipf)
        zipf.extractall(path=metadata_directory, members=dist_info)
    # The top-level directory name of the first member is the .dist-info dir.
    return dist_info[0].split('/')[0]
|
||||
|
||||
def _find_already_built_wheel(metadata_directory):
|
||||
"""Check for a wheel already built during the get_wheel_metadata hook.
|
||||
"""
|
||||
if not metadata_directory:
|
||||
return None
|
||||
metadata_parent = os.path.dirname(metadata_directory)
|
||||
if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)):
|
||||
return None
|
||||
|
||||
whl_files = glob(os.path.join(metadata_parent, '*.whl'))
|
||||
if not whl_files:
|
||||
print('Found wheel built marker, but no .whl files')
|
||||
return None
|
||||
if len(whl_files) > 1:
|
||||
print('Found multiple .whl files; unspecified behaviour. '
|
||||
'Will call build_wheel.')
|
||||
return None
|
||||
|
||||
# Exactly one .whl file
|
||||
return whl_files[0]
|
||||
|
||||
def build_wheel(wheel_directory, config_settings, metadata_directory=None):
    """Invoke the mandatory build_wheel hook.

    If a wheel was already built in the prepare_metadata_for_build_wheel
    fallback, this will copy it rather than rebuilding the wheel.
    """
    prebuilt = _find_already_built_wheel(metadata_directory)
    if prebuilt:
        # Reuse the wheel produced by the metadata fallback.
        shutil.copy2(prebuilt, wheel_directory)
        return os.path.basename(prebuilt)

    backend = _build_backend()
    return backend.build_wheel(wheel_directory, config_settings,
                               metadata_directory)
|
||||
|
||||
|
||||
def get_requires_for_build_sdist(config_settings):
    """Invoke the optional get_requires_for_build_sdist hook

    Returns [] if the hook is not defined.
    """
    # NOTE: the docstring previously named the wrong hook
    # (get_requires_for_build_wheel); corrected to match the code.
    backend = _build_backend()
    try:
        hook = backend.get_requires_for_build_sdist
    except AttributeError:
        return []
    else:
        return hook(config_settings)
|
||||
|
||||
class _DummyException(Exception):
    """Nothing should ever raise this exception"""
    # Used as the except-clause fallback in build_sdist() so that backends
    # without an UnsupportedOperation attribute catch nothing.


class GotUnsupportedOperation(Exception):
    """For internal use when backend raises UnsupportedOperation"""
|
||||
|
||||
def build_sdist(sdist_directory, config_settings):
    """Invoke the mandatory build_sdist hook.

    Translates the backend's own ``UnsupportedOperation`` (if it defines
    one) into our ``GotUnsupportedOperation``.
    """
    backend = _build_backend()
    try:
        return backend.build_sdist(sdist_directory, config_settings)
    except getattr(backend, 'UnsupportedOperation', _DummyException):
        # If the backend has no UnsupportedOperation attribute, this clause
        # catches _DummyException, which nothing raises -- i.e. it catches
        # nothing at all in that case.
        raise GotUnsupportedOperation
|
||||
|
||||
# Whitelist of hook names main() is allowed to dispatch to; prevents the
# command-line argument from invoking arbitrary functions in this module.
HOOK_NAMES = {
    'get_requires_for_build_wheel',
    'prepare_metadata_for_build_wheel',
    'build_wheel',
    'get_requires_for_build_sdist',
    'build_sdist',
}
|
||||
|
||||
def main():
    """Subprocess entry point: run one named hook and record its result.

    Expects argv = [hook_name, control_dir].  Reads the hook's kwargs from
    <control_dir>/input.json and writes {'return_val': ..., 'unsupported':
    bool} to <control_dir>/output.json.
    """
    if len(sys.argv) < 3:
        sys.exit("Needs args: hook_name, control_dir")
    hook_name = sys.argv[1]
    control_dir = sys.argv[2]
    if hook_name not in HOOK_NAMES:
        # Only whitelisted hooks may be dispatched.
        sys.exit("Unknown hook: %s" % hook_name)
    hook = globals()[hook_name]

    hook_input = compat.read_json(pjoin(control_dir, 'input.json'))

    json_out = {'unsupported': False, 'return_val': None}
    try:
        json_out['return_val'] = hook(**hook_input['kwargs'])
    except GotUnsupportedOperation:
        # Backend signalled it cannot perform the operation.
        json_out['unsupported'] = True

    compat.write_json(json_out, pjoin(control_dir, 'output.json'), indent=2)


if __name__ == '__main__':
    main()
|
||||
@@ -0,0 +1,194 @@
|
||||
"""Check a project and backend by attempting to build using PEP 517 hooks.
|
||||
"""
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
from os.path import isfile, join as pjoin
|
||||
from pip._vendor.pytoml import TomlError, load as toml_load
|
||||
import shutil
|
||||
from subprocess import CalledProcessError
|
||||
import sys
|
||||
import tarfile
|
||||
from tempfile import mkdtemp
|
||||
import zipfile
|
||||
|
||||
from .colorlog import enable_colourful_output
|
||||
from .envbuild import BuildEnvironment
|
||||
from .wrappers import Pep517HookCaller
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def check_build_sdist(hooks):
    """Try building an sdist via the given hook caller.

    Installs static and dynamic build dependencies into a temporary
    environment, invokes build_sdist, and validates the produced file.
    Returns True on success, False on any failure (details are logged).
    """
    with BuildEnvironment() as env:
        try:
            env.pip_install(hooks.build_sys_requires)
            log.info('Installed static build dependencies')
        except CalledProcessError:
            log.error('Failed to install static build dependencies')
            return False

        try:
            reqs = hooks.get_requires_for_build_sdist({})
            log.info('Got build requires: %s', reqs)
        except Exception:
            # Was a bare `except:`, which would also swallow SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            log.error('Failure in get_requires_for_build_sdist', exc_info=True)
            return False

        try:
            env.pip_install(reqs)
            log.info('Installed dynamic build dependencies')
        except CalledProcessError:
            log.error('Failed to install dynamic build dependencies')
            return False

        td = mkdtemp()
        log.info('Trying to build sdist in %s', td)
        try:
            try:
                filename = hooks.build_sdist(td, {})
                log.info('build_sdist returned %r', filename)
            except Exception:
                # Narrowed from a bare `except:` (see above).
                log.info('Failure in build_sdist', exc_info=True)
                return False

            if not filename.endswith('.tar.gz'):
                log.error("Filename %s doesn't have .tar.gz extension", filename)
                return False

            path = pjoin(td, filename)
            if isfile(path):
                log.info("Output file %s exists", path)
            else:
                log.error("Output file %s does not exist", path)
                return False

            if tarfile.is_tarfile(path):
                log.info("Output file is a tar file")
            else:
                log.error("Output file is not a tar file")
                return False

        finally:
            shutil.rmtree(td)

        return True
|
||||
|
||||
def check_build_wheel(hooks):
    """Try building a wheel via the given hook caller.

    Installs static and dynamic build dependencies into a temporary
    environment, invokes build_wheel, and validates the produced file.
    Returns True on success, False on any failure (details are logged).
    """
    with BuildEnvironment() as env:
        try:
            env.pip_install(hooks.build_sys_requires)
            log.info('Installed static build dependencies')
        except CalledProcessError:
            log.error('Failed to install static build dependencies')
            return False

        try:
            reqs = hooks.get_requires_for_build_wheel({})
            log.info('Got build requires: %s', reqs)
        except Exception:
            # Was a bare `except:`; narrowed to Exception.  Also fixed the
            # message, which wrongly named get_requires_for_build_sdist.
            log.error('Failure in get_requires_for_build_wheel', exc_info=True)
            return False

        try:
            env.pip_install(reqs)
            log.info('Installed dynamic build dependencies')
        except CalledProcessError:
            log.error('Failed to install dynamic build dependencies')
            return False

        td = mkdtemp()
        log.info('Trying to build wheel in %s', td)
        try:
            try:
                filename = hooks.build_wheel(td, {})
                log.info('build_wheel returned %r', filename)
            except Exception:
                # Narrowed from a bare `except:` (see above).
                log.info('Failure in build_wheel', exc_info=True)
                return False

            if not filename.endswith('.whl'):
                log.error("Filename %s doesn't have .whl extension", filename)
                return False

            path = pjoin(td, filename)
            if isfile(path):
                log.info("Output file %s exists", path)
            else:
                log.error("Output file %s does not exist", path)
                return False

            if zipfile.is_zipfile(path):
                log.info("Output file is a zip file")
            else:
                log.error("Output file is not a zip file")
                return False

        finally:
            shutil.rmtree(td)

        return True
|
||||
|
||||
|
||||
def check(source_dir):
    """Validate pyproject.toml in *source_dir* and try building sdist + wheel.

    Returns True when the mandatory checks pass (see note on the return
    value below).
    """
    pyproject = pjoin(source_dir, 'pyproject.toml')
    if isfile(pyproject):
        log.info('Found pyproject.toml')
    else:
        log.error('Missing pyproject.toml')
        return False

    try:
        with open(pyproject) as f:
            pyproject_data = toml_load(f)
        # Ensure the mandatory data can be loaded
        buildsys = pyproject_data['build-system']
        requires = buildsys['requires']
        backend = buildsys['build-backend']
        log.info('Loaded pyproject.toml')
    except (TomlError, KeyError):
        log.error("Invalid pyproject.toml", exc_info=True)
        return False

    hooks = Pep517HookCaller(source_dir, backend)

    sdist_ok = check_build_sdist(hooks)
    wheel_ok = check_build_wheel(hooks)

    if not sdist_ok:
        log.warning('Sdist checks failed; scroll up to see')
    if not wheel_ok:
        log.warning('Wheel checks failed')

    # NOTE(review): only the sdist result decides the outcome -- wheel
    # failures are logged but do not fail the check.  Confirm this is
    # intentional.
    return sdist_ok
|
||||
|
||||
|
||||
def main(argv=None):
    """Command-line entry point: check the project in the given directory.

    Exits with status 1 if the checks fail.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('source_dir',
                    help="A directory containing pyproject.toml")
    args = ap.parse_args(argv)

    enable_colourful_output()

    ok = check(args.source_dir)

    if ok:
        print(ansi('Checks passed', 'green'))
    else:
        print(ansi('Checks failed', 'red'))
        sys.exit(1)
|
||||
|
||||
# Terminal escape sequences used for simple message colouring.
ansi_codes = {
    'reset': '\x1b[0m',
    'bold': '\x1b[1m',
    'red': '\x1b[31m',
    'green': '\x1b[32m',
}


def ansi(s, attr):
    """Return str(s) wrapped in the escape codes for *attr*.

    Colour codes are only emitted on non-Windows platforms when stdout
    is a tty; otherwise the plain string is returned.
    """
    text = str(s)
    if os.name == 'nt' or not sys.stdout.isatty():
        return text
    return ansi_codes[attr] + text + ansi_codes['reset']
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -0,0 +1,110 @@
|
||||
"""Nicer log formatting with colours.
|
||||
|
||||
Code copied from Tornado, Apache licensed.
|
||||
"""
|
||||
# Copyright 2012 Facebook
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
try:
|
||||
import curses
|
||||
except ImportError:
|
||||
curses = None
|
||||
|
||||
def _stderr_supports_color():
|
||||
color = False
|
||||
if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
|
||||
try:
|
||||
curses.setupterm()
|
||||
if curses.tigetnum("colors") > 0:
|
||||
color = True
|
||||
except Exception:
|
||||
pass
|
||||
return color
|
||||
|
||||
class LogFormatter(logging.Formatter):
    """Log formatter with colour support

    Renders each record as ``message <padding> L-logger.name``, where ``L``
    is the level initial, right-aligned to the terminal width, colourised
    per level when the terminal supports it.
    """
    # Map logging levels to terminal colour numbers (setaf/setf parameter).
    DEFAULT_COLORS = {
        logging.INFO: 2,  # Green
        logging.WARNING: 3,  # Yellow
        logging.ERROR: 1,  # Red
        logging.CRITICAL: 1,
    }

    def __init__(self, color=True, datefmt=None):
        r"""
        :arg bool color: Enables color support.
        :arg string fmt: Log message format.
        It will be applied to the attributes dict of log records. The
        text between ``%(color)s`` and ``%(end_color)s`` will be colored
        depending on the level if color support is on.
        :arg dict colors: color mappings from logging level to terminal color
        code
        :arg string datefmt: Datetime format.
        Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
        .. versionchanged:: 3.2
        Added ``fmt`` and ``datefmt`` arguments.
        """
        logging.Formatter.__init__(self, datefmt=datefmt)
        # levelno -> escape sequence that starts that level's colour.
        self._colors = {}
        if color and _stderr_supports_color():
            # The curses module has some str/bytes confusion in
            # python3. Until version 3.2.3, most methods return
            # bytes, but only accept strings. In addition, we want to
            # output these strings with the logging module, which
            # works with unicode strings. The explicit calls to
            # unicode() below are harmless in python2 but will do the
            # right conversion in python 3.
            fg_color = (curses.tigetstr("setaf") or
                        curses.tigetstr("setf") or "")
            if (3, 0) < sys.version_info < (3, 2, 3):
                fg_color = str(fg_color, "ascii")

            for levelno, code in self.DEFAULT_COLORS.items():
                self._colors[levelno] = str(curses.tparm(fg_color, code), "ascii")
            # Escape sequence that resets all attributes.
            self._normal = str(curses.tigetstr("sgr0"), "ascii")

            # Briefly enter curses mode to read the terminal width.
            scr = curses.initscr()
            self.termwidth = scr.getmaxyx()[1]
            curses.endwin()
        else:
            self._normal = ''
            # Default width is usually 80, but too wide is worse than too narrow
            self.termwidth = 70

    def formatMessage(self, record):
        # Pad between the message and the right-aligned "L-name" tag so the
        # tag ends at the terminal width; fall back to one space when the
        # line is too long.
        l = len(record.message)
        right_text = '{initial}-{name}'.format(initial=record.levelname[0],
                                               name=record.name)
        if l + len(right_text) < self.termwidth:
            space = ' ' * (self.termwidth - (l + len(right_text)))
        else:
            space = '  '
        return record.message + space + start_color + right_text + end_color
|
||||
|
||||
def enable_colourful_output(level=logging.INFO):
    """Attach a colourised LogFormatter handler to the root logger.

    :arg level: root logger level to set (default INFO).
    """
    handler = logging.StreamHandler()
    handler.setFormatter(LogFormatter())
    logging.root.addHandler(handler)
    logging.root.setLevel(level)
|
||||
@@ -0,0 +1,23 @@
|
||||
"""Handle reading and writing JSON in UTF-8, on Python 3 and 2."""
|
||||
import json
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] < 3:
    # Python 2: open in binary mode; json handles the encoding itself.
    def write_json(obj, path, **kwargs):
        """Serialize *obj* as UTF-8 JSON to *path*."""
        with open(path, 'wb') as fp:
            json.dump(obj, fp, encoding='utf-8', **kwargs)

    def read_json(path):
        """Load JSON from the UTF-8 file at *path*."""
        with open(path, 'rb') as fp:
            return json.load(fp)

else:
    # Python 3: open in text mode with an explicit UTF-8 encoding.
    def write_json(obj, path, **kwargs):
        """Serialize *obj* as UTF-8 JSON to *path*."""
        with open(path, 'w', encoding='utf-8') as fp:
            json.dump(obj, fp, **kwargs)

    def read_json(path):
        """Load JSON from the UTF-8 file at *path*."""
        with open(path, 'r', encoding='utf-8') as fp:
            return json.load(fp)
|
||||
@@ -0,0 +1,150 @@
|
||||
"""Build wheels/sdists by installing build deps to a temporary environment.
|
||||
"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
from pip._vendor import pytoml
|
||||
import shutil
|
||||
from subprocess import check_call
|
||||
import sys
|
||||
from sysconfig import get_paths
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from .wrappers import Pep517HookCaller
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def _load_pyproject(source_dir):
    """Read pyproject.toml from *source_dir*.

    Returns a tuple ``(requires, build_backend)`` from the mandatory
    ``[build-system]`` table; raises KeyError if either key is missing.
    """
    with open(os.path.join(source_dir, 'pyproject.toml')) as f:
        pyproject_data = pytoml.load(f)
    buildsys = pyproject_data['build-system']
    return buildsys['requires'], buildsys['build-backend']
|
||||
|
||||
|
||||
class BuildEnvironment(object):
    """Context manager to install build deps in a simple temporary environment

    Based on code I wrote for pip, which is MIT licensed.
    """
    # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
    #
    # Permission is hereby granted, free of charge, to any person obtaining
    # a copy of this software and associated documentation files (the
    # "Software"), to deal in the Software without restriction, including
    # without limitation the rights to use, copy, modify, merge, publish,
    # distribute, sublicense, and/or sell copies of the Software, and to
    # permit persons to whom the Software is furnished to do so, subject to
    # the following conditions:
    #
    # The above copyright notice and this permission notice shall be
    # included in all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

    # Path of the temporary install prefix; set in __enter__.
    path = None

    def __init__(self, cleanup=True):
        # cleanup: remove the temporary prefix and restore the process
        # environment on exit.
        self._cleanup = cleanup

    def __enter__(self):
        """Create a temp prefix and point PATH/PYTHONPATH into it."""
        self.path = mkdtemp(prefix='pep517-build-env-')
        log.info('Temporary build environment: %s', self.path)

        # Remember the original environment so __exit__ can restore it.
        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        # Prepend the prefix's scripts dir to PATH.
        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # Prepend the prefix's library dirs to PYTHONPATH (pure and
        # platform-specific dirs may coincide).
        if install_dirs['purelib'] == install_dirs['platlib']:
            lib_dirs = install_dirs['purelib']
        else:
            lib_dirs = install_dirs['purelib'] + os.pathsep + \
                install_dirs['platlib']
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        return self

    def pip_install(self, reqs):
        """Install dependencies into this env by calling pip in a subprocess"""
        if not reqs:
            return
        log.info('Calling pip to install %s', reqs)
        check_call([sys.executable, '-m', 'pip', 'install', '--ignore-installed',
                    '--prefix', self.path] + list(reqs))

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Remove the prefix (if cleanup is enabled) and restore the env."""
        if self._cleanup and (self.path is not None) and os.path.isdir(self.path):
            shutil.rmtree(self.path)

        if self.save_path is None:
            os.environ.pop('PATH', None)
        else:
            os.environ['PATH'] = self.save_path

        if self.save_pythonpath is None:
            os.environ.pop('PYTHONPATH', None)
        else:
            os.environ['PYTHONPATH'] = self.save_pythonpath
|
||||
|
||||
def build_wheel(source_dir, wheel_dir, config_settings=None):
    """Build a wheel from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str wheel_dir: Target directory to create wheel in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    if config_settings is None:
        config_settings = {}
    requires, backend = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend)

    with BuildEnvironment() as env:
        # Static requirements from pyproject.toml first, then whatever the
        # backend requests dynamically.
        env.pip_install(requires)
        env.pip_install(hooks.get_requires_for_build_wheel(config_settings))
        return hooks.build_wheel(wheel_dir, config_settings)
|
||||
|
||||
|
||||
def build_sdist(source_dir, sdist_dir, config_settings=None):
    """Build an sdist from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str sdist_dir: Target directory to place sdist in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    if config_settings is None:
        config_settings = {}
    requires, backend = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend)

    with BuildEnvironment() as env:
        # Static requirements from pyproject.toml first, then whatever the
        # backend requests dynamically.
        env.pip_install(requires)
        env.pip_install(hooks.get_requires_for_build_sdist(config_settings))
        return hooks.build_sdist(sdist_dir, config_settings)
|
||||
@@ -0,0 +1,134 @@
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
from os.path import dirname, abspath, join as pjoin
|
||||
import shutil
|
||||
from subprocess import check_call
|
||||
import sys
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from . import compat
|
||||
|
||||
# Absolute path of the helper script that runs hooks in a subprocess.
_in_proc_script = pjoin(dirname(abspath(__file__)), '_in_process.py')
|
||||
|
||||
@contextmanager
def tempdir():
    """Yield a freshly created temporary directory, removing it on exit."""
    path = mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
|
||||
|
||||
class UnsupportedOperation(Exception):
    """May be raised by build_sdist if the backend indicates that it can't."""
|
||||
|
||||
class Pep517HookCaller(object):
    """A wrapper around a source directory to be built with a PEP 517 backend.

    source_dir : The path to the source directory, containing pyproject.toml.
    backend : The build backend spec, as per PEP 517, from pyproject.toml.

    Each hook is executed in a fresh subprocess (see ``_call_hook``), with
    arguments and results exchanged via JSON files in a temp directory.
    """
    def __init__(self, source_dir, build_backend):
        self.source_dir = abspath(source_dir)
        self.build_backend = build_backend

    def get_requires_for_build_wheel(self, config_settings=None):
        """Identify packages required for building a wheel

        Returns a list of dependency specifications, e.g.:
        ["wheel >= 0.25", "setuptools"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_wheel', {
            'config_settings': config_settings
        })

    def prepare_metadata_for_build_wheel(self, metadata_directory, config_settings=None):
        """Prepare a *.dist-info folder with metadata for this project.

        Returns the name of the newly created folder.

        If the build backend defines a hook with this name, it will be called
        in a subprocess. If not, the backend will be asked to build a wheel,
        and the dist-info extracted from that.
        """
        return self._call_hook('prepare_metadata_for_build_wheel', {
            'metadata_directory': abspath(metadata_directory),
            'config_settings': config_settings,
        })

    def build_wheel(self, wheel_directory, config_settings=None, metadata_directory=None):
        """Build a wheel from this project.

        Returns the name of the newly created file.

        In general, this will call the 'build_wheel' hook in the backend.
        However, if that was previously called by
        'prepare_metadata_for_build_wheel', and the same metadata_directory is
        used, the previously built wheel will be copied to wheel_directory.
        """
        if metadata_directory is not None:
            metadata_directory = abspath(metadata_directory)
        return self._call_hook('build_wheel', {
            'wheel_directory': abspath(wheel_directory),
            'config_settings': config_settings,
            'metadata_directory': metadata_directory,
        })

    def get_requires_for_build_sdist(self, config_settings=None):
        """Identify packages required for building a wheel

        Returns a list of dependency specifications, e.g.:
        ["setuptools >= 26"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_sdist', {
            'config_settings': config_settings
        })

    def build_sdist(self, sdist_directory, config_settings=None):
        """Build an sdist from this project.

        Returns the name of the newly created file.

        This calls the 'build_sdist' backend hook in a subprocess.

        :raises UnsupportedOperation: if the backend reports it cannot
            build an sdist.
        """
        return self._call_hook('build_sdist', {
            'sdist_directory': abspath(sdist_directory),
            'config_settings': config_settings,
        })


    def _call_hook(self, hook_name, kwargs):
        """Run *hook_name* in a subprocess, passing *kwargs* via JSON.

        The backend spec is handed to the child through the
        PEP517_BUILD_BACKEND environment variable; input/output travel
        through input.json/output.json in a temp directory.
        """
        env = os.environ.copy()

        # On Python 2, pytoml returns Unicode values (which is correct) but the
        # environment passed to check_call needs to contain string values. We
        # convert here by encoding using ASCII (the backend can only contain
        # letters, digits and _, . and : characters, and will be used as a
        # Python identifier, so non-ASCII content is wrong on Python 2 in
        # any case).
        if sys.version_info[0] == 2:
            build_backend = self.build_backend.encode('ASCII')
        else:
            build_backend = self.build_backend

        env['PEP517_BUILD_BACKEND'] = build_backend
        with tempdir() as td:
            compat.write_json({'kwargs': kwargs}, pjoin(td, 'input.json'),
                              indent=2)

            # Run the hook in a subprocess
            check_call([sys.executable, _in_proc_script, hook_name, td],
                       cwd=self.source_dir, env=env)

            data = compat.read_json(pjoin(td, 'output.json'))
            if data.get('unsupported'):
                raise UnsupportedOperation
            return data['return_val']
|
||||
|
||||
Reference in New Issue
Block a user