Add python venv
This commit is contained in:
@ -0,0 +1,6 @@
|
||||
"""Wrappers to build Python packages using PEP 517 hooks
|
||||
"""
|
||||
|
||||
__version__ = '0.12.0'
|
||||
|
||||
from .wrappers import * # noqa: F401, F403
|
127
utils/python-venv/Lib/site-packages/pip/_vendor/pep517/build.py
Normal file
127
utils/python-venv/Lib/site-packages/pip/_vendor/pep517/build.py
Normal file
@ -0,0 +1,127 @@
|
||||
"""Build a project using PEP 517 hooks.
|
||||
"""
|
||||
import argparse
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from .envbuild import BuildEnvironment
|
||||
from .wrappers import Pep517HookCaller
|
||||
from .dirtools import tempdir, mkdir_p
|
||||
from .compat import FileNotFoundError, toml_load
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def validate_system(system):
    """Raise ValueError unless *system* has 'requires' and 'build-backend'."""
    required = {'requires', 'build-backend'}
    missing = required - set(system)
    if missing:
        raise ValueError(
            "Missing required fields: {missing}".format(missing=missing)
        )
|
||||
|
||||
|
||||
def load_system(source_dir):
    """Read and return the [build-system] table of *source_dir*/pyproject.toml.

    Raises FileNotFoundError if the file is absent and KeyError if the
    table is missing.
    """
    pyproject_path = os.path.join(source_dir, 'pyproject.toml')
    with io.open(pyproject_path, 'rb') as fp:
        data = toml_load(fp)
    return data['build-system']
|
||||
|
||||
|
||||
def compat_system(source_dir):
    """
    Given a source dir, attempt to get a build system backend
    and requirements from pyproject.toml. Fallback to
    setuptools but only if the file was not found or a build
    system was not indicated.
    """
    try:
        system = load_system(source_dir)
    except (FileNotFoundError, KeyError):
        system = {}
    fallback = {
        'build-backend': 'setuptools.build_meta:__legacy__',
        'requires': ['setuptools', 'wheel'],
    }
    for key, value in fallback.items():
        system.setdefault(key, value)
    return system
|
||||
|
||||
|
||||
def _do_build(hooks, env, dist, dest):
    # Resolve the hook pair for this distribution type ('sdist' or 'wheel').
    get_requires = getattr(hooks, 'get_requires_for_build_{dist}'.format(dist=dist))
    reqs = get_requires({})
    log.info('Got build requires: %s', reqs)

    env.pip_install(reqs)
    log.info('Installed dynamic build dependencies')

    with tempdir() as workdir:
        log.info('Trying to build %s in %s', dist, workdir)
        builder = getattr(hooks, 'build_{dist}'.format(dist=dist))
        built_name = builder(workdir, {})
        shutil.move(
            os.path.join(workdir, built_name),
            os.path.join(dest, os.path.basename(built_name)),
        )
|
||||
|
||||
|
||||
def build(source_dir, dist, dest=None, system=None):
    """Build one distribution (*dist* is 'sdist' or 'wheel') into *dest*.

    *dest* is interpreted relative to *source_dir* and defaults to 'dist'.
    An explicit *system* dict overrides reading pyproject.toml.
    """
    if not system:
        system = load_system(source_dir)
    dest = os.path.join(source_dir, dest or 'dist')
    mkdir_p(dest)

    validate_system(system)
    hooks = Pep517HookCaller(
        source_dir, system['build-backend'], system.get('backend-path')
    )

    with BuildEnvironment() as env:
        env.pip_install(system['requires'])
        _do_build(hooks, env, dist, dest)
|
||||
|
||||
|
||||
# Command-line interface: which source tree to build, and which dist types.
parser = argparse.ArgumentParser()
parser.add_argument(
    'source_dir',
    help="A directory containing pyproject.toml",
)
parser.add_argument('--binary', '-b', action='store_true', default=False)
parser.add_argument('--source', '-s', action='store_true', default=False)
parser.add_argument(
    '--out-dir', '-o',
    help="Destination in which to save the builds relative to source dir",
)
|
||||
|
||||
|
||||
def main(args):
    """CLI entry point: build the distributions selected by *args*."""
    log.warning('pep517.build is deprecated. '
                'Consider switching to https://pypi.org/project/build/')

    # No flags builds both; -s / -b narrow the selection.
    targets = []
    if args.source or not args.binary:
        targets.append('sdist')
    if args.binary or not args.source:
        targets.append('wheel')

    for dist in targets:
        build(args.source_dir, dist, args.out_dir)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(parser.parse_args())
|
207
utils/python-venv/Lib/site-packages/pip/_vendor/pep517/check.py
Normal file
207
utils/python-venv/Lib/site-packages/pip/_vendor/pep517/check.py
Normal file
@ -0,0 +1,207 @@
|
||||
"""Check a project and backend by attempting to build using PEP 517 hooks.
|
||||
"""
|
||||
import argparse
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
from os.path import isfile, join as pjoin
|
||||
import shutil
|
||||
from subprocess import CalledProcessError
|
||||
import sys
|
||||
import tarfile
|
||||
from tempfile import mkdtemp
|
||||
import zipfile
|
||||
|
||||
from .colorlog import enable_colourful_output
|
||||
from .compat import TOMLDecodeError, toml_load
|
||||
from .envbuild import BuildEnvironment
|
||||
from .wrappers import Pep517HookCaller
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_build_sdist(hooks, build_sys_requires):
    """Try to build an sdist via the PEP 517 hooks; return True on success."""
    with BuildEnvironment() as env:
        # Static requirements come from pyproject.toml's [build-system].
        try:
            env.pip_install(build_sys_requires)
            log.info('Installed static build dependencies')
        except CalledProcessError:
            log.error('Failed to install static build dependencies')
            return False

        # Dynamic requirements are whatever the backend asks for at runtime.
        try:
            reqs = hooks.get_requires_for_build_sdist({})
            log.info('Got build requires: %s', reqs)
        except Exception:
            log.error('Failure in get_requires_for_build_sdist', exc_info=True)
            return False

        try:
            env.pip_install(reqs)
            log.info('Installed dynamic build dependencies')
        except CalledProcessError:
            log.error('Failed to install dynamic build dependencies')
            return False

        td = mkdtemp()
        log.info('Trying to build sdist in %s', td)
        try:
            try:
                filename = hooks.build_sdist(td, {})
                log.info('build_sdist returned %r', filename)
            except Exception:
                log.info('Failure in build_sdist', exc_info=True)
                return False

            # Validate the artifact: name, existence, and archive format.
            if not filename.endswith('.tar.gz'):
                log.error(
                    "Filename %s doesn't have .tar.gz extension", filename)
                return False

            path = pjoin(td, filename)
            if isfile(path):
                log.info("Output file %s exists", path)
            else:
                log.error("Output file %s does not exist", path)
                return False

            if tarfile.is_tarfile(path):
                log.info("Output file is a tar file")
            else:
                log.error("Output file is not a tar file")
                return False

        finally:
            # Always clean the temp dir, even on early returns above.
            shutil.rmtree(td)

        return True
|
||||
|
||||
|
||||
def check_build_wheel(hooks, build_sys_requires):
    """Try to build a wheel via the PEP 517 hooks; return True on success.

    Installs the static build requirements, queries and installs the
    backend's dynamic requirements, builds a wheel in a temp dir and
    validates the resulting artifact.
    """
    with BuildEnvironment() as env:
        try:
            env.pip_install(build_sys_requires)
            log.info('Installed static build dependencies')
        except CalledProcessError:
            log.error('Failed to install static build dependencies')
            return False

        try:
            reqs = hooks.get_requires_for_build_wheel({})
            log.info('Got build requires: %s', reqs)
        except Exception:
            # Fixed: this message previously named the sdist hook
            # (copy-paste from check_build_sdist).
            log.error('Failure in get_requires_for_build_wheel', exc_info=True)
            return False

        try:
            env.pip_install(reqs)
            log.info('Installed dynamic build dependencies')
        except CalledProcessError:
            log.error('Failed to install dynamic build dependencies')
            return False

        td = mkdtemp()
        log.info('Trying to build wheel in %s', td)
        try:
            try:
                filename = hooks.build_wheel(td, {})
                log.info('build_wheel returned %r', filename)
            except Exception:
                log.info('Failure in build_wheel', exc_info=True)
                return False

            # Validate the artifact: name, existence, and archive format.
            if not filename.endswith('.whl'):
                log.error("Filename %s doesn't have .whl extension", filename)
                return False

            path = pjoin(td, filename)
            if isfile(path):
                log.info("Output file %s exists", path)
            else:
                log.error("Output file %s does not exist", path)
                return False

            if zipfile.is_zipfile(path):
                log.info("Output file is a zip file")
            else:
                log.error("Output file is not a zip file")
                return False

        finally:
            # Always clean the temp dir, even on early returns above.
            shutil.rmtree(td)

        return True
|
||||
|
||||
|
||||
def check(source_dir):
    """Validate pyproject.toml, then attempt both sdist and wheel builds.

    Returns the sdist result; the wheel result only triggers a warning.
    """
    pyproject = pjoin(source_dir, 'pyproject.toml')
    if not isfile(pyproject):
        log.error('Missing pyproject.toml')
        return False
    log.info('Found pyproject.toml')

    try:
        with io.open(pyproject, 'rb') as f:
            pyproject_data = toml_load(f)
        # Ensure the mandatory data can be loaded
        buildsys = pyproject_data['build-system']
        requires = buildsys['requires']
        backend = buildsys['build-backend']
        backend_path = buildsys.get('backend-path')
        log.info('Loaded pyproject.toml')
    except (TOMLDecodeError, KeyError):
        log.error("Invalid pyproject.toml", exc_info=True)
        return False

    hooks = Pep517HookCaller(source_dir, backend, backend_path)

    sdist_ok = check_build_sdist(hooks, requires)
    wheel_ok = check_build_wheel(hooks, requires)

    if not sdist_ok:
        log.warning('Sdist checks failed; scroll up to see')
    if not wheel_ok:
        log.warning('Wheel checks failed')

    # NOTE(review): only the sdist result decides the return value —
    # presumably intentional upstream, but worth confirming.
    return sdist_ok
|
||||
|
||||
|
||||
def main(argv=None):
    """CLI entry point: check the project in the given source dir."""
    log.warning('pep517.check is deprecated. '
                'Consider switching to https://pypi.org/project/build/')

    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        'source_dir',
        help="A directory containing pyproject.toml")
    options = arg_parser.parse_args(argv)

    enable_colourful_output()

    if check(options.source_dir):
        print(ansi('Checks passed', 'green'))
    else:
        print(ansi('Checks failed', 'red'))
        sys.exit(1)
|
||||
|
||||
|
||||
# SGR escape sequences for terminal colouring.
ansi_codes = {
    'reset': '\x1b[0m',
    'bold': '\x1b[1m',
    'red': '\x1b[31m',
    'green': '\x1b[32m',
}


def ansi(s, attr):
    """Wrap *s* in the ANSI escape for *attr* when stdout is a POSIX tty."""
    text = str(s)
    if os.name == 'nt' or not sys.stdout.isatty():
        return text
    return ansi_codes[attr] + text + ansi_codes['reset']
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -0,0 +1,115 @@
|
||||
"""Nicer log formatting with colours.
|
||||
|
||||
Code copied from Tornado, Apache licensed.
|
||||
"""
|
||||
# Copyright 2012 Facebook
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
try:
|
||||
import curses
|
||||
except ImportError:
|
||||
curses = None
|
||||
|
||||
|
||||
def _stderr_supports_color():
|
||||
color = False
|
||||
if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
|
||||
try:
|
||||
curses.setupterm()
|
||||
if curses.tigetnum("colors") > 0:
|
||||
color = True
|
||||
except Exception:
|
||||
pass
|
||||
return color
|
||||
|
||||
|
||||
class LogFormatter(logging.Formatter):
    """Log formatter with colour support
    """
    # Terminal colour index per level, passed to setaf/setf via tparm.
    DEFAULT_COLORS = {
        logging.INFO: 2,  # Green
        logging.WARNING: 3,  # Yellow
        logging.ERROR: 1,  # Red
        logging.CRITICAL: 1,
    }

    def __init__(self, color=True, datefmt=None):
        r"""
        :arg bool color: Enables color support.
        :arg string fmt: Log message format.
        It will be applied to the attributes dict of log records. The
        text between ``%(color)s`` and ``%(end_color)s`` will be colored
        depending on the level if color support is on.
        :arg dict colors: color mappings from logging level to terminal color
        code
        :arg string datefmt: Datetime format.
        Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
        .. versionchanged:: 3.2
        Added ``fmt`` and ``datefmt`` arguments.
        """
        logging.Formatter.__init__(self, datefmt=datefmt)
        # Maps levelno -> escape sequence that starts that level's colour.
        self._colors = {}
        if color and _stderr_supports_color():
            # The curses module has some str/bytes confusion in
            # python3. Until version 3.2.3, most methods return
            # bytes, but only accept strings. In addition, we want to
            # output these strings with the logging module, which
            # works with unicode strings. The explicit calls to
            # unicode() below are harmless in python2 but will do the
            # right conversion in python 3.
            fg_color = (curses.tigetstr("setaf") or
                        curses.tigetstr("setf") or "")
            if (3, 0) < sys.version_info < (3, 2, 3):
                fg_color = str(fg_color, "ascii")

            for levelno, code in self.DEFAULT_COLORS.items():
                self._colors[levelno] = str(
                    curses.tparm(fg_color, code), "ascii")
            self._normal = str(curses.tigetstr("sgr0"), "ascii")

            # Briefly enter curses mode only to measure the terminal width.
            scr = curses.initscr()
            self.termwidth = scr.getmaxyx()[1]
            curses.endwin()
        else:
            self._normal = ''
            # Default width is usually 80, but too wide is
            # worse than too narrow
            self.termwidth = 70

    def formatMessage(self, record):
        # Right-align "<level initial>-<logger name>" at the terminal edge,
        # colouring it per level when colours are available.
        mlen = len(record.message)
        right_text = '{initial}-{name}'.format(initial=record.levelname[0],
                                               name=record.name)
        if mlen + len(right_text) < self.termwidth:
            space = ' ' * (self.termwidth - (mlen + len(right_text)))
        else:
            # Message too long to pad — fall back to a single space.
            space = ' '

        if record.levelno in self._colors:
            start_color = self._colors[record.levelno]
            end_color = self._normal
        else:
            start_color = end_color = ''

        return record.message + space + start_color + right_text + end_color
|
||||
|
||||
|
||||
def enable_colourful_output(level=logging.INFO):
    """Attach a colour-capable StreamHandler to the root logger."""
    root = logging.root
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(LogFormatter())
    root.addHandler(stream_handler)
    root.setLevel(level)
|
@ -0,0 +1,51 @@
|
||||
"""Python 2/3 compatibility"""
|
||||
import io
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
|
||||
|
||||
if sys.version_info[0] >= 3:
    # Python 3: go through the text layer with explicit UTF-8.
    def write_json(obj, path, **kwargs):
        with open(path, 'w', encoding='utf-8') as fp:
            json.dump(obj, fp, **kwargs)

    def read_json(path):
        with open(path, 'r', encoding='utf-8') as fp:
            return json.load(fp)

else:
    # Python 2: binary mode plus json's own encoding parameter.
    def write_json(obj, path, **kwargs):
        with open(path, 'wb') as fp:
            json.dump(obj, fp, encoding='utf-8', **kwargs)

    def read_json(path):
        with open(path, 'rb') as fp:
            return json.load(fp)
|
||||
|
||||
|
||||
# FileNotFoundError
|
||||
|
||||
try:
    # Python 3: the builtin exists; re-export it under this module's name.
    FileNotFoundError = FileNotFoundError
except NameError:
    # Python 2: IOError is the closest equivalent.
    FileNotFoundError = IOError
|
||||
|
||||
|
||||
if sys.version_info < (3, 6):
    # tomli requires Python 3.6+; older interpreters fall back to the
    # pure-Python 'toml' package, which expects a text-mode file object.
    from toml import load as _toml_load  # noqa: F401

    def toml_load(f):
        # Wrap the binary stream in a UTF-8 text layer for 'toml'; detach
        # afterwards so closing the wrapper does not close *f*.
        w = io.TextIOWrapper(f, encoding="utf8", newline="")
        try:
            return _toml_load(w)
        finally:
            w.detach()

    from toml import TomlDecodeError as TOMLDecodeError  # noqa: F401
else:
    # Modern interpreters: the vendored tomli reads binary streams directly.
    from pip._vendor.tomli import load as toml_load  # noqa: F401
    from pip._vendor.tomli import TOMLDecodeError  # noqa: F401
|
@ -0,0 +1,44 @@
|
||||
import os
|
||||
import io
|
||||
import contextlib
|
||||
import tempfile
|
||||
import shutil
|
||||
import errno
|
||||
import zipfile
|
||||
|
||||
|
||||
@contextlib.contextmanager
def tempdir():
    """Yield a fresh temporary directory, removing it on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
|
||||
|
||||
|
||||
def mkdir_p(*args, **kwargs):
    """``os.mkdir`` that tolerates an already-existing directory."""
    try:
        return os.mkdir(*args, **kwargs)
    except OSError as err:
        # Any failure other than "already exists" is still an error.
        if err.errno != errno.EEXIST:
            raise
|
||||
|
||||
|
||||
def dir_to_zipfile(root):
    """Construct an in-memory zip file mirroring the *root* directory tree.

    Returns an open ``zipfile.ZipFile`` backed by a ``BytesIO`` buffer.
    """
    buffer = io.BytesIO()
    zip_file = zipfile.ZipFile(buffer, 'w')
    # Bug fix: the loop variable previously shadowed *root*, so relpath was
    # computed against the current walk directory and nested entries were
    # flattened to their basenames.
    for dirpath, dirs, files in os.walk(root):
        for path in dirs:
            fs_path = os.path.join(dirpath, path)
            rel_path = os.path.relpath(fs_path, root)
            zip_file.writestr(rel_path + '/', '')
        for path in files:
            fs_path = os.path.join(dirpath, path)
            rel_path = os.path.relpath(fs_path, root)
            zip_file.write(fs_path, rel_path)
    return zip_file
|
@ -0,0 +1,171 @@
|
||||
"""Build wheels/sdists by installing build deps to a temporary environment.
|
||||
"""
|
||||
|
||||
import io
|
||||
import os
|
||||
import logging
|
||||
import shutil
|
||||
from subprocess import check_call
|
||||
import sys
|
||||
from sysconfig import get_paths
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from .compat import toml_load
|
||||
from .wrappers import Pep517HookCaller, LoggerWrapper
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _load_pyproject(source_dir):
    """Return (requires, build-backend, backend-path) from pyproject.toml."""
    pyproject_file = os.path.join(source_dir, 'pyproject.toml')
    with io.open(pyproject_file, 'rb') as fp:
        build_system = toml_load(fp)['build-system']
    return (
        build_system['requires'],
        build_system['build-backend'],
        build_system.get('backend-path'),
    )
|
||||
|
||||
|
||||
class BuildEnvironment(object):
    """Context manager to install build deps in a simple temporary environment

    Based on code I wrote for pip, which is MIT licensed.
    """
    # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
    #
    # Permission is hereby granted, free of charge, to any person obtaining
    # a copy of this software and associated documentation files (the
    # "Software"), to deal in the Software without restriction, including
    # without limitation the rights to use, copy, modify, merge, publish,
    # distribute, sublicense, and/or sell copies of the Software, and to
    # permit persons to whom the Software is furnished to do so, subject to
    # the following conditions:
    #
    # The above copyright notice and this permission notice shall be
    # included in all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

    # Temp dir that serves as the install prefix; set on __enter__.
    path = None

    def __init__(self, cleanup=True):
        # When True (default), the temp prefix is removed on exit.
        self._cleanup = cleanup

    def __enter__(self):
        """Create the temp prefix and point PATH/PYTHONPATH at it."""
        self.path = mkdtemp(prefix='pep517-build-env-')
        log.info('Temporary build environment: %s', self.path)

        # Remember the original values so __exit__ can restore them exactly
        # (None means the variable was unset).
        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        # Prepend the env's scripts dir so console entry points are found.
        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # Expose the env's site-packages via PYTHONPATH (purelib and
        # platlib may coincide, in which case only one entry is needed).
        if install_dirs['purelib'] == install_dirs['platlib']:
            lib_dirs = install_dirs['purelib']
        else:
            lib_dirs = install_dirs['purelib'] + os.pathsep + \
                install_dirs['platlib']
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        return self

    def pip_install(self, reqs):
        """Install dependencies into this env by calling pip in a subprocess"""
        if not reqs:
            return
        log.info('Calling pip to install %s', reqs)
        # --ignore-installed forces fresh copies under this env's prefix.
        cmd = [
            sys.executable, '-m', 'pip', 'install', '--ignore-installed',
            '--prefix', self.path] + list(reqs)
        check_call(
            cmd,
            stdout=LoggerWrapper(log, logging.INFO),
            stderr=LoggerWrapper(log, logging.ERROR),
        )

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Remove the temp prefix and restore PATH/PYTHONPATH."""
        needs_cleanup = (
            self._cleanup and
            self.path is not None and
            os.path.isdir(self.path)
        )
        if needs_cleanup:
            shutil.rmtree(self.path)

        # Restore each variable to its pre-__enter__ state; None means it
        # was unset, so delete rather than assign.
        if self.save_path is None:
            os.environ.pop('PATH', None)
        else:
            os.environ['PATH'] = self.save_path

        if self.save_pythonpath is None:
            os.environ.pop('PYTHONPATH', None)
        else:
            os.environ['PYTHONPATH'] = self.save_pythonpath
|
||||
|
||||
|
||||
def build_wheel(source_dir, wheel_dir, config_settings=None):
    """Build a wheel from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str wheel_dir: Target directory to create wheel in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    config_settings = {} if config_settings is None else config_settings
    requires, backend, backend_path = _load_pyproject(source_dir)
    caller = Pep517HookCaller(source_dir, backend, backend_path)

    with BuildEnvironment() as env:
        env.pip_install(requires)
        env.pip_install(caller.get_requires_for_build_wheel(config_settings))
        return caller.build_wheel(wheel_dir, config_settings)
|
||||
|
||||
|
||||
def build_sdist(source_dir, sdist_dir, config_settings=None):
    """Build an sdist from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str sdist_dir: Target directory to place sdist in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    config_settings = {} if config_settings is None else config_settings
    requires, backend, backend_path = _load_pyproject(source_dir)
    caller = Pep517HookCaller(source_dir, backend, backend_path)

    with BuildEnvironment() as env:
        env.pip_install(requires)
        env.pip_install(caller.get_requires_for_build_sdist(config_settings))
        return caller.build_sdist(sdist_dir, config_settings)
|
@ -0,0 +1,17 @@
|
||||
"""This is a subpackage because the directory is on sys.path for _in_process.py
|
||||
|
||||
The subpackage should stay as empty as possible to avoid shadowing modules that
|
||||
the backend might import.
|
||||
"""
|
||||
from os.path import dirname, abspath, join as pjoin
|
||||
from contextlib import contextmanager
|
||||
|
||||
try:
    import importlib.resources as resources

    def _in_proc_script_path():
        # resources.path returns a context manager yielding a concrete
        # filesystem path to _in_process.py, even when the package is
        # inside a zip archive.
        return resources.path(__package__, '_in_process.py')
except ImportError:
    # Older Pythons without importlib.resources: the package must be on
    # the filesystem, so locate the script next to this module.
    @contextmanager
    def _in_proc_script_path():
        yield pjoin(dirname(abspath(__file__)), '_in_process.py')
|
@ -0,0 +1,363 @@
|
||||
"""This is invoked in a subprocess to call the build backend hooks.
|
||||
|
||||
It expects:
|
||||
- Command line args: hook_name, control_dir
|
||||
- Environment variables:
|
||||
PEP517_BUILD_BACKEND=entry.point:spec
|
||||
PEP517_BACKEND_PATH=paths (separated with os.pathsep)
|
||||
- control_dir/input.json:
|
||||
- {"kwargs": {...}}
|
||||
|
||||
Results:
|
||||
- control_dir/output.json
|
||||
- {"return_val": ...}
|
||||
"""
|
||||
from glob import glob
|
||||
from importlib import import_module
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
from os.path import join as pjoin
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
# This file is run as a script, and `import compat` is not zip-safe, so we
|
||||
# include write_json() and read_json() from compat.py.
|
||||
#
|
||||
# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
|
||||
|
||||
if sys.version_info[0] >= 3:
    # Python 3: text mode with explicit UTF-8 encoding.
    def write_json(obj, path, **kwargs):
        with open(path, 'w', encoding='utf-8') as stream:
            json.dump(obj, stream, **kwargs)

    def read_json(path):
        with open(path, 'r', encoding='utf-8') as stream:
            return json.load(stream)

else:
    # Python 2: binary mode; json handles the encoding itself.
    def write_json(obj, path, **kwargs):
        with open(path, 'wb') as stream:
            json.dump(obj, stream, encoding='utf-8', **kwargs)

    def read_json(path):
        with open(path, 'rb') as stream:
            return json.load(stream)
|
||||
|
||||
|
||||
class BackendUnavailable(Exception):
    """Raised when the build backend cannot be imported."""

    def __init__(self, traceback):
        # Formatted traceback text from the failed import.
        self.traceback = traceback
|
||||
|
||||
|
||||
class BackendInvalid(Exception):
    """Raised when the backend breaks its contract (e.g. wrong location)."""

    def __init__(self, message):
        self.message = message
|
||||
|
||||
|
||||
class HookMissing(Exception):
    """Raised when a hook is absent and the fallback must not be used."""

    def __init__(self, hook_name=None):
        super(HookMissing, self).__init__(hook_name)
        # Name of the missing hook, when known.
        self.hook_name = hook_name
|
||||
|
||||
|
||||
def contained_in(filename, directory):
    """Test if a file is located within the given directory."""
    abs_file = os.path.normcase(os.path.abspath(filename))
    abs_dir = os.path.normcase(os.path.abspath(directory))
    return os.path.commonprefix([abs_file, abs_dir]) == abs_dir
|
||||
|
||||
|
||||
def _build_backend():
|
||||
"""Find and load the build backend"""
|
||||
# Add in-tree backend directories to the front of sys.path.
|
||||
backend_path = os.environ.get('PEP517_BACKEND_PATH')
|
||||
if backend_path:
|
||||
extra_pathitems = backend_path.split(os.pathsep)
|
||||
sys.path[:0] = extra_pathitems
|
||||
|
||||
ep = os.environ['PEP517_BUILD_BACKEND']
|
||||
mod_path, _, obj_path = ep.partition(':')
|
||||
try:
|
||||
obj = import_module(mod_path)
|
||||
except ImportError:
|
||||
raise BackendUnavailable(traceback.format_exc())
|
||||
|
||||
if backend_path:
|
||||
if not any(
|
||||
contained_in(obj.__file__, path)
|
||||
for path in extra_pathitems
|
||||
):
|
||||
raise BackendInvalid("Backend was not loaded from backend-path")
|
||||
|
||||
if obj_path:
|
||||
for path_part in obj_path.split('.'):
|
||||
obj = getattr(obj, path_part)
|
||||
return obj
|
||||
|
||||
|
||||
def _supported_features():
    """Return the list of options features supported by the backend.

    Returns a list of strings.
    The only possible value is 'build_editable'.
    """
    backend = _build_backend()
    return ["build_editable"] if hasattr(backend, "build_editable") else []
|
||||
|
||||
|
||||
def get_requires_for_build_wheel(config_settings):
    """Invoke the optional get_requires_for_build_wheel hook

    Returns [] if the hook is not defined.
    """
    backend = _build_backend()
    hook = getattr(backend, 'get_requires_for_build_wheel', None)
    if hook is None:
        return []
    return hook(config_settings)
|
||||
|
||||
|
||||
def get_requires_for_build_editable(config_settings):
    """Invoke the optional get_requires_for_build_editable hook

    Returns [] if the hook is not defined.
    """
    backend = _build_backend()
    hook = getattr(backend, 'get_requires_for_build_editable', None)
    if hook is None:
        return []
    return hook(config_settings)
|
||||
|
||||
|
||||
def prepare_metadata_for_build_wheel(
        metadata_directory, config_settings, _allow_fallback):
    """Invoke optional prepare_metadata_for_build_wheel

    Implements a fallback by building a wheel if the hook isn't defined,
    unless _allow_fallback is False in which case HookMissing is raised.
    """
    backend = _build_backend()
    hook = getattr(backend, 'prepare_metadata_for_build_wheel', None)
    if hook is not None:
        return hook(metadata_directory, config_settings)
    if not _allow_fallback:
        raise HookMissing()
    # Fallback: build a full wheel and pull the metadata out of it.
    whl_basename = backend.build_wheel(metadata_directory, config_settings)
    return _get_wheel_metadata_from_wheel(
        whl_basename, metadata_directory, config_settings)
|
||||
|
||||
|
||||
def prepare_metadata_for_build_editable(
        metadata_directory, config_settings, _allow_fallback):
    """Invoke optional prepare_metadata_for_build_editable

    Implements a fallback by building an editable wheel if the hook isn't
    defined, unless _allow_fallback is False in which case HookMissing is
    raised.
    """
    backend = _build_backend()
    hook = getattr(backend, 'prepare_metadata_for_build_editable', None)
    if hook is not None:
        return hook(metadata_directory, config_settings)
    if not _allow_fallback:
        raise HookMissing()
    # Fallback: build an editable wheel and extract its metadata; the
    # build_editable hook itself is mandatory for this path.
    build_hook = getattr(backend, 'build_editable', None)
    if build_hook is None:
        raise HookMissing(hook_name='build_editable')
    whl_basename = build_hook(metadata_directory, config_settings)
    return _get_wheel_metadata_from_wheel(
        whl_basename, metadata_directory, config_settings)
|
||||
|
||||
|
||||
WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL'
|
||||
|
||||
|
||||
def _dist_info_files(whl_zip):
|
||||
"""Identify the .dist-info folder inside a wheel ZipFile."""
|
||||
res = []
|
||||
for path in whl_zip.namelist():
|
||||
m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
|
||||
if m:
|
||||
res.append(path)
|
||||
if res:
|
||||
return res
|
||||
raise Exception("No .dist-info folder found in wheel")
|
||||
|
||||
|
||||
def _get_wheel_metadata_from_wheel(
        whl_basename, metadata_directory, config_settings):
    """Extract the metadata from a wheel.

    Fallback for when the build backend does not
    define the 'get_wheel_metadata' hook.
    """
    from zipfile import ZipFile
    # Touch the marker so build_wheel can later reuse this prebuilt wheel.
    marker = os.path.join(metadata_directory, WHEEL_BUILT_MARKER)
    with open(marker, 'wb'):
        pass

    whl_file = os.path.join(metadata_directory, whl_basename)
    with ZipFile(whl_file) as zipf:
        dist_info = _dist_info_files(zipf)
        zipf.extractall(path=metadata_directory, members=dist_info)
    # The metadata directory name is the top-level .dist-info folder.
    return dist_info[0].split('/')[0]
|
||||
|
||||
|
||||
def _find_already_built_wheel(metadata_directory):
    """Check for a wheel already built during the get_wheel_metadata hook.

    Returns the path of the single prebuilt .whl, or None if there is no
    usable candidate.
    """
    if not metadata_directory:
        return None
    parent_dir = os.path.dirname(metadata_directory)
    if not os.path.isfile(pjoin(parent_dir, WHEEL_BUILT_MARKER)):
        # No marker: nothing was prebuilt alongside this metadata dir.
        return None

    candidates = glob(os.path.join(parent_dir, '*.whl'))
    if not candidates:
        print('Found wheel built marker, but no .whl files')
        return None
    if len(candidates) == 1:
        # Exactly one .whl file
        return candidates[0]

    print('Found multiple .whl files; unspecified behaviour. '
          'Will call build_wheel.')
    return None
|
||||
|
||||
|
||||
def build_wheel(wheel_directory, config_settings, metadata_directory=None):
    """Invoke the mandatory build_wheel hook.

    If a wheel was already built in the
    prepare_metadata_for_build_wheel fallback, this
    will copy it rather than rebuilding the wheel.
    """
    cached_whl = _find_already_built_wheel(metadata_directory)
    if not cached_whl:
        # Nothing cached: delegate straight to the backend.
        return _build_backend().build_wheel(wheel_directory, config_settings,
                                            metadata_directory)
    shutil.copy2(cached_whl, wheel_directory)
    return os.path.basename(cached_whl)
|
||||
|
||||
|
||||
def build_editable(wheel_directory, config_settings, metadata_directory=None):
    """Invoke the optional build_editable hook.

    If a wheel was already built in the
    prepare_metadata_for_build_editable fallback, this
    will copy it rather than rebuilding the wheel.
    """
    backend = _build_backend()
    try:
        hook = backend.build_editable
    except AttributeError:
        # The hook is optional; signal its absence to the caller.
        raise HookMissing()

    cached_whl = _find_already_built_wheel(metadata_directory)
    if cached_whl:
        shutil.copy2(cached_whl, wheel_directory)
        return os.path.basename(cached_whl)
    return hook(wheel_directory, config_settings, metadata_directory)
|
||||
|
||||
|
||||
def get_requires_for_build_sdist(config_settings):
    """Invoke the optional get_requires_for_build_sdist hook

    Returns [] if the hook is not defined.
    """
    # NOTE: the docstring previously named get_requires_for_build_wheel,
    # a copy-paste error; this function dispatches the sdist hook.
    backend = _build_backend()
    try:
        hook = backend.get_requires_for_build_sdist
    except AttributeError:
        # The hook is optional; absent means no extra requirements.
        return []
    else:
        return hook(config_settings)
|
||||
|
||||
|
||||
# Placeholder for 'except getattr(backend, "UnsupportedOperation", ...)'
# clauses: when the backend defines no such attribute, the except clause
# targets this class, which nothing ever raises.
class _DummyException(Exception):
    """Nothing should ever raise this exception"""
|
||||
|
||||
|
||||
class GotUnsupportedOperation(Exception):
    """For internal use when backend raises UnsupportedOperation"""
    def __init__(self, traceback):
        # Formatted traceback text from the backend, reported via output.json.
        self.traceback = traceback
|
||||
|
||||
|
||||
def build_sdist(sdist_directory, config_settings):
    """Invoke the mandatory build_sdist hook."""
    backend = _build_backend()
    # If the backend defines its own UnsupportedOperation, translate it
    # into GotUnsupportedOperation; otherwise the except clause targets
    # _DummyException, which is never raised.
    unsupported_exc = getattr(backend, 'UnsupportedOperation', _DummyException)
    try:
        return backend.build_sdist(sdist_directory, config_settings)
    except unsupported_exc:
        raise GotUnsupportedOperation(traceback.format_exc())
|
||||
|
||||
|
||||
# The set of hook names this script will dispatch; anything else passed
# on the command line is rejected by main().
HOOK_NAMES = {
    'get_requires_for_build_wheel',
    'prepare_metadata_for_build_wheel',
    'build_wheel',
    'get_requires_for_build_editable',
    'prepare_metadata_for_build_editable',
    'build_editable',
    'get_requires_for_build_sdist',
    'build_sdist',
    '_supported_features',
}
|
||||
|
||||
|
||||
def main():
    """Run the hook named on the command line.

    Reads kwargs from <control_dir>/input.json, invokes the matching
    module-level hook function, and records the outcome (return value or
    error markers) in <control_dir>/output.json.
    """
    if len(sys.argv) < 3:
        sys.exit("Needs args: hook_name, control_dir")
    hook_name, control_dir = sys.argv[1], sys.argv[2]
    if hook_name not in HOOK_NAMES:
        sys.exit("Unknown hook: %s" % hook_name)
    hook = globals()[hook_name]

    hook_input = read_json(pjoin(control_dir, 'input.json'))

    result = {'unsupported': False, 'return_val': None}
    try:
        result['return_val'] = hook(**hook_input['kwargs'])
    except BackendUnavailable as e:
        result['no_backend'] = True
        result['traceback'] = e.traceback
    except BackendInvalid as e:
        result['backend_invalid'] = True
        result['backend_error'] = e.message
    except GotUnsupportedOperation as e:
        result['unsupported'] = True
        result['traceback'] = e.traceback
    except HookMissing as e:
        result['hook_missing'] = True
        result['missing_hook_name'] = e.hook_name or hook_name

    write_json(result, pjoin(control_dir, 'output.json'), indent=2)
|
||||
|
||||
|
||||
# This module is executed as a subprocess by the hook caller.
if __name__ == '__main__':
    main()
|
@ -0,0 +1,92 @@
|
||||
"""Build metadata for a project using PEP 517 hooks.
|
||||
"""
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import functools
|
||||
|
||||
try:
|
||||
import importlib.metadata as imp_meta
|
||||
except ImportError:
|
||||
import importlib_metadata as imp_meta
|
||||
|
||||
try:
|
||||
from zipfile import Path
|
||||
except ImportError:
|
||||
from zipp import Path
|
||||
|
||||
from .envbuild import BuildEnvironment
|
||||
from .wrappers import Pep517HookCaller, quiet_subprocess_runner
|
||||
from .dirtools import tempdir, mkdir_p, dir_to_zipfile
|
||||
from .build import validate_system, load_system, compat_system
|
||||
|
||||
# Module-level logger named after this module.
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _prep_meta(hooks, env, dest):
    """Install dynamic build requirements, then produce the project's
    ``*.dist-info`` metadata folder inside *dest*.
    """
    build_requires = hooks.get_requires_for_build_wheel({})
    log.info('Got build requires: %s', build_requires)

    env.pip_install(build_requires)
    log.info('Installed dynamic build dependencies')

    with tempdir() as staging:
        log.info('Trying to build metadata in %s', staging)
        dist_info = hooks.prepare_metadata_for_build_wheel(staging, {})
        destination = os.path.join(dest, os.path.basename(dist_info))
        shutil.move(os.path.join(staging, dist_info), destination)
|
||||
|
||||
|
||||
def build(source_dir='.', dest=None, system=None):
    """Build metadata for the project in *source_dir*.

    Results go into *dest* (default ``<source_dir>/dist``).  The build
    system is loaded from pyproject.toml unless supplied via *system*.
    """
    if not system:
        system = load_system(source_dir)
    dest = os.path.join(source_dir, dest or 'dist')
    mkdir_p(dest)
    validate_system(system)
    caller = Pep517HookCaller(
        source_dir, system['build-backend'], system.get('backend-path')
    )

    with caller.subprocess_runner(quiet_subprocess_runner):
        with BuildEnvironment() as env:
            # Static requirements first; _prep_meta handles dynamic ones.
            env.pip_install(system['requires'])
            _prep_meta(caller, env, dest)
|
||||
|
||||
|
||||
def build_as_zip(builder=build):
    """Run *builder* against a temporary directory and return the built
    output as an in-memory zip file object.
    """
    with tempdir() as staging:
        builder(dest=staging)
        return dir_to_zipfile(staging)
|
||||
|
||||
|
||||
def load(root):
    """
    Given a source directory (root) of a package,
    return an importlib.metadata.Distribution object
    with metadata built from that package.
    """
    source_dir = os.path.expanduser(root)
    builder = functools.partial(
        build, source_dir=source_dir, system=compat_system(source_dir)
    )
    zipped = build_as_zip(builder)
    return imp_meta.PathDistribution(Path(zipped))
|
||||
|
||||
|
||||
# Command-line interface: one positional source directory plus an optional
# output directory.  Defined at module level so main() can use it directly.
parser = argparse.ArgumentParser()
parser.add_argument(
    'source_dir',
    help="A directory containing pyproject.toml",
)
parser.add_argument(
    '--out-dir', '-o',
    help="Destination in which to save the builds relative to source dir",
)
|
||||
|
||||
|
||||
def main():
    """Script entry point: parse CLI arguments and build metadata."""
    namespace = parser.parse_args()
    build(namespace.source_dir, namespace.out_dir)
|
||||
|
||||
|
||||
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
|
@ -0,0 +1,375 @@
|
||||
import threading
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
from os.path import abspath, join as pjoin
|
||||
import shutil
|
||||
from subprocess import check_call, check_output, STDOUT
|
||||
import sys
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from . import compat
|
||||
from .in_process import _in_proc_script_path
|
||||
|
||||
# Public API of this module.
__all__ = [
    'BackendUnavailable',
    'BackendInvalid',
    'HookMissing',
    'UnsupportedOperation',
    'default_subprocess_runner',
    'quiet_subprocess_runner',
    'Pep517HookCaller',
]
|
||||
|
||||
|
||||
@contextmanager
def tempdir():
    """Yield a freshly created temporary directory, deleting it (and all
    of its contents) on exit.
    """
    path = mkdtemp()
    try:
        yield path
    finally:
        # Clean up even if the with-body raised.
        shutil.rmtree(path)
|
||||
|
||||
|
||||
class BackendUnavailable(Exception):
    """Will be raised if the backend cannot be imported in the hook process."""
    def __init__(self, traceback):
        # Formatted traceback text captured in the hook subprocess.
        self.traceback = traceback
|
||||
|
||||
|
||||
class BackendInvalid(Exception):
    """Will be raised if the backend is invalid."""
    def __init__(self, backend_name, backend_path, message):
        # The backend spec and path as read from pyproject.toml, plus the
        # error message reported by the hook subprocess.
        self.backend_name = backend_name
        self.backend_path = backend_path
        self.message = message
|
||||
|
||||
|
||||
class HookMissing(Exception):
    """Will be raised on missing hooks."""
    def __init__(self, hook_name):
        # Pass hook_name to Exception so str(exc) names the missing hook.
        super(HookMissing, self).__init__(hook_name)
        self.hook_name = hook_name
|
||||
|
||||
|
||||
class UnsupportedOperation(Exception):
    """May be raised by build_sdist if the backend indicates that it can't."""
    def __init__(self, traceback):
        # Formatted traceback text captured in the hook subprocess.
        self.traceback = traceback
|
||||
|
||||
|
||||
def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
    """The default method of calling the wrapper subprocess.

    Output is inherited from the parent process; a non-zero exit status
    raises subprocess.CalledProcessError.
    """
    run_env = dict(os.environ)
    if extra_environ:
        run_env.update(extra_environ)

    check_call(cmd, cwd=cwd, env=run_env)
|
||||
|
||||
|
||||
def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None):
    """A method of calling the wrapper subprocess while suppressing output."""
    run_env = dict(os.environ)
    if extra_environ:
        run_env.update(extra_environ)

    # Capture and discard stdout+stderr; a non-zero exit still raises.
    check_output(cmd, cwd=cwd, env=run_env, stderr=STDOUT)
|
||||
|
||||
|
||||
def norm_and_check(source_tree, requested):
    """Normalise and check a backend path.

    Ensure that the requested backend path is specified as a relative path,
    and resolves to a location under the given source tree.

    Return an absolute version of the requested path.
    """
    if os.path.isabs(requested):
        raise ValueError("paths must be relative")

    tree_abs = os.path.abspath(source_tree)
    requested_abs = os.path.normpath(os.path.join(tree_abs, requested))
    # We have to use commonprefix for Python 2.7 compatibility. So we
    # normalise case to avoid problems because commonprefix is a character
    # based comparison :-(
    tree_norm = os.path.normcase(tree_abs)
    requested_norm = os.path.normcase(requested_abs)
    if os.path.commonprefix([tree_norm, requested_norm]) != tree_norm:
        raise ValueError("paths must be inside source tree")

    return requested_abs
|
||||
|
||||
|
||||
class Pep517HookCaller(object):
    """A wrapper around a source directory to be built with a PEP 517 backend.

    :param source_dir: The path to the source directory, containing
        pyproject.toml.
    :param build_backend: The build backend spec, as per PEP 517, from
        pyproject.toml.
    :param backend_path: The backend path, as per PEP 517, from pyproject.toml.
    :param runner: A callable that invokes the wrapper subprocess.
    :param python_executable: The Python executable used to invoke the backend

    The 'runner', if provided, must expect the following:

    - cmd: a list of strings representing the command and arguments to
      execute, as would be passed to e.g. 'subprocess.check_call'.
    - cwd: a string representing the working directory that must be
      used for the subprocess. Corresponds to the provided source_dir.
    - extra_environ: a dict mapping environment variable names to values
      which must be set for the subprocess execution.
    """
    def __init__(
            self,
            source_dir,
            build_backend,
            backend_path=None,
            runner=None,
            python_executable=None,
    ):
        if runner is None:
            runner = default_subprocess_runner

        self.source_dir = abspath(source_dir)
        self.build_backend = build_backend
        if backend_path:
            # Each backend-path entry must be relative and resolve inside
            # the source tree; norm_and_check enforces both and absolutises.
            backend_path = [
                norm_and_check(self.source_dir, p) for p in backend_path
            ]
        self.backend_path = backend_path
        self._subprocess_runner = runner
        if not python_executable:
            python_executable = sys.executable
        self.python_executable = python_executable

    @contextmanager
    def subprocess_runner(self, runner):
        """A context manager for temporarily overriding the default subprocess
        runner.
        """
        prev = self._subprocess_runner
        self._subprocess_runner = runner
        try:
            yield
        finally:
            # Restore the previous runner even if the body raised.
            self._subprocess_runner = prev

    def _supported_features(self):
        """Return the list of optional features supported by the backend."""
        return self._call_hook('_supported_features', {})

    def get_requires_for_build_wheel(self, config_settings=None):
        """Identify packages required for building a wheel

        Returns a list of dependency specifications, e.g.::

            ["wheel >= 0.25", "setuptools"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_wheel', {
            'config_settings': config_settings
        })

    def prepare_metadata_for_build_wheel(
            self, metadata_directory, config_settings=None,
            _allow_fallback=True):
        """Prepare a ``*.dist-info`` folder with metadata for this project.

        Returns the name of the newly created folder.

        If the build backend defines a hook with this name, it will be called
        in a subprocess. If not, the backend will be asked to build a wheel,
        and the dist-info extracted from that (unless _allow_fallback is
        False).
        """
        return self._call_hook('prepare_metadata_for_build_wheel', {
            'metadata_directory': abspath(metadata_directory),
            'config_settings': config_settings,
            '_allow_fallback': _allow_fallback,
        })

    def build_wheel(
            self, wheel_directory, config_settings=None,
            metadata_directory=None):
        """Build a wheel from this project.

        Returns the name of the newly created file.

        In general, this will call the 'build_wheel' hook in the backend.
        However, if that was previously called by
        'prepare_metadata_for_build_wheel', and the same metadata_directory is
        used, the previously built wheel will be copied to wheel_directory.
        """
        if metadata_directory is not None:
            metadata_directory = abspath(metadata_directory)
        return self._call_hook('build_wheel', {
            'wheel_directory': abspath(wheel_directory),
            'config_settings': config_settings,
            'metadata_directory': metadata_directory,
        })

    def get_requires_for_build_editable(self, config_settings=None):
        """Identify packages required for building an editable wheel

        Returns a list of dependency specifications, e.g.::

            ["wheel >= 0.25", "setuptools"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_editable', {
            'config_settings': config_settings
        })

    def prepare_metadata_for_build_editable(
            self, metadata_directory, config_settings=None,
            _allow_fallback=True):
        """Prepare a ``*.dist-info`` folder with metadata for this project.

        Returns the name of the newly created folder.

        If the build backend defines a hook with this name, it will be called
        in a subprocess. If not, the backend will be asked to build an editable
        wheel, and the dist-info extracted from that (unless _allow_fallback is
        False).
        """
        return self._call_hook('prepare_metadata_for_build_editable', {
            'metadata_directory': abspath(metadata_directory),
            'config_settings': config_settings,
            '_allow_fallback': _allow_fallback,
        })

    def build_editable(
            self, wheel_directory, config_settings=None,
            metadata_directory=None):
        """Build an editable wheel from this project.

        Returns the name of the newly created file.

        In general, this will call the 'build_editable' hook in the backend.
        However, if that was previously called by
        'prepare_metadata_for_build_editable', and the same metadata_directory
        is used, the previously built wheel will be copied to wheel_directory.
        """
        if metadata_directory is not None:
            metadata_directory = abspath(metadata_directory)
        return self._call_hook('build_editable', {
            'wheel_directory': abspath(wheel_directory),
            'config_settings': config_settings,
            'metadata_directory': metadata_directory,
        })

    def get_requires_for_build_sdist(self, config_settings=None):
        """Identify packages required for building an sdist

        Returns a list of dependency specifications, e.g.::

            ["setuptools >= 26"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_sdist', {
            'config_settings': config_settings
        })

    def build_sdist(self, sdist_directory, config_settings=None):
        """Build an sdist from this project.

        Returns the name of the newly created file.

        This calls the 'build_sdist' backend hook in a subprocess.
        """
        return self._call_hook('build_sdist', {
            'sdist_directory': abspath(sdist_directory),
            'config_settings': config_settings,
        })

    def _call_hook(self, hook_name, kwargs):
        """Run *hook_name* in a subprocess.

        Arguments are passed via JSON files in a temporary control
        directory; error markers in the subprocess's output.json are
        translated back into the matching exception types here.
        """
        # On Python 2, pytoml returns Unicode values (which is correct) but the
        # environment passed to check_call needs to contain string values. We
        # convert here by encoding using ASCII (the backend can only contain
        # letters, digits and _, . and : characters, and will be used as a
        # Python identifier, so non-ASCII content is wrong on Python 2 in
        # any case).
        # For backend_path, we use sys.getfilesystemencoding.
        if sys.version_info[0] == 2:
            build_backend = self.build_backend.encode('ASCII')
        else:
            build_backend = self.build_backend
        extra_environ = {'PEP517_BUILD_BACKEND': build_backend}

        if self.backend_path:
            backend_path = os.pathsep.join(self.backend_path)
            if sys.version_info[0] == 2:
                backend_path = backend_path.encode(sys.getfilesystemencoding())
            extra_environ['PEP517_BACKEND_PATH'] = backend_path

        with tempdir() as td:
            hook_input = {'kwargs': kwargs}
            compat.write_json(hook_input, pjoin(td, 'input.json'),
                              indent=2)

            # Run the hook in a subprocess
            with _in_proc_script_path() as script:
                python = self.python_executable
                self._subprocess_runner(
                    [python, abspath(str(script)), hook_name, td],
                    cwd=self.source_dir,
                    extra_environ=extra_environ
                )

            data = compat.read_json(pjoin(td, 'output.json'))
            if data.get('unsupported'):
                raise UnsupportedOperation(data.get('traceback', ''))
            if data.get('no_backend'):
                raise BackendUnavailable(data.get('traceback', ''))
            if data.get('backend_invalid'):
                raise BackendInvalid(
                    backend_name=self.build_backend,
                    backend_path=self.backend_path,
                    message=data.get('backend_error', '')
                )
            if data.get('hook_missing'):
                raise HookMissing(data.get('missing_hook_name') or hook_name)
            return data['return_val']
|
||||
|
||||
|
||||
class LoggerWrapper(threading.Thread):
    """
    Read messages from a pipe and redirect them
    to a logger (see python's logging module).
    """

    def __init__(self, logger, level):
        threading.Thread.__init__(self)
        # Daemon thread: don't block interpreter exit while the pipe is open.
        self.daemon = True

        self.logger = logger
        self.level = level

        # create the pipe and reader
        self.fd_read, self.fd_write = os.pipe()
        self.reader = os.fdopen(self.fd_read)

        # Start reading immediately; run() loops until the write end closes.
        self.start()

    def fileno(self):
        # Expose the write end so this object can be passed wherever a
        # writable file descriptor is expected (e.g. subprocess stdout).
        return self.fd_write

    @staticmethod
    def remove_newline(msg):
        # Strip a single trailing platform newline, if present.
        return msg[:-1] if msg.endswith(os.linesep) else msg

    def run(self):
        # Forward each line arriving on the pipe to the logger.
        for line in self.reader:
            self._write(self.remove_newline(line))

    def _write(self, message):
        self.logger.log(self.level, message)
|
Reference in New Issue
Block a user